aqnwb 0.1.0
Loading...
Searching...
No Matches
core.hpp
Go to the documentation of this file.
1#pragma once
2
3#include <array>
4#include <string>
5#include <string_view>
6
8{
9
10const std::string version = "2.7.0";
11
12constexpr std::string_view nwb_base = R"delimiter(
13{"datasets":[{"neurodata_type_def":"NWBData","neurodata_type_inc":"Data","doc":"An abstract data type for a dataset."},{"neurodata_type_def":"TimeSeriesReferenceVectorData","neurodata_type_inc":"VectorData","default_name":"timeseries","dtype":[{"name":"idx_start","dtype":"int32","doc":"Start index into the TimeSeries 'data' and 'timestamp' datasets of the referenced TimeSeries. The first dimension of those arrays is always time."},{"name":"count","dtype":"int32","doc":"Number of data samples available in this time series, during this epoch"},{"name":"timeseries","dtype":{"target_type":"TimeSeries","reftype":"object"},"doc":"The TimeSeries that this index applies to"}],"doc":"Column storing references to a TimeSeries (rows). For each TimeSeries this VectorData column stores the start_index and count to indicate the range in time to be selected as well as an object reference to the TimeSeries."},{"neurodata_type_def":"Image","neurodata_type_inc":"NWBData","dtype":"numeric","dims":[["x","y"],["x","y","r, g, b"],["x","y","r, g, b, a"]],"shape":[[null,null],[null,null,3],[null,null,4]],"doc":"An abstract data type for an image. Shape can be 2-D (x, y), or 3-D where the third dimension can have three or four elements, e.g. (x, y, (r, g, b)) or (x, y, (r, g, b, a)).","attributes":[{"name":"resolution","dtype":"float32","doc":"Pixel resolution of the image, in pixels per centimeter.","required":false},{"name":"description","dtype":"text","doc":"Description of the image.","required":false}]},{"neurodata_type_def":"ImageReferences","neurodata_type_inc":"NWBData","dtype":{"target_type":"Image","reftype":"object"},"dims":["num_images"],"shape":[null],"doc":"Ordered dataset of references to Image objects."}],"groups":[{"neurodata_type_def":"NWBContainer","neurodata_type_inc":"Container","doc":"An abstract data type for a generic container storing collections of data and metadata. Base type for all data and metadata containers."},{"neurodata_type_def":"NWBDataInterface","neurodata_type_inc":"NWBContainer","doc":"An abstract data type for a generic container storing collections of data, as opposed to metadata."},{"neurodata_type_def":"TimeSeries","neurodata_type_inc":"NWBDataInterface","doc":"General purpose time series.","attributes":[{"name":"description","dtype":"text","default_value":"no description","doc":"Description of the time series.","required":false},{"name":"comments","dtype":"text","default_value":"no comments","doc":"Human-readable comments about the TimeSeries. This second descriptive field can be used to store additional information, or descriptive information if the primary description field is populated with a computer-readable string.","required":false}],"datasets":[{"name":"data","dims":[["num_times"],["num_times","num_DIM2"],["num_times","num_DIM2","num_DIM3"],["num_times","num_DIM2","num_DIM3","num_DIM4"]],"shape":[[null],[null,null],[null,null,null],[null,null,null,null]],"doc":"Data values. Data can be in 1-D, 2-D, 3-D, or 4-D. The first dimension should always represent time. This can also be used to store binary data (e.g., image frames). This can also be a link to data stored in an external file.","attributes":[{"name":"conversion","dtype":"float32","default_value":1.0,"doc":"Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as signed 16-bit integers (int16 range -32,768 to 32,767) that correspond to a 5V range (-2.5V to 2.5V), and the data acquisition system gain is 8000X, then the 'conversion' multiplier to get from raw data acquisition values to recorded volts is 2.5/32768/8000 = 9.5367e-9.","required":false},{"name":"offset","dtype":"float32","default_value":0.0,"doc":"Scalar to add to the data after scaling by 'conversion' to finalize its coercion to the specified 'unit'. Two common examples of this include (a) data stored in an unsigned type that requires a shift after scaling to re-center the data, and (b) specialized recording devices that naturally cause a scalar offset with respect to the true units.","required":false},{"name":"resolution","dtype":"float32","default_value":-1.0,"doc":"Smallest meaningful difference between values in data, stored in the specified by unit, e.g., the change in value of the least significant bit, or a larger number if signal noise is known to be present. If unknown, use -1.0.","required":false},{"name":"unit","dtype":"text","doc":"Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."},{"name":"continuity","dtype":"text","doc":"Optionally describe the continuity of the data. Can be \"continuous\", \"instantaneous\", or \"step\". For example, a voltage trace would be \"continuous\", because samples are recorded from a continuous process. An array of lick times would be \"instantaneous\", because the data represents distinct moments in time. Times of image presentations would be \"step\" because the picture remains the same until the next timepoint. This field is optional, but is useful in providing information about the underlying data. It may inform the way this data is interpreted, the way it is visualized, and what analysis methods are applicable.","required":false}]},{"name":"starting_time","dtype":"float64","doc":"Timestamp of the first sample in seconds. When timestamps are uniformly spaced, the timestamp of the first sample can be specified and all subsequent ones calculated from the sampling rate attribute.","quantity":"?","attributes":[{"name":"rate","dtype":"float32","doc":"Sampling rate, in Hz."},{"name":"unit","dtype":"text","value":"seconds","doc":"Unit of measurement for time, which is fixed to 'seconds'."}]},{"name":"timestamps","dtype":"float64","dims":["num_times"],"shape":[null],"doc":"Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time.","quantity":"?","attributes":[{"name":"interval","dtype":"int32","value":1,"doc":"Value is '1'"},{"name":"unit","dtype":"text","value":"seconds","doc":"Unit of measurement for timestamps, which is fixed to 'seconds'."}]},{"name":"control","dtype":"uint8","dims":["num_times"],"shape":[null],"doc":"Numerical labels that apply to each time point in data for the purpose of querying and slicing data by these values. If present, the length of this array should be the same size as the first dimension of data.","quantity":"?"},{"name":"control_description","dtype":"text","dims":["num_control_values"],"shape":[null],"doc":"Description of each control value. Must be present if control is present. If present, control_description[0] should describe time points where control == 0.","quantity":"?"}],"groups":[{"name":"sync","doc":"Lab-specific time and sync information as provided directly from hardware devices and that is necessary for aligning all acquired time information to a common timebase. The timestamp array stores time in the common timebase. This group will usually only be populated in TimeSeries that are stored external to the NWB file, in files storing raw data. Once timestamp data is calculated, the contents of 'sync' are mostly for archival purposes.","quantity":"?"}]},{"neurodata_type_def":"ProcessingModule","neurodata_type_inc":"NWBContainer","doc":"A collection of processed data.","attributes":[{"name":"description","dtype":"text","doc":"Description of this collection of processed data."}],"groups":[{"neurodata_type_inc":"NWBDataInterface","doc":"Data objects stored in this collection.","quantity":"*"},{"neurodata_type_inc":"DynamicTable","doc":"Tables stored in this collection.","quantity":"*"}]},{"neurodata_type_def":"Images","neurodata_type_inc":"NWBDataInterface","default_name":"Images","doc":"A collection of images with an optional way to specify the order of the images using the \"order_of_images\" dataset. An order must be specified if the images are referenced by index, e.g., from an IndexSeries.","attributes":[{"name":"description","dtype":"text","doc":"Description of this collection of images."}],"datasets":[{"neurodata_type_inc":"Image","doc":"Images stored in this collection.","quantity":"+"},{"name":"order_of_images","neurodata_type_inc":"ImageReferences","doc":"Ordered dataset of references to Image objects stored in the parent group. Each Image object in the Images group should be stored once and only once, so the dataset should have the same length as the number of images.","quantity":"?"}]}]})delimiter";
14
15constexpr std::string_view nwb_device = R"delimiter(
16{"groups":[{"neurodata_type_def":"Device","neurodata_type_inc":"NWBContainer","doc":"Metadata about a data acquisition device, e.g., recording system, electrode, microscope.","attributes":[{"name":"description","dtype":"text","doc":"Description of the device (e.g., model, firmware version, processing software version, etc.) as free-form text.","required":false},{"name":"manufacturer","dtype":"text","doc":"The name of the manufacturer of the device.","required":false}]}]})delimiter";
17
18constexpr std::string_view nwb_epoch = R"delimiter(
19{"groups":[{"neurodata_type_def":"TimeIntervals","neurodata_type_inc":"DynamicTable","doc":"A container for aggregating epoch data and the TimeSeries that each epoch applies to.","datasets":[{"name":"start_time","neurodata_type_inc":"VectorData","dtype":"float32","doc":"Start time of epoch, in seconds."},{"name":"stop_time","neurodata_type_inc":"VectorData","dtype":"float32","doc":"Stop time of epoch, in seconds."},{"name":"tags","neurodata_type_inc":"VectorData","dtype":"text","doc":"User-defined tags that identify or categorize events.","quantity":"?"},{"name":"tags_index","neurodata_type_inc":"VectorIndex","doc":"Index for tags.","quantity":"?"},{"name":"timeseries","neurodata_type_inc":"TimeSeriesReferenceVectorData","doc":"An index into a TimeSeries object.","quantity":"?"},{"name":"timeseries_index","neurodata_type_inc":"VectorIndex","doc":"Index for timeseries.","quantity":"?"}]}]})delimiter";
20
21constexpr std::string_view nwb_image = R"delimiter(
22{"datasets":[{"neurodata_type_def":"GrayscaleImage","neurodata_type_inc":"Image","dims":["x","y"],"shape":[null,null],"doc":"A grayscale image.","dtype":"numeric"},{"neurodata_type_def":"RGBImage","neurodata_type_inc":"Image","dims":["x","y","r, g, b"],"shape":[null,null,3],"doc":"A color image.","dtype":"numeric"},{"neurodata_type_def":"RGBAImage","neurodata_type_inc":"Image","dims":["x","y","r, g, b, a"],"shape":[null,null,4],"doc":"A color image with transparency.","dtype":"numeric"}],"groups":[{"neurodata_type_def":"ImageSeries","neurodata_type_inc":"TimeSeries","doc":"General image data that is common between acquisition and stimulus time series. Sometimes the image data is stored in the file in a raw format while other times it will be stored as a series of external image files in the host file system. The data field will either be binary data, if the data is stored in the NWB file, or empty, if the data is stored in an external image stack. [frame][x][y] or [frame][x][y][z].","datasets":[{"name":"data","dtype":"numeric","dims":[["frame","x","y"],["frame","x","y","z"]],"shape":[[null,null,null],[null,null,null,null]],"doc":"Binary data representing images across frames. If data are stored in an external file, this should be an empty 3D array."},{"name":"dimension","dtype":"int32","dims":["rank"],"shape":[null],"doc":"Number of pixels on x, y, (and z) axes.","quantity":"?"},{"name":"external_file","dtype":"text","dims":["num_files"],"shape":[null],"doc":"Paths to one or more external file(s). The field is only present if format='external'. This is only relevant if the image series is stored in the file system as one or more image file(s). This field should NOT be used if the image is stored in another NWB file and that file is linked to this file.","quantity":"?","attributes":[{"name":"starting_frame","dtype":"int32","dims":["num_files"],"shape":[null],"doc":"Each external image may contain one or more consecutive frames of the full ImageSeries. This attribute serves as an index to indicate which frames each file contains, to facilitate random access. The 'starting_frame' attribute, hence, contains a list of frame numbers within the full ImageSeries of the first frame of each file listed in the parent 'external_file' dataset. Zero-based indexing is used (hence, the first element will always be zero). For example, if the 'external_file' dataset has three paths to files and the first file has 5 frames, the second file has 10 frames, and the third file has 20 frames, then this attribute will have values [0, 5, 15]. If there is a single external file that holds all of the frames of the ImageSeries (and so there is a single element in the 'external_file' dataset), then this attribute should have value [0]."}]},{"name":"format","dtype":"text","default_value":"raw","doc":"Format of image. If this is 'external', then the attribute 'external_file' contains the path information to the image files. If this is 'raw', then the raw (single-channel) binary data is stored in the 'data' dataset. If this attribute is not present, then the default format='raw' case is assumed.","quantity":"?"}],"links":[{"name":"device","target_type":"Device","doc":"Link to the Device object that was used to capture these images.","quantity":"?"}]},{"neurodata_type_def":"ImageMaskSeries","neurodata_type_inc":"ImageSeries","doc":"An alpha mask that is applied to a presented visual stimulus. The 'data' array contains an array of mask values that are applied to the displayed image. Mask values are stored as RGBA. Mask can vary with time. The timestamps array indicates the starting time of a mask, and that mask pattern continues until it's explicitly changed.","links":[{"name":"masked_imageseries","target_type":"ImageSeries","doc":"Link to ImageSeries object that this image mask is applied to."}]},{"neurodata_type_def":"OpticalSeries","neurodata_type_inc":"ImageSeries","doc":"Image data that is presented or recorded. A stimulus template movie will be stored only as an image. When the image is presented as stimulus, additional data is required, such as field of view (e.g., how much of the visual field the image covers, or how what is the area of the target being imaged). If the OpticalSeries represents acquired imaging data, orientation is also important.","datasets":[{"name":"distance","dtype":"float32","doc":"Distance from camera/monitor to target/eye.","quantity":"?"},{"name":"field_of_view","dtype":"float32","dims":[["width, height"],["width, height, depth"]],"shape":[[2],[3]],"doc":"Width, height and depth of image, or imaged area, in meters.","quantity":"?"},{"name":"data","dtype":"numeric","dims":[["frame","x","y"],["frame","x","y","r, g, b"]],"shape":[[null,null,null],[null,null,null,3]],"doc":"Images presented to subject, either grayscale or RGB"},{"name":"orientation","dtype":"text","doc":"Description of image relative to some reference frame (e.g., which way is up). Must also specify frame of reference.","quantity":"?"}]},{"neurodata_type_def":"IndexSeries","neurodata_type_inc":"TimeSeries","doc":"Stores indices to image frames stored in an ImageSeries. The purpose of the IndexSeries is to allow a static image stack to be stored in an Images object, and the images in the stack to be referenced out-of-order. This can be for the display of individual images, or of movie segments (as a movie is simply a series of images). The data field stores the index of the frame in the referenced Images object, and the timestamps array indicates when that image was displayed.","datasets":[{"name":"data","dtype":"uint32","dims":["num_times"],"shape":[null],"doc":"Index of the image (using zero-indexing) in the linked Images object.","attributes":[{"name":"conversion","dtype":"float32","doc":"This field is unused by IndexSeries.","required":false},{"name":"resolution","dtype":"float32","doc":"This field is unused by IndexSeries.","required":false},{"name":"offset","dtype":"float32","doc":"This field is unused by IndexSeries.","required":false},{"name":"unit","dtype":"text","value":"N/A","doc":"This field is unused by IndexSeries and has the value N/A."}]}],"links":[{"name":"indexed_timeseries","target_type":"ImageSeries","doc":"Link to ImageSeries object containing images that are indexed. Use of this link is discouraged and will be deprecated. Link to an Images type instead.","quantity":"?"},{"name":"indexed_images","target_type":"Images","doc":"Link to Images object containing an ordered set of images that are indexed. The Images object must contain a 'ordered_images' dataset specifying the order of the images in the Images type.","quantity":"?"}]}]})delimiter";
23
24constexpr std::string_view nwb_file = R"delimiter(
25{"groups":[{"neurodata_type_def":"NWBFile","neurodata_type_inc":"NWBContainer","name":"root","doc":"An NWB file storing cellular-based neurophysiology data from a single experimental session.","attributes":[{"name":"nwb_version","dtype":"text","value":"2.7.0-alpha","doc":"File version string. Use semantic versioning, e.g. 1.2.1. This will be the name of the format with trailing major, minor and patch numbers."}],"datasets":[{"name":"file_create_date","dtype":"isodatetime","dims":["num_modifications"],"shape":[null],"doc":"A record of the date the file was created and of subsequent modifications. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted strings: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. The file can be created after the experiment was run, so this may differ from the experiment start time. Each modification to the nwb file adds a new entry to the array."},{"name":"identifier","dtype":"text","doc":"A unique text identifier for the file. For example, concatenated lab name, file creation date/time and experimentalist, or a hash of these and/or other values. The goal is that the string should be unique to all other files."},{"name":"session_description","dtype":"text","doc":"A description of the experimental session and data in the file."},{"name":"session_start_time","dtype":"isodatetime","doc":"Date and time of the experiment/session start. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds."},{"name":"timestamps_reference_time","dtype":"isodatetime","doc":"Date and time corresponding to time zero of all timestamps. The date is stored in UTC with local timezone offset as ISO 8601 extended formatted string: 2018-09-28T14:43:54.123+02:00. Dates stored in UTC end in \"Z\" with no timezone offset. Date accuracy is up to milliseconds. All times stored in the file use this time as reference (i.e., time zero)."}],"groups":[{"name":"acquisition","doc":"Data streams recorded from the system, including ephys, ophys, tracking, etc. This group should be read-only after the experiment is completed and timestamps are corrected to a common timebase. The data stored here may be links to raw data stored in external NWB files. This will allow keeping bulky raw data out of the file while preserving the option of keeping some/all in the file. Acquired data includes tracking and experimental data streams (i.e., everything measured from the system). If bulky data is stored in the /acquisition group, the data can exist in a separate NWB file that is linked to by the file being used for processing and analysis.","groups":[{"neurodata_type_inc":"NWBDataInterface","doc":"Acquired, raw data.","quantity":"*"},{"neurodata_type_inc":"DynamicTable","doc":"Tabular data that is relevant to acquisition","quantity":"*"}]},{"name":"analysis","doc":"Lab-specific and custom scientific analysis of data. There is no defined format for the content of this group - the format is up to the individual user/lab. To facilitate sharing analysis data between labs, the contents here should be stored in standard types (e.g., neurodata_types) and appropriately documented. The file can store lab-specific and custom data analysis without restriction on its form or schema, reducing data formatting restrictions on end users. Such data should be placed in the analysis group. The analysis data should be documented so that it could be shared with other labs.","groups":[{"neurodata_type_inc":"NWBContainer","doc":"Custom analysis results.","quantity":"*"},{"neurodata_type_inc":"DynamicTable","doc":"Tabular data that is relevant to data stored in analysis","quantity":"*"}]},{"name":"scratch","doc":"A place to store one-off analysis results. Data placed here is not intended for sharing. By placing data here, users acknowledge that there is no guarantee that their data meets any standard.","quantity":"?","groups":[{"neurodata_type_inc":"NWBContainer","doc":"Any one-off containers","quantity":"*"},{"neurodata_type_inc":"DynamicTable","doc":"Any one-off tables","quantity":"*"}],"datasets":[{"neurodata_type_inc":"ScratchData","doc":"Any one-off datasets","quantity":"*"}]},{"name":"processing","doc":"The home for ProcessingModules. These modules perform intermediate analysis of data that is necessary to perform before scientific analysis. Examples include spike clustering, extracting position from tracking data, stitching together image slices. ProcessingModules can be large and express many data sets from relatively complex analysis (e.g., spike detection and clustering) or small, representing extraction of position information from tracking video, or even binary lick/no-lick decisions. Common software tools (e.g., klustakwik, MClust) are expected to read/write data here. 'Processing' refers to intermediate analysis of the acquired data to make it more amenable to scientific analysis.","groups":[{"neurodata_type_inc":"ProcessingModule","doc":"Intermediate analysis of acquired data.","quantity":"*"}]},{"name":"stimulus","doc":"Data pushed into the system (eg, video stimulus, sound, voltage, etc) and secondary representations of that data (eg, measurements of something used as a stimulus). This group should be made read-only after experiment complete and timestamps are corrected to common timebase. Stores both presented stimuli and stimulus templates, the latter in case the same stimulus is presented multiple times, or is pulled from an external stimulus library. Stimuli are here defined as any signal that is pushed into the system as part of the experiment (eg, sound, video, voltage, etc). Many different experiments can use the same stimuli, and stimuli can be re-used during an experiment. The stimulus group is organized so that one version of template stimuli can be stored and these be used multiple times. These templates can exist in the present file or can be linked to a remote library file.","groups":[{"name":"presentation","doc":"Stimuli presented during the experiment.","groups":[{"neurodata_type_inc":"TimeSeries","doc":"TimeSeries objects containing data of presented stimuli.","quantity":"*"}]},{"name":"templates","doc":"Template stimuli. Timestamps in templates are based on stimulus design and are relative to the beginning of the stimulus. When templates are used, the stimulus instances must convert presentation times to the experiment`s time reference frame.","groups":[{"neurodata_type_inc":"TimeSeries","doc":"TimeSeries objects containing template data of presented stimuli.","quantity":"*"},{"neurodata_type_inc":"Images","doc":"Images objects containing images of presented stimuli.","quantity":"*"}]}]},{"name":"general","doc":"Experimental metadata, including protocol, notes and description of hardware device(s). The metadata stored in this section should be used to describe the experiment. Metadata necessary for interpreting the data is stored with the data. General experimental metadata, including animal strain, experimental protocols, experimenter, devices, etc, are stored under 'general'. Core metadata (e.g., that required to interpret data fields) is stored with the data itself, and implicitly defined by the file specification (e.g., time is in seconds). The strategy used here for storing non-core metadata is to use free-form text fields, such as would appear in sentences or paragraphs from a Methods section. Metadata fields are text to enable them to be more general, for example to represent ranges instead of numerical values. Machine-readable metadata is stored as attributes to these free-form datasets. All entries in the below table are to be included when data is present. Unused groups (e.g., intracellular_ephys in an optophysiology experiment) should not be created unless there is data to store within them.","datasets":[{"name":"data_collection","dtype":"text","doc":"Notes about data collection and analysis.","quantity":"?"},{"name":"experiment_description","dtype":"text","doc":"General description of the experiment.","quantity":"?"},{"name":"experimenter","dtype":"text","doc":"Name of person(s) who performed the experiment. Can also specify roles of different people involved.","quantity":"?","dims":["num_experimenters"],"shape":[null]},{"name":"institution","dtype":"text","doc":"Institution(s) where experiment was performed.","quantity":"?"},{"name":"keywords","dtype":"text","dims":["num_keywords"],"shape":[null],"doc":"Terms to search over.","quantity":"?"},{"name":"lab","dtype":"text","doc":"Laboratory where experiment was performed.","quantity":"?"},{"name":"notes","dtype":"text","doc":"Notes about the experiment.","quantity":"?"},{"name":"pharmacology","dtype":"text","doc":"Description of drugs used, including how and when they were administered. Anesthesia(s), painkiller(s), etc., plus dosage, concentration, etc.","quantity":"?"},{"name":"protocol","dtype":"text","doc":"Experimental protocol, if applicable. e.g., include IACUC protocol number.","quantity":"?"},{"name":"related_publications","dtype":"text","doc":"Publication information. PMID, DOI, URL, etc.","dims":["num_publications"],"shape":[null],"quantity":"?"},{"name":"session_id","dtype":"text","doc":"Lab-specific ID for the session.","quantity":"?"},{"name":"slices","dtype":"text","doc":"Description of slices, including information about preparation thickness, orientation, temperature, and bath solution.","quantity":"?"},{"name":"source_script","dtype":"text","doc":"Script file or link to public source code used to create this NWB file.","quantity":"?","attributes":[{"name":"file_name","dtype":"text","doc":"Name of script file."}]},{"name":"stimulus","dtype":"text","doc":"Notes about stimuli, such as how and where they were presented.","quantity":"?"},{"name":"surgery","dtype":"text","doc":"Narrative description about surgery/surgeries, including date(s) and who performed surgery.","quantity":"?"},{"name":"virus","dtype":"text","doc":"Information about virus(es) used in experiments, including virus ID, source, date made, injection location, volume, etc.","quantity":"?"}],"groups":[{"neurodata_type_inc":"LabMetaData","doc":"Place-holder than can be extended so that lab-specific meta-data can be placed in /general.","quantity":"*"},{"name":"devices","doc":"Description of hardware devices used during experiment, e.g., monitors, ADC boards, microscopes, etc.","quantity":"?","groups":[{"neurodata_type_inc":"Device","doc":"Data acquisition devices.","quantity":"*"}]},{"name":"subject","neurodata_type_inc":"Subject","doc":"Information about the animal or person from which the data was measured.","quantity":"?"},{"name":"extracellular_ephys","doc":"Metadata related to extracellular electrophysiology.","quantity":"?","groups":[{"neurodata_type_inc":"ElectrodeGroup","doc":"Physical group of electrodes.","quantity":"*"},{"name":"electrodes","neurodata_type_inc":"DynamicTable","doc":"A table of all electrodes (i.e. channels) used for recording.","quantity":"?","datasets":[{"name":"x","neurodata_type_inc":"VectorData","dtype":"float32","doc":"x coordinate of the channel location in the brain (+x is posterior).","quantity":"?"},{"name":"y","neurodata_type_inc":"VectorData","dtype":"float32","doc":"y coordinate of the channel location in the brain (+y is inferior).","quantity":"?"},{"name":"z","neurodata_type_inc":"VectorData","dtype":"float32","doc":"z coordinate of the channel location in the brain (+z is right).","quantity":"?"},{"name":"imp","neurodata_type_inc":"VectorData","dtype":"float32","doc":"Impedance of the channel, in ohms.","quantity":"?"},{"name":"location","neurodata_type_inc":"VectorData","dtype":"text","doc":"Location of the electrode (channel). Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible."},{"name":"filtering","neurodata_type_inc":"VectorData","dtype":"text","doc":"Description of hardware filtering, including the filter name and frequency cutoffs.","quantity":"?"},{"name":"group","neurodata_type_inc":"VectorData","dtype":{"target_type":"ElectrodeGroup","reftype":"object"},"doc":"Reference to the ElectrodeGroup this electrode is a part of."},{"name":"group_name","neurodata_type_inc":"VectorData","dtype":"text","doc":"Name of the ElectrodeGroup this electrode is a part of."},{"name":"rel_x","neurodata_type_inc":"VectorData","dtype":"float32","doc":"x coordinate in electrode group","quantity":"?"},{"name":"rel_y","neurodata_type_inc":"VectorData","dtype":"float32","doc":"y coordinate in electrode group","quantity":"?"},{"name":"rel_z","neurodata_type_inc":"VectorData","dtype":"float32","doc":"z coordinate in electrode group","quantity":"?"},{"name":"reference","neurodata_type_inc":"VectorData","dtype":"text","doc":"Description of the reference electrode and/or reference scheme used for this electrode, e.g., \"stainless steel skull screw\" or \"online common average referencing\".","quantity":"?"}]}]},{"name":"intracellular_ephys","doc":"Metadata related to intracellular electrophysiology.","quantity":"?","datasets":[{"name":"filtering","dtype":"text","doc":"[DEPRECATED] Use IntracellularElectrode.filtering instead. Description of filtering used. Includes filtering type and parameters, frequency fall-off, etc. If this changes between TimeSeries, filter description should be stored as a text attribute for each TimeSeries.","quantity":"?"}],"groups":[{"neurodata_type_inc":"IntracellularElectrode","doc":"An intracellular electrode.","quantity":"*"},{"name":"sweep_table","neurodata_type_inc":"SweepTable","doc":"[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable and ExperimentalConditions tables provide enhanced support for experiment metadata.","quantity":"?"},{"name":"intracellular_recordings","neurodata_type_inc":"IntracellularRecordingsTable","doc":"A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response are recorded as as part of an experiment. In this case both, the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.","quantity":"?"},{"name":"simultaneous_recordings","neurodata_type_inc":"SimultaneousRecordingsTable","doc":"A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes","quantity":"?"},{"name":"sequential_recordings","neurodata_type_inc":"SequentialRecordingsTable","doc":"A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where the a sequence of stimuli of the same type with varying parameters have been presented in a sequence.","quantity":"?"},{"name":"repetitions","neurodata_type_inc":"RepetitionsTable","doc":"A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.","quantity":"?"},{"name":"experimental_conditions","neurodata_type_inc":"ExperimentalConditionsTable","doc":"A table for grouping different intracellular recording repetitions together that belong to the same experimental experimental_conditions.","quantity":"?"}]},{"name":"optogenetics","doc":"Metadata describing optogenetic stimuluation.","quantity":"?","groups":[{"neurodata_type_inc":"OptogeneticStimulusSite","doc":"An optogenetic stimulation site.","quantity":"*"}]},{"name":"optophysiology","doc":"Metadata related to optophysiology.","quantity":"?","groups":[{"neurodata_type_inc":"ImagingPlane","doc":"An imaging plane.","quantity":"*"}]}]},{"name":"intervals","doc":"Experimental intervals, whether that be logically distinct sub-experiments having a particular scientific goal, trials (see trials subgroup) during an experiment, or epochs (see epochs subgroup) deriving from analysis of data.","quantity":"?","groups":[{"name":"epochs","neurodata_type_inc":"TimeIntervals","doc":"Divisions in time marking experimental stages or sub-divisions of a single recording session.","quantity":"?"},{"name":"trials","neurodata_type_inc":"TimeIntervals","doc":"Repeated experimental events that have a logical grouping.","quantity":"?"},{"name":"invalid_times","neurodata_type_inc":"TimeIntervals","doc":"Time intervals that should be removed from analysis.","quantity":"?"},{"neurodata_type_inc":"TimeIntervals","doc":"Optional additional table(s) for describing other experimental time intervals.","quantity":"*"}]},{"name":"units","neurodata_type_inc":"Units","doc":"Data about sorted spike units.","quantity":"?"}]},{"neurodata_type_def":"LabMetaData","neurodata_type_inc":"NWBContainer","doc":"Lab-specific meta-data."},{"neurodata_type_def":"Subject","neurodata_type_inc":"NWBContainer","doc":"Information about the animal or person from which the data was measured.","datasets":[{"name":"age","dtype":"text","doc":"Age of subject. Can be supplied instead of 'date_of_birth'.","quantity":"?","attributes":[{"name":"reference","doc":"Age is with reference to this event. Can be 'birth' or 'gestational'. If reference is omitted, 'birth' is implied.","dtype":"text","required":false,"default_value":"birth"}]},{"name":"date_of_birth","dtype":"isodatetime","doc":"Date of birth of subject. Can be supplied instead of 'age'.","quantity":"?"},{"name":"description","dtype":"text","doc":"Description of subject and where subject came from (e.g., breeder, if animal).","quantity":"?"},{"name":"genotype","dtype":"text","doc":"Genetic strain. If absent, assume Wild Type (WT).","quantity":"?"},{"name":"sex","dtype":"text","doc":"Gender of subject.","quantity":"?"},{"name":"species","dtype":"text","doc":"Species of subject.","quantity":"?"},{"name":"strain","dtype":"text","doc":"Strain of subject.","quantity":"?"},{"name":"subject_id","dtype":"text","doc":"ID of animal/person used/participating in experiment (lab convention).","quantity":"?"},{"name":"weight","dtype":"text","doc":"Weight at time of experiment, at time of surgery and at other important times.","quantity":"?"}]}],"datasets":[{"neurodata_type_def":"ScratchData","neurodata_type_inc":"NWBData","doc":"Any one-off datasets","attributes":[{"name":"notes","doc":"Any notes the user has about the dataset being stored","dtype":"text"}]}]})delimiter";
26
27constexpr std::string_view nwb_misc = R"delimiter(
28{"groups":[{"neurodata_type_def":"AbstractFeatureSeries","neurodata_type_inc":"TimeSeries","doc":"Abstract features, such as quantitative descriptions of sensory stimuli. The TimeSeries::data field is a 2D array, storing those features (e.g., for visual grating stimulus this might be orientation, spatial frequency and contrast). Null stimuli (eg, uniform gray) can be marked as being an independent feature (eg, 1.0 for gray, 0.0 for actual stimulus) or by storing NaNs for feature values, or through use of the TimeSeries::control fields. A set of features is considered to persist until the next set of features is defined. The final set of features stored should be the null set. This is useful when storing the raw stimulus is impractical.","datasets":[{"name":"data","dtype":"numeric","dims":[["num_times"],["num_times","num_features"]],"shape":[[null],[null,null]],"doc":"Values of each feature at each time.","attributes":[{"name":"unit","dtype":"text","default_value":"see 'feature_units'","doc":"Since there can be different units for different features, store the units in 'feature_units'. The default value for this attribute is \"see 'feature_units'\".","required":false}]},{"name":"feature_units","dtype":"text","dims":["num_features"],"shape":[null],"doc":"Units of each feature.","quantity":"?"},{"name":"features","dtype":"text","dims":["num_features"],"shape":[null],"doc":"Description of the features represented in TimeSeries::data."}]},{"neurodata_type_def":"AnnotationSeries","neurodata_type_inc":"TimeSeries","doc":"Stores user annotations made during an experiment. The data[] field stores a text array, and timestamps are stored for each annotation (ie, interval=1). This is largely an alias to a standard TimeSeries storing a text array but that is identifiable as storing annotations in a machine-readable way.","datasets":[{"name":"data","dtype":"text","dims":["num_times"],"shape":[null],"doc":"Annotations made during an experiment.","attributes":[{"name":"resolution","dtype":"float32","value":-1.0,"doc":"Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0."},{"name":"unit","dtype":"text","value":"n/a","doc":"Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'."}]}]},{"neurodata_type_def":"IntervalSeries","neurodata_type_inc":"TimeSeries","doc":"Stores intervals of data. The timestamps field stores the beginning and end of intervals. The data field stores whether the interval just started (>0 value) or ended (<0 value). Different interval types can be represented in the same series by using multiple key values (eg, 1 for feature A, 2 for feature B, 3 for feature C, etc). The field data stores an 8-bit integer. This is largely an alias of a standard TimeSeries but that is identifiable as representing time intervals in a machine-readable way.","datasets":[{"name":"data","dtype":"int8","dims":["num_times"],"shape":[null],"doc":"Use values >0 if interval started, <0 if interval ended.","attributes":[{"name":"resolution","dtype":"float32","value":-1.0,"doc":"Smallest meaningful difference between values in data. Annotations have no units, so the value is fixed to -1.0."},{"name":"unit","dtype":"text","value":"n/a","doc":"Base unit of measurement for working with the data. Annotations have no units, so the value is fixed to 'n/a'."}]}]},{"neurodata_type_def":"DecompositionSeries","neurodata_type_inc":"TimeSeries","doc":"Spectral analysis of a time series, e.g. of an LFP or a speech signal.","datasets":[{"name":"data","dtype":"numeric","dims":["num_times","num_channels","num_bands"],"shape":[null,null,null],"doc":"Data decomposed into frequency bands.","attributes":[{"name":"unit","dtype":"text","default_value":"no unit","doc":"Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion'."}]},{"name":"metric","dtype":"text","doc":"The metric used, e.g. phase, amplitude, power."},{"name":"source_channels","neurodata_type_inc":"DynamicTableRegion","doc":"DynamicTableRegion pointer to the channels that this decomposition series was generated from.","quantity":"?"}],"groups":[{"name":"bands","neurodata_type_inc":"DynamicTable","doc":"Table for describing the bands that this series was generated from. There should be one row in this table for each band.","datasets":[{"name":"band_name","neurodata_type_inc":"VectorData","dtype":"text","doc":"Name of the band, e.g. theta."},{"name":"band_limits","neurodata_type_inc":"VectorData","dtype":"float32","dims":["num_bands","low, high"],"shape":[null,2],"doc":"Low and high limit of each band in Hz. If it is a Gaussian filter, use 2 SD on either side of the center."},{"name":"band_mean","neurodata_type_inc":"VectorData","dtype":"float32","dims":["num_bands"],"shape":[null],"doc":"The mean Gaussian filters, in Hz."},{"name":"band_stdev","neurodata_type_inc":"VectorData","dtype":"float32","dims":["num_bands"],"shape":[null],"doc":"The standard deviation of Gaussian filters, in Hz."}]}],"links":[{"name":"source_timeseries","target_type":"TimeSeries","doc":"Link to TimeSeries object that this data was calculated from. Metadata about electrodes and their position can be read from that ElectricalSeries so it is not necessary to store that information here.","quantity":"?"}]},{"neurodata_type_def":"Units","neurodata_type_inc":"DynamicTable","default_name":"Units","doc":"Data about spiking units. Event times of observed units (e.g. cell, synapse, etc.) should be concatenated and stored in spike_times.","datasets":[{"name":"spike_times_index","neurodata_type_inc":"VectorIndex","doc":"Index into the spike_times dataset.","quantity":"?"},{"name":"spike_times","neurodata_type_inc":"VectorData","dtype":"float64","doc":"Spike times for each unit in seconds.","quantity":"?","attributes":[{"name":"resolution","dtype":"float64","doc":"The smallest possible difference between two spike times. Usually 1 divided by the acquisition sampling rate from which spike times were extracted, but could be larger if the acquisition time series was downsampled or smaller if the acquisition time series was smoothed/interpolated and it is possible for the spike time to be between samples.","required":false}]},{"name":"obs_intervals_index","neurodata_type_inc":"VectorIndex","doc":"Index into the obs_intervals dataset.","quantity":"?"},{"name":"obs_intervals","neurodata_type_inc":"VectorData","dtype":"float64","dims":["num_intervals","start|end"],"shape":[null,2],"doc":"Observation intervals for each unit.","quantity":"?"},{"name":"electrodes_index","neurodata_type_inc":"VectorIndex","doc":"Index into electrodes.","quantity":"?"},{"name":"electrodes","neurodata_type_inc":"DynamicTableRegion","doc":"Electrode that each spike unit came from, specified using a DynamicTableRegion.","quantity":"?"},{"name":"electrode_group","neurodata_type_inc":"VectorData","dtype":{"target_type":"ElectrodeGroup","reftype":"object"},"doc":"Electrode group that each spike unit came from.","quantity":"?"},{"name":"waveform_mean","neurodata_type_inc":"VectorData","dtype":"float32","dims":[["num_units","num_samples"],["num_units","num_samples","num_electrodes"]],"shape":[[null,null],[null,null,null]],"doc":"Spike waveform mean for each spike unit.","quantity":"?","attributes":[{"name":"sampling_rate","dtype":"float32","doc":"Sampling rate, in hertz.","required":false},{"name":"unit","dtype":"text","value":"volts","doc":"Unit of measurement. This value is fixed to 'volts'.","required":false}]},{"name":"waveform_sd","neurodata_type_inc":"VectorData","dtype":"float32","dims":[["num_units","num_samples"],["num_units","num_samples","num_electrodes"]],"shape":[[null,null],[null,null,null]],"doc":"Spike waveform standard deviation for each spike unit.","quantity":"?","attributes":[{"name":"sampling_rate","dtype":"float32","doc":"Sampling rate, in hertz.","required":false},{"name":"unit","dtype":"text","value":"volts","doc":"Unit of measurement. This value is fixed to 'volts'.","required":false}]},{"name":"waveforms","neurodata_type_inc":"VectorData","dtype":"numeric","dims":["num_waveforms","num_samples"],"shape":[null,null],"doc":"Individual waveforms for each spike on each electrode. This is a doubly indexed column. The 'waveforms_index' column indexes which waveforms in this column belong to the same spike event for a given unit, where each waveform was recorded from a different electrode. The 'waveforms_index_index' column indexes the 'waveforms_index' column to indicate which spike events belong to a given unit. For example, if the 'waveforms_index_index' column has values [2, 5, 6], then the first 2 elements of the 'waveforms_index' column correspond to the 2 spike events of the first unit, the next 3 elements of the 'waveforms_index' column correspond to the 3 spike events of the second unit, and the next 1 element of the 'waveforms_index' column corresponds to the 1 spike event of the third unit. If the 'waveforms_index' column has values [3, 6, 8, 10, 12, 13], then the first 3 elements of the 'waveforms' column contain the 3 spike waveforms that were recorded from 3 different electrodes for the first spike time of the first unit. See https://nwb-schema.readthedocs.io/en/stable/format_description.html#doubly-ragged-arrays for a graphical representation of this example. When there is only one electrode for each unit (i.e., each spike time is associated with a single waveform), then the 'waveforms_index' column will have values 1, 2, ..., N, where N is the number of spike events. The number of electrodes for each spike event should be the same within a given unit. The 'electrodes' column should be used to indicate which electrodes are associated with each unit, and the order of the waveforms within a given unit x spike event should be in the same order as the electrodes referenced in the 'electrodes' column of this table. The number of samples for each waveform must be the same.","quantity":"?","attributes":[{"name":"sampling_rate","dtype":"float32","doc":"Sampling rate, in hertz.","required":false},{"name":"unit","dtype":"text","value":"volts","doc":"Unit of measurement. This value is fixed to 'volts'.","required":false}]},{"name":"waveforms_index","neurodata_type_inc":"VectorIndex","doc":"Index into the waveforms dataset. One value for every spike event. See 'waveforms' for more detail.","quantity":"?"},{"name":"waveforms_index_index","neurodata_type_inc":"VectorIndex","doc":"Index into the waveforms_index dataset. One value for every unit (row in the table). See 'waveforms' for more detail.","quantity":"?"}]}]})delimiter";
29
30constexpr std::string_view nwb_behavior = R"delimiter(
31{"groups":[{"neurodata_type_def":"SpatialSeries","neurodata_type_inc":"TimeSeries","doc":"Direction, e.g., of gaze or travel, or position. The TimeSeries::data field is a 2D array storing position or direction relative to some reference frame. Array structure: [num measurements] [num dimensions]. Each SpatialSeries has a text dataset reference_frame that indicates the zero-position, or the zero-axes for direction. For example, if representing gaze direction, 'straight-ahead' might be a specific pixel on the monitor, or some other point in space. For position data, the 0,0 point might be the top-left corner of an enclosure, as viewed from the tracking camera. The unit of data will indicate how to interpret SpatialSeries values.","datasets":[{"name":"data","dtype":"numeric","dims":[["num_times"],["num_times","x"],["num_times","x,y"],["num_times","x,y,z"]],"shape":[[null],[null,1],[null,2],[null,3]],"doc":"1-D or 2-D array storing position or direction relative to some reference frame.","attributes":[{"name":"unit","dtype":"text","default_value":"meters","doc":"Base unit of measurement for working with the data. The default value is 'meters'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'.","required":false}]},{"name":"reference_frame","dtype":"text","doc":"Description defining what exactly 'straight-ahead' means.","quantity":"?"}]},{"neurodata_type_def":"BehavioralEpochs","neurodata_type_inc":"NWBDataInterface","default_name":"BehavioralEpochs","doc":"TimeSeries for storing behavioral epochs. The objective of this and the other two Behavioral interfaces (e.g. BehavioralEvents and BehavioralTimeSeries) is to provide generic hooks for software tools/scripts. This allows a tool/script to take the output one specific interface (e.g., UnitTimes) and plot that data relative to another data modality (e.g., behavioral events) without having to define all possible modalities in advance. Declaring one of these interfaces means that one or more TimeSeries of the specified type is published. These TimeSeries should reside in a group having the same name as the interface. For example, if a BehavioralTimeSeries interface is declared, the module will have one or more TimeSeries defined in the module sub-group 'BehavioralTimeSeries'. BehavioralEpochs should use IntervalSeries. BehavioralEvents is used for irregular events. BehavioralTimeSeries is for continuous data.","groups":[{"neurodata_type_inc":"IntervalSeries","doc":"IntervalSeries object containing start and stop times of epochs.","quantity":"*"}]},{"neurodata_type_def":"BehavioralEvents","neurodata_type_inc":"NWBDataInterface","default_name":"BehavioralEvents","doc":"TimeSeries for storing behavioral events. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.","groups":[{"neurodata_type_inc":"TimeSeries","doc":"TimeSeries object containing behavioral events.","quantity":"*"}]},{"neurodata_type_def":"BehavioralTimeSeries","neurodata_type_inc":"NWBDataInterface","default_name":"BehavioralTimeSeries","doc":"TimeSeries for storing Behavoioral time series data. See description of <a href=\"#BehavioralEpochs\">BehavioralEpochs</a> for more details.","groups":[{"neurodata_type_inc":"TimeSeries","doc":"TimeSeries object containing continuous behavioral data.","quantity":"*"}]},{"neurodata_type_def":"PupilTracking","neurodata_type_inc":"NWBDataInterface","default_name":"PupilTracking","doc":"Eye-tracking data, representing pupil size.","groups":[{"neurodata_type_inc":"TimeSeries","doc":"TimeSeries object containing time series data on pupil size.","quantity":"+"}]},{"neurodata_type_def":"EyeTracking","neurodata_type_inc":"NWBDataInterface","default_name":"EyeTracking","doc":"Eye-tracking data, representing direction of gaze.","groups":[{"neurodata_type_inc":"SpatialSeries","doc":"SpatialSeries object containing data measuring direction of gaze.","quantity":"*"}]},{"neurodata_type_def":"CompassDirection","neurodata_type_inc":"NWBDataInterface","default_name":"CompassDirection","doc":"With a CompassDirection interface, a module publishes a SpatialSeries object representing a floating point value for theta. The SpatialSeries::reference_frame field should indicate what direction corresponds to 0 and which is the direction of rotation (this should be clockwise). The si_unit for the SpatialSeries should be radians or degrees.","groups":[{"neurodata_type_inc":"SpatialSeries","doc":"SpatialSeries object containing direction of gaze travel.","quantity":"*"}]},{"neurodata_type_def":"Position","neurodata_type_inc":"NWBDataInterface","default_name":"Position","doc":"Position data, whether along the x, x/y or x/y/z axis.","groups":[{"neurodata_type_inc":"SpatialSeries","doc":"SpatialSeries object containing position data.","quantity":"+"}]}]})delimiter";
32
33constexpr std::string_view nwb_ecephys = R"delimiter(
34{"groups":[{"neurodata_type_def":"ElectricalSeries","neurodata_type_inc":"TimeSeries","doc":"A time series of acquired voltage data from extracellular recordings. The data field is an int or float array storing data in volts. The first dimension should always represent time. The second dimension, if present, should represent channels.","attributes":[{"name":"filtering","dtype":"text","doc":"Filtering applied to all channels of the data. For example, if this ElectricalSeries represents high-pass-filtered data (also known as AP Band), then this value could be \"High-pass 4-pole Bessel filter at 500 Hz\". If this ElectricalSeries represents low-pass-filtered LFP data and the type of filter is unknown, then this value could be \"Low-pass filter at 300 Hz\". If a non-standard filter type is used, provide as much detail about the filter properties as possible.","required":false}],"datasets":[{"name":"data","dtype":"numeric","dims":[["num_times"],["num_times","num_channels"],["num_times","num_channels","num_samples"]],"shape":[[null],[null,null],[null,null,null]],"doc":"Recorded voltage data.","attributes":[{"name":"unit","dtype":"text","value":"volts","doc":"Base unit of measurement for working with the data. This value is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion', followed by 'channel_conversion' (if present), and then add 'offset'."}]},{"name":"electrodes","neurodata_type_inc":"DynamicTableRegion","doc":"DynamicTableRegion pointer to the electrodes that this time series was generated from."},{"name":"channel_conversion","dtype":"float32","dims":["num_channels"],"shape":[null],"doc":"Channel-specific conversion factor. Multiply the data in the 'data' dataset by these values along the channel axis (as indicated by axis attribute) AND by the global conversion factor in the 'conversion' attribute of 'data' to get the data values in Volts, i.e, data in Volts = data * data.conversion * channel_conversion. This approach allows for both global and per-channel data conversion factors needed to support the storage of electrical recordings as native values generated by data acquisition systems. If this dataset is not present, then there is no channel-specific conversion factor, i.e. it is 1 for all channels.","quantity":"?","attributes":[{"name":"axis","dtype":"int32","value":1,"doc":"The zero-indexed axis of the 'data' dataset that the channel-specific conversion factor corresponds to. This value is fixed to 1."}]}]},{"neurodata_type_def":"SpikeEventSeries","neurodata_type_inc":"ElectricalSeries","doc":"Stores snapshots/snippets of recorded spike events (i.e., threshold crossings). This may also be raw data, as reported by ephys hardware. If so, the TimeSeries::description field should describe how events were detected. All SpikeEventSeries should reside in a module (under EventWaveform interface) even if the spikes were reported and stored by hardware. All events span the same recording channels and store snapshots of equal duration. TimeSeries::data array structure: [num events] [num channels] [num samples] (or [num events] [num samples] for single electrode).","datasets":[{"name":"data","dtype":"numeric","dims":[["num_events","num_samples"],["num_events","num_channels","num_samples"]],"shape":[[null,null],[null,null,null]],"doc":"Spike waveforms.","attributes":[{"name":"unit","dtype":"text","value":"volts","doc":"Unit of measurement for waveforms, which is fixed to 'volts'."}]},{"name":"timestamps","dtype":"float64","dims":["num_times"],"shape":[null],"doc":"Timestamps for samples stored in data, in seconds, relative to the common experiment master-clock stored in NWBFile.timestamps_reference_time. Timestamps are required for the events. Unlike for TimeSeries, timestamps are required for SpikeEventSeries and are thus re-specified here.","attributes":[{"name":"interval","dtype":"int32","value":1,"doc":"Value is '1'"},{"name":"unit","dtype":"text","value":"seconds","doc":"Unit of measurement for timestamps, which is fixed to 'seconds'."}]}]},{"neurodata_type_def":"FeatureExtraction","neurodata_type_inc":"NWBDataInterface","default_name":"FeatureExtraction","doc":"Features, such as PC1 and PC2, that are extracted from signals stored in a SpikeEventSeries or other source.","datasets":[{"name":"description","dtype":"text","dims":["num_features"],"shape":[null],"doc":"Description of features (eg, ''PC1'') for each of the extracted features."},{"name":"features","dtype":"float32","dims":["num_events","num_channels","num_features"],"shape":[null,null,null],"doc":"Multi-dimensional array of features extracted from each event."},{"name":"times","dtype":"float64","dims":["num_events"],"shape":[null],"doc":"Times of events that features correspond to (can be a link)."},{"name":"electrodes","neurodata_type_inc":"DynamicTableRegion","doc":"DynamicTableRegion pointer to the electrodes that this time series was generated from."}]},{"neurodata_type_def":"EventDetection","neurodata_type_inc":"NWBDataInterface","default_name":"EventDetection","doc":"Detected spike events from voltage trace(s).","datasets":[{"name":"detection_method","dtype":"text","doc":"Description of how events were detected, such as voltage threshold, or dV/dT threshold, as well as relevant values."},{"name":"source_idx","dtype":"int32","dims":["num_events"],"shape":[null],"doc":"Indices (zero-based) into source ElectricalSeries::data array corresponding to time of event. ''description'' should define what is meant by time of event (e.g., .25 ms before action potential peak, zero-crossing time, etc). The index points to each event from the raw data."},{"name":"times","dtype":"float64","dims":["num_events"],"shape":[null],"doc":"Timestamps of events, in seconds.","attributes":[{"name":"unit","dtype":"text","value":"seconds","doc":"Unit of measurement for event times, which is fixed to 'seconds'."}]}],"links":[{"name":"source_electricalseries","target_type":"ElectricalSeries","doc":"Link to the ElectricalSeries that this data was calculated from. Metadata about electrodes and their position can be read from that ElectricalSeries so it's not necessary to include that information here."}]},{"neurodata_type_def":"EventWaveform","neurodata_type_inc":"NWBDataInterface","default_name":"EventWaveform","doc":"Represents either the waveforms of detected events, as extracted from a raw data trace in /acquisition, or the event waveforms that were stored during experiment acquisition.","groups":[{"neurodata_type_inc":"SpikeEventSeries","doc":"SpikeEventSeries object(s) containing detected spike event waveforms.","quantity":"*"}]},{"neurodata_type_def":"FilteredEphys","neurodata_type_inc":"NWBDataInterface","default_name":"FilteredEphys","doc":"Electrophysiology data from one or more channels that has been subjected to filtering. Examples of filtered data include Theta and Gamma (LFP has its own interface). FilteredEphys modules publish an ElectricalSeries for each filtered channel or set of channels. The name of each ElectricalSeries is arbitrary but should be informative. The source of the filtered data, whether this is from analysis of another time series or as acquired by hardware, should be noted in each's TimeSeries::description field. There is no assumed 1::1 correspondence between filtered ephys signals and electrodes, as a single signal can apply to many nearby electrodes, and one electrode may have different filtered (e.g., theta and/or gamma) signals represented. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.","groups":[{"neurodata_type_inc":"ElectricalSeries","doc":"ElectricalSeries object(s) containing filtered electrophysiology data.","quantity":"+"}]},{"neurodata_type_def":"LFP","neurodata_type_inc":"NWBDataInterface","default_name":"LFP","doc":"LFP data from one or more channels. The electrode map in each published ElectricalSeries will identify which channels are providing LFP data. Filter properties should be noted in the ElectricalSeries 'filtering' attribute.","groups":[{"neurodata_type_inc":"ElectricalSeries","doc":"ElectricalSeries object(s) containing LFP data for one or more channels.","quantity":"+"}]},{"neurodata_type_def":"ElectrodeGroup","neurodata_type_inc":"NWBContainer","doc":"A physical grouping of electrodes, e.g. a shank of an array.","attributes":[{"name":"description","dtype":"text","doc":"Description of this electrode group."},{"name":"location","dtype":"text","doc":"Location of electrode group. Specify the area, layer, comments on estimation of area/layer, etc. Use standard atlas names for anatomical regions when possible."}],"datasets":[{"name":"position","dtype":[{"name":"x","dtype":"float32","doc":"x coordinate"},{"name":"y","dtype":"float32","doc":"y coordinate"},{"name":"z","dtype":"float32","doc":"z coordinate"}],"doc":"stereotaxic or common framework coordinates","quantity":"?"}],"links":[{"name":"device","target_type":"Device","doc":"Link to the device that was used to record from this electrode group."}]},{"neurodata_type_def":"ClusterWaveforms","neurodata_type_inc":"NWBDataInterface","default_name":"ClusterWaveforms","doc":"DEPRECATED The mean waveform shape, including standard deviation, of the different clusters. Ideally, the waveform analysis should be performed on data that is only high-pass filtered. This is a separate module because it is expected to require updating. For example, IMEC probes may require different storage requirements to store/display mean waveforms, requiring a new interface or an extension of this one.","datasets":[{"name":"waveform_filtering","dtype":"text","doc":"Filtering applied to data before generating mean/sd"},{"name":"waveform_mean","dtype":"float32","dims":["num_clusters","num_samples"],"shape":[null,null],"doc":"The mean waveform for each cluster, using the same indices for each wave as cluster numbers in the associated Clustering module (i.e, cluster 3 is in array slot [3]). Waveforms corresponding to gaps in cluster sequence should be empty (e.g., zero- filled)"},{"name":"waveform_sd","dtype":"float32","dims":["num_clusters","num_samples"],"shape":[null,null],"doc":"Stdev of waveforms for each cluster, using the same indices as in mean"}],"links":[{"name":"clustering_interface","target_type":"Clustering","doc":"Link to Clustering interface that was the source of the clustered data"}]},{"neurodata_type_def":"Clustering","neurodata_type_inc":"NWBDataInterface","default_name":"Clustering","doc":"DEPRECATED Clustered spike data, whether from automatic clustering tools (e.g., klustakwik) or as a result of manual sorting.","datasets":[{"name":"description","dtype":"text","doc":"Description of clusters or clustering, (e.g. cluster 0 is noise, clusters curated using Klusters, etc)"},{"name":"num","dtype":"int32","dims":["num_events"],"shape":[null],"doc":"Cluster number of each event"},{"name":"peak_over_rms","dtype":"float32","dims":["num_clusters"],"shape":[null],"doc":"Maximum ratio of waveform peak to RMS on any channel in the cluster (provides a basic clustering metric)."},{"name":"times","dtype":"float64","dims":["num_events"],"shape":[null],"doc":"Times of clustered events, in seconds. This may be a link to times field in associated FeatureExtraction module."}]}]})delimiter";
35
36constexpr std::string_view nwb_icephys = R"delimiter(
37{"groups":[{"neurodata_type_def":"PatchClampSeries","neurodata_type_inc":"TimeSeries","doc":"An abstract base class for patch-clamp data - stimulus or response, current or voltage.","attributes":[{"name":"stimulus_description","dtype":"text","doc":"Protocol/stimulus name for this patch-clamp dataset."},{"name":"sweep_number","dtype":"uint32","doc":"Sweep number, allows to group different PatchClampSeries together.","required":false}],"datasets":[{"name":"data","dtype":"numeric","dims":["num_times"],"shape":[null],"doc":"Recorded voltage or current.","attributes":[{"name":"unit","dtype":"text","doc":"Base unit of measurement for working with the data. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."}]},{"name":"gain","dtype":"float32","doc":"Gain of the recording, in units Volt/Amp (v-clamp) or Volt/Volt (c-clamp).","quantity":"?"}],"links":[{"name":"electrode","target_type":"IntracellularElectrode","doc":"Link to IntracellularElectrode object that describes the electrode that was used to apply or record this data."}]},{"neurodata_type_def":"CurrentClampSeries","neurodata_type_inc":"PatchClampSeries","doc":"Voltage data from an intracellular current-clamp recording. A corresponding CurrentClampStimulusSeries (stored separately as a stimulus) is used to store the current injected.","datasets":[{"name":"data","doc":"Recorded voltage.","attributes":[{"name":"unit","dtype":"text","value":"volts","doc":"Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."}]},{"name":"bias_current","dtype":"float32","doc":"Bias current, in amps.","quantity":"?"},{"name":"bridge_balance","dtype":"float32","doc":"Bridge balance, in ohms.","quantity":"?"},{"name":"capacitance_compensation","dtype":"float32","doc":"Capacitance compensation, in farads.","quantity":"?"}]},{"neurodata_type_def":"IZeroClampSeries","neurodata_type_inc":"CurrentClampSeries","doc":"Voltage data from an intracellular recording when all current and amplifier settings are off (i.e., CurrentClampSeries fields will be zero). There is no CurrentClampStimulusSeries associated with an IZero series because the amplifier is disconnected and no stimulus can reach the cell.","attributes":[{"name":"stimulus_description","dtype":"text","doc":"An IZeroClampSeries has no stimulus, so this attribute is automatically set to \"N/A\"","value":"N/A"}],"datasets":[{"name":"bias_current","dtype":"float32","value":0.0,"doc":"Bias current, in amps, fixed to 0.0."},{"name":"bridge_balance","dtype":"float32","value":0.0,"doc":"Bridge balance, in ohms, fixed to 0.0."},{"name":"capacitance_compensation","dtype":"float32","value":0.0,"doc":"Capacitance compensation, in farads, fixed to 0.0."}]},{"neurodata_type_def":"CurrentClampStimulusSeries","neurodata_type_inc":"PatchClampSeries","doc":"Stimulus current applied during current clamp recording.","datasets":[{"name":"data","doc":"Stimulus current applied.","attributes":[{"name":"unit","dtype":"text","value":"amperes","doc":"Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."}]}]},{"neurodata_type_def":"VoltageClampSeries","neurodata_type_inc":"PatchClampSeries","doc":"Current data from an intracellular voltage-clamp recording. A corresponding VoltageClampStimulusSeries (stored separately as a stimulus) is used to store the voltage injected.","datasets":[{"name":"data","doc":"Recorded current.","attributes":[{"name":"unit","dtype":"text","value":"amperes","doc":"Base unit of measurement for working with the data. which is fixed to 'amperes'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."}]},{"name":"capacitance_fast","dtype":"float32","doc":"Fast capacitance, in farads.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"farads","doc":"Unit of measurement for capacitance_fast, which is fixed to 'farads'."}]},{"name":"capacitance_slow","dtype":"float32","doc":"Slow capacitance, in farads.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"farads","doc":"Unit of measurement for capacitance_fast, which is fixed to 'farads'."}]},{"name":"resistance_comp_bandwidth","dtype":"float32","doc":"Resistance compensation bandwidth, in hertz.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"hertz","doc":"Unit of measurement for resistance_comp_bandwidth, which is fixed to 'hertz'."}]},{"name":"resistance_comp_correction","dtype":"float32","doc":"Resistance compensation correction, in percent.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"percent","doc":"Unit of measurement for resistance_comp_correction, which is fixed to 'percent'."}]},{"name":"resistance_comp_prediction","dtype":"float32","doc":"Resistance compensation prediction, in percent.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"percent","doc":"Unit of measurement for resistance_comp_prediction, which is fixed to 'percent'."}]},{"name":"whole_cell_capacitance_comp","dtype":"float32","doc":"Whole cell capacitance compensation, in farads.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"farads","doc":"Unit of measurement for whole_cell_capacitance_comp, which is fixed to 'farads'."}]},{"name":"whole_cell_series_resistance_comp","dtype":"float32","doc":"Whole cell series resistance compensation, in ohms.","quantity":"?","attributes":[{"name":"unit","dtype":"text","value":"ohms","doc":"Unit of measurement for whole_cell_series_resistance_comp, which is fixed to 'ohms'."}]}]},{"neurodata_type_def":"VoltageClampStimulusSeries","neurodata_type_inc":"PatchClampSeries","doc":"Stimulus voltage applied during a voltage clamp recording.","datasets":[{"name":"data","doc":"Stimulus voltage applied.","attributes":[{"name":"unit","dtype":"text","value":"volts","doc":"Base unit of measurement for working with the data. which is fixed to 'volts'. Actual stored values are not necessarily stored in these units. To access the data in these units, multiply 'data' by 'conversion' and add 'offset'."}]}]},{"neurodata_type_def":"IntracellularElectrode","neurodata_type_inc":"NWBContainer","doc":"An intracellular electrode and its metadata.","datasets":[{"name":"cell_id","dtype":"text","doc":"unique ID of the cell","quantity":"?"},{"name":"description","dtype":"text","doc":"Description of electrode (e.g., whole-cell, sharp, etc.)."},{"name":"filtering","dtype":"text","doc":"Electrode specific filtering.","quantity":"?"},{"name":"initial_access_resistance","dtype":"text","doc":"Initial access resistance.","quantity":"?"},{"name":"location","dtype":"text","doc":"Location of the electrode. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible.","quantity":"?"},{"name":"resistance","dtype":"text","doc":"Electrode resistance, in ohms.","quantity":"?"},{"name":"seal","dtype":"text","doc":"Information about seal used for recording.","quantity":"?"},{"name":"slice","dtype":"text","doc":"Information about slice used for recording.","quantity":"?"}],"links":[{"name":"device","target_type":"Device","doc":"Device that was used to record from this electrode."}]},{"neurodata_type_def":"SweepTable","neurodata_type_inc":"DynamicTable","doc":"[DEPRECATED] Table used to group different PatchClampSeries. SweepTable is being replaced by IntracellularRecordingsTable and SimultaneousRecordingsTable tables. Additional SequentialRecordingsTable, RepetitionsTable, and ExperimentalConditions tables provide enhanced support for experiment metadata.","datasets":[{"name":"sweep_number","neurodata_type_inc":"VectorData","dtype":"uint32","doc":"Sweep number of the PatchClampSeries in that row."},{"name":"series","neurodata_type_inc":"VectorData","dtype":{"target_type":"PatchClampSeries","reftype":"object"},"doc":"The PatchClampSeries with the sweep number in that row."},{"name":"series_index","neurodata_type_inc":"VectorIndex","doc":"Index for series."}]},{"neurodata_type_def":"IntracellularElectrodesTable","neurodata_type_inc":"DynamicTable","doc":"Table for storing intracellular electrode related metadata.","attributes":[{"name":"description","dtype":"text","value":"Table for storing intracellular electrode related metadata.","doc":"Description of what is in this dynamic table."}],"datasets":[{"name":"electrode","neurodata_type_inc":"VectorData","dtype":{"target_type":"IntracellularElectrode","reftype":"object"},"doc":"Column for storing the reference to the intracellular electrode."}]},{"neurodata_type_def":"IntracellularStimuliTable","neurodata_type_inc":"DynamicTable","doc":"Table for storing intracellular stimulus related metadata.","attributes":[{"name":"description","dtype":"text","value":"Table for storing intracellular stimulus related metadata.","doc":"Description of what is in this dynamic table."}],"datasets":[{"name":"stimulus","neurodata_type_inc":"TimeSeriesReferenceVectorData","doc":"Column storing the reference to the recorded stimulus for the recording (rows)."},{"name":"stimulus_template","neurodata_type_inc":"TimeSeriesReferenceVectorData","doc":"Column storing the reference to the stimulus template for the recording (rows).","quantity":"?"}]},{"neurodata_type_def":"IntracellularResponsesTable","neurodata_type_inc":"DynamicTable","doc":"Table for storing intracellular response related metadata.","attributes":[{"name":"description","dtype":"text","value":"Table for storing intracellular response related metadata.","doc":"Description of what is in this dynamic table."}],"datasets":[{"name":"response","neurodata_type_inc":"TimeSeriesReferenceVectorData","doc":"Column storing the reference to the recorded response for the recording (rows)"}]},{"neurodata_type_def":"IntracellularRecordingsTable","neurodata_type_inc":"AlignedDynamicTable","name":"intracellular_recordings","doc":"A table to group together a stimulus and response from a single electrode and a single simultaneous recording. Each row in the table represents a single recording consisting typically of a stimulus and a corresponding response. In some cases, however, only a stimulus or a response is recorded as part of an experiment. In this case, both the stimulus and response will point to the same TimeSeries while the idx_start and count of the invalid column will be set to -1, thus, indicating that no values have been recorded for the stimulus or response, respectively. Note, a recording MUST contain at least a stimulus or a response. Typically the stimulus and response are PatchClampSeries. However, the use of AD/DA channels that are not associated to an electrode is also common in intracellular electrophysiology, in which case other TimeSeries may be used.","attributes":[{"name":"description","dtype":"text","value":"A table to group together a stimulus and response from a single electrode and a single simultaneous recording and for storing metadata about the intracellular recording.","doc":"Description of the contents of this table. Inherited from AlignedDynamicTable and overwritten here to fix the value of the attribute."}],"groups":[{"name":"electrodes","neurodata_type_inc":"IntracellularElectrodesTable","doc":"Table for storing intracellular electrode related metadata."},{"name":"stimuli","neurodata_type_inc":"IntracellularStimuliTable","doc":"Table for storing intracellular stimulus related metadata."},{"name":"responses","neurodata_type_inc":"IntracellularResponsesTable","doc":"Table for storing intracellular response related metadata."}]},{"neurodata_type_def":"SimultaneousRecordingsTable","neurodata_type_inc":"DynamicTable","name":"simultaneous_recordings","doc":"A table for grouping different intracellular recordings from the IntracellularRecordingsTable table together that were recorded simultaneously from different electrodes.","datasets":[{"name":"recordings","neurodata_type_inc":"DynamicTableRegion","doc":"A reference to one or more rows in the IntracellularRecordingsTable table.","attributes":[{"name":"table","dtype":{"target_type":"IntracellularRecordingsTable","reftype":"object"},"doc":"Reference to the IntracellularRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here."}]},{"name":"recordings_index","neurodata_type_inc":"VectorIndex","doc":"Index dataset for the recordings column."}]},{"neurodata_type_def":"SequentialRecordingsTable","neurodata_type_inc":"DynamicTable","name":"sequential_recordings","doc":"A table for grouping different sequential recordings from the SimultaneousRecordingsTable table together. This is typically used to group together sequential recordings where a sequence of stimuli of the same type with varying parameters have been presented in a sequence.","datasets":[{"name":"simultaneous_recordings","neurodata_type_inc":"DynamicTableRegion","doc":"A reference to one or more rows in the SimultaneousRecordingsTable table.","attributes":[{"name":"table","dtype":{"target_type":"SimultaneousRecordingsTable","reftype":"object"},"doc":"Reference to the SimultaneousRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here."}]},{"name":"simultaneous_recordings_index","neurodata_type_inc":"VectorIndex","doc":"Index dataset for the simultaneous_recordings column."},{"name":"stimulus_type","neurodata_type_inc":"VectorData","dtype":"text","doc":"The type of stimulus used for the sequential recording."}]},{"neurodata_type_def":"RepetitionsTable","neurodata_type_inc":"DynamicTable","name":"repetitions","doc":"A table for grouping different sequential intracellular recordings together. With each SequentialRecording typically representing a particular type of stimulus, the RepetitionsTable table is typically used to group sets of stimuli applied in sequence.","datasets":[{"name":"sequential_recordings","neurodata_type_inc":"DynamicTableRegion","doc":"A reference to one or more rows in the SequentialRecordingsTable table.","attributes":[{"name":"table","dtype":{"target_type":"SequentialRecordingsTable","reftype":"object"},"doc":"Reference to the SequentialRecordingsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here."}]},{"name":"sequential_recordings_index","neurodata_type_inc":"VectorIndex","doc":"Index dataset for the sequential_recordings column."}]},{"neurodata_type_def":"ExperimentalConditionsTable","neurodata_type_inc":"DynamicTable","name":"experimental_conditions","doc":"A table for grouping different intracellular recording repetitions together that belong to the same experimental condition.","datasets":[{"name":"repetitions","neurodata_type_inc":"DynamicTableRegion","doc":"A reference to one or more rows in the RepetitionsTable table.","attributes":[{"name":"table","dtype":{"target_type":"RepetitionsTable","reftype":"object"},"doc":"Reference to the RepetitionsTable table that this table region applies to. This specializes the attribute inherited from DynamicTableRegion to fix the type of table that can be referenced here."}]},{"name":"repetitions_index","neurodata_type_inc":"VectorIndex","doc":"Index dataset for the repetitions column."}]}]})delimiter";
38
39constexpr std::string_view nwb_ogen = R"delimiter(
40{"groups":[{"neurodata_type_def":"OptogeneticSeries","neurodata_type_inc":"TimeSeries","doc":"An optogenetic stimulus.","datasets":[{"name":"data","dtype":"numeric","dims":[["num_times"],["num_times","num_rois"]],"shape":[[null],[null,null]],"doc":"Applied power for optogenetic stimulus, in watts. Shape can be 1D or 2D. 2D data is meant to be used in an extension of OptogeneticSeries that defines what the second dimension represents.","attributes":[{"name":"unit","dtype":"text","value":"watts","doc":"Unit of measurement for data, which is fixed to 'watts'."}]}],"links":[{"name":"site","target_type":"OptogeneticStimulusSite","doc":"Link to OptogeneticStimulusSite object that describes the site to which this stimulus was applied."}]},{"neurodata_type_def":"OptogeneticStimulusSite","neurodata_type_inc":"NWBContainer","doc":"A site of optogenetic stimulation.","datasets":[{"name":"description","dtype":"text","doc":"Description of stimulation site."},{"name":"excitation_lambda","dtype":"float32","doc":"Excitation wavelength, in nm."},{"name":"location","dtype":"text","doc":"Location of the stimulation site. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible."}],"links":[{"name":"device","target_type":"Device","doc":"Device that generated the stimulus."}]}]})delimiter";
41
42constexpr std::string_view nwb_ophys = R"delimiter(
43{"groups":[{"neurodata_type_def":"OnePhotonSeries","neurodata_type_inc":"ImageSeries","doc":"Image stack recorded over time from 1-photon microscope.","attributes":[{"name":"pmt_gain","dtype":"float32","doc":"Photomultiplier gain.","required":false},{"name":"scan_line_rate","dtype":"float32","doc":"Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.","required":false},{"name":"exposure_time","dtype":"float32","doc":"Exposure time of the sample; often the inverse of the frequency.","required":false},{"name":"binning","dtype":"uint8","doc":"Amount of pixels combined into 'bins'; could be 1, 2, 4, 8, etc.","required":false},{"name":"power","dtype":"float32","doc":"Power of the excitation in mW, if known.","required":false},{"name":"intensity","dtype":"float32","doc":"Intensity of the excitation in mW/mm^2, if known.","required":false}],"links":[{"name":"imaging_plane","target_type":"ImagingPlane","doc":"Link to ImagingPlane object from which this TimeSeries data was generated."}]},{"neurodata_type_def":"TwoPhotonSeries","neurodata_type_inc":"ImageSeries","doc":"Image stack recorded over time from 2-photon microscope.","attributes":[{"name":"pmt_gain","dtype":"float32","doc":"Photomultiplier gain.","required":false},{"name":"scan_line_rate","dtype":"float32","doc":"Lines imaged per second. This is also stored in /general/optophysiology but is kept here as it is useful information for analysis, and so good to be stored w/ the actual data.","required":false}],"datasets":[{"name":"field_of_view","dtype":"float32","dims":[["width|height"],["width|height|depth"]],"shape":[[2],[3]],"doc":"Width, height and depth of image, or imaged area, in meters.","quantity":"?"}],"links":[{"name":"imaging_plane","target_type":"ImagingPlane","doc":"Link to ImagingPlane object from which this TimeSeries data was generated."}]},{"neurodata_type_def":"RoiResponseSeries","neurodata_type_inc":"TimeSeries","doc":"ROI responses over an imaging plane. The first dimension represents time. The second dimension, if present, represents ROIs.","datasets":[{"name":"data","dtype":"numeric","dims":[["num_times"],["num_times","num_ROIs"]],"shape":[[null],[null,null]],"doc":"Signals from ROIs."},{"name":"rois","neurodata_type_inc":"DynamicTableRegion","doc":"DynamicTableRegion referencing into an ROITable containing information on the ROIs stored in this timeseries."}]},{"neurodata_type_def":"DfOverF","neurodata_type_inc":"NWBDataInterface","default_name":"DfOverF","doc":"dF/F information about a region of interest (ROI). Storage hierarchy of dF/F should be the same as for segmentation (i.e., same names for ROIs and for image planes).","groups":[{"neurodata_type_inc":"RoiResponseSeries","doc":"RoiResponseSeries object(s) containing dF/F for a ROI.","quantity":"+"}]},{"neurodata_type_def":"Fluorescence","neurodata_type_inc":"NWBDataInterface","default_name":"Fluorescence","doc":"Fluorescence information about a region of interest (ROI). Storage hierarchy of fluorescence should be the same as for segmentation (ie, same names for ROIs and for image planes).","groups":[{"neurodata_type_inc":"RoiResponseSeries","doc":"RoiResponseSeries object(s) containing fluorescence data for a ROI.","quantity":"+"}]},{"neurodata_type_def":"ImageSegmentation","neurodata_type_inc":"NWBDataInterface","default_name":"ImageSegmentation","doc":"Stores pixels in an image that represent different regions of interest (ROIs) or masks. All segmentation for a given imaging plane is stored together, with storage for multiple imaging planes (masks) supported. Each ROI is stored in its own subgroup, with the ROI group containing both a 2D mask and a list of pixels that make up this mask. Segments can also be used for masking neuropil. If segmentation is allowed to change with time, a new imaging plane (or module) is required and ROI names should remain consistent between them.","groups":[{"neurodata_type_inc":"PlaneSegmentation","doc":"Results from image segmentation of a specific imaging plane.","quantity":"+"}]},{"neurodata_type_def":"PlaneSegmentation","neurodata_type_inc":"DynamicTable","doc":"Results from image segmentation of a specific imaging plane.","datasets":[{"name":"image_mask","neurodata_type_inc":"VectorData","dims":[["num_roi","num_x","num_y"],["num_roi","num_x","num_y","num_z"]],"shape":[[null,null,null],[null,null,null,null]],"doc":"ROI masks for each ROI. Each image mask is the size of the original imaging plane (or volume) and members of the ROI are finite non-zero.","quantity":"?"},{"name":"pixel_mask_index","neurodata_type_inc":"VectorIndex","doc":"Index into pixel_mask.","quantity":"?"},{"name":"pixel_mask","neurodata_type_inc":"VectorData","dtype":[{"name":"x","dtype":"uint32","doc":"Pixel x-coordinate."},{"name":"y","dtype":"uint32","doc":"Pixel y-coordinate."},{"name":"weight","dtype":"float32","doc":"Weight of the pixel."}],"doc":"Pixel masks for each ROI: a list of indices and weights for the ROI. Pixel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation","quantity":"?"},{"name":"voxel_mask_index","neurodata_type_inc":"VectorIndex","doc":"Index into voxel_mask.","quantity":"?"},{"name":"voxel_mask","neurodata_type_inc":"VectorData","dtype":[{"name":"x","dtype":"uint32","doc":"Voxel x-coordinate."},{"name":"y","dtype":"uint32","doc":"Voxel y-coordinate."},{"name":"z","dtype":"uint32","doc":"Voxel z-coordinate."},{"name":"weight","dtype":"float32","doc":"Weight of the voxel."}],"doc":"Voxel masks for each ROI: a list of indices and weights for the ROI. Voxel masks are concatenated and parsing of this dataset is maintained by the PlaneSegmentation","quantity":"?"}],"groups":[{"name":"reference_images","doc":"Image stacks that the segmentation masks apply to.","groups":[{"neurodata_type_inc":"ImageSeries","doc":"One or more image stacks that the masks apply to (can be one-element stack).","quantity":"*"}]}],"links":[{"name":"imaging_plane","target_type":"ImagingPlane","doc":"Link to ImagingPlane object from which this data was generated."}]},{"neurodata_type_def":"ImagingPlane","neurodata_type_inc":"NWBContainer","doc":"An imaging plane and its metadata.","datasets":[{"name":"description","dtype":"text","doc":"Description of the imaging plane.","quantity":"?"},{"name":"excitation_lambda","dtype":"float32","doc":"Excitation wavelength, in nm."},{"name":"imaging_rate","dtype":"float32","doc":"Rate that images are acquired, in Hz. If the corresponding TimeSeries is present, the rate should be stored there instead.","quantity":"?"},{"name":"indicator","dtype":"text","doc":"Calcium indicator."},{"name":"location","dtype":"text","doc":"Location of the imaging plane. Specify the area, layer, comments on estimation of area/layer, stereotaxic coordinates if in vivo, etc. Use standard atlas names for anatomical regions when possible."},{"name":"manifold","dtype":"float32","dims":[["height","width","x, y, z"],["height","width","depth","x, y, z"]],"shape":[[null,null,3],[null,null,null,3]],"doc":"DEPRECATED Physical position of each pixel. 'xyz' represents the position of the pixel relative to the defined coordinate space. Deprecated in favor of origin_coords and grid_spacing.","quantity":"?","attributes":[{"name":"conversion","dtype":"float32","default_value":1.0,"doc":"Scalar to multiply each element in data to convert it to the specified 'unit'. If the data are stored in acquisition system units or other units that require a conversion to be interpretable, multiply the data by 'conversion' to convert the data to the specified 'unit'. e.g. if the data acquisition system stores values in this object as pixels from x = -500 to 499, y = -500 to 499 that correspond to a 2 m x 2 m range, then the 'conversion' multiplier to get from raw data acquisition pixel units to meters is 2/1000.","required":false},{"name":"unit","dtype":"text","default_value":"meters","doc":"Base unit of measurement for working with the data. The default value is 'meters'.","required":false}]},{"name":"origin_coords","dtype":"float32","dims":[["x, y"],["x, y, z"]],"shape":[[2],[3]],"doc":"Physical location of the first element of the imaging plane (0, 0) for 2-D data or (0, 0, 0) for 3-D data. See also reference_frame for what the physical location is relative to (e.g., bregma).","quantity":"?","attributes":[{"name":"unit","dtype":"text","default_value":"meters","doc":"Measurement units for origin_coords. The default value is 'meters'."}]},{"name":"grid_spacing","dtype":"float32","dims":[["x, y"],["x, y, z"]],"shape":[[2],[3]],"doc":"Space between pixels in (x, y) or voxels in (x, y, z) directions, in the specified unit. Assumes imaging plane is a regular grid. See also reference_frame to interpret the grid.","quantity":"?","attributes":[{"name":"unit","dtype":"text","default_value":"meters","doc":"Measurement units for grid_spacing. The default value is 'meters'."}]},{"name":"reference_frame","dtype":"text","doc":"Describes reference frame of origin_coords and grid_spacing. For example, this can be a text description of the anatomical location and orientation of the grid defined by origin_coords and grid_spacing or the vectors needed to transform or rotate the grid to a common anatomical axis (e.g., AP/DV/ML). This field is necessary to interpret origin_coords and grid_spacing. If origin_coords and grid_spacing are not present, then this field is not required. For example, if the microscope takes 10 x 10 x 2 images, where the first value of the data matrix (index (0, 0, 0)) corresponds to (-1.2, -0.6, -2) mm relative to bregma, the spacing between pixels is 0.2 mm in x, 0.2 mm in y and 0.5 mm in z, and larger numbers in x means more anterior, larger numbers in y means more rightward, and larger numbers in z means more ventral, then enter the following -- origin_coords = (-1.2, -0.6, -2) grid_spacing = (0.2, 0.2, 0.5) reference_frame = \"Origin coordinates are relative to bregma. First dimension corresponds to anterior-posterior axis (larger index = more anterior). Second dimension corresponds to medial-lateral axis (larger index = more rightward). Third dimension corresponds to dorsal-ventral axis (larger index = more ventral).\"","quantity":"?"}],"groups":[{"neurodata_type_inc":"OpticalChannel","doc":"An optical channel used to record from an imaging plane.","quantity":"+"}],"links":[{"name":"device","target_type":"Device","doc":"Link to the Device object that was used to record from this electrode."}]},{"neurodata_type_def":"OpticalChannel","neurodata_type_inc":"NWBContainer","doc":"An optical channel used to record from an imaging plane.","datasets":[{"name":"description","dtype":"text","doc":"Description or other notes about the channel."},{"name":"emission_lambda","dtype":"float32","doc":"Emission wavelength for channel, in nm."}]},{"neurodata_type_def":"MotionCorrection","neurodata_type_inc":"NWBDataInterface","default_name":"MotionCorrection","doc":"An image stack where all frames are shifted (registered) to a common coordinate system, to account for movement and drift between frames. Note: each frame at each point in time is assumed to be 2-D (has only x & y dimensions).","groups":[{"neurodata_type_inc":"CorrectedImageStack","doc":"Results from motion correction of an image stack.","quantity":"+"}]},{"neurodata_type_def":"CorrectedImageStack","neurodata_type_inc":"NWBDataInterface","doc":"Results from motion correction of an image stack.","groups":[{"name":"corrected","neurodata_type_inc":"ImageSeries","doc":"Image stack with frames shifted to the common coordinates."},{"name":"xy_translation","neurodata_type_inc":"TimeSeries","doc":"Stores the x,y delta necessary to align each frame to the common coordinates, for example, to align each frame to a reference image."}],"links":[{"name":"original","target_type":"ImageSeries","doc":"Link to ImageSeries object that is being registered."}]}]})delimiter";
44
45constexpr std::string_view nwb_retinotopy = R"delimiter(
46{"groups":[{"neurodata_type_def":"ImagingRetinotopy","neurodata_type_inc":"NWBDataInterface","default_name":"ImagingRetinotopy","doc":"DEPRECATED. Intrinsic signal optical imaging or widefield imaging for measuring retinotopy. Stores orthogonal maps (e.g., altitude/azimuth; radius/theta) of responses to specific stimuli and a combined polarity map from which to identify visual areas. This group does not store the raw responses imaged during retinotopic mapping or the stimuli presented, but rather the resulting phase and power maps after applying a Fourier transform on the averaged responses. Note: for data consistency, all images and arrays are stored in the format [row][column] and [row, col], which equates to [y][x]. Field of view and dimension arrays may appear backward (i.e., y before x).","datasets":[{"name":"axis_1_phase_map","dtype":"float32","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Phase response to stimulus on the first measured axis.","attributes":[{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"unit","dtype":"text","doc":"Unit that axis data is stored in (e.g., degrees)."}]},{"name":"axis_1_power_map","dtype":"float32","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Power response on the first measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.","quantity":"?","attributes":[{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"unit","dtype":"text","doc":"Unit that axis data is stored in (e.g., degrees)."}]},{"name":"axis_2_phase_map","dtype":"float32","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Phase response to stimulus on the second measured axis.","attributes":[{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"unit","dtype":"text","doc":"Unit that axis data is stored in (e.g., degrees)."}]},{"name":"axis_2_power_map","dtype":"float32","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Power response on the second measured axis. Response is scaled so 0.0 is no power in the response and 1.0 is maximum relative power.","quantity":"?","attributes":[{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"unit","dtype":"text","doc":"Unit that axis data is stored in (e.g., degrees)."}]},{"name":"axis_descriptions","dtype":"text","dims":["axis_1, axis_2"],"shape":[2],"doc":"Two-element array describing the contents of the two response axis fields. Description should be something like ['altitude', 'azimuth'] or '['radius', 'theta']."},{"name":"focal_depth_image","dtype":"uint16","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Gray-scale image taken with same settings/parameters (e.g., focal depth, wavelength) as data collection. Array format: [rows][columns].","quantity":"?","attributes":[{"name":"bits_per_pixel","dtype":"int32","doc":"Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value."},{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"focal_depth","dtype":"float32","doc":"Focal depth offset, in meters."},{"name":"format","dtype":"text","doc":"Format of image. Right now only 'raw' is supported."}]},{"name":"sign_map","dtype":"float32","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Sine of the angle between the direction of the gradient in axis_1 and axis_2.","quantity":"?","attributes":[{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."}]},{"name":"vasculature_image","dtype":"uint16","dims":["num_rows","num_cols"],"shape":[null,null],"doc":"Gray-scale anatomical image of cortical surface. Array structure: [rows][columns]","attributes":[{"name":"bits_per_pixel","dtype":"int32","doc":"Number of bits used to represent each value. This is necessary to determine maximum (white) pixel value"},{"name":"dimension","dtype":"int32","dims":["num_rows, num_cols"],"shape":[2],"doc":"Number of rows and columns in the image. NOTE: row, column representation is equivalent to height, width."},{"name":"field_of_view","dtype":"float32","dims":["height, width"],"shape":[2],"doc":"Size of viewing area, in meters."},{"name":"format","dtype":"text","doc":"Format of image. Right now only 'raw' is supported."}]}]}]})delimiter";
47
48constexpr std::string_view namespaces = R"delimiter(
49{"namespaces":[{"name":"core","doc":"NWB namespace","author":["Andrew Tritt","Oliver Ruebel","Ryan Ly","Ben Dichter","Keith Godfrey","Jeff Teeters"],"contact":["ajtritt@lbl.gov","oruebel@lbl.gov","rly@lbl.gov","bdichter@lbl.gov","keithg@alleninstitute.org","jteeters@berkeley.edu"],"full_name":"NWB core","schema":[{"namespace":"hdmf-common"},{"source":"nwb.base"},{"source":"nwb.device"},{"source":"nwb.epoch"},{"source":"nwb.image"},{"source":"nwb.file"},{"source":"nwb.misc"},{"source":"nwb.behavior"},{"source":"nwb.ecephys"},{"source":"nwb.icephys"},{"source":"nwb.ogen"},{"source":"nwb.ophys"},{"source":"nwb.retinotopy"}],"version":"2.7.0"}]})delimiter";
50
51constexpr std::array<std::pair<std::string_view, std::string_view>, 13>
52 specVariables {{{"nwb.base", nwb_base},
53 {"nwb.device", nwb_device},
54 {"nwb.epoch", nwb_epoch},
55 {"nwb.image", nwb_image},
56 {"nwb.file", nwb_file},
57 {"nwb.misc", nwb_misc},
58 {"nwb.behavior", nwb_behavior},
59 {"nwb.ecephys", nwb_ecephys},
60 {"nwb.icephys", nwb_icephys},
61 {"nwb.ogen", nwb_ogen},
62 {"nwb.ophys", nwb_ophys},
63 {"nwb.retinotopy", nwb_retinotopy},
64 {"namespace", namespaces}}};
65} // namespace AQNWB::SPEC::CORE
Definition core.hpp:8
constexpr std::string_view nwb_ogen
Definition core.hpp:39
constexpr std::string_view namespaces
Definition core.hpp:48
constexpr std::string_view nwb_device
Definition core.hpp:15
constexpr std::string_view nwb_retinotopy
Definition core.hpp:45
constexpr std::string_view nwb_ophys
Definition core.hpp:42
constexpr std::string_view nwb_ecephys
Definition core.hpp:33
constexpr std::string_view nwb_icephys
Definition core.hpp:36
const std::string version
Definition core.hpp:10
constexpr std::string_view nwb_behavior
Definition core.hpp:30
constexpr std::string_view nwb_base
Definition core.hpp:12
constexpr std::string_view nwb_epoch
Definition core.hpp:18
constexpr std::string_view nwb_file
Definition core.hpp:24
constexpr std::array< std::pair< std::string_view, std::string_view >, 13 > specVariables
Definition core.hpp:52
constexpr std::string_view nwb_misc
Definition core.hpp:27
constexpr std::string_view nwb_image
Definition core.hpp:21