diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml index 0e97f8a..0faddf9 100644 --- a/.github/workflows/test-install.yml +++ b/.github/workflows/test-install.yml @@ -29,4 +29,3 @@ jobs: run: pip install -e . - name: Test module load run: python -c "import hnasko_lab_to_nwb" - diff --git a/.gitignore b/.gitignore index 6198485..7347b6c 100644 --- a/.gitignore +++ b/.gitignore @@ -146,4 +146,4 @@ dmypy.json .DS_Store # NWB files -**.nwb \ No newline at end of file +**.nwb diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 0776b73..fa6228a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -15,11 +15,11 @@ repos: rev: v2.3.0 hooks: - id: codespell - additional_dependencies: + additional_dependencies: - tomli - repo: https://github.com/astral-sh/ruff-pre-commit rev: v0.6.9 hooks: - id: ruff - args: [ --fix ] \ No newline at end of file + args: [ --fix ] diff --git a/README.md b/README.md index 3841bd8..bc83eac 100644 --- a/README.md +++ b/README.md @@ -14,8 +14,8 @@ pip install hnasko-lab-to-nwb We recommend that you install the package inside a [virtual environment](https://docs.python.org/3/tutorial/venv. html). A simple way of doing this is to use a [conda environment](https://docs.conda. -io/projects/conda/en/latest/user-guide/concepts/environments.html) from the `conda` package manager ([installation -instructions](https://docs.conda.io/en/latest/miniconda.html)). Detailed instructions on how to use conda +io/projects/conda/en/latest/user-guide/concepts/environments.html) from the `conda` package manager ([installation +instructions](https://docs.conda.io/en/latest/miniconda.html)). Detailed instructions on how to use conda environments can be found in their [documentation](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html). 
### Running a specific conversion @@ -30,10 +30,10 @@ python convert_session.py ``` ## Installation from GitHub -Another option is to install the package directly from GitHub. This option has the advantage that the source code -can be modified if you need to amend some of the code we originally provided to adapt to future experimental -differences. To install the conversion from GitHub you will need to use `git` ([installation instructions] (https://github.com/git-guides/install-git)). -We also recommend the installation of `conda` ([installation instructions](https://docs.conda.io/en/latest/miniconda.html)) as it contains all the required +Another option is to install the package directly from GitHub. This option has the advantage that the source code +can be modified if you need to amend some of the code we originally provided to adapt to future experimental +differences. To install the conversion from GitHub you will need to use `git` ([installation instructions](https://github.com/git-guides/install-git)). +We also recommend the installation of `conda` ([installation instructions](https://docs.conda.io/en/latest/miniconda.html)) as it contains all the required machinery in a single and simple install. From a terminal (note that conda should install one in your system) you can do the following: @@ -47,7 +47,7 @@ conda activate hnasko-lab-to-nwb-env This creates a [conda environment](https://docs.conda.io/projects/conda/en/latest/user-guide/concepts/environments.html) which isolates the conversion code from your system libraries. We recommend that you run all your conversion related tasks and analysis from the created environment in order to minimize issues related to package dependencies. 
-Alternatively, if you want to avoid conda altogether (for example if you use another virtual environment tool) you +Alternatively, if you want to avoid conda altogether (for example if you use another virtual environment tool) you can install the repository with the following commands using only pip: ``` @@ -94,11 +94,11 @@ Each conversion is organized in a directory of its own in the `src` directory: └── __init__.py -For example, for the conversion `embargo_2025` you can find a directory located in `src/hnasko-lab-to-nwb/embargo_2025`. +For example, for the conversion `embargo_2025` you can find a directory located in `src/hnasko-lab-to-nwb/embargo_2025`. Inside each conversion directory you can find the following files: -* `convert_sesion.py`: this script defines the function to convert one full session of the conversion. +* `convert_session.py`: this script defines the function to convert one full session of the conversion. * `metadata.yml`: metadata in yaml format for this specific conversion. * `behaviorinterface.py`: the behavior interface. Usually ad-hoc for each conversion. * `nwbconverter.py`: the place where the `NWBConverter` class is defined. 
diff --git a/pyproject.toml b/pyproject.toml index 6e29e33..f6fa89d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,4 +83,3 @@ fixable = ["ALL"] [tool.ruff.lint.isort] relative-imports-order = "closest-to-furthest" known-first-party = ["neuroconv"] - diff --git a/src/hnasko_lab_to_nwb/embargo_2025/__init__.py b/src/hnasko_lab_to_nwb/embargo_2025/__init__.py index f2f6185..e69de29 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/__init__.py +++ b/src/hnasko_lab_to_nwb/embargo_2025/__init__.py @@ -1,2 +0,0 @@ -from .behaviorinterface import Embargo2025BehaviorInterface -from .nwbconverter import Embargo2025NWBConverter diff --git a/src/hnasko_lab_to_nwb/embargo_2025/behaviorinterface.py b/src/hnasko_lab_to_nwb/embargo_2025/behaviorinterface.py deleted file mode 100644 index e165d97..0000000 --- a/src/hnasko_lab_to_nwb/embargo_2025/behaviorinterface.py +++ /dev/null @@ -1,25 +0,0 @@ -"""Primary class for converting experiment-specific behavior.""" -from pynwb.file import NWBFile - -from neuroconv.basedatainterface import BaseDataInterface -from neuroconv.utils import DeepDict - -class Embargo2025BehaviorInterface(BaseDataInterface): - """Behavior interface for embargo_2025 conversion""" - - keywords = ["behavior"] - - def __init__(self): - # This should load the data lazily and prepare variables you need - pass - - def get_metadata(self) -> DeepDict: - # Automatically retrieve as much metadata as possible from the source files available - metadata = super().get_metadata() - - return metadata - - def add_to_nwbfile(self, nwbfile: NWBFile, metadata: dict): - # All the custom code to add the data the nwbfile - - raise NotImplementedError() diff --git a/src/hnasko_lab_to_nwb/embargo_2025/convert_all_sessions.py b/src/hnasko_lab_to_nwb/embargo_2025/convert_all_sessions.py index 084d52f..efb7936 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/convert_all_sessions.py +++ b/src/hnasko_lab_to_nwb/embargo_2025/convert_all_sessions.py @@ -1,9 +1,10 @@ """Primary script to 
run to convert all sessions in a dataset using session_to_nwb.""" -from pathlib import Path -from typing import Union +import traceback from concurrent.futures import ProcessPoolExecutor, as_completed +from pathlib import Path from pprint import pformat -import traceback +from typing import Union + from tqdm import tqdm from .embargo_2025_convert_session import session_to_nwb @@ -39,7 +40,7 @@ def dataset_to_nwb( for session_to_nwb_kwargs in session_to_nwb_kwargs_per_session: session_to_nwb_kwargs["output_dir_path"] = output_dir_path session_to_nwb_kwargs["verbose"] = verbose - exception_file_path = data_dir_path / f"ERROR_.txt" # Add error file path here + exception_file_path = data_dir_path / f"ERROR_.txt" # Add error file path here futures.append( executor.submit( safe_session_to_nwb, @@ -86,11 +87,11 @@ def get_session_to_nwb_kwargs_per_session( list[dict[str, Any]] A list of dictionaries containing the kwargs for session_to_nwb for each session. """ - ##### - # # Implement this function to return the kwargs for session_to_nwb for each session - # This can be a specific list with hard-coded sessions, a path expansion or any conversion specific logic that you might need - ##### - raise NotImplementedError + ##### + # # Implement this function to return the kwargs for session_to_nwb for each session + # This can be a specific list with hard-coded sessions, a path expansion or any conversion specific logic that you might need + ##### + raise NotImplementedError if __name__ == "__main__": diff --git a/src/hnasko_lab_to_nwb/embargo_2025/convert_session.py b/src/hnasko_lab_to_nwb/embargo_2025/convert_session.py index e71f851..dfa4136 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/convert_session.py +++ b/src/hnasko_lab_to_nwb/embargo_2025/convert_session.py @@ -1,70 +1,101 @@ """Primary script to run to convert an entire session for of data using the NWBConverter.""" + from pathlib import Path from typing import Union -import datetime -from zoneinfo import ZoneInfo 
- -from neuroconv.utils import load_dict_from_file, dict_deep_update -from .embargo_2025 import Embargo2025NWBConverter +from nwbconverter import Embargo2025NWBConverter +from neuroconv.utils import dict_deep_update, load_dict_from_file -def session_to_nwb(data_dir_path: Union[str, Path], output_dir_path: Union[str, Path], stub_test: bool = False): - data_dir_path = Path(data_dir_path) +def session_to_nwb( + output_dir_path: Union[str, Path], + subject_id: str, + session_id: str, + tdt_folder_path: Union[str, Path], + protocol_type: str, + stub_test: bool = False, +): output_dir_path = Path(output_dir_path) if stub_test: output_dir_path = output_dir_path / "nwb_stub" output_dir_path.mkdir(parents=True, exist_ok=True) - session_id = "subject_identifier_usually" - nwbfile_path = output_dir_path / f"{session_id}.nwb" - + nwbfile_path = output_dir_path / f"sub-{subject_id}_ses-{session_id}.nwb" + + valid_protocols = {"Varying durations", "Varying frequencies", "Shocks"} + if protocol_type not in valid_protocols: + raise ValueError(f"Invalid protocol_type: {protocol_type}. Must be one of {valid_protocols}.") + if protocol_type == "Varying durations": + session_description = ( + "The subject is placed in a plastic tub and is recorded for 3.5 minutes. " + "The subject receives a 40 Hz stimulation at various durations (i.e. 250ms, 1s and 4s) " + "5 times for each duration) with an inter-stimulus interval (ISI) of 10s. " + ) + elif protocol_type == "Varying frequencies": + session_description = ( + "The subject is placed in a plastic tub and undergoes 3 recording sessions corresponding " + "to a fixed duration of stimulation (i.e., 250ms, 1s, and 4s). Each session lasted 8 minutes. " + "The subject receives optogenetic stimulation at varying frequencies " + "(5 Hz, 10 Hz , 20 Hz and 40 Hz) 5 times for each duration with an ISI of 10s. " + ) + elif protocol_type == "Shocks": + session_description = ( + "The subject is placed in a shock chamber and recorded for 6 minutes. 
" + "Uncued shocks (0.3 mA) at various durations (250ms, 1s and 4s, 5 times for each duration) " + "are delivered in a randomized order and ISI." + ) source_data = dict() conversion_options = dict() - # Add Recording - source_data.update(dict(Recording=dict())) - conversion_options.update(dict(Recording=dict(stub_test=stub_test))) - - # Add Sorting - source_data.update(dict(Sorting=dict())) - conversion_options.update(dict(Sorting=dict())) + # Add FiberPhotometry - # Add Behavior - source_data.update(dict(Behavior=dict())) - conversion_options.update(dict(Behavior=dict())) + source_data.update(dict(FiberPhotometry=dict(folder_path=tdt_folder_path))) + conversion_options.update(dict(FiberPhotometry=dict())) converter = Embargo2025NWBConverter(source_data=source_data) - # Add datetime to conversion - metadata = converter.get_metadata() - date = datetime.datetime(year=2020, month=1, day=1, tzinfo=ZoneInfo("US/Eastern")) - metadata["NWBFile"]["session_start_time"] = date - # Update default metadata with the editable in the corresponding yaml file - editable_metadata_path = Path(__file__).parent / "embargo_2025_metadata.yaml" + metadata = converter.get_metadata() + editable_metadata_path = Path(__file__).parent / "metadata.yaml" editable_metadata = load_dict_from_file(editable_metadata_path) metadata = dict_deep_update(metadata, editable_metadata) - metadata["Subject"]["subject_id"] = "a_subject_id" # Modify here or in the yaml file + metadata["Subject"]["subject_id"] = subject_id + metadata["NWBFile"]["session_id"] = session_id + metadata["NWBFile"]["session_description"] = session_description # Run conversion - converter.run_conversion(metadata=metadata, nwbfile_path=nwbfile_path, conversion_options=conversion_options) + converter.run_conversion( + metadata=metadata, nwbfile_path=nwbfile_path, conversion_options=conversion_options, overwrite=True + ) if __name__ == "__main__": # Parameters for conversion - data_dir_path = Path("/Directory/With/Raw/Formats/") - 
output_dir_path = Path("~/conversion_nwb/") - stub_test = False - -<<<<<<< HEAD:hnasko-lab-to-nwb/src/hnasko_lab_to_nwb/embargo_2025/convert_session.py - session_to_nwb(data_dir_path=data_dir_path, output_dir_path=output_dir_path, stub_test=stub_test) -======= - session_to_nwb(data_dir_path=data_dir_path, - output_dir_path=output_dir_path, - stub_test=stub_test, - ) ->>>>>>> main:hnasko-lab-to-nwb/src/hnasko_lab_to_nwb/embargo_2025/embargo_2025_convert_session.py + data_dir_path = Path("D:/Hnasko-CN-data-share/SN pan GABA recordings/PPN/") + output_dir_path = Path("D:/hnasko_lab_conversion_nwb") + from neuroconv.tools.path_expansion import LocalPathExpander + + data_dir_path = "D:/Hnasko-CN-data-share/SN pan GABA recordings/PPN/" + # Specify source data + source_data_spec = { + "FiberPhotometry": { + "base_directory": data_dir_path + "Fiber photometry_TDT", + "folder_path": "{protocol_type}/{subject_id}-{session_id}", + } + } + # Instantiate LocalPathExpander + path_expander = LocalPathExpander() + # Expand paths and extract metadata + metadata_list = path_expander.expand_paths(source_data_spec) + for metadata in metadata_list: + session_to_nwb( + output_dir_path=output_dir_path, + subject_id=metadata["metadata"]["Subject"]["subject_id"], + session_id=metadata["metadata"]["NWBFile"]["session_id"], + tdt_folder_path=metadata["source_data"]["FiberPhotometry"]["folder_path"], + protocol_type=metadata["metadata"]["extras"]["protocol_type"], + stub_test=True, + ) diff --git a/src/hnasko_lab_to_nwb/embargo_2025/metadata.yaml b/src/hnasko_lab_to_nwb/embargo_2025/metadata.yaml index 8056f3f..5795f0a 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/metadata.yaml +++ b/src/hnasko_lab_to_nwb/embargo_2025/metadata.yaml @@ -3,19 +3,124 @@ NWBFile: - Keyword1 - Keyword2 - Keyword3 - related_publications: - https://doi.org/### or link to APA or MLA citation of the publication - session_description: - A rich text description of the experiment. 
Can also just be the abstract of the publication. - institution: Institution where the lab is located + experiment_description: + Mice were freely moving on a plastic tub. + Simultaneous passive optogenetic stimulation and fiber photometry recordings were conducted during the first two days. + Excitatory inputs from either the STN or PPN to SN were stimulated and the activity of SN GABAergic neurons were recorded. + Then mice underwent uncued electrical shocks and were recorded using fiber photometry. + institution: UC San Diego School of Medicine lab: Hnasko experimenter: - Last, First Middle - Last, First Middle + surgery: Mice > 6 weeks old were anesthetized with isoflurane (4% for induction; 1-2% for maintenance). + For fiber photometry experiments, mice were unilaterally injected with 300nl of AAV1-fDIO-Ef1a-Gcamp6f + (4e12 vg/mL, Addgene 1283125) in SNr (AP -3.3, ML 1.3, DV -4.60) and 150nl or 200nl AAV5-DIO-Syn-ChrimsonR-Tdtomato + (4e12 vg/mL or 8.5e12 vg/mL, Addgene 62723) in either STN (AP -2.00, ML 1.6, DV -4.50) or PPN (AP -4.48, ML 1.1, DV -3.75). + A single photometry fiber (RWD, 400um, 0.39 NA) was implanted in SNr (AP -3.3, ML 1.4, DV -4.40), + which served for optogenetic stimulation and photometry recordings. + Mice were allowed to recover for 4 weeks before starting experiments. Subject: - species: Rattus norvegicus - description: A rich text description of the subject - age: TBD # in ISO 8601, such as "P1W2D" - sex: TBD # One of M, F, U, or O - date_of_birth: 2014-06-22 00:00:00-04:00 # Example - + species: Mus musculus + description: Male and female were bred at University of California San Diego (UCSD) and group-housed on a 12-hour light/dark cycle, + with food and water ad libitum. VGLUT2-IRES-CRE and VGAT-2A-FlpO-D knock-in mice were obtained from the Jackson Laboratory + Slc17a6tm2(cre)Lowl (RRID:IMSR_JAX:016963) Slc32a1tm1.1(flpo)Hze (RRID:IMSR_JAX:029591) and maintained backcrossed on to C57BL/6J. 
+ VGlut2-Cre animals were crossed with VGAT-flp animals to generate dual transgenic animals that were VGlut2-Cre/VGAT-flp and used for experiments. + All experiments were performed on animals of at least 6 weeks of age and in accordance with protocols approved by UCSD Institutional Animal Care and Use Committee. + age: P10W # To ask + sex: U # To ask + strain: C57BL/6J + genotype: VGLUT2-IRES-CRE;VGAT-2A-FlpO-D # To check with lab point person + # date_of_birth: 2014-06-22 00:00:00-04:00 # Example +Ophys: + FiberPhotometry: + OpticalFibers: + - name: optical_fiber + description: Chronically implantable optic fiber (RWD) with 400 um core, 0.39 NA, was implanted in SNr (AP -3.3, ML 1.4, DV -4.40), which served for optogenetic stimulation and photometry recordings. + manufacturer: RWD + # model: unknown + numerical_aperture: 0.39 + core_diameter_in_um: 400.0 + ExcitationSources: + - name: excitation_source + description: The excitation wavelength for Gcamp6f indicator. + manufacturer: TDT LUX LED + # model: TBD + illumination_type: LED + excitation_wavelength_in_nm: 465.0 + - name: excitation_source_isosbestic + description: The excitation wavelength for Gcamp6 indicator. 
+ manufacturer: TDT LUX LED + # model: TBD + illumination_type: LED + excitation_wavelength_in_nm: 405.0 + Photodetectors: + - name: photodetector + # description: TBD + manufacturer: TDT LUX Photosensors + # model: TBD + detector_type: photosensors + detected_wavelength_in_nm: 470.0 # TBD + # gain: # TBD + DichroicMirrors: + - name: dichroic_mirror + # description: TBD + manufacturer: Doric Lenses + # model: TBD + Indicators: + - name: Gcamp6f + description: "Mice were unilaterally injected with 300nl of AAV1-fDIO-Ef1a-Gcamp6f (4e12 vg/mL, Addgene 1283125) in SNr (AP -3.3, ML 1.3, DV -4.6)" + manufacturer: Addgene 1283125 + label: AAV1-fDIO-Ef1a-Gcamp6f + injection_location: SNr + injection_coordinates_in_mm: [-3.3, 1.3, -4.6] +# - name: Tdtomato +# description: "Mice were unilaterally injected with 150nl or 200nl AAV5-DIO-Syn-ChrimsonR-Tdtomato (4e12 vg/mL or 8.5e12 vg/mL, Addgene 62723) in the STN (AP -2.00, ML 1.6, DV -4.5)." +# manufacturer: Addgene 62723 +# label: AAV5-DIO-Syn-ChrimsonR-Tdtomato +# injection_location: STN +# injection_coordinates_in_mm: [-2.00, 1.6, -4.5] + - name: Tdtomato + description: "Mice were unilaterally injected with 150nl or 200nl AAV5-DIO-Syn-ChrimsonR-Tdtomato (4e12 vg/mL or 8.5e12 vg/mL, Addgene 62723) in the PPN (AP -4.48, ML 1.1, DV -3.75)." + manufacturer: Addgene 62723 + label: AAV5-DIO-Syn-ChrimsonR-Tdtomato + injection_location: PPN + injection_coordinates_in_mm: [-4.48, 1.1, -3.75] + FiberPhotometryTable: + name: fiber_photometry_table + description: The metadata of the fiber photometry setup. + The recordings were done using a fiber photometry rig with optical components from Tucker David Technologies (TDT) + and Doric lenses controlled by a real-time processor from TDT (RZ10x). + TDT software Synapse was used for data acquisition. + Gcamp6f was excited by amplitude modulated signals from two light-emitting diodes (465- and 405-nm isosbestic control, TDT). 
+ Stimulation and shock timestamps were digitized in Synapse software by respectively AnyMaze and MedPC. + rows: + - name: 0 + location: SNr + coordinates: [-3.3, 1.4, -4.40] + indicator: Gcamp6f + optical_fiber: optical_fiber + excitation_source: excitation_source + photodetector: photodetector + dichroic_mirror: dichroic_mirror + - name: 1 + location: SNr + coordinates: [-3.3, 1.4, -4.40] + indicator: Gcamp6f + optical_fiber: optical_fiber + excitation_source: excitation_source_isosbestic + photodetector: photodetector + dichroic_mirror: dichroic_mirror + FiberPhotometryResponseSeries: + - name: calcium_signal + description: The raw fiber photometry signal from Tucker David Technologies (TDT) acquisition system. + stream_name: _465A + unit: a.u. + fiber_photometry_table_region: [0] + fiber_photometry_table_region_description: The region of the FiberPhotometryTable corresponding to the raw signal. + - name: isosbestic_signal + description: The raw fiber photometry signal from Tucker David Technologies (TDT) acquisition system. + stream_name: _405A + unit: a.u. + fiber_photometry_table_region: [1] + fiber_photometry_table_region_description: The region of the FiberPhotometryTable corresponding to the raw signal. diff --git a/src/hnasko_lab_to_nwb/embargo_2025/notes.md b/src/hnasko_lab_to_nwb/embargo_2025/notes.md index 5181ea1..2c5880e 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/notes.md +++ b/src/hnasko_lab_to_nwb/embargo_2025/notes.md @@ -1 +1,44 @@ # Notes concerning the embargo_2025 conversion + +## Experiment description +3 experimental days: +### Day 1: Varying duration +During day 1, mice were placed in a plastic tub and were recorded for 3.5 minutes. +The mice received a 40 Hz stimulation at various durations (i.e. 250ms, 1s and 4s) 5 times for each duration +with an inter-stimulus interval (ISI) of 10s. 
+### Day 2: Varying frequencies +The next day, the animals were placed in the same set-up and underwent 3 recording sessions corresponding +to a fixed duration of stimulation (i.e., 250ms, 1s, and 4s). Each session lasted 8 minutes. +The animals received optogenetic stimulation at varying frequencies (5 Hz, 10 Hz , 20 Hz and 40 Hz) +5 times for each duration with an ISI of 10s. +### Day 3: Shock +During day 3, animals were placed in a shock chamber and recorded for 6 minutes. +Uncued shocks (0.3 mA) at various durations (250ms, 1s and 4s, 5 times for each duration) were delivered +in a randomized order and ISI. + +## TDT data + +### TDT stream +For all subjects and all session types: +- From stream '_405A' --> isosbestic signal +- From stream '_465A' --> calcium signal +- Stream 'Fi1r' --> TODO: ask point person + +### TDT events +For "Varying duration" sessions +- 'ssm_'--> time intervals for optogenetic stimulation delivered for 250ms each stimulus, +- 's1s_'--> time intervals for optogenetic stimulation delivered for 1s each stimulus, +- 's4s_'--> time intervals for optogenetic stimulation delivered for 4s each stimulus +- +For "Varying frequencies" sessions +- 'H10_'--> time intervals for optogenetic stimulation delivered at 10Hz, +- 'H20_'--> time intervals for optogenetic stimulation delivered at 20Hz, +- 'H40_'--> time intervals for optogenetic stimulation delivered at 40Hz, +- 'H05_'--> time intervals for optogenetic stimulation delivered at 5Hz + +For "Shock" sessions +- 'CSm_' --> TODO: ask point person +- 'CSp_' --> TODO: ask point person + +## AnyMaze data +TODO: Contact AnyMaze support diff --git a/src/hnasko_lab_to_nwb/embargo_2025/nwbconverter.py b/src/hnasko_lab_to_nwb/embargo_2025/nwbconverter.py index 9252dc5..58e1ce5 100644 --- a/src/hnasko_lab_to_nwb/embargo_2025/nwbconverter.py +++ b/src/hnasko_lab_to_nwb/embargo_2025/nwbconverter.py @@ -1,18 +1,11 @@ """Primary NWBConverter class for this dataset.""" from neuroconv import NWBConverter -from 
neuroconv.datainterfaces import ( - SpikeGLXRecordingInterface, - PhySortingInterface, -) - -from .embargo_2025 import Embargo2025BehaviorInterface +from neuroconv.datainterfaces import TDTFiberPhotometryInterface class Embargo2025NWBConverter(NWBConverter): """Primary conversion class for my extracellular electrophysiology dataset.""" data_interface_classes = dict( - Recording=SpikeGLXRecordingInterface, - Sorting=PhySortingInterface, - Behavior=Embargo2025BehaviorInterface, + FiberPhotometry=TDTFiberPhotometryInterface, )