Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Align the starting_time of the processed fiber photometry data #29

Merged
merged 7 commits into from
May 27, 2024
8 changes: 8 additions & 0 deletions src/dombeck_lab_to_nwb/azcorra2023/azcorra2023nwbconverter.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,3 +29,11 @@ def add_to_nwbfile(self, nwbfile: NWBFile, metadata, conversion_options: Optiona

backend_configuration = get_default_backend_configuration(nwbfile=nwbfile, backend="hdf5")
configure_backend(nwbfile=nwbfile, backend_configuration=backend_configuration)

def temporally_align_data_interfaces(self):
    """
    Shift the processed fiber photometry data to its cropping-corrected start time.

    The processed interface knows (via its "cropStart" value) how much of the
    recording was cut off; apply that offset as the interface's aligned starting time.
    """
    interface = self.data_interface_objects["ProcessedFiberPhotometry"]
    interface.set_aligned_starting_time(aligned_starting_time=interface.get_starting_time())
Original file line number Diff line number Diff line change
@@ -1,15 +1,16 @@
from pathlib import Path
from typing import Optional

from neuroconv.basedatainterface import BaseDataInterface
import numpy as np
from neuroconv import BaseTemporalAlignmentInterface
from neuroconv.utils import FilePathType
from pymatreader import read_mat
from pynwb import NWBFile

from dombeck_lab_to_nwb.azcorra2023.photometry_utils.add_fiber_photometry import add_fiber_photometry_series


class Azcorra2023FiberPhotometryInterface(BaseDataInterface):
class Azcorra2023FiberPhotometryInterface(BaseTemporalAlignmentInterface):
"""Data interface for Azcorra2023 fiber photometry data conversion."""

display_name = "Azcorra2023BinnedPhotometry"
Expand All @@ -36,6 +37,8 @@ def __init__(
self.verbose = verbose
binned_photometry_data = read_mat(filename=str(self.file_path))
self._photometry_data = binned_photometry_data["#subsystem#"]["MCOS"][2]
self._sampling_frequency = 100.0
self._timestamps = None

depth_ids = binned_photometry_data["#subsystem#"]["MCOS"][5]
depth_ids = [depth_ids] if isinstance(depth_ids, str) else depth_ids
Expand All @@ -48,6 +51,24 @@ def __init__(

self.column_names = binned_photometry_data["#subsystem#"]["MCOS"][7]

def get_original_timestamps(self) -> np.ndarray:
    """
    Reconstruct the un-aligned timestamps by re-reading the binned photometry file.

    The frame count is taken from the movement channel ("chMov"); timestamps are
    evenly spaced at the fixed sampling frequency, starting at zero.
    """
    mat_contents = read_mat(filename=str(self.file_path))
    photometry_data = mat_contents["#subsystem#"]["MCOS"][2]
    movement_trace = photometry_data[self.column_names.index("chMov")]
    # With multiple recording depths the channel holds one trace per depth;
    # select the depth this interface was constructed for.
    if len(self.depth_ids) > 1:
        movement_trace = movement_trace[self.depth_index]
    return np.arange(len(movement_trace)) / self._sampling_frequency

def get_timestamps(self, stub_test: bool = False) -> np.ndarray:
    """
    Return the aligned timestamps if any were set, otherwise the original ones.

    When ``stub_test`` is True only the first 6000 samples are returned, to keep
    test conversions fast.
    """
    if self._timestamps is None:
        timestamps = self.get_original_timestamps()
    else:
        timestamps = self._timestamps
    return timestamps[:6000] if stub_test else timestamps

def set_aligned_timestamps(self, aligned_timestamps: np.ndarray) -> None:
    """Store externally aligned timestamps, copied into a fresh numpy array."""
    # np.array (not asarray) so the stored copy is independent of the caller's data.
    self._timestamps = np.array(aligned_timestamps)

Comment on lines +54 to +71
Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

def add_to_nwbfile(
self,
nwbfile: NWBFile,
Expand Down Expand Up @@ -87,11 +108,12 @@ def add_to_nwbfile(
if len(self.depth_ids) > 1:
data = data[self.depth_index]

timestamps = self.get_timestamps(stub_test=stub_test)
add_fiber_photometry_series(
nwbfile=nwbfile,
metadata=metadata,
data=data if not stub_test else data[:6000],
rate=100.0,
timestamps=timestamps,
fiber_photometry_series_name=series_name,
table_region_ind=series_ind,
parent_container="acquisition",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,20 @@ def __init__(
self._processed_photometry_data = processed_photometry_data["data"]
self._timestamps = None
self._sampling_frequency = 100.0
crop_point = processed_photometry_data["cropStart"]
# If the crop point is an array (rather than a scalar), the end of the recording was cropped as well
self._crop_start = crop_point[0] if isinstance(crop_point, np.ndarray) else crop_point

def get_starting_time(self) -> float:
    """
    Return the starting time of the processed photometry data.

    If the start of the picoscope recording had artefacts, the corrupted segment
    was cut off manually and the cropping point was saved as "cropStart". That
    value is converted to seconds here so the processed data can be aligned with
    the picoscope data. A crop point of 1 means nothing was cropped.
    """
    not_cropped = self._crop_start == 1
    return 0.0 if not_cropped else self._crop_start / self._sampling_frequency

def get_metadata(self) -> dict:
metadata = super().get_metadata()
Expand Down Expand Up @@ -76,12 +90,7 @@ def get_metadata(self) -> dict:

def get_original_timestamps(self) -> np.ndarray:
    """
    Reconstruct the un-aligned timestamps by re-reading the processed data file.

    The frame count is taken from the movement channel ("chMov"), which is always
    present in the processed data; timestamps are evenly spaced at the fixed
    sampling frequency, starting at zero.
    """
    # NOTE(review): this span contained unresolved diff residue (old chGreen/chRed
    # length lookup interleaved with the new chMov lookup); resolved to the merged
    # version that counts frames from "chMov".
    processed_photometry_data = read_mat(filename=str(self.file_path))["data6"]["data"]
    num_frames = len(processed_photometry_data["chMov"])
    return np.arange(num_frames) / self._sampling_frequency

def get_timestamps(self, stub_test: bool = False) -> np.ndarray:
Expand All @@ -105,9 +114,13 @@ def add_continuous_behavior(self, nwbfile: NWBFile, metadata: dict):
), f"Velocity data not found in {self.source_data['file_path']}."
velocity = self._processed_photometry_data["chMov"]
velocity_metadata = behavior_metadata["Velocity"]

timestamps = self.get_timestamps()

velocity_ts = TimeSeries(
data=velocity,
rate=self._sampling_frequency,
starting_time=timestamps[0],
**velocity_metadata,
)

Expand All @@ -119,6 +132,7 @@ def add_continuous_behavior(self, nwbfile: NWBFile, metadata: dict):
acceleration_ts = TimeSeries(
data=acceleration,
rate=self._sampling_frequency,
starting_time=timestamps[0],
**acceleration_metadata,
)

Expand Down Expand Up @@ -155,6 +169,8 @@ def add_delta_f_over_f_traces(
"""
ophys_module = get_module(nwbfile=nwbfile, name="ophys", description=f"Processed fiber photometry data.")

timestamps = self.get_timestamps(stub_test=stub_test)

for series_ind, (channel_name, series_name) in enumerate(
channel_name_to_photometry_series_name_mapping.items()
):
Expand All @@ -181,6 +197,7 @@ def add_delta_f_over_f_traces(
data=data_to_add,
unit="n.a.",
rate=self._sampling_frequency,
starting_time=timestamps[0],
fiber_photometry_table_region=fiber_photometry_table_region,
)

Expand All @@ -196,8 +213,7 @@ def add_delta_f_over_f_traces(
nwbfile=nwbfile,
metadata=metadata,
data=data_to_add,
rate=self._sampling_frequency,
unit="n.a.",
timestamps=timestamps,
fiber_photometry_series_name=series_name,
table_region_ind=series_ind,
parent_container="processing/ophys",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import numpy as np
from neuroconv.tools import get_module
from neuroconv.utils import calculate_regular_series_rate
from pynwb import NWBFile
from ndx_fiber_photometry import (
FiberPhotometryTable,
Expand Down Expand Up @@ -40,10 +41,9 @@ def add_fiber_photometry_series(
nwbfile: NWBFile,
metadata: dict,
data: np.ndarray,
rate: float,
timestamps: np.ndarray,
fiber_photometry_series_name: str,
table_region_ind: int = 0,
unit: str = "F",
parent_container: Literal["acquisition", "processing/ophys"] = "acquisition",
):
fiber_photometry_metadata = metadata["Ophys"]["FiberPhotometry"]
Expand Down Expand Up @@ -207,13 +207,20 @@ def add_fiber_photometry_series(
region=[table_region_ind], description="source fibers"
)

timing_kwargs = dict()
rate = calculate_regular_series_rate(series=timestamps)
if rate is not None:
timing_kwargs.update(rate=rate, starting_time=timestamps[0])
else:
timing_kwargs.update(timestamps=timestamps)

fiber_photometry_response_series = FiberPhotometryResponseSeries(
name=trace_metadata["name"],
description=trace_metadata["description"],
data=data,
unit=unit,
rate=rate,
unit="n.a.",
fiber_photometry_table_region=fiber_photometry_table_region,
**timing_kwargs,
)

if parent_container == "acquisition":
Expand Down