Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add electrodes interface for Huszar #65

Merged
merged 10 commits into from
Jun 28, 2023
Original file line number Diff line number Diff line change
@@ -1,14 +1,12 @@
from pathlib import Path

import numpy as np
from pynwb.file import NWBFile, TimeIntervals, TimeSeries
from pynwb.behavior import SpatialSeries, Position
from hdmf.backends.hdf5.h5_utils import H5DataIO

from neuroconv.utils.json_schema import FolderPathType
from neuroconv.basedatainterface import BaseDataInterface
from neuroconv.tools.nwb_helpers import get_module

from neuroconv.utils.json_schema import FolderPathType
from pynwb.behavior import CompassDirection, Position, SpatialSeries
from pynwb.file import NWBFile, TimeIntervals, TimeSeries
from scipy.io import loadmat as loadmat_scipy
from pymatreader import read_mat

Expand Down
43 changes: 25 additions & 18 deletions buzsaki_lab_to_nwb/huszar_hippocampus_dynamics/convert_session.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
"""Primary script to run to convert an entire session of data using the NWBConverter."""
from neuroconv.utils import load_dict_from_file, dict_deep_update

from neuroconv.utils import load_dict_from_file, dict_deep_update
from converter import HuzsarNWBConverter
from pathlib import Path

Expand All @@ -23,19 +23,10 @@ def session_to_nwbfile(session_dir_path, output_dir_path, stub_test=False, write
source_data = dict()
conversion_options = dict()

# Add sorter
file_path = session_dir_path / f"{session_id}.spikes.cellinfo.mat"
source_data.update(Sorting=dict(file_path=str(file_path), sampling_frequency=30_000.0))

# Add behavior data
source_data.update(Behavior8Maze=dict(folder_path=str(session_dir_path)))
conversion_options.update(Behavior8Maze=dict(stub_test=stub_test))

source_data.update(BehaviorSleep=dict(folder_path=str(session_dir_path)))

# Add Recordings
file_path = session_dir_path / f"{session_id}.dat"
xml_file_path = session_dir_path / f"{session_id}.xml"
raw_recording_file_available = file_path.is_file()

if file_path.is_file():
size_in_GB = file_path.stat().st_size / 1000
Expand All @@ -55,6 +46,8 @@ def session_to_nwbfile(session_dir_path, output_dir_path, stub_test=False, write
# Add LFP
file_path = session_dir_path / f"{session_id}.lfp"
folder_path = session_dir_path
lfp_file_available = file_path.is_file()

if file_path.is_file():
size_in_GB = file_path.stat().st_size / 1000**3

Expand All @@ -64,12 +57,26 @@ def session_to_nwbfile(session_dir_path, output_dir_path, stub_test=False, write

source_data.update(LFP=dict(file_path=str(file_path), xml_file_path=str(xml_file_path)))
conversion_options.update(LFP=dict(stub_test=stub_test, write_electrical_series=write_electrical_series))

else:
print(f"Skipping LFP interface for {session_id} because the file {file_path} does not have any data.")

else:
print(f"Skipping LFP interface for {session_id} because the file {file_path} does not exist.")

write_ecephys_metadata = (not raw_recording_file_available) and (not lfp_file_available)

# Add sorter
file_path = session_dir_path / f"{session_id}.spikes.cellinfo.mat"
source_data.update(Sorting=dict(file_path=str(file_path), verbose=verbose))
conversion_options.update(Sorting=dict(write_ecephys_metadata=write_ecephys_metadata))

# Add behavior data
source_data.update(Behavior8Maze=dict(folder_path=str(session_dir_path)))
conversion_options.update(Behavior8Maze=dict(stub_test=stub_test))

source_data.update(BehaviorSleep=dict(folder_path=str(session_dir_path)))

# Add epochs
source_data.update(Epochs=dict(folder_path=str(session_dir_path)))

Expand Down Expand Up @@ -97,22 +104,22 @@ def session_to_nwbfile(session_dir_path, output_dir_path, stub_test=False, write
metadata = dict_deep_update(metadata, editable_metadata)

# Run conversion
converter.run_conversion(
nwbfile = converter.run_conversion(
nwbfile_path=nwbfile_path,
metadata=metadata,
conversion_options=conversion_options,
overwrite=True,
)

return nwbfile


if __name__ == "__main__":
    # Parameters for conversion
    stub_test = True  # Converts only a stub of the data, for quick iteration and testing
    verbose = True
    output_dir_path = Path.home() / "conversion_nwb"

    project_root = Path("/shared/catalystneuro/HuszarR/optotagCA1")
    session_dir_path = project_root / "e13" / "e13_26m1" / "e13_26m1_211119"
    # Fail fast with a clear error instead of a bare assert (asserts are stripped under -O).
    if not session_dir_path.is_dir():
        raise FileNotFoundError(f"Session directory does not exist: {session_dir_path}")

    nwbfile = session_to_nwbfile(session_dir_path, output_dir_path, stub_test=stub_test, verbose=verbose)
27 changes: 3 additions & 24 deletions buzsaki_lab_to_nwb/huszar_hippocampus_dynamics/converter.py
Original file line number Diff line number Diff line change
@@ -1,15 +1,13 @@
from pathlib import Path
from datetime import datetime
from pathlib import Path
from zoneinfo import ZoneInfo

import numpy as np
from scipy.io import loadmat as loadmat_scipy

from neuroconv import NWBConverter
from scipy.io import loadmat as loadmat_scipy

from neuroconv.datainterfaces import NeuroScopeLFPInterface, NeuroScopeRecordingInterface

# from laserpulsesinterface import ValeroLaserPulsesInterface
from ripplesinterface import (
HuszarProcessingRipplesEventsInterface,
)
Expand All @@ -24,8 +22,7 @@
from epochsinterface import HuszarEpochsInterface
from trialsinterface import HuszarTrialsInterface


from sortinginterface import CellExplorerSortingInterface
from neuroconv.datainterfaces import CellExplorerSortingInterface


class HuzsarNWBConverter(NWBConverter):
Expand All @@ -49,24 +46,6 @@ def __init__(self, source_data: dict, verbose: bool = True):
self.session_folder_path = Path(self.data_interface_objects["Behavior8Maze"].source_data["folder_path"])
self.session_id = self.session_folder_path.stem

# Add electrode locations (modeled after yutavcnwbconverter)
electrode_chan_map_file_path = self.session_folder_path / "chanMap.mat"
chan_map = loadmat_scipy(electrode_chan_map_file_path)
xcoords = [x[0] for x in chan_map["xcoords"]]
ycoords = [y[0] for y in chan_map["ycoords"]]
kcoords = [y[0] for y in chan_map["kcoords"]]

for channel_id in chan_map["chanMap0ind"]:
if self.data_interface_objects.get("LFP"):
self.data_interface_objects["LFP"].recording_extractor.set_channel_locations(
locations=[xcoords[channel_id], ycoords[channel_id], kcoords[channel_id]], channel_ids=channel_id
)

if self.data_interface_objects.get("Recording"):
self.data_interface_objects["Recording"].recording_extractor.set_channel_locations(
locations=[xcoords[channel_id], ycoords[channel_id], kcoords[channel_id]], channel_ids=channel_id
)

def get_metadata(self):
metadata = super().get_metadata()
session_file = self.session_folder_path / f"{self.session_id}.session.mat"
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ hdf5storage>=0.1.18
neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@main
pymatreader==0.0.31
ndx-events==0.2.0
spikeinterface==0.97.1
spikeinterface==0.97.1
lxml==4.9.2
141 changes: 0 additions & 141 deletions buzsaki_lab_to_nwb/huszar_hippocampus_dynamics/sortinginterface.py

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -11,15 +11,16 @@

def access_behavior_property_safe(property, parent, behavior_mat):
    """Read ``parent[property]``, flattening one nesting level for concatenated recordings.

    Parameters
    ----------
    property : str
        Key to read from ``parent``.
    parent : Mapping
        Container holding the per-trial values for this session.
    behavior_mat : Mapping
        Loaded behavior structure; ``behavior_mat["behavior"]["trials"]`` is used to
        detect multi-recording sessions and to validate the result length.

    Returns
    -------
    list
        Flat list of values, one entry per recording.

    Raises
    ------
    AssertionError
        If the (flattened) value does not have one entry per recording.
    """
    trial_info = behavior_mat["behavior"]["trials"]
    position_trcat = trial_info["position_trcat"]

    value = parent[property]

    # When several recordings were concatenated, the loaded .mat stores one
    # sub-list per recording; flatten one level so the result aligns with
    # trial_info["recordings"].  A non-list (or single-element) position_trcat
    # indicates a single recording, whose values are already flat.
    if isinstance(position_trcat, list) and len(position_trcat) > 1:  # Check nest depth
        value = [num for sublist in value for num in sublist]  # Flatten the list if large depth

    assert len(value) == len(
        trial_info["recordings"]
    )  # Safe access properties should have the same length as the number of recordings

    return value

Expand Down
Loading