Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Various fixes #64

Merged
merged 50 commits into from
Aug 21, 2024
Merged
Show file tree
Hide file tree
Changes from 20 commits
Commits
Show all changes
50 commits
Select commit Hold shift + click to select a range
5db013f
fix-62-63
alejoe91 Mar 7, 2024
10b17df
Fix register_button
alejoe91 Mar 8, 2024
ce02754
data_processing.py -> data_loader.py
alejoe91 Mar 8, 2024
4cee522
fix data loader import
alejoe91 Mar 8, 2024
4674634
Add unit name annotation in load_spiketrains
alejoe91 Mar 8, 2024
17dc78d
Add trial columns only if needed
alejoe91 Mar 9, 2024
8a32e86
Add trial columns only if needed2
alejoe91 Mar 9, 2024
278e086
Re-copy actions after failures
alejoe91 Mar 9, 2024
370123e
Re-copy actions after failures 2
alejoe91 Mar 9, 2024
fa7b780
Remove excess spikes
alejoe91 Mar 13, 2024
cee3c45
Merge branch 'bug-fixes' of github.com:CINPLA/expipe-plugin-cinpla in…
alejoe91 Mar 13, 2024
aa3b828
Add print statement
alejoe91 Mar 14, 2024
aeda526
fix registration with depth
alejoe91 Mar 14, 2024
6219b9c
Add log for adjustment and annotate tabs
alejoe91 Mar 14, 2024
78898b1
Remove excess spikes at the right place!
alejoe91 Mar 15, 2024
95d7ae7
Raise error for multiple experiments/openephys folders
alejoe91 Mar 15, 2024
3a0eab4
Sort actions in widget
alejoe91 Mar 15, 2024
ebd4b02
Fix spike train loader
alejoe91 Mar 15, 2024
c526f51
Set include_events to True and fix load_spiketrain for tetrodes
alejoe91 Apr 5, 2024
3ee8a8d
add tools for processing, unit tracking and notebook registration
lepmik Apr 6, 2024
5a449ca
correct channel avg
lepmik Apr 6, 2024
f9caf63
Speed up import time
alejoe91 Apr 8, 2024
e924703
bug in unit retrieval plus some cleanup
lepmik Apr 15, 2024
6541759
bugfix
lepmik Apr 16, 2024
afc15b0
More comprehensive .gitignore
nicolossus May 16, 2024
579de74
Bump Python version from 3.10 to 3.11
nicolossus May 16, 2024
4d373f0
Configure pre-commit hooks
nicolossus May 16, 2024
aac8d4f
Fix formatting, add ruff specs and new deps
nicolossus May 16, 2024
6d57541
Update installation description and add 'How to contribute'
nicolossus May 16, 2024
07209df
Remove old, commented out code
nicolossus May 16, 2024
fa8a44e
Add formatting and linter check to CI
nicolossus May 16, 2024
46bc067
Remove duplicate pytest install
nicolossus May 16, 2024
5f4c528
Reformat the codebase ✨ 🍰 ✨
nicolossus May 16, 2024
8a9b685
Correct the source path
nicolossus May 16, 2024
41e8eb7
Exclude test_data datasets from formatter and linter
nicolossus May 16, 2024
cb2c9ff
Revert formatting and linting changes
nicolossus May 16, 2024
8d8e408
Install tbb only on non-Darwin platforms and remove spython for now
nicolossus May 16, 2024
367f33f
Processing+Curation: extract waveforms from all spikes
alejoe91 Jun 6, 2024
1898ca4
set t[0] in tracking to zero
lepmik Jun 11, 2024
b3aad4e
redo last commit and make read unit id faster
lepmik Jun 11, 2024
87e12aa
possibility to get specific group
lepmik Jun 11, 2024
991f7fd
subtract relative session start time for all timestamps (session star…
lepmik Jun 14, 2024
9b1637e
remove session time subtraction
lepmik Jun 14, 2024
66447c7
bugfix
lepmik Jun 17, 2024
3497a51
Remove units with less than n_components spikes
alejoe91 Jun 20, 2024
dc8c5b6
Pin scikit-learn and fix tests
alejoe91 Jun 20, 2024
b5424ff
Pin SI version
alejoe91 Aug 20, 2024
763f01a
Merge pull request #80 from CINPLA/fix-phy-groups
alejoe91 Aug 20, 2024
8483af7
Merge pull request #72 from CINPLA/dev_nico
nicolossus Aug 21, 2024
9bc0a2a
Fix formatting
nicolossus Aug 21, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
import spikeinterface.extractors as se

from pynwb import NWBHDF5IO
from .utils import _get_data_path
from .scripts.utils import _get_data_path


def get_data_path(action):
Expand Down Expand Up @@ -260,22 +260,23 @@ def load_spiketrains(data_path, channel_group=None, lim=None):
unit_id for unit_index, unit_id in enumerate(sorting.unit_ids) if groups[unit_index] == channel_group
]
sptr = []
# build neo pbjects
# build neo objects
for unit in unit_ids:
times = sorting.get_unit_spike_train(unit, return_times=True) * pq.s
spike_times = sorting.get_unit_spike_train(unit, return_times=True) * pq.s
if lim is None:
times = recording.get_times() * pq.s
t_start = times[0]
t_stop = times[-1]
else:
t_start = pq.Quantity(lim[0], "s")
t_stop = pq.Quantity(lim[1], "s")
mask = (times >= t_start) & (times <= t_stop)
times = times[mask]
mask = (spike_times >= t_start) & (spike_times <= t_stop)
spike_times = spike_times[mask]

st = neo.SpikeTrain(
times=times, t_start=t_start, t_stop=t_stop, sampling_rate=sorting.sampling_frequency * pq.Hz
times=spike_times, t_start=t_start, t_stop=t_stop, sampling_rate=sorting.sampling_frequency * pq.Hz
)
st.annotations.update({"name": unit})
for p in sorting.get_property_keys():
st.annotations.update({p: sorting.get_unit_property(unit, p)})
sptr.append(st)
Expand Down Expand Up @@ -313,7 +314,7 @@ def load_unit_annotations(data_path, channel_group=None):
]

for unit in unit_ids:
annotations = {}
annotations = {"name": unit}
for p in sorting.get_property_keys():
annotations.update({p: sorting.get_unit_property(unit, p)})
units.append(annotations)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,14 +78,15 @@ def add_to_nwbfile(
rising = rising[:-1]

if len(rising) == len(falling):
nwbfile.add_trial_column(
name="channel",
description="Open Ephys channel",
)
nwbfile.add_trial_column(
name="processor",
description="Open Ephys processor that recorded the event",
)
if nwbfile.trials is None:
nwbfile.add_trial_column(
name="channel",
description="Open Ephys channel",
)
nwbfile.add_trial_column(
name="processor",
description="Open Ephys processor that recorded the event",
)
start_times = times[rising].rescale("s").magnitude
stop_times = times[falling].rescale("s").magnitude
for start, stop in zip(start_times, stop_times):
Expand Down
22 changes: 15 additions & 7 deletions src/expipe_plugin_cinpla/scripts/convert_old_project.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,20 @@ def convert_old_project(
delimiter = "*" * len(process_msg)
print(f"\n{delimiter}\n{process_msg}\n{delimiter}\n")
old_action = old_actions[action_id]

old_action_folder = old_project.path / "actions" / action_id
new_action_folder = new_project.path / "actions" / action_id
old_data_folder = old_action_folder / "data"
new_data_folder = new_action_folder / "data"
# main.exdir
old_exdir_folder = old_data_folder / "main.exdir"

if exist_ok and not new_action_folder.is_dir():
# Copy action that previously failed
print(f">>> Re-copying action {action_id} to new project\n")
shutil.copytree(
old_action_folder, new_action_folder, ignore=shutil.ignore_patterns("main.exdir", ".git")
)
new_action = new_project.actions[action_id]

# replace file in attributes.yaml
Expand All @@ -136,18 +150,12 @@ def convert_old_project(
attributes_str = attributes_str.replace("main.exdir", "main.nwb")
attributes_file.write_text(attributes_str)

old_data_folder = old_project.path / "actions" / action_id / "data"
new_data_folder = new_project.path / "actions" / action_id / "data"

# main.exdir
old_exdir_folder = old_data_folder / "main.exdir"

# find open-ephys folder
acquisition_folder = old_exdir_folder / "acquisition"
openephys_folders = [p for p in acquisition_folder.iterdir() if p.is_dir()]
if len(openephys_folders) != 1:
print(f"Found {len(openephys_folders)} openephys folders in {acquisition_folder}!")
continue
raise ValueError("Expected to find exactly one openephys folder")
openephys_path = openephys_folders[0]
# here we assume the following action name: {entity_id}-{date}-{session}
entity_id = action_id.split("-")[0]
Expand Down
4 changes: 4 additions & 0 deletions src/expipe_plugin_cinpla/scripts/curation.py
Original file line number Diff line number Diff line change
Expand Up @@ -138,6 +138,10 @@ def apply_curation(self, sorter, curated_sorting):
else:
recording = self.load_processed_recording(sorter)

# remove excess spikes
print("Removing excess spikes from curated sorting")
curated_sorting = sc.remove_excess_spikes(curated_sorting, recording=recording)

# if not sort by group, extract dense and estimate group
if "group" not in curated_sorting.get_property_keys():
compute_and_set_unit_groups(curated_sorting, recording)
Expand Down
23 changes: 11 additions & 12 deletions src/expipe_plugin_cinpla/scripts/utils.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import sys
import shutil
from datetime import datetime
from datetime import datetime, timedelta
from pathlib import Path, PureWindowsPath
import numpy as np

Expand Down Expand Up @@ -47,7 +47,7 @@ def query_yes_no(question, default="yes", answer=None):


def deltadate(adjustdate, regdate):
delta = regdate - adjustdate if regdate > adjustdate else datetime.timedelta.max
delta = regdate - adjustdate if regdate > adjustdate else timedelta.max
return delta


Expand Down Expand Up @@ -176,19 +176,18 @@ def _make_data_path(action, overwrite, suffix=".nwb"):


def _get_data_path(action):
if "main" not in action.data:
return
try:
if "main" not in action.data:
return
data_path = action.data_path("main")
if not data_path.is_dir():
action_path = action._backend.path
project_path = action_path.parent.parent
# data_path = action.data['main']
data_path = project_path / str(Path(PureWindowsPath(action.data["main"])))
return data_path
except:
data_path = Path("None")
pass
if not data_path.is_dir():
action_path = action._backend.path
project_path = action_path.parent.parent
# data_path = action.data['main']
data_path = project_path / str(Path(PureWindowsPath(action.data["main"])))
return data_path
return


def register_templates(action, templates, overwrite=False):
Expand Down
Empty file.
Loading
Loading