From 8a9f06d0e986f22d86a9cc3042d35320319408e5 Mon Sep 17 00:00:00 2001
From: Kushal Bakshi <52367253+kushalbakshi@users.noreply.github.com>
Date: Tue, 7 Nov 2023 21:59:50 +0000
Subject: [PATCH 1/2] Install `elements` in DevContainer
---
.devcontainer/Dockerfile | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index f8b4297..7cb9a7b 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -33,7 +33,7 @@ RUN \
# pipeline dependencies
apt-get install gcc g++ ffmpeg libsm6 libxext6 -y && \
pip install numpy Cython && \
- pip install --no-cache-dir -e /tmp/element-miniscope[caiman_requirements,caiman] && \
+ pip install --no-cache-dir -e /tmp/element-miniscope[elements,caiman_requirements,caiman] && \
caimanmanager.py install && \
# clean up
rm -rf /tmp/element-miniscope && \
From e203726195f51407b8b851343322d3be1adb2088 Mon Sep 17 00:00:00 2001
From: Kushal Bakshi <52367253+kushalbakshi@users.noreply.github.com>
Date: Wed, 8 Nov 2023 21:51:13 +0000
Subject: [PATCH 2/2] Revert paramset_idx to paramset_id
---
element_miniscope/miniscope.py | 14 +-
notebooks/tutorial.ipynb | 1740 +++++++++++++++++++++++++++++---
2 files changed, 1633 insertions(+), 121 deletions(-)
diff --git a/element_miniscope/miniscope.py b/element_miniscope/miniscope.py
index d5c34ba..6faeed0 100644
--- a/element_miniscope/miniscope.py
+++ b/element_miniscope/miniscope.py
@@ -366,7 +366,7 @@ class ProcessingParamSet(dj.Lookup):
"""Parameters of the processing method.
Attributes:
- paramset_idx (foreign key, smallint): Unique parameter set ID.
+ paramset_id (foreign key, smallint): Unique parameter set ID.
ProcessingMethod (varchar(16) ): ProcessingMethod from the lookup table.
paramset_desc (varchar(128) ): Description of the parameter set.
paramset_set_hash (uuid): UUID hash for parameter set.
@@ -375,7 +375,7 @@ class ProcessingParamSet(dj.Lookup):
definition = """
# Parameter set used for processing of miniscope data
- paramset_idx: smallint
+ paramset_id: smallint
---
-> ProcessingMethod
paramset_desc: varchar(128)
@@ -388,7 +388,7 @@ class ProcessingParamSet(dj.Lookup):
def insert_new_params(
cls,
processing_method: str,
- paramset_idx: int,
+ paramset_id: int,
paramset_desc: str,
params: dict,
processing_method_desc: str = "",
@@ -397,7 +397,7 @@ def insert_new_params(
Args:
processing_method (str): Name of the processing method or software.
- paramset_idx (int): Unique number for the set of processing parameters.
+ paramset_id (int): Unique number for the set of processing parameters.
paramset_desc (str): Description of the processing parameter set.
params (dict): Dictionary of processing parameters for the selected processing_method.
processing_method_desc (str, optional): Description of the processing method. Defaults to "".
@@ -411,7 +411,7 @@ def insert_new_params(
)
param_dict = {
"processing_method": processing_method,
- "paramset_idx": paramset_idx,
+ "paramset_id": paramset_id,
"paramset_desc": paramset_desc,
"params": params,
"param_set_hash": dict_to_uuid(params),
@@ -419,8 +419,8 @@ def insert_new_params(
q_param = cls & {"param_set_hash": param_dict["param_set_hash"]}
if q_param: # If the specified param-set already exists
- pname = q_param.fetch1("paramset_idx")
- if pname == paramset_idx: # If the existed set has the same name: job done
+ pname = q_param.fetch1("paramset_id")
+ if pname == paramset_id: # If the existed set has the same name: job done
return
else: # If not same name: human error, try adding with different name
raise dj.DataJointError(
diff --git a/notebooks/tutorial.ipynb b/notebooks/tutorial.ipynb
index c3abb08..66c0448 100644
--- a/notebooks/tutorial.ipynb
+++ b/notebooks/tutorial.ipynb
@@ -8,9 +8,9 @@
"# DataJoint Elements Tutorial: Miniscope\n",
"\n",
"Welcome to the tutorial for DataJoint's open-source data pipeline for miniature\n",
- "fluorescent microscopes (miniscope). This tutorial aims to provide a comprehensive understanding of the\n",
+ "fluorescent microscope imaging (miniscope). This tutorial aims to provide a comprehensive understanding of the\n",
"open-source data pipeline created using `element-miniscope` for processing\n",
- "and analyzing calcium dynamics in neurons. \n",
+ "and analyzing neuronal calcium dynamics acquired with the UCLA Miniscope and Miniscope DAQ. \n",
"\n",
"**In this tutorial, we will cover:**\n",
"- The basics:\n",
@@ -35,7 +35,7 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
@@ -83,9 +83,19 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 2,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "[2023-11-08 21:19:10,053][WARNING]: lab.Project and related tables will be removed in a future version of Element Lab. Please use the project schema.\n",
+ "[2023-11-08 21:19:10,055][INFO]: Connecting root@fakeservices.datajoint.io:3306\n",
+ "[2023-11-08 21:19:10,063][INFO]: Connected root@fakeservices.datajoint.io:3306\n"
+ ]
+ }
+ ],
"source": [
"from tutorial_pipeline import (\n",
" lab,\n",
@@ -104,9 +114,20 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 3,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "Schema `neuro_subject`"
+ ]
+ },
+ "execution_count": 3,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"subject.schema"
]
@@ -120,9 +141,101 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 4,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ "
Total: 0
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject subject_nickna sex subject_birth_ subject_descri\n",
+ "+---------+ +------------+ +-----+ +------------+ +------------+\n",
+ "\n",
+ " (Total: 0)"
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"subject.Subject()"
]
@@ -138,9 +251,520 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 5,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "image/svg+xml": [
+ "\n",
+ "\n",
+ "%3 \n",
+ " \n",
+ "\n",
+ "\n",
+ "0 \n",
+ "\n",
+ "0 \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence.Trace \n",
+ "\n",
+ " \n",
+ "miniscope.Fluorescence.Trace \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "0->miniscope.Fluorescence.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "1 \n",
+ "\n",
+ "1 \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation.Mask \n",
+ "\n",
+ " \n",
+ "miniscope.Segmentation.Mask \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "1->miniscope.Segmentation.Mask \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "2 \n",
+ "\n",
+ "2 \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "2->miniscope.MotionCorrection \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingQualityMetrics.Trace \n",
+ "\n",
+ " \n",
+ "miniscope.ProcessingQualityMetrics.Trace \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence.Trace->miniscope.ProcessingQualityMetrics.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Activity.Trace \n",
+ "\n",
+ " \n",
+ "miniscope.Activity.Trace \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence.Trace->miniscope.Activity.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence->miniscope.Fluorescence.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Activity \n",
+ "\n",
+ "\n",
+ "miniscope.Activity \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence->miniscope.Activity \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingQualityMetrics \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingQualityMetrics \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Fluorescence->miniscope.ProcessingQualityMetrics \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingLocation \n",
+ "\n",
+ " \n",
+ "miniscope.RecordingLocation \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingTask \n",
+ "\n",
+ " \n",
+ "miniscope.ProcessingTask \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Processing \n",
+ "\n",
+ "\n",
+ "miniscope.Processing \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingTask->miniscope.Processing \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassification \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassification \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassification.MaskType \n",
+ "\n",
+ " \n",
+ "miniscope.MaskClassification.MaskType \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassification->miniscope.MaskClassification.MaskType \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection.Block \n",
+ "\n",
+ " \n",
+ "miniscope.MotionCorrection.Block \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection.Summary \n",
+ "\n",
+ " \n",
+ "miniscope.MotionCorrection.Summary \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Curation \n",
+ "\n",
+ " \n",
+ "miniscope.Curation \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Curation->miniscope.MotionCorrection \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Curation->miniscope.Segmentation \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingInfo \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingInfo \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingInfo->miniscope.ProcessingTask \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingInfo.File \n",
+ "\n",
+ " \n",
+ "miniscope.RecordingInfo.File \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.RecordingInfo->miniscope.RecordingInfo.File \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Activity->miniscope.Activity.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.AcquisitionSoftware \n",
+ "\n",
+ " \n",
+ "miniscope.AcquisitionSoftware \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Recording \n",
+ "\n",
+ " \n",
+ "miniscope.Recording \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.AcquisitionSoftware->miniscope.Recording \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection->miniscope.MotionCorrection.Summary \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection.RigidMotionCorrection \n",
+ "\n",
+ " \n",
+ "miniscope.MotionCorrection.RigidMotionCorrection \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection->miniscope.MotionCorrection.RigidMotionCorrection \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection.NonRigidMotionCorrection \n",
+ "\n",
+ " \n",
+ "miniscope.MotionCorrection.NonRigidMotionCorrection \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection->miniscope.MotionCorrection.NonRigidMotionCorrection \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ActivityExtractionMethod \n",
+ "\n",
+ " \n",
+ "miniscope.ActivityExtractionMethod \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ActivityExtractionMethod->miniscope.Activity \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MotionCorrection.NonRigidMotionCorrection->miniscope.MotionCorrection.Block \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Recording->miniscope.RecordingLocation \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Recording->miniscope.RecordingInfo \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingMethod \n",
+ "\n",
+ " \n",
+ "miniscope.ProcessingMethod \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingParamSet \n",
+ "\n",
+ " \n",
+ "miniscope.ProcessingParamSet \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingMethod->miniscope.ProcessingParamSet \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskType \n",
+ "\n",
+ " \n",
+ "miniscope.MaskType \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskType->miniscope.MaskClassification.MaskType \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingParamSet->miniscope.ProcessingTask \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Channel \n",
+ "\n",
+ " \n",
+ "miniscope.Channel \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Channel->0 \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Channel->1 \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Channel->2 \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "subject.Subject \n",
+ "\n",
+ " \n",
+ "subject.Subject \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "session.Session \n",
+ "\n",
+ " \n",
+ "session.Session \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "subject.Subject->session.Session \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.ProcessingQualityMetrics->miniscope.ProcessingQualityMetrics.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation->miniscope.Fluorescence \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation->miniscope.MaskClassification \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation->miniscope.Segmentation.Mask \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "session.Session->miniscope.Recording \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Processing->miniscope.Curation \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation.Mask->miniscope.Fluorescence.Trace \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.Segmentation.Mask->miniscope.MaskClassification.MaskType \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassificationMethod \n",
+ "\n",
+ " \n",
+ "miniscope.MaskClassificationMethod \n",
+ " \n",
+ " \n",
+ " \n",
+ "\n",
+ "\n",
+ "miniscope.MaskClassificationMethod->miniscope.MaskClassification \n",
+ " \n",
+ " \n",
+ " \n",
+ " "
+ ],
+ "text/plain": [
+ ""
+ ]
+ },
+ "execution_count": 5,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"(dj.Diagram(subject.Subject) + dj.Diagram(session.Session) + dj.Diagram(miniscope))"
]
@@ -207,18 +831,49 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 6,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "subject : varchar(8) \n",
+ "---\n",
+ "subject_nickname=\"\" : varchar(64) \n",
+ "sex : enum('M','F','U') \n",
+ "subject_birth_date : date \n",
+ "subject_description=\"\" : varchar(1024) \n",
+ "\n"
+ ]
+ }
+ ],
"source": [
"print(subject.Subject.describe())"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 7,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# \n",
+ "subject : varchar(8) # \n",
+ "---\n",
+ "subject_nickname=\"\" : varchar(64) # \n",
+ "sex : enum('M','F','U') # \n",
+ "subject_birth_date : date # \n",
+ "subject_description=\"\" : varchar(1024) # "
+ ]
+ },
+ "execution_count": 7,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"subject.Subject.heading"
]
@@ -235,9 +890,105 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 8,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ " subject1 \n",
+ " \n",
+ "U \n",
+ "2023-01-01 \n",
+ " \n",
+ "
\n",
+ " \n",
+ "
Total: 1
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject subject_nickna sex subject_birth_ subject_descri\n",
+ "+----------+ +------------+ +-----+ +------------+ +------------+\n",
+ "subject1 U 2023-01-01 \n",
+ " (Total: 1)"
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"subject.Subject.insert1(\n",
" dict(subject=\"subject1\", subject_birth_date=\"2023-01-01\", sex=\"U\")\n",
@@ -255,18 +1006,41 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 9,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "-> subject.Subject\n",
+ "session_datetime : datetime \n",
+ "\n"
+ ]
+ }
+ ],
"source": [
"print(session.Session.describe())"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 10,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# \n",
+ "subject : varchar(8) # \n",
+ "session_datetime : datetime # "
+ ]
+ },
+ "execution_count": 10,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"session.Session.heading"
]
@@ -289,7 +1063,7 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
@@ -298,9 +1072,93 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 12,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ " subject1 \n",
+ "2023-01-01 00:00:00 \n",
+ "
\n",
+ " \n",
+ "
Total: 1
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet\n",
+ "+----------+ +------------+\n",
+ "subject1 2023-01-01 00:\n",
+ " (Total: 1)"
+ ]
+ },
+ "execution_count": 12,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"session.Session.insert1(session_key)\n",
"session.Session()"
@@ -322,9 +1180,97 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 13,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ " subject1 \n",
+ "2023-01-01 00:00:00 \n",
+ "session1 \n",
+ "
\n",
+ " \n",
+ "
Total: 1
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet session_dir \n",
+ "+----------+ +------------+ +------------+\n",
+ "subject1 2023-01-01 00: session1 \n",
+ " (Total: 1)"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"session.SessionDirectory.insert1(dict(**session_key, session_dir=\"session1\"))\n",
"session.SessionDirectory()"
@@ -335,26 +1281,55 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "As the workflow diagram indicates, the tables in the `probe` schemas need to\n",
- "contain data before the tables in the `ephys` schema accept any data. Let's\n",
- "start by inserting into `probe.Probe`, a table containing metadata about a\n",
- "multielectrode probe. "
+ "As the workflow diagram indicates, the first table in the `miniscope` schema is the `miniscope.Recording` table."
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 16,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stdout",
+ "output_type": "stream",
+ "text": [
+ "-> session.Session\n",
+ "recording_id : int \n",
+ "---\n",
+ "-> lab.Device\n",
+ "-> miniscope.AcquisitionSoftware\n",
+ "recording_notes=\"\" : varchar(4095) # free-notes\n",
+ "\n"
+ ]
+ }
+ ],
"source": [
- "print(miniscope.Recording.describe)"
+ "print(miniscope.Recording.describe())"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 15,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# \n",
+ "subject : varchar(8) # \n",
+ "session_datetime : datetime # \n",
+ "recording_id : int # \n",
+ "---\n",
+ "device : varchar(32) # \n",
+ "acq_software : varchar(24) # \n",
+ "recording_notes=\"\" : varchar(4095) # free-notes"
+ ]
+ },
+ "execution_count": 15,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"miniscope.Recording.heading"
]
@@ -364,22 +1339,36 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "The probe metadata is used by the downstream `ProbeInsertion` table which we\n",
- "insert data into in the cells below:"
    "As you can see from the `describe()` method, this table contains a dependency on the `Device` table within the `lab` schema. Let's quickly insert a miniscope device before inserting into `miniscope.Recording`. "
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 17,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# \n",
+ "device : varchar(32) # \n",
+ "---\n",
+ "modality : varchar(64) # \n",
+ "description=\"\" : varchar(256) # "
+ ]
+ },
+ "execution_count": 17,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"lab.Device.heading"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 18,
"metadata": {},
"outputs": [],
"source": [
@@ -388,7 +1377,7 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 19,
"metadata": {},
"outputs": [],
"source": [
@@ -410,7 +1399,7 @@
"\n",
"### Automatically populate tables\n",
"\n",
- "`miniscope.MiniscopeRecordingInfo` is the first table in this pipeline that can be\n",
+ "`miniscope.RecordingInfo` is the first table in this pipeline that can be\n",
"automatically populated using using the `populate()` method.\n",
"\n",
"In DataJoint, the `populate()` method is a powerful feature designed to fill tables based on the logic defined in the table's `make` method. Here's a breakdown of its functionality:\n",
@@ -419,36 +1408,261 @@
"\n",
"- **Dependency Resolution**: Before populating a table, `populate()` ensures all its dependencies are populated. This maintains the integrity and consistency of the data.\n",
"\n",
- "- **Part Tables**: If a table has part tables associated with it, calling `populate()` on the main table will also populate its part tables. This is especially useful in cases like `ephys.EphysRecording` and its part table `ephys.EphysRecording.EphysFile`, as they are closely linked in terms of data lineage.\n",
+ "- **Part Tables**: If a table has part tables associated with it, calling `populate()` on the main table will also populate its part tables. This is especially useful in cases like `miniscope.RecordingInfo` and its part table `miniscope.RecordingInfo.File`, as they are closely linked in terms of data lineage.\n",
"\n",
"- **Restriction**: The `populate()` method can be restricted to specific entries. For instance, by providing a `session_key`, we're ensuring the method only operates on the data relevant to that particular session. This is both efficient and avoids unnecessary operations.\n",
"\n",
- "In the upcoming cells, we'll make use of the `populate()` method to fill the `miniscope.MiniscopeRecordingInfo` table and its part table. Remember, while this operation is automated, it's essential to understand the underlying logic to ensure accurate and consistent data entry.\n"
+ "In the upcoming cells, we'll make use of the `populate()` method to fill the `miniscope.RecordingInfo` table and its part table. Remember, while this operation is automated, it's essential to understand the underlying logic to ensure accurate and consistent data entry.\n"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 20,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " Store metadata about recording \n",
+ " \n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ "
Total: 0
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet *recording_id nchannels nframes px_height px_width um_height um_width fps gain spatial_downsa led_power time_stamp recording_date recording_dura\n",
+ "+---------+ +------------+ +------------+ +-----------+ +---------+ +-----------+ +----------+ +-----------+ +----------+ +-----+ +------+ +------------+ +-----------+ +--------+ +------------+ +------------+\n",
+ "\n",
+ " (Total: 0)"
+ ]
+ },
+ "execution_count": 20,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"miniscope.RecordingInfo()"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 21,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ "
Total: 0
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet *recording_id *file_id file_path \n",
+ "+---------+ +------------+ +------------+ +---------+ +-----------+\n",
+ "\n",
+ " (Total: 0)"
+ ]
+ },
+ "execution_count": 21,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"miniscope.RecordingInfo.File()"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 22,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "name": "stderr",
+ "output_type": "stream",
+ "text": [
+ "RecordingInfo: 100%|██████████| 1/1 [00:01<00:00, 1.80s/it]\n"
+ ]
+ }
+ ],
"source": [
"miniscope.RecordingInfo.populate(session_key, display_progress=True)"
]
@@ -463,20 +1677,256 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 24,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " Store metadata about recording \n",
+ " \n",
+ "
\n",
+ " \n",
+ " subject1 \n",
+ "2023-01-01 00:00:00 \n",
+ "1 \n",
+ "1 \n",
+ "111770 \n",
+ "600 \n",
+ "600 \n",
+ "nan \n",
+ "nan \n",
+ "20.0 \n",
+ "2.0 \n",
+ "1 \n",
+ "5.0 \n",
+ "=BLOB= \n",
+ "None \n",
+ "5588.5 \n",
+ "
\n",
+ " \n",
+ "
Total: 1
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet *recording_id nchannels nframes px_height px_width um_height um_width fps gain spatial_downsa led_power time_stamp recording_date recording_dura\n",
+ "+----------+ +------------+ +------------+ +-----------+ +---------+ +-----------+ +----------+ +-----------+ +----------+ +------+ +------+ +------------+ +-----------+ +--------+ +------------+ +------------+\n",
+ "subject1 2023-01-01 00: 1 1 111770 600 600 nan nan 20.0 2.0 1 5.0 =BLOB= None 5588.5 \n",
+ " (Total: 1)"
+ ]
+ },
+ "execution_count": 24,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "miniscope.MiniscopeRecordingInfo()"
+ "miniscope.RecordingInfo()"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 25,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ " \n",
+ "
\n",
+ " \n",
+ " subject1 \n",
+ "2023-01-01 00:00:00 \n",
+ "1 \n",
+ "0 \n",
+ "session1/0.avi \n",
+ "
\n",
+ " \n",
+ "
Total: 1
\n",
+ " "
+ ],
+ "text/plain": [
+ "*subject *session_datet *recording_id *file_id file_path \n",
+ "+----------+ +------------+ +------------+ +---------+ +------------+\n",
+ "subject1 2023-01-01 00: 1 0 session1/0.avi\n",
+ " (Total: 1)"
+ ]
+ },
+ "execution_count": 25,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "miniscope.MiniscopeRecordingInfo.File()"
+ "miniscope.RecordingInfo.File()"
]
},
{
@@ -496,51 +1946,93 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 26,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# Parameter set used for processing of miniscope data\n",
+ "paramset_idx : smallint # \n",
+ "---\n",
+ "processing_method : varchar(16) # \n",
+ "paramset_desc : varchar(128) # \n",
+ "param_set_hash : uuid # \n",
+ "params : longblob # dictionary of all applicable parameters"
+ ]
+ },
+ "execution_count": 26,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
"miniscope.ProcessingParamSet.heading"
]
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 27,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "ename": "NameError",
+ "evalue": "name 'ephys' is not defined",
+ "output_type": "error",
+ "traceback": [
+ "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
+ "\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
+ "Cell \u001b[0;32mIn[27], line 26\u001b[0m\n\u001b[1;32m 1\u001b[0m \u001b[39m# insert clustering task manually\u001b[39;00m\n\u001b[1;32m 2\u001b[0m params_ks \u001b[39m=\u001b[39m {\n\u001b[1;32m 3\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mfs\u001b[39m\u001b[39m\"\u001b[39m: \u001b[39m30000\u001b[39m,\n\u001b[1;32m 4\u001b[0m \u001b[39m\"\u001b[39m\u001b[39mfshigh\u001b[39m\u001b[39m\"\u001b[39m: \u001b[39m150\u001b[39m,\n\u001b[0;32m (...)\u001b[0m\n\u001b[1;32m 24\u001b[0m \u001b[39m\"\u001b[39m\u001b[39museRAM\u001b[39m\u001b[39m\"\u001b[39m: \u001b[39m0\u001b[39m,\n\u001b[1;32m 25\u001b[0m }\n\u001b[0;32m---> 26\u001b[0m ephys\u001b[39m.\u001b[39mClusteringParamSet\u001b[39m.\u001b[39minsert_new_params(\n\u001b[1;32m 27\u001b[0m clustering_method\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mkilosort2\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 28\u001b[0m paramset_idx\u001b[39m=\u001b[39m\u001b[39m0\u001b[39m,\n\u001b[1;32m 29\u001b[0m params\u001b[39m=\u001b[39mparams_ks,\n\u001b[1;32m 30\u001b[0m paramset_desc\u001b[39m=\u001b[39m\u001b[39m\"\u001b[39m\u001b[39mSpike sorting using Kilosort2\u001b[39m\u001b[39m\"\u001b[39m,\n\u001b[1;32m 31\u001b[0m )\n\u001b[1;32m 32\u001b[0m ephys\u001b[39m.\u001b[39mClusteringParamSet()\n",
+ "\u001b[0;31mNameError\u001b[0m: name 'ephys' is not defined"
+ ]
+ }
+ ],
"source": [
- "# insert clustering task manually\n",
- "params_ks = {\n",
- " \"fs\": 30000,\n",
- " \"fshigh\": 150,\n",
- " \"minfr_goodchannels\": 0.1,\n",
- " \"Th\": [10, 4],\n",
- " \"lam\": 10,\n",
- " \"AUCsplit\": 0.9,\n",
- " \"minFR\": 0.02,\n",
- " \"momentum\": [20, 400],\n",
- " \"sigmaMask\": 30,\n",
- " \"ThPr\": 8,\n",
- " \"spkTh\": -6,\n",
- " \"reorder\": 1,\n",
- " \"nskip\": 25,\n",
- " \"GPU\": 1,\n",
- " \"Nfilt\": 1024,\n",
- " \"nfilt_factor\": 4,\n",
- " \"ntbuff\": 64,\n",
- " \"whiteningRange\": 32,\n",
- " \"nSkipCov\": 25,\n",
- " \"scaleproc\": 200,\n",
- " \"nPCs\": 3,\n",
- " \"useRAM\": 0,\n",
- "}\n",
- "ephys.ClusteringParamSet.insert_new_params(\n",
- " clustering_method=\"kilosort2\",\n",
- " paramset_idx=0,\n",
- " params=params_ks,\n",
- " paramset_desc=\"Spike sorting using Kilosort2\",\n",
+ "params = dict(\n",
+ " decay_time=0.4,\n",
+ " pw_rigid=False,\n",
+ " max_shifts=(5, 5),\n",
+ " gSig_filt=(3, 3),\n",
+ " strides=(48, 48),\n",
+ " overlaps=(24, 24),\n",
+ " max_deviation_rigid=3,\n",
+ " border_nan=\"copy\",\n",
+ " method_init=\"corr_pnr\",\n",
+ " K=None,\n",
+ " gSig=(3, 3),\n",
+ " gSiz=(13, 13),\n",
+ " merge_thr=0.7,\n",
+ " p=1,\n",
+ " tsub=2,\n",
+ " ssub=1,\n",
+ " rf=40,\n",
+ " stride=20,\n",
+ " only_init=True,\n",
+ " nb=0,\n",
+ " nb_patch=0,\n",
+ " method_deconvolution=\"oasis\",\n",
+ " low_rank_background=None,\n",
+ " update_background_components=True,\n",
+ " min_corr=0.8,\n",
+ " min_pnr=10,\n",
+ " normalize_init=False,\n",
+ " center_psf=True,\n",
+ " ssub_B=2,\n",
+ " ring_size_factor=1.4,\n",
+ " del_duplicates=True,\n",
+ " border_pix=0,\n",
+ " min_SNR=3,\n",
+ " rval_thr=0.85,\n",
+ " use_cnn=False,\n",
")\n",
- "ephys.ClusteringParamSet()"
+ "\n",
+ "\n",
+ "miniscope.ProcessingParamSet.insert_new_params(\n",
+ " processing_method=\"caiman\",\n",
+ " paramset_id=0,\n",
+ " paramset_desc=\"Miniscope analysis with CaImAn using default parameters\",\n",
+ " params=params,\n",
+ ")"
]
},
{
@@ -548,9 +2040,9 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "Now that we've inserted kilosort parameters into the `ClusteringParamSet` table,\n",
- "we're almost ready to sort our data. DataJoint uses a `ClusteringTask` table to\n",
- "manage which `EphysRecording` and `ClusteringParamSet` should be used during processing. \n",
+    "Now that we've inserted CaImAn parameters into the `ProcessingParamSet` table,\n",
+ "we're almost ready to run image processing. DataJoint uses a `ProcessingTask` table to\n",
+ "manage which `Recording` and `ProcessingParamSet` should be used during processing. \n",
"\n",
"This table is important for defining several important aspects of\n",
"downstream processing. Let's view the attributes to get a better understanding. "
@@ -558,11 +2050,29 @@
},
{
"cell_type": "code",
- "execution_count": null,
+ "execution_count": 28,
"metadata": {},
- "outputs": [],
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ "# Manual table marking a processing task to be triggered or manually processed\n",
+ "subject : varchar(8) # \n",
+ "session_datetime : datetime # \n",
+ "recording_id : int # \n",
+ "paramset_idx : smallint # \n",
+ "---\n",
+ "processing_output_dir : varchar(255) # relative to the root data directory\n",
+ "task_mode=\"load\" : enum('load','trigger') # 'load': load existing results"
+ ]
+ },
+ "execution_count": 28,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
"source": [
- "ephys.ClusteringTask.heading"
+ "miniscope.ProcessingTask.heading"
]
},
{
@@ -570,14 +2080,14 @@
"cell_type": "markdown",
"metadata": {},
"source": [
- "The `ClusteringTask` table contains two important attributes: \n",
- "+ `paramset_idx` \n",
+ "The `ProcessingTask` table contains two important attributes: \n",
+ "+ `paramset_id` \n",
"+ `task_mode` \n",
"\n",
- "The `paramset_idx` attribute tracks\n",
+ "The `paramset_id` attribute tracks\n",
"your kilosort parameter sets. You can choose the parameter set using which \n",
- "you want spike sort ephys data. For example, `paramset_idx=0` may contain\n",
- "default parameters for kilosort processing whereas `paramset_idx=1` contains your custom parameters for sorting. This\n",
+    "you want to process your miniscope data. For example, `paramset_id=0` may contain\n",
+    "default parameters for CaImAn processing whereas `paramset_id=1` contains your custom parameters for processing. This\n",
"attribute tells the `Processing` table which set of parameters you are processing in a given `populate()`.\n",
"\n",
"The `task_mode` attribute can be set to either `load` or `trigger`. When set to `load`,\n",
@@ -591,13 +2101,13 @@
"metadata": {},
"outputs": [],
"source": [
- "ephys.ClusteringTask.insert1(\n",
+ "miniscope.ProcessingTask.insert1(\n",
" dict(\n",
- " session_key,\n",
- " insertion_number=1,\n",
- " paramset_idx=0,\n",
+ " **session_key,\n",
+ " recording_id=1,\n",
+ " paramset_id=0,\n",
" task_mode=\"load\", # load or trigger\n",
- " clustering_output_dir=\"processed/subject5/session1/probe_1/kilosort2-5_1\",\n",
+ " processing_output_dir=\"session1\",\n",
" )\n",
")"
]
@@ -608,7 +2118,7 @@
"metadata": {},
"outputs": [],
"source": [
- "ephys.Clustering.populate(session_key, display_progress=True)"
+ "miniscope.Processing.populate(session_key, display_progress=True)"
]
},
{
@@ -627,7 +2137,7 @@
"metadata": {},
"outputs": [],
"source": [
- "ephys.Curation.heading"
+ "miniscope.Curation.heading"
]
},
{
@@ -636,8 +2146,8 @@
"metadata": {},
"outputs": [],
"source": [
- "clustering_key = (ephys.ClusteringTask & session_key).fetch1(\"KEY\")\n",
- "ephys.Curation().create1_from_clustering_task(clustering_key)"
+    "processing_key = (miniscope.ProcessingTask & session_key).fetch1(\"KEY\")\n",
+    "miniscope.Curation().create1_from_processing_task(processing_key)"
]
},
{
@@ -655,9 +2165,11 @@
"metadata": {},
"outputs": [],
"source": [
- "ephys.CuratedClustering.populate(session_key, display_progress=True)\n",
- "ephys.LFP.populate(session_key, display_progress=True)\n",
- "ephys.WaveformSet.populate(session_key, display_progress=True)"
+    "# Populate the remaining processing tables\n",
+ "miniscope.MotionCorrection.populate(session_key, display_progress=True)\n",
+ "miniscope.Segmentation.populate(session_key, display_progress=True)\n",
+ "miniscope.Fluorescence.populate(session_key, display_progress=True)\n",
+ "miniscope.Activity.populate(session_key, display_progress=True)"
]
},
{