Commit 9765f34
khl02007 committed Oct 6, 2023
1 parent aa975fa
Showing 3 changed files with 30 additions and 6 deletions.
3 changes: 2 additions & 1 deletion src/spyglass/spikesorting/v1/curation.py
@@ -19,8 +19,9 @@
 @schema
 class Curation(dj.Manual):
     definition = """
+    # Curation of a SpikeSorting. Use `insert_curation` to insert rows if possible.
     -> SpikeSorting
-    curation_id=0: int # a number corresponding to the index of this curation
+    curation_id=0: int
     ---
     parent_curation_id=-1: int
     -> AnalysisNwbfile
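The added table comment points users at `insert_curation` instead of raw inserts. A minimal sketch of what the defaulted attributes mean on insert, with toy values; the `sorting_id` attribute name and all values are assumptions for illustration, not taken from this commit:

from spyglass.spikesorting.v1.curation import Curation

# Hypothetical key illustrating the defaults in the definition above.
key = {
    "sorting_id": "toy-sorting-uuid",  # assumed name of the SpikeSorting primary key
    "curation_id": 0,                  # default 0: the initial, uncurated entry
    "parent_curation_id": -1,          # default -1: this curation has no parent
    "analysis_file_name": "toy.nwb",   # toy AnalysisNwbfile reference
}
# Curation.insert1(key)  # valid, but the new comment recommends `insert_curation`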
24 changes: 22 additions & 2 deletions src/spyglass/spikesorting/v1/figurl_curation.py
@@ -24,6 +24,7 @@ class FigURLCurationSelection(dj.Manual):
     metrics_figurl: longblob # metrics to display in the figURL
     """
 
+    @staticmethod
     def generate_curation_uri(key: Dict) -> str:
         """Generates a kachery-cloud URI containing curation info from a row in Curation table
@@ -44,12 +44,20 @@ def generate_curation_uri(key: Dict) -> str:
         unit_ids = nwb_sorting["id"][:]
         labels = nwb_sorting["labels"][:]
         merge_groups = nwb_sorting["merge_groups"][:]
+
+        unit_ids = [str(unit_id) for unit_id in unit_ids]
+
         if labels:
             labels_dict = dict(zip(unit_ids, labels))
         else:
             labels_dict = {}
+
         if merge_groups:
             merge_groups_list = _merge_dict_to_list(merge_groups)
+            merge_groups_list = [
+                [str(unit_id) for unit_id in merge_group]
+                for merge_group in merge_groups_list
+            ]
         else:
             merge_groups_list = []
+
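The new str() casts matter because the curation document is stored via kachery as JSON, and JSON object keys are always strings: casting unit IDs up front keeps the in-memory dicts identical to what a consumer reads back. A minimal self-contained sketch with toy data (the field names are illustrative, not this file's schema):

import json

unit_ids = [1, 2, 3]                   # toy integer unit IDs
labels = [["accept"], ["reject"], []]  # toy labels, one list per unit

# Cast keys to str, as the diff above does, before building the document.
labels_dict = dict(zip([str(u) for u in unit_ids], labels))
doc = {"labelsByUnit": labels_dict, "mergeGroups": [["1", "2"]]}

# String keys survive a JSON round trip unchanged; int keys would come back as str.
print(json.loads(json.dumps(doc)) == doc)  # True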
@@ -70,7 +79,7 @@ class FigURLCuration(dj.Computed):
     url: varchar(1000)
     """
 
-    def make(self, key: Dict):
+    def make(self, key: dict):
         # FETCH
         sorting_analysis_file_name = (Curation & key).fetch1(
             "analysis_file_name"
@@ -101,6 +110,8 @@ def make(self, key: Dict):
 
         unit_metrics = _reformat_metrics(metric_dict)
 
+        # TODO: figure out a way to specify the similarity metrics
+
         # Generate the figURL
         key["url"] = _generate_figurl(
             R=recording,
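One step back: the make signature above changed typing.Dict to the builtin dict. The builtin has always been a legal annotation, and since PEP 585 (Python 3.9) it also supports subscripting, so the typing import is no longer needed:

from typing import Dict

def make_old(key: Dict) -> None:
    ...  # pre-PEP-585 spelling; requires the typing import

def make_new(key: dict) -> None:
    ...  # builtin spelling used by the commit; dict[str, str] also works on 3.9+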
@@ -114,6 +125,14 @@ def make(self, key: Dict):
         # INSERT
         self.insert1(key, skip_duplicates=True)
 
+    @classmethod
+    def get_labels(cls):
+        raise NotImplementedError
+
+    @classmethod
+    def get_merge_groups(cls):
+        raise NotImplementedError
+
 
 def _generate_figurl(
     R: si.BaseRecording,
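On the two placeholder classmethods added above: raising NotImplementedError (rather than returning it) makes a missing implementation fail loudly at the call site instead of handing back the exception class as a value. A minimal sketch, independent of spyglass internals:

class Base:
    def get_labels(self):
        # Fails immediately and visibly if no override exists.
        raise NotImplementedError

class Impl(Base):
    def get_labels(self):
        return {"1": ["accept"]}  # toy label mapping

print(Impl().get_labels())  # {'1': ['accept']}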
Expand Down Expand Up @@ -143,6 +162,7 @@ def _generate_figurl(
max_num_snippets_per_segment=max_num_snippets_per_segment,
channel_neighborhood_size=channel_neighborhood_size,
)

# create a fake unit similarity matrix (for future reference)
# similarity_scores = []
# for u1 in X.unit_ids:
@@ -154,7 +174,7 @@ def _generate_figurl(
     #             similarity=similarity_matrix[(X.unit_ids==u1),(X.unit_ids==u2)]
     #         )
     #     )
-    # Create the similarity matrix view
+    # # Create the similarity matrix view
     # unit_similarity_matrix_view = vv.UnitSimilarityMatrix(
     #     unit_ids=X.unit_ids,
     #     similarity_scores=similarity_scores
9 changes: 6 additions & 3 deletions src/spyglass/spikesorting/v1/metric_curation.py
@@ -210,23 +210,26 @@ def make(self, key):
             os.mkdir(waveforms_dir)
         except FileExistsError:
             pass
+        print("Extracting waveforms...")
         waveforms = si.extract_waveforms(
             recording=recording,
             sorting=sorting,
             folder=waveforms_dir,
             **waveform_param,
         )
         # compute metrics
+        print("Computing metrics...")
         metrics = {}
         for metric_name, metric_param_dict in metric_param.items():
             metrics[metric_name] = self._compute_metric(
                 nwb_file_name, waveforms, metric_name, **metric_param_dict
             )
-        # generate labels and merge groups
-        labels = self._compute_labels(metrics, label_param)
+
+        print("Applying curation...")
+        labels = self._compute_labels(metrics, label_param)
         merge_groups = self._compute_merge_groups(metrics, merge_param)
-        # save everything in NWB
 
+        print("Saving to NWB...")
         (
             key["analysis_file_name"],
             key["object_id"],
