Commit
fixes and shell script
luiztauffer committed Dec 22, 2023
1 parent 5692dbf commit 5751ba0
Showing 8 changed files with 52 additions and 21 deletions.
6 changes: 6 additions & 0 deletions si_kilosort25/Dockerfile
@@ -12,6 +12,12 @@ RUN git clone https://github.com/SpikeInterface/spikeinterface_pipelines.git && \
git checkout dev && \
pip install -e .

# Install spikeinterface from source, for now
RUN git clone https://github.com/SpikeInterface/spikeinterface.git && \
cd spikeinterface && \
# git checkout dev && \
pip install -e .[full]

# Copy files into the container
WORKDIR /app
COPY *.py ./
1 change: 1 addition & 0 deletions si_kilosort25/models.py
@@ -110,6 +110,7 @@ class PipelineContext(BaseModel):
input: InputFile = Field(description='Input NWB file')
output: OutputFile = Field(description='Output NWB file')
lazy_read_input: bool = Field(default=True, description='Lazy read input file')
stub_test: bool = Field(default=False, description='Stub test')
recording_context: RecordingContext = Field(description='Recording context')
preprocessing_context: PreprocessingContext = Field(default=PreprocessingContext(), description='Preprocessing context')
sorting_context: SortingContext = Field(default=SortingContext(), description='Sorting context')
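The new stub_test flag defaults to False, so existing contexts are unaffected. As a hypothetical usage sketch (not part of the commit), and assuming dendro's InputFile and OutputFile fields accept the plain URL/path strings used in the sample context YAML files, the model can be populated like this:

import yaml

from models import PipelineContext

# Hypothetical: relies on InputFile/OutputFile coercing plain strings,
# as the sample_context_*.yaml files in this commit suggest.
with open('sample_context_1.yaml') as f:
    context = PipelineContext(**yaml.safe_load(f))

print(context.stub_test)  # False unless the YAML sets stub_test: true
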
30 changes: 17 additions & 13 deletions si_kilosort25/processor_pipeline.py
@@ -4,9 +4,14 @@
import os
import pynwb
import h5py
import logging

from models import PipelineContext
from nwb_utils import NwbRecording, create_sorting_out_nwb_file
from nwb_utils import create_sorting_out_nwb_file


logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


class PipelineProcessor(ProcessorBase):
@@ -22,28 +27,27 @@ class PipelineProcessor(ProcessorBase):
def run(context: PipelineContext):

# Create SI recording from InputFile
# input = context.input
# recording = NwbRecording(
# file=input.get_h5py_file(),
# electrical_series_path=context.recording_context.electrical_series_path
# )
print('Opening remote input file')
logger.info('Opening remote input file')
download = not context.lazy_read_input
ff = context.input.get_file(download=download)

print('Creating input recording')
logger.info('Creating input recording')
recording = NwbRecordingExtractor(
file=ff,
electrical_series_location=context.recording_context.electrical_series_path,
# file_path=context.input.get_url(),
# stream_mode="remfile"
)

if context.stub_test:
# Stub mode: keep at most 3,000,000 frames (~100 s at 30 kHz) for a quick end-to-end run
n_frames = int(min(3_000_000, recording.get_num_frames()))
recording = recording.frame_slice(start_frame=0, end_frame=n_frames)

############### FOR TESTING -- REMOVE LATER ############
print(recording)

from spikeinterface.sorters import Kilosort2_5Sorter
Kilosort2_5Sorter.set_kilosort2_5_path(kilosort2_5_path="/mnt/shared_storage/Github/Kilosort")
# from spikeinterface.sorters import Kilosort2_5Sorter
# Kilosort2_5Sorter.set_kilosort2_5_path(kilosort2_5_path="/mnt/shared_storage/Github/Kilosort")
#######################################################

# TODO - run pipeline
@@ -52,7 +56,7 @@ def run(context: PipelineContext):
'chunk_duration': '1s',
'progress_bar': False
}
print('Running pipeline')
logger.info('Running pipeline')
_, sorting, _ = si_pipeline.run_pipeline(
recording=recording,
scratch_folder="./scratch/",
@@ -65,7 +69,7 @@
)

# TODO - upload output file
print('Writing output NWB file')
logger.info('Writing output NWB file')
h5_file = h5py.File(ff, 'r')
with pynwb.NWBHDF5IO(file=h5_file, mode='r', load_namespaces=True) as io:
# with pynwb.NWBHDF5IO(file=input.get_h5py_file(), mode='r', load_namespaces=True) as io:
@@ -81,5 +85,5 @@ def run(context: PipelineContext):
sorting_out_fname=sorting_out_fname
)

print('Uploading output NWB file')
logger.info('Uploading output NWB file')
context.output.set(sorting_out_fname)
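
For scale, the stub-test cap of 3,000,000 frames is about 100 seconds of data at a 30 kHz sampling rate. A minimal standalone sketch of the same slicing, using spikeinterface's synthetic-recording helper rather than an NWB file:

from spikeinterface.core import generate_recording

# 200 s at 30 kHz = 6,000,000 frames, so the stub cap actually truncates it
recording = generate_recording(num_channels=4, durations=[200.0], sampling_frequency=30_000.0)
n_frames = int(min(3_000_000, recording.get_num_frames()))
stub = recording.frame_slice(start_frame=0, end_frame=n_frames)
print(stub.get_num_frames() / stub.get_sampling_frequency())  # ~100.0 seconds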
3 changes: 2 additions & 1 deletion si_kilosort25/requirements.txt
@@ -1,3 +1,4 @@
spikeinterface[full]==0.99.1
pynwb
# spikeinterface[full]==0.99.1
# spikeinterface_pipelines
# dendro
7 changes: 0 additions & 7 deletions si_kilosort25/sample_context.yaml

This file was deleted.

6 changes: 6 additions & 0 deletions si_kilosort25/sample_context_1.yaml
@@ -0,0 +1,6 @@
input: https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/3c7/8e6/3c78e6c9-d196-4bea-a7ce-494a315789be
output: ./output/sorting.nwb
lazy_read_input: true
stub_test: false
recording_context:
electrical_series_path: /acquisition/ElectricalSeriesRaw
1 change: 1 addition & 0 deletions si_kilosort25/sample_context_2.yaml
@@ -1,5 +1,6 @@
input: https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e/1ed41e35-8445-4608-b327-b30f74388bea
output: ./output/sorting.nwb
lazy_read_input: true
stub_test: false
recording_context:
electrical_series_path: /acquisition/ElectricalSeriesRaw
19 changes: 19 additions & 0 deletions si_kilosort25/test_in_container.sh
@@ -0,0 +1,19 @@
#!/bin/bash

# Docker image
IMAGE="ghcr.io/catalystneuro/dendro_si_kilosort25"

# Command to be executed inside the container
ENTRYPOINT_CMD="dendro"
ARGS="test-app-processor --app-dir . --processor spikeinterface_pipeline_ks25 --context sample_context_1.yaml"


# Run the Docker container, bind-mounting local checkouts of dendro and
# spikeinterface_pipelines over the installed packages so code edits do not
# require rebuilding the image. $ARGS is intentionally left unquoted so the
# arguments are word-split.
docker run --gpus all \
-v "$(pwd)":/app \
-v /mnt/shared_storage/Github/dendro/python:/src/dendro/python \
-v /mnt/shared_storage/Github/spikeinterface_pipelines:/src/spikeinterface_pipelines \
-w /app \
--entrypoint "$ENTRYPOINT_CMD" \
"$IMAGE" \
$ARGS
