diff --git a/si_kilosort25/Dockerfile b/si_kilosort25/Dockerfile
index 3dab148..a5dffaf 100644
--- a/si_kilosort25/Dockerfile
+++ b/si_kilosort25/Dockerfile
@@ -12,6 +12,12 @@ RUN git clone https://github.com/SpikeInterface/spikeinterface_pipelines.git &&
     git checkout dev && \
     pip install -e .
 
+# Install spikeinterface from source, for now
+RUN git clone https://github.com/SpikeInterface/spikeinterface.git && \
+    cd spikeinterface && \
+    # git checkout dev && \
+    pip install -e .[full]
+
 # Copy files into the container
 WORKDIR /app
 COPY *.py ./
diff --git a/si_kilosort25/models.py b/si_kilosort25/models.py
index 9538cdb..45c730b 100644
--- a/si_kilosort25/models.py
+++ b/si_kilosort25/models.py
@@ -110,6 +110,7 @@ class PipelineContext(BaseModel):
     input: InputFile = Field(description='Input NWB file')
     output: OutputFile = Field(description='Output NWB file')
     lazy_read_input: bool = Field(default=True, description='Lazy read input file')
+    stub_test: bool = Field(default=False, description='Stub test')
     recording_context: RecordingContext = Field(description='Recording context')
     preprocessing_context: PreprocessingContext = Field(default=PreprocessingContext(), description='Preprocessing context')
     sorting_context: SortingContext = Field(default=SortingContext(), description='Sorting context')
diff --git a/si_kilosort25/processor_pipeline.py b/si_kilosort25/processor_pipeline.py
index 02be072..bc18694 100644
--- a/si_kilosort25/processor_pipeline.py
+++ b/si_kilosort25/processor_pipeline.py
@@ -4,9 +4,14 @@
 import os
 import pynwb
 import h5py
+import logging
 
 from models import PipelineContext
-from nwb_utils import NwbRecording, create_sorting_out_nwb_file
+from nwb_utils import create_sorting_out_nwb_file
+
+
+logging.basicConfig(level=logging.INFO)
+logger = logging.getLogger(__name__)
 
 
 class PipelineProcessor(ProcessorBase):
@@ -22,16 +27,11 @@ def run(context: PipelineContext):
 
         # Create SI recording from InputFile
 
-        # input = context.input
-        # recording = NwbRecording(
-        #     file=input.get_h5py_file(),
-        #     electrical_series_path=context.recording_context.electrical_series_path
-        # )
-        print('Opening remote input file')
+        logger.info('Opening remote input file')
         download = not context.lazy_read_input
         ff = context.input.get_file(download=download)
 
-        print('Creating input recording')
+        logger.info('Creating input recording')
         recording = NwbRecordingExtractor(
             file=ff,
             electrical_series_location=context.recording_context.electrical_series_path,
@@ -39,11 +39,15 @@ def run(context: PipelineContext):
             # stream_mode="remfile"
         )
 
+        if context.stub_test:
+            n_frames = int(min(3_000_000, recording.get_num_frames()))
+            recording = recording.frame_slice(start_frame=0, end_frame=n_frames)
+
         ############### FOR TESTING -- REMOVE LATER ############
         print(recording)
 
-        from spikeinterface.sorters import Kilosort2_5Sorter
-        Kilosort2_5Sorter.set_kilosort2_5_path(kilosort2_5_path="/mnt/shared_storage/Github/Kilosort")
+        # from spikeinterface.sorters import Kilosort2_5Sorter
+        # Kilosort2_5Sorter.set_kilosort2_5_path(kilosort2_5_path="/mnt/shared_storage/Github/Kilosort")
         #######################################################
 
         # TODO - run pipeline
@@ -52,7 +56,7 @@ def run(context: PipelineContext):
             'chunk_duration': '1s',
             'progress_bar': False
         }
-        print('Running pipeline')
+        logger.info('Running pipeline')
         _, sorting, _ = si_pipeline.run_pipeline(
             recording=recording,
             scratch_folder="./scratch/",
@@ -65,7 +69,7 @@ def run(context: PipelineContext):
         )
 
         # TODO - upload output file
-        print('Writing output NWB file')
+        logger.info('Writing output NWB file')
         h5_file = h5py.File(ff, 'r')
         with pynwb.NWBHDF5IO(file=h5_file, mode='r', load_namespaces=True) as io:
             # with pynwb.NWBHDF5IO(file=input.get_h5py_file(), mode='r', load_namespaces=True) as io:
@@ -81,5 +85,5 @@ def run(context: PipelineContext):
                 sorting_out_fname=sorting_out_fname
             )
 
-        print('Uploading output NWB file')
+        logger.info('Uploading output NWB file')
         context.output.set(sorting_out_fname)
diff --git a/si_kilosort25/requirements.txt b/si_kilosort25/requirements.txt
index 1ed70be..153f45e 100644
--- a/si_kilosort25/requirements.txt
+++ b/si_kilosort25/requirements.txt
@@ -1,3 +1,4 @@
-spikeinterface[full]==0.99.1
+pynwb
+# spikeinterface[full]==0.99.1
 # spikeinterface_pipelines
 # dendro
diff --git a/si_kilosort25/sample_context.yaml b/si_kilosort25/sample_context.yaml
deleted file mode 100644
index 75e7095..0000000
--- a/si_kilosort25/sample_context.yaml
+++ /dev/null
@@ -1,7 +0,0 @@
-input:
-  local_file_name: /mnt/shared_storage/taufferconsulting/client_catalystneuro/project_jaz/stub_test.nwb
-output:
-  output_file_name: output/sorting.nwb
-lazy_read_input: true
-recording_context:
-  electrical_series_path: /acquisition/ElectricalSeriesAP
diff --git a/si_kilosort25/sample_context_1.yaml b/si_kilosort25/sample_context_1.yaml
new file mode 100644
index 0000000..99fbc96
--- /dev/null
+++ b/si_kilosort25/sample_context_1.yaml
@@ -0,0 +1,6 @@
+input: https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/3c7/8e6/3c78e6c9-d196-4bea-a7ce-494a315789be
+output: ./output/sorting.nwb
+lazy_read_input: true
+stub_test: false
+recording_context:
+  electrical_series_path: /acquisition/ElectricalSeriesRaw
diff --git a/si_kilosort25/sample_context_2.yaml b/si_kilosort25/sample_context_2.yaml
index dcfc52d..0abfbe3 100644
--- a/si_kilosort25/sample_context_2.yaml
+++ b/si_kilosort25/sample_context_2.yaml
@@ -1,5 +1,6 @@
 input: https://dandi-api-staging-dandisets.s3.amazonaws.com/blobs/1ed/41e/1ed41e35-8445-4608-b327-b30f74388bea
 output: ./output/sorting.nwb
 lazy_read_input: true
+stub_test: false
 recording_context:
   electrical_series_path: /acquisition/ElectricalSeriesRaw
diff --git a/si_kilosort25/test_in_container.sh b/si_kilosort25/test_in_container.sh
new file mode 100644
index 0000000..b889390
--- /dev/null
+++ b/si_kilosort25/test_in_container.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+# Docker image
+IMAGE="ghcr.io/catalystneuro/dendro_si_kilosort25"
+
+# Command to be executed inside the container
+ENTRYPOINT_CMD="dendro"
+ARGS="test-app-processor --app-dir . --processor spikeinterface_pipeline_ks25 --context sample_context_1.yaml"
+
+
+# Run the Docker container
+docker run --gpus all \
+  -v $(pwd):/app \
+  -v /mnt/shared_storage/Github/dendro/python:/src/dendro/python \
+  -v /mnt/shared_storage/Github/spikeinterface_pipelines:/src/spikeinterface_pipelines \
+  -w /app \
+  --entrypoint "$ENTRYPOINT_CMD" \
+  $IMAGE \
+  $ARGS