Skip to content

Commit a7b6427

Browse files
authored
Merge pull request #917 from NeurodataWithoutBorders/staging
Port SpikeInterface update for tutorial generation
2 parents 5e0f6e1 + d639cb4 commit a7b6427

File tree

5 files changed

+39
-34
lines changed

5 files changed

+39
-34
lines changed

environments/environment-Linux.yml

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -15,13 +15,11 @@ dependencies:
1515
- flask-cors == 4.0.0
1616
- flask_restx == 1.1.0
1717
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
18-
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
19-
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
20-
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
21-
- spikeinterface == 0.100.5
18+
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0
2219
- scikit-learn == 1.4.0 # Tutorial data generation
2320
- tqdm_publisher >= 0.0.1 # Progress bars
2421
- tzlocal >= 5.2 # Frontend timezone handling
2522
- ndx-pose == 0.1.1
26-
- nwbinspector==0.6.2
23+
- nwbinspector == 0.6.2
2724
- tables
25+
- numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5

environments/environment-MAC-apple-silicon.yml

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,10 @@ dependencies:
2323
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
2424
# NOTE: the NeuroConv wheel on PyPI includes sonpy which is not compatible with arm64, so build and install
2525
# NeuroConv from GitHub, which will remove the sonpy dependency when building from Mac arm64
26-
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
27-
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
28-
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
29-
- spikeinterface == 0.100.5
26+
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0
3027
- scikit-learn == 1.4.0 # Tutorial data generation
3128
- tqdm_publisher >= 0.0.1 # Progress bars
3229
- tzlocal >= 5.2 # Frontend timezone handling
3330
- ndx-pose == 0.1.1
34-
- nwbinspector==0.6.2
31+
- nwbinspector == 0.6.2
32+
- numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5

environments/environment-MAC-intel.yml

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@ dependencies:
77
- nodejs = 18.16.1
88
# install these from conda-forge so that dependent packages get included in the distributable
99
- jsonschema = 4.18.0 # installs jsonschema-specifications
10-
- pytables = 3.10.2 # Install from conda-forge because PyPI version results in hdf5 conflicts and missing libs
1110
- pip
1211
- pip:
1312
- setuptools==70.0.0
@@ -19,12 +18,13 @@ dependencies:
1918
- flask-cors == 4.0.0
2019
- flask_restx == 1.1.0
2120
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
22-
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
23-
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
24-
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
25-
- spikeinterface == 0.100.5
21+
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0
2622
- scikit-learn == 1.4.0 # Tutorial data generation
2723
- tqdm_publisher >= 0.0.1 # Progress bars
2824
- tzlocal >= 5.2 # Frontend timezone handling
2925
- ndx-pose == 0.1.1
30-
- nwbinspector==0.6.2
26+
- nwbinspector == 0.6.2
27+
- numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5
28+
- h5py == 3.12.1 # 3.13.0 uses features in hdf5 1.14.4 that are not available in earlier hdf5 libs packaged
29+
# with tables==3.9.1 (latest that can be used by neuroconv 0.6.0).
30+
# h5py and tables need to be consistent for electron build for unknown reason

environments/environment-Windows.yml

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -18,13 +18,11 @@ dependencies:
1818
- flask-cors === 3.0.10
1919
- flask_restx == 1.1.0
2020
- werkzeug < 3.0 # werkzeug 3.0 deprecates features used by flask 2.3.2. Remove this when updating flask.
21-
# For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
22-
- neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
23-
# For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
24-
- spikeinterface == 0.100.5
21+
- neuroconv[dandi,compressors,ecephys,ophys,behavior,text] == 0.6.0
2522
- scikit-learn == 1.4.0 # Tutorial data generation
2623
- tqdm_publisher >= 0.0.1 # Progress bars
2724
- tzlocal >= 5.2 # Frontend timezone handling
2825
- ndx-pose == 0.1.1
29-
- nwbinspector==0.6.2
26+
- nwbinspector == 0.6.2
3027
- tables
28+
- numcodecs < 0.16.0 # numcodecs 0.16.0 is not compatible with zarr 2.18.5

src/pyflask/manageNeuroconv/manage_neuroconv.py

Lines changed: 24 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1668,11 +1668,13 @@ def generate_test_data(output_path: str):
16681668
"""
16691669
Autogenerate the data formats needed for the tutorial pipeline.
16701670
1671-
Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy spiking data.
1671+
Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy sorting data.
16721672
"""
16731673
import spikeinterface
1674-
from spikeinterface.exporters import export_to_phy
1675-
from spikeinterface.preprocessing import bandpass_filter, resample, scale
1674+
import spikeinterface.exporters
1675+
import spikeinterface.preprocessing
1676+
1677+
spikeinterface.set_global_job_kwargs(n_jobs=-1)
16761678

16771679
base_path = Path(output_path)
16781680
spikeglx_output_folder = base_path / "spikeglx"
@@ -1687,8 +1689,8 @@ def generate_test_data(output_path: str):
16871689
lf_sampling_frequency = 2_500.0
16881690
downsample_factor = int(ap_sampling_frequency / lf_sampling_frequency)
16891691

1690-
# Generate synthetic spiking and voltage traces with waveforms around them
1691-
artificial_ap_band_in_uV, spiking = spikeinterface.generate_ground_truth_recording(
1692+
# Generate synthetic sorting and voltage traces with waveforms around them
1693+
artificial_ap_band_in_uV, sorting = spikeinterface.generate_ground_truth_recording(
16921694
durations=[duration_in_s],
16931695
sampling_frequency=ap_sampling_frequency,
16941696
num_channels=number_of_channels,
@@ -1697,12 +1699,18 @@ def generate_test_data(output_path: str):
16971699
seed=0, # Fixed seed for reproducibility
16981700
)
16991701

1700-
unscaled_artificial_ap_band = scale(recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV)
1702+
unscaled_artificial_ap_band = spikeinterface.preprocessing.scale(
1703+
recording=artificial_ap_band_in_uV, gain=1 / conversion_factor_to_uV
1704+
)
17011705
int16_artificial_ap_band = unscaled_artificial_ap_band.astype(dtype="int16")
17021706
int16_artificial_ap_band.set_channel_gains(conversion_factor_to_uV)
17031707

1704-
unscaled_artificial_lf_filter = bandpass_filter(recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000)
1705-
unscaled_artificial_lf_band = resample(recording=unscaled_artificial_lf_filter, resample_rate=2_500)
1708+
unscaled_artificial_lf_filter = spikeinterface.preprocessing.bandpass_filter(
1709+
recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000
1710+
)
1711+
unscaled_artificial_lf_band = spikeinterface.preprocessing.decimate(
1712+
recording=unscaled_artificial_lf_filter, decimation_factor=downsample_factor
1713+
)
17061714
int16_artificial_lf_band = unscaled_artificial_lf_band.astype(dtype="int16")
17071715
int16_artificial_lf_band.set_channel_gains(conversion_factor_to_uV)
17081716

@@ -1725,13 +1733,16 @@ def generate_test_data(output_path: str):
17251733
with open(file=lf_meta_file_path, mode="w") as io:
17261734
io.write(lf_meta_content)
17271735

1728-
# Make Phy folder
1729-
waveform_extractor = spikeinterface.extract_waveforms(
1730-
recording=artificial_ap_band_in_uV, sorting=spiking, mode="memory"
1736+
# Make Phy folder - see https://spikeinterface.readthedocs.io/en/latest/modules/exporters.html
1737+
sorting_analyzer = spikeinterface.create_sorting_analyzer(
1738+
sorting=sorting, recording=artificial_ap_band_in_uV, mode="memory", sparse=False
17311739
)
1740+
sorting_analyzer.compute(["random_spikes", "waveforms", "templates", "noise_levels"])
1741+
sorting_analyzer.compute("spike_amplitudes")
1742+
sorting_analyzer.compute("principal_components", n_components=5, mode="by_channel_local")
17321743

1733-
export_to_phy(
1734-
waveform_extractor=waveform_extractor, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
1744+
spikeinterface.exporters.export_to_phy(
1745+
sorting_analyzer=sorting_analyzer, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
17351746
)
17361747

17371748

0 commit comments

Comments (0)