port over other PR
CodyCBakerPhD committed Aug 21, 2024
1 parent 6356e1f commit bba6612
Showing 5 changed files with 16 additions and 24 deletions.
6 changes: 2 additions & 4 deletions environments/environment-Linux.yml
@@ -15,10 +15,8 @@ dependencies:
     - flask == 2.3.2
     - flask-cors == 4.0.0
     - flask_restx == 1.1.0
-    # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
-    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
-    # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
-    - spikeinterface == 0.100.5
+    # For stability, NeuroConv is pinned at a commit just prior to Pydantic schema inference refactor
+    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@8fa1c8f46316a7192073713e3e4e78c25a2e0d36#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
     - scikit-learn == 1.4.0 # Tutorial data generation
     - tqdm_publisher >= 0.0.1 # Progress bars
     - tzlocal >= 5.2 # Frontend timezone handling
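All four platform environment files make the same swap: the explicit spikeinterface pin is dropped and the NeuroConv source install moves to commit 8fa1c8f46316a7192073713e3e4e78c25a2e0d36 (the identical change is repeated for the macOS and Windows environments below). The sketch that follows is not part of this commit; it is one way to sanity-check which NeuroConv revision actually landed in an installed environment, using the direct_url.json metadata that pip records for VCS installs (PEP 610). Only the package name and expected hash come from this diff; everything else is standard-library behavior.

```python
# Hypothetical check, not part of the repository: confirm the installed neuroconv
# came from the expected git commit. Assumes a pip-managed environment in which
# neuroconv was installed from a git URL (as the environment files above request).
import json
from importlib.metadata import PackageNotFoundError, distribution

EXPECTED_COMMIT = "8fa1c8f46316a7192073713e3e4e78c25a2e0d36"

try:
    dist = distribution("neuroconv")
except PackageNotFoundError:
    raise SystemExit("neuroconv is not installed in this environment")

# pip writes direct_url.json (PEP 610) only for installs that came from a URL/VCS reference
raw = dist.read_text("direct_url.json")
if raw is None:
    raise SystemExit("neuroconv was installed from an index, not a git reference")

vcs_info = json.loads(raw).get("vcs_info", {})
installed_commit = vcs_info.get("commit_id", "<unknown>")
print("installed commit:", installed_commit)
print("matches pin:", installed_commit == EXPECTED_COMMIT)
```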
6 changes: 2 additions & 4 deletions environments/environment-MAC-apple-silicon.yml
@@ -23,10 +23,8 @@ dependencies:
     - flask_restx == 1.1.0
     # NOTE: the NeuroConv wheel on PyPI includes sonpy which is not compatible with arm64, so build and install
     # NeuroConv from GitHub, which will remove the sonpy dependency when building from Mac arm64
-    # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
-    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
-    # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
-    - spikeinterface == 0.100.5
+    # For stability, NeuroConv is pinned at a commit just prior to Pydantic schema inference refactor
+    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@8fa1c8f46316a7192073713e3e4e78c25a2e0d36#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
     - scikit-learn == 1.4.0 # Tutorial data generation
     - tqdm_publisher >= 0.0.1 # Progress bars
     - tzlocal >= 5.2 # Frontend timezone handling
6 changes: 2 additions & 4 deletions environments/environment-MAC-intel.yml
@@ -18,10 +18,8 @@ dependencies:
     - flask == 2.3.2
     - flask-cors == 4.0.0
     - flask_restx == 1.1.0
-    # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
-    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
-    # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
-    - spikeinterface == 0.100.5
+    # For stability, NeuroConv is pinned at a commit just prior to Pydantic schema inference refactor
+    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@8fa1c8f46316a7192073713e3e4e78c25a2e0d36#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
     - scikit-learn == 1.4.0 # Tutorial data generation
     - tqdm_publisher >= 0.0.1 # Progress bars
     - tzlocal >= 5.2 # Frontend timezone handling
6 changes: 2 additions & 4 deletions environments/environment-Windows.yml
@@ -18,10 +18,8 @@ dependencies:
     - flask == 2.3.2
     - flask-cors === 3.0.10
     - flask_restx == 1.1.0
-    # For stability, NeuroConv is pinned at a commit just prior to breaking SpikeInterface compatibility
-    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@fa636458aa5c321f1c2c08f6e682b4a52d5a83f3#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
-    # For stability, pinning SpikeInterface to a version that works with NeuroConv and with tutorial generation
-    - spikeinterface == 0.100.5
+    # For stability, NeuroConv is pinned at a commit just prior to Pydantic schema inference refactor
+    - neuroconv @ git+https://github.com/catalystneuro/neuroconv.git@8fa1c8f46316a7192073713e3e4e78c25a2e0d36#neuroconv[dandi,compressors,ecephys,ophys,behavior,text]
     - scikit-learn == 1.4.0 # Tutorial data generation
     - tqdm_publisher >= 0.0.1 # Progress bars
     - tzlocal >= 5.2 # Frontend timezone handling
16 changes: 8 additions & 8 deletions src/pyflask/manageNeuroconv/manage_neuroconv.py
@@ -1665,11 +1665,11 @@ def generate_test_data(output_path: str):
     """
     Autogenerate the data formats needed for the tutorial pipeline.
-    Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy spiking data.
+    Consists of a single-probe single-segment SpikeGLX recording (both AP and LF bands) as well as Phy sorting data.
     """
     import spikeinterface
     from spikeinterface.exporters import export_to_phy
-    from spikeinterface.preprocessing import bandpass_filter, resample, scale
+    from spikeinterface.preprocessing import bandpass_filter, decimate, scale

     base_path = Path(output_path)
     spikeglx_output_folder = base_path / "spikeglx"
@@ -1684,8 +1684,8 @@ def generate_test_data(output_path: str):
     lf_sampling_frequency = 2_500.0
     downsample_factor = int(ap_sampling_frequency / lf_sampling_frequency)

-    # Generate synthetic spiking and voltage traces with waveforms around them
-    artificial_ap_band_in_uV, spiking = spikeinterface.generate_ground_truth_recording(
+    # Generate synthetic sorting and voltage traces with waveforms around them
+    artificial_ap_band_in_uV, sorting = spikeinterface.generate_ground_truth_recording(
         durations=[duration_in_s],
         sampling_frequency=ap_sampling_frequency,
         num_channels=number_of_channels,
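For context on the rename above: `generate_ground_truth_recording` returns a recording/sorting pair, and the second element is a spikeinterface sorting object (unit IDs plus spike trains), so `sorting` describes it more accurately than `spiking`. A minimal sketch of that pair, not part of this commit and assuming a recent spikeinterface release:

```python
import spikeinterface as si

# The helper returns (recording, sorting); sizes here are deliberately tiny
recording, sorting = si.generate_ground_truth_recording(
    durations=[3.0],              # seconds
    sampling_frequency=30_000.0,  # a typical AP-band rate, chosen for illustration
    num_channels=4,
    seed=0,
)

print(recording.get_num_channels(), recording.get_sampling_frequency())  # 4 30000.0
print(sorting.get_num_units(), sorting.get_unit_ids()[:3])               # unit count and a few unit IDs
```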
@@ -1699,7 +1699,7 @@ def generate_test_data(output_path: str):
     int16_artificial_ap_band.set_channel_gains(conversion_factor_to_uV)

     unscaled_artificial_lf_filter = bandpass_filter(recording=unscaled_artificial_ap_band, freq_min=0.5, freq_max=1_000)
-    unscaled_artificial_lf_band = resample(recording=unscaled_artificial_lf_filter, resample_rate=2_500)
+    unscaled_artificial_lf_band = decimate(recording=unscaled_artificial_lf_filter, decimation_factor=downsample_factor)
     int16_artificial_lf_band = unscaled_artificial_lf_band.astype(dtype="int16")
     int16_artificial_lf_band.set_channel_gains(conversion_factor_to_uV)
@@ -1723,12 +1723,12 @@ def generate_test_data(output_path: str):
         io.write(lf_meta_content)

     # Make Phy folder
-    waveform_extractor = spikeinterface.extract_waveforms(
-        recording=artificial_ap_band_in_uV, sorting=spiking, mode="memory"
+    sorting_analyzer = si.create_sorting_analyzer(
+        sorting=sorting, recording=artificial_ap_band_in_uV, mode="memory", sparse=False
     )

     export_to_phy(
-        waveform_extractor=waveform_extractor, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
+        sorting_analyzer=sorting_analyzer, output_folder=phy_output_folder, remove_if_exists=True, copy_binary=False
     )
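This hunk swaps the deprecated `extract_waveforms`/WaveformExtractor path for a `SortingAnalyzer`, which newer `export_to_phy` signatures accept via the `sorting_analyzer` argument. The sketch below is not this repository's code: it assumes spikeinterface >= 0.101, uses the documented keyword `format="memory"` for the in-memory analyzer (the hunk above writes `mode="memory"`), and computes the standard waveform/template extensions that the Phy exporter relies on before writing the folder.

```python
from pathlib import Path

import spikeinterface as si
from spikeinterface.exporters import export_to_phy

# Tiny synthetic recording/sorting pair to export
recording, sorting = si.generate_ground_truth_recording(
    durations=[3.0], sampling_frequency=30_000.0, num_channels=4, seed=0
)

# The analyzer replaces the old WaveformExtractor; keep it in memory and dense
analyzer = si.create_sorting_analyzer(
    sorting=sorting, recording=recording, format="memory", sparse=False
)

# export_to_phy relies on waveform/template information, so compute those extensions first
analyzer.compute(["random_spikes", "waveforms", "templates"])

export_to_phy(
    sorting_analyzer=analyzer,
    output_folder=Path("phy_example"),
    remove_if_exists=True,
    copy_binary=False,
    compute_pc_features=False,  # keep the sketch light
    compute_amplitudes=False,
)
```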

