Spike Analysis Tutorial#
This tutorial covers working with spike detection data and integrating it with EEG features in NeuRodent.
Overview#
Spike Analysis Results integrate frequency-domain spike detection with continuous EEG analysis:
Load spike detection data from frequency-domain analysis
Compute peri-event time histograms (PETH)
Analyze spike-LFP relationships
Integrate with WAR for combined analysis
import sys
from pathlib import Path
import logging
import numpy as np
import matplotlib.pyplot as plt
from neurodent import core, visualization

# Emit INFO-level messages so NeuRodent reports progress in the notebook.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()  # root logger — acceptable for a tutorial notebook
/home/runner/work/neurodent/neurodent/.venv/lib/python3.10/site-packages/tqdm/auto.py:21: TqdmWarning: IProgress not found. Please update jupyter and ipywidgets. See https://ipywidgets.readthedocs.io/en/stable/user_install.html
from .autonotebook import tqdm as notebook_tqdm
1. Loading Spike-Sorted Data#
NeuRodent discovers raw EEG recordings on disk and loads them via SpikeInterface (here, EDF files read with `read_edf`):
# Load data
data_path = Path("/path/to/spike/data")  # replace with your recording directory
animal_id = "animal_001"

# Create AnimalOrganizer using pattern-based discovery
# NOTE(review): this glob has no metadata placeholders (e.g. '{animal}',
# '{session}'), so metadata extraction will be limited — see the UserWarning
# in the captured output below. Confirm the intended pattern for real data.
ao = visualization.AnimalOrganizer(
    pattern=str(data_path / "**/*.edf"),
    animal_id=animal_id,
    # "si" mode loads recordings through SpikeInterface; read_edf parses EDF files
    lro_kwargs={"mode": "si", "extract_func": "read_edf"},
)
/tmp/ipykernel_2764/961982889.py:6: UserWarning: Pattern has no placeholders (e.g., '{animal}', '{session}'). Metadata extraction will be limited. Got: '/path/to/spike/data/**/*.edf'
ao = visualization.AnimalOrganizer(
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
Cell In[2], line 6
3 animal_id = "animal_001"
5 # Create AnimalOrganizer using pattern-based discovery
----> 6 ao = visualization.AnimalOrganizer(
7 pattern=str(data_path / "**/*.edf"),
8 animal_id=animal_id,
9 lro_kwargs={"mode": "si", "extract_func": "read_edf"},
10 )
File ~/work/neurodent/neurodent/src/neurodent/visualization/results.py:211, in AnimalOrganizer.__init__(self, pattern, animal_id, skip_sessions, truncate, assume_from_number, lro_kwargs, normalize_session)
208 self._animalday_folder_groups[session].append(path_val)
210 if not self._animalday_folder_groups:
--> 211 raise ValueError(f"No items discovered for pattern: {pattern}")
213 if truncate:
214 from neurodent import core
ValueError: No items discovered for pattern: /path/to/spike/data/**/*.edf
2. Frequency Domain Spike Detection#
Use frequency-domain spike detection for population spike analysis:
# Compute frequency domain spike analysis
# Runs the frequency-domain spike detector over every discovered session;
# returns one result object per session.
fdsars = ao.compute_frequency_domain_spike_analysis(
    multiprocess_mode='serial'  # 'serial' is simplest to debug in a notebook
)
print(f"Found {len(fdsars)} sessions")
for fdsar in fdsars:
    print(fdsar)
3. Working with Spike Analysis Results#
Access spike-related features:
# Get spike counts per channel
for fdsar in fdsars:
    counts = fdsar.get_spike_counts_per_channel()  # one count per channel
    total = fdsar.get_total_spike_count()
    print(f"Session {fdsar.animal_day}: {total} total spikes")
    # channel_names and counts are iterated in lockstep, so they are
    # presumably aligned index-for-index — as the zip below relies on
    for ch_name, count in zip(fdsar.channel_names, counts):
        print(f" {ch_name}: {count} spikes")
4. Converting to MNE Format#
Each FrequencyDomainSpikeAnalysisResult (FDSAR) can be converted to MNE format for advanced analysis:
# Convert each FDSAR to MNE format
# NOTE(review): the original cell iterated an undefined variable `sar`;
# the results computed above are bound to `fdsars`.
for fdsar_session in fdsars:
    # Convert to an MNE Raw object
    fdsar_session.convert_to_mne(
        chunk_len=1440  # Length in minutes
    )
    # Save as FIF format with a JSON metadata sidecar, one folder per session
    output_path = Path("./mne_output") / fdsar_session.animal_day
    fdsar_session.save_fif_and_json(output_path)
print("Converted to MNE format")
5. Loading Saved Results#
Load previously saved spike analysis results:
# Load FDSAR from saved files
# Path layout is <output dir>/<animal_id>_<date>, as written by the cell above.
sar_path = Path("./mne_output/animal_001_2023-12-15")
fdsar_loaded = visualization.FrequencyDomainSpikeAnalysisResult.load_fif_and_json(sar_path)

# Access the MNE raw object
raw = fdsar_loaded.result_mne
print(f"Loaded {raw.info['nchan']} channels, {raw.n_times / raw.info['sfreq']:.1f}s duration")
6. Peri-Event Analysis with MNE#
Analyze EEG around spike times:
import mne

# Extract events from annotations (spike times)
# NOTE(review): the original cell referenced an undefined `mne_obj`;
# `raw` is the MNE Raw object loaded in the previous cell.
events, event_dict = mne.events_from_annotations(raw)
print(f"Found {len(events)} events")
print(f"Event types: {list(event_dict.keys())}")

# Create epochs around spike times
epochs = mne.Epochs(
    raw,
    events,
    event_id=event_dict,
    tmin=-0.5,  # 500 ms before spike
    tmax=1.0,  # 1 s after spike
    baseline=(-0.5, -0.1),  # baseline-correct on the pre-spike window
    preload=True  # load data into memory so later filtering/Hilbert work in place
)
print(f"Created {len(epochs)} epochs")
7. Spike-Triggered Averages#
Compute spike-triggered average of LFP:
# Compute evoked response (spike-triggered average):
# averaging epochs time-locked to spikes yields the mean LFP around each spike.
evoked = epochs.average()

# Plot spike-triggered average
fig = evoked.plot(spatial_colors=True, gfp=True)
plt.title("Spike-Triggered Average")
plt.show()
8. Time-Frequency Analysis#
Analyze oscillations around spike times:
# Compute time-frequency representation
freqs = np.arange(1, 50, 1)  # 1-49 Hz in 1 Hz steps (arange excludes the endpoint)
n_cycles = freqs / 2  # Number of cycles increases with frequency

# Compute TFR using Morlet wavelets
power = epochs.compute_tfr(
    method='morlet',
    freqs=freqs,
    n_cycles=n_cycles,
    use_fft=True,
    average=True  # average power across epochs
)

# Plot TFR, expressed as percent change from the pre-spike baseline
power.plot(
    picks='eeg',
    baseline=(-0.5, -0.1),
    mode='percent',
    title="Peri-Spike Time-Frequency"
)
plt.show()
9. Spike-LFP Phase Locking#
Analyze phase relationships between spikes and LFP oscillations:
# Filter for specific frequency band (e.g., theta: 4-8 Hz)
# copy() keeps the broadband epochs intact for reuse elsewhere.
epochs_filtered = epochs.copy().filter(
    l_freq=4,
    h_freq=8,
    method='iir'
)

# Apply Hilbert transform to get instantaneous phase
epochs_hilbert = epochs_filtered.apply_hilbert()

# Extract phase at spike time (t=0);
# time_as_index(0)[0] maps t=0 s to the nearest sample index
spike_phases = np.angle(epochs_hilbert.get_data()[:, :, epochs.time_as_index(0)[0]])

# Plot phase distribution: a peaked (non-uniform) histogram indicates phase locking
plt.figure(figsize=(10, 5))
plt.subplot(1, 2, 1)
plt.hist(spike_phases.flatten(), bins=50, density=True)
plt.xlabel("Phase (radians)")
plt.ylabel("Density")
plt.title("Spike Phase Distribution")

# Polar plot of the same distribution
plt.subplot(1, 2, 2, projection='polar')
plt.hist(spike_phases.flatten(), bins=50, density=True)
plt.title("Spike Phase (Polar)")
plt.tight_layout()
plt.show()
10. Integrating with WAR#
Combine spike analysis with windowed EEG analysis:
# Compute both WAR and SAR
war = ao.compute_windowed_analysis(
    features=['rms', 'psdband', 'nspike'],  # Include spike count alongside EEG features
    multiprocess_mode='serial'
)

# Access spike counts from WAR
# NOTE(review): nspike is read as an attribute here — presumably an array-like
# with a .shape; confirm against the WindowedAnalysisResult API.
spike_counts = war.nspike
print(f"Spike count data shape: {spike_counts.shape}")

# Plot spike counts over time
ap = visualization.AnimalPlotter(war)
fig = ap.plot_feature_over_time('nspike')
plt.title("Spike Counts Over Time")
plt.show()
Summary#
This tutorial covered:
Loading spike-sorted data
Computing frequency-domain spike analysis (FDSAR)
Converting to MNE format
Peri-event analysis
Spike-triggered averages
Time-frequency analysis around spikes
Spike-LFP phase locking
Integrating spikes with windowed analysis
Next Steps#
Visualization Tutorial: Advanced plotting for spike data
Windowed Analysis Tutorial: Combine spike counts with other features