# Source code for yass.detect.run
"""
Detection pipeline
"""
from yass.util import file_loader
from yass.detect import threshold
def run(standarized_path, standarized_params, whiten_filter,
        if_file_exists='skip', save_results=False, function=threshold.run):
    """Execute detect step

    Parameters
    ----------
    standarized_path: str or pathlib.Path
        Path to standarized data binary file
    standarized_params: dict, str or pathlib.Path
        Dictionary with standarized data parameters or path to a yaml file
    whiten_filter: numpy.ndarray, str or pathlib.Path
        Whiten matrix or path to a npy file
    if_file_exists: str, optional
        One of 'overwrite', 'abort', 'skip'. Controls the behavior for every
        generated file. If 'overwrite' it replaces the files if any exist,
        if 'abort' it raises a ValueError exception if any file exists,
        if 'skip' it skips the operation if any file exists
    save_results: bool, optional
        Whether to save results to disk, defaults to False
    function: callable, optional
        Detector implementation to delegate to, defaults to
        ``threshold.run``; must accept the same five positional arguments
        forwarded below

    Returns
    -------
    clear_scores: numpy.ndarray (n_spikes, n_features, n_channels)
        3D array with the scores for the clear spikes, first dimension is
        the number of spikes, second is the number of features and third the
        number of channels
    spike_index_clear: numpy.ndarray (n_clear_spikes, 2)
        2D array with indexes for clear spikes, first column contains the
        spike location in the recording and the second the main channel
        (channel whose amplitude is maximum)
    spike_index_all: numpy.ndarray (n_collided_spikes, 2)
        2D array with indexes for all spikes, first column contains the
        spike location in the recording and the second the main channel
        (channel whose amplitude is maximum)

    Notes
    -----
    Running the preprocessor will generate the following files in
    CONFIG.data.root_folder/output_directory/ (if save_results is
    True):

    * ``spike_index_clear.npy`` - Same as spike_index_clear returned
    * ``spike_index_all.npy`` - Same as spike_index_collision returned
    * ``rotation.npy`` - Rotation matrix for dimensionality reduction
    * ``scores_clear.npy`` - Scores for clear spikes

    Threshold detector runs on CPU, neural network detector runs CPU and GPU,
    depending on how tensorflow is configured.

    Examples
    --------
    .. literalinclude:: ../../examples/pipeline/detect.py
    """
    # load files in case they are strings or Path objects
    standarized_params = file_loader(standarized_params)
    whiten_filter = file_loader(whiten_filter)

    # delegate actual detection to the selected implementation
    return function(standarized_path,
                    standarized_params,
                    whiten_filter,
                    if_file_exists,
                    save_results)