# Leftover web-scrape boilerplate (Snyk advertisement), commented out so it no
# longer breaks the file at line 1:
# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): orphaned top-level fragment -- it references names that are
# never defined in this file (`params`, `triggers`, `lims`, `numpy`, `pylab`,
# `get_nodes_and_edges`, `load_data`). It appears to build and display a
# trigger-aligned spike-count image. Indentation restored from context;
# confirm against the original source.
N_total = params.getint('data', 'N_total')
sampling_rate = params.getint('data', 'sampling_rate')
do_temporal_whitening = params.getboolean('whitening', 'temporal')
do_spatial_whitening = params.getboolean('whitening', 'spatial')
spike_thresh = params.getfloat('detection', 'spike_thresh')
file_out_suff = params.get('data', 'file_out_suff')
N_t = params.getint('detection', 'N_t')
nodes, edges = get_nodes_and_edges(params)
chunk_size = N_t
if do_spatial_whitening:
    spatial_whitening = load_data(params, 'spatial_whitening')
if do_temporal_whitening:
    temporal_whitening = load_data(params, 'temporal_whitening')
thresholds = load_data(params, 'thresholds')
try:
    result = load_data(params, 'results')
except Exception:
    # Fall back to an empty result structure when no fitted results exist yet.
    result = {'spiketimes' : {}, 'amplitudes' : {}}
# One row per trigger, one slice per spiketime key, lims[1]+lims[0] time bins.
curve = numpy.zeros((len(triggers), len(result['spiketimes'].keys()), lims[1]+lims[0]), dtype=numpy.int32)
count = 0
for count, t_spike in enumerate(triggers):
    for key in result['spiketimes'].keys():
        # Keys appear to have the form '<prefix>_<elec>'; take the index after '_'.
        elec = int(key.split('_')[1])
        # NOTE(review): both window bounds use lims[0]; the upper bound was
        # probably meant to be lims[1] (the last axis is sized
        # lims[1]+lims[0]) -- confirm against upstream.
        idx = numpy.where((result['spiketimes'][key] > t_spike - lims[0]) & (result['spiketimes'][key] < t_spike + lims[0]))
        # NOTE(review): t_spike - spiketime can be negative here, which wraps
        # around via negative indexing -- verify this is intended.
        curve[count, elec, t_spike - result['spiketimes'][key][idx]] += 1
pylab.subplot(111)
pylab.imshow(numpy.mean(curve, 0), aspect='auto')
# NOTE(review): near-duplicate of the fragment above, truncated at the top --
# `do_temporal_whitening`, `params`, `triggers` and `lims` are referenced but
# not defined in this span, and the trailing `return curve` implies this is
# the tail of a function whose `def` line is missing from this file.
do_spatial_whitening = params.getboolean('whitening', 'spatial')
spike_thresh = params.getfloat('detection', 'spike_thresh')
file_out_suff = params.get('data', 'file_out_suff')
N_t = params.getint('detection', 'N_t')
nodes, edges = get_nodes_and_edges(params)
chunk_size = N_t
if do_spatial_whitening:
    spatial_whitening = load_data(params, 'spatial_whitening')
if do_temporal_whitening:
    temporal_whitening = load_data(params, 'temporal_whitening')
thresholds = load_data(params, 'thresholds')
try:
    result = load_data(params, 'results')
except Exception:
    # Fall back to an empty result structure when no fitted results exist yet.
    result = {'spiketimes' : {}, 'amplitudes' : {}}
curve = numpy.zeros((len(triggers), len(result['spiketimes'].keys()), lims[1]+lims[0]), dtype=numpy.int32)
count = 0
for count, t_spike in enumerate(triggers):
    for key in result['spiketimes'].keys():
        elec = int(key.split('_')[1])
        # NOTE(review): same suspected lims[0]/lims[1] asymmetry as the copy
        # above -- confirm before use.
        idx = numpy.where((result['spiketimes'][key] > t_spike - lims[0]) & (result['spiketimes'][key] < t_spike + lims[0]))
        curve[count, elec, t_spike - result['spiketimes'][key][idx]] += 1
pylab.subplot(111)
pylab.imshow(numpy.mean(curve, 0), aspect='auto')
# NOTE(review): `return` outside any visible function -- invalid at top level.
return curve
# NOTE(review): fragment that loads one whitened chunk of raw data. `t_start`,
# `t_stop`, `data_file`, `scipy`, `load_data` and `get_nodes_and_edges` are
# not defined in this file. The trailing `try`/`except` is truncated (its
# except body is missing; the next line starts an unrelated `def`), so this
# span is not valid Python as written.
N_e = params.getint('data', 'N_e')
N_t = params.getint('detection', 'N_t')
N_total = params.nb_channels
sampling_rate = params.rate
do_temporal_whitening = params.getboolean('whitening', 'temporal')
do_spatial_whitening = params.getboolean('whitening', 'spatial')
spike_thresh = params.getfloat('detection', 'spike_thresh')
file_out_suff = params.get('data', 'file_out_suff')
template_shift = params.getint('detection', 'template_shift')
nodes, edges = get_nodes_and_edges(params)
# NOTE(review): may be a float if t_start/t_stop are floats -- confirm callers.
chunk_size = (t_stop - t_start)*sampling_rate
# NOTE(review): both padding entries use t_start -- verify this is intended.
padding = (t_start*sampling_rate, t_start*sampling_rate)
suff = params.get('data', 'suffix')
if do_spatial_whitening:
    spatial_whitening = load_data(params, 'spatial_whitening')
if do_temporal_whitening:
    temporal_whitening = load_data(params, 'temporal_whitening')
thresholds = load_data(params, 'thresholds')
data = data_file.get_data(0, chunk_size, padding=padding, chunk_size=chunk_size, nodes=nodes)
# Number of rows (samples) actually returned for this chunk.
data_shape = len(data)
data_file.close()
if do_spatial_whitening:
    data = numpy.dot(data, spatial_whitening)
if do_temporal_whitening:
    data = scipy.ndimage.filters.convolve1d(data, temporal_whitening, axis=0, mode='constant')
try:
    result = load_data(params, 'results')
except Exception:
# NOTE(review): except body truncated here -- the original handler is missing.
def delete_mixtures(params, nb_cpu, nb_gpu, use_gpu):
    """Apparent template-mixture removal step (the name and the '-mixtures'
    overlap extension suggest it flags templates that are mixtures of others).

    NOTE(review): this body is a mangled concatenation of several versions of
    the same routine -- many variables are assigned two or three times
    (`overlap`, `c_overs`, `best_elec`, `limits`, `nodes`, `inv_nodes`,
    `mixtures`, `to_remove`, `merged`), `templates` is read before its first
    assignment, and `comm` / `parallel_hdf5` are undefined here. The function
    is also truncated: it ends without a return. Reconcile against the
    upstream implementation before relying on it.
    """
    data_file = params.data_file
    N_e = params.getint('data', 'N_e')
    N_total = params.nb_channels
    N_t = params.getint('detection', 'N_t')
    template_shift = params.getint('detection', 'template_shift')
    cc_merge = params.getfloat('clustering', 'cc_merge')
    mixtures = []
    to_remove = []
    filename = params.get('data', 'file_out_suff') + '.overlap-mixtures.hdf5'
    norm_templates = load_data(params, 'norm-templates')
    best_elec = load_data(params, 'electrodes')
    limits = load_data(params, 'limits')
    nodes, edges = get_nodes_and_edges(params)
    # Map absolute channel ids back to their position inside `nodes`.
    inv_nodes = numpy.zeros(N_total, dtype=numpy.int32)
    inv_nodes[nodes] = numpy.arange(len(nodes))
    decimation = params.getboolean('clustering', 'decimation')
    overlap = get_overlaps(params, extension='-mixtures', erase=True, normalize=False, maxoverlap=False, verbose=False, half=True, use_gpu=use_gpu, nb_cpu=nb_cpu, nb_gpu=nb_gpu, decimation=decimation)
    overlap.close()
    SHARED_MEMORY = get_shared_memory_flag(params)
    if SHARED_MEMORY:
        c_overs = load_data_memshared(params, 'overlaps', extension='-mixtures', use_gpu=use_gpu, nb_cpu=nb_cpu, nb_gpu=nb_gpu)
    else:
        c_overs = load_data(params, 'overlaps', extension='-mixtures')
    # NOTE(review): second, older copy of the same setup begins here; it
    # re-reads N_t from the 'data' section (vs 'detection' above) -- confirm.
    N_e = params.getint('data', 'N_e')
    N_t = params.getint('data', 'N_t')
    cc_merge = params.getfloat('clustering', 'cc_merge')
    # NOTE(review): `templates` is used here before it is assigned below.
    x, N_tm = templates.shape
    # NOTE(review): true division -- nb_temp is a float under Python 3; a
    # later copy uses int(N_tm//2). Looks like a Python 2 leftover.
    nb_temp = N_tm/2
    merged = [nb_temp, 0]
    mixtures = []
    to_remove = []
    # NOTE(review): `comm` and `parallel_hdf5` are not defined in this scope.
    overlap = get_overlaps(comm, params, extension='-mixtures', erase=True, parallel_hdf5=parallel_hdf5, normalize=False, maxoverlap=False, verbose=False, half=True)
    filename = params.get('data', 'file_out_suff') + '.overlap-mixtures.hdf5'
    result = []
    norm_templates = load_data(params, 'norm-templates')
    templates = load_data(params, 'templates')
    result = load_data(params, 'clusters')
    best_elec = load_data(params, 'electrodes')
    limits = load_data(params, 'limits')
    N_total = params.getint('data', 'N_total')
    nodes, edges = get_nodes_and_edges(params)
    inv_nodes = numpy.zeros(N_total, dtype=numpy.int32)
    # NOTE(review): this copy uses argsort(nodes) where the copy above used
    # arange(len(nodes)); equivalent only when `nodes` is sorted -- confirm.
    inv_nodes[nodes] = numpy.argsort(nodes)
    distances = numpy.zeros((nb_temp, nb_temp), dtype=numpy.float32)
    over_x = overlap.get('over_x')[:]
    over_y = overlap.get('over_y')[:]
    over_data = overlap.get('over_data')[:]
    over_shape = overlap.get('over_shape')[:]
    overlap.close()
    # Rebuild a sparse overlap matrix from its coordinate components.
    overlap = scipy.sparse.csr_matrix((over_data, (over_x, over_y)), shape=over_shape)
    # NOTE(review): third copy of the setup begins here.
    best_elec = load_data(params, 'electrodes')
    limits = load_data(params, 'limits')
    nodes, edges = get_nodes_and_edges(params)
    inv_nodes = numpy.zeros(N_total, dtype=numpy.int32)
    inv_nodes[nodes] = numpy.arange(len(nodes))
    decimation = params.getboolean('clustering', 'decimation')
    overlap = get_overlaps(params, extension='-mixtures', erase=True, normalize=False, maxoverlap=False, verbose=False, half=True, use_gpu=use_gpu, nb_cpu=nb_cpu, nb_gpu=nb_gpu, decimation=decimation)
    overlap.close()
    SHARED_MEMORY = get_shared_memory_flag(params)
    if SHARED_MEMORY:
        c_overs = load_data_memshared(params, 'overlaps', extension='-mixtures', use_gpu=use_gpu, nb_cpu=nb_cpu, nb_gpu=nb_gpu)
    else:
        c_overs = load_data(params, 'overlaps', extension='-mixtures')
    if SHARED_MEMORY:
        templates = load_data_memshared(params, 'templates', normalize=False)
    else:
        templates = load_data(params, 'templates')
    x, N_tm = templates.shape
    # Half the columns -- presumably templates are stored alongside paired
    # copies; TODO confirm the storage layout.
    nb_temp = int(N_tm//2)
    merged = [nb_temp, 0]
    supports = {}
    for t in range(N_e):
        # Channels adjacent to electrode t, translated into node-index space.
        elecs = numpy.take(inv_nodes, edges[nodes[t]])
        supports[t] = elecs
    overlap_0 = numpy.zeros(nb_temp, dtype=numpy.float32)
    # NOTE(review): function body truncated here (no return statement).
# print("tprs: {}".format(tprs))
##### end temporary zone
fpers = load_data(params, 'false-positive-error-rates')
fners = load_data(params, 'false-negative-error-rates')
##### TODO: remove temporary zone
# print("fpers: {}".format(fpers))
# print("fners: {}".format(fners))
##### end temporary zone
fpers = 100.0 * fpers
fners = 100.0 * fners
##### TODO: clean temporary zone
# res = None
# error = None
sc_fpers = load_data(params, 'sc-false-positive-error-rates')
sc_fners = load_data(params, 'sc-false-negative-error-rates')
sc_fper = load_data(params, 'sc-best-false-positive-error-rate')
sc_fner = load_data(params, 'sc-best-false-negative-error-rate')
selection = load_data(params, 'selection')
##### TODO: remove temporary zone
# print("sc_fpers: {}".format(sc_fpers))
# print("sc_fners: {}".format(sc_fners))
# print("sc_fper: {}".format(sc_fper))
# print("sc_fner: {}".format(sc_fner))
# print("selection: {}".format(selection))
##### end temporary zone
sc_fpers = 100.0 * sc_fpers
sc_fners = 100.0 * sc_fners
sc_fper = 100.0 * sc_fper
sc_fner = 100.0 * sc_fner
##### end temporary zone
def view_roc_curve_(params, save=None):
    '''Plot ROC curve.

    NOTE(review): the body below is truncated and mangled -- after loading
    the rate arrays it switches to template-mixture code (`templates`,
    `comm`, `parallel_hdf5`, `nb_temp`, ...) that belongs to a different
    routine, and the function ends without plotting or returning anything.
    The `save` parameter is never used in the visible span.
    '''
    fprs = load_data(params, 'false-positive-rates')
    tprs = load_data(params, 'true-positive-rates')
    ##### TODO: remove temporary zone
    # print("fprs: {}".format(fprs))
    # print("tprs: {}".format(tprs))
    ##### end temporary zone
    fpers = load_data(params, 'false-positive-error-rates')
    fners = load_data(params, 'false-negative-error-rates')
    ##### TODO: remove temporary zone
    # print("fpers: {}".format(fpers))
    # print("fners: {}".format(fners))
    ##### end temporary zone
    # Convert error rates from fractions to percentages.
    fpers = 100.0 * fpers
    fners = 100.0 * fners
    ##### TODO: clean temporary zone
    # NOTE(review): unrelated mixture-deletion code begins here.
    templates = load_data(params, 'templates')
    N_e = params.getint('data', 'N_e')
    N_t = params.getint('data', 'N_t')
    cc_merge = params.getfloat('clustering', 'cc_merge')
    x, N_tm = templates.shape
    # NOTE(review): true division -- float under Python 3; Py2 leftover.
    nb_temp = N_tm/2
    merged = [nb_temp, 0]
    mixtures = []
    to_remove = []
    # NOTE(review): `comm` and `parallel_hdf5` are not defined in this scope.
    overlap = get_overlaps(comm, params, extension='-mixtures', erase=True, parallel_hdf5=parallel_hdf5, normalize=False, maxoverlap=False, verbose=False, half=True)
    filename = params.get('data', 'file_out_suff') + '.overlap-mixtures.hdf5'
    result = []
    norm_templates = load_data(params, 'norm-templates')
    templates = load_data(params, 'templates')
    result = load_data(params, 'clusters')
    best_elec = load_data(params, 'electrodes')
    limits = load_data(params, 'limits')
    N_total = params.getint('data', 'N_total')
    nodes, edges = get_nodes_and_edges(params)
    inv_nodes = numpy.zeros(N_total, dtype=numpy.int32)
    inv_nodes[nodes] = numpy.argsort(nodes)
    distances = numpy.zeros((nb_temp, nb_temp), dtype=numpy.float32)
    over_x = overlap.get('over_x')[:]
    over_y = overlap.get('over_y')[:]
    over_data = overlap.get('over_data')[:]
    over_shape = overlap.get('over_shape')[:]
    overlap.close()
    # NOTE(review): function truncated here.
def view_roc_curve_(params, save=None):
    '''Plot ROC curve.

    NOTE(review): duplicate definition -- shadows the `view_roc_curve_`
    defined earlier in this file. The body is truncated at the end of the
    visible span (it loads rates but never plots or returns), and the `save`
    parameter is never used.
    '''
    fprs = load_data(params, 'false-positive-rates')
    tprs = load_data(params, 'true-positive-rates')
    ##### TODO: remove temporary zone
    # print("fprs: {}".format(fprs))
    # print("tprs: {}".format(tprs))
    ##### end temporary zone
    fpers = load_data(params, 'false-positive-error-rates')
    fners = load_data(params, 'false-negative-error-rates')
    ##### TODO: remove temporary zone
    # print("fpers: {}".format(fpers))
    # print("fners: {}".format(fners))
    ##### end temporary zone
    # Convert error rates from fractions to percentages.
    fpers = 100.0 * fpers
    fners = 100.0 * fners
    ##### TODO: clean temporary zone