@docval({'name': 'name', 'type': str, 'doc': 'the name of this node'})
def __init__(self, **kwargs):
super(Node, self).__init__(name=kwargs['name'])
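# Hedged usage sketch (not part of the snippet above): docval builds the real
# signature from the declared dicts, so the decorated __init__ is called with
# keyword arguments and type-checks them. The value 'electrode_1' is
# illustrative only.
node = Node(name='electrode_1')   # accepted: 'name' is a str
try:
    Node(name=42)                 # rejected: docval raises TypeError for the wrong type
except TypeError as err:
    print(err)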
@docval(*get_docval(TimeSeries.__init__, 'name'), # required
{'name': 'data', 'type': ('array_data', 'data', TimeSeries), # required
'doc': ('The data values. Must be 3D, where the first dimension must be time, the second dimension must '
'be channels, and the third dimension must be bands.'),
'shape': (None, None, None)},
*get_docval(TimeSeries.__init__, 'description'),
{'name': 'metric', 'type': str, # required
'doc': "metric of analysis. recommended: 'phase', 'amplitude', 'power'"},
{'name': 'unit', 'type': str, 'doc': 'SI unit of measurement', 'default': 'no unit'},
{'name': 'bands', 'type': DynamicTable,
'doc': 'a table for describing the frequency bands that the signal was decomposed into', 'default': None},
{'name': 'source_timeseries', 'type': TimeSeries,
'doc': 'the input TimeSeries from this analysis', 'default': None},
*get_docval(TimeSeries.__init__, 'resolution', 'conversion', 'timestamps', 'starting_time', 'rate',
'comments', 'control', 'control_description'))
def __init__(self, **kwargs):
metric, source_timeseries, bands = popargs('metric', 'source_timeseries', 'bands', kwargs)
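# Hedged sketch of the two helpers used above, assuming a recent hdmf where
# they live in hdmf.utils: get_docval copies argument specs off an already
# decorated callable so subclasses avoid repeating them, and popargs removes
# the named keys from a kwargs dict and returns their values in order.
from hdmf.utils import get_docval, popargs

name_spec, = get_docval(TimeSeries.__init__, 'name')   # the dict declared for 'name'
print(name_spec['name'], name_spec['type'], name_spec['doc'])

kwargs = {'metric': 'power', 'source_timeseries': None, 'bands': None, 'name': 'decomp'}
metric, source_timeseries, bands = popargs('metric', 'source_timeseries', 'bands', kwargs)
print(kwargs)   # only {'name': 'decomp'} remains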
@docval(*filter(_not_parent, get_docval(container_type.__init__)), func_name=func_name, doc=doc,
returns="the %s object that was created" % cls.__join(container_type), rtype=container_type)
def _func(self, **kwargs):
cargs, ckwargs = fmt_docval_args(container_type.__init__, kwargs)
ret = container_type(*cargs, **ckwargs)
getattr(self, add_name)(ret)
return ret
return _func
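# Hedged sketch: fmt_docval_args splits an incoming kwargs dict into the
# positional and keyword arguments another docval-decorated callable expects
# (required specs become positional, defaulted ones stay keyword), which is
# what lets the generated _func forward everything into container_type.
# The TimeSeries values below are illustrative only.
from hdmf.utils import fmt_docval_args

cargs, ckwargs = fmt_docval_args(TimeSeries.__init__,
                                 {'name': 'ts', 'data': [1., 2., 3.], 'unit': 'volts', 'rate': 1.0})
ts = TimeSeries(*cargs, **ckwargs)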
@docval({'name': 'namespace_path', 'type': str,
'doc': 'the path to the YAML with the namespace definition'},
returns="the namespaces loaded from the given file", rtype=tuple,
is_method=False)
def load_namespaces(**kwargs):
'''
Load namespaces from file
'''
namespace_path = getargs('namespace_path', kwargs)
return __TYPE_MAP.load_namespaces(namespace_path)
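# Hedged sketch: with is_method=False the decorated callable is a plain
# function, but docval still enforces that namespace_path is a str. The file
# name below is purely illustrative.
namespaces = load_namespaces(namespace_path='my-extension.namespace.yaml')
print(namespaces)   # describes the namespaces the global type map loaded from the file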
@docval({'name': 'description', 'type': str, 'doc': 'a brief description of what the region is'},
{'name': 'region', 'type': (slice, list, tuple), 'doc': 'the indices of the table', 'default': slice(None)},
{'name': 'name', 'type': str, 'doc': 'the name of the ROITableRegion', 'default': 'rois'})
def create_roi_table_region(self, **kwargs):
return call_docval_func(self.create_region, kwargs)
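# Hedged sketch: call_docval_func forwards only the keys declared in the
# target's docval, so the thin wrapper above just hands its own kwargs to
# self.create_region. The plane_segmentation object is illustrative only.
rois = plane_segmentation.create_roi_table_region(description='all ROIs')        # whole table
first_two = plane_segmentation.create_roi_table_region(description='first two ROIs', region=[0, 1])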
@docval({'name': 'time', 'type': 'float', 'doc': 'the time point of this feature'},
{'name': 'features', 'type': (list, np.ndarray), 'doc': 'the feature values for this time point'})
def add_features(self, **kwargs):
time, features = getargs('time', 'features', kwargs)
if type(self.timestamps) == list and type(self.data) is list:
self.timestamps.append(time)
self.data.append(features)
else:
raise ValueError('Can only add feature if timestamps and data are lists')
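# Hedged sketch: the type check above means features can only be appended
# while both timestamps and data are still plain Python lists, i.e. before
# they have been wrapped or written. The series and values are illustrative only.
series.add_features(time=0.1, features=[1.0, 2.5, 3.2])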
@docval({'name': 'container_name', 'type': str, 'doc': 'the name of the NWBContainer to retrieve'})
def get_container(self, **kwargs):
'''
Retrieve an NWBContainer from this ProcessingModule
'''
container_name = getargs('container_name', kwargs)
warn(PendingDeprecationWarning('get_container will be replaced by get_data_interface'))
return self.get_data_interface(container_name)
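# Hedged sketch: get_container still works but emits a PendingDeprecationWarning
# before delegating; the module and interface names are illustrative only.
lfp = processing_module.get_container('LFP')          # warns, then delegates
lfp = processing_module.get_data_interface('LFP')     # preferred spelling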
@docval({'name': 'name', 'type': str, 'doc': 'The name of this TimeSeries dataset'},
{'name': 'data', 'type': ('array_data', 'data', 'TimeSeries'),
'doc': 'The data this TimeSeries dataset stores. Can also store binary data e.g. image frames',
'default': None},
{'name': 'unit', 'type': str, 'doc': 'The base unit of measurement (should be SI unit)', 'default': None},
{'name': 'resolution', 'type': (str, float),
'doc': 'The smallest meaningful difference (in specified unit) between values in data',
'default': _default_resolution},
# Optional arguments:
{'name': 'conversion', 'type': (str, float),
'doc': 'Scalar to multiply each element in data to convert it to the specified unit',
'default': _default_conversion},
{'name': 'timestamps', 'type': ('array_data', 'data', 'TimeSeries'),
'doc': 'Timestamps for samples stored in data', 'default': None},
{'name': 'starting_time', 'type': float, 'doc': 'The timestamp of the first sample', 'default': None},
{'name': 'rate', 'type': float, 'doc': 'Sampling rate in Hz', 'default': None},
@docval({'name': 'NWBDataInterface', 'type': (NWBDataInterface, DynamicTable),
'doc': 'the NWBDataInterface to add to this Module'})
def add_data_interface(self, **kwargs):
NWBDataInterface = getargs('NWBDataInterface', kwargs)
warn(PendingDeprecationWarning('add_data_interface will be replaced by add'))
self.add(NWBDataInterface)
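# Hedged sketch: add_data_interface is kept for backward compatibility and
# warns before delegating to add(); the objects below are illustrative only.
processing_module.add_data_interface(lfp)   # equivalent to processing_module.add(lfp)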
@docval({'name': 'timeseries', 'type': TimeSeries, 'doc': 'the TimeSeries to add as stimulus data'})
def add_stimulus(self, **kwargs):
    timeseries = popargs('timeseries', kwargs)
    self._add_stimulus_internal(timeseries)
    self._update_sweep_table(timeseries)
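# Hedged sketch: add_stimulus stores the series as stimulus data and then lets
# the file update its sweep-table bookkeeping; nwbfile and stimulus_series are
# illustrative only.
nwbfile.add_stimulus(stimulus_series)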