def test_run(traj, to_print):
    assert isinstance(traj, SingleRun)
    print(to_print)

    # Read the explored parameter and derive a new one from it
    x = traj.x.value
    y = traj.f_add_derived_parameter('y')
    y.val = x ** 2

    # Wrap a value into a result and add it to the trajectory
    smurf = Result('', '', '', '')
    z = traj.f_add_result('Nada.Moo', smurf)
    z.val = y() + 1
    print('That was it')
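# How such a run function is typically wired up (a minimal sketch, assuming the
# old pypet `Environment` API; trajectory name, filename, and parameter value
# are illustrative):
from pypet.environment import Environment

env = Environment(trajectory='example', filename='./hdf5/example.hdf5')
traj = env.v_trajectory
traj.f_add_parameter('x', 3)          # read inside test_run via traj.x
env.f_run(test_run, 'Hello pypet!')   # extra args are forwarded to the run function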
def _load(self, load_dict):
    # Enclosing loop reconstructed; the original fragment started mid-method.
    for key in list(load_dict.keys()):
        if key not in load_dict:
            continue  # entry already consumed by a previous reconstruction
        if SparseResult.IDENTIFIER in key:
            # Collect the flattened pieces of one sparse matrix and rebuild it
            new_key = key.split(SparseResult.IDENTIFIER)[0]
            is_dia = load_dict.pop(new_key + SparseResult.IDENTIFIER + 'is_dia')
            name_list = SparseParameter._get_name_list(is_dia)
            rename_list = ['%s%s%s' % (new_key, SparseResult.IDENTIFIER, name)
                           for name in name_list]
            data_list = [load_dict.pop(name) for name in rename_list]
            matrix = SparseParameter._reconstruct_matrix(data_list)
            self._data[new_key] = matrix
        else:
            self._data[key] = load_dict[key]
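# Round-trip sketch of what the loop above reverses: a sparse matrix stored
# under key 'mat' is flattened into several 'mat' + IDENTIFIER + <part> entries
# on disk and stitched back together on load (names are illustrative):
from scipy import sparse

res = SparseResult('results.my_matrix')
res.f_set_single('mat', sparse.csr_matrix([[1, 0], [0, 2]]))
# After storing and reloading, res.mat is reconstructed via _reconstruct_matrix.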
class PickleResult(Result):
    """Result that digests everything and simply pickles it!

    Note that it is not checked whether data can be pickled, so take care that it works!

    You can pass the pickle protocol via `protocol=2` to the constructor or change it with
    the `v_protocol` property. The default protocol is 0.

    Note that after storage to disk, changing the protocol has no effect.
    If the result is loaded, `v_protocol` is set to the protocol used to
    store an item. Items are reconstructed from a dictionary and the protocol
    is taken from the first one found in the dictionary. This is a rather arbitrary choice;
    yet, the underlying assumption is that all items were pickled with the same protocol,
    which is the general case.

    """

    PROTOCOL = PickleParameter.PROTOCOL
    # Method of SparseResult: rejects names that clash with the sparse identifier
    @copydoc(Result.f_set_single)
    def f_set_single(self, name, item):
        if SparseResult.IDENTIFIER in name:
            raise AttributeError('Your result name contains the identifier for sparse matrices,'
                                 ' please do not use %s in your result names.' %
                                 SparseResult.IDENTIFIER)
        else:
            super(SparseResult, self).f_set_single(name, item)
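# Protocol handling in practice (a sketch; the `protocol` kwarg and `v_protocol`
# property are described in the docstring above, the data values are illustrative):
res = PickleResult('results.blob', protocol=2)  # choose the pickle protocol up front
res.payload = {'weights': [1, 2, 3]}            # NOT checked for picklability!
res.v_protocol = 0                              # no effect once stored to disk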
class StorageContextManager(object):
    """Context manager that keeps the store of a storage result open for its block."""

    def __init__(self, storage_result):
        self._storage_result = storage_result

    def __enter__(self):
        self._storage_result.f_open_store()
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self._storage_result.f_close_store()

    def f_flush_store(self):
        self._storage_result.f_flush_store()
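# Typical use (sketch): the store is opened on entry and closed again even if
# the body raises; `shared_result` stands in for any result exposing the
# f_open_store/f_close_store/f_flush_store interface.
with StorageContextManager(shared_result) as cm:
    # ... read or write data backed by the open store ...
    cm.f_flush_store()   # push pending writes to disk without closing the store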
class StorageDataResult(Result, KnowsTrajectory):

    def __init__(self, full_name, trajectory, *args, **kwargs):
        self._traj = trajectory
        super(StorageDataResult, self).__init__(full_name, *args, **kwargs)

    # def __setstate__(self, statedict):
    #     """Called after loading a pickle dump.
    #
    #     Restores `__dict__` from `statedict` and sets dependencies for all results.
    #
    #     """
    #     super(StorageDataResult, self).__setstate__(statedict)
    #     for name in self._data:
    #         item = self._data[name]
    #         try:
    #             item._set_dependencies(self._traj, self.v_full_name, name)
    #         except AttributeError:
    #             pass

    def read(self):
        return self._request_data('pandas_get', args=())
        # return self.data
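# Sketch of using read(); the store must be open, e.g. via the context manager
# above (`data_result` is an illustrative StorageDataResult holding a frame):
with StorageContextManager(data_result):
    frame = data_result.read()   # delegates to the backend's 'pandas_get' request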
FLAG_CLASS_MAPPING = {
    pypetconstants.ARRAY: SharedArray,
    pypetconstants.CARRAY: SharedCArray,
    pypetconstants.EARRAY: SharedEArray,
    pypetconstants.VLARRAY: SharedVLArray,
    pypetconstants.TABLE: SharedTable,
    pypetconstants.FRAME: SharedPandasFrame,
}
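# The mapping resolves a storage flag to its shared-data wrapper class, e.g.
# when reconstructing items from disk (sketch):
shared_cls = FLAG_CLASS_MAPPING[pypetconstants.TABLE]   # -> SharedTable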
class SharedResult(Result, KnowsTrajectory):
    """Behaves exactly like the normal `Result` but accepts `SharedData` subclasses as data."""

    __slots__ = ('_traj',)

    SUPPORTED_DATA = set(FLAG_CLASS_MAPPING.values())

    def __init__(self, full_name, trajectory, *args, **kwargs):
        self._traj = trajectory
        super(SharedResult, self).__init__(full_name, *args, **kwargs)

    @property
    def traj(self):
        return self._traj

    def _supports(self, item):
        """Checks if outer data structure is supported."""
        # Body reconstructed from the docstring and SUPPORTED_DATA above:
        return type(item) in self.SUPPORTED_DATA
    def __setattr__(self, key, value):
        if key[0] == '_':
            # We set a private attribute
            super(Result, self).__setattr__(key, value)
        elif hasattr(self.__class__, key):
            # Workaround for Python properties
            python_property = getattr(self.__class__, key)
            if python_property.fset is None:
                raise AttributeError('%s is read only!' % key)
            else:
                python_property.fset(self, value)
        else:
            self.f_set_single(key, value)
    def __getattr__(self, name):
        return self.f_get(name)
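# The two hooks above give results attribute-style access (sketch):
res = Result('results.demo')
res.temperature = 42.0    # no leading '_', not a property -> routed to f_set_single
print(res.temperature)    # missing attribute -> __getattr__ -> f_get('temperature')
res._cache = {}           # leading '_' -> stored as a normal private attribute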
class SparseResult(Result):
    """Handles SciPy sparse matrices.

    Supported formats are csr, csc, bsr, and dia.

    Subclasses the standard result and can also handle all data supported by
    :class:`~pypet.parameter.Result`.

    """

    IDENTIFIER = SparseParameter.IDENTIFIER
    """Identifier string to label sparse matrix data"""

    __slots__ = ()
    @copydoc(Result.f_set_single)
    def f_set_single(self, name, item):
        # Same body as shown for SparseResult.f_set_single above
        ...


# Fragment from Trajectory.__init__ (storage setup); the `if filename is None:`
# branch is reconstructed from the dangling `else:` of the original fragment:
if filename is None:
    self._filename = None
else:
    if file_title is None:
        file_title = filename
    self._filename = filename
    self._storage_service = HDF5StorageService(filename=filename,
                                               file_title=file_title)
# The index of a trajectory is -1; if the trajectory should behave like a single
# run and hide other single-run results, this can be changed via 'v_as_run'.
self._idx = -1
self._as_run = None

self._standard_parameter = Parameter
self._standard_result = Result
self._standard_leaf = Result

self._stored = False
self._full_copy = False

self._dynamic_imports = ['pypet.parameter.PickleParameter']

self._is_run = False

# Helper variable: during a multiprocessing single run, the trajectory is usually
# pickled without the parameter exploration ranges and the run information.
# As a consequence, __len__ would return 1, so we store the length in this
# helper variable to return the correct length during single runs.
self._length_during_run = None

if dynamically_imported_classes is not None:
    self.f_add_to_dynamic_imports(dynamically_imported_classes)
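# Caller's view of the fragment above (filename and the custom class name are
# illustrative):
traj = Trajectory('example')                           # no filename -> no storage service
traj = Trajectory('example', filename='example.hdf5')  # -> HDF5StorageService attached
traj = Trajectory('example',
                  dynamically_imported_classes=['mypackage.MyPickleResult'])
traj.f_add_to_dynamic_imports(PickleParameter)         # imports can also be added later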
# Fragment of BrianParameter._load; `unit` and `value_col` are extracted from
# `load_dict` earlier in the (elided) `try` block, which is reconstructed here
# from the dangling `except` below:
try:
    # ... read `unit` and `value_col` from the stored exploration table ...
    explore_list = []
    for value in value_col:
        brian_quantity = value * unit
        explore_list.append(brian_quantity)
    self._explored_range = tuple(explore_list)
    self._explored = True
except KeyError:
    # Data was not stored in BRIAN format; fall back to standard loading
    super(BrianParameter, self)._load(load_dict)

self._default = self._data
self._locked = True
class BrianResult(Result):
    """A result class that can handle BRIAN quantities.

    Note that only scalar BRIAN quantities are supported; lists, tuples, or dictionaries
    of BRIAN quantities cannot be handled.

    Also supports all data supported by the standard :class:`~pypet.parameter.Result`.

    Storage mode works as for :class:`~pypet.brian.parameter.BrianParameter`.

    """

    IDENTIFIER = BrianParameter.IDENTIFIER
    """Identifier string to label BRIAN data"""

    FLOAT_MODE = 'FLOAT'
    """Float storage mode"""
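# Storing a scalar BRIAN quantity (a sketch, assuming BRIAN 1 units and a
# `v_storage_mode` property analogous to BrianParameter's; names illustrative):
from brian import mV

res = BrianResult('results.voltage')
res.v_storage_mode = BrianResult.FLOAT_MODE   # store as float value plus unit string
res.mean_v = -65 * mV                         # scalar quantities only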
# Fragment of Brian2Parameter._load; `unit` comes from earlier in the (elided)
# method, and the enclosing `try` is reconstructed from the dangling `except`:
try:
    explore_table = load_dict['explored_data' + Brian2Parameter.IDENTIFIER]
    value_col = explore_table['value']
    explore_list = [value * unit for value in value_col]
    self._explored_range = explore_list
    self._explored = True
except KeyError:
    # Data was not stored in BRIAN2 format; fall back to standard loading
    super(Brian2Parameter, self)._load(load_dict)

self._default = self._data
self._locked = True
class Brian2Result(Result):
    """A result class that can handle BRIAN2 quantities.

    Note that only scalar BRIAN2 quantities are supported; lists, tuples, or dictionaries
    of BRIAN2 quantities cannot be handled.

    Also supports all data supported by the standard :class:`~pypet.parameter.Result`.

    """

    IDENTIFIER = Brian2Parameter.IDENTIFIER
    """Identifier string to label BRIAN2 result data"""

    __slots__ = ()

    def __init__(self, full_name, *args, **kwargs):
        super(Brian2Result, self).__init__(full_name, *args, **kwargs)
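# Analogous sketch with brian2 units (a container of quantities would be rejected;
# the result name and value are illustrative):
from brian2 import mV

res = Brian2Result('results.voltage', mean_v=-65 * mV)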