def test_run(traj, to_print):
    assert isinstance(traj, SingleRun)
    print(to_print)

    # Derive a new parameter from the explored parameter `x`
    x = traj.x.value
    y = traj.f_add_derived_parameter('y')
    y.val = x ** 2

    # Store a result that wraps the derived value
    smurf = Result('', '', '', '')
    z = traj.f_add_result('Nada.Moo', smurf)
    z.val = y() + 1

    print('Dat wars')
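
# Illustrative usage sketch (not part of the original snippet): assuming pypet's
# Environment API, a run function like `test_run` is registered via `f_run` and
# executed once per parameter combination. The trajectory name, the parameter
# 'x' and the message string are assumptions made up for this example.
if __name__ == '__main__':
    from pypet import Environment

    env = Environment(trajectory='example_trajectory')
    traj = env.v_trajectory
    traj.f_add_parameter('x', 3)                    # parameter read inside test_run
    env.f_run(test_run, 'Hello from the example')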
self._timestamp = init_time
self._time = formatted_time

# In case the user provided a git repository path, a git commit is performed and the
# environment's hexsha is taken from that commit if the commit was triggered by this
# particular environment; otherwise a new one is generated below.
if self._git_repository is not None:
    new_commit, self._hexsha = make_git_commit(self, self._git_repository,
                                               self._git_message,
                                               self._git_fail)  # hexsha identifying this environment
else:
    new_commit = False

if not new_commit:
    # Otherwise we need to create a new hexsha ourselves
    self._hexsha = hashlib.sha1(compat.tobytes(self.v_trajectory.v_name +
                                               str(self.v_trajectory.v_timestamp) +
                                               str(self.v_timestamp) +
                                               VERSION)).hexdigest()

# Create the name of the environment
short_hexsha = self._hexsha[0:7]
name = 'environment'
self._name = name + '_' + str(short_hexsha) + '_' + self._time  # Name of environment

# The trajectory should know the hexsha of the current environment.
# Thus, for all runs, one can identify by which environment they were run.
self._traj._environment_hexsha = self._hexsha
self._traj._environment_name = self._name

self._logging_manager.trajectory = self._traj
self._logging_manager.remove_null_handler()
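
# Standalone sketch of the fallback naming scheme described above (the VERSION
# string, the example timestamps and the time format are assumptions, not taken
# from pypet itself): without a git commit, the hexsha is a SHA-1 over the
# trajectory name, both timestamps and the version, and the environment name
# combines its first seven characters with the formatted time.
import hashlib
import time

VERSION = '0.0.0'                       # placeholder for the library version string
traj_name = 'my_trajectory'
traj_timestamp = 1400000000.0
env_timestamp = time.time()
formatted_time = time.strftime('%Y_%m_%d_%Hh%Mm%Ss', time.localtime(env_timestamp))

hexsha = hashlib.sha1((traj_name + str(traj_timestamp) +
                       str(env_timestamp) + VERSION).encode('utf-8')).hexdigest()
environment_name = 'environment_%s_%s' % (hexsha[0:7], formatted_time)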
def _load(self, load_dict):
    """Loads data from `load_dict`.

    Reconstruction of sparse matrices similar to the
    :class:`~pypet.parameter.SparseParameter`.
    """
    for key in compat.listkeys(load_dict):
        # We delete keys over time, so check whether the key is still there:
        if key in load_dict:
            if SparseResult.IDENTIFIER in key:
                # A flattened sparse matrix: pop all pieces belonging to it and rebuild it
                new_key = key.split(SparseResult.IDENTIFIER)[0]
                is_dia = load_dict.pop(new_key + SparseResult.IDENTIFIER + 'is_dia')
                name_list = SparseParameter._get_name_list(is_dia)
                rename_list = ['%s%s%s' % (new_key, SparseResult.IDENTIFIER, name)
                               for name in name_list]
                data_list = [load_dict.pop(name) for name in rename_list]
                matrix = SparseParameter._reconstruct_matrix(data_list)
                self._data[new_key] = matrix
            else:
                self._data[key] = load_dict[key]
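
# Generic sketch of the decompose/rebuild idea used above, with scipy's CSR
# format. This is NOT pypet's exact on-disk layout (that is hidden behind
# SparseParameter._get_name_list/_reconstruct_matrix); it only illustrates how a
# sparse matrix can be flattened into plain arrays and restored. The dictionary
# keys are made up, loosely mirroring `new_key + IDENTIFIER + name`.
import numpy as np
from scipy import sparse

matrix = sparse.csr_matrix(np.array([[1.0, 0.0], [0.0, 2.0]]))

# Flatten the matrix into storable pieces
store = {
    'mat__data': matrix.data,
    'mat__indices': matrix.indices,
    'mat__indptr': matrix.indptr,
    'mat__shape': matrix.shape,
}

# Rebuild the matrix from the stored pieces
rebuilt = sparse.csr_matrix(
    (store['mat__data'], store['mat__indices'], store['mat__indptr']),
    shape=store['mat__shape'])
assert np.array_equal(rebuilt.toarray(), matrix.toarray())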
item = args[0]

# Assume we have to create a new item unless args[0] already is an instance
create_new = True
try:
    name = item.v_full_name
    instance = item
    create_new = False
except AttributeError:
    pass

# If the item is not an instance yet, check whether args[0] is a class and args[1]
# is a string giving the name of the new instance.
# If args[0] is not a class, it is assumed to be the name of the new instance.
if create_new:
    if len(args) > 0 and inspect.isclass(args[0]):
        constructor = args.pop(0)
    if len(args) > 0 and isinstance(args[0], compat.base_type):
        name = args.pop(0)
    elif 'name' in kwargs:
        name = kwargs.pop('name')
    elif 'full_name' in kwargs:
        name = kwargs.pop('full_name')
    else:
        raise ValueError('Could not determine a name of the new item you want to add. '
                         'Either pass the name as positional argument or as a keyword '
                         'argument `name`.')

if check_naming:
    split_names = name.split('.')
    for idx, name in enumerate(split_names):
        translated_shortcut = self._translate_shortcut(name)
        if translated_shortcut:
            translated_shortcut = self._replace_wildcard(translated_shortcut)
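
# Illustrative sketch of the call signatures the branching above supports; the
# result names and values are assumptions made up for this example, and only
# patterns visible in the code above are shown.
from pypet import Trajectory, Result

traj = Trajectory('signature_example')
traj.f_add_result('plain.my_result', 42)                # name as the first positional argument
traj.f_add_result(Result, 'constructed.my_result', 42)  # explicit constructor followed by the name
traj.f_add_result(Result('instance.my_result', 42))     # ready-made instance, name taken from it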
else:
    commentstr = ''

if user_message:
    user_message += ' -- '

message = '%sTrajectory: `%s`, Time: `%s`, %s' % \
          (user_message, traj.v_name, traj.v_time, commentstr)

# Detect changes:
diff = index.diff(None)

if diff:
    if git_fail:
        # The user requested a failure instead of a new commit
        raise pex.GitDiffError('Found uncommitted changes!')
    # Make the commit
    repo.git.add('-u')
    commit = index.commit(message)
    new_commit = True
else:
    # Take the old commit
    commit = repo.commit(None)
    new_commit = False

# Add the commit info to the trajectory
add_commit_variables(traj, commit)

return new_commit, commit.hexsha
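
# Minimal standalone sketch of the detect-and-commit flow above, using GitPython
# directly; the repository path and the commit message are assumptions made up
# for this example.
import git

repo = git.Repo('/path/to/some/repository')
index = repo.index

if index.diff(None):                # uncommitted changes to tracked files?
    repo.git.add('-u')              # stage all modified tracked files
    commit = index.commit('Automatic commit before starting the runs')
else:
    commit = repo.commit(None)      # no changes: reuse the current HEAD commit
print(commit.hexsha)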
class PickleResult(Result):
    """ Result that digests everything and simply pickles it!

    Note that it is not checked whether the data can be pickled, so take care that it works!

    You can pass the pickle protocol via `protocol=2` to the constructor or change it later with
    the `v_protocol` property. The default protocol is 0.

    Note that after storage to disk, changing the protocol has no effect.
    If the result is loaded, `v_protocol` is set to the protocol used to
    store an item. Items are reconstructed from a dictionary and the protocol
    is taken from the first one found in the dictionary. This is a rather arbitrary choice.
    Yet, the underlying assumption is that all items were pickled with the same protocol,
    which is the general case.
    """

    PROTOCOL = PickleParameter.PROTOCOL
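
# Illustrative usage sketch for the docstring above; the result name, the data
# and the protocol value are assumptions made up for this example.
res = PickleResult('results.pickled_data', {'a': 1}, protocol=2)
print(res.v_protocol)   # -> 2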
"""
for key in compat.listkeys(load_dict):
# We delete keys over time:
if key in load_dict:
if SparseResult.IDENTIFIER in key:
new_key = key.split(SparseResult.IDENTIFIER)[0]
is_dia = load_dict.pop(new_key + SparseResult.IDENTIFIER + 'is_dia')
name_list = SparseParameter._get_name_list(is_dia)
rename_list = ['%s%s%s' % (new_key, SparseResult.IDENTIFIER, name)
for name in name_list]
data_list = [load_dict.pop(name) for name in rename_list]
matrix = SparseParameter._reconstruct_matrix(data_list)
self._data[new_key] = matrix
else:
self._data[key] = load_dict[key]
# Positional arguments to the user's job function
self._args = continue_dict['args']
# Keyword arguments to the user's job function
self._kwargs = continue_dict['kwargs']
# Postproc function
self._postproc = continue_dict['postproc']
# Postproc args
self._postproc_args = continue_dict['postproc_args']
# Postproc kwargs
self._postproc_kwargs = continue_dict['postproc_kwargs']

old_start_timestamp = continue_dict['start_timestamp']

# Unpack the trajectory
self._traj.v_full_copy = continue_dict['full_copy']
# Load meta data only
self._traj.f_load(load_parameters=pypetconstants.LOAD_NOTHING,
                  load_derived_parameters=pypetconstants.LOAD_NOTHING,
                  load_results=pypetconstants.LOAD_NOTHING,
                  load_other_data=pypetconstants.LOAD_NOTHING)

# Now we have to reconstruct previous results
result_list = []
full_filename_list = []
for filename in os.listdir(self._continue_path):
    _, ext = os.path.splitext(filename)
    if ext != '.rcnt':
        continue

    full_filename = os.path.join(self._continue_path, filename)
    with open(full_filename, 'rb') as cnt_file:
        result_list.append(dill.load(cnt_file))
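
# Minimal sketch of writing and re-reading one such snapshot with dill (the
# directory, the file name and the payload are assumptions made up for this
# example; pypet's actual snapshot contents are whatever each run returned).
import os
import dill

continue_path = '/tmp/continue_example'
os.makedirs(continue_path, exist_ok=True)

snapshot = ('result_run_00000000', {'y': 9})
snapshot_file = os.path.join(continue_path, 'result_00000000.rcnt')

with open(snapshot_file, 'wb') as fh:
    dill.dump(snapshot, fh)

with open(snapshot_file, 'rb') as fh:
    restored = dill.load(fh)
assert restored == snapshot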
                       ' %d. They should be equally long!' %
                       (full_name, array_length, len(traj)))

    class_constructor = traj._create_class(class_name)
    instance = class_constructor(name, comment=comment)
    parent_traj_node._nn_interface._add_from_leaf_instance(parent_traj_node, instance)

    if array_length:
        traj._explored_parameters[instance.v_full_name] = instance

    self._ann_load_annotations(instance, node=hdf5group)

    if load_data in [pypetconstants.LOAD_DATA, pypetconstants.UPDATE_DATA]:
        self._prm_load_parameter_or_result(instance, _hdf5_group=hdf5group)
else:
    if name not in parent_traj_node._children:
        new_traj_node = parent_traj_node._nn_interface._add_from_group_name(
            parent_traj_node, name)
        newly_created = True
    else:
        new_traj_node = parent_traj_node._children[name]
        newly_created = False

    if (load_data in [pypetconstants.LOAD_DATA, pypetconstants.LOAD_SKELETON] or
            newly_created):
        self._ann_load_annotations(new_traj_node, node=hdf5group)