# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def main():
    """Set up the Euler-integration example: environment, trajectory, and
    the differential equation stored as a derived parameter."""
    storage_file = os.path.join('hdf5', 'example_05.hdf5')
    env = Environment(trajectory='Example_05_Euler_Integration',
                      filename=storage_file,
                      file_title='Example_05_Euler_Integration',
                      comment='Go for Euler!')
    traj = env.v_trajectory
    trajectory_name = traj.v_name
    # 1st a) phase: parameter addition
    add_parameters(traj)
    # 1st b) phase: preparation
    # Store the differential equation (well, its source code only) as a
    # derived parameter
    traj.f_add_derived_parameter(FunctionParameter, 'diff_eq', diff_lorenz,
                                 comment='Source code of our equation!')
def main():
    """ Main *boilerplate* function to start simulation """
    # Grab the root logger for status output
    logger = logging.getLogger()
    # Make sure the folders for data and plots exist
    out_folder = os.path.join(os.getcwd(), 'experiments', 'ca_patterns_pypet')
    if not os.path.isdir(out_folder):
        os.makedirs(out_folder)
    storage_file = os.path.join(out_folder, 'all_patterns.hdf5')
    # Multiprocessing environment, queue-wrapped storage
    env = Environment(trajectory='cellular_automata',
                      multiproc=True,
                      ncores=4,
                      wrap_mode='QUEUE',
                      filename=storage_file,
                      overwrite_file=True)
    # Extract the trajectory and add the simulation parameters lazily
    traj = env.traj
    traj.v_lazy_adding = True
    traj.par.ncells = 400, 'Number of cells'
    traj.par.steps = 250, 'Number of timesteps'
    traj.par.rule_number = 30, 'The ca rule'
    traj.par.initial_name = 'random', 'The type of initial state'
    traj.par.seed = 100042, 'RNG Seed'
# Configure logging from bin/logging.yaml, redirecting the file handler's
# output into the results directory.
with open("bin/logging.yaml") as f:
    # FIX: yaml.load without an explicit Loader is deprecated (PyYAML >= 5.1)
    # and unsafe on untrusted input (arbitrary object construction).
    # safe_load is sufficient here since the config is plain scalars/dicts.
    l_dict = yaml.safe_load(f)
log_output_file = os.path.join(paths.results_path, l_dict['handlers']['file']['filename'])
l_dict['handlers']['file']['filename'] = log_output_file
logging.config.dictConfig(l_dict)
print("All output can be found in file ", log_output_file)
print("Change the values in logging.yaml to control log level and destination")
# FIX: corrected typo in user-facing message ("interesting in" -> "interested in")
print("e.g. change the handler to console for the loggers you're interested in to get output to stdout")
# HDF5 file that will hold the trajectory data
traj_file = os.path.join(paths.output_dir_path, 'data.h5')
# Create an environment that handles running our simulation
# (this initializes a PyPet environment)
env = Environment(
    trajectory=name,
    filename=traj_file,
    file_title='{} data'.format(name),
    comment='{} data'.format(name),
    add_time=True,
    freeze_input=True,
    multiproc=True,
    use_scoop=True,
    wrap_mode=pypetconstants.WRAP_MODE_LOCAL,
    automatic_storing=True,
    log_stdout=False,  # Do NOT capture stdout into the logs
    log_folder=os.path.join(paths.output_dir_path, 'logs'),
)
# Get the trajectory from the environment
traj = env.trajectory
# NOTE: Inner-loop simulator over the rastrigin benchmark function
optimizee = FunctionOptimizee(traj, 'rastrigin')
# NOTE(review): this fragment appears truncated — the trailing commas and the
# stray ')' below look like the tail of a function/constructor call whose
# opening line is missing from this file. Verify against the original source
# before relying on these assignments (as written, the commas make the first
# two names 1-tuples at module level).
dataset_path = '/Users/raphaelholca/Documents/data-sets/MNIST',
pad_size = (parameter_dict['conv_filter_side']-1)/2,
load_test = True
)
""" create directory to save data """
parameter_dict['pypet'] = True
parameter_dict['verbose'] = 0
parameter_dict['pypet_name'] = parameter_dict['name']
save_path = os.path.join('output', parameter_dict['name'])
pp.check_dir(save_path, overwrite=False)
print_dict = parameter_dict.copy()
print_dict.update(explore_dict)
""" create pypet environment """
env = pypet.Environment(trajectory = 'explore_perf',
log_stdout = False,
add_time = False,
multiproc = True,
ncores = 6,
filename = os.path.join(save_path, 'explore_perf.hdf5'))
traj = env.v_trajectory
pp.add_parameters(traj, parameter_dict)
explore_dict = pypet.cartesian_product(explore_dict, tuple(explore_dict.keys())) #if not all entry of dict need be explored through cartesian product replace tuple(.) only with relevant dict keys in tuple
explore_dict['name'] = pp.set_run_names(explore_dict, parameter_dict['name'])
traj.f_explore(explore_dict)
""" launch simulation with pypet for parameter exploration """
def main():
    """Load a stored Clustered_Network trajectory and extract the
    fano-factor results together with the explored R_ee values."""
    storage_file = os.path.join('hdf5', 'Clustered_Network.hdf5')
    # Passing a filename to the trajectory makes it create a new
    # HDF5StorageService automatically
    traj = Trajectory(filename=storage_file,
                      dynamically_imported_classes=[BrianMonitorResult,
                                                    BrianParameter])
    # Create a fake environment to enable logging:
    env = Environment(traj, do_single_runs=False)
    # Load the trajectory, but only load the skeleton of the results
    traj.f_load(index=-1, load_parameters=2, load_derived_parameters=2, load_results=1)
    # Find the result instances related to the fano factor
    fano_dict = traj.f_get_from_runs('mean_fano_factor', fast_access=False)
    # Load the actual data of the fano factor results
    ffs = fano_dict.values()
    traj.f_load_items(ffs)
    # Extract all fano-factor values and the R_ee value for each run
    ffs_values = [x.f_get() for x in ffs]
    Rees = traj.f_get('R_ee').f_get_range()
print("All output can be found in file ", log_output_file)
print("Change the values in logging.yaml to control log level and destination")
print("e.g. change the handler to console for the loggers you're interesting in to get output to stdout")
traj_file = os.path.join(paths.output_dir_path, 'data.h5')
# Benchmark configurations to run: 6-D rosenbrock bounded to [-2, 2]^6
rundict = [{'function': 'rosenbrock',
            'bound_min': [-2, -2, -2, -2, -2, -2],
            'bound_max': [2, 2, 2, 2, 2, 2]}]
for config in rundict:
    # Create an environment that handles running our simulation
    # (this initializes a PyPet environment)
    env = Environment(trajectory=name,
                      filename=traj_file,
                      file_title='{} data'.format(name),
                      comment='{} data'.format(name),
                      add_time=True,
                      freeze_input=True,
                      multiproc=True,
                      use_scoop=True,
                      wrap_mode=pypetconstants.WRAP_MODE_LOCAL,
                      automatic_storing=True,
                      log_stdout=True,  # Sends stdout to logs
                      log_folder=os.path.join(paths.output_dir_path, 'logs'))
    # Get the trajectory from the environment
    traj = env.trajectory
    # NOTE: Inner-loop simulator for the configured benchmark function
    optimizee = FunctionOptimizee(traj, config['function'])
def main():
    """Main function to protect the *entry point* of the program.

    If you want to use multiprocessing with SCOOP you need to wrap your
    main code creating an environment into a function. Otherwise
    the newly started child processes will re-execute the code and throw
    errors (also see http://scoop.readthedocs.org/en/latest/usage.html#pitfalls).
    """
    # Create an environment that handles running,
    # with multiprocessing via SCOOP enabled:
    storage_file = os.path.join('hdf5', 'example_21.hdf5')
    env = Environment(trajectory='Example_21_SCOOP',
                      filename=storage_file,
                      file_title='Example_21_SCOOP',
                      log_stdout=True,
                      comment='Multiprocessing example using SCOOP!',
                      multiproc=True,
                      freeze_input=True,  # Save overhead and freeze input
                      use_scoop=True,     # Yes we want SCOOP!
                      # SCOOP only works with 'LOCAL' or 'NETLOCK' wrapping
                      wrap_mode=pypetconstants.WRAP_MODE_LOCAL,
                      overwrite_file=True)
    # Get the trajectory from the environment
    traj = env.trajectory
    # Add both parameters
    traj.f_add_parameter('x', 1.0, comment='I am the first dimension!')
def main():
    """Build the FiringRate experiment environment and its parameters."""
    storage_file = os.path.join('hdf5', 'FiringRate.hdf5')
    env = Environment(trajectory='FiringRate',
                      comment='Experiment to measure the firing rate '
                              'of a leaky integrate and fire neuron. '
                              'Exploring different input currents, '
                              'as well as refractory periods',
                      add_time=False,  # Don't add the current time to the name
                      log_stdout=True,
                      log_config='DEFAULT',
                      multiproc=True,
                      ncores=2,  # My laptop has 2 cores ;-)
                      wrap_mode='QUEUE',
                      filename=storage_file,
                      overwrite_file=True)
    traj = env.trajectory
    # Add parameters
import os # For path names being viable under Windows and Linux
from pypet import Environment, cartesian_product
from pypet import pypetconstants
def multiply(traj):
    """Sophisticated simulation of multiplication.

    Reads the trajectory's parameters ``x`` and ``y`` and stores their
    product as result ``z``.
    """
    product = traj.x * traj.y
    traj.f_add_result('z', product, comment='I am the product of two reals!')
# Create an environment that handles running
storage_file = os.path.join('hdf5', 'example_08.hdf5')
env = Environment(trajectory='Example08',
                  filename=storage_file,
                  file_title='Example08',
                  comment='Another example!')
# Get the trajectory from the environment
traj = env.v_trajectory
# Add both parameters
traj.f_add_parameter('x', 1, comment='I am the first dimension!')
traj.f_add_parameter('y', 1, comment='I am the second dimension!')
# Explore the parameters with a cartesian product:
traj.f_explore(cartesian_product({'x': [1, 2, 3, 4], 'y': [6, 7, 8]}))
# Run the simulation
env.f_run(multiply)
def main():
    """Run the FiringRate experiment through a pypet pipeline."""
    storage_file = os.path.join('hdf5', 'FiringRate.hdf5')
    env = Environment(trajectory='FiringRatePipeline',
                      comment='Experiment to measure the firing rate '
                              'of a leaky integrate and fire neuron. '
                              'Exploring different input currents, '
                              'as well as refractory periods',
                      add_time=False,  # Don't add the current time to the name
                      log_stdout=True,
                      multiproc=True,
                      ncores=2,  # My laptop has 2 cores ;-)
                      filename=storage_file,
                      overwrite_file=True)
    env.pipeline(mypipeline)
    # Finally disable logging and close all log-files
    env.disable_logging()