import pydash as ps
from slm_lab.lib import util


def test_cast_list(test_list, test_str):
    '''test_list and test_str are pytest fixtures'''
    assert ps.is_list(test_list)
    assert ps.is_list(util.cast_list(test_list))
    assert not ps.is_list(test_str)
    assert ps.is_list(util.cast_list(test_str))
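# For reference, a minimal sketch of what util.cast_list is assumed to do
# (not SLM-Lab's actual implementation): wrap any non-list value in a list
# and pass lists through unchanged.
def cast_list_sketch(val):
    return val if isinstance(val, list) else [val]

assert cast_list_sketch('a') == ['a']
assert cast_list_sketch([1, 2]) == [1, 2]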
import pandas as pd
import pydash as ps
from slm_lab.experiment import analysis
from slm_lab.experiment.control import Trial
from slm_lab.experiment.monitor import InfoSpace
from slm_lab.spec import spec_util


def run_trial_test_dist(spec_file, spec_name=False):
    spec = spec_util.get(spec_file, spec_name)
    spec = spec_util.override_test_spec(spec)
    info_space = InfoSpace()
    info_space.tick('trial')
    spec['meta']['distributed'] = True
    spec['meta']['max_session'] = 2
    trial = Trial(spec, info_space)
    # manually run the logic to obtain global nets, to ensure the global net gets updated during testing
    global_nets = trial.init_global_nets()
    # only test the first network
    if ps.is_list(global_nets):  # multiagent: test only the first agent's nets
        net = list(global_nets[0].values())[0]
    else:
        net = list(global_nets.values())[0]
    session_datas = trial.parallelize_sessions(global_nets)
    trial.session_data_dict = {data.index[0]: data for data in session_datas}
    trial_data = analysis.analyze_trial(trial)
    trial.close()
    assert isinstance(trial_data, pd.DataFrame)
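# Illustrative call with a hypothetical spec file and name; in SLM-Lab, spec
# JSON files live under slm_lab/spec/ and spec_name selects one spec inside.
run_trial_test_dist('demo.json', 'dqn_cartpole')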
import pydash as ps
import torch


def guard_multi_pdparams(pdparams, body):
    '''Guard pdparams for multi action'''
    action_dim = body.action_dim
    is_multi_action = ps.is_iterable(action_dim)
    if is_multi_action:
        assert ps.is_list(pdparams)
        pdparams = [t.clone() for t in pdparams]  # clone for grad safety
        assert len(pdparams) == len(action_dim), pdparams
        # transpose into (batch_size, [action_dims])
        pdparams = [list(torch.split(t, action_dim, dim=0)) for t in torch.cat(pdparams, dim=1)]
    return pdparams
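# A toy run of the transpose above, assuming action_dim = [2, 3] and batch
# size 4: cat gives shape (4, 5); iterating yields rows, and split recovers
# the per-action chunks from each row.
import torch

pdparams = [torch.zeros(4, 2), torch.zeros(4, 3)]
rows = [list(torch.split(t, [2, 3], dim=0)) for t in torch.cat(pdparams, dim=1)]
assert len(rows) == 4
assert rows[0][0].shape == (2,) and rows[0][1].shape == (3,)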
fc_dims = [self.in_dim] + self.fc_hid_layers
self.fc_model = net_util.build_fc_model(fc_dims, self.hid_layers_activation)
self.rnn_input_dim = fc_dims[-1]
# RNN model
self.rnn_model = getattr(nn, net_util.get_nn_name(self.cell_type))(
    input_size=self.rnn_input_dim,
    hidden_size=self.rnn_hidden_size,
    num_layers=self.rnn_num_layers,
    batch_first=True, bidirectional=self.bidirectional)
# tails. avoid a list for the single-tail case for compute speed
if ps.is_integer(self.out_dim):
    self.model_tail = net_util.build_fc_model([self.rnn_hidden_size, self.out_dim], self.out_layer_activation)
else:
    if not ps.is_list(self.out_layer_activation):
        self.out_layer_activation = [self.out_layer_activation] * len(self.out_dim)
    assert len(self.out_layer_activation) == len(self.out_dim)
    tails = []
    for out_d, out_activ in zip(self.out_dim, self.out_layer_activation):
        tail = net_util.build_fc_model([self.rnn_hidden_size, out_d], out_activ)
        tails.append(tail)
    self.model_tails = nn.ModuleList(tails)
net_util.init_layers(self, self.init_fn)
self.loss_fn = net_util.get_loss_fn(self, self.loss_spec)
self.to(self.device)
self.train()
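# How the getattr(nn, ...) lookup above resolves a recurrent cell by name,
# shown standalone; 'GRU' is an assumed spec value ('RNN' and 'LSTM' resolve
# the same way).
import torch
import torch.nn as nn

rnn = getattr(nn, 'GRU')(input_size=64, hidden_size=32, num_layers=1, batch_first=True)
x = torch.randn(8, 10, 64)  # (batch, seq_len, input_size) since batch_first=True
out, _ = rnn(x)
assert out.shape == (8, 10, 32)  # hidden state at every timestep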
def build_model_tails(self, out_dim, out_layer_activation):
    '''Build each model_tail. These are stored as Sequential models in model_tails'''
    if not ps.is_list(out_layer_activation):
        out_layer_activation = [out_layer_activation] * len(out_dim)
    model_tails = nn.ModuleList()
    if ps.is_empty(self.tail_hid_layers):
        for out_d, out_activ in zip(out_dim, out_layer_activation):
            tail = net_util.build_fc_model([self.body_hid_layers[-1], out_d], out_activ)
            model_tails.append(tail)
    else:
        assert len(self.tail_hid_layers) == len(out_dim), 'Hydra tail hid_layers inconsistent with the number of out dims'
        for out_d, out_activ, hid_layers in zip(out_dim, out_layer_activation, self.tail_hid_layers):
            dims = hid_layers
            model_tail = net_util.build_fc_model(dims, self.hid_layers_activation)
            tail_out = net_util.build_fc_model([dims[-1], out_d], out_activ)
            model_tail.add_module(str(len(model_tail)), tail_out)
            model_tails.append(model_tail)
    return model_tails
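# A minimal stand-in for net_util.build_fc_model as used above (an assumption,
# not SLM-Lab's implementation): Linear layers between consecutive dims, each
# followed by the named activation.
import torch.nn as nn

def build_fc_model_sketch(dims, activation='relu'):
    acts = {'relu': nn.ReLU, 'tanh': nn.Tanh, 'sigmoid': nn.Sigmoid}
    layers = []
    for in_d, out_d in zip(dims[:-1], dims[1:]):
        layers.append(nn.Linear(in_d, out_d))
        if activation is not None:
            layers.append(acts[activation]())
    return nn.Sequential(*layers)

assert len(build_fc_model_sketch([64, 32, 8])) == 4  # 2 Linear + 2 ReLU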
def build_conv_layers(self, conv_hid_layers):
    '''
    Build all of the convolutional layers in the network and store them in a Sequential model
    '''
    conv_layers = []
    in_d = self.in_dim[0]  # input channel
    for i, hid_layer in enumerate(conv_hid_layers):
        hid_layer = [tuple(e) if ps.is_list(e) else e for e in hid_layer]  # guard list-to-tuple
        # hid_layer = out_d, kernel, stride, padding, dilation
        conv_layers.append(nn.Conv2d(in_d, *hid_layer))
        if self.hid_layers_activation is not None:
            conv_layers.append(net_util.get_activation_fn(self.hid_layers_activation))
        # don't include batch norm in the first layer
        if self.batch_norm and i != 0:
            conv_layers.append(nn.BatchNorm2d(hid_layer[0]))
        in_d = hid_layer[0]  # update to out_d
    conv_model = nn.Sequential(*conv_layers)
    return conv_model
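# The list-to-tuple guard above, demonstrated standalone with an assumed layer
# spec [out_d, kernel, stride, padding, dilation] where the kernel arrives as
# a JSON-style list:
import torch
import torch.nn as nn

hid_layer = [16, [3, 3], 1, 0, 1]
hid_layer = [tuple(e) if isinstance(e, list) else e for e in hid_layer]
conv = nn.Conv2d(3, *hid_layer)  # Conv2d(in_channels, out_d, kernel, stride, padding, dilation)
assert conv(torch.randn(1, 3, 8, 8)).shape == (1, 16, 6, 6)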
def get_out_dim(body, add_critic=False):
    '''Construct the NetClass out_dim for a body according to is_discrete, action_type, and whether to add a critic unit'''
    policy_out_dim = get_policy_out_dim(body)
    if add_critic:
        if ps.is_list(policy_out_dim):
            out_dim = policy_out_dim + [1]
        else:
            out_dim = [policy_out_dim, 1]
    else:
        out_dim = policy_out_dim
    return out_dim
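# The same branching shown standalone as a pure-Python sketch: a critic adds
# one extra output unit, turning a scalar out_dim into a list.
def get_out_dim_sketch(policy_out_dim, add_critic=False):
    if add_critic:
        return policy_out_dim + [1] if isinstance(policy_out_dim, list) else [policy_out_dim, 1]
    return policy_out_dim

assert get_out_dim_sketch(4) == 4
assert get_out_dim_sketch(4, add_critic=True) == [4, 1]
assert get_out_dim_sketch([2, 3], add_critic=True) == [2, 3, 1]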
"""
if not callable(updater):
updater = pyd.constant(updater)
if customizer is not None and not callable(customizer):
call_customizer = partial(callit, clone, customizer, argcount=1)
elif customizer:
call_customizer = partial(callit, customizer,
argcount=getargcount(customizer, maxargs=3))
else:
call_customizer = None
default_type = dict if isinstance(obj, dict) else list
tokens = to_path_tokens(path)
if not pyd.is_list(tokens): # pragma: no cover
tokens = [tokens]
last_key = pyd.last(tokens)
if isinstance(last_key, PathToken):
last_key = last_key.key
target = obj
for idx, token in enumerate(pyd.initial(tokens)):
if isinstance(token, PathToken):
key = token.key
default_factory = pyd.get(tokens,
[idx + 1, 'default_factory'],
default=default_type)
else:
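# The path-token walk above backs pydash helpers such as set_ and update,
# which create intermediate dicts or lists along the path as needed:
import pydash as pyd

obj = {}
pyd.set_(obj, 'a.b[0].c', 1)
assert obj == {'a': {'b': [{'c': 1}]}}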