# Inside a loop over (param, grad) pairs and their per-op arguments
# (the enclosing loop header is elided in the original snippet):
(param, grad), arguments = e
if _workspace.HasTensor(grad):
    grads.append(grad)
    arguments = dict(arguments, **extra_arguments)
    update_ops.append(
        _proto_utils.MakeOperatorDef(
            op_type=updater.type(),
            inputs=[grad],
            outputs=[param],
            name=_helper.OperatorHelper.get_name(),
            **arguments
        )
    )
else:
    _logging.info('Skip to update Tensor({}).'.format(param))

# Check data parallel if necessary
if _mpi.Is_Init():
    (rank, group), arguments = _mpi.AllowParallel(), {}
    if rank != -1:
        arguments['mode'] = '%s_ALLREDUCE' % _mpi.GetParallelMode()
        arguments['root'], (arguments['comm'], arguments['group']) \
            = group[0], _mpi.CreateGroup(root=group[0], incl=group)
        # Insert the collective op at position 0 so gradients are
        # synchronized across ranks before any per-parameter update
        # consumes them.
        update_ops.insert(
            0, _proto_utils.MakeOperatorDef(
                op_type='CollectiveUpdate',
                inputs=grads,
                outputs=grads,
                name=_helper.OperatorHelper.get_name(),
                **arguments
            )
        )
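
# A minimal sketch (plain dicts, no framework APIs) of why the collective op
# is inserted at index 0: it must average gradients across ranks before any
# per-parameter update runs. make_op and the op names here are hypothetical
# stand-ins, not the library's real MakeOperatorDef.

def make_op(op_type, inputs, outputs, **arguments):
    # Bundle the operator fields into a plain dict.
    return dict(op_type=op_type, inputs=inputs, outputs=outputs, **arguments)

grads, update_ops = [], []
for param, grad in [('w', 'w_grad'), ('b', 'b_grad')]:
    grads.append(grad)
    update_ops.append(make_op('SGDUpdate', [grad], [param], lr=0.01))

# Prepend the all-reduce so it executes before every update op.
update_ops.insert(0, make_op('CollectiveUpdate', grads, grads, mode='MPI_ALLREDUCE'))

for op in update_ops:
    print(op['op_type'], op['inputs'], '->', op['outputs'])
# CollectiveUpdate ['w_grad', 'b_grad'] -> ['w_grad', 'b_grad']
# SGDUpdate ['w_grad'] -> ['w']
# SGDUpdate ['b_grad'] -> ['b']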

file_path = prefix + filename + suffix
if _mpi.Is_Init():
    if not _mpi.AllowSnapshot(): return
    # Write one snapshot file per MPI rank
    file_path = file_path + '.rank.{}'.format(_mpi.Rank())
dir = os.path.split(file_path)[0]
if len(dir) > 0 and not os.path.exists(dir): os.makedirs(dir)
if format == 'pickle':
    # Fetch every tensor into a plain dict and pickle it
    state_dict = {}
    for tensor in tensors:
        state_dict[tensor.name] = FetchTensor(tensor)
    with open(file_path, 'wb') as f:
        pickle.dump(state_dict, f, pickle.HIGHEST_PROTOCOL)
    _logging.info('Snapshot Model@: ' + file_path)
    _logging.info('Model Format: Pickle')
elif format == 'caffe':
    names = [tensor.name for tensor in tensors]
    get_default_workspace().Snapshot(file_path, names, 1)
else:
    raise TypeError('Unknown binary format: ' + format)
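
# A self-contained sketch of the 'pickle' branch above, with plain Python
# values in place of workspace tensors. save_state and the tensor name are
# hypothetical, not part of the library.
import os
import pickle
import tempfile

def save_state(state_dict, file_path):
    # Mirror the directory-creation and pickle logic of the snapshot code.
    dir_path = os.path.split(file_path)[0]
    if dir_path and not os.path.exists(dir_path):
        os.makedirs(dir_path)
    with open(file_path, 'wb') as f:
        pickle.dump(state_dict, f, pickle.HIGHEST_PROTOCOL)

path = os.path.join(tempfile.mkdtemp(), 'snapshots', 'model.pkl')
save_state({'conv1/weight': [0.1, 0.2]}, path)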

def cleanup():
    def terminate(processes):
        for process in processes:
            process.terminate()
            process.join()
    terminate(self._fetchers)
    if local_rank == 0: _logging.info('Terminate BlobFetcher.')
    terminate(self._transformers)
    if local_rank == 0: _logging.info('Terminate DataTransformer.')
    terminate(self._readers)
    if local_rank == 0: _logging.info('Terminate DataReader.')

import atexit
# The snippet breaks off after the import; registering the hook is the
# natural continuation.
atexit.register(cleanup)
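
# A self-contained sketch of the same pattern: spawn worker processes, then
# register a cleanup hook so they are terminated and joined when the
# interpreter exits. All names here are illustrative.
import atexit
import multiprocessing
import time

def work():
    while True:
        time.sleep(1)

if __name__ == '__main__':
    fetchers = [multiprocessing.Process(target=work, daemon=True) for _ in range(2)]
    for p in fetchers:
        p.start()

    def cleanup():
        for p in fetchers:
            p.terminate()
            p.join()

    atexit.register(cleanup)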

        The graph name to run.

    """
    options = _cfg.GetGlobalOptions()
    if options['log_meta_graph']: print(graph_def)
    if options['export_meta_graph']:
        if not os.path.exists(options['export_meta_graph']):
            try:
                os.makedirs(options['export_meta_graph'])
            except Exception:
                raise ValueError('The given prefix is invalid.')
        path = os.path.join(
            options['export_meta_graph'],
            graph_def.name + '.metatxt')
        with open(path, 'w') as f: f.write(str(graph_def))
        _logging.info('Export meta graph to: {}'.format(path))
    return get_default_workspace().CreateGraph(
        _stringify_proto(graph_def), options['log_optimized_graph'])
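
# A minimal sketch of the export step above, assuming graph_def is any object
# whose str() is its text serialization. os.makedirs(..., exist_ok=True) is
# the idiomatic replacement for the exists-check-then-create dance; the
# function name is hypothetical.
import os

def export_text_proto(graph_def, export_dir, name):
    os.makedirs(export_dir, exist_ok=True)
    path = os.path.join(export_dir, name + '.metatxt')
    with open(path, 'w') as f:
        f.write(str(graph_def))
    return path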

        The format of this binary file.

    Returns
    -------
    None

    """
    assert os.path.exists(binary_file), \
        'Binary file({}) does not exist.'.format(binary_file)
    if format == 'pickle':
        try:
            state_dict = pickle.load(open(binary_file, 'rb'))
        except UnicodeDecodeError:
            # Fall back for pickles written under Python 2
            state_dict = pickle.load(
                open(binary_file, 'rb'), encoding='iso-8859-1')
        _logging.info('Restore From Model@: ' + binary_file)
        _logging.info('Model Format: Pickle')
        for k, v in state_dict.items():
            if HasTensor(k):
                FeedTensor(k, v)
                _logging.info('Tensor({}) is restored.'.format(k))
    elif format == 'caffe':
        get_default_workspace().Restore(binary_file, 1)
    else:
        raise TypeError('Unknown binary format: ' + format)
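
# A self-contained sketch of the Python-2 pickle fallback used above: pickles
# written by Python 2 may hold byte strings that fail to decode under the
# default 'ascii' encoding, while encoding='iso-8859-1' maps every byte value
# to a code point, so the second load cannot raise UnicodeDecodeError.
# load_state is a hypothetical name.
import pickle
import tempfile

def load_state(binary_file):
    try:
        with open(binary_file, 'rb') as f:
            return pickle.load(f)
    except UnicodeDecodeError:
        with open(binary_file, 'rb') as f:
            return pickle.load(f, encoding='iso-8859-1')

with tempfile.NamedTemporaryFile(suffix='.pkl', delete=False) as f:
    pickle.dump({'fc1/bias': [0.0]}, f, pickle.HIGHEST_PROTOCOL)
print(load_state(f.name))  # {'fc1/bias': [0.0]}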