Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment — the matching `if` branch and the enclosing
# function header are above this view, and indentation has been stripped.
# This branch serves histogram data directly from event files via the
# multiplexer and returns it JSON-encoded.
else:
# Serve data from events files.
try:
tensor_events = self._multiplexer.Tensors(run, tag)
except KeyError:
# Unknown run/tag pair: surface as a not-found error to the caller.
raise errors.NotFoundError(
"No histogram tag %r for run %r" % (tag, run)
)
if downsample_to is not None:
# Fixed seed so downsampling is deterministic across identical requests.
rng = random.Random(0)
tensor_events = _downsample(rng, tensor_events, downsample_to)
# Each entry is [wall_time, step, histogram_values_as_nested_lists].
events = [
[
e.wall_time,
e.step,
tensor_util.make_ndarray(e.tensor_proto).tolist(),
]
for e in tensor_events
]
return (events, "application/json")
node_name: The name of the node (without the output slot).
tensor_proto: A tensor proto of data.
node_name_set: An optional set of node names that are relevant. If not
provided, no filtering by relevance occurs.
Returns:
An event_accumulator.HealthPillEvent. Or None if one could not be created.
"""
# NOTE(review): fragment — the enclosing `def` and the opening of its
# docstring are above this view; indentation has been stripped.
if node_name_set and node_name not in node_name_set:
# This event is not relevant.
return None
# Since we seek health pills for a specific step, this function
# returns 1 health pill per node per step. The wall time is the
# seconds since the epoch.
elements = list(tensor_util.make_ndarray(tensor_proto))
return HealthPillEvent(
wall_time=wall_time,
step=step,
device_name=device_name,
output_slot=output_slot,
node_name=node_name,
# Presumably element 12 carries the dtype enum and elements 14+ the
# tensor shape, per the debugger's health-pill layout — TODO confirm
# against the DebugNumericSummary element specification.
dtype=repr(tf.as_dtype(elements[12])),
shape=elements[14:],
value=elements,
)
# NOTE(review): fragment — the branch that populated `cursor` and the
# enclosing function header are above this view; indentation is stripped.
# Builds (wall_time, step, value) triples from either a DB cursor or the
# event multiplexer, then formats them as CSV or JSON.
values = [
(wall_time, step, self._get_value(data, dtype_enum))
for (step, wall_time, data, dtype_enum) in cursor
]
else:
# No database branch: read scalar events via the multiplexer instead.
try:
tensor_events = self._multiplexer.Tensors(run, tag)
except KeyError:
raise errors.NotFoundError(
"No scalar data for run=%r, tag=%r" % (run, tag)
)
values = [
(
tensor_event.wall_time,
tensor_event.step,
# .item() extracts the scalar as a native Python number.
tensor_util.make_ndarray(tensor_event.tensor_proto).item(),
)
for tensor_event in tensor_events
]
if output_format == OutputFormat.CSV:
# Render a three-column CSV with a header row.
string_io = StringIO()
writer = csv.writer(string_io)
writer.writerow(["Wall time", "Step", "Value"])
writer.writerows(values)
return (string_io.getvalue(), "text/csv")
else:
return (values, "application/json")
def _convert_tensor_event(event):
    """Helper for `read_tensors`.

    Wraps a single TensorEvent as a `provider.TensorDatum`, decoding the
    tensor proto into a numpy array along the way.
    """
    decoded = tensor_util.make_ndarray(event.tensor_proto)
    datum = provider.TensorDatum(
        wall_time=event.wall_time,
        step=event.step,
        numpy=decoded,
    )
    return datum
def process_string_tensor_event(event):
    """Convert a TensorEvent into a JSON-compatible response."""
    # Decode the string tensor, then render its contents as HTML.
    text_arr = tensor_util.make_ndarray(event.tensor_proto)
    rendered = text_array_to_html(text_arr)
    response = {
        "wall_time": event.wall_time,
        "step": event.step,
    }
    response["text"] = rendered
    return response
def _get_sample(self, tensor_event, sample):
    """Returns a single sample from a batch of samples."""
    # Decode the full batch, then select the requested sample index.
    batch = tensor_util.make_ndarray(tensor_event.tensor_proto)
    selected = batch[sample]
    return selected.tolist()
# NOTE(review): fragment — the dict comprehension below is truncated; its
# opening brace and the enclosing method header are above this view, and
# indentation has been stripped.
for tag, tagdata in six.iteritems(tagged_data)
}
# Maps each tag name to its database row id, creating rows as needed.
tag_to_id = self._maybe_init_tags(run_id, tag_to_metadata)
tensor_values = []
for tag, tagdata in six.iteritems(tagged_data):
tag_id = tag_to_id[tag]
for step, wall_time, tensor_proto in tagdata.values:
dtype = tensor_proto.dtype
# Serialize the shape as comma-joined dimension sizes, e.g. "2,3".
shape = ",".join(
str(d.size) for d in tensor_proto.tensor_shape.dim
)
# Use tensor_proto.tensor_content if it's set, to skip relatively
# expensive extraction into intermediate ndarray.
data = self._make_blob(
tensor_proto.tensor_content
or tensor_util.make_ndarray(tensor_proto).tobytes()
)
tensor_values.append(
(tag_id, step, wall_time, dtype, shape, data)
)
# INSERT OR REPLACE overwrites prior rows — presumably keyed on a
# unique (series, step) constraint; verify against the table schema.
self._db.executemany(
"""
INSERT OR REPLACE INTO Tensors (
series, step, computed_time, dtype, shape, data
) VALUES (?, ?, ?, ?, ?, ?)
""",
tensor_values,
)