ON Tensors.rowid = T1.tensor_rowid
WHERE
  series = (
    SELECT tag_id
    FROM Runs
    CROSS JOIN Tags USING (run_id)
    WHERE Runs.run_name = :run AND Tags.tag_name = :tag)
  AND step IS NOT NULL
  AND dtype = :dtype
  /* Should be n-vector, n >= 3: [width, height, samples...] */
  AND (NOT INSTR(shape, ',') AND CAST (shape AS INT) >= 3)
  AND T0.idx = 0
  AND T1.idx = 1
ORDER BY step
""",
{"run": run, "tag": tag, "dtype": tf.string.as_datatype_enum},
)
return [
    {
        "wall_time": computed_time,
        "step": step,
        "width": width,
        "height": height,
        "query": self._query_for_individual_image(
            run, tag, sample, index
        ),
    }
    for index, (computed_time, step, width, height) in enumerate(cursor)
]
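# Illustrative sketch (not the plugin's actual code): the helper referenced
# above plausibly URL-encodes the coordinates of one image sample so the
# frontend can fetch it later; the parameter names here are assumptions.
import urllib.parse

def _query_for_individual_image(run, tag, sample, index):
    """Builds a query string identifying a single image sample."""
    return urllib.parse.urlencode(
        {"run": run, "tag": tag, "sample": sample, "index": index}
    )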
Args:
  logdir: A log directory that contains event files.
  event_file: Or, a particular event file path.
  tag: An optional tag name to query for.

Returns:
  A list of InspectionUnit objects.
"""
if logdir:
    subdirs = io_wrapper.GetLogdirSubdirectories(logdir)
    inspection_units = []
    for subdir in subdirs:
        generator = itertools.chain(
            *[
                generator_from_event_file(os.path.join(subdir, f))
                for f in tf.io.gfile.listdir(subdir)
                if io_wrapper.IsTensorFlowEventsFile(
                    os.path.join(subdir, f)
                )
            ]
        )
        inspection_units.append(
            InspectionUnit(
                name=subdir,
                generator=generator,
                field_to_obs=get_field_to_observations_map(generator, tag),
            )
        )
    if inspection_units:
        print(
            "Found event files in:\n{}\n".format(
                "\n".join([u.name for u in inspection_units])
            )
        )
def _migrate_image_value(value):
    image_value = value.image
    data = [
        tf.compat.as_bytes(str(image_value.width)),
        tf.compat.as_bytes(str(image_value.height)),
        tf.compat.as_bytes(image_value.encoded_image_string),
    ]
    summary_metadata = image_metadata.create_summary_metadata(
        display_name=value.metadata.display_name or value.tag,
        description=value.metadata.summary_description,
    )
    return make_summary(value.tag, summary_metadata, data)
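# Illustrative counterpart (not part of the module): unpacking the 3-part
# string tensor that _migrate_image_value produces, per the layout above.
def _unpack_migrated_image_data(data):
    """Splits [width, height, encoded_bytes] back into named parts."""
    width = int(data[0])      # first element: width, serialized as bytes
    height = int(data[1])     # second element: height, serialized as bytes
    encoded_image = data[2]   # third element: the encoded image string
    return width, height, encoded_image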
  `logdir` used by the summary_writer.

Raises:
  ValueError: If the summary writer does not have a `logdir`.
"""
# Convert from `tf.compat.v1.summary.FileWriter` if necessary.
logdir = getattr(logdir, "get_logdir", lambda: logdir)()

# Sanity checks.
if logdir is None:
    raise ValueError("Expected logdir to be a path, but got None")

# Saving the config file in the logdir.
config_pbtxt = _text_format.MessageToString(config)
path = os.path.join(logdir, _projector_plugin.PROJECTOR_FILENAME)
with tf.io.gfile.GFile(path, "w") as f:
    f.write(config_pbtxt)
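# Typical call site for the function above, per the projector plugin's
# documented API; the tensor name, metadata path, and logdir here are
# placeholders, not required values.
from tensorboard.plugins import projector

config = projector.ProjectorConfig()
embedding = config.embeddings.add()
embedding.tensor_name = "my_embedding"    # placeholder name
embedding.metadata_path = "metadata.tsv"  # placeholder path
projector.visualize_embeddings("logs/projector", config)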
def _latest_checkpoints_changed(configs, run_path_pairs):
    """Returns true if the latest checkpoint has changed in any of the runs."""
    for run_name, assets_dir in run_path_pairs:
        if run_name not in configs:
            config = ProjectorConfig()
            config_fpath = os.path.join(assets_dir, PROJECTOR_FILENAME)
            if tf.io.gfile.exists(config_fpath):
                with tf.io.gfile.GFile(config_fpath, "r") as f:
                    file_content = f.read()
                text_format.Merge(file_content, config)
        else:
            config = configs[run_name]

        # See if you can find a checkpoint file in the logdir.
        logdir = _assets_dir_to_logdir(assets_dir)
        ckpt_path = _find_latest_checkpoint(logdir)
        if not ckpt_path:
            continue
        if config.model_checkpoint_path != ckpt_path:
            return True
    return False
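# Hedged sketch of the _find_latest_checkpoint helper called above; the
# real helper may handle more edge cases, but the core is just
# tf.train.latest_checkpoint on the run's logdir.
def _find_latest_checkpoint(dir_path):
    try:
        return tf.train.latest_checkpoint(dir_path)
    except tf.errors.NotFoundError:
        return None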
Over time, the object's temperature will adjust to match the
temperature of its environment. We'll track the object's temperature,
how far it is from the room's temperature, and how much it changes at
each time step.

Arguments:
  logdir: the top-level directory into which to write summary data
  run_name: the name of this run; will be created as a subdirectory
    under logdir
  initial_temperature: float; the object's initial temperature
  ambient_temperature: float; the temperature of the enclosing room
  heat_coefficient: float; a measure of the object's thermal
    conductivity
"""
tf.reset_default_graph()
tf.set_random_seed(0)

with tf.name_scope('temperature'):
    # Create a mutable variable to hold the object's temperature, and
    # create a scalar summary to track its value over time. The name of
    # the summary will appear as "temperature/current" due to the
    # name-scope above.
    temperature = tf.Variable(tf.constant(initial_temperature),
                              name='temperature')
    summary.op('current', temperature,
               display_name='Temperature',
               description='The temperature of the object under '
                           'simulation, in Kelvins.')

    # Compute how much the object's temperature differs from that of its
    # environment, and track this, too: likewise, as
    # "temperature/difference_to_ambient".
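    # Hedged continuation: the fragment cuts off here, but the promised
    # "temperature/difference_to_ambient" series would look like this
    # (display text assumed).
    ambient_difference = temperature - ambient_temperature
    summary.op('difference_to_ambient', ambient_difference,
               display_name='Difference to ambient temperature',
               description='How much hotter the object is than the room, '
                           'in Kelvins.')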
def _IsDirectory(parent, item):
    """Helper that returns whether parent/item is a directory."""
    return tf.io.gfile.isdir(os.path.join(parent, item))
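# Example use of the helper above: filter a listing down to immediate
# subdirectories (a sketch; `parent` is any directory readable by
# tf.io.gfile).
def _ListSubdirectories(parent):
    return [
        item for item in tf.io.gfile.listdir(parent)
        if _IsDirectory(parent, item)
    ]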
run_name = 'temperature:t0=%g,tA=%g,kH=%g' % (
    initial_temperature, final_temperature, heat_coefficient)
if verbose:
    print('--- Running: %s' % run_name)
run(logdir, run_name,
    initial_temperature, final_temperature, heat_coefficient)


def main(unused_argv):
    print('Saving output to %s.' % LOGDIR)
    run_all(LOGDIR, verbose=True)
    print('Done. Output saved to %s.' % LOGDIR)


if __name__ == '__main__':
    tf.app.run()
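# To inspect the demo's output, point TensorBoard at whatever directory
# LOGDIR resolves to (left unspecified in this fragment):
#   $ tensorboard --logdir=<LOGDIR>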
def run_all(logdir, verbose=False):
    """Generate a bunch of histogram data, and write it to logdir."""
    del verbose  # Unused in this demo.
    tf.set_random_seed(0)

    k = tf.placeholder(tf.float32)

    # Make a normal distribution, with a shifting mean
    mean_moving_normal = tf.random_normal(shape=[1000], mean=(5*k), stddev=1)
    # Record that distribution into a histogram summary
    histogram_summary.op("normal/moving_mean",
                         mean_moving_normal,
                         description="A normal distribution whose mean changes "
                                     "over time.")

    # Make a normal distribution with shrinking variance
    shrinking_normal = tf.random_normal(shape=[1000], mean=0, stddev=1-(k))
    # Record that distribution too
    histogram_summary.op("normal/shrinking_variance", shrinking_normal,
                         description="A normal distribution whose variance "
                                     "shrinks over time.")
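    # Hedged sketch of the driver this demo implies: sweep k from 0 to 1,
    # evaluate the merged summaries, and write them under logdir (the
    # subdirectory name and step count are assumptions).
    summaries = tf.summary.merge_all()
    with tf.Session() as sess:
        writer = tf.summary.FileWriter(os.path.join(logdir, "histograms"))
        N = 400
        for step in range(N):
            summ = sess.run(summaries, feed_dict={k: step / float(N)})
            writer.add_summary(summ, global_step=step)
        writer.close()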
costly disk reads by using this method. However, for other file systems,
this method might prove slower because the file system performs a walk per
call to glob (in which case it might as well just perform one walk).

Args:
  top: A path to a directory.

Yields:
  A (dir_path, file_paths) tuple for each directory/subdirectory.
"""
current_glob_string = os.path.join(_EscapeGlobCharacters(top), "*")
level = 0

while True:
    logger.info("GlobAndListFiles: Starting to glob level %d", level)
    glob = tf.io.gfile.glob(current_glob_string)
    logger.info(
        "GlobAndListFiles: %d files glob-ed at level %d", len(glob), level
    )

    if not glob:
        # This subdirectory level lacks files. Terminate.
        return

    # Map each subdirectory to the list of files it directly contains.
    pairs = collections.defaultdict(list)
    for file_path in glob:
        pairs[os.path.dirname(file_path)].append(file_path)
    for dir_name, file_paths in six.iteritems(pairs):
        yield (dir_name, tuple(file_paths))

    if len(pairs) == 1:
        # All files sit under a single directory; narrow the glob root to
        # that directory before descending, so deeper globs stay cheap.
        current_glob_string = list(pairs.keys())[0]

    # Descend one level: foo/* becomes foo/*/*, and so on.
    current_glob_string = os.path.join(current_glob_string, "*")
    level += 1
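# Hedged sketch of the _EscapeGlobCharacters helper used above: glob treats
# *, ?, and [ as special, so each is wrapped in brackets; a Windows drive
# prefix, if present, is left untouched.
import os
import re

_ESCAPE_GLOB_CHARACTERS_REGEX = re.compile("([*?[])")

def _EscapeGlobCharacters(path):
    """Escapes the glob metacharacters in a path."""
    drive, tail = os.path.splitdrive(path)
    return drive + _ESCAPE_GLOB_CHARACTERS_REGEX.sub(r"[\1]", tail)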