import platform
import signal
import sys
import threading
from six.moves import xrange # pylint:disable=redefined-builtin
import tensorflow as tf
from werkzeug import wrappers
from tensorboard.backend import http_util
from tensorboard.plugins import base_plugin
from tensorboard.plugins.debugger import constants
from tensorboard.plugins.debugger import interactive_debugger_server_lib
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
# HTTP routes.
_ACK_ROUTE = "/ack"
_COMM_ROUTE = "/comm"
_DEBUGGER_GRAPH_ROUTE = "/debugger_graph"
_DEBUGGER_GRPC_HOST_PORT_ROUTE = "/debugger_grpc_host_port"
_GATED_GRPC_ROUTE = "/gated_grpc"
_TENSOR_DATA_ROUTE = "/tensor_data"
_SOURCE_CODE_ROUTE = "/source_code"
class InteractiveDebuggerPlugin(base_plugin.TBPlugin):
"""Interactive TensorFlow Debugger plugin.
This underlies the interactive Debugger Dashboard.
    """
"""Information about histogram summaries."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorboard.compat.proto import summary_pb2
from tensorboard.plugins.histogram import plugin_data_pb2
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
PLUGIN_NAME = "histograms"
# The most recent value for the `version` field of the
# `HistogramPluginData` proto.
PROTO_VERSION = 0
def create_summary_metadata(display_name, description):
"""Create a `summary_pb2.SummaryMetadata` proto for histogram plugin data.
Returns:
A `summary_pb2.SummaryMetadata` protobuf object.
"""
    content = plugin_data_pb2.HistogramPluginData(version=PROTO_VERSION)
    return summary_pb2.SummaryMetadata(
        display_name=display_name,
        summary_description=description,
        plugin_data=summary_pb2.SummaryMetadata.PluginData(
            plugin_name=PLUGIN_NAME, content=content.SerializeToString()
        ),
    )
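# Illustrative usage sketch (not part of the original module): attach the
# metadata returned above to a `Summary.Value` that carries histogram data as
# a tensor. The tag, display name, and values here are made up for the example.
def _demo_histogram_metadata():
    """Builds a tiny Summary proto carrying the histogram plugin metadata."""
    from tensorboard.util import tensor_util  # local import for the sketch

    metadata = create_summary_metadata(
        display_name="weights", description="Layer weight distribution."
    )
    summary = summary_pb2.Summary()
    summary.value.add(
        tag="weights/histogram_summary",
        metadata=metadata,
        tensor=tensor_util.make_tensor_proto([[0.0, 1.0, 3.0]]),
    )
    return summary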
from __future__ import print_function
import base64
import collections
import json
import six
from tensorboard import errors
from tensorboard.backend.event_processing import plugin_event_accumulator
from tensorboard.data import provider
from tensorboard.plugins.graph import metadata as graphs_metadata
from tensorboard.util import tb_logging
from tensorboard.util import tensor_util
logger = tb_logging.get_logger()
class MultiplexerDataProvider(provider.DataProvider):
def __init__(self, multiplexer, logdir):
"""Trivial initializer.
Args:
multiplexer: A `plugin_event_multiplexer.EventMultiplexer` (note:
not a boring old `event_multiplexer.EventMultiplexer`).
logdir: The log directory from which data is being read. Only used
cosmetically. Should be a `str`.
"""
self._multiplexer = multiplexer
self._logdir = logdir
    def _validate_experiment_id(self, experiment_id):
        # This provider does not consume the experiment ID, but require a
        # string so that obvious caller mistakes fail fast.
        if not isinstance(experiment_id, six.string_types):
            raise TypeError(
                "experiment_id must be a string, but got: %r" % (experiment_id,)
            )
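# Illustrative construction sketch (not part of the original module); the
# logdir path and names below are assumptions for the example.
def _demo_build_provider(logdir="/tmp/logs"):
    """Builds a provider backed by a plugin EventMultiplexer."""
    from tensorboard.backend.event_processing import plugin_event_multiplexer

    multiplexer = plugin_event_multiplexer.EventMultiplexer()
    multiplexer.AddRunsFromDirectory(logdir)
    multiplexer.Reload()
    return MultiplexerDataProvider(multiplexer, logdir)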
import collections
import json
from six.moves import queue
import tensorflow as tf
from tensorboard.plugins.debugger import comm_channel as comm_channel_lib
from tensorboard.plugins.debugger import debug_graphs_helper
from tensorboard.plugins.debugger import tensor_helper
from tensorboard.plugins.debugger import tensor_store as tensor_store_lib
from tensorboard.util import tb_logging
from tensorflow.core.debug import debug_service_pb2
from tensorflow.python import debug as tf_debug
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.debug.lib import grpc_debug_server
logger = tb_logging.get_logger()
RunKey = collections.namedtuple(
"RunKey", ["input_names", "output_names", "target_nodes"]
)
def _extract_device_name_from_event(event):
"""Extract device name from a tf.Event proto carrying tensor value."""
plugin_data_content = json.loads(
tf.compat.as_str(event.summary.value[0].metadata.plugin_data.content)
)
return plugin_data_content["device"]
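# Illustrative sketch (not part of the original module): build a minimal Event
# proto of the shape this helper expects and read back the device name. The
# tag and device string are made-up examples.
def _demo_extract_device_name():
    event = tf.compat.v1.Event()
    value = event.summary.value.add(tag="weights:0")
    value.metadata.plugin_data.content = tf.compat.as_bytes(
        json.dumps({"device": "/job:localhost/replica:0/task:0/device:CPU:0"})
    )
    return _extract_device_name_from_event(event)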
def _comm_metadata(run_key, timestamp):
    return {
        "type": "meta",
        "timestamp": timestamp,
        "data": {"run_key": run_key},
    }
import os
import threading
import six
from six.moves import queue, xrange # pylint: disable=redefined-builtin
from tensorboard.backend.event_processing import directory_watcher
from tensorboard.backend.event_processing import (
plugin_event_accumulator as event_accumulator,
)
from tensorboard.backend.event_processing import io_wrapper
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
class EventMultiplexer(object):
"""An `EventMultiplexer` manages access to multiple `EventAccumulator`s.
Each `EventAccumulator` is associated with a `run`, which is a self-contained
TensorFlow execution. The `EventMultiplexer` provides methods for extracting
information about events from multiple `run`s.
Example usage for loading specific runs from files:
```python
x = EventMultiplexer({'run1': 'path/to/run1', 'run2': 'path/to/run2'})
x.Reload()
```
    """
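    # Illustrative usage sketch (not from the original docstring): load every
    # run found under a log directory, then read data back. The logdir path,
    # run name, and tag below are made-up examples.
    #
    #   x = EventMultiplexer()
    #   x.AddRunsFromDirectory("/tmp/logs")
    #   x.Reload()
    #   runs = x.Runs()
    #   events = x.Tensors("run1", "loss")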
"""Sample text summaries exhibiting all the text plugin features."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl import app
from absl import logging
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
# Directory into which to write tensorboard data.
LOGDIR = "/tmp/text_demo"
# Number of steps for which to write data.
STEPS = 16
def simple_example(step):
# Text summaries log arbitrary text. This can be encoded with ASCII or
# UTF-8. Here's a simple example, wherein we greet the user on each
# step:
step_string = tf.as_string(step)
greeting = tf.strings.join(["Hello from step ", step_string, "!"])
tf.compat.v1.summary.text("greeting", greeting)
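# Illustrative driver sketch (not part of the original demo): evaluate the
# summary op defined by `simple_example` for each step and write the results
# to LOGDIR with a TF1-style FileWriter. The placeholder and variable names
# here are made up for the example.
def _demo_write_simple_example():
    with tf.compat.v1.Graph().as_default():
        step_placeholder = tf.compat.v1.placeholder(tf.int64, shape=[])
        simple_example(step_placeholder)
        merged = tf.compat.v1.summary.merge_all()
        writer = tf.compat.v1.summary.FileWriter(LOGDIR)
        with tf.compat.v1.Session() as sess:
            for step in xrange(STEPS):
                summ = sess.run(merged, feed_dict={step_placeholder: step})
                writer.add_summary(summ, global_step=step)
        writer.close()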
from __future__ import print_function
import locale
import logging
import os
import re
import sys
import time
from absl import logging as absl_logging
import six
from tensorboard.compat import tf
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
# TODO(stephanwlee): Move this to program.py
def setup_logging():
"""Configures Python logging the way the TensorBoard team likes it.
This should be called exactly once at the beginning of main().
"""
# TODO(stephanwlee): Check the flag passed from CLI and set it to WARN only
# if it was not explicitly set
absl_logging.set_verbosity(absl_logging.WARN)
def closeable(class_):
"""Makes a class with a close method able to be a context manager.
import collections
from tensorboard import data_compat
from tensorboard.backend.event_processing import directory_loader
from tensorboard.backend.event_processing import directory_watcher
from tensorboard.backend.event_processing import event_file_loader
from tensorboard.backend.event_processing import io_wrapper
from tensorboard.backend.event_processing import plugin_asset_util
from tensorboard.backend.event_processing import reservoir
from tensorboard.compat import tf
from tensorboard.compat.proto import config_pb2
from tensorboard.compat.proto import event_pb2
from tensorboard.compat.proto import graph_pb2
from tensorboard.compat.proto import meta_graph_pb2
from tensorboard.util import tb_logging
logger = tb_logging.get_logger()
namedtuple = collections.namedtuple
TensorEvent = namedtuple("TensorEvent", ["wall_time", "step", "tensor_proto"])
## The tagTypes below are just arbitrary strings chosen to pass the type
## information of the tag from the backend to the frontend
TENSORS = "tensors"
GRAPH = "graph"
META_GRAPH = "meta_graph"
RUN_METADATA = "run_metadata"
DEFAULT_SIZE_GUIDANCE = {
TENSORS: 500,
}
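# Illustrative sketch (not from the original file): callers can override the
# reservoir size per tag type when constructing an accumulator; the logdir
# path and size below are made-up examples.
#
#   acc = EventAccumulator("/tmp/logs/run1", size_guidance={TENSORS: 100})
#   acc.Reload()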
import collections
import sys
import threading
import time
import types # pylint: disable=unused-import
import six
from tensorboard import db
from tensorboard.compat.proto import event_pb2
from tensorboard.util import tb_logging
from tensorboard.util import platform_util
from tensorboard.util import util
import tensorflow as tf
logger = tb_logging.get_logger()
class Record(collections.namedtuple('Record', ('record', 'offset'))):
"""Value class for a record returned by RecordReader.
Fields:
record: The byte string record that was read.
offset: The byte offset in the file *after* this record was read.
:type record: str
:type offset: int
"""
__slots__ = () # Enforces use of only tuple fields.
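    # Illustrative example (not from the original docstring): a Record pairs
    # the raw bytes with the file offset just past them, e.g.
    #
    #   rec = Record(record=b"\x08\x2a", offset=16)
    #   rec.record, rec.offset  # => (b"\x08\x2a", 16)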
@util.closeable