I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions

View File

@@ -0,0 +1,35 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Central API entry point for summary operations.
This module exposes summary ops for the standard TensorBoard plugins.
"""
# If the V1 summary API is accessible, load and re-export it here.
try:
from tensorboard.summary import v1 # noqa: F401
except ImportError:
pass
# Load the V2 summary API if accessible.
try:
from tensorboard.summary import v2 # noqa: F401
from tensorboard.summary.v2 import * # noqa: F401
except ImportError:
pass
from tensorboard.summary._output import DirectoryOutput # noqa: F401
from tensorboard.summary._output import Output # noqa: F401
from tensorboard.summary._writer import Writer # noqa: F401
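# A usage sketch of what this module exposes once imported (assuming a
# standard TensorBoard install where the optional v1/v2 imports succeed):
#
#   from tensorboard import summary
#   summary.v1.scalar_pb(...)     # V1 proto-building ops, if importable
#   summary.scalar(...)           # V2 ops re-exported at top level via `import *`
#   summary.Writer("/tmp/logs")   # framework-independent writer, always imported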

View File

@@ -0,0 +1,124 @@
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generalized output options for writing tensor-formatted summary data."""
from tensorboard.compat.proto import event_pb2
from tensorboard.compat.proto import summary_pb2
from tensorboard.summary.writer import event_file_writer
from tensorboard.util import tensor_util
import abc
class Output(abc.ABC):
"""Interface for emitting tensor-formatted summary data.
Implementations of this interface can be passed to Writer to customize
how summary data is actually persisted (e.g. to disk, to memory, over
the network, etc.).
TODO(#4581): This API should be considered EXPERIMENTAL and subject to
backwards-incompatible changes without notice.
"""
@abc.abstractmethod
def emit_scalar(
self,
*,
plugin_name,
tag,
data,
step,
wall_time,
tag_metadata=None,
description=None,
):
"""Emits one scalar data point to this Output.
Args:
plugin_name: string name to uniquely identify the type of time series
(historically associated with a TensorBoard plugin).
tag: string tag used to uniquely identify this time series.
data: `np.float32` scalar value for this data point.
step: `np.int64` scalar step value for this data point.
wall_time: `float` seconds since the Unix epoch, representing the
real-world timestamp for this data point.
tag_metadata: optional bytes containing metadata for this entire time
series. This should be constant for a given tag; only the first
value encountered will be used.
description: optional string description for this entire time series.
This should be constant for a given tag; only the first value
encountered will be used.
"""
pass
@abc.abstractmethod
def flush(self):
"""Flushes any data that has been buffered."""
pass
@abc.abstractmethod
def close(self):
"""Closes the Output and also flushes any buffered data."""
pass
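# A minimal sketch (not part of the original file) of a custom Output that
# buffers scalars in memory, e.g. for tests; all names below are illustrative.
class _InMemoryOutput(Output):
    """Collects emitted scalar points in a plain list."""
    def __init__(self):
        self.points = []
    def emit_scalar(
        self,
        *,
        plugin_name,
        tag,
        data,
        step,
        wall_time,
        tag_metadata=None,
        description=None,
    ):
        """See `Output`."""
        # Store each point as a plain tuple instead of an event proto.
        self.points.append(
            (plugin_name, tag, float(data), int(step), wall_time)
        )
    def flush(self):
        """See `Output`."""
        pass  # Nothing is buffered outside of self.points.
    def close(self):
        """See `Output`."""
        pass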
class DirectoryOutput(Output):
"""Outputs summary data by writing event files to a log directory.
TODO(#4581): This API should be considered EXPERIMENTAL and subject to
backwards-incompatible changes without notice.
"""
def __init__(self, path):
"""Creates a `DirectoryOutput` for the given path."""
self._ev_writer = event_file_writer.EventFileWriter(path)
def emit_scalar(
self,
*,
plugin_name,
tag,
data,
step,
wall_time,
tag_metadata=None,
description=None,
):
"""See `Output`."""
# TODO(#4581): cache summary metadata to emit only once.
summary_metadata = summary_pb2.SummaryMetadata(
plugin_data=summary_pb2.SummaryMetadata.PluginData(
plugin_name=plugin_name, content=tag_metadata
),
summary_description=description,
data_class=summary_pb2.DataClass.DATA_CLASS_SCALAR,
)
tensor_proto = tensor_util.make_tensor_proto(data)
event = event_pb2.Event(wall_time=wall_time, step=step)
event.summary.value.add(
tag=tag, tensor=tensor_proto, metadata=summary_metadata
)
self._ev_writer.add_event(event)
def flush(self):
"""See `Output`."""
self._ev_writer.flush()
def close(self):
"""See `Output`."""
        # No need to call flush first, since EventFileWriter will
        # do this for us when we call close().
self._ev_writer.close()
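# A minimal usage sketch (assuming a writable directory); "scalars" mirrors
# the plugin name that Writer passes in via scalars_metadata.PLUGIN_NAME.
if __name__ == "__main__":
    import time
    import numpy as np
    out = DirectoryOutput("/tmp/demo_logs")
    out.emit_scalar(
        plugin_name="scalars",
        tag="loss",
        data=np.float32(0.25),
        step=np.int64(1),
        wall_time=time.time(),
    )
    out.close()  # close() also flushes buffered events.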

View File

@@ -0,0 +1,14 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

View File

@@ -0,0 +1,179 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# NOTE: This module exists to provide the `tf.summary` module in the TensorFlow
# API using symbols defined in TensorBoard. This works via a mechanism within
# TensorFlow's API construction logic called "component_api_helper" that imports
# an arbitrary module and inserts it into the TF APIs as a "component API". That
# logic is specifically hardcoded to look for this exact tensorboard module.
#
# This note is in a comment, not the module docstring, because the module
# docstring below is what users will see as the tf.summary docstring and in the
# generated API documentation, and this is just an implementation detail.
"""Operations for writing summary data, for use in analysis and visualization.
The `tf.summary` module provides APIs for writing summary data. This data can be
visualized in TensorBoard, the visualization toolkit that comes with TensorFlow.
See the [TensorBoard website](https://www.tensorflow.org/tensorboard) for more
detailed tutorials about how to use these APIs, or some quick examples below.
Example usage with eager execution, the default in TF 2.0:
```python
writer = tf.summary.create_file_writer("/tmp/mylogs")
with writer.as_default():
for step in range(100):
# other model code would go here
tf.summary.scalar("my_metric", 0.5, step=step)
writer.flush()
```
Example usage with `tf.function` graph execution:
```python
writer = tf.summary.create_file_writer("/tmp/mylogs")
@tf.function
def my_func(step):
# other model code would go here
with writer.as_default():
tf.summary.scalar("my_metric", 0.5, step=step)
for step in range(100):
my_func(step)
writer.flush()
```
Example usage with legacy TF 1.x graph execution:
```python
with tf.compat.v1.Graph().as_default():
step = tf.Variable(0, dtype=tf.int64)
step_update = step.assign_add(1)
writer = tf.summary.create_file_writer("/tmp/mylogs")
with writer.as_default():
tf.summary.scalar("my_metric", 0.5, step=step)
all_summary_ops = tf.compat.v1.summary.all_v2_summary_ops()
writer_flush = writer.flush()
sess = tf.compat.v1.Session()
sess.run([writer.init(), step.initializer])
for i in range(100):
sess.run(all_summary_ops)
sess.run(step_update)
sess.run(writer_flush)
```
"""
# Keep this import outside the function below for internal sync reasons.
import tensorflow as tf
def reexport_tf_summary():
"""Re-export all symbols from the original tf.summary.
This function finds the original tf.summary V2 API and re-exports all the
symbols from it within this module as well, so that when this module is
patched into the TF API namespace as the new tf.summary, the effect is an
overlay that just adds TensorBoard-provided symbols to the module.
Finding the original tf.summary V2 API module reliably is a challenge, since
this code runs *during* the overall TF API import process and depending on
the order of imports (which is subject to change), different parts of the API
may or may not be defined at the point in time we attempt to access them. This
code also may be inserted into two places in the API (tf and tf.compat.v2)
and may be re-executed multiple times even for the same place in the API (due
to the TF module import system not populating sys.modules properly), so it
needs to be robust to many different scenarios.
The one constraint we can count on is that everywhere this module is loaded
(via the component_api_helper mechanism in TF), it's going to be the 'summary'
submodule of a larger API package that already has a 'summary' attribute
that contains the TF-only summary API symbols we need to re-export. This
may either be the original TF-only summary module (the first time we load
this module) or a pre-existing copy of this module (if we're re-loading this
module again). We don't actually need to differentiate those two cases,
because it's okay if we re-import our own TensorBoard-provided symbols; they
will just be overwritten later on in this file.
So given that guarantee, the approach we take is to first attempt to locate
a TF V2 API package that already has a 'summary' attribute (most likely this
is the parent package into which we're being imported, but not necessarily),
and then do the dynamic version of "from tf_api_package.summary import *".
Lastly, this logic is encapsulated in a function to avoid symbol leakage.
"""
import sys
# API packages to check for the original V2 summary API, in preference order
# to avoid going "under the hood" to the _api packages unless necessary.
# Skip the top-level `tensorflow` package since it's hard to confirm that it
# is the actual v2 API (just checking tf.__version__ is not always enough).
packages = [
"tensorflow.compat.v2",
"tensorflow_core._api.v2",
"tensorflow_core._api.v2.compat.v2",
"tensorflow_core._api.v1.compat.v2",
# Old names for `tensorflow_core._api.*`.
"tensorflow._api.v2",
"tensorflow._api.v2.compat.v2",
"tensorflow._api.v1.compat.v2",
]
def dynamic_wildcard_import(module):
"""Implements the logic of "from module import *" for the given
module."""
symbols = getattr(module, "__all__", None)
if symbols is None:
symbols = [
k for k in module.__dict__.keys() if not k.startswith("_")
]
globals().update(
{symbol: getattr(module, symbol) for symbol in symbols}
)
notfound = object() # sentinel value
for package_name in packages:
package = sys.modules.get(package_name, notfound)
if package is notfound:
# Either it isn't in this installation at all (e.g. the _api.vX packages
# are only in API version X), it isn't imported yet, or it was imported
# but not inserted into sys.modules under its user-facing name (for the
# non-'_api' packages), at which point we continue down the list to look
# "under the hood" for it via its '_api' package name.
continue
module = getattr(package, "summary", None)
if module is None:
# This happens if the package hasn't been fully imported yet. For example,
# the 'tensorflow' package won't yet have 'summary' attribute if we are
# loading this code via the 'tensorflow.compat...' path and 'compat' is
# imported before 'summary' in the 'tensorflow' __init__.py file.
continue
# Success, we hope. Import all the public symbols into this module.
dynamic_wildcard_import(module)
return
reexport_tf_summary()
from tensorboard.summary.v2 import audio # noqa: F401
from tensorboard.summary.v2 import histogram # noqa: F401
from tensorboard.summary.v2 import image # noqa: F401
from tensorboard.summary.v2 import scalar # noqa: F401
from tensorboard.summary.v2 import text # noqa: F401
del tf, reexport_tf_summary
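# A self-contained sketch of what dynamic_wildcard_import does when a module
# defines no __all__ (synthetic module; names here are illustrative only):
#
#   import types
#   m = types.ModuleType("fake_summary")
#   m.scalar = lambda *a, **kw: None
#   m._private = "hidden"
#   public = [k for k in m.__dict__ if not k.startswith("_")]
#   assert public == ["scalar"]  # underscore-prefixed names are skipped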

View File

@@ -0,0 +1,105 @@
# Copyright 2021 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementation for tensorboard.summary.Writer and related symbols.
This provides a TensorBoard-native summary writing API that only depends
on numpy and not any particular ML framework.
"""
import time
import numpy as np
from tensorboard.plugins.scalar import metadata as scalars_metadata
from tensorboard.summary import _output
class Writer:
"""Writes summary data for visualization in TensorBoard.
This class is not thread-safe.
TODO(#4581): This API should be considered EXPERIMENTAL and subject to
backwards-incompatible changes without notice.
"""
def __init__(self, output):
"""Constructs a Writer.
Args:
output: `tensorboard.summary.Output` object, or a string which will be
interpreted as shorthand for an `Output` of the appropriate type. The
only currently supported type is `DirectoryOutput`, where the string
value given here will be used as the directory path.
"""
if isinstance(output, _output.Output):
self._output = output
elif isinstance(output, str):
self._output = _output.DirectoryOutput(output)
else:
raise TypeError("Unsupported output object %r" % output)
self._closed = False
def _check_not_closed(self):
if self._closed:
raise RuntimeError("Writer is already closed")
def flush(self):
"""Flushes any buffered data."""
self._check_not_closed()
self._output.flush()
def close(self):
"""Closes the writer and prevents further use."""
self._check_not_closed()
self._output.close()
self._closed = True
def add_scalar(self, tag, data, step, *, wall_time=None, description=None):
"""Adds a scalar summary.
Args:
tag: string tag used to uniquely identify this time series.
data: numeric scalar value for this data point. Accepts any value that
can be converted to a `np.float32` scalar.
step: integer step value for this data point. Accepts any value that
can be converted to a `np.int64` scalar.
wall_time: optional `float` seconds since the Unix epoch, representing
the real-world timestamp for this data point. Defaults to None in
which case the current time will be used.
description: optional string description for this entire time series.
This should be constant for a given tag; only the first value
encountered will be used.
"""
self._check_not_closed()
validated_data = _validate_scalar_shape(np.float32(data), "data")
validated_step = _validate_scalar_shape(np.int64(step), "step")
wall_time = wall_time if wall_time is not None else time.time()
self._output.emit_scalar(
plugin_name=scalars_metadata.PLUGIN_NAME,
tag=tag,
data=validated_data,
step=validated_step,
wall_time=wall_time,
description=description,
)
def _validate_scalar_shape(ndarray, name):
if ndarray.ndim != 0:
raise ValueError(
"Expected scalar value for %r but got %r" % (name, ndarray)
)
return ndarray
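# A minimal usage sketch (assuming a writable directory); per the constructor
# above, a string output is shorthand for a DirectoryOutput at that path.
if __name__ == "__main__":
    writer = Writer("/tmp/demo_logs")
    for i in range(3):
        writer.add_scalar("loss", 1.0 / (i + 1), step=i)
    writer.close()  # The underlying Output flushes and closes here.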

View File

@@ -0,0 +1,52 @@
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Central API entry point for v1 versions of summary operations.
This module simply offers a shorter way to access the members of modules
like `tensorboard.plugins.scalar.summary`.
"""
from tensorboard.plugins.audio import summary as _audio_summary
from tensorboard.plugins.custom_scalar import summary as _custom_scalar_summary
from tensorboard.plugins.histogram import summary as _histogram_summary
from tensorboard.plugins.image import summary as _image_summary
from tensorboard.plugins.pr_curve import summary as _pr_curve_summary
from tensorboard.plugins.scalar import summary as _scalar_summary
from tensorboard.plugins.text import summary as _text_summary
audio = _audio_summary.op
audio_pb = _audio_summary.pb
custom_scalar = _custom_scalar_summary.op
custom_scalar_pb = _custom_scalar_summary.pb
histogram = _histogram_summary.op
histogram_pb = _histogram_summary.pb
image = _image_summary.op
image_pb = _image_summary.pb
pr_curve = _pr_curve_summary.op
pr_curve_pb = _pr_curve_summary.pb
pr_curve_streaming_op = _pr_curve_summary.streaming_op
pr_curve_raw_data_op = _pr_curve_summary.raw_data_op
pr_curve_raw_data_pb = _pr_curve_summary.raw_data_pb
scalar = _scalar_summary.op
scalar_pb = _scalar_summary.pb
text = _text_summary.op
text_pb = _text_summary.pb
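# A usage sketch of the graph-free `*_pb` variants, which build `Summary`
# protos directly without a TF session (signature assumed from the scalar
# plugin's summary module):
#
#   from tensorboard.summary import v1
#   summary_proto = v1.scalar_pb("loss", 0.25)  # returns a Summary proto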

View File

@@ -0,0 +1,25 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Central API entry point for v2 versions of summary operations.
This module exposes v2 summary ops for the standard TensorBoard plugins.
"""
# pylint: disable=unused-import
from tensorboard.plugins.audio.summary_v2 import audio # noqa: F401
from tensorboard.plugins.histogram.summary_v2 import histogram # noqa: F401
from tensorboard.plugins.image.summary_v2 import image # noqa: F401
from tensorboard.plugins.scalar.summary_v2 import scalar # noqa: F401
from tensorboard.plugins.text.summary_v2 import text # noqa: F401
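# A usage sketch with TensorFlow 2.x; these ops write through the default
# summary writer, so they must run inside a writer's as_default() scope:
#
#   import tensorflow as tf
#   from tensorboard.summary import v2
#   writer = tf.summary.create_file_writer("/tmp/logs")
#   with writer.as_default():
#       v2.scalar("loss", 0.25, step=1)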

View File

@@ -0,0 +1,13 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

View File

@@ -0,0 +1,292 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Writes events to disk in a logdir."""
import os
import queue
import socket
import threading
import time
from tensorboard.compat import tf
from tensorboard.compat.proto import event_pb2
from tensorboard.summary.writer.record_writer import RecordWriter
class AtomicCounter:
def __init__(self, initial_value):
self._value = initial_value
self._lock = threading.Lock()
def get(self):
with self._lock:
try:
return self._value
finally:
self._value += 1
_global_uid = AtomicCounter(0)
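# For example, with c = AtomicCounter(0), successive c.get() calls return
# 0, 1, 2, ...: the try/finally returns the current value and then
# increments it, all while holding the lock.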
class EventFileWriter:
"""Writes `Event` protocol buffers to an event file.
The `EventFileWriter` class creates an event file in the specified
directory, and asynchronously writes Event protocol buffers to the
file. The Event file is encoded using the tfrecord format, which is
similar to RecordIO.
"""
def __init__(
self, logdir, max_queue_size=10, flush_secs=120, filename_suffix=""
):
"""Creates a `EventFileWriter` and an event file to write to.
On construction the summary writer creates a new event file in `logdir`.
This event file will contain `Event` protocol buffers, which are written to
disk via the add_event method.
The other arguments to the constructor control the asynchronous writes to
the event file:
        Args:
          logdir: A string. Directory where the event file will be written.
          max_queue_size: Integer. Size of the queue for pending events and
            summaries.
          flush_secs: Number. How often, in seconds, to flush the
            pending events and summaries to disk.
          filename_suffix: A string. Suffix to append to the event file name.
"""
self._logdir = logdir
tf.io.gfile.makedirs(logdir)
self._file_name = (
os.path.join(
logdir,
"events.out.tfevents.%010d.%s.%s.%s"
% (
time.time(),
socket.gethostname(),
os.getpid(),
_global_uid.get(),
),
)
+ filename_suffix
) # noqa E128
self._general_file_writer = tf.io.gfile.GFile(self._file_name, "wb")
self._async_writer = _AsyncWriter(
RecordWriter(self._general_file_writer), max_queue_size, flush_secs
)
# Initialize an event instance.
_event = event_pb2.Event(
wall_time=time.time(),
file_version="brain.Event:2",
source_metadata=event_pb2.SourceMetadata(
writer="tensorboard.summary.writer.event_file_writer"
),
)
self.add_event(_event)
self.flush()
def get_logdir(self):
"""Returns the directory where event file will be written."""
return self._logdir
def add_event(self, event):
"""Adds an event to the event file.
Args:
event: An `Event` protocol buffer.
"""
if not isinstance(event, event_pb2.Event):
raise TypeError(
"Expected an event_pb2.Event proto, "
" but got %s" % type(event)
)
self._async_writer.write(event.SerializeToString())
def flush(self):
"""Flushes the event file to disk.
Call this method to make sure that all pending events have been
written to disk.
"""
self._async_writer.flush()
def close(self):
"""Performs a final flush of the event file to disk, stops the
write/flush worker and closes the file.
Call this method when you do not need the summary writer
anymore.
"""
self._async_writer.close()
class _AsyncWriter:
"""Writes bytes to a file."""
def __init__(self, record_writer, max_queue_size=20, flush_secs=120):
"""Writes bytes to a file asynchronously. An instance of this class
holds a queue to keep the incoming data temporarily. Data passed to the
`write` function will be put to the queue and the function returns
immediately. This class also maintains a thread to write data in the
queue to disk. The first initialization parameter is an instance of
`tensorboard.summary.record_writer` which computes the CRC checksum and
then write the combined result to the disk. So we use an async approach
to improve performance.
Args:
record_writer: A RecordWriter instance
max_queue_size: Integer. Size of the queue for pending bytestrings.
flush_secs: Number. How often, in seconds, to flush the
pending bytestrings to disk.
"""
self._writer = record_writer
self._closed = False
self._byte_queue = queue.Queue(max_queue_size)
self._worker = _AsyncWriterThread(
self._byte_queue, self._writer, flush_secs
)
self._lock = threading.Lock()
self._worker.start()
def write(self, bytestring):
"""Enqueue the given bytes to be written asychronously."""
with self._lock:
# Status of the worker should be checked under the lock to avoid
# multiple threads passing the check and then switching just before
# blocking on putting to the queue which might result in a deadlock.
self._check_worker_status()
if self._closed:
raise IOError("Writer is closed")
self._byte_queue.put(bytestring)
# Check the status again in case the background worker thread has
# failed in the meantime to avoid waiting until the next call to
# surface the error.
self._check_worker_status()
def flush(self):
"""Write all the enqueued bytestring before this flush call to disk.
Block until all the above bytestring are written.
"""
with self._lock:
self._check_worker_status()
if self._closed:
raise IOError("Writer is closed")
self._byte_queue.join()
self._writer.flush()
# Check the status again in case the background worker thread has
# failed in the meantime to avoid waiting until the next call to
# surface the error.
self._check_worker_status()
def close(self):
"""Closes the underlying writer, flushing any pending writes first."""
if not self._closed:
with self._lock:
if not self._closed:
self._closed = True
self._worker.stop()
self._writer.flush()
self._writer.close()
def _check_worker_status(self):
"""Makes sure the worker thread is still running and raises exception
thrown in the worker thread otherwise.
"""
exception = self._worker.exception
if exception is not None:
raise exception
class _AsyncWriterThread(threading.Thread):
"""Thread that processes asynchronous writes for _AsyncWriter."""
def __init__(self, queue, record_writer, flush_secs):
"""Creates an _AsyncWriterThread.
Args:
queue: A Queue from which to dequeue data.
            record_writer: A RecordWriter instance used to write dequeued data.
flush_secs: How often, in seconds, to flush the
pending file to disk.
"""
threading.Thread.__init__(self)
self.daemon = True
self.exception = None
self._queue = queue
self._record_writer = record_writer
self._flush_secs = flush_secs
# The first data will be flushed immediately.
self._next_flush_time = 0
self._has_pending_data = False
self._shutdown_signal = object()
def stop(self):
self._queue.put(self._shutdown_signal)
self.join()
def run(self):
try:
self._run()
except Exception as ex:
self.exception = ex
try:
# In case there's a thread blocked on putting an item into the
# queue or a thread blocked on flushing, pop all items from the
# queue to let the foreground thread proceed.
while True:
self._queue.get(False)
self._queue.task_done()
except queue.Empty:
pass
raise
def _run(self):
        # Wait on the queue until data appears, or until the next scheduled
        # time to flush the writer, whichever is earlier. If data arrives,
        # write it; otherwise a queue.Empty exception is raised and we
        # proceed to flush the writer.
while True:
now = time.time()
queue_wait_duration = self._next_flush_time - now
data = None
try:
if queue_wait_duration > 0:
data = self._queue.get(True, queue_wait_duration)
else:
data = self._queue.get(False)
if data is self._shutdown_signal:
return
self._record_writer.write(data)
self._has_pending_data = True
except queue.Empty:
pass
finally:
                if data is not None:
self._queue.task_done()
now = time.time()
if now > self._next_flush_time:
if self._has_pending_data:
                    # Small optimization: if there is no pending data,
# there's no need to flush, since each flush can be
# expensive (e.g. uploading a new file to a server).
self._record_writer.flush()
self._has_pending_data = False
# Do it again in flush_secs.
self._next_flush_time = now + self._flush_secs
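# A minimal usage sketch (assuming a writable log directory):
if __name__ == "__main__":
    writer = EventFileWriter("/tmp/demo_logs", flush_secs=1)
    event = event_pb2.Event(wall_time=time.time(), step=0)
    event.summary.value.add(tag="note", simple_value=1.0)
    writer.add_event(event)
    writer.close()  # Final flush; stops the worker and closes the file.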

View File

@@ -0,0 +1,50 @@
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import struct
from tensorboard.compat.tensorflow_stub.pywrap_tensorflow import masked_crc32c
class RecordWriter:
"""Write encoded protobuf to a file with packing defined in tensorflow."""
def __init__(self, writer):
"""Open a file to keep the tensorboard records.
Args:
writer: A file-like object that implements `write`, `flush` and `close`.
"""
self._writer = writer
# Format of a single record: (little-endian)
# uint64 length
# uint32 masked crc of length
# byte data[length]
# uint32 masked crc of data
def write(self, data):
header = struct.pack("<Q", len(data))
header_crc = struct.pack("<I", masked_crc32c(header))
footer_crc = struct.pack("<I", masked_crc32c(data))
self._writer.write(header + header_crc + data + footer_crc)
def flush(self):
self._writer.flush()
def close(self):
self._writer.close()
@property
def closed(self):
return self._writer.closed
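# A read-side sketch of the record format documented above: uint64 length,
# masked CRC32C of the length, payload bytes, masked CRC32C of the payload,
# all little-endian. CRC verification is skipped here for brevity.
def _read_record(stream):
    header = stream.read(8)
    if len(header) < 8:
        return None  # end of stream
    (length,) = struct.unpack("<Q", header)
    stream.read(4)  # skip the masked CRC of the length
    data = stream.read(length)
    stream.read(4)  # skip the masked CRC of the data
    return data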