I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions

tensorboard/compat/__init__.py

@@ -0,0 +1,70 @@
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Compatibility interfaces for TensorBoard.
This module provides logic for importing variations on the TensorFlow
APIs, as lazily loaded imports to help avoid circular dependency issues
and defer the search and loading of the module until necessary.
"""
import tensorboard.lazy as _lazy
@_lazy.lazy_load("tensorboard.compat.tf")
def tf():
"""Provide the root module of a TF-like API for use within TensorBoard.
By default this is equivalent to `import tensorflow as tf`, but it can be used
in combination with //tensorboard/compat:tensorflow (to fall back to a stub TF
API implementation if the real one is not available) or with
//tensorboard/compat:no_tensorflow (to force unconditional use of the stub).
Returns:
The root module of a TF-like API, if available.
Raises:
ImportError: if a TF-like API is not available.
"""
try:
from tensorboard.compat import notf # noqa: F401
except ImportError:
try:
import tensorflow
return tensorflow
except ImportError:
pass
from tensorboard.compat import tensorflow_stub
return tensorflow_stub
@_lazy.lazy_load("tensorboard.compat.tf2")
def tf2():
"""Provide the root module of a TF-2.0 API for use within TensorBoard.
Returns:
The root module of a TF-2.0 API, if available.
Raises:
ImportError: if a TF-2.0 API is not available.
"""
# Resolve the lazy `tf` compat API from earlier in this file and try to find
# tf.compat.v2. Don't check tf.__version__ since this is not always reliable
# if TF was built with tf_api_version!=2.
if hasattr(tf, "compat") and hasattr(tf.compat, "v2"):
return tf.compat.v2
raise ImportError("cannot import tensorflow 2.0 API")

tensorboard/compat/proto/allocation_description_pb2.py

@@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/allocation_description.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5tensorboard/compat/proto/allocation_description.proto\x12\x0btensorboard\"\xa3\x01\n\x15\x41llocationDescription\x12\x17\n\x0frequested_bytes\x18\x01 \x01(\x03\x12\x17\n\x0f\x61llocated_bytes\x18\x02 \x01(\x03\x12\x16\n\x0e\x61llocator_name\x18\x03 \x01(\t\x12\x15\n\rallocation_id\x18\x04 \x01(\x03\x12\x1c\n\x14has_single_reference\x18\x05 \x01(\x08\x12\x0b\n\x03ptr\x18\x06 \x01(\x04\x42\x9b\x01\n\x18org.tensorflow.frameworkB\x1b\x41llocationDescriptionProtosP\x01Z]github.com/tensorflow/tensorflow/tensorflow/go/core/framework/allocation_description_go_proto\xf8\x01\x01\x62\x06proto3')
_ALLOCATIONDESCRIPTION = DESCRIPTOR.message_types_by_name['AllocationDescription']
AllocationDescription = _reflection.GeneratedProtocolMessageType('AllocationDescription', (_message.Message,), {
'DESCRIPTOR' : _ALLOCATIONDESCRIPTION,
'__module__' : 'tensorboard.compat.proto.allocation_description_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AllocationDescription)
})
_sym_db.RegisterMessage(AllocationDescription)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\033AllocationDescriptionProtosP\001Z]github.com/tensorflow/tensorflow/tensorflow/go/core/framework/allocation_description_go_proto\370\001\001'
  _ALLOCATIONDESCRIPTION._serialized_start=71
  _ALLOCATIONDESCRIPTION._serialized_end=234
# @@protoc_insertion_point(module_scope)
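
As a reader aid, here is a short, hedged sketch (not part of the diff) of how a generated `_pb2` module like this one is typically used; the field names come from the serialized descriptor above, while the concrete values are made up for illustration.

# Illustrative round-trip through the generated message class.
from tensorboard.compat.proto import allocation_description_pb2

desc = allocation_description_pb2.AllocationDescription(
    requested_bytes=1024,            # values are arbitrary examples
    allocated_bytes=2048,
    allocator_name="example_allocator",
)
payload = desc.SerializeToString()   # protobuf wire-format bytes
restored = allocation_description_pb2.AllocationDescription.FromString(payload)
assert restored.allocated_bytes == 2048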

tensorboard/compat/proto/api_def_pb2.py

@@ -0,0 +1,82 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/api_def.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import attr_value_pb2 as tensorboard_dot_compat_dot_proto_dot_attr__value__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&tensorboard/compat/proto/api_def.proto\x12\x0btensorboard\x1a)tensorboard/compat/proto/attr_value.proto\"\xe7\x05\n\x06\x41piDef\x12\x15\n\rgraph_op_name\x18\x01 \x01(\t\x12\x1b\n\x13\x64\x65precation_message\x18\x0c \x01(\t\x12\x1b\n\x13\x64\x65precation_version\x18\r \x01(\x05\x12\x32\n\nvisibility\x18\x02 \x01(\x0e\x32\x1e.tensorboard.ApiDef.Visibility\x12.\n\x08\x65ndpoint\x18\x03 \x03(\x0b\x32\x1c.tensorboard.ApiDef.Endpoint\x12\'\n\x06in_arg\x18\x04 \x03(\x0b\x32\x17.tensorboard.ApiDef.Arg\x12(\n\x07out_arg\x18\x05 \x03(\x0b\x32\x17.tensorboard.ApiDef.Arg\x12\x11\n\targ_order\x18\x0b \x03(\t\x12&\n\x04\x61ttr\x18\x06 \x03(\x0b\x32\x18.tensorboard.ApiDef.Attr\x12\x0f\n\x07summary\x18\x07 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x08 \x01(\t\x12\x1a\n\x12\x64\x65scription_prefix\x18\t \x01(\t\x12\x1a\n\x12\x64\x65scription_suffix\x18\n \x01(\t\x1aI\n\x08\x45ndpoint\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x12\n\ndeprecated\x18\x03 \x01(\x08\x12\x1b\n\x13\x64\x65precation_version\x18\x04 \x01(\x05\x1a;\n\x03\x41rg\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\trename_to\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x1ak\n\x04\x41ttr\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\trename_to\x18\x02 \x01(\t\x12-\n\rdefault_value\x18\x03 \x01(\x0b\x32\x16.tensorboard.AttrValue\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\"G\n\nVisibility\x12\x16\n\x12\x44\x45\x46\x41ULT_VISIBILITY\x10\x00\x12\x0b\n\x07VISIBLE\x10\x01\x12\x08\n\x04SKIP\x10\x02\x12\n\n\x06HIDDEN\x10\x03\"*\n\x07\x41piDefs\x12\x1f\n\x02op\x18\x01 \x03(\x0b\x32\x13.tensorboard.ApiDefB}\n\x18org.tensorflow.frameworkB\x0c\x41piDefProtosP\x01ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/api_def_go_proto\xf8\x01\x01\x62\x06proto3')
_APIDEF = DESCRIPTOR.message_types_by_name['ApiDef']
_APIDEF_ENDPOINT = _APIDEF.nested_types_by_name['Endpoint']
_APIDEF_ARG = _APIDEF.nested_types_by_name['Arg']
_APIDEF_ATTR = _APIDEF.nested_types_by_name['Attr']
_APIDEFS = DESCRIPTOR.message_types_by_name['ApiDefs']
_APIDEF_VISIBILITY = _APIDEF.enum_types_by_name['Visibility']
ApiDef = _reflection.GeneratedProtocolMessageType('ApiDef', (_message.Message,), {
'Endpoint' : _reflection.GeneratedProtocolMessageType('Endpoint', (_message.Message,), {
'DESCRIPTOR' : _APIDEF_ENDPOINT,
'__module__' : 'tensorboard.compat.proto.api_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ApiDef.Endpoint)
})
,
'Arg' : _reflection.GeneratedProtocolMessageType('Arg', (_message.Message,), {
'DESCRIPTOR' : _APIDEF_ARG,
'__module__' : 'tensorboard.compat.proto.api_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ApiDef.Arg)
})
,
'Attr' : _reflection.GeneratedProtocolMessageType('Attr', (_message.Message,), {
'DESCRIPTOR' : _APIDEF_ATTR,
'__module__' : 'tensorboard.compat.proto.api_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ApiDef.Attr)
})
,
'DESCRIPTOR' : _APIDEF,
'__module__' : 'tensorboard.compat.proto.api_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ApiDef)
})
_sym_db.RegisterMessage(ApiDef)
_sym_db.RegisterMessage(ApiDef.Endpoint)
_sym_db.RegisterMessage(ApiDef.Arg)
_sym_db.RegisterMessage(ApiDef.Attr)
ApiDefs = _reflection.GeneratedProtocolMessageType('ApiDefs', (_message.Message,), {
'DESCRIPTOR' : _APIDEFS,
'__module__' : 'tensorboard.compat.proto.api_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ApiDefs)
})
_sym_db.RegisterMessage(ApiDefs)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\014ApiDefProtosP\001ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/api_def_go_proto\370\001\001'
  _APIDEF._serialized_start=99
  _APIDEF._serialized_end=842
  _APIDEF_ENDPOINT._serialized_start=526
  _APIDEF_ENDPOINT._serialized_end=599
  _APIDEF_ARG._serialized_start=601
  _APIDEF_ARG._serialized_end=660
  _APIDEF_ATTR._serialized_start=662
  _APIDEF_ATTR._serialized_end=769
  _APIDEF_VISIBILITY._serialized_start=771
  _APIDEF_VISIBILITY._serialized_end=842
  _APIDEFS._serialized_start=844
  _APIDEFS._serialized_end=886
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/attr_value_pb2.py

@@ -0,0 +1,80 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/attr_value.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import tensor_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)tensorboard/compat/proto/attr_value.proto\x12\x0btensorboard\x1a%tensorboard/compat/proto/tensor.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xaf\x04\n\tAttrValue\x12\x0b\n\x01s\x18\x02 \x01(\x0cH\x00\x12\x0b\n\x01i\x18\x03 \x01(\x03H\x00\x12\x0b\n\x01\x66\x18\x04 \x01(\x02H\x00\x12\x0b\n\x01\x62\x18\x05 \x01(\x08H\x00\x12%\n\x04type\x18\x06 \x01(\x0e\x32\x15.tensorboard.DataTypeH\x00\x12.\n\x05shape\x18\x07 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProtoH\x00\x12*\n\x06tensor\x18\x08 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x12\x30\n\x04list\x18\x01 \x01(\x0b\x32 .tensorboard.AttrValue.ListValueH\x00\x12)\n\x04\x66unc\x18\n \x01(\x0b\x32\x19.tensorboard.NameAttrListH\x00\x12\x15\n\x0bplaceholder\x18\t \x01(\tH\x00\x1a\xed\x01\n\tListValue\x12\t\n\x01s\x18\x02 \x03(\x0c\x12\r\n\x01i\x18\x03 \x03(\x03\x42\x02\x10\x01\x12\r\n\x01\x66\x18\x04 \x03(\x02\x42\x02\x10\x01\x12\r\n\x01\x62\x18\x05 \x03(\x08\x42\x02\x10\x01\x12\'\n\x04type\x18\x06 \x03(\x0e\x32\x15.tensorboard.DataTypeB\x02\x10\x01\x12,\n\x05shape\x18\x07 \x03(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12(\n\x06tensor\x18\x08 \x03(\x0b\x32\x18.tensorboard.TensorProto\x12\'\n\x04\x66unc\x18\t \x03(\x0b\x32\x19.tensorboard.NameAttrListB\x07\n\x05value\"\x94\x01\n\x0cNameAttrList\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x31\n\x04\x61ttr\x18\x02 \x03(\x0b\x32#.tensorboard.NameAttrList.AttrEntry\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x42\x83\x01\n\x18org.tensorflow.frameworkB\x0f\x41ttrValueProtosP\x01ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/attr_value_go_proto\xf8\x01\x01\x62\x06proto3')
_ATTRVALUE = DESCRIPTOR.message_types_by_name['AttrValue']
_ATTRVALUE_LISTVALUE = _ATTRVALUE.nested_types_by_name['ListValue']
_NAMEATTRLIST = DESCRIPTOR.message_types_by_name['NameAttrList']
_NAMEATTRLIST_ATTRENTRY = _NAMEATTRLIST.nested_types_by_name['AttrEntry']
AttrValue = _reflection.GeneratedProtocolMessageType('AttrValue', (_message.Message,), {
'ListValue' : _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), {
'DESCRIPTOR' : _ATTRVALUE_LISTVALUE,
'__module__' : 'tensorboard.compat.proto.attr_value_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AttrValue.ListValue)
})
,
'DESCRIPTOR' : _ATTRVALUE,
'__module__' : 'tensorboard.compat.proto.attr_value_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AttrValue)
})
_sym_db.RegisterMessage(AttrValue)
_sym_db.RegisterMessage(AttrValue.ListValue)
NameAttrList = _reflection.GeneratedProtocolMessageType('NameAttrList', (_message.Message,), {
'AttrEntry' : _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), {
'DESCRIPTOR' : _NAMEATTRLIST_ATTRENTRY,
'__module__' : 'tensorboard.compat.proto.attr_value_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NameAttrList.AttrEntry)
})
,
'DESCRIPTOR' : _NAMEATTRLIST,
'__module__' : 'tensorboard.compat.proto.attr_value_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NameAttrList)
})
_sym_db.RegisterMessage(NameAttrList)
_sym_db.RegisterMessage(NameAttrList.AttrEntry)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\017AttrValueProtosP\001ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/attr_value_go_proto\370\001\001'
  _ATTRVALUE_LISTVALUE.fields_by_name['i']._options = None
  _ATTRVALUE_LISTVALUE.fields_by_name['i']._serialized_options = b'\020\001'
  _ATTRVALUE_LISTVALUE.fields_by_name['f']._options = None
  _ATTRVALUE_LISTVALUE.fields_by_name['f']._serialized_options = b'\020\001'
  _ATTRVALUE_LISTVALUE.fields_by_name['b']._options = None
  _ATTRVALUE_LISTVALUE.fields_by_name['b']._serialized_options = b'\020\001'
  _ATTRVALUE_LISTVALUE.fields_by_name['type']._options = None
  _ATTRVALUE_LISTVALUE.fields_by_name['type']._serialized_options = b'\020\001'
  _NAMEATTRLIST_ATTRENTRY._options = None
  _NAMEATTRLIST_ATTRENTRY._serialized_options = b'8\001'
  _ATTRVALUE._serialized_start=181
  _ATTRVALUE._serialized_end=740
  _ATTRVALUE_LISTVALUE._serialized_start=494
  _ATTRVALUE_LISTVALUE._serialized_end=731
  _NAMEATTRLIST._serialized_start=743
  _NAMEATTRLIST._serialized_end=891
  _NAMEATTRLIST_ATTRENTRY._serialized_start=824
  _NAMEATTRLIST_ATTRENTRY._serialized_end=891
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/cluster_pb2.py

@@ -0,0 +1,58 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/cluster.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&tensorboard/compat/proto/cluster.proto\x12\x0btensorboard\"s\n\x06JobDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\x05tasks\x18\x02 \x03(\x0b\x32\x1e.tensorboard.JobDef.TasksEntry\x1a,\n\nTasksEntry\x12\x0b\n\x03key\x18\x01 \x01(\x05\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\".\n\nClusterDef\x12 \n\x03job\x18\x01 \x03(\x0b\x32\x13.tensorboard.JobDefB\x87\x01\n\x1aorg.tensorflow.distruntimeB\rClusterProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_JOBDEF = DESCRIPTOR.message_types_by_name['JobDef']
_JOBDEF_TASKSENTRY = _JOBDEF.nested_types_by_name['TasksEntry']
_CLUSTERDEF = DESCRIPTOR.message_types_by_name['ClusterDef']
JobDef = _reflection.GeneratedProtocolMessageType('JobDef', (_message.Message,), {
'TasksEntry' : _reflection.GeneratedProtocolMessageType('TasksEntry', (_message.Message,), {
'DESCRIPTOR' : _JOBDEF_TASKSENTRY,
'__module__' : 'tensorboard.compat.proto.cluster_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.JobDef.TasksEntry)
})
,
'DESCRIPTOR' : _JOBDEF,
'__module__' : 'tensorboard.compat.proto.cluster_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.JobDef)
})
_sym_db.RegisterMessage(JobDef)
_sym_db.RegisterMessage(JobDef.TasksEntry)
ClusterDef = _reflection.GeneratedProtocolMessageType('ClusterDef', (_message.Message,), {
'DESCRIPTOR' : _CLUSTERDEF,
'__module__' : 'tensorboard.compat.proto.cluster_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ClusterDef)
})
_sym_db.RegisterMessage(ClusterDef)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\032org.tensorflow.distruntimeB\rClusterProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
  _JOBDEF_TASKSENTRY._options = None
  _JOBDEF_TASKSENTRY._serialized_options = b'8\001'
  _JOBDEF._serialized_start=55
  _JOBDEF._serialized_end=170
  _JOBDEF_TASKSENTRY._serialized_start=126
  _JOBDEF_TASKSENTRY._serialized_end=170
  _CLUSTERDEF._serialized_start=172
  _CLUSTERDEF._serialized_end=218
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long

tensorboard/compat/proto/coordination_config_pb2.py

@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/coordination_config.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2tensorboard/compat/proto/coordination_config.proto\x12\x0btensorboard\"1\n\x0e\x43oordinatedJob\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tnum_tasks\x18\x02 \x01(\x05\"\xd1\x03\n\x19\x43oordinationServiceConfig\x12\x14\n\x0cservice_type\x18\x01 \x01(\t\x12\x16\n\x0eservice_leader\x18\x02 \x01(\t\x12\x1b\n\x13\x65nable_health_check\x18\x03 \x01(\x08\x12&\n\x1e\x63luster_register_timeout_in_ms\x18\x04 \x01(\x03\x12\x1f\n\x17heartbeat_timeout_in_ms\x18\x05 \x01(\x03\x12\x39\n\x14\x63oordinated_job_list\x18\n \x03(\x0b\x32\x1b.tensorboard.CoordinatedJob\x12&\n\x1eshutdown_barrier_timeout_in_ms\x18\x07 \x01(\x03\x12*\n\"agent_destruction_without_shutdown\x18\x08 \x01(\x08\x12\x18\n\x10recoverable_jobs\x18\t \x03(\t\x12*\n\"allow_new_incarnation_to_reconnect\x18\x0b \x01(\x08\x12\x15\n\rforce_disable\x18\x0c \x01(\x08\x12.\n&poll_for_error_from_service_at_startup\x18\r \x01(\x08J\x04\x08\x06\x10\x07\x42WZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_protob\x06proto3')
_COORDINATEDJOB = DESCRIPTOR.message_types_by_name['CoordinatedJob']
_COORDINATIONSERVICECONFIG = DESCRIPTOR.message_types_by_name['CoordinationServiceConfig']
CoordinatedJob = _reflection.GeneratedProtocolMessageType('CoordinatedJob', (_message.Message,), {
'DESCRIPTOR' : _COORDINATEDJOB,
'__module__' : 'tensorboard.compat.proto.coordination_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CoordinatedJob)
})
_sym_db.RegisterMessage(CoordinatedJob)
CoordinationServiceConfig = _reflection.GeneratedProtocolMessageType('CoordinationServiceConfig', (_message.Message,), {
'DESCRIPTOR' : _COORDINATIONSERVICECONFIG,
'__module__' : 'tensorboard.compat.proto.coordination_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CoordinationServiceConfig)
})
_sym_db.RegisterMessage(CoordinationServiceConfig)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto'
  _COORDINATEDJOB._serialized_start=67
  _COORDINATEDJOB._serialized_end=116
  _COORDINATIONSERVICECONFIG._serialized_start=119
  _COORDINATIONSERVICECONFIG._serialized_end=584
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/cost_graph_pb2.py

@@ -0,0 +1,87 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/cost_graph.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)tensorboard/compat/proto/cost_graph.proto\x12\x0btensorboard\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xd0\x06\n\x0c\x43ostGraphDef\x12,\n\x04node\x18\x01 \x03(\x0b\x32\x1e.tensorboard.CostGraphDef.Node\x12\x36\n\x04\x63ost\x18\x02 \x03(\x0b\x32(.tensorboard.CostGraphDef.AggregatedCost\x1a\xa6\x05\n\x04Node\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x01(\t\x12\n\n\x02id\x18\x03 \x01(\x05\x12<\n\ninput_info\x18\x04 \x03(\x0b\x32(.tensorboard.CostGraphDef.Node.InputInfo\x12>\n\x0boutput_info\x18\x05 \x03(\x0b\x32).tensorboard.CostGraphDef.Node.OutputInfo\x12\x1d\n\x15temporary_memory_size\x18\x06 \x01(\x03\x12\x1e\n\x16persistent_memory_size\x18\x0c \x01(\x03\x12!\n\x15host_temp_memory_size\x18\n \x01(\x03\x42\x02\x18\x01\x12#\n\x17\x64\x65vice_temp_memory_size\x18\x0b \x01(\x03\x42\x02\x18\x01\x12)\n\x1d\x64\x65vice_persistent_memory_size\x18\x10 \x01(\x03\x42\x02\x18\x01\x12\x14\n\x0c\x63ompute_cost\x18\t \x01(\x03\x12\x14\n\x0c\x63ompute_time\x18\x0e \x01(\x03\x12\x13\n\x0bmemory_time\x18\x0f \x01(\x03\x12\x10\n\x08is_final\x18\x07 \x01(\x08\x12\x15\n\rcontrol_input\x18\x08 \x03(\x05\x12\x12\n\ninaccurate\x18\x11 \x01(\x08\x1a;\n\tInputInfo\x12\x16\n\x0epreceding_node\x18\x01 \x01(\x05\x12\x16\n\x0epreceding_port\x18\x02 \x01(\x05\x1a\x88\x01\n\nOutputInfo\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x18\n\x10\x61lias_input_port\x18\x02 \x01(\x03\x12,\n\x05shape\x18\x03 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12$\n\x05\x64type\x18\x04 \x01(\x0e\x32\x15.tensorboard.DataType\x1a\x31\n\x0e\x41ggregatedCost\x12\x0c\n\x04\x63ost\x18\x01 \x01(\x02\x12\x11\n\tdimension\x18\x02 \x01(\tB\x83\x01\n\x18org.tensorflow.frameworkB\x0f\x43ostGraphProtosP\x01ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/cost_graph_go_proto\xf8\x01\x01\x62\x06proto3')
_COSTGRAPHDEF = DESCRIPTOR.message_types_by_name['CostGraphDef']
_COSTGRAPHDEF_NODE = _COSTGRAPHDEF.nested_types_by_name['Node']
_COSTGRAPHDEF_NODE_INPUTINFO = _COSTGRAPHDEF_NODE.nested_types_by_name['InputInfo']
_COSTGRAPHDEF_NODE_OUTPUTINFO = _COSTGRAPHDEF_NODE.nested_types_by_name['OutputInfo']
_COSTGRAPHDEF_AGGREGATEDCOST = _COSTGRAPHDEF.nested_types_by_name['AggregatedCost']
CostGraphDef = _reflection.GeneratedProtocolMessageType('CostGraphDef', (_message.Message,), {
'Node' : _reflection.GeneratedProtocolMessageType('Node', (_message.Message,), {
'InputInfo' : _reflection.GeneratedProtocolMessageType('InputInfo', (_message.Message,), {
'DESCRIPTOR' : _COSTGRAPHDEF_NODE_INPUTINFO,
'__module__' : 'tensorboard.compat.proto.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CostGraphDef.Node.InputInfo)
})
,
'OutputInfo' : _reflection.GeneratedProtocolMessageType('OutputInfo', (_message.Message,), {
'DESCRIPTOR' : _COSTGRAPHDEF_NODE_OUTPUTINFO,
'__module__' : 'tensorboard.compat.proto.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CostGraphDef.Node.OutputInfo)
})
,
'DESCRIPTOR' : _COSTGRAPHDEF_NODE,
'__module__' : 'tensorboard.compat.proto.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CostGraphDef.Node)
})
,
'AggregatedCost' : _reflection.GeneratedProtocolMessageType('AggregatedCost', (_message.Message,), {
'DESCRIPTOR' : _COSTGRAPHDEF_AGGREGATEDCOST,
'__module__' : 'tensorboard.compat.proto.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CostGraphDef.AggregatedCost)
})
,
'DESCRIPTOR' : _COSTGRAPHDEF,
'__module__' : 'tensorboard.compat.proto.cost_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CostGraphDef)
})
_sym_db.RegisterMessage(CostGraphDef)
_sym_db.RegisterMessage(CostGraphDef.Node)
_sym_db.RegisterMessage(CostGraphDef.Node.InputInfo)
_sym_db.RegisterMessage(CostGraphDef.Node.OutputInfo)
_sym_db.RegisterMessage(CostGraphDef.AggregatedCost)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\017CostGraphProtosP\001ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/cost_graph_go_proto\370\001\001'
  _COSTGRAPHDEF_NODE.fields_by_name['host_temp_memory_size']._options = None
  _COSTGRAPHDEF_NODE.fields_by_name['host_temp_memory_size']._serialized_options = b'\030\001'
  _COSTGRAPHDEF_NODE.fields_by_name['device_temp_memory_size']._options = None
  _COSTGRAPHDEF_NODE.fields_by_name['device_temp_memory_size']._serialized_options = b'\030\001'
  _COSTGRAPHDEF_NODE.fields_by_name['device_persistent_memory_size']._options = None
  _COSTGRAPHDEF_NODE.fields_by_name['device_persistent_memory_size']._serialized_options = b'\030\001'
  _COSTGRAPHDEF._serialized_start=142
  _COSTGRAPHDEF._serialized_end=990
  _COSTGRAPHDEF_NODE._serialized_start=261
  _COSTGRAPHDEF_NODE._serialized_end=939
  _COSTGRAPHDEF_NODE_INPUTINFO._serialized_start=741
  _COSTGRAPHDEF_NODE_INPUTINFO._serialized_end=800
  _COSTGRAPHDEF_NODE_OUTPUTINFO._serialized_start=803
  _COSTGRAPHDEF_NODE_OUTPUTINFO._serialized_end=939
  _COSTGRAPHDEF_AGGREGATEDCOST._serialized_start=941
  _COSTGRAPHDEF_AGGREGATEDCOST._serialized_end=990
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/cpp_shape_inference_pb2.py

@@ -0,0 +1,70 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/cpp_shape_inference.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import full_type_pb2 as tensorboard_dot_compat_dot_proto_dot_full__type__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n2tensorboard/compat/proto/cpp_shape_inference.proto\x12\x0btensorboard\x1a(tensorboard/compat/proto/full_type.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xa1\x03\n\x17\x43ppShapeInferenceResult\x12,\n\x05shape\x18\x01 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12\x44\n\x0bhandle_data\x18\x04 \x01(\x0b\x32/.tensorboard.CppShapeInferenceResult.HandleData\x1a\x96\x01\n\x12HandleShapeAndType\x12,\n\x05shape\x18\x01 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12$\n\x05\x64type\x18\x02 \x01(\x0e\x32\x15.tensorboard.DataType\x12&\n\x04type\x18\x04 \x01(\x0b\x32\x18.tensorboard.FullTypeDefJ\x04\x08\x03\x10\x04\x1am\n\nHandleData\x12\x0e\n\x06is_set\x18\x01 \x01(\x08\x12O\n\x0eshape_and_type\x18\x02 \x03(\x0b\x32\x37.tensorboard.CppShapeInferenceResult.HandleShapeAndTypeJ\x04\x08\x02\x10\x03J\x04\x08\x03\x10\x04\"e\n\x1d\x43ppShapeInferenceInputsNeeded\x12\x1c\n\x14input_tensors_needed\x18\x01 \x03(\x05\x12&\n\x1einput_tensors_as_shapes_needed\x18\x02 \x03(\x05\x42\x61Z\\github.com/tensorflow/tensorflow/tensorflow/go/python/framework/cpp_shape_inference_go_proto\xf8\x01\x01\x62\x06proto3')
_CPPSHAPEINFERENCERESULT = DESCRIPTOR.message_types_by_name['CppShapeInferenceResult']
_CPPSHAPEINFERENCERESULT_HANDLESHAPEANDTYPE = _CPPSHAPEINFERENCERESULT.nested_types_by_name['HandleShapeAndType']
_CPPSHAPEINFERENCERESULT_HANDLEDATA = _CPPSHAPEINFERENCERESULT.nested_types_by_name['HandleData']
_CPPSHAPEINFERENCEINPUTSNEEDED = DESCRIPTOR.message_types_by_name['CppShapeInferenceInputsNeeded']
CppShapeInferenceResult = _reflection.GeneratedProtocolMessageType('CppShapeInferenceResult', (_message.Message,), {
'HandleShapeAndType' : _reflection.GeneratedProtocolMessageType('HandleShapeAndType', (_message.Message,), {
'DESCRIPTOR' : _CPPSHAPEINFERENCERESULT_HANDLESHAPEANDTYPE,
'__module__' : 'tensorboard.compat.proto.cpp_shape_inference_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CppShapeInferenceResult.HandleShapeAndType)
})
,
'HandleData' : _reflection.GeneratedProtocolMessageType('HandleData', (_message.Message,), {
'DESCRIPTOR' : _CPPSHAPEINFERENCERESULT_HANDLEDATA,
'__module__' : 'tensorboard.compat.proto.cpp_shape_inference_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CppShapeInferenceResult.HandleData)
})
,
'DESCRIPTOR' : _CPPSHAPEINFERENCERESULT,
'__module__' : 'tensorboard.compat.proto.cpp_shape_inference_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CppShapeInferenceResult)
})
_sym_db.RegisterMessage(CppShapeInferenceResult)
_sym_db.RegisterMessage(CppShapeInferenceResult.HandleShapeAndType)
_sym_db.RegisterMessage(CppShapeInferenceResult.HandleData)
CppShapeInferenceInputsNeeded = _reflection.GeneratedProtocolMessageType('CppShapeInferenceInputsNeeded', (_message.Message,), {
'DESCRIPTOR' : _CPPSHAPEINFERENCEINPUTSNEEDED,
'__module__' : 'tensorboard.compat.proto.cpp_shape_inference_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CppShapeInferenceInputsNeeded)
})
_sym_db.RegisterMessage(CppShapeInferenceInputsNeeded)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'Z\\github.com/tensorflow/tensorflow/tensorflow/go/python/framework/cpp_shape_inference_go_proto\370\001\001'
  _CPPSHAPEINFERENCERESULT._serialized_start=193
  _CPPSHAPEINFERENCERESULT._serialized_end=610
  _CPPSHAPEINFERENCERESULT_HANDLESHAPEANDTYPE._serialized_start=337
  _CPPSHAPEINFERENCERESULT_HANDLESHAPEANDTYPE._serialized_end=487
  _CPPSHAPEINFERENCERESULT_HANDLEDATA._serialized_start=489
  _CPPSHAPEINFERENCERESULT_HANDLEDATA._serialized_end=598
  _CPPSHAPEINFERENCEINPUTSNEEDED._serialized_start=612
  _CPPSHAPEINFERENCEINPUTSNEEDED._serialized_end=713
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/debug_pb2.py

@@ -0,0 +1,65 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/debug.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$tensorboard/compat/proto/debug.proto\x12\x0btensorboard\"\x8e\x01\n\x10\x44\x65\x62ugTensorWatch\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x13\n\x0boutput_slot\x18\x02 \x01(\x05\x12\x11\n\tdebug_ops\x18\x03 \x03(\t\x12\x12\n\ndebug_urls\x18\x04 \x03(\t\x12+\n#tolerate_debug_op_creation_failures\x18\x05 \x01(\x08\"\x82\x01\n\x0c\x44\x65\x62ugOptions\x12>\n\x17\x64\x65\x62ug_tensor_watch_opts\x18\x04 \x03(\x0b\x32\x1d.tensorboard.DebugTensorWatch\x12\x13\n\x0bglobal_step\x18\n \x01(\x03\x12\x1d\n\x15reset_disk_byte_usage\x18\x0b \x01(\x08\"j\n\x12\x44\x65\x62uggedSourceFile\x12\x0c\n\x04host\x18\x01 \x01(\t\x12\x11\n\tfile_path\x18\x02 \x01(\t\x12\x15\n\rlast_modified\x18\x03 \x01(\x03\x12\r\n\x05\x62ytes\x18\x04 \x01(\x03\x12\r\n\x05lines\x18\x05 \x03(\t\"L\n\x13\x44\x65\x62uggedSourceFiles\x12\x35\n\x0csource_files\x18\x01 \x03(\x0b\x32\x1f.tensorboard.DebuggedSourceFileB\x83\x01\n\x18org.tensorflow.frameworkB\x0b\x44\x65\x62ugProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_DEBUGTENSORWATCH = DESCRIPTOR.message_types_by_name['DebugTensorWatch']
_DEBUGOPTIONS = DESCRIPTOR.message_types_by_name['DebugOptions']
_DEBUGGEDSOURCEFILE = DESCRIPTOR.message_types_by_name['DebuggedSourceFile']
_DEBUGGEDSOURCEFILES = DESCRIPTOR.message_types_by_name['DebuggedSourceFiles']
DebugTensorWatch = _reflection.GeneratedProtocolMessageType('DebugTensorWatch', (_message.Message,), {
'DESCRIPTOR' : _DEBUGTENSORWATCH,
'__module__' : 'tensorboard.compat.proto.debug_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DebugTensorWatch)
})
_sym_db.RegisterMessage(DebugTensorWatch)
DebugOptions = _reflection.GeneratedProtocolMessageType('DebugOptions', (_message.Message,), {
'DESCRIPTOR' : _DEBUGOPTIONS,
'__module__' : 'tensorboard.compat.proto.debug_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DebugOptions)
})
_sym_db.RegisterMessage(DebugOptions)
DebuggedSourceFile = _reflection.GeneratedProtocolMessageType('DebuggedSourceFile', (_message.Message,), {
'DESCRIPTOR' : _DEBUGGEDSOURCEFILE,
'__module__' : 'tensorboard.compat.proto.debug_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DebuggedSourceFile)
})
_sym_db.RegisterMessage(DebuggedSourceFile)
DebuggedSourceFiles = _reflection.GeneratedProtocolMessageType('DebuggedSourceFiles', (_message.Message,), {
'DESCRIPTOR' : _DEBUGGEDSOURCEFILES,
'__module__' : 'tensorboard.compat.proto.debug_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DebuggedSourceFiles)
})
_sym_db.RegisterMessage(DebuggedSourceFiles)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\013DebugProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
  _DEBUGTENSORWATCH._serialized_start=54
  _DEBUGTENSORWATCH._serialized_end=196
  _DEBUGOPTIONS._serialized_start=199
  _DEBUGOPTIONS._serialized_end=329
  _DEBUGGEDSOURCEFILE._serialized_start=331
  _DEBUGGEDSOURCEFILE._serialized_end=437
  _DEBUGGEDSOURCEFILES._serialized_start=439
  _DEBUGGEDSOURCEFILES._serialized_end=515
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/event_pb2.py

@@ -0,0 +1,149 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/event.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import summary_pb2 as tensorboard_dot_compat_dot_proto_dot_summary__pb2
try:
  tensorboard_dot_compat_dot_proto_dot_histogram__pb2 = tensorboard_dot_compat_dot_proto_dot_summary__pb2.tensorboard_dot_compat_dot_proto_dot_histogram__pb2
except AttributeError:
  tensorboard_dot_compat_dot_proto_dot_histogram__pb2 = tensorboard_dot_compat_dot_proto_dot_summary__pb2.tensorboard.compat.proto.histogram_pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$tensorboard/compat/proto/event.proto\x12\x0btensorboard\x1a&tensorboard/compat/proto/summary.proto\"\xf9\x02\n\x05\x45vent\x12\x11\n\twall_time\x18\x01 \x01(\x01\x12\x0c\n\x04step\x18\x02 \x01(\x03\x12\x16\n\x0c\x66ile_version\x18\x03 \x01(\tH\x00\x12\x13\n\tgraph_def\x18\x04 \x01(\x0cH\x00\x12\'\n\x07summary\x18\x05 \x01(\x0b\x32\x14.tensorboard.SummaryH\x00\x12\x32\n\x0blog_message\x18\x06 \x01(\x0b\x32\x17.tensorboard.LogMessageB\x02\x18\x01H\x00\x12.\n\x0bsession_log\x18\x07 \x01(\x0b\x32\x17.tensorboard.SessionLogH\x00\x12=\n\x13tagged_run_metadata\x18\x08 \x01(\x0b\x32\x1e.tensorboard.TaggedRunMetadataH\x00\x12\x18\n\x0emeta_graph_def\x18\t \x01(\x0cH\x00\x12\x34\n\x0fsource_metadata\x18\n \x01(\x0b\x32\x1b.tensorboard.SourceMetadataB\x06\n\x04what\" \n\x0eSourceMetadata\x12\x0e\n\x06writer\x18\x01 \x01(\t\"\xa2\x01\n\nLogMessage\x12,\n\x05level\x18\x01 \x01(\x0e\x32\x1d.tensorboard.LogMessage.Level\x12\x0f\n\x07message\x18\x02 \x01(\t\"Q\n\x05Level\x12\x0b\n\x07UNKNOWN\x10\x00\x12\r\n\tDEBUGGING\x10\n\x12\x08\n\x04INFO\x10\x14\x12\x08\n\x04WARN\x10\x1e\x12\t\n\x05\x45RROR\x10(\x12\t\n\x05\x46\x41TAL\x10\x32\x1a\x02\x18\x01:\x02\x18\x01\"\xb7\x01\n\nSessionLog\x12\x35\n\x06status\x18\x01 \x01(\x0e\x32%.tensorboard.SessionLog.SessionStatus\x12\x17\n\x0f\x63heckpoint_path\x18\x02 \x01(\t\x12\x0b\n\x03msg\x18\x03 \x01(\t\"L\n\rSessionStatus\x12\x16\n\x12STATUS_UNSPECIFIED\x10\x00\x12\t\n\x05START\x10\x01\x12\x08\n\x04STOP\x10\x02\x12\x0e\n\nCHECKPOINT\x10\x03\"6\n\x11TaggedRunMetadata\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12\x14\n\x0crun_metadata\x18\x02 \x01(\x0c\"$\n\x0eWatchdogConfig\x12\x12\n\ntimeout_ms\x18\x01 \x01(\x03\"&\n\x11RequestedExitCode\x12\x11\n\texit_code\x18\x01 \x01(\x05\"\xb9\x01\n\x16WorkerHeartbeatRequest\x12\x36\n\rshutdown_mode\x18\x01 \x01(\x0e\x32\x1f.tensorboard.WorkerShutdownMode\x12\x34\n\x0fwatchdog_config\x18\x02 \x01(\x0b\x32\x1b.tensorboard.WatchdogConfig\x12\x31\n\texit_code\x18\x03 \x01(\x0b\x32\x1e.tensorboard.RequestedExitCode\"\x85\x01\n\x17WorkerHeartbeatResponse\x12\x30\n\rhealth_status\x18\x01 \x01(\x0e\x32\x19.tensorboard.WorkerHealth\x12&\n\nworker_log\x18\x02 \x03(\x0b\x32\x12.tensorboard.Event\x12\x10\n\x08hostname\x18\x03 \x01(\t*[\n\x0cWorkerHealth\x12\x06\n\x02OK\x10\x00\x12\x1c\n\x18RECEIVED_SHUTDOWN_SIGNAL\x10\x01\x12\x12\n\x0eINTERNAL_ERROR\x10\x02\x12\x11\n\rSHUTTING_DOWN\x10\x03*k\n\x12WorkerShutdownMode\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x12\n\x0eNOT_CONFIGURED\x10\x01\x12\x18\n\x14WAIT_FOR_COORDINATOR\x10\x02\x12\x1a\n\x16SHUTDOWN_AFTER_TIMEOUT\x10\x03\x42p\n\x13org.tensorflow.utilB\x0b\x45ventProtosP\x01ZGgithub.com/tensorflow/tensorflow/tensorflow/go/core/util/event_go_proto\xf8\x01\x01\x62\x06proto3')
_WORKERHEALTH = DESCRIPTOR.enum_types_by_name['WorkerHealth']
WorkerHealth = enum_type_wrapper.EnumTypeWrapper(_WORKERHEALTH)
_WORKERSHUTDOWNMODE = DESCRIPTOR.enum_types_by_name['WorkerShutdownMode']
WorkerShutdownMode = enum_type_wrapper.EnumTypeWrapper(_WORKERSHUTDOWNMODE)
OK = 0
RECEIVED_SHUTDOWN_SIGNAL = 1
INTERNAL_ERROR = 2
SHUTTING_DOWN = 3
DEFAULT = 0
NOT_CONFIGURED = 1
WAIT_FOR_COORDINATOR = 2
SHUTDOWN_AFTER_TIMEOUT = 3
_EVENT = DESCRIPTOR.message_types_by_name['Event']
_SOURCEMETADATA = DESCRIPTOR.message_types_by_name['SourceMetadata']
_LOGMESSAGE = DESCRIPTOR.message_types_by_name['LogMessage']
_SESSIONLOG = DESCRIPTOR.message_types_by_name['SessionLog']
_TAGGEDRUNMETADATA = DESCRIPTOR.message_types_by_name['TaggedRunMetadata']
_WATCHDOGCONFIG = DESCRIPTOR.message_types_by_name['WatchdogConfig']
_REQUESTEDEXITCODE = DESCRIPTOR.message_types_by_name['RequestedExitCode']
_WORKERHEARTBEATREQUEST = DESCRIPTOR.message_types_by_name['WorkerHeartbeatRequest']
_WORKERHEARTBEATRESPONSE = DESCRIPTOR.message_types_by_name['WorkerHeartbeatResponse']
_LOGMESSAGE_LEVEL = _LOGMESSAGE.enum_types_by_name['Level']
_SESSIONLOG_SESSIONSTATUS = _SESSIONLOG.enum_types_by_name['SessionStatus']
Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), {
'DESCRIPTOR' : _EVENT,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.Event)
})
_sym_db.RegisterMessage(Event)
SourceMetadata = _reflection.GeneratedProtocolMessageType('SourceMetadata', (_message.Message,), {
'DESCRIPTOR' : _SOURCEMETADATA,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SourceMetadata)
})
_sym_db.RegisterMessage(SourceMetadata)
LogMessage = _reflection.GeneratedProtocolMessageType('LogMessage', (_message.Message,), {
'DESCRIPTOR' : _LOGMESSAGE,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.LogMessage)
})
_sym_db.RegisterMessage(LogMessage)
SessionLog = _reflection.GeneratedProtocolMessageType('SessionLog', (_message.Message,), {
'DESCRIPTOR' : _SESSIONLOG,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SessionLog)
})
_sym_db.RegisterMessage(SessionLog)
TaggedRunMetadata = _reflection.GeneratedProtocolMessageType('TaggedRunMetadata', (_message.Message,), {
'DESCRIPTOR' : _TAGGEDRUNMETADATA,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TaggedRunMetadata)
})
_sym_db.RegisterMessage(TaggedRunMetadata)
WatchdogConfig = _reflection.GeneratedProtocolMessageType('WatchdogConfig', (_message.Message,), {
'DESCRIPTOR' : _WATCHDOGCONFIG,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.WatchdogConfig)
})
_sym_db.RegisterMessage(WatchdogConfig)
RequestedExitCode = _reflection.GeneratedProtocolMessageType('RequestedExitCode', (_message.Message,), {
'DESCRIPTOR' : _REQUESTEDEXITCODE,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RequestedExitCode)
})
_sym_db.RegisterMessage(RequestedExitCode)
WorkerHeartbeatRequest = _reflection.GeneratedProtocolMessageType('WorkerHeartbeatRequest', (_message.Message,), {
'DESCRIPTOR' : _WORKERHEARTBEATREQUEST,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.WorkerHeartbeatRequest)
})
_sym_db.RegisterMessage(WorkerHeartbeatRequest)
WorkerHeartbeatResponse = _reflection.GeneratedProtocolMessageType('WorkerHeartbeatResponse', (_message.Message,), {
'DESCRIPTOR' : _WORKERHEARTBEATRESPONSE,
'__module__' : 'tensorboard.compat.proto.event_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.WorkerHeartbeatResponse)
})
_sym_db.RegisterMessage(WorkerHeartbeatResponse)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\023org.tensorflow.utilB\013EventProtosP\001ZGgithub.com/tensorflow/tensorflow/tensorflow/go/core/util/event_go_proto\370\001\001'
  _EVENT.fields_by_name['log_message']._options = None
  _EVENT.fields_by_name['log_message']._serialized_options = b'\030\001'
  _LOGMESSAGE_LEVEL._options = None
  _LOGMESSAGE_LEVEL._serialized_options = b'\030\001'
  _LOGMESSAGE._options = None
  _LOGMESSAGE._serialized_options = b'\030\001'
  _WORKERHEALTH._serialized_start=1316
  _WORKERHEALTH._serialized_end=1407
  _WORKERSHUTDOWNMODE._serialized_start=1409
  _WORKERSHUTDOWNMODE._serialized_end=1516
  _EVENT._serialized_start=94
  _EVENT._serialized_end=471
  _SOURCEMETADATA._serialized_start=473
  _SOURCEMETADATA._serialized_end=505
  _LOGMESSAGE._serialized_start=508
  _LOGMESSAGE._serialized_end=670
  _LOGMESSAGE_LEVEL._serialized_start=585
  _LOGMESSAGE_LEVEL._serialized_end=666
  _SESSIONLOG._serialized_start=673
  _SESSIONLOG._serialized_end=856
  _SESSIONLOG_SESSIONSTATUS._serialized_start=780
  _SESSIONLOG_SESSIONSTATUS._serialized_end=856
  _TAGGEDRUNMETADATA._serialized_start=858
  _TAGGEDRUNMETADATA._serialized_end=912
  _WATCHDOGCONFIG._serialized_start=914
  _WATCHDOGCONFIG._serialized_end=950
  _REQUESTEDEXITCODE._serialized_start=952
  _REQUESTEDEXITCODE._serialized_end=990
  _WORKERHEARTBEATREQUEST._serialized_start=993
  _WORKERHEARTBEATREQUEST._serialized_end=1178
  _WORKERHEARTBEATRESPONSE._serialized_start=1181
  _WORKERHEARTBEATRESPONSE._serialized_end=1314
# @@protoc_insertion_point(module_scope)
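
One more hedged sketch (not part of the diff), since `event.proto` defines the record type TensorBoard event files are built from: constructing an `Event` with this module and serializing it. `"brain.Event:2"` is the conventional file-version marker written as the first record of an event file; `time.time()` and `SerializeToString()` are standard Python/protobuf calls.

# Illustrative construction of an Event record using the generated classes above.
import time
from tensorboard.compat.proto import event_pb2

event = event_pb2.Event(
    wall_time=time.time(),
    step=0,
    file_version="brain.Event:2",   # conventional first record of an event file
)
record_bytes = event.SerializeToString()  # what an event-file writer would append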

tensorboard/compat/proto/full_type_pb2.py

@@ -0,0 +1,74 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/full_type.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(tensorboard/compat/proto/full_type.proto\x12\x0btensorboard\"\x81\x01\n\x0b\x46ullTypeDef\x12(\n\x07type_id\x18\x01 \x01(\x0e\x32\x17.tensorboard.FullTypeId\x12&\n\x04\x61rgs\x18\x02 \x03(\x0b\x32\x18.tensorboard.FullTypeDef\x12\x0b\n\x01s\x18\x03 \x01(\tH\x00\x12\x0b\n\x01i\x18\x04 \x01(\x03H\x00\x42\x06\n\x04\x61ttr*\xda\x04\n\nFullTypeId\x12\r\n\tTFT_UNSET\x10\x00\x12\x0b\n\x07TFT_VAR\x10\x01\x12\x0b\n\x07TFT_ANY\x10\x02\x12\x0f\n\x0bTFT_PRODUCT\x10\x03\x12\r\n\tTFT_NAMED\x10\x04\x12\x10\n\x0cTFT_FOR_EACH\x10\x14\x12\x10\n\x0cTFT_CALLABLE\x10\x64\x12\x0f\n\nTFT_TENSOR\x10\xe8\x07\x12\x0e\n\tTFT_ARRAY\x10\xe9\x07\x12\x11\n\x0cTFT_OPTIONAL\x10\xea\x07\x12\x10\n\x0bTFT_LITERAL\x10\xeb\x07\x12\x10\n\x0bTFT_ENCODED\x10\xec\x07\x12\x15\n\x10TFT_SHAPE_TENSOR\x10\xed\x07\x12\r\n\x08TFT_BOOL\x10\xc8\x01\x12\x0e\n\tTFT_UINT8\x10\xc9\x01\x12\x0f\n\nTFT_UINT16\x10\xca\x01\x12\x0f\n\nTFT_UINT32\x10\xcb\x01\x12\x0f\n\nTFT_UINT64\x10\xcc\x01\x12\r\n\x08TFT_INT8\x10\xcd\x01\x12\x0e\n\tTFT_INT16\x10\xce\x01\x12\x0e\n\tTFT_INT32\x10\xcf\x01\x12\x0e\n\tTFT_INT64\x10\xd0\x01\x12\r\n\x08TFT_HALF\x10\xd1\x01\x12\x0e\n\tTFT_FLOAT\x10\xd2\x01\x12\x0f\n\nTFT_DOUBLE\x10\xd3\x01\x12\x11\n\x0cTFT_BFLOAT16\x10\xd7\x01\x12\x12\n\rTFT_COMPLEX64\x10\xd4\x01\x12\x13\n\x0eTFT_COMPLEX128\x10\xd5\x01\x12\x0f\n\nTFT_STRING\x10\xd6\x01\x12\x10\n\x0bTFT_DATASET\x10\xf6N\x12\x0f\n\nTFT_RAGGED\x10\xf7N\x12\x11\n\x0cTFT_ITERATOR\x10\xf8N\x12\x13\n\x0eTFT_MUTEX_LOCK\x10\xdaO\x12\x17\n\x12TFT_LEGACY_VARIANT\x10\xdbOB\x81\x01\n\x18org.tensorflow.frameworkB\x0e\x46ullTypeProtosP\x01ZPgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/full_type_go_proto\xf8\x01\x01\x62\x06proto3')
_FULLTYPEID = DESCRIPTOR.enum_types_by_name['FullTypeId']
FullTypeId = enum_type_wrapper.EnumTypeWrapper(_FULLTYPEID)
TFT_UNSET = 0
TFT_VAR = 1
TFT_ANY = 2
TFT_PRODUCT = 3
TFT_NAMED = 4
TFT_FOR_EACH = 20
TFT_CALLABLE = 100
TFT_TENSOR = 1000
TFT_ARRAY = 1001
TFT_OPTIONAL = 1002
TFT_LITERAL = 1003
TFT_ENCODED = 1004
TFT_SHAPE_TENSOR = 1005
TFT_BOOL = 200
TFT_UINT8 = 201
TFT_UINT16 = 202
TFT_UINT32 = 203
TFT_UINT64 = 204
TFT_INT8 = 205
TFT_INT16 = 206
TFT_INT32 = 207
TFT_INT64 = 208
TFT_HALF = 209
TFT_FLOAT = 210
TFT_DOUBLE = 211
TFT_BFLOAT16 = 215
TFT_COMPLEX64 = 212
TFT_COMPLEX128 = 213
TFT_STRING = 214
TFT_DATASET = 10102
TFT_RAGGED = 10103
TFT_ITERATOR = 10104
TFT_MUTEX_LOCK = 10202
TFT_LEGACY_VARIANT = 10203
_FULLTYPEDEF = DESCRIPTOR.message_types_by_name['FullTypeDef']
FullTypeDef = _reflection.GeneratedProtocolMessageType('FullTypeDef', (_message.Message,), {
'DESCRIPTOR' : _FULLTYPEDEF,
'__module__' : 'tensorboard.compat.proto.full_type_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FullTypeDef)
})
_sym_db.RegisterMessage(FullTypeDef)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\016FullTypeProtosP\001ZPgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/full_type_go_proto\370\001\001'
  _FULLTYPEID._serialized_start=190
  _FULLTYPEID._serialized_end=792
  _FULLTYPEDEF._serialized_start=58
  _FULLTYPEDEF._serialized_end=187
# @@protoc_insertion_point(module_scope)

tensorboard/compat/proto/function_pb2.py

@@ -0,0 +1,157 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/function.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import attr_value_pb2 as tensorboard_dot_compat_dot_proto_dot_attr__value__pb2
from tensorboard.compat.proto import node_def_pb2 as tensorboard_dot_compat_dot_proto_dot_node__def__pb2
from tensorboard.compat.proto import op_def_pb2 as tensorboard_dot_compat_dot_proto_dot_op__def__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'tensorboard/compat/proto/function.proto\x12\x0btensorboard\x1a)tensorboard/compat/proto/attr_value.proto\x1a\'tensorboard/compat/proto/node_def.proto\x1a%tensorboard/compat/proto/op_def.proto\"\xab\x01\n\x12\x46unctionDefLibrary\x12*\n\x08\x66unction\x18\x01 \x03(\x0b\x32\x18.tensorboard.FunctionDef\x12*\n\x08gradient\x18\x02 \x03(\x0b\x32\x18.tensorboard.GradientDef\x12=\n\x14registered_gradients\x18\x03 \x03(\x0b\x32\x1f.tensorboard.RegisteredGradient\"\xcf\x06\n\x0b\x46unctionDef\x12%\n\tsignature\x18\x01 \x01(\x0b\x32\x12.tensorboard.OpDef\x12\x30\n\x04\x61ttr\x18\x05 \x03(\x0b\x32\".tensorboard.FunctionDef.AttrEntry\x12\x37\n\x08\x61rg_attr\x18\x07 \x03(\x0b\x32%.tensorboard.FunctionDef.ArgAttrEntry\x12Q\n\x16resource_arg_unique_id\x18\x08 \x03(\x0b\x32\x31.tensorboard.FunctionDef.ResourceArgUniqueIdEntry\x12&\n\x08node_def\x18\x03 \x03(\x0b\x32\x14.tensorboard.NodeDef\x12.\n\x03ret\x18\x04 \x03(\x0b\x32!.tensorboard.FunctionDef.RetEntry\x12=\n\x0b\x63ontrol_ret\x18\x06 \x03(\x0b\x32(.tensorboard.FunctionDef.ControlRetEntry\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x1a\x8a\x01\n\x08\x41rgAttrs\x12\x39\n\x04\x61ttr\x18\x01 \x03(\x0b\x32+.tensorboard.FunctionDef.ArgAttrs.AttrEntry\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x1aQ\n\x0c\x41rgAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\x30\n\x05value\x18\x02 \x01(\x0b\x32!.tensorboard.FunctionDef.ArgAttrs:\x02\x38\x01\x1a:\n\x18ResourceArgUniqueIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x1a*\n\x08RetEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1a\x31\n\x0f\x43ontrolRetEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01J\x04\x08\x02\x10\x03\";\n\x0bGradientDef\x12\x15\n\rfunction_name\x18\x01 \x01(\t\x12\x15\n\rgradient_func\x18\x02 \x01(\t\"G\n\x12RegisteredGradient\x12\x15\n\rgradient_func\x18\x01 \x01(\t\x12\x1a\n\x12registered_op_type\x18\x02 \x01(\tB\x80\x01\n\x18org.tensorflow.frameworkB\x0e\x46unctionProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/function_go_proto\xf8\x01\x01\x62\x06proto3')
_FUNCTIONDEFLIBRARY = DESCRIPTOR.message_types_by_name['FunctionDefLibrary']
_FUNCTIONDEF = DESCRIPTOR.message_types_by_name['FunctionDef']
_FUNCTIONDEF_ATTRENTRY = _FUNCTIONDEF.nested_types_by_name['AttrEntry']
_FUNCTIONDEF_ARGATTRS = _FUNCTIONDEF.nested_types_by_name['ArgAttrs']
_FUNCTIONDEF_ARGATTRS_ATTRENTRY = _FUNCTIONDEF_ARGATTRS.nested_types_by_name['AttrEntry']
_FUNCTIONDEF_ARGATTRENTRY = _FUNCTIONDEF.nested_types_by_name['ArgAttrEntry']
_FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY = _FUNCTIONDEF.nested_types_by_name['ResourceArgUniqueIdEntry']
_FUNCTIONDEF_RETENTRY = _FUNCTIONDEF.nested_types_by_name['RetEntry']
_FUNCTIONDEF_CONTROLRETENTRY = _FUNCTIONDEF.nested_types_by_name['ControlRetEntry']
_GRADIENTDEF = DESCRIPTOR.message_types_by_name['GradientDef']
_REGISTEREDGRADIENT = DESCRIPTOR.message_types_by_name['RegisteredGradient']
FunctionDefLibrary = _reflection.GeneratedProtocolMessageType('FunctionDefLibrary', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEFLIBRARY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDefLibrary)
})
_sym_db.RegisterMessage(FunctionDefLibrary)
FunctionDef = _reflection.GeneratedProtocolMessageType('FunctionDef', (_message.Message,), {
'AttrEntry' : _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_ATTRENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.AttrEntry)
})
,
'ArgAttrs' : _reflection.GeneratedProtocolMessageType('ArgAttrs', (_message.Message,), {
'AttrEntry' : _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_ARGATTRS_ATTRENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.ArgAttrs.AttrEntry)
})
,
'DESCRIPTOR' : _FUNCTIONDEF_ARGATTRS,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.ArgAttrs)
})
,
'ArgAttrEntry' : _reflection.GeneratedProtocolMessageType('ArgAttrEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_ARGATTRENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.ArgAttrEntry)
})
,
'ResourceArgUniqueIdEntry' : _reflection.GeneratedProtocolMessageType('ResourceArgUniqueIdEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.ResourceArgUniqueIdEntry)
})
,
'RetEntry' : _reflection.GeneratedProtocolMessageType('RetEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_RETENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.RetEntry)
})
,
'ControlRetEntry' : _reflection.GeneratedProtocolMessageType('ControlRetEntry', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONDEF_CONTROLRETENTRY,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef.ControlRetEntry)
})
,
'DESCRIPTOR' : _FUNCTIONDEF,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionDef)
})
_sym_db.RegisterMessage(FunctionDef)
_sym_db.RegisterMessage(FunctionDef.AttrEntry)
_sym_db.RegisterMessage(FunctionDef.ArgAttrs)
_sym_db.RegisterMessage(FunctionDef.ArgAttrs.AttrEntry)
_sym_db.RegisterMessage(FunctionDef.ArgAttrEntry)
_sym_db.RegisterMessage(FunctionDef.ResourceArgUniqueIdEntry)
_sym_db.RegisterMessage(FunctionDef.RetEntry)
_sym_db.RegisterMessage(FunctionDef.ControlRetEntry)
GradientDef = _reflection.GeneratedProtocolMessageType('GradientDef', (_message.Message,), {
'DESCRIPTOR' : _GRADIENTDEF,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GradientDef)
})
_sym_db.RegisterMessage(GradientDef)
RegisteredGradient = _reflection.GeneratedProtocolMessageType('RegisteredGradient', (_message.Message,), {
'DESCRIPTOR' : _REGISTEREDGRADIENT,
'__module__' : 'tensorboard.compat.proto.function_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RegisteredGradient)
})
_sym_db.RegisterMessage(RegisteredGradient)
if _descriptor._USE_C_DESCRIPTORS == False:
  DESCRIPTOR._options = None
  DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\016FunctionProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/function_go_proto\370\001\001'
  _FUNCTIONDEF_ATTRENTRY._options = None
  _FUNCTIONDEF_ATTRENTRY._serialized_options = b'8\001'
  _FUNCTIONDEF_ARGATTRS_ATTRENTRY._options = None
  _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_options = b'8\001'
  _FUNCTIONDEF_ARGATTRENTRY._options = None
  _FUNCTIONDEF_ARGATTRENTRY._serialized_options = b'8\001'
  _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._options = None
  _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_options = b'8\001'
  _FUNCTIONDEF_RETENTRY._options = None
  _FUNCTIONDEF_RETENTRY._serialized_options = b'8\001'
  _FUNCTIONDEF_CONTROLRETENTRY._options = None
  _FUNCTIONDEF_CONTROLRETENTRY._serialized_options = b'8\001'
  _FUNCTIONDEFLIBRARY._serialized_start=180
  _FUNCTIONDEFLIBRARY._serialized_end=351
  _FUNCTIONDEF._serialized_start=354
  _FUNCTIONDEF._serialized_end=1201
  _FUNCTIONDEF_ATTRENTRY._serialized_start=749
  _FUNCTIONDEF_ATTRENTRY._serialized_end=816
  _FUNCTIONDEF_ARGATTRS._serialized_start=819
  _FUNCTIONDEF_ARGATTRS._serialized_end=957
  _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_start=749
  _FUNCTIONDEF_ARGATTRS_ATTRENTRY._serialized_end=816
  _FUNCTIONDEF_ARGATTRENTRY._serialized_start=959
  _FUNCTIONDEF_ARGATTRENTRY._serialized_end=1040
  _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_start=1042
  _FUNCTIONDEF_RESOURCEARGUNIQUEIDENTRY._serialized_end=1100
  _FUNCTIONDEF_RETENTRY._serialized_start=1102
  _FUNCTIONDEF_RETENTRY._serialized_end=1144
  _FUNCTIONDEF_CONTROLRETENTRY._serialized_start=1146
  _FUNCTIONDEF_CONTROLRETENTRY._serialized_end=1195
  _GRADIENTDEF._serialized_start=1203
  _GRADIENTDEF._serialized_end=1262
  _REGISTEREDGRADIENT._serialized_start=1264
  _REGISTEREDGRADIENT._serialized_end=1335
# @@protoc_insertion_point(module_scope)
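
A minimal usage sketch for the FunctionDef/GradientDef classes registered above. It assumes the field names of the standard TensorFlow function.proto (signature, ret, control_ret, function_name, gradient_func, and the FunctionDefLibrary containers); the concrete names and values ("Double", "mul:z:0", etc.) are invented for illustration.

# Hedged sketch, not part of the generated module.
from tensorboard.compat.proto import function_pb2

fdef = function_pb2.FunctionDef()
fdef.signature.name = "Double"                 # OpDef signature of the function
fdef.ret["y"] = "mul:z:0"                      # map<string, string> return binding
fdef.control_ret["side_effect"] = "print_op"   # map<string, string> control output

grad = function_pb2.GradientDef(
    function_name="Double",                    # forward function (example name)
    gradient_func="DoubleGrad",                # hypothetical gradient function
)

lib = function_pb2.FunctionDefLibrary(function=[fdef], gradient=[grad])
print(len(lib.SerializeToString()))            # round-trips through the wire format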

View File

@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/graph_debug_info.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n/tensorboard/compat/proto/graph_debug_info.proto\x12\x0btensorboard\"\xab\x06\n\x0eGraphDebugInfo\x12\r\n\x05\x66iles\x18\x01 \x03(\t\x12\x41\n\x0c\x66rames_by_id\x18\x04 \x03(\x0b\x32+.tensorboard.GraphDebugInfo.FramesByIdEntry\x12\x41\n\x0ctraces_by_id\x18\x06 \x03(\x0b\x32+.tensorboard.GraphDebugInfo.TracesByIdEntry\x12\x37\n\x06traces\x18\x02 \x03(\x0b\x32\'.tensorboard.GraphDebugInfo.TracesEntry\x12H\n\x10name_to_trace_id\x18\x05 \x03(\x0b\x32..tensorboard.GraphDebugInfo.NameToTraceIdEntry\x1aX\n\x0b\x46ileLineCol\x12\x12\n\nfile_index\x18\x01 \x01(\x05\x12\x0c\n\x04line\x18\x02 \x01(\x05\x12\x0b\n\x03\x63ol\x18\x03 \x01(\x05\x12\x0c\n\x04\x66unc\x18\x04 \x01(\t\x12\x0c\n\x04\x63ode\x18\x05 \x01(\t\x1a\x63\n\nStackTrace\x12?\n\x0e\x66ile_line_cols\x18\x01 \x03(\x0b\x32\'.tensorboard.GraphDebugInfo.FileLineCol\x12\x14\n\x08\x66rame_id\x18\x02 \x03(\x06\x42\x02\x10\x01\x1aZ\n\x0f\x46ramesByIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.tensorboard.GraphDebugInfo.FileLineCol:\x02\x38\x01\x1aY\n\x0fTracesByIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\x06\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.tensorboard.GraphDebugInfo.StackTrace:\x02\x38\x01\x1aU\n\x0bTracesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x35\n\x05value\x18\x02 \x01(\x0b\x32&.tensorboard.GraphDebugInfo.StackTrace:\x02\x38\x01\x1a\x34\n\x12NameToTraceIdEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x06:\x02\x38\x01\x42\x8c\x01\n\x18org.tensorflow.frameworkB\x14GraphDebugInfoProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01')
_GRAPHDEBUGINFO = DESCRIPTOR.message_types_by_name['GraphDebugInfo']
_GRAPHDEBUGINFO_FILELINECOL = _GRAPHDEBUGINFO.nested_types_by_name['FileLineCol']
_GRAPHDEBUGINFO_STACKTRACE = _GRAPHDEBUGINFO.nested_types_by_name['StackTrace']
_GRAPHDEBUGINFO_FRAMESBYIDENTRY = _GRAPHDEBUGINFO.nested_types_by_name['FramesByIdEntry']
_GRAPHDEBUGINFO_TRACESBYIDENTRY = _GRAPHDEBUGINFO.nested_types_by_name['TracesByIdEntry']
_GRAPHDEBUGINFO_TRACESENTRY = _GRAPHDEBUGINFO.nested_types_by_name['TracesEntry']
_GRAPHDEBUGINFO_NAMETOTRACEIDENTRY = _GRAPHDEBUGINFO.nested_types_by_name['NameToTraceIdEntry']
GraphDebugInfo = _reflection.GeneratedProtocolMessageType('GraphDebugInfo', (_message.Message,), {
'FileLineCol' : _reflection.GeneratedProtocolMessageType('FileLineCol', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_FILELINECOL,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.FileLineCol)
})
,
'StackTrace' : _reflection.GeneratedProtocolMessageType('StackTrace', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_STACKTRACE,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.StackTrace)
})
,
'FramesByIdEntry' : _reflection.GeneratedProtocolMessageType('FramesByIdEntry', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_FRAMESBYIDENTRY,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.FramesByIdEntry)
})
,
'TracesByIdEntry' : _reflection.GeneratedProtocolMessageType('TracesByIdEntry', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_TRACESBYIDENTRY,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.TracesByIdEntry)
})
,
'TracesEntry' : _reflection.GeneratedProtocolMessageType('TracesEntry', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_TRACESENTRY,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.TracesEntry)
})
,
'NameToTraceIdEntry' : _reflection.GeneratedProtocolMessageType('NameToTraceIdEntry', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEBUGINFO_NAMETOTRACEIDENTRY,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo.NameToTraceIdEntry)
})
,
'DESCRIPTOR' : _GRAPHDEBUGINFO,
'__module__' : 'tensorboard.compat.proto.graph_debug_info_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDebugInfo)
})
_sym_db.RegisterMessage(GraphDebugInfo)
_sym_db.RegisterMessage(GraphDebugInfo.FileLineCol)
_sym_db.RegisterMessage(GraphDebugInfo.StackTrace)
_sym_db.RegisterMessage(GraphDebugInfo.FramesByIdEntry)
_sym_db.RegisterMessage(GraphDebugInfo.TracesByIdEntry)
_sym_db.RegisterMessage(GraphDebugInfo.TracesEntry)
_sym_db.RegisterMessage(GraphDebugInfo.NameToTraceIdEntry)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\024GraphDebugInfoProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_GRAPHDEBUGINFO_STACKTRACE.fields_by_name['frame_id']._options = None
_GRAPHDEBUGINFO_STACKTRACE.fields_by_name['frame_id']._serialized_options = b'\020\001'
_GRAPHDEBUGINFO_FRAMESBYIDENTRY._options = None
_GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_options = b'8\001'
_GRAPHDEBUGINFO_TRACESBYIDENTRY._options = None
_GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_options = b'8\001'
_GRAPHDEBUGINFO_TRACESENTRY._options = None
_GRAPHDEBUGINFO_TRACESENTRY._serialized_options = b'8\001'
_GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._options = None
_GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_options = b'8\001'
_GRAPHDEBUGINFO._serialized_start=65
_GRAPHDEBUGINFO._serialized_end=876
_GRAPHDEBUGINFO_FILELINECOL._serialized_start=363
_GRAPHDEBUGINFO_FILELINECOL._serialized_end=451
_GRAPHDEBUGINFO_STACKTRACE._serialized_start=453
_GRAPHDEBUGINFO_STACKTRACE._serialized_end=552
_GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_start=554
_GRAPHDEBUGINFO_FRAMESBYIDENTRY._serialized_end=644
_GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_start=646
_GRAPHDEBUGINFO_TRACESBYIDENTRY._serialized_end=735
_GRAPHDEBUGINFO_TRACESENTRY._serialized_start=737
_GRAPHDEBUGINFO_TRACESENTRY._serialized_end=822
_GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_start=824
_GRAPHDEBUGINFO_NAMETOTRACEIDENTRY._serialized_end=876
# @@protoc_insertion_point(module_scope)
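
A small illustrative sketch of the GraphDebugInfo classes registered above; the field names (files, frames_by_id, traces_by_id, name_to_trace_id) come from the serialized descriptor in this module, while the file name, node name, and ids are made up.

from tensorboard.compat.proto import graph_debug_info_pb2

info = graph_debug_info_pb2.GraphDebugInfo()
info.files.append("model.py")                  # file table referenced by index
frame = info.frames_by_id[1]                   # map<fixed64, FileLineCol>; entry auto-created
frame.file_index = 0
frame.line = 42
frame.func = "build_graph"
trace = info.traces_by_id[7]                   # map<fixed64, StackTrace>
trace.frame_id.append(1)                       # refers back to frames_by_id
info.name_to_trace_id["dense/MatMul"] = 7      # node name -> trace id
print(info.traces_by_id[7])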

View File

@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/graph.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import function_pb2 as tensorboard_dot_compat_dot_proto_dot_function__pb2
from tensorboard.compat.proto import graph_debug_info_pb2 as tensorboard_dot_compat_dot_proto_dot_graph__debug__info__pb2
from tensorboard.compat.proto import node_def_pb2 as tensorboard_dot_compat_dot_proto_dot_node__def__pb2
from tensorboard.compat.proto import versions_pb2 as tensorboard_dot_compat_dot_proto_dot_versions__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$tensorboard/compat/proto/graph.proto\x12\x0btensorboard\x1a\'tensorboard/compat/proto/function.proto\x1a/tensorboard/compat/proto/graph_debug_info.proto\x1a\'tensorboard/compat/proto/node_def.proto\x1a\'tensorboard/compat/proto/versions.proto\"\xd1\x01\n\x08GraphDef\x12\"\n\x04node\x18\x01 \x03(\x0b\x32\x14.tensorboard.NodeDef\x12)\n\x08versions\x18\x04 \x01(\x0b\x32\x17.tensorboard.VersionDef\x12\x13\n\x07version\x18\x03 \x01(\x05\x42\x02\x18\x01\x12\x30\n\x07library\x18\x02 \x01(\x0b\x32\x1f.tensorboard.FunctionDefLibrary\x12/\n\ndebug_info\x18\x05 \x01(\x0b\x32\x1b.tensorboard.GraphDebugInfoBz\n\x18org.tensorflow.frameworkB\x0bGraphProtosP\x01ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/graph_go_proto\xf8\x01\x01\x62\x06proto3')
_GRAPHDEF = DESCRIPTOR.message_types_by_name['GraphDef']
GraphDef = _reflection.GeneratedProtocolMessageType('GraphDef', (_message.Message,), {
'DESCRIPTOR' : _GRAPHDEF,
'__module__' : 'tensorboard.compat.proto.graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.GraphDef)
})
_sym_db.RegisterMessage(GraphDef)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\013GraphProtosP\001ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/graph_go_proto\370\001\001'
_GRAPHDEF.fields_by_name['version']._options = None
_GRAPHDEF.fields_by_name['version']._serialized_options = b'\030\001'
_GRAPHDEF._serialized_start=226
_GRAPHDEF._serialized_end=435
# @@protoc_insertion_point(module_scope)
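
A hedged sketch of building a GraphDef with the class generated above. The node names, ops, and producer version are invented; only the field names (node, versions) are taken from the descriptor in this module.

from tensorboard.compat.proto import graph_pb2
from tensorboard.compat.proto import types_pb2

g = graph_pb2.GraphDef()
n = g.node.add(name="x", op="Placeholder")          # repeated NodeDef
n.attr["dtype"].type = types_pb2.DT_FLOAT           # map<string, AttrValue>
g.node.add(name="y", op="Identity", input=["x"])
g.versions.producer = 1234                          # VersionDef; arbitrary example value
data = g.SerializeToString()
print(graph_pb2.GraphDef.FromString(data).node[0].name)  # "x"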

View File

@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/histogram.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(tensorboard/compat/proto/histogram.proto\x12\x0btensorboard\"\x87\x01\n\x0eHistogramProto\x12\x0b\n\x03min\x18\x01 \x01(\x01\x12\x0b\n\x03max\x18\x02 \x01(\x01\x12\x0b\n\x03num\x18\x03 \x01(\x01\x12\x0b\n\x03sum\x18\x04 \x01(\x01\x12\x13\n\x0bsum_squares\x18\x05 \x01(\x01\x12\x18\n\x0c\x62ucket_limit\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x12\n\x06\x62ucket\x18\x07 \x03(\x01\x42\x02\x10\x01\x42\\\n\x18org.tensorflow.frameworkP\x01Z;github.com/google/tsl/tsl/go/core/protobuf/summary_go_proto\xf8\x01\x01\x62\x06proto3')
_HISTOGRAMPROTO = DESCRIPTOR.message_types_by_name['HistogramProto']
HistogramProto = _reflection.GeneratedProtocolMessageType('HistogramProto', (_message.Message,), {
'DESCRIPTOR' : _HISTOGRAMPROTO,
'__module__' : 'tensorboard.compat.proto.histogram_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.HistogramProto)
})
_sym_db.RegisterMessage(HistogramProto)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkP\001Z;github.com/google/tsl/tsl/go/core/protobuf/summary_go_proto\370\001\001'
_HISTOGRAMPROTO.fields_by_name['bucket_limit']._options = None
_HISTOGRAMPROTO.fields_by_name['bucket_limit']._serialized_options = b'\020\001'
_HISTOGRAMPROTO.fields_by_name['bucket']._options = None
_HISTOGRAMPROTO.fields_by_name['bucket']._serialized_options = b'\020\001'
_HISTOGRAMPROTO._serialized_start=58
_HISTOGRAMPROTO._serialized_end=193
# @@protoc_insertion_point(module_scope)
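
A minimal sketch of filling the HistogramProto generated above; the field names match the descriptor in this module and the numbers are arbitrary example data.

from tensorboard.compat.proto.histogram_pb2 import HistogramProto

values = [0.1, 0.4, 0.4, 2.5]
hist = HistogramProto(
    min=min(values),
    max=max(values),
    num=len(values),
    sum=sum(values),
    sum_squares=sum(v * v for v in values),
)
hist.bucket_limit.extend([0.5, 1.0, float("inf")])   # right edges of the buckets
hist.bucket.extend([3, 0, 1])                        # counts per bucket
print(hist.SerializeToString()[:16])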

View File

@ -0,0 +1,254 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/meta_graph.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from tensorboard.compat.proto import graph_pb2 as tensorboard_dot_compat_dot_proto_dot_graph__pb2
from tensorboard.compat.proto import op_def_pb2 as tensorboard_dot_compat_dot_proto_dot_op__def__pb2
from tensorboard.compat.proto import tensor_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
from tensorboard.compat.proto import saved_object_graph_pb2 as tensorboard_dot_compat_dot_proto_dot_saved__object__graph__pb2
from tensorboard.compat.proto import saver_pb2 as tensorboard_dot_compat_dot_proto_dot_saver__pb2
from tensorboard.compat.proto import struct_pb2 as tensorboard_dot_compat_dot_proto_dot_struct__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)tensorboard/compat/proto/meta_graph.proto\x12\x0btensorboard\x1a\x19google/protobuf/any.proto\x1a$tensorboard/compat/proto/graph.proto\x1a%tensorboard/compat/proto/op_def.proto\x1a%tensorboard/compat/proto/tensor.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\x1a\x31tensorboard/compat/proto/saved_object_graph.proto\x1a$tensorboard/compat/proto/saver.proto\x1a%tensorboard/compat/proto/struct.proto\"\xb3\x07\n\x0cMetaGraphDef\x12<\n\rmeta_info_def\x18\x01 \x01(\x0b\x32%.tensorboard.MetaGraphDef.MetaInfoDef\x12(\n\tgraph_def\x18\x02 \x01(\x0b\x32\x15.tensorboard.GraphDef\x12(\n\tsaver_def\x18\x03 \x01(\x0b\x32\x15.tensorboard.SaverDef\x12\x44\n\x0e\x63ollection_def\x18\x04 \x03(\x0b\x32,.tensorboard.MetaGraphDef.CollectionDefEntry\x12\x42\n\rsignature_def\x18\x05 \x03(\x0b\x32+.tensorboard.MetaGraphDef.SignatureDefEntry\x12\x31\n\x0e\x61sset_file_def\x18\x06 \x03(\x0b\x32\x19.tensorboard.AssetFileDef\x12\x37\n\x10object_graph_def\x18\x07 \x01(\x0b\x32\x1d.tensorboard.SavedObjectGraph\x1a\xf8\x02\n\x0bMetaInfoDef\x12\x1a\n\x12meta_graph_version\x18\x01 \x01(\t\x12-\n\x10stripped_op_list\x18\x02 \x01(\x0b\x32\x13.tensorboard.OpList\x12&\n\x08\x61ny_info\x18\x03 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x0c\n\x04tags\x18\x04 \x03(\t\x12\x1a\n\x12tensorflow_version\x18\x05 \x01(\t\x12\x1e\n\x16tensorflow_git_version\x18\x06 \x01(\t\x12\x1e\n\x16stripped_default_attrs\x18\x07 \x01(\x08\x12T\n\x10\x66unction_aliases\x18\x08 \x03(\x0b\x32:.tensorboard.MetaGraphDef.MetaInfoDef.FunctionAliasesEntry\x1a\x36\n\x14\x46unctionAliasesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\x1aP\n\x12\x43ollectionDefEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12)\n\x05value\x18\x02 \x01(\x0b\x32\x1a.tensorboard.CollectionDef:\x02\x38\x01\x1aN\n\x11SignatureDefEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x19.tensorboard.SignatureDef:\x02\x38\x01\"\xe4\x03\n\rCollectionDef\x12\x38\n\tnode_list\x18\x01 \x01(\x0b\x32#.tensorboard.CollectionDef.NodeListH\x00\x12:\n\nbytes_list\x18\x02 \x01(\x0b\x32$.tensorboard.CollectionDef.BytesListH\x00\x12:\n\nint64_list\x18\x03 \x01(\x0b\x32$.tensorboard.CollectionDef.Int64ListH\x00\x12:\n\nfloat_list\x18\x04 \x01(\x0b\x32$.tensorboard.CollectionDef.FloatListH\x00\x12\x36\n\x08\x61ny_list\x18\x05 \x01(\x0b\x32\".tensorboard.CollectionDef.AnyListH\x00\x1a\x19\n\x08NodeList\x12\r\n\x05value\x18\x01 \x03(\t\x1a\x1a\n\tBytesList\x12\r\n\x05value\x18\x01 \x03(\x0c\x1a\x1e\n\tInt64List\x12\x11\n\x05value\x18\x01 \x03(\x03\x42\x02\x10\x01\x1a\x1e\n\tFloatList\x12\x11\n\x05value\x18\x01 \x03(\x02\x42\x02\x10\x01\x1a.\n\x07\x41nyList\x12#\n\x05value\x18\x01 \x03(\x0b\x32\x14.google.protobuf.AnyB\x06\n\x04kind\"\xd7\x03\n\nTensorInfo\x12\x0e\n\x04name\x18\x01 \x01(\tH\x00\x12\x37\n\ncoo_sparse\x18\x04 \x01(\x0b\x32!.tensorboard.TensorInfo.CooSparseH\x00\x12\x43\n\x10\x63omposite_tensor\x18\x05 \x01(\x0b\x32\'.tensorboard.TensorInfo.CompositeTensorH\x00\x12$\n\x05\x64type\x18\x02 \x01(\x0e\x32\x15.tensorboard.DataType\x12\x33\n\x0ctensor_shape\x18\x03 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x1a\x65\n\tCooSparse\x12\x1a\n\x12values_tensor_name\x18\x01 \x01(\t\x12\x1b\n\x13indices_tensor_name\x18\x02 \x01(\t\x12\x1f\n\x17\x64\x65nse_shape_tensor_name\x18\x03 \x01(\t\x1am\n\x0f\x43ompositeTensor\x12-\n\ttype_spec\x18\x01 \x01(\x0b\x32\x1a.tensorboard.TypeSpecProto\x12+\n\ncomponents\x18\x02 
\x03(\x0b\x32\x17.tensorboard.TensorInfoB\n\n\x08\x65ncoding\"\xaa\x03\n\x0cSignatureDef\x12\x35\n\x06inputs\x18\x01 \x03(\x0b\x32%.tensorboard.SignatureDef.InputsEntry\x12\x37\n\x07outputs\x18\x02 \x03(\x0b\x32&.tensorboard.SignatureDef.OutputsEntry\x12\x13\n\x0bmethod_name\x18\x03 \x01(\t\x12\x39\n\x08\x64\x65\x66\x61ults\x18\x04 \x03(\x0b\x32\'.tensorboard.SignatureDef.DefaultsEntry\x1a\x46\n\x0bInputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tensorboard.TensorInfo:\x02\x38\x01\x1aG\n\x0cOutputsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12&\n\x05value\x18\x02 \x01(\x0b\x32\x17.tensorboard.TensorInfo:\x02\x38\x01\x1aI\n\rDefaultsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\'\n\x05value\x18\x02 \x01(\x0b\x32\x18.tensorboard.TensorProto:\x02\x38\x01\"N\n\x0c\x41ssetFileDef\x12,\n\x0btensor_info\x18\x01 \x01(\x0b\x32\x17.tensorboard.TensorInfo\x12\x10\n\x08\x66ilename\x18\x02 \x01(\tB\x87\x01\n\x18org.tensorflow.frameworkB\x0fMetaGraphProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_METAGRAPHDEF = DESCRIPTOR.message_types_by_name['MetaGraphDef']
_METAGRAPHDEF_METAINFODEF = _METAGRAPHDEF.nested_types_by_name['MetaInfoDef']
_METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY = _METAGRAPHDEF_METAINFODEF.nested_types_by_name['FunctionAliasesEntry']
_METAGRAPHDEF_COLLECTIONDEFENTRY = _METAGRAPHDEF.nested_types_by_name['CollectionDefEntry']
_METAGRAPHDEF_SIGNATUREDEFENTRY = _METAGRAPHDEF.nested_types_by_name['SignatureDefEntry']
_COLLECTIONDEF = DESCRIPTOR.message_types_by_name['CollectionDef']
_COLLECTIONDEF_NODELIST = _COLLECTIONDEF.nested_types_by_name['NodeList']
_COLLECTIONDEF_BYTESLIST = _COLLECTIONDEF.nested_types_by_name['BytesList']
_COLLECTIONDEF_INT64LIST = _COLLECTIONDEF.nested_types_by_name['Int64List']
_COLLECTIONDEF_FLOATLIST = _COLLECTIONDEF.nested_types_by_name['FloatList']
_COLLECTIONDEF_ANYLIST = _COLLECTIONDEF.nested_types_by_name['AnyList']
_TENSORINFO = DESCRIPTOR.message_types_by_name['TensorInfo']
_TENSORINFO_COOSPARSE = _TENSORINFO.nested_types_by_name['CooSparse']
_TENSORINFO_COMPOSITETENSOR = _TENSORINFO.nested_types_by_name['CompositeTensor']
_SIGNATUREDEF = DESCRIPTOR.message_types_by_name['SignatureDef']
_SIGNATUREDEF_INPUTSENTRY = _SIGNATUREDEF.nested_types_by_name['InputsEntry']
_SIGNATUREDEF_OUTPUTSENTRY = _SIGNATUREDEF.nested_types_by_name['OutputsEntry']
_SIGNATUREDEF_DEFAULTSENTRY = _SIGNATUREDEF.nested_types_by_name['DefaultsEntry']
_ASSETFILEDEF = DESCRIPTOR.message_types_by_name['AssetFileDef']
MetaGraphDef = _reflection.GeneratedProtocolMessageType('MetaGraphDef', (_message.Message,), {
'MetaInfoDef' : _reflection.GeneratedProtocolMessageType('MetaInfoDef', (_message.Message,), {
'FunctionAliasesEntry' : _reflection.GeneratedProtocolMessageType('FunctionAliasesEntry', (_message.Message,), {
'DESCRIPTOR' : _METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MetaGraphDef.MetaInfoDef.FunctionAliasesEntry)
})
,
'DESCRIPTOR' : _METAGRAPHDEF_METAINFODEF,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MetaGraphDef.MetaInfoDef)
})
,
'CollectionDefEntry' : _reflection.GeneratedProtocolMessageType('CollectionDefEntry', (_message.Message,), {
'DESCRIPTOR' : _METAGRAPHDEF_COLLECTIONDEFENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MetaGraphDef.CollectionDefEntry)
})
,
'SignatureDefEntry' : _reflection.GeneratedProtocolMessageType('SignatureDefEntry', (_message.Message,), {
'DESCRIPTOR' : _METAGRAPHDEF_SIGNATUREDEFENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MetaGraphDef.SignatureDefEntry)
})
,
'DESCRIPTOR' : _METAGRAPHDEF,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MetaGraphDef)
})
_sym_db.RegisterMessage(MetaGraphDef)
_sym_db.RegisterMessage(MetaGraphDef.MetaInfoDef)
_sym_db.RegisterMessage(MetaGraphDef.MetaInfoDef.FunctionAliasesEntry)
_sym_db.RegisterMessage(MetaGraphDef.CollectionDefEntry)
_sym_db.RegisterMessage(MetaGraphDef.SignatureDefEntry)
CollectionDef = _reflection.GeneratedProtocolMessageType('CollectionDef', (_message.Message,), {
'NodeList' : _reflection.GeneratedProtocolMessageType('NodeList', (_message.Message,), {
'DESCRIPTOR' : _COLLECTIONDEF_NODELIST,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef.NodeList)
})
,
'BytesList' : _reflection.GeneratedProtocolMessageType('BytesList', (_message.Message,), {
'DESCRIPTOR' : _COLLECTIONDEF_BYTESLIST,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef.BytesList)
})
,
'Int64List' : _reflection.GeneratedProtocolMessageType('Int64List', (_message.Message,), {
'DESCRIPTOR' : _COLLECTIONDEF_INT64LIST,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef.Int64List)
})
,
'FloatList' : _reflection.GeneratedProtocolMessageType('FloatList', (_message.Message,), {
'DESCRIPTOR' : _COLLECTIONDEF_FLOATLIST,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef.FloatList)
})
,
'AnyList' : _reflection.GeneratedProtocolMessageType('AnyList', (_message.Message,), {
'DESCRIPTOR' : _COLLECTIONDEF_ANYLIST,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef.AnyList)
})
,
'DESCRIPTOR' : _COLLECTIONDEF,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CollectionDef)
})
_sym_db.RegisterMessage(CollectionDef)
_sym_db.RegisterMessage(CollectionDef.NodeList)
_sym_db.RegisterMessage(CollectionDef.BytesList)
_sym_db.RegisterMessage(CollectionDef.Int64List)
_sym_db.RegisterMessage(CollectionDef.FloatList)
_sym_db.RegisterMessage(CollectionDef.AnyList)
TensorInfo = _reflection.GeneratedProtocolMessageType('TensorInfo', (_message.Message,), {
'CooSparse' : _reflection.GeneratedProtocolMessageType('CooSparse', (_message.Message,), {
'DESCRIPTOR' : _TENSORINFO_COOSPARSE,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorInfo.CooSparse)
})
,
'CompositeTensor' : _reflection.GeneratedProtocolMessageType('CompositeTensor', (_message.Message,), {
'DESCRIPTOR' : _TENSORINFO_COMPOSITETENSOR,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorInfo.CompositeTensor)
})
,
'DESCRIPTOR' : _TENSORINFO,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorInfo)
})
_sym_db.RegisterMessage(TensorInfo)
_sym_db.RegisterMessage(TensorInfo.CooSparse)
_sym_db.RegisterMessage(TensorInfo.CompositeTensor)
SignatureDef = _reflection.GeneratedProtocolMessageType('SignatureDef', (_message.Message,), {
'InputsEntry' : _reflection.GeneratedProtocolMessageType('InputsEntry', (_message.Message,), {
'DESCRIPTOR' : _SIGNATUREDEF_INPUTSENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SignatureDef.InputsEntry)
})
,
'OutputsEntry' : _reflection.GeneratedProtocolMessageType('OutputsEntry', (_message.Message,), {
'DESCRIPTOR' : _SIGNATUREDEF_OUTPUTSENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SignatureDef.OutputsEntry)
})
,
'DefaultsEntry' : _reflection.GeneratedProtocolMessageType('DefaultsEntry', (_message.Message,), {
'DESCRIPTOR' : _SIGNATUREDEF_DEFAULTSENTRY,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SignatureDef.DefaultsEntry)
})
,
'DESCRIPTOR' : _SIGNATUREDEF,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SignatureDef)
})
_sym_db.RegisterMessage(SignatureDef)
_sym_db.RegisterMessage(SignatureDef.InputsEntry)
_sym_db.RegisterMessage(SignatureDef.OutputsEntry)
_sym_db.RegisterMessage(SignatureDef.DefaultsEntry)
AssetFileDef = _reflection.GeneratedProtocolMessageType('AssetFileDef', (_message.Message,), {
'DESCRIPTOR' : _ASSETFILEDEF,
'__module__' : 'tensorboard.compat.proto.meta_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AssetFileDef)
})
_sym_db.RegisterMessage(AssetFileDef)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\017MetaGraphProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._options = None
_METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_options = b'8\001'
_METAGRAPHDEF_COLLECTIONDEFENTRY._options = None
_METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_options = b'8\001'
_METAGRAPHDEF_SIGNATUREDEFENTRY._options = None
_METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_options = b'8\001'
_COLLECTIONDEF_INT64LIST.fields_by_name['value']._options = None
_COLLECTIONDEF_INT64LIST.fields_by_name['value']._serialized_options = b'\020\001'
_COLLECTIONDEF_FLOATLIST.fields_by_name['value']._options = None
_COLLECTIONDEF_FLOATLIST.fields_by_name['value']._serialized_options = b'\020\001'
_SIGNATUREDEF_INPUTSENTRY._options = None
_SIGNATUREDEF_INPUTSENTRY._serialized_options = b'8\001'
_SIGNATUREDEF_OUTPUTSENTRY._options = None
_SIGNATUREDEF_OUTPUTSENTRY._serialized_options = b'8\001'
_SIGNATUREDEF_DEFAULTSENTRY._options = None
_SIGNATUREDEF_DEFAULTSENTRY._serialized_options = b'8\001'
_METAGRAPHDEF._serialized_start=413
_METAGRAPHDEF._serialized_end=1360
_METAGRAPHDEF_METAINFODEF._serialized_start=822
_METAGRAPHDEF_METAINFODEF._serialized_end=1198
_METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_start=1144
_METAGRAPHDEF_METAINFODEF_FUNCTIONALIASESENTRY._serialized_end=1198
_METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_start=1200
_METAGRAPHDEF_COLLECTIONDEFENTRY._serialized_end=1280
_METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_start=1282
_METAGRAPHDEF_SIGNATUREDEFENTRY._serialized_end=1360
_COLLECTIONDEF._serialized_start=1363
_COLLECTIONDEF._serialized_end=1847
_COLLECTIONDEF_NODELIST._serialized_start=1674
_COLLECTIONDEF_NODELIST._serialized_end=1699
_COLLECTIONDEF_BYTESLIST._serialized_start=1701
_COLLECTIONDEF_BYTESLIST._serialized_end=1727
_COLLECTIONDEF_INT64LIST._serialized_start=1729
_COLLECTIONDEF_INT64LIST._serialized_end=1759
_COLLECTIONDEF_FLOATLIST._serialized_start=1761
_COLLECTIONDEF_FLOATLIST._serialized_end=1791
_COLLECTIONDEF_ANYLIST._serialized_start=1793
_COLLECTIONDEF_ANYLIST._serialized_end=1839
_TENSORINFO._serialized_start=1850
_TENSORINFO._serialized_end=2321
_TENSORINFO_COOSPARSE._serialized_start=2097
_TENSORINFO_COOSPARSE._serialized_end=2198
_TENSORINFO_COMPOSITETENSOR._serialized_start=2200
_TENSORINFO_COMPOSITETENSOR._serialized_end=2309
_SIGNATUREDEF._serialized_start=2324
_SIGNATUREDEF._serialized_end=2750
_SIGNATUREDEF_INPUTSENTRY._serialized_start=2532
_SIGNATUREDEF_INPUTSENTRY._serialized_end=2602
_SIGNATUREDEF_OUTPUTSENTRY._serialized_start=2604
_SIGNATUREDEF_OUTPUTSENTRY._serialized_end=2675
_SIGNATUREDEF_DEFAULTSENTRY._serialized_start=2677
_SIGNATUREDEF_DEFAULTSENTRY._serialized_end=2750
_ASSETFILEDEF._serialized_start=2752
_ASSETFILEDEF._serialized_end=2830
# @@protoc_insertion_point(module_scope)
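
An illustrative sketch of the MetaGraphDef/SignatureDef/TensorInfo classes registered above. The signature key "serving_default", the tag "serve", and the tensor name "x:0" are example values, not anything mandated by the proto.

from tensorboard.compat.proto import meta_graph_pb2
from tensorboard.compat.proto import types_pb2

mg = meta_graph_pb2.MetaGraphDef()
mg.meta_info_def.tags.append("serve")                    # repeated string tags
sig = mg.signature_def["serving_default"]                # map<string, SignatureDef>
sig.method_name = "tensorflow/serving/predict"           # example method name
inp = sig.inputs["x"]                                    # map<string, TensorInfo>
inp.name = "x:0"
inp.dtype = types_pb2.DT_FLOAT
inp.tensor_shape.dim.add(size=-1)                        # unknown batch dimension
print(list(mg.signature_def.keys()))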

View File

@ -0,0 +1,61 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/node_def.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import attr_value_pb2 as tensorboard_dot_compat_dot_proto_dot_attr__value__pb2
from tensorboard.compat.proto import full_type_pb2 as tensorboard_dot_compat_dot_proto_dot_full__type__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'tensorboard/compat/proto/node_def.proto\x12\x0btensorboard\x1a)tensorboard/compat/proto/attr_value.proto\x1a(tensorboard/compat/proto/full_type.proto\"\x8a\x03\n\x07NodeDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\n\n\x02op\x18\x02 \x01(\t\x12\r\n\x05input\x18\x03 \x03(\t\x12\x0e\n\x06\x64\x65vice\x18\x04 \x01(\t\x12,\n\x04\x61ttr\x18\x05 \x03(\x0b\x32\x1e.tensorboard.NodeDef.AttrEntry\x12K\n\x17\x65xperimental_debug_info\x18\x06 \x01(\x0b\x32*.tensorboard.NodeDef.ExperimentalDebugInfo\x12\x33\n\x11\x65xperimental_type\x18\x07 \x01(\x0b\x32\x18.tensorboard.FullTypeDef\x1a\x43\n\tAttrEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\x1aQ\n\x15\x45xperimentalDebugInfo\x12\x1b\n\x13original_node_names\x18\x01 \x03(\t\x12\x1b\n\x13original_func_names\x18\x02 \x03(\tB{\n\x18org.tensorflow.frameworkB\tNodeProtoP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/node_def_go_proto\xf8\x01\x01\x62\x06proto3')
_NODEDEF = DESCRIPTOR.message_types_by_name['NodeDef']
_NODEDEF_ATTRENTRY = _NODEDEF.nested_types_by_name['AttrEntry']
_NODEDEF_EXPERIMENTALDEBUGINFO = _NODEDEF.nested_types_by_name['ExperimentalDebugInfo']
NodeDef = _reflection.GeneratedProtocolMessageType('NodeDef', (_message.Message,), {
'AttrEntry' : _reflection.GeneratedProtocolMessageType('AttrEntry', (_message.Message,), {
'DESCRIPTOR' : _NODEDEF_ATTRENTRY,
'__module__' : 'tensorboard.compat.proto.node_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NodeDef.AttrEntry)
})
,
'ExperimentalDebugInfo' : _reflection.GeneratedProtocolMessageType('ExperimentalDebugInfo', (_message.Message,), {
'DESCRIPTOR' : _NODEDEF_EXPERIMENTALDEBUGINFO,
'__module__' : 'tensorboard.compat.proto.node_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NodeDef.ExperimentalDebugInfo)
})
,
'DESCRIPTOR' : _NODEDEF,
'__module__' : 'tensorboard.compat.proto.node_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NodeDef)
})
_sym_db.RegisterMessage(NodeDef)
_sym_db.RegisterMessage(NodeDef.AttrEntry)
_sym_db.RegisterMessage(NodeDef.ExperimentalDebugInfo)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\tNodeProtoP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/node_def_go_proto\370\001\001'
_NODEDEF_ATTRENTRY._options = None
_NODEDEF_ATTRENTRY._serialized_options = b'8\001'
_NODEDEF._serialized_start=142
_NODEDEF._serialized_end=536
_NODEDEF_ATTRENTRY._serialized_start=386
_NODEDEF_ATTRENTRY._serialized_end=453
_NODEDEF_EXPERIMENTALDEBUGINFO._serialized_start=455
_NODEDEF_EXPERIMENTALDEBUGINFO._serialized_end=536
# @@protoc_insertion_point(module_scope)
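
A short sketch of the NodeDef class registered above; the op, inputs, and attribute values are invented for illustration, while the field names follow the descriptor in this module.

from tensorboard.compat.proto import node_def_pb2
from tensorboard.compat.proto import types_pb2

node = node_def_pb2.NodeDef(
    name="dense/MatMul",
    op="MatMul",
    input=["dense/kernel", "flatten/Reshape"],
    device="/device:CPU:0",
)
node.attr["T"].type = types_pb2.DT_FLOAT                  # map<string, AttrValue>
node.attr["transpose_a"].b = False
node.experimental_debug_info.original_node_names.append("dense/MatMul")
print(node.attr["T"].type == types_pb2.DT_FLOAT)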

View File

@ -0,0 +1,81 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/op_def.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import attr_value_pb2 as tensorboard_dot_compat_dot_proto_dot_attr__value__pb2
from tensorboard.compat.proto import full_type_pb2 as tensorboard_dot_compat_dot_proto_dot_full__type__pb2
from tensorboard.compat.proto import resource_handle_pb2 as tensorboard_dot_compat_dot_proto_dot_resource__handle__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%tensorboard/compat/proto/op_def.proto\x12\x0btensorboard\x1a)tensorboard/compat/proto/attr_value.proto\x1a(tensorboard/compat/proto/full_type.proto\x1a.tensorboard/compat/proto/resource_handle.proto\x1a$tensorboard/compat/proto/types.proto\"\xfc\x06\n\x05OpDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\tinput_arg\x18\x02 \x03(\x0b\x32\x19.tensorboard.OpDef.ArgDef\x12-\n\noutput_arg\x18\x03 \x03(\x0b\x32\x19.tensorboard.OpDef.ArgDef\x12\x16\n\x0e\x63ontrol_output\x18\x14 \x03(\t\x12(\n\x04\x61ttr\x18\x04 \x03(\x0b\x32\x1a.tensorboard.OpDef.AttrDef\x12/\n\x0b\x64\x65precation\x18\x08 \x01(\x0b\x32\x1a.tensorboard.OpDeprecation\x12\x0f\n\x07summary\x18\x05 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x06 \x01(\t\x12\x16\n\x0eis_commutative\x18\x12 \x01(\x08\x12\x14\n\x0cis_aggregate\x18\x10 \x01(\x08\x12\x13\n\x0bis_stateful\x18\x11 \x01(\x08\x12\"\n\x1a\x61llows_uninitialized_input\x18\x13 \x01(\x08\x12$\n\x1cis_distributed_communication\x18\x15 \x01(\x08\x1a\x9f\x02\n\x06\x41rgDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12#\n\x04type\x18\x03 \x01(\x0e\x32\x15.tensorboard.DataType\x12\x11\n\ttype_attr\x18\x04 \x01(\t\x12\x13\n\x0bnumber_attr\x18\x05 \x01(\t\x12\x16\n\x0etype_list_attr\x18\x06 \x01(\t\x12\x43\n\x0bhandle_data\x18\x07 \x03(\x0b\x32..tensorboard.ResourceHandleProto.DtypeAndShape\x12\x0e\n\x06is_ref\x18\x10 \x01(\x08\x12\x38\n\x16\x65xperimental_full_type\x18\x11 \x01(\x0b\x32\x18.tensorboard.FullTypeDef\x1a\xbf\x01\n\x07\x41ttrDef\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12-\n\rdefault_value\x18\x03 \x01(\x0b\x32\x16.tensorboard.AttrValue\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t\x12\x13\n\x0bhas_minimum\x18\x05 \x01(\x08\x12\x0f\n\x07minimum\x18\x06 \x01(\x03\x12.\n\x0e\x61llowed_values\x18\x07 \x01(\x0b\x32\x16.tensorboard.AttrValue\"5\n\rOpDeprecation\x12\x0f\n\x07version\x18\x01 \x01(\x05\x12\x13\n\x0b\x65xplanation\x18\x02 \x01(\t\"(\n\x06OpList\x12\x1e\n\x02op\x18\x01 \x03(\x0b\x32\x12.tensorboard.OpDefB{\n\x18org.tensorflow.frameworkB\x0bOpDefProtosP\x01ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/op_def_go_proto\xf8\x01\x01\x62\x06proto3')
_OPDEF = DESCRIPTOR.message_types_by_name['OpDef']
_OPDEF_ARGDEF = _OPDEF.nested_types_by_name['ArgDef']
_OPDEF_ATTRDEF = _OPDEF.nested_types_by_name['AttrDef']
_OPDEPRECATION = DESCRIPTOR.message_types_by_name['OpDeprecation']
_OPLIST = DESCRIPTOR.message_types_by_name['OpList']
OpDef = _reflection.GeneratedProtocolMessageType('OpDef', (_message.Message,), {
'ArgDef' : _reflection.GeneratedProtocolMessageType('ArgDef', (_message.Message,), {
'DESCRIPTOR' : _OPDEF_ARGDEF,
'__module__' : 'tensorboard.compat.proto.op_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.OpDef.ArgDef)
})
,
'AttrDef' : _reflection.GeneratedProtocolMessageType('AttrDef', (_message.Message,), {
'DESCRIPTOR' : _OPDEF_ATTRDEF,
'__module__' : 'tensorboard.compat.proto.op_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.OpDef.AttrDef)
})
,
'DESCRIPTOR' : _OPDEF,
'__module__' : 'tensorboard.compat.proto.op_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.OpDef)
})
_sym_db.RegisterMessage(OpDef)
_sym_db.RegisterMessage(OpDef.ArgDef)
_sym_db.RegisterMessage(OpDef.AttrDef)
OpDeprecation = _reflection.GeneratedProtocolMessageType('OpDeprecation', (_message.Message,), {
'DESCRIPTOR' : _OPDEPRECATION,
'__module__' : 'tensorboard.compat.proto.op_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.OpDeprecation)
})
_sym_db.RegisterMessage(OpDeprecation)
OpList = _reflection.GeneratedProtocolMessageType('OpList', (_message.Message,), {
'DESCRIPTOR' : _OPLIST,
'__module__' : 'tensorboard.compat.proto.op_def_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.OpList)
})
_sym_db.RegisterMessage(OpList)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\013OpDefProtosP\001ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/op_def_go_proto\370\001\001'
_OPDEF._serialized_start=226
_OPDEF._serialized_end=1118
_OPDEF_ARGDEF._serialized_start=637
_OPDEF_ARGDEF._serialized_end=924
_OPDEF_ATTRDEF._serialized_start=927
_OPDEF_ATTRDEF._serialized_end=1118
_OPDEPRECATION._serialized_start=1120
_OPDEPRECATION._serialized_end=1173
_OPLIST._serialized_start=1175
_OPLIST._serialized_end=1215
# @@protoc_insertion_point(module_scope)
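
A hedged sketch constructing an OpDef/OpList by hand with the classes generated above. In practice such a registry comes from TensorFlow itself; the op described here ("MyAddOne") is fictional.

from tensorboard.compat.proto import op_def_pb2
from tensorboard.compat.proto import types_pb2

op = op_def_pb2.OpDef(name="MyAddOne", summary="Adds one to a tensor.")
op.input_arg.add(name="x", type=types_pb2.DT_FLOAT)       # OpDef.ArgDef
op.output_arg.add(name="y", type=types_pb2.DT_FLOAT)
attr = op.attr.add(name="scale", type="float")            # OpDef.AttrDef
attr.default_value.f = 1.0                                # AttrValue default
op.is_stateful = False

ops = op_def_pb2.OpList(op=[op])
print([o.name for o in ops.op])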

View File

@ -0,0 +1,48 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/resource_handle.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.tensorboard/compat/proto/resource_handle.proto\x12\x0btensorboard\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xa8\x02\n\x13ResourceHandleProto\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12\x11\n\tcontainer\x18\x02 \x01(\t\x12\x0c\n\x04name\x18\x03 \x01(\t\x12\x11\n\thash_code\x18\x04 \x01(\x04\x12\x17\n\x0fmaybe_type_name\x18\x05 \x01(\t\x12I\n\x11\x64types_and_shapes\x18\x06 \x03(\x0b\x32..tensorboard.ResourceHandleProto.DtypeAndShape\x1a\x63\n\rDtypeAndShape\x12$\n\x05\x64type\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType\x12,\n\x05shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProtoJ\x04\x08\x07\x10\x08\x42\x87\x01\n\x18org.tensorflow.frameworkB\x0eResourceHandleP\x01ZVgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/resource_handle_go_proto\xf8\x01\x01\x62\x06proto3')
_RESOURCEHANDLEPROTO = DESCRIPTOR.message_types_by_name['ResourceHandleProto']
_RESOURCEHANDLEPROTO_DTYPEANDSHAPE = _RESOURCEHANDLEPROTO.nested_types_by_name['DtypeAndShape']
ResourceHandleProto = _reflection.GeneratedProtocolMessageType('ResourceHandleProto', (_message.Message,), {
'DtypeAndShape' : _reflection.GeneratedProtocolMessageType('DtypeAndShape', (_message.Message,), {
'DESCRIPTOR' : _RESOURCEHANDLEPROTO_DTYPEANDSHAPE,
'__module__' : 'tensorboard.compat.proto.resource_handle_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ResourceHandleProto.DtypeAndShape)
})
,
'DESCRIPTOR' : _RESOURCEHANDLEPROTO,
'__module__' : 'tensorboard.compat.proto.resource_handle_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ResourceHandleProto)
})
_sym_db.RegisterMessage(ResourceHandleProto)
_sym_db.RegisterMessage(ResourceHandleProto.DtypeAndShape)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\016ResourceHandleP\001ZVgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/resource_handle_go_proto\370\001\001'
_RESOURCEHANDLEPROTO._serialized_start=147
_RESOURCEHANDLEPROTO._serialized_end=443
_RESOURCEHANDLEPROTO_DTYPEANDSHAPE._serialized_start=338
_RESOURCEHANDLEPROTO_DTYPEANDSHAPE._serialized_end=437
# @@protoc_insertion_point(module_scope)
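
A minimal example of the ResourceHandleProto generated above; the device, container, and name strings are placeholders.

from tensorboard.compat.proto import resource_handle_pb2
from tensorboard.compat.proto import types_pb2

handle = resource_handle_pb2.ResourceHandleProto(
    device="/device:CPU:0",
    container="localhost",
    name="my_variable",
    hash_code=12345,
)
ds = handle.dtypes_and_shapes.add()          # repeated DtypeAndShape
ds.dtype = types_pb2.DT_FLOAT
ds.shape.dim.add(size=3)
print(handle.maybe_type_name or "<unset>")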

View File

@ -0,0 +1,93 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/rewriter_config.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import attr_value_pb2 as tensorboard_dot_compat_dot_proto_dot_attr__value__pb2
from tensorboard.compat.proto import verifier_config_pb2 as tensorboard_dot_compat_dot_proto_dot_verifier__config__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.tensorboard/compat/proto/rewriter_config.proto\x12\x0btensorboard\x1a)tensorboard/compat/proto/attr_value.proto\x1a.tensorboard/compat/proto/verifier_config.proto\";\n\x13\x41utoParallelOptions\x12\x0e\n\x06\x65nable\x18\x01 \x01(\x08\x12\x14\n\x0cnum_replicas\x18\x02 \x01(\x05\"+\n\x16ScopedAllocatorOptions\x12\x11\n\tenable_op\x18\x01 \x03(\t\"\xb2\x16\n\x0eRewriterConfig\x12\x44\n\x15\x63pu_layout_conversion\x18\x32 \x01(\x0e\x32%.tensorboard.RewriterConfig.CpuLayout\x12<\n\x10layout_optimizer\x18\x01 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12<\n\x10\x63onstant_folding\x18\x03 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12>\n\x12shape_optimization\x18\r \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x35\n\tremapping\x18\x0e \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12G\n\x1b\x63ommon_subgraph_elimination\x18\x18 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x43\n\x17\x61rithmetic_optimization\x18\x07 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x43\n\x17\x64\x65pendency_optimization\x18\x08 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12=\n\x11loop_optimization\x18\t \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x41\n\x15\x66unction_optimization\x18\n \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12:\n\x0e\x64\x65\x62ug_stripper\x18\x0b \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x1d\n\x15\x64isable_model_pruning\x18\x02 \x01(\x08\x12I\n\x1dscoped_allocator_optimization\x18\x0f \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x44\n\x18pin_to_host_optimization\x18\x12 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x43\n\x17implementation_selector\x18\x16 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12@\n\x14\x61uto_mixed_precision\x18\x17 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x44\n\x18\x61uto_mixed_precision_mkl\x18\x19 \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12P\n$auto_mixed_precision_onednn_bfloat16\x18\x1f \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x44\n\x18\x61uto_mixed_precision_cpu\x18\x1d \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12\x1e\n\x16\x64isable_meta_optimizer\x18\x13 \x01(\x08\x12\x1d\n\x15\x64isable_tfg_optimizer\x18 \x01(\x08\x12\x41\n\x15use_plugin_optimizers\x18\x1c \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12P\n$experimental_conditional_code_motion\x18\x1e \x01(\x0e\x32\".tensorboard.RewriterConfig.Toggle\x12P\n\x19meta_optimizer_iterations\x18\x0c \x01(\x0e\x32-.tensorboard.RewriterConfig.NumIterationsType\x12\x17\n\x0fmin_graph_nodes\x18\x11 \x01(\x05\x12;\n3experimental_disable_compressed_tensor_optimization\x18\x1a \x01(\x08\x12;\n3experimental_disable_folding_quantization_emulation\x18\x1b \x01(\x08\x12\x43\n\x13memory_optimization\x18\x04 \x01(\x0e\x32&.tensorboard.RewriterConfig.MemOptType\x12/\n\'memory_optimizer_target_node_name_scope\x18\x06 \x01(\t\x12!\n\x19meta_optimizer_timeout_ms\x18\x14 \x01(\x03\x12\x37\n\rauto_parallel\x18\x05 \x01(\x0b\x32 .tensorboard.AutoParallelOptions\x12 \n\x18\x66\x61il_on_optimizer_errors\x18\x15 \x01(\x08\x12\x42\n\x15scoped_allocator_opts\x18\x10 \x01(\x0b\x32#.tensorboard.ScopedAllocatorOptions\x12\x12\n\noptimizers\x18\x64 \x03(\t\x12L\n\x11\x63ustom_optimizers\x18\xc8\x01 \x03(\x0b\x32\x30.tensorboard.RewriterConfig.CustomGraphOptimizer\x12\x45\n\x1finter_optimizer_verifier_config\x18\xac\x02 \x01(\x0b\x32\x1b.tensorboard.VerifierConfig\x12G\n!post_optimization_verifier_config\x18\xad\x02 
\x01(\x0b\x32\x1b.tensorboard.VerifierConfig\x1a\xcc\x01\n\x14\x43ustomGraphOptimizer\x12\x0c\n\x04name\x18\x01 \x01(\t\x12Y\n\rparameter_map\x18\x02 \x03(\x0b\x32\x42.tensorboard.RewriterConfig.CustomGraphOptimizer.ParameterMapEntry\x1aK\n\x11ParameterMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.tensorboard.AttrValue:\x02\x38\x01\"d\n\x06Toggle\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x06\n\x02ON\x10\x01\x12\x07\n\x03OFF\x10\x02\x12\x0e\n\nAGGRESSIVE\x10\x03\x12\x15\n\x11\x45XPERIMENTAL_MLIR\x10\x04\x12\x15\n\x11\x45XPERIMENTAL_BOTH\x10\x05\"I\n\tCpuLayout\x12\x18\n\x14NO_CONVERSION_ON_CPU\x10\x00\x12\x10\n\x0cNCHW_TO_NHWC\x10\x01\x12\x10\n\x0cNHWC_TO_NCHW\x10\x02\"<\n\x11NumIterationsType\x12\x15\n\x11\x44\x45\x46\x41ULT_NUM_ITERS\x10\x00\x12\x07\n\x03ONE\x10\x01\x12\x07\n\x03TWO\x10\x02\"\x9f\x01\n\nMemOptType\x12\x13\n\x0f\x44\x45\x46\x41ULT_MEM_OPT\x10\x00\x12\x0e\n\nNO_MEM_OPT\x10\x01\x12\n\n\x06MANUAL\x10\x02\x12\x17\n\x13SWAPPING_HEURISTICS\x10\x04\x12\x1c\n\x18RECOMPUTATION_HEURISTICS\x10\x05\x12\x19\n\x15SCHEDULING_HEURISTICS\x10\x06\x12\x0e\n\nHEURISTICS\x10\x03\x42\x8c\x01\n\x18org.tensorflow.frameworkB\x14RewriterConfigProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_AUTOPARALLELOPTIONS = DESCRIPTOR.message_types_by_name['AutoParallelOptions']
_SCOPEDALLOCATOROPTIONS = DESCRIPTOR.message_types_by_name['ScopedAllocatorOptions']
_REWRITERCONFIG = DESCRIPTOR.message_types_by_name['RewriterConfig']
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER = _REWRITERCONFIG.nested_types_by_name['CustomGraphOptimizer']
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY = _REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER.nested_types_by_name['ParameterMapEntry']
_REWRITERCONFIG_TOGGLE = _REWRITERCONFIG.enum_types_by_name['Toggle']
_REWRITERCONFIG_CPULAYOUT = _REWRITERCONFIG.enum_types_by_name['CpuLayout']
_REWRITERCONFIG_NUMITERATIONSTYPE = _REWRITERCONFIG.enum_types_by_name['NumIterationsType']
_REWRITERCONFIG_MEMOPTTYPE = _REWRITERCONFIG.enum_types_by_name['MemOptType']
AutoParallelOptions = _reflection.GeneratedProtocolMessageType('AutoParallelOptions', (_message.Message,), {
'DESCRIPTOR' : _AUTOPARALLELOPTIONS,
'__module__' : 'tensorboard.compat.proto.rewriter_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AutoParallelOptions)
})
_sym_db.RegisterMessage(AutoParallelOptions)
ScopedAllocatorOptions = _reflection.GeneratedProtocolMessageType('ScopedAllocatorOptions', (_message.Message,), {
'DESCRIPTOR' : _SCOPEDALLOCATOROPTIONS,
'__module__' : 'tensorboard.compat.proto.rewriter_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ScopedAllocatorOptions)
})
_sym_db.RegisterMessage(ScopedAllocatorOptions)
RewriterConfig = _reflection.GeneratedProtocolMessageType('RewriterConfig', (_message.Message,), {
'CustomGraphOptimizer' : _reflection.GeneratedProtocolMessageType('CustomGraphOptimizer', (_message.Message,), {
'ParameterMapEntry' : _reflection.GeneratedProtocolMessageType('ParameterMapEntry', (_message.Message,), {
'DESCRIPTOR' : _REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY,
'__module__' : 'tensorboard.compat.proto.rewriter_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RewriterConfig.CustomGraphOptimizer.ParameterMapEntry)
})
,
'DESCRIPTOR' : _REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER,
'__module__' : 'tensorboard.compat.proto.rewriter_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RewriterConfig.CustomGraphOptimizer)
})
,
'DESCRIPTOR' : _REWRITERCONFIG,
'__module__' : 'tensorboard.compat.proto.rewriter_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RewriterConfig)
})
_sym_db.RegisterMessage(RewriterConfig)
_sym_db.RegisterMessage(RewriterConfig.CustomGraphOptimizer)
_sym_db.RegisterMessage(RewriterConfig.CustomGraphOptimizer.ParameterMapEntry)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\024RewriterConfigProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY._options = None
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY._serialized_options = b'8\001'
_AUTOPARALLELOPTIONS._serialized_start=154
_AUTOPARALLELOPTIONS._serialized_end=213
_SCOPEDALLOCATOROPTIONS._serialized_start=215
_SCOPEDALLOCATOROPTIONS._serialized_end=258
_REWRITERCONFIG._serialized_start=261
_REWRITERCONFIG._serialized_end=3127
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER._serialized_start=2522
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER._serialized_end=2726
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY._serialized_start=2651
_REWRITERCONFIG_CUSTOMGRAPHOPTIMIZER_PARAMETERMAPENTRY._serialized_end=2726
_REWRITERCONFIG_TOGGLE._serialized_start=2728
_REWRITERCONFIG_TOGGLE._serialized_end=2828
_REWRITERCONFIG_CPULAYOUT._serialized_start=2830
_REWRITERCONFIG_CPULAYOUT._serialized_end=2903
_REWRITERCONFIG_NUMITERATIONSTYPE._serialized_start=2905
_REWRITERCONFIG_NUMITERATIONSTYPE._serialized_end=2965
_REWRITERCONFIG_MEMOPTTYPE._serialized_start=2968
_REWRITERCONFIG_MEMOPTTYPE._serialized_end=3127
# @@protoc_insertion_point(module_scope)
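
A sketch of toggling a few Grappler options through the RewriterConfig class generated above. The particular combination of settings and the custom optimizer name are arbitrary examples.

from tensorboard.compat.proto import rewriter_config_pb2

cfg = rewriter_config_pb2.RewriterConfig(
    constant_folding=rewriter_config_pb2.RewriterConfig.ON,        # Toggle enum
    layout_optimizer=rewriter_config_pb2.RewriterConfig.OFF,
    disable_model_pruning=True,
    memory_optimization=rewriter_config_pb2.RewriterConfig.NO_MEM_OPT,
)
cfg.auto_parallel.enable = True
cfg.auto_parallel.num_replicas = 2
custom = cfg.custom_optimizers.add(name="ExampleOptimizer")        # hypothetical name
custom.parameter_map["budget"].i = 64                              # AttrValue int field
print(cfg.memory_optimization == rewriter_config_pb2.RewriterConfig.NO_MEM_OPT)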

View File

@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/rpc_options.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*tensorboard/compat/proto/rpc_options.proto\x12\x0btensorboard\"\xd5\x01\n\nRPCOptions\x12$\n\x1cuse_rpc_for_inprocess_master\x18\x01 \x01(\x08\x12\x1d\n\x15\x63ompression_algorithm\x18\x02 \x01(\t\x12\x19\n\x11\x63ompression_level\x18\x03 \x01(\x05\x12\x1a\n\x12\x63\x61\x63he_rpc_response\x18\x04 \x01(\x08\x12*\n\"disable_session_connection_sharing\x18\x05 \x01(\x08\x12\x1f\n\x17num_channels_per_target\x18\x06 \x01(\x05\x42@Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_protob\x06proto3')
_RPCOPTIONS = DESCRIPTOR.message_types_by_name['RPCOptions']
RPCOptions = _reflection.GeneratedProtocolMessageType('RPCOptions', (_message.Message,), {
'DESCRIPTOR' : _RPCOPTIONS,
'__module__' : 'tensorboard.compat.proto.rpc_options_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RPCOptions)
})
_sym_db.RegisterMessage(RPCOptions)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'Z>github.com/google/tsl/tsl/go/protobuf/for_core_protos_go_proto'
_RPCOPTIONS._serialized_start=60
_RPCOPTIONS._serialized_end=273
# @@protoc_insertion_point(module_scope)
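
A small sketch of the RPCOptions message generated above; the settings shown are illustrative, not recommended values.

from tensorboard.compat.proto import rpc_options_pb2

opts = rpc_options_pb2.RPCOptions(
    use_rpc_for_inprocess_master=False,
    compression_algorithm="gzip",
    compression_level=2,
    cache_rpc_response=True,
    num_channels_per_target=4,
)
data = opts.SerializeToString()
print(rpc_options_pb2.RPCOptions.FromString(data).compression_algorithm)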

View File

@ -0,0 +1,193 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/saved_object_graph.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
from tensorboard.compat.proto import variable_pb2 as tensorboard_dot_compat_dot_proto_dot_variable__pb2
from tensorboard.compat.proto import versions_pb2 as tensorboard_dot_compat_dot_proto_dot_versions__pb2
from tensorboard.compat.proto import struct_pb2 as tensorboard_dot_compat_dot_proto_dot_struct__pb2
from tensorboard.compat.proto import trackable_object_graph_pb2 as tensorboard_dot_compat_dot_proto_dot_trackable__object__graph__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n1tensorboard/compat/proto/saved_object_graph.proto\x12\x0btensorboard\x1a\x19google/protobuf/any.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\x1a\'tensorboard/compat/proto/variable.proto\x1a\'tensorboard/compat/proto/versions.proto\x1a%tensorboard/compat/proto/struct.proto\x1a\x35tensorboard/compat/proto/trackable_object_graph.proto\"\xeb\x01\n\x10SavedObjectGraph\x12\'\n\x05nodes\x18\x01 \x03(\x0b\x32\x18.tensorboard.SavedObject\x12P\n\x12\x63oncrete_functions\x18\x02 \x03(\x0b\x32\x34.tensorboard.SavedObjectGraph.ConcreteFunctionsEntry\x1a\\\n\x16\x43oncreteFunctionsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x31\n\x05value\x18\x02 \x01(\x0b\x32\".tensorboard.SavedConcreteFunction:\x02\x38\x01\"\xdd\x07\n\x0bSavedObject\x12S\n\x08\x63hildren\x18\x01 \x03(\x0b\x32\x41.tensorboard.TrackableObjectGraph.TrackableObject.ObjectReference\x12W\n\x0c\x64\x65pendencies\x18\x0f \x03(\x0b\x32\x41.tensorboard.TrackableObjectGraph.TrackableObject.ObjectReference\x12_\n\x0eslot_variables\x18\x03 \x03(\x0b\x32G.tensorboard.TrackableObjectGraph.TrackableObject.SlotVariableReference\x12\x33\n\x0buser_object\x18\x04 \x01(\x0b\x32\x1c.tensorboard.SavedUserObjectH\x00\x12(\n\x05\x61sset\x18\x05 \x01(\x0b\x32\x17.tensorboard.SavedAssetH\x00\x12.\n\x08\x66unction\x18\x06 \x01(\x0b\x32\x1a.tensorboard.SavedFunctionH\x00\x12.\n\x08variable\x18\x07 \x01(\x0b\x32\x1a.tensorboard.SavedVariableH\x00\x12H\n\x16\x62\x61re_concrete_function\x18\x08 \x01(\x0b\x32&.tensorboard.SavedBareConcreteFunctionH\x00\x12.\n\x08\x63onstant\x18\t \x01(\x0b\x32\x1a.tensorboard.SavedConstantH\x00\x12.\n\x08resource\x18\n \x01(\x0b\x32\x1a.tensorboard.SavedResourceH\x00\x12\x36\n\x0f\x63\x61ptured_tensor\x18\x0c \x01(\x0b\x32\x1b.tensorboard.CapturedTensorH\x00\x12G\n\x10saveable_objects\x18\x0b \x03(\x0b\x32-.tensorboard.SavedObject.SaveableObjectsEntry\x12\x17\n\x0fregistered_name\x18\r \x01(\t\x12\x33\n\x15serialized_user_proto\x18\x0e \x01(\x0b\x32\x14.google.protobuf.Any\x12\x18\n\x10registered_saver\x18\x10 \x01(\t\x1aS\n\x14SaveableObjectsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12*\n\x05value\x18\x02 \x01(\x0b\x32\x1b.tensorboard.SaveableObject:\x02\x38\x01\x42\x06\n\x04kindJ\x04\x08\x02\x10\x03R\nattributes\"e\n\x0fSavedUserObject\x12\x12\n\nidentifier\x18\x01 \x01(\t\x12(\n\x07version\x18\x02 \x01(\x0b\x32\x17.tensorboard.VersionDef\x12\x14\n\x08metadata\x18\x03 \x01(\tB\x02\x18\x01\"*\n\nSavedAsset\x12\x1c\n\x14\x61sset_file_def_index\x18\x01 \x01(\x05\"]\n\rSavedFunction\x12\x1a\n\x12\x63oncrete_functions\x18\x01 \x03(\t\x12\x30\n\rfunction_spec\x18\x02 \x01(\x0b\x32\x19.tensorboard.FunctionSpec\"9\n\x0e\x43\x61pturedTensor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x19\n\x11\x63oncrete_function\x18\x02 \x01(\t\"\xaa\x01\n\x15SavedConcreteFunction\x12\x14\n\x0c\x62ound_inputs\x18\x02 \x03(\x05\x12\x43\n\x1d\x63\x61nonicalized_input_signature\x18\x03 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\x12\x36\n\x10output_signature\x18\x04 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\"\xae\x01\n\x19SavedBareConcreteFunction\x12\x1e\n\x16\x63oncrete_function_name\x18\x01 \x01(\t\x12\x19\n\x11\x61rgument_keywords\x18\x02 \x03(\t\x12$\n\x1c\x61llowed_positional_arguments\x18\x03 \x01(\x03\x12\x30\n\rfunction_spec\x18\x04 \x01(\x0b\x32\x19.tensorboard.FunctionSpec\"\"\n\rSavedConstant\x12\x11\n\toperation\x18\x01 \x01(\t\"\xdc\x02\n\rSavedVariable\x12$\n\x05\x64type\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType\x12,\n\x05shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12\x11\n\ttrainable\x18\x03 \x01(\x08\x12=\n\x0fsynchronization\x18\x04 \x01(\x0e\x32$.tensorboard.VariableSynchronization\x12\x35\n\x0b\x61ggregation\x18\x05 \x01(\x0e\x32 .tensorboard.VariableAggregation\x12\x0c\n\x04name\x18\x06 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x07 \x01(\t\x12P\n,experimental_distributed_variable_components\x18\x08 \x03(\x0b\x32\x1a.tensorboard.SavedVariable\"\xfe\x01\n\x0c\x46unctionSpec\x12\x31\n\x0b\x66ullargspec\x18\x01 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\x12\x11\n\tis_method\x18\x02 \x01(\x08\x12\x35\n\x0finput_signature\x18\x05 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\x12\x39\n\x0bjit_compile\x18\x06 \x01(\x0e\x32$.tensorboard.FunctionSpec.JitCompile\"*\n\nJitCompile\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x06\n\x02ON\x10\x01\x12\x07\n\x03OFF\x10\x02J\x04\x08\x03\x10\x04J\x04\x08\x04\x10\x05\"\x1f\n\rSavedResource\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\"A\n\x0eSaveableObject\x12\x15\n\rsave_function\x18\x02 \x01(\x05\x12\x18\n\x10restore_function\x18\x03 \x01(\x05\x42ZZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_SAVEDOBJECTGRAPH = DESCRIPTOR.message_types_by_name['SavedObjectGraph']
_SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY = _SAVEDOBJECTGRAPH.nested_types_by_name['ConcreteFunctionsEntry']
_SAVEDOBJECT = DESCRIPTOR.message_types_by_name['SavedObject']
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY = _SAVEDOBJECT.nested_types_by_name['SaveableObjectsEntry']
_SAVEDUSEROBJECT = DESCRIPTOR.message_types_by_name['SavedUserObject']
_SAVEDASSET = DESCRIPTOR.message_types_by_name['SavedAsset']
_SAVEDFUNCTION = DESCRIPTOR.message_types_by_name['SavedFunction']
_CAPTUREDTENSOR = DESCRIPTOR.message_types_by_name['CapturedTensor']
_SAVEDCONCRETEFUNCTION = DESCRIPTOR.message_types_by_name['SavedConcreteFunction']
_SAVEDBARECONCRETEFUNCTION = DESCRIPTOR.message_types_by_name['SavedBareConcreteFunction']
_SAVEDCONSTANT = DESCRIPTOR.message_types_by_name['SavedConstant']
_SAVEDVARIABLE = DESCRIPTOR.message_types_by_name['SavedVariable']
_FUNCTIONSPEC = DESCRIPTOR.message_types_by_name['FunctionSpec']
_SAVEDRESOURCE = DESCRIPTOR.message_types_by_name['SavedResource']
_SAVEABLEOBJECT = DESCRIPTOR.message_types_by_name['SaveableObject']
_FUNCTIONSPEC_JITCOMPILE = _FUNCTIONSPEC.enum_types_by_name['JitCompile']
SavedObjectGraph = _reflection.GeneratedProtocolMessageType('SavedObjectGraph', (_message.Message,), {
'ConcreteFunctionsEntry' : _reflection.GeneratedProtocolMessageType('ConcreteFunctionsEntry', (_message.Message,), {
'DESCRIPTOR' : _SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedObjectGraph.ConcreteFunctionsEntry)
})
,
'DESCRIPTOR' : _SAVEDOBJECTGRAPH,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedObjectGraph)
})
_sym_db.RegisterMessage(SavedObjectGraph)
_sym_db.RegisterMessage(SavedObjectGraph.ConcreteFunctionsEntry)
SavedObject = _reflection.GeneratedProtocolMessageType('SavedObject', (_message.Message,), {
'SaveableObjectsEntry' : _reflection.GeneratedProtocolMessageType('SaveableObjectsEntry', (_message.Message,), {
'DESCRIPTOR' : _SAVEDOBJECT_SAVEABLEOBJECTSENTRY,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedObject.SaveableObjectsEntry)
})
,
'DESCRIPTOR' : _SAVEDOBJECT,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedObject)
})
_sym_db.RegisterMessage(SavedObject)
_sym_db.RegisterMessage(SavedObject.SaveableObjectsEntry)
SavedUserObject = _reflection.GeneratedProtocolMessageType('SavedUserObject', (_message.Message,), {
'DESCRIPTOR' : _SAVEDUSEROBJECT,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedUserObject)
})
_sym_db.RegisterMessage(SavedUserObject)
SavedAsset = _reflection.GeneratedProtocolMessageType('SavedAsset', (_message.Message,), {
'DESCRIPTOR' : _SAVEDASSET,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedAsset)
})
_sym_db.RegisterMessage(SavedAsset)
SavedFunction = _reflection.GeneratedProtocolMessageType('SavedFunction', (_message.Message,), {
'DESCRIPTOR' : _SAVEDFUNCTION,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedFunction)
})
_sym_db.RegisterMessage(SavedFunction)
CapturedTensor = _reflection.GeneratedProtocolMessageType('CapturedTensor', (_message.Message,), {
'DESCRIPTOR' : _CAPTUREDTENSOR,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.CapturedTensor)
})
_sym_db.RegisterMessage(CapturedTensor)
SavedConcreteFunction = _reflection.GeneratedProtocolMessageType('SavedConcreteFunction', (_message.Message,), {
'DESCRIPTOR' : _SAVEDCONCRETEFUNCTION,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedConcreteFunction)
})
_sym_db.RegisterMessage(SavedConcreteFunction)
SavedBareConcreteFunction = _reflection.GeneratedProtocolMessageType('SavedBareConcreteFunction', (_message.Message,), {
'DESCRIPTOR' : _SAVEDBARECONCRETEFUNCTION,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedBareConcreteFunction)
})
_sym_db.RegisterMessage(SavedBareConcreteFunction)
SavedConstant = _reflection.GeneratedProtocolMessageType('SavedConstant', (_message.Message,), {
'DESCRIPTOR' : _SAVEDCONSTANT,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedConstant)
})
_sym_db.RegisterMessage(SavedConstant)
SavedVariable = _reflection.GeneratedProtocolMessageType('SavedVariable', (_message.Message,), {
'DESCRIPTOR' : _SAVEDVARIABLE,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedVariable)
})
_sym_db.RegisterMessage(SavedVariable)
FunctionSpec = _reflection.GeneratedProtocolMessageType('FunctionSpec', (_message.Message,), {
'DESCRIPTOR' : _FUNCTIONSPEC,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.FunctionSpec)
})
_sym_db.RegisterMessage(FunctionSpec)
SavedResource = _reflection.GeneratedProtocolMessageType('SavedResource', (_message.Message,), {
'DESCRIPTOR' : _SAVEDRESOURCE,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SavedResource)
})
_sym_db.RegisterMessage(SavedResource)
SaveableObject = _reflection.GeneratedProtocolMessageType('SaveableObject', (_message.Message,), {
'DESCRIPTOR' : _SAVEABLEOBJECT,
'__module__' : 'tensorboard.compat.proto.saved_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SaveableObject)
})
_sym_db.RegisterMessage(SaveableObject)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._options = None
_SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_options = b'8\001'
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY._options = None
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_options = b'8\001'
_SAVEDUSEROBJECT.fields_by_name['metadata']._options = None
_SAVEDUSEROBJECT.fields_by_name['metadata']._serialized_options = b'\030\001'
_SAVEDOBJECTGRAPH._serialized_start=353
_SAVEDOBJECTGRAPH._serialized_end=588
_SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_start=496
_SAVEDOBJECTGRAPH_CONCRETEFUNCTIONSENTRY._serialized_end=588
_SAVEDOBJECT._serialized_start=591
_SAVEDOBJECT._serialized_end=1580
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_start=1471
_SAVEDOBJECT_SAVEABLEOBJECTSENTRY._serialized_end=1554
_SAVEDUSEROBJECT._serialized_start=1582
_SAVEDUSEROBJECT._serialized_end=1683
_SAVEDASSET._serialized_start=1685
_SAVEDASSET._serialized_end=1727
_SAVEDFUNCTION._serialized_start=1729
_SAVEDFUNCTION._serialized_end=1822
_CAPTUREDTENSOR._serialized_start=1824
_CAPTUREDTENSOR._serialized_end=1881
_SAVEDCONCRETEFUNCTION._serialized_start=1884
_SAVEDCONCRETEFUNCTION._serialized_end=2054
_SAVEDBARECONCRETEFUNCTION._serialized_start=2057
_SAVEDBARECONCRETEFUNCTION._serialized_end=2231
_SAVEDCONSTANT._serialized_start=2233
_SAVEDCONSTANT._serialized_end=2267
_SAVEDVARIABLE._serialized_start=2270
_SAVEDVARIABLE._serialized_end=2618
_FUNCTIONSPEC._serialized_start=2621
_FUNCTIONSPEC._serialized_end=2875
_FUNCTIONSPEC_JITCOMPILE._serialized_start=2821
_FUNCTIONSPEC_JITCOMPILE._serialized_end=2863
_SAVEDRESOURCE._serialized_start=2877
_SAVEDRESOURCE._serialized_end=2908
_SAVEABLEOBJECT._serialized_start=2910
_SAVEABLEOBJECT._serialized_end=2975
# @@protoc_insertion_point(module_scope)
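A minimal usage sketch for the module above (illustrative only, not protoc output; the identifier string is a placeholder): each SavedObject carries its payload in the "kind" oneof, so readers dispatch on WhichOneof.

from tensorboard.compat.proto import saved_object_graph_pb2

node = saved_object_graph_pb2.SavedObject(
    user_object=saved_object_graph_pb2.SavedUserObject(identifier="trackable")
)
graph = saved_object_graph_pb2.SavedObjectGraph(nodes=[node])
assert graph.nodes[0].WhichOneof("kind") == "user_object"  # which payload node 0 holds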

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/saver.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$tensorboard/compat/proto/saver.proto\x12\x0btensorboard\"\x9f\x02\n\x08SaverDef\x12\x1c\n\x14\x66ilename_tensor_name\x18\x01 \x01(\t\x12\x18\n\x10save_tensor_name\x18\x02 \x01(\t\x12\x17\n\x0frestore_op_name\x18\x03 \x01(\t\x12\x13\n\x0bmax_to_keep\x18\x04 \x01(\x05\x12\x0f\n\x07sharded\x18\x05 \x01(\x08\x12%\n\x1dkeep_checkpoint_every_n_hours\x18\x06 \x01(\x02\x12>\n\x07version\x18\x07 \x01(\x0e\x32-.tensorboard.SaverDef.CheckpointFormatVersion\"5\n\x17\x43heckpointFormatVersion\x12\n\n\x06LEGACY\x10\x00\x12\x06\n\x02V1\x10\x01\x12\x06\n\x02V2\x10\x02\x42~\n\x13org.tensorflow.utilB\x0bSaverProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_SAVERDEF = DESCRIPTOR.message_types_by_name['SaverDef']
_SAVERDEF_CHECKPOINTFORMATVERSION = _SAVERDEF.enum_types_by_name['CheckpointFormatVersion']
SaverDef = _reflection.GeneratedProtocolMessageType('SaverDef', (_message.Message,), {
'DESCRIPTOR' : _SAVERDEF,
'__module__' : 'tensorboard.compat.proto.saver_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SaverDef)
})
_sym_db.RegisterMessage(SaverDef)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\023org.tensorflow.utilB\013SaverProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_SAVERDEF._serialized_start=54
_SAVERDEF._serialized_end=341
_SAVERDEF_CHECKPOINTFORMATVERSION._serialized_start=288
_SAVERDEF_CHECKPOINTFORMATVERSION._serialized_end=341
# @@protoc_insertion_point(module_scope)
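A brief usage sketch for SaverDef (illustrative, not part of the generated file; the tensor and op names are placeholders): fields are set by keyword, nested enum values are exposed on the message class, and the message round-trips through its wire format.

from tensorboard.compat.proto import saver_pb2

saver_def = saver_pb2.SaverDef(
    filename_tensor_name="save/Const:0",   # placeholder tensor name
    restore_op_name="save/restore_all",    # placeholder op name
    max_to_keep=5,
    version=saver_pb2.SaverDef.V2,         # CheckpointFormatVersion enum value
)
assert saver_pb2.SaverDef.FromString(saver_def.SerializeToString()).max_to_keep == 5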

View File

@@ -0,0 +1,116 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/step_stats.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import allocation_description_pb2 as tensorboard_dot_compat_dot_proto_dot_allocation__description__pb2
from tensorboard.compat.proto import tensor_description_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__description__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n)tensorboard/compat/proto/step_stats.proto\x12\x0btensorboard\x1a\x35tensorboard/compat/proto/allocation_description.proto\x1a\x31tensorboard/compat/proto/tensor_description.proto\"=\n\x10\x41llocationRecord\x12\x14\n\x0c\x61lloc_micros\x18\x01 \x01(\x03\x12\x13\n\x0b\x61lloc_bytes\x18\x02 \x01(\x03\"\xc5\x01\n\x13\x41llocatorMemoryUsed\x12\x16\n\x0e\x61llocator_name\x18\x01 \x01(\t\x12\x13\n\x0btotal_bytes\x18\x02 \x01(\x03\x12\x12\n\npeak_bytes\x18\x03 \x01(\x03\x12\x12\n\nlive_bytes\x18\x04 \x01(\x03\x12\x39\n\x12\x61llocation_records\x18\x06 \x03(\x0b\x32\x1d.tensorboard.AllocationRecord\x12\x1e\n\x16\x61llocator_bytes_in_use\x18\x05 \x01(\x03\"V\n\nNodeOutput\x12\x0c\n\x04slot\x18\x01 \x01(\x05\x12:\n\x12tensor_description\x18\x03 \x01(\x0b\x32\x1e.tensorboard.TensorDescription\"\xec\x01\n\x0bMemoryStats\x12\x18\n\x10temp_memory_size\x18\x01 \x01(\x03\x12\x1e\n\x16persistent_memory_size\x18\x03 \x01(\x03\x12#\n\x1bpersistent_tensor_alloc_ids\x18\x05 \x03(\x03\x12#\n\x17\x64\x65vice_temp_memory_size\x18\x02 \x01(\x03\x42\x02\x18\x01\x12)\n\x1d\x64\x65vice_persistent_memory_size\x18\x04 \x01(\x03\x42\x02\x18\x01\x12.\n\"device_persistent_tensor_alloc_ids\x18\x06 \x03(\x03\x42\x02\x18\x01\"\xa2\x04\n\rNodeExecStats\x12\x11\n\tnode_name\x18\x01 \x01(\t\x12\x18\n\x10\x61ll_start_micros\x18\x02 \x01(\x03\x12\x1b\n\x13op_start_rel_micros\x18\x03 \x01(\x03\x12\x19\n\x11op_end_rel_micros\x18\x04 \x01(\x03\x12\x1a\n\x12\x61ll_end_rel_micros\x18\x05 \x01(\x03\x12\x30\n\x06memory\x18\x06 \x03(\x0b\x32 .tensorboard.AllocatorMemoryUsed\x12\'\n\x06output\x18\x07 \x03(\x0b\x32\x17.tensorboard.NodeOutput\x12\x16\n\x0etimeline_label\x18\x08 \x01(\t\x12\x18\n\x10scheduled_micros\x18\t \x01(\x03\x12\x11\n\tthread_id\x18\n \x01(\r\x12=\n\x11referenced_tensor\x18\x0b \x03(\x0b\x32\".tensorboard.AllocationDescription\x12.\n\x0cmemory_stats\x18\x0c \x01(\x0b\x32\x18.tensorboard.MemoryStats\x12\x17\n\x0f\x61ll_start_nanos\x18\r \x01(\x03\x12\x1a\n\x12op_start_rel_nanos\x18\x0e \x01(\x03\x12\x18\n\x10op_end_rel_nanos\x18\x0f \x01(\x03\x12\x19\n\x11\x61ll_end_rel_nanos\x18\x10 \x01(\x03\x12\x17\n\x0fscheduled_nanos\x18\x11 \x01(\x03\"\xca\x01\n\x0f\x44\x65viceStepStats\x12\x0e\n\x06\x64\x65vice\x18\x01 \x01(\t\x12.\n\nnode_stats\x18\x02 \x03(\x0b\x32\x1a.tensorboard.NodeExecStats\x12\x43\n\x0cthread_names\x18\x03 \x03(\x0b\x32-.tensorboard.DeviceStepStats.ThreadNamesEntry\x1a\x32\n\x10ThreadNamesEntry\x12\x0b\n\x03key\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"<\n\tStepStats\x12/\n\tdev_stats\x18\x01 \x03(\x0b\x32\x1c.tensorboard.DeviceStepStatsB\x83\x01\n\x18org.tensorflow.frameworkB\x0fStepStatsProtosP\x01ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/step_stats_go_proto\xf8\x01\x01\x62\x06proto3')
_ALLOCATIONRECORD = DESCRIPTOR.message_types_by_name['AllocationRecord']
_ALLOCATORMEMORYUSED = DESCRIPTOR.message_types_by_name['AllocatorMemoryUsed']
_NODEOUTPUT = DESCRIPTOR.message_types_by_name['NodeOutput']
_MEMORYSTATS = DESCRIPTOR.message_types_by_name['MemoryStats']
_NODEEXECSTATS = DESCRIPTOR.message_types_by_name['NodeExecStats']
_DEVICESTEPSTATS = DESCRIPTOR.message_types_by_name['DeviceStepStats']
_DEVICESTEPSTATS_THREADNAMESENTRY = _DEVICESTEPSTATS.nested_types_by_name['ThreadNamesEntry']
_STEPSTATS = DESCRIPTOR.message_types_by_name['StepStats']
AllocationRecord = _reflection.GeneratedProtocolMessageType('AllocationRecord', (_message.Message,), {
'DESCRIPTOR' : _ALLOCATIONRECORD,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AllocationRecord)
})
_sym_db.RegisterMessage(AllocationRecord)
AllocatorMemoryUsed = _reflection.GeneratedProtocolMessageType('AllocatorMemoryUsed', (_message.Message,), {
'DESCRIPTOR' : _ALLOCATORMEMORYUSED,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.AllocatorMemoryUsed)
})
_sym_db.RegisterMessage(AllocatorMemoryUsed)
NodeOutput = _reflection.GeneratedProtocolMessageType('NodeOutput', (_message.Message,), {
'DESCRIPTOR' : _NODEOUTPUT,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NodeOutput)
})
_sym_db.RegisterMessage(NodeOutput)
MemoryStats = _reflection.GeneratedProtocolMessageType('MemoryStats', (_message.Message,), {
'DESCRIPTOR' : _MEMORYSTATS,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.MemoryStats)
})
_sym_db.RegisterMessage(MemoryStats)
NodeExecStats = _reflection.GeneratedProtocolMessageType('NodeExecStats', (_message.Message,), {
'DESCRIPTOR' : _NODEEXECSTATS,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NodeExecStats)
})
_sym_db.RegisterMessage(NodeExecStats)
DeviceStepStats = _reflection.GeneratedProtocolMessageType('DeviceStepStats', (_message.Message,), {
'ThreadNamesEntry' : _reflection.GeneratedProtocolMessageType('ThreadNamesEntry', (_message.Message,), {
'DESCRIPTOR' : _DEVICESTEPSTATS_THREADNAMESENTRY,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DeviceStepStats.ThreadNamesEntry)
})
,
'DESCRIPTOR' : _DEVICESTEPSTATS,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DeviceStepStats)
})
_sym_db.RegisterMessage(DeviceStepStats)
_sym_db.RegisterMessage(DeviceStepStats.ThreadNamesEntry)
StepStats = _reflection.GeneratedProtocolMessageType('StepStats', (_message.Message,), {
'DESCRIPTOR' : _STEPSTATS,
'__module__' : 'tensorboard.compat.proto.step_stats_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.StepStats)
})
_sym_db.RegisterMessage(StepStats)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\017StepStatsProtosP\001ZQgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/step_stats_go_proto\370\001\001'
_MEMORYSTATS.fields_by_name['device_temp_memory_size']._options = None
_MEMORYSTATS.fields_by_name['device_temp_memory_size']._serialized_options = b'\030\001'
_MEMORYSTATS.fields_by_name['device_persistent_memory_size']._options = None
_MEMORYSTATS.fields_by_name['device_persistent_memory_size']._serialized_options = b'\030\001'
_MEMORYSTATS.fields_by_name['device_persistent_tensor_alloc_ids']._options = None
_MEMORYSTATS.fields_by_name['device_persistent_tensor_alloc_ids']._serialized_options = b'\030\001'
_DEVICESTEPSTATS_THREADNAMESENTRY._options = None
_DEVICESTEPSTATS_THREADNAMESENTRY._serialized_options = b'8\001'
_ALLOCATIONRECORD._serialized_start=164
_ALLOCATIONRECORD._serialized_end=225
_ALLOCATORMEMORYUSED._serialized_start=228
_ALLOCATORMEMORYUSED._serialized_end=425
_NODEOUTPUT._serialized_start=427
_NODEOUTPUT._serialized_end=513
_MEMORYSTATS._serialized_start=516
_MEMORYSTATS._serialized_end=752
_NODEEXECSTATS._serialized_start=755
_NODEEXECSTATS._serialized_end=1301
_DEVICESTEPSTATS._serialized_start=1304
_DEVICESTEPSTATS._serialized_end=1506
_DEVICESTEPSTATS_THREADNAMESENTRY._serialized_start=1456
_DEVICESTEPSTATS_THREADNAMESENTRY._serialized_end=1506
_STEPSTATS._serialized_start=1508
_STEPSTATS._serialized_end=1568
# @@protoc_insertion_point(module_scope)
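A short sketch of how these step-stats messages nest (illustrative, not protoc output; node names and timings are made up): NodeExecStats entries hang off a DeviceStepStats, whose thread_names map is keyed by thread id.

from tensorboard.compat.proto import step_stats_pb2

node = step_stats_pb2.NodeExecStats(
    node_name="MatMul", all_start_micros=1000, all_end_rel_micros=42
)
device = step_stats_pb2.DeviceStepStats(device="/device:CPU:0", node_stats=[node])
device.thread_names[0] = "main"            # map<uint32, string> field
step_stats = step_stats_pb2.StepStats(dev_stats=[device])
assert step_stats.dev_stats[0].node_stats[0].node_name == "MatMul"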

View File

@@ -0,0 +1,144 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/struct.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import tensor_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%tensorboard/compat/proto/struct.proto\x12\x0btensorboard\x1a%tensorboard/compat/proto/tensor.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xfd\x05\n\x0fStructuredValue\x12,\n\nnone_value\x18\x01 \x01(\x0b\x32\x16.tensorboard.NoneValueH\x00\x12\x17\n\rfloat64_value\x18\x0b \x01(\x01H\x00\x12\x15\n\x0bint64_value\x18\x0c \x01(\x12H\x00\x12\x16\n\x0cstring_value\x18\r \x01(\tH\x00\x12\x14\n\nbool_value\x18\x0e \x01(\x08H\x00\x12;\n\x12tensor_shape_value\x18\x1f \x01(\x0b\x32\x1d.tensorboard.TensorShapeProtoH\x00\x12\x33\n\x12tensor_dtype_value\x18 \x01(\x0e\x32\x15.tensorboard.DataTypeH\x00\x12\x39\n\x11tensor_spec_value\x18! \x01(\x0b\x32\x1c.tensorboard.TensorSpecProtoH\x00\x12\x35\n\x0ftype_spec_value\x18\" \x01(\x0b\x32\x1a.tensorboard.TypeSpecProtoH\x00\x12H\n\x19\x62ounded_tensor_spec_value\x18# \x01(\x0b\x32#.tensorboard.BoundedTensorSpecProtoH\x00\x12,\n\nlist_value\x18\x33 \x01(\x0b\x32\x16.tensorboard.ListValueH\x00\x12.\n\x0btuple_value\x18\x34 \x01(\x0b\x32\x17.tensorboard.TupleValueH\x00\x12,\n\ndict_value\x18\x35 \x01(\x0b\x32\x16.tensorboard.DictValueH\x00\x12\x39\n\x11named_tuple_value\x18\x36 \x01(\x0b\x32\x1c.tensorboard.NamedTupleValueH\x00\x12\x30\n\x0ctensor_value\x18\x37 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x12/\n\x0bnumpy_value\x18\x38 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x42\x06\n\x04kind\"\x0b\n\tNoneValue\"9\n\tListValue\x12,\n\x06values\x18\x01 \x03(\x0b\x32\x1c.tensorboard.StructuredValue\":\n\nTupleValue\x12,\n\x06values\x18\x01 \x03(\x0b\x32\x1c.tensorboard.StructuredValue\"\x8c\x01\n\tDictValue\x12\x32\n\x06\x66ields\x18\x01 \x03(\x0b\x32\".tensorboard.DictValue.FieldsEntry\x1aK\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.tensorboard.StructuredValue:\x02\x38\x01\"E\n\tPairValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\"G\n\x0fNamedTupleValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06values\x18\x02 \x03(\x0b\x32\x16.tensorboard.PairValue\"s\n\x0fTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12$\n\x05\x64type\x18\x03 \x01(\x0e\x32\x15.tensorboard.DataType\"\xd0\x01\n\x16\x42oundedTensorSpecProto\x12\x0c\n\x04name\x18\x01 \x01(\t\x12,\n\x05shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12$\n\x05\x64type\x18\x03 \x01(\x0e\x32\x15.tensorboard.DataType\x12)\n\x07minimum\x18\x04 \x01(\x0b\x32\x18.tensorboard.TensorProto\x12)\n\x07maximum\x18\x05 \x01(\x0b\x32\x18.tensorboard.TensorProto\"\xfa\x03\n\rTypeSpecProto\x12\x41\n\x0ftype_spec_class\x18\x01 \x01(\x0e\x32(.tensorboard.TypeSpecProto.TypeSpecClass\x12\x30\n\ntype_state\x18\x02 \x01(\x0b\x32\x1c.tensorboard.StructuredValue\x12\x1c\n\x14type_spec_class_name\x18\x03 \x01(\t\x12\x1b\n\x13num_flat_components\x18\x04 \x01(\x05\"\xb8\x02\n\rTypeSpecClass\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x16\n\x12SPARSE_TENSOR_SPEC\x10\x01\x12\x17\n\x13INDEXED_SLICES_SPEC\x10\x02\x12\x16\n\x12RAGGED_TENSOR_SPEC\x10\x03\x12\x15\n\x11TENSOR_ARRAY_SPEC\x10\x04\x12\x15\n\x11\x44\x41TA_DATASET_SPEC\x10\x05\x12\x16\n\x12\x44\x41TA_ITERATOR_SPEC\x10\x06\x12\x11\n\rOPTIONAL_SPEC\x10\x07\x12\x14\n\x10PER_REPLICA_SPEC\x10\x08\x12\x11\n\rVARIABLE_SPEC\x10\t\x12\x16\n\x12ROW_PARTITION_SPEC\x10\n\x12\x18\n\x14REGISTERED_TYPE_SPEC\x10\x0c\x12\x17\n\x13\x45XTENSION_TYPE_SPEC\x10\r\"\x04\x08\x0b\x10\x0b\x42WZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_protob\x06proto3')
_STRUCTUREDVALUE = DESCRIPTOR.message_types_by_name['StructuredValue']
_NONEVALUE = DESCRIPTOR.message_types_by_name['NoneValue']
_LISTVALUE = DESCRIPTOR.message_types_by_name['ListValue']
_TUPLEVALUE = DESCRIPTOR.message_types_by_name['TupleValue']
_DICTVALUE = DESCRIPTOR.message_types_by_name['DictValue']
_DICTVALUE_FIELDSENTRY = _DICTVALUE.nested_types_by_name['FieldsEntry']
_PAIRVALUE = DESCRIPTOR.message_types_by_name['PairValue']
_NAMEDTUPLEVALUE = DESCRIPTOR.message_types_by_name['NamedTupleValue']
_TENSORSPECPROTO = DESCRIPTOR.message_types_by_name['TensorSpecProto']
_BOUNDEDTENSORSPECPROTO = DESCRIPTOR.message_types_by_name['BoundedTensorSpecProto']
_TYPESPECPROTO = DESCRIPTOR.message_types_by_name['TypeSpecProto']
_TYPESPECPROTO_TYPESPECCLASS = _TYPESPECPROTO.enum_types_by_name['TypeSpecClass']
StructuredValue = _reflection.GeneratedProtocolMessageType('StructuredValue', (_message.Message,), {
'DESCRIPTOR' : _STRUCTUREDVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.StructuredValue)
})
_sym_db.RegisterMessage(StructuredValue)
NoneValue = _reflection.GeneratedProtocolMessageType('NoneValue', (_message.Message,), {
'DESCRIPTOR' : _NONEVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NoneValue)
})
_sym_db.RegisterMessage(NoneValue)
ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), {
'DESCRIPTOR' : _LISTVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.ListValue)
})
_sym_db.RegisterMessage(ListValue)
TupleValue = _reflection.GeneratedProtocolMessageType('TupleValue', (_message.Message,), {
'DESCRIPTOR' : _TUPLEVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TupleValue)
})
_sym_db.RegisterMessage(TupleValue)
DictValue = _reflection.GeneratedProtocolMessageType('DictValue', (_message.Message,), {
'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), {
'DESCRIPTOR' : _DICTVALUE_FIELDSENTRY,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DictValue.FieldsEntry)
})
,
'DESCRIPTOR' : _DICTVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.DictValue)
})
_sym_db.RegisterMessage(DictValue)
_sym_db.RegisterMessage(DictValue.FieldsEntry)
PairValue = _reflection.GeneratedProtocolMessageType('PairValue', (_message.Message,), {
'DESCRIPTOR' : _PAIRVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.PairValue)
})
_sym_db.RegisterMessage(PairValue)
NamedTupleValue = _reflection.GeneratedProtocolMessageType('NamedTupleValue', (_message.Message,), {
'DESCRIPTOR' : _NAMEDTUPLEVALUE,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.NamedTupleValue)
})
_sym_db.RegisterMessage(NamedTupleValue)
TensorSpecProto = _reflection.GeneratedProtocolMessageType('TensorSpecProto', (_message.Message,), {
'DESCRIPTOR' : _TENSORSPECPROTO,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorSpecProto)
})
_sym_db.RegisterMessage(TensorSpecProto)
BoundedTensorSpecProto = _reflection.GeneratedProtocolMessageType('BoundedTensorSpecProto', (_message.Message,), {
'DESCRIPTOR' : _BOUNDEDTENSORSPECPROTO,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.BoundedTensorSpecProto)
})
_sym_db.RegisterMessage(BoundedTensorSpecProto)
TypeSpecProto = _reflection.GeneratedProtocolMessageType('TypeSpecProto', (_message.Message,), {
'DESCRIPTOR' : _TYPESPECPROTO,
'__module__' : 'tensorboard.compat.proto.struct_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TypeSpecProto)
})
_sym_db.RegisterMessage(TypeSpecProto)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto'
_DICTVALUE_FIELDSENTRY._options = None
_DICTVALUE_FIELDSENTRY._serialized_options = b'8\001'
_STRUCTUREDVALUE._serialized_start=177
_STRUCTUREDVALUE._serialized_end=942
_NONEVALUE._serialized_start=944
_NONEVALUE._serialized_end=955
_LISTVALUE._serialized_start=957
_LISTVALUE._serialized_end=1014
_TUPLEVALUE._serialized_start=1016
_TUPLEVALUE._serialized_end=1074
_DICTVALUE._serialized_start=1077
_DICTVALUE._serialized_end=1217
_DICTVALUE_FIELDSENTRY._serialized_start=1142
_DICTVALUE_FIELDSENTRY._serialized_end=1217
_PAIRVALUE._serialized_start=1219
_PAIRVALUE._serialized_end=1288
_NAMEDTUPLEVALUE._serialized_start=1290
_NAMEDTUPLEVALUE._serialized_end=1361
_TENSORSPECPROTO._serialized_start=1363
_TENSORSPECPROTO._serialized_end=1478
_BOUNDEDTENSORSPECPROTO._serialized_start=1481
_BOUNDEDTENSORSPECPROTO._serialized_end=1689
_TYPESPECPROTO._serialized_start=1692
_TYPESPECPROTO._serialized_end=2198
_TYPESPECPROTO_TYPESPECCLASS._serialized_start=1886
_TYPESPECPROTO_TYPESPECCLASS._serialized_end=2198
# @@protoc_insertion_point(module_scope)
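A small sketch of StructuredValue (illustrative, not generated output; the key name is arbitrary): the message is a tagged union over the "kind" oneof, and DictValue.fields maps strings to further StructuredValues.

from tensorboard.compat.proto import struct_pb2

value = struct_pb2.StructuredValue(
    dict_value=struct_pb2.DictValue(
        fields={"rate": struct_pb2.StructuredValue(float64_value=0.5)}
    )
)
assert value.WhichOneof("kind") == "dict_value"
assert value.dict_value.fields["rate"].float64_value == 0.5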

View File

@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/summary.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import histogram_pb2 as tensorboard_dot_compat_dot_proto_dot_histogram__pb2
from tensorboard.compat.proto import tensor_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__pb2
from tensorboard.compat.proto.histogram_pb2 import *
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&tensorboard/compat/proto/summary.proto\x12\x0btensorboard\x1a(tensorboard/compat/proto/histogram.proto\x1a%tensorboard/compat/proto/tensor.proto\"\'\n\x12SummaryDescription\x12\x11\n\ttype_hint\x18\x01 \x01(\t\"\xe2\x01\n\x0fSummaryMetadata\x12<\n\x0bplugin_data\x18\x01 \x01(\x0b\x32\'.tensorboard.SummaryMetadata.PluginData\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x1b\n\x13summary_description\x18\x03 \x01(\t\x12*\n\ndata_class\x18\x04 \x01(\x0e\x32\x16.tensorboard.DataClass\x1a\x32\n\nPluginData\x12\x13\n\x0bplugin_name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x02 \x01(\x0c\"\xe4\x04\n\x07Summary\x12)\n\x05value\x18\x01 \x03(\x0b\x32\x1a.tensorboard.Summary.Value\x1aX\n\x05Image\x12\x0e\n\x06height\x18\x01 \x01(\x05\x12\r\n\x05width\x18\x02 \x01(\x05\x12\x12\n\ncolorspace\x18\x03 \x01(\x05\x12\x1c\n\x14\x65ncoded_image_string\x18\x04 \x01(\x0c\x1a}\n\x05\x41udio\x12\x13\n\x0bsample_rate\x18\x01 \x01(\x02\x12\x14\n\x0cnum_channels\x18\x02 \x01(\x03\x12\x15\n\rlength_frames\x18\x03 \x01(\x03\x12\x1c\n\x14\x65ncoded_audio_string\x18\x04 \x01(\x0c\x12\x14\n\x0c\x63ontent_type\x18\x05 \x01(\t\x1a\xd4\x02\n\x05Value\x12\x11\n\tnode_name\x18\x07 \x01(\t\x12\x0b\n\x03tag\x18\x01 \x01(\t\x12.\n\x08metadata\x18\t \x01(\x0b\x32\x1c.tensorboard.SummaryMetadata\x12\x16\n\x0csimple_value\x18\x02 \x01(\x02H\x00\x12&\n\x1cobsolete_old_style_histogram\x18\x03 \x01(\x0cH\x00\x12+\n\x05image\x18\x04 \x01(\x0b\x32\x1a.tensorboard.Summary.ImageH\x00\x12,\n\x05histo\x18\x05 \x01(\x0b\x32\x1b.tensorboard.HistogramProtoH\x00\x12+\n\x05\x61udio\x18\x06 \x01(\x0b\x32\x1a.tensorboard.Summary.AudioH\x00\x12*\n\x06tensor\x18\x08 \x01(\x0b\x32\x18.tensorboard.TensorProtoH\x00\x42\x07\n\x05value*o\n\tDataClass\x12\x16\n\x12\x44\x41TA_CLASS_UNKNOWN\x10\x00\x12\x15\n\x11\x44\x41TA_CLASS_SCALAR\x10\x01\x12\x15\n\x11\x44\x41TA_CLASS_TENSOR\x10\x02\x12\x1c\n\x18\x44\x41TA_CLASS_BLOB_SEQUENCE\x10\x03\x42~\n\x18org.tensorflow.frameworkB\rSummaryProtosP\x01ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/summary_go_proto\xf8\x01\x01P\x00\x62\x06proto3')
_DATACLASS = DESCRIPTOR.enum_types_by_name['DataClass']
DataClass = enum_type_wrapper.EnumTypeWrapper(_DATACLASS)
DATA_CLASS_UNKNOWN = 0
DATA_CLASS_SCALAR = 1
DATA_CLASS_TENSOR = 2
DATA_CLASS_BLOB_SEQUENCE = 3
_SUMMARYDESCRIPTION = DESCRIPTOR.message_types_by_name['SummaryDescription']
_SUMMARYMETADATA = DESCRIPTOR.message_types_by_name['SummaryMetadata']
_SUMMARYMETADATA_PLUGINDATA = _SUMMARYMETADATA.nested_types_by_name['PluginData']
_SUMMARY = DESCRIPTOR.message_types_by_name['Summary']
_SUMMARY_IMAGE = _SUMMARY.nested_types_by_name['Image']
_SUMMARY_AUDIO = _SUMMARY.nested_types_by_name['Audio']
_SUMMARY_VALUE = _SUMMARY.nested_types_by_name['Value']
SummaryDescription = _reflection.GeneratedProtocolMessageType('SummaryDescription', (_message.Message,), {
'DESCRIPTOR' : _SUMMARYDESCRIPTION,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SummaryDescription)
})
_sym_db.RegisterMessage(SummaryDescription)
SummaryMetadata = _reflection.GeneratedProtocolMessageType('SummaryMetadata', (_message.Message,), {
'PluginData' : _reflection.GeneratedProtocolMessageType('PluginData', (_message.Message,), {
'DESCRIPTOR' : _SUMMARYMETADATA_PLUGINDATA,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SummaryMetadata.PluginData)
})
,
'DESCRIPTOR' : _SUMMARYMETADATA,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SummaryMetadata)
})
_sym_db.RegisterMessage(SummaryMetadata)
_sym_db.RegisterMessage(SummaryMetadata.PluginData)
Summary = _reflection.GeneratedProtocolMessageType('Summary', (_message.Message,), {
'Image' : _reflection.GeneratedProtocolMessageType('Image', (_message.Message,), {
'DESCRIPTOR' : _SUMMARY_IMAGE,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.Summary.Image)
})
,
'Audio' : _reflection.GeneratedProtocolMessageType('Audio', (_message.Message,), {
'DESCRIPTOR' : _SUMMARY_AUDIO,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.Summary.Audio)
})
,
'Value' : _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
'DESCRIPTOR' : _SUMMARY_VALUE,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.Summary.Value)
})
,
'DESCRIPTOR' : _SUMMARY,
'__module__' : 'tensorboard.compat.proto.summary_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.Summary)
})
_sym_db.RegisterMessage(Summary)
_sym_db.RegisterMessage(Summary.Image)
_sym_db.RegisterMessage(Summary.Audio)
_sym_db.RegisterMessage(Summary.Value)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\rSummaryProtosP\001ZNgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/summary_go_proto\370\001\001'
_DATACLASS._serialized_start=1021
_DATACLASS._serialized_end=1132
_SUMMARYDESCRIPTION._serialized_start=136
_SUMMARYDESCRIPTION._serialized_end=175
_SUMMARYMETADATA._serialized_start=178
_SUMMARYMETADATA._serialized_end=404
_SUMMARYMETADATA_PLUGINDATA._serialized_start=354
_SUMMARYMETADATA_PLUGINDATA._serialized_end=404
_SUMMARY._serialized_start=407
_SUMMARY._serialized_end=1019
_SUMMARY_IMAGE._serialized_start=461
_SUMMARY_IMAGE._serialized_end=549
_SUMMARY_AUDIO._serialized_start=551
_SUMMARY_AUDIO._serialized_end=676
_SUMMARY_VALUE._serialized_start=679
_SUMMARY_VALUE._serialized_end=1019
# @@protoc_insertion_point(module_scope)
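A usage sketch for the Summary messages (illustrative, not part of the generated file; the plugin name and tag are only examples): a Summary holds tagged Values, each optionally annotated with SummaryMetadata and per-plugin PluginData.

from tensorboard.compat.proto import summary_pb2

metadata = summary_pb2.SummaryMetadata(
    plugin_data=summary_pb2.SummaryMetadata.PluginData(plugin_name="scalars"),
    data_class=summary_pb2.DATA_CLASS_SCALAR,
)
summary = summary_pb2.Summary(
    value=[summary_pb2.Summary.Value(tag="loss", simple_value=0.25, metadata=metadata)]
)
assert summary.value[0].metadata.plugin_data.plugin_name == "scalars"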

View File

@@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/tensor_description.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import allocation_description_pb2 as tensorboard_dot_compat_dot_proto_dot_allocation__description__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n1tensorboard/compat/proto/tensor_description.proto\x12\x0btensorboard\x1a\x35tensorboard/compat/proto/allocation_description.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xab\x01\n\x11TensorDescription\x12$\n\x05\x64type\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType\x12,\n\x05shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12\x42\n\x16\x61llocation_description\x18\x04 \x01(\x0b\x32\".tensorboard.AllocationDescriptionB\x93\x01\n\x18org.tensorflow.frameworkB\x17TensorDescriptionProtosP\x01ZYgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_description_go_proto\xf8\x01\x01\x62\x06proto3')
_TENSORDESCRIPTION = DESCRIPTOR.message_types_by_name['TensorDescription']
TensorDescription = _reflection.GeneratedProtocolMessageType('TensorDescription', (_message.Message,), {
'DESCRIPTOR' : _TENSORDESCRIPTION,
'__module__' : 'tensorboard.compat.proto.tensor_description_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorDescription)
})
_sym_db.RegisterMessage(TensorDescription)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\027TensorDescriptionProtosP\001ZYgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_description_go_proto\370\001\001'
_TENSORDESCRIPTION._serialized_start=205
_TENSORDESCRIPTION._serialized_end=376
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,68 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/tensor.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from tensorboard.compat.proto import resource_handle_pb2 as tensorboard_dot_compat_dot_proto_dot_resource__handle__pb2
from tensorboard.compat.proto import tensor_shape_pb2 as tensorboard_dot_compat_dot_proto_dot_tensor__shape__pb2
from tensorboard.compat.proto import types_pb2 as tensorboard_dot_compat_dot_proto_dot_types__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%tensorboard/compat/proto/tensor.proto\x12\x0btensorboard\x1a.tensorboard/compat/proto/resource_handle.proto\x1a+tensorboard/compat/proto/tensor_shape.proto\x1a$tensorboard/compat/proto/types.proto\"\xa4\x04\n\x0bTensorProto\x12$\n\x05\x64type\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType\x12\x33\n\x0ctensor_shape\x18\x02 \x01(\x0b\x32\x1d.tensorboard.TensorShapeProto\x12\x16\n\x0eversion_number\x18\x03 \x01(\x05\x12\x16\n\x0etensor_content\x18\x04 \x01(\x0c\x12\x14\n\x08half_val\x18\r \x03(\x05\x42\x02\x10\x01\x12\x15\n\tfloat_val\x18\x05 \x03(\x02\x42\x02\x10\x01\x12\x16\n\ndouble_val\x18\x06 \x03(\x01\x42\x02\x10\x01\x12\x13\n\x07int_val\x18\x07 \x03(\x05\x42\x02\x10\x01\x12\x12\n\nstring_val\x18\x08 \x03(\x0c\x12\x18\n\x0cscomplex_val\x18\t \x03(\x02\x42\x02\x10\x01\x12\x15\n\tint64_val\x18\n \x03(\x03\x42\x02\x10\x01\x12\x14\n\x08\x62ool_val\x18\x0b \x03(\x08\x42\x02\x10\x01\x12\x18\n\x0c\x64\x63omplex_val\x18\x0c \x03(\x01\x42\x02\x10\x01\x12=\n\x13resource_handle_val\x18\x0e \x03(\x0b\x32 .tensorboard.ResourceHandleProto\x12\x38\n\x0bvariant_val\x18\x0f \x03(\x0b\x32#.tensorboard.VariantTensorDataProto\x12\x16\n\nuint32_val\x18\x10 \x03(\rB\x02\x10\x01\x12\x16\n\nuint64_val\x18\x11 \x03(\x04\x42\x02\x10\x01\x12\x12\n\nfloat8_val\x18\x12 \x01(\x0c\"h\n\x16VariantTensorDataProto\x12\x11\n\ttype_name\x18\x01 \x01(\t\x12\x10\n\x08metadata\x18\x02 \x01(\x0c\x12)\n\x07tensors\x18\x03 \x03(\x0b\x32\x18.tensorboard.TensorProtoB|\n\x18org.tensorflow.frameworkB\x0cTensorProtosP\x01ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_go_proto\xf8\x01\x01\x62\x06proto3')
_TENSORPROTO = DESCRIPTOR.message_types_by_name['TensorProto']
_VARIANTTENSORDATAPROTO = DESCRIPTOR.message_types_by_name['VariantTensorDataProto']
TensorProto = _reflection.GeneratedProtocolMessageType('TensorProto', (_message.Message,), {
'DESCRIPTOR' : _TENSORPROTO,
'__module__' : 'tensorboard.compat.proto.tensor_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorProto)
})
_sym_db.RegisterMessage(TensorProto)
VariantTensorDataProto = _reflection.GeneratedProtocolMessageType('VariantTensorDataProto', (_message.Message,), {
'DESCRIPTOR' : _VARIANTTENSORDATAPROTO,
'__module__' : 'tensorboard.compat.proto.tensor_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.VariantTensorDataProto)
})
_sym_db.RegisterMessage(VariantTensorDataProto)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\014TensorProtosP\001ZMgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_go_proto\370\001\001'
_TENSORPROTO.fields_by_name['half_val']._options = None
_TENSORPROTO.fields_by_name['half_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['float_val']._options = None
_TENSORPROTO.fields_by_name['float_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['double_val']._options = None
_TENSORPROTO.fields_by_name['double_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['int_val']._options = None
_TENSORPROTO.fields_by_name['int_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['scomplex_val']._options = None
_TENSORPROTO.fields_by_name['scomplex_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['int64_val']._options = None
_TENSORPROTO.fields_by_name['int64_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['bool_val']._options = None
_TENSORPROTO.fields_by_name['bool_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['dcomplex_val']._options = None
_TENSORPROTO.fields_by_name['dcomplex_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['uint32_val']._options = None
_TENSORPROTO.fields_by_name['uint32_val']._serialized_options = b'\020\001'
_TENSORPROTO.fields_by_name['uint64_val']._options = None
_TENSORPROTO.fields_by_name['uint64_val']._serialized_options = b'\020\001'
_TENSORPROTO._serialized_start=186
_TENSORPROTO._serialized_end=734
_VARIANTTENSORDATAPROTO._serialized_start=736
_VARIANTTENSORDATAPROTO._serialized_end=840
# @@protoc_insertion_point(module_scope)
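A sketch of building a TensorProto by hand (illustrative, not generated output; the values are arbitrary): dtype comes from types_pb2, the shape from tensor_shape_pb2, and small float tensors can be written through the repeated float_val field.

from tensorboard.compat.proto import tensor_pb2, tensor_shape_pb2, types_pb2

shape = tensor_shape_pb2.TensorShapeProto(
    dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=2),
         tensor_shape_pb2.TensorShapeProto.Dim(size=2)]
)
tensor = tensor_pb2.TensorProto(
    dtype=types_pb2.DT_FLOAT,
    tensor_shape=shape,
    float_val=[1.0, 2.0, 3.0, 4.0],   # row-major values for the 2x2 tensor
)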

View File

@@ -0,0 +1,46 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/tensor_shape.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n+tensorboard/compat/proto/tensor_shape.proto\x12\x0btensorboard\"{\n\x10TensorShapeProto\x12.\n\x03\x64im\x18\x02 \x03(\x0b\x32!.tensorboard.TensorShapeProto.Dim\x12\x14\n\x0cunknown_rank\x18\x03 \x01(\x08\x1a!\n\x03\x44im\x12\x0c\n\x04size\x18\x01 \x01(\x03\x12\x0c\n\x04name\x18\x02 \x01(\tB\x87\x01\n\x18org.tensorflow.frameworkB\x11TensorShapeProtosP\x01ZSgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_shape_go_proto\xf8\x01\x01\x62\x06proto3')
_TENSORSHAPEPROTO = DESCRIPTOR.message_types_by_name['TensorShapeProto']
_TENSORSHAPEPROTO_DIM = _TENSORSHAPEPROTO.nested_types_by_name['Dim']
TensorShapeProto = _reflection.GeneratedProtocolMessageType('TensorShapeProto', (_message.Message,), {
'Dim' : _reflection.GeneratedProtocolMessageType('Dim', (_message.Message,), {
'DESCRIPTOR' : _TENSORSHAPEPROTO_DIM,
'__module__' : 'tensorboard.compat.proto.tensor_shape_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorShapeProto.Dim)
})
,
'DESCRIPTOR' : _TENSORSHAPEPROTO,
'__module__' : 'tensorboard.compat.proto.tensor_shape_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TensorShapeProto)
})
_sym_db.RegisterMessage(TensorShapeProto)
_sym_db.RegisterMessage(TensorShapeProto.Dim)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\021TensorShapeProtosP\001ZSgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/tensor_shape_go_proto\370\001\001'
_TENSORSHAPEPROTO._serialized_start=60
_TENSORSHAPEPROTO._serialized_end=183
_TENSORSHAPEPROTO_DIM._serialized_start=150
_TENSORSHAPEPROTO_DIM._serialized_end=183
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,90 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/trackable_object_graph.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import wrappers_pb2 as google_dot_protobuf_dot_wrappers__pb2
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n5tensorboard/compat/proto/trackable_object_graph.proto\x12\x0btensorboard\x1a\x1egoogle/protobuf/wrappers.proto\"\xf8\x05\n\x14TrackableObjectGraph\x12@\n\x05nodes\x18\x01 \x03(\x0b\x32\x31.tensorboard.TrackableObjectGraph.TrackableObject\x1a\x9d\x05\n\x0fTrackableObject\x12S\n\x08\x63hildren\x18\x01 \x03(\x0b\x32\x41.tensorboard.TrackableObjectGraph.TrackableObject.ObjectReference\x12V\n\nattributes\x18\x02 \x03(\x0b\x32\x42.tensorboard.TrackableObjectGraph.TrackableObject.SerializedTensor\x12_\n\x0eslot_variables\x18\x03 \x03(\x0b\x32G.tensorboard.TrackableObjectGraph.TrackableObject.SlotVariableReference\x12\x36\n\x10registered_saver\x18\x04 \x01(\x0b\x32\x1c.tensorboard.RegisteredSaver\x12\x39\n\x15has_checkpoint_values\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.BoolValue\x1a\x36\n\x0fObjectReference\x12\x0f\n\x07node_id\x18\x01 \x01(\x05\x12\x12\n\nlocal_name\x18\x02 \x01(\t\x1a\x63\n\x10SerializedTensor\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tfull_name\x18\x02 \x01(\t\x12\x16\n\x0e\x63heckpoint_key\x18\x03 \x01(\tJ\x04\x08\x04\x10\x05R\x10optional_restore\x1al\n\x15SlotVariableReference\x12!\n\x19original_variable_node_id\x18\x01 \x01(\x05\x12\x11\n\tslot_name\x18\x02 \x01(\t\x12\x1d\n\x15slot_variable_node_id\x18\x03 \x01(\x05\"4\n\x0fRegisteredSaver\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0bobject_name\x18\x02 \x01(\tBZZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_TRACKABLEOBJECTGRAPH = DESCRIPTOR.message_types_by_name['TrackableObjectGraph']
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT = _TRACKABLEOBJECTGRAPH.nested_types_by_name['TrackableObject']
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE = _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT.nested_types_by_name['ObjectReference']
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR = _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT.nested_types_by_name['SerializedTensor']
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE = _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT.nested_types_by_name['SlotVariableReference']
_REGISTEREDSAVER = DESCRIPTOR.message_types_by_name['RegisteredSaver']
TrackableObjectGraph = _reflection.GeneratedProtocolMessageType('TrackableObjectGraph', (_message.Message,), {
'TrackableObject' : _reflection.GeneratedProtocolMessageType('TrackableObject', (_message.Message,), {
'ObjectReference' : _reflection.GeneratedProtocolMessageType('ObjectReference', (_message.Message,), {
'DESCRIPTOR' : _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TrackableObjectGraph.TrackableObject.ObjectReference)
})
,
'SerializedTensor' : _reflection.GeneratedProtocolMessageType('SerializedTensor', (_message.Message,), {
'DESCRIPTOR' : _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TrackableObjectGraph.TrackableObject.SerializedTensor)
})
,
'SlotVariableReference' : _reflection.GeneratedProtocolMessageType('SlotVariableReference', (_message.Message,), {
'DESCRIPTOR' : _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TrackableObjectGraph.TrackableObject.SlotVariableReference)
})
,
'DESCRIPTOR' : _TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TrackableObjectGraph.TrackableObject)
})
,
'DESCRIPTOR' : _TRACKABLEOBJECTGRAPH,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.TrackableObjectGraph)
})
_sym_db.RegisterMessage(TrackableObjectGraph)
_sym_db.RegisterMessage(TrackableObjectGraph.TrackableObject)
_sym_db.RegisterMessage(TrackableObjectGraph.TrackableObject.ObjectReference)
_sym_db.RegisterMessage(TrackableObjectGraph.TrackableObject.SerializedTensor)
_sym_db.RegisterMessage(TrackableObjectGraph.TrackableObject.SlotVariableReference)
RegisteredSaver = _reflection.GeneratedProtocolMessageType('RegisteredSaver', (_message.Message,), {
'DESCRIPTOR' : _REGISTEREDSAVER,
'__module__' : 'tensorboard.compat.proto.trackable_object_graph_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.RegisteredSaver)
})
_sym_db.RegisterMessage(RegisteredSaver)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_TRACKABLEOBJECTGRAPH._serialized_start=103
_TRACKABLEOBJECTGRAPH._serialized_end=863
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT._serialized_start=194
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT._serialized_end=863
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE._serialized_start=598
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_OBJECTREFERENCE._serialized_end=652
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR._serialized_start=654
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SERIALIZEDTENSOR._serialized_end=753
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE._serialized_start=755
_TRACKABLEOBJECTGRAPH_TRACKABLEOBJECT_SLOTVARIABLEREFERENCE._serialized_end=863
_REGISTEREDSAVER._serialized_start=865
_REGISTEREDSAVER._serialized_end=917
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,95 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/types.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$tensorboard/compat/proto/types.proto\x12\x0btensorboard\":\n\x0fSerializedDType\x12\'\n\x08\x64\x61tatype\x18\x01 \x01(\x0e\x32\x15.tensorboard.DataType*\xc6\x07\n\x08\x44\x61taType\x12\x0e\n\nDT_INVALID\x10\x00\x12\x0c\n\x08\x44T_FLOAT\x10\x01\x12\r\n\tDT_DOUBLE\x10\x02\x12\x0c\n\x08\x44T_INT32\x10\x03\x12\x0c\n\x08\x44T_UINT8\x10\x04\x12\x0c\n\x08\x44T_INT16\x10\x05\x12\x0b\n\x07\x44T_INT8\x10\x06\x12\r\n\tDT_STRING\x10\x07\x12\x10\n\x0c\x44T_COMPLEX64\x10\x08\x12\x0c\n\x08\x44T_INT64\x10\t\x12\x0b\n\x07\x44T_BOOL\x10\n\x12\x0c\n\x08\x44T_QINT8\x10\x0b\x12\r\n\tDT_QUINT8\x10\x0c\x12\r\n\tDT_QINT32\x10\r\x12\x0f\n\x0b\x44T_BFLOAT16\x10\x0e\x12\r\n\tDT_QINT16\x10\x0f\x12\x0e\n\nDT_QUINT16\x10\x10\x12\r\n\tDT_UINT16\x10\x11\x12\x11\n\rDT_COMPLEX128\x10\x12\x12\x0b\n\x07\x44T_HALF\x10\x13\x12\x0f\n\x0b\x44T_RESOURCE\x10\x14\x12\x0e\n\nDT_VARIANT\x10\x15\x12\r\n\tDT_UINT32\x10\x16\x12\r\n\tDT_UINT64\x10\x17\x12\x12\n\x0e\x44T_FLOAT8_E5M2\x10\x18\x12\x14\n\x10\x44T_FLOAT8_E4M3FN\x10\x19\x12\x0b\n\x07\x44T_INT4\x10\x1d\x12\x0c\n\x08\x44T_UINT4\x10\x1e\x12\x10\n\x0c\x44T_FLOAT_REF\x10\x65\x12\x11\n\rDT_DOUBLE_REF\x10\x66\x12\x10\n\x0c\x44T_INT32_REF\x10g\x12\x10\n\x0c\x44T_UINT8_REF\x10h\x12\x10\n\x0c\x44T_INT16_REF\x10i\x12\x0f\n\x0b\x44T_INT8_REF\x10j\x12\x11\n\rDT_STRING_REF\x10k\x12\x14\n\x10\x44T_COMPLEX64_REF\x10l\x12\x10\n\x0c\x44T_INT64_REF\x10m\x12\x0f\n\x0b\x44T_BOOL_REF\x10n\x12\x10\n\x0c\x44T_QINT8_REF\x10o\x12\x11\n\rDT_QUINT8_REF\x10p\x12\x11\n\rDT_QINT32_REF\x10q\x12\x13\n\x0f\x44T_BFLOAT16_REF\x10r\x12\x11\n\rDT_QINT16_REF\x10s\x12\x12\n\x0e\x44T_QUINT16_REF\x10t\x12\x11\n\rDT_UINT16_REF\x10u\x12\x15\n\x11\x44T_COMPLEX128_REF\x10v\x12\x0f\n\x0b\x44T_HALF_REF\x10w\x12\x13\n\x0f\x44T_RESOURCE_REF\x10x\x12\x12\n\x0e\x44T_VARIANT_REF\x10y\x12\x11\n\rDT_UINT32_REF\x10z\x12\x11\n\rDT_UINT64_REF\x10{\x12\x16\n\x12\x44T_FLOAT8_E5M2_REF\x10|\x12\x18\n\x14\x44T_FLOAT8_E4M3FN_REF\x10}\x12\x10\n\x0b\x44T_INT4_REF\x10\x81\x01\x12\x11\n\x0c\x44T_UINT4_REF\x10\x82\x01\x42z\n\x18org.tensorflow.frameworkB\x0bTypesProtosP\x01ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/types_go_proto\xf8\x01\x01\x62\x06proto3')
_DATATYPE = DESCRIPTOR.enum_types_by_name['DataType']
DataType = enum_type_wrapper.EnumTypeWrapper(_DATATYPE)
DT_INVALID = 0
DT_FLOAT = 1
DT_DOUBLE = 2
DT_INT32 = 3
DT_UINT8 = 4
DT_INT16 = 5
DT_INT8 = 6
DT_STRING = 7
DT_COMPLEX64 = 8
DT_INT64 = 9
DT_BOOL = 10
DT_QINT8 = 11
DT_QUINT8 = 12
DT_QINT32 = 13
DT_BFLOAT16 = 14
DT_QINT16 = 15
DT_QUINT16 = 16
DT_UINT16 = 17
DT_COMPLEX128 = 18
DT_HALF = 19
DT_RESOURCE = 20
DT_VARIANT = 21
DT_UINT32 = 22
DT_UINT64 = 23
DT_FLOAT8_E5M2 = 24
DT_FLOAT8_E4M3FN = 25
DT_INT4 = 29
DT_UINT4 = 30
DT_FLOAT_REF = 101
DT_DOUBLE_REF = 102
DT_INT32_REF = 103
DT_UINT8_REF = 104
DT_INT16_REF = 105
DT_INT8_REF = 106
DT_STRING_REF = 107
DT_COMPLEX64_REF = 108
DT_INT64_REF = 109
DT_BOOL_REF = 110
DT_QINT8_REF = 111
DT_QUINT8_REF = 112
DT_QINT32_REF = 113
DT_BFLOAT16_REF = 114
DT_QINT16_REF = 115
DT_QUINT16_REF = 116
DT_UINT16_REF = 117
DT_COMPLEX128_REF = 118
DT_HALF_REF = 119
DT_RESOURCE_REF = 120
DT_VARIANT_REF = 121
DT_UINT32_REF = 122
DT_UINT64_REF = 123
DT_FLOAT8_E5M2_REF = 124
DT_FLOAT8_E4M3FN_REF = 125
DT_INT4_REF = 129
DT_UINT4_REF = 130
_SERIALIZEDDTYPE = DESCRIPTOR.message_types_by_name['SerializedDType']
SerializedDType = _reflection.GeneratedProtocolMessageType('SerializedDType', (_message.Message,), {
'DESCRIPTOR' : _SERIALIZEDDTYPE,
'__module__' : 'tensorboard.compat.proto.types_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SerializedDType)
})
_sym_db.RegisterMessage(SerializedDType)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\013TypesProtosP\001ZLgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/types_go_proto\370\001\001'
_DATATYPE._serialized_start=114
_DATATYPE._serialized_end=1080
_SERIALIZEDDTYPE._serialized_start=53
_SERIALIZEDDTYPE._serialized_end=111
# @@protoc_insertion_point(module_scope)
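A two-line sketch (illustrative, not protoc output): because DataType is wrapped in an EnumTypeWrapper, names and numeric values convert in both directions.

from tensorboard.compat.proto import types_pb2

assert types_pb2.DataType.Name(types_pb2.DT_FLOAT) == "DT_FLOAT"
assert types_pb2.DataType.Value("DT_BFLOAT16") == types_pb2.DT_BFLOAT16 == 14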

View File

@@ -0,0 +1,62 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/variable.proto
"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'tensorboard/compat/proto/variable.proto\x12\x0btensorboard\"\xcb\x02\n\x0bVariableDef\x12\x15\n\rvariable_name\x18\x01 \x01(\t\x12\x1a\n\x12initial_value_name\x18\x06 \x01(\t\x12\x18\n\x10initializer_name\x18\x02 \x01(\t\x12\x15\n\rsnapshot_name\x18\x03 \x01(\t\x12:\n\x13save_slice_info_def\x18\x04 \x01(\x0b\x32\x1d.tensorboard.SaveSliceInfoDef\x12\x13\n\x0bis_resource\x18\x05 \x01(\x08\x12\x11\n\ttrainable\x18\x07 \x01(\x08\x12=\n\x0fsynchronization\x18\x08 \x01(\x0e\x32$.tensorboard.VariableSynchronization\x12\x35\n\x0b\x61ggregation\x18\t \x01(\x0e\x32 .tensorboard.VariableAggregation\"`\n\x10SaveSliceInfoDef\x12\x11\n\tfull_name\x18\x01 \x01(\t\x12\x12\n\nfull_shape\x18\x02 \x03(\x03\x12\x12\n\nvar_offset\x18\x03 \x03(\x03\x12\x11\n\tvar_shape\x18\x04 \x03(\x03*\xac\x01\n\x17VariableSynchronization\x12!\n\x1dVARIABLE_SYNCHRONIZATION_AUTO\x10\x00\x12!\n\x1dVARIABLE_SYNCHRONIZATION_NONE\x10\x01\x12%\n!VARIABLE_SYNCHRONIZATION_ON_WRITE\x10\x02\x12$\n VARIABLE_SYNCHRONIZATION_ON_READ\x10\x03*\x9e\x01\n\x13VariableAggregation\x12\x1d\n\x19VARIABLE_AGGREGATION_NONE\x10\x00\x12\x1c\n\x18VARIABLE_AGGREGATION_SUM\x10\x01\x12\x1d\n\x19VARIABLE_AGGREGATION_MEAN\x10\x02\x12+\n\'VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA\x10\x03\x42\x80\x01\n\x18org.tensorflow.frameworkB\x0eVariableProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/variable_go_proto\xf8\x01\x01\x62\x06proto3')
_VARIABLESYNCHRONIZATION = DESCRIPTOR.enum_types_by_name['VariableSynchronization']
VariableSynchronization = enum_type_wrapper.EnumTypeWrapper(_VARIABLESYNCHRONIZATION)
_VARIABLEAGGREGATION = DESCRIPTOR.enum_types_by_name['VariableAggregation']
VariableAggregation = enum_type_wrapper.EnumTypeWrapper(_VARIABLEAGGREGATION)
VARIABLE_SYNCHRONIZATION_AUTO = 0
VARIABLE_SYNCHRONIZATION_NONE = 1
VARIABLE_SYNCHRONIZATION_ON_WRITE = 2
VARIABLE_SYNCHRONIZATION_ON_READ = 3
VARIABLE_AGGREGATION_NONE = 0
VARIABLE_AGGREGATION_SUM = 1
VARIABLE_AGGREGATION_MEAN = 2
VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA = 3
_VARIABLEDEF = DESCRIPTOR.message_types_by_name['VariableDef']
_SAVESLICEINFODEF = DESCRIPTOR.message_types_by_name['SaveSliceInfoDef']
VariableDef = _reflection.GeneratedProtocolMessageType('VariableDef', (_message.Message,), {
'DESCRIPTOR' : _VARIABLEDEF,
'__module__' : 'tensorboard.compat.proto.variable_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.VariableDef)
})
_sym_db.RegisterMessage(VariableDef)
SaveSliceInfoDef = _reflection.GeneratedProtocolMessageType('SaveSliceInfoDef', (_message.Message,), {
'DESCRIPTOR' : _SAVESLICEINFODEF,
'__module__' : 'tensorboard.compat.proto.variable_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.SaveSliceInfoDef)
})
_sym_db.RegisterMessage(SaveSliceInfoDef)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\016VariableProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/variable_go_proto\370\001\001'
_VARIABLESYNCHRONIZATION._serialized_start=489
_VARIABLESYNCHRONIZATION._serialized_end=661
_VARIABLEAGGREGATION._serialized_start=664
_VARIABLEAGGREGATION._serialized_end=822
_VARIABLEDEF._serialized_start=57
_VARIABLEDEF._serialized_end=388
_SAVESLICEINFODEF._serialized_start=390
_SAVESLICEINFODEF._serialized_end=486
# @@protoc_insertion_point(module_scope)

View File

@ -0,0 +1,38 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/verifier_config.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.tensorboard/compat/proto/verifier_config.proto\x12\x0btensorboard\"\x9c\x01\n\x0eVerifierConfig\x12\"\n\x1averification_timeout_in_ms\x18\x01 \x01(\x03\x12>\n\x12structure_verifier\x18\x02 \x01(\x0e\x32\".tensorboard.VerifierConfig.Toggle\"&\n\x06Toggle\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x00\x12\x06\n\x02ON\x10\x01\x12\x07\n\x03OFF\x10\x02\x42\x8c\x01\n\x18org.tensorflow.frameworkB\x14VerifierConfigProtosP\x01ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\xf8\x01\x01\x62\x06proto3')
_VERIFIERCONFIG = DESCRIPTOR.message_types_by_name['VerifierConfig']
_VERIFIERCONFIG_TOGGLE = _VERIFIERCONFIG.enum_types_by_name['Toggle']
VerifierConfig = _reflection.GeneratedProtocolMessageType('VerifierConfig', (_message.Message,), {
'DESCRIPTOR' : _VERIFIERCONFIG,
'__module__' : 'tensorboard.compat.proto.verifier_config_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.VerifierConfig)
})
_sym_db.RegisterMessage(VerifierConfig)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\024VerifierConfigProtosP\001ZUgithub.com/tensorflow/tensorflow/tensorflow/go/core/protobuf/for_core_protos_go_proto\370\001\001'
_VERIFIERCONFIG._serialized_start=64
_VERIFIERCONFIG._serialized_end=220
_VERIFIERCONFIG_TOGGLE._serialized_start=182
_VERIFIERCONFIG_TOGGLE._serialized_end=220
# @@protoc_insertion_point(module_scope)

View File

@ -0,0 +1,35 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: tensorboard/compat/proto/versions.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\'tensorboard/compat/proto/versions.proto\x12\x0btensorboard\"K\n\nVersionDef\x12\x10\n\x08producer\x18\x01 \x01(\x05\x12\x14\n\x0cmin_consumer\x18\x02 \x01(\x05\x12\x15\n\rbad_consumers\x18\x03 \x03(\x05\x42\x80\x01\n\x18org.tensorflow.frameworkB\x0eVersionsProtosP\x01ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/versions_go_proto\xf8\x01\x01\x62\x06proto3')
_VERSIONDEF = DESCRIPTOR.message_types_by_name['VersionDef']
VersionDef = _reflection.GeneratedProtocolMessageType('VersionDef', (_message.Message,), {
'DESCRIPTOR' : _VERSIONDEF,
'__module__' : 'tensorboard.compat.proto.versions_pb2'
# @@protoc_insertion_point(class_scope:tensorboard.VersionDef)
})
_sym_db.RegisterMessage(VersionDef)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
DESCRIPTOR._serialized_options = b'\n\030org.tensorflow.frameworkB\016VersionsProtosP\001ZOgithub.com/tensorflow/tensorflow/tensorflow/go/core/framework/versions_go_proto\370\001\001'
_VERSIONDEF._serialized_start=56
_VERSIONDEF._serialized_end=131
# @@protoc_insertion_point(module_scope)

View File

@ -0,0 +1,38 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from tensorboard.compat.proto.config_pb2 import * # noqa
from tensorboard.compat.proto.event_pb2 import * # noqa
from tensorboard.compat.proto.graph_pb2 import * # noqa
from tensorboard.compat.proto.meta_graph_pb2 import * # noqa
from tensorboard.compat.proto.summary_pb2 import * # noqa
from .dtypes import as_dtype # noqa
from .dtypes import DType # noqa
from .dtypes import string # noqa
from . import app # noqa
from . import compat # noqa
from . import dtypes # noqa
from . import error_codes # noqa
from . import errors # noqa
from . import flags # noqa
from . import io # noqa
from . import pywrap_tensorflow # noqa
from . import tensor_shape # noqa
compat.v1.errors = errors
# Set a fake __version__ to help distinguish this as our own stub API.
__version__ = "stub"

View File

@ -0,0 +1,124 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Generic entry point script."""
import errno as _errno
import sys as _sys
from . import flags
def _usage(shorthelp):
"""Writes __main__'s docstring to stdout with some help text.
Args:
shorthelp: bool, if True, prints only flags from the main module,
rather than all flags.
"""
doc = _sys.modules["__main__"].__doc__
if not doc:
doc = "\nUSAGE: %s [flags]\n" % _sys.argv[0]
doc = flags.text_wrap(doc, indent=" ", firstline_indent="")
else:
# Replace all '%s' with sys.argv[0], and all '%%' with '%'.
num_specifiers = doc.count("%") - 2 * doc.count("%%")
try:
doc %= (_sys.argv[0],) * num_specifiers
except (OverflowError, TypeError, ValueError):
# Just display the docstring as-is.
pass
if shorthelp:
flag_str = flags.FLAGS.main_module_help()
else:
flag_str = str(flags.FLAGS)
try:
_sys.stdout.write(doc)
if flag_str:
_sys.stdout.write("\nflags:\n")
_sys.stdout.write(flag_str)
_sys.stdout.write("\n")
except IOError as e:
# We avoid printing a huge backtrace if we get EPIPE, because
# "foo.par --help | less" is a frequent use case.
if e.errno != _errno.EPIPE:
raise
class _HelpFlag(flags.BooleanFlag):
"""Special boolean flag that displays usage and raises SystemExit."""
NAME = "help"
SHORT_NAME = "h"
def __init__(self):
super().__init__(
self.NAME, False, "show this help", short_name=self.SHORT_NAME
)
def parse(self, arg):
if arg:
_usage(shorthelp=True)
print()
print("Try --helpfull to get a list of all flags.")
_sys.exit(1)
class _HelpshortFlag(_HelpFlag):
"""--helpshort is an alias for --help."""
NAME = "helpshort"
SHORT_NAME = None
class _HelpfullFlag(flags.BooleanFlag):
"""Display help for flags in main module and all dependent modules."""
def __init__(self):
super().__init__("helpfull", False, "show full help")
def parse(self, arg):
if arg:
_usage(shorthelp=False)
_sys.exit(1)
_define_help_flags_called = False
def _define_help_flags():
global _define_help_flags_called
if not _define_help_flags_called:
flags.DEFINE_flag(_HelpFlag())
flags.DEFINE_flag(_HelpfullFlag())
flags.DEFINE_flag(_HelpshortFlag())
_define_help_flags_called = True
# @tf_export('app.run')
def run(main=None, argv=None):
"""Runs the program with an optional 'main' function and 'argv' list."""
# Define help flags.
_define_help_flags()
# Parse known flags.
argv = flags.FLAGS(_sys.argv if argv is None else argv, known_only=True)
main = main or _sys.modules["__main__"].main
# Call the main function, passing through any arguments
# to the final program.
_sys.exit(main(argv))
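# Illustrative sketch (assumptions: absl-py is installed and this module is
# executed as `python -m tensorboard.compat.tensorflow_stub.app`; the flag
# name below is hypothetical and for demonstration only). It shows the usual
# entry-point pattern that `run()` expects: define flags, then hand `run` a
# `main(argv)` callable.
if __name__ == "__main__":
    flags.DEFINE_string("demo_logdir", "/tmp/logs", "Directory to read.")
    def _demo_main(argv):
        # argv holds the program name plus any arguments not consumed by flags.
        print("unparsed args:", argv[1:])
        print("demo_logdir:", flags.FLAGS.demo_logdir)
        return 0
    run(_demo_main)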

View File

@ -0,0 +1,132 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functions for Python 2 vs. 3 compatibility.
## Conversion routines
In addition to the functions below, `as_str` converts an object to a `str`.
## Types
The compatibility module also provides the following types:
* `bytes_or_text_types`
* `complex_types`
* `integral_types`
* `real_types`
"""
import numbers as _numbers
import numpy as _np
from tensorboard.compat.tensorflow_stub.compat.v1 import * # noqa
def as_bytes(bytes_or_text, encoding="utf-8"):
"""Converts either bytes or unicode to `bytes`, using utf-8 encoding for
text.
Args:
bytes_or_text: A `bytes`, `str`, or `unicode` object.
encoding: A string indicating the charset for encoding unicode.
Returns:
A `bytes` object.
Raises:
TypeError: If `bytes_or_text` is not a binary or unicode string.
"""
if isinstance(bytes_or_text, str):
return bytes_or_text.encode(encoding)
elif isinstance(bytes_or_text, bytes):
return bytes_or_text
else:
raise TypeError(
"Expected binary or unicode string, got %r" % (bytes_or_text,)
)
def as_text(bytes_or_text, encoding="utf-8"):
"""Returns the given argument as a unicode string.
Args:
bytes_or_text: A `bytes`, `str`, or `unicode` object.
encoding: A string indicating the charset for decoding unicode.
Returns:
A `unicode` (Python 2) or `str` (Python 3) object.
Raises:
TypeError: If `bytes_or_text` is not a binary or unicode string.
"""
if isinstance(bytes_or_text, str):
return bytes_or_text
elif isinstance(bytes_or_text, bytes):
return bytes_or_text.decode(encoding)
else:
raise TypeError(
"Expected binary or unicode string, got %r" % bytes_or_text
)
# Convert an object to a `str` in both Python 2 and 3.
as_str = as_text
# @tf_export('compat.as_str_any')
def as_str_any(value):
"""Converts to `str` as `str(value)`, but use `as_str` for `bytes`.
Args:
value: A object that can be converted to `str`.
Returns:
A `str` object.
"""
if isinstance(value, bytes):
return as_str(value)
else:
return str(value)
# @tf_export('compat.path_to_str')
def path_to_str(path):
"""Returns the file system path representation of a `PathLike` object, else
as it is.
Args:
path: An object that can be converted to path representation.
Returns:
A `str` object.
"""
if hasattr(path, "__fspath__"):
path = as_str_any(path.__fspath__())
return path
# Numpy 1.8 scalars don't inherit from numbers.Integral in Python 3, so we
# need to check them specifically. The same goes for Real and Complex.
integral_types = (_numbers.Integral, _np.integer)
# tf_export('compat.integral_types').export_constant(__name__, 'integral_types')
real_types = (_numbers.Real, _np.integer, _np.floating)
# tf_export('compat.real_types').export_constant(__name__, 'real_types')
complex_types = (_numbers.Complex, _np.number)
# tf_export('compat.complex_types').export_constant(__name__, 'complex_types')
# Either bytes or text.
bytes_or_text_types = (bytes, str)
# tf_export('compat.bytes_or_text_types').export_constant(__name__,
# 'bytes_or_text_types')
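# Minimal usage sketch (illustrative, not part of the upstream module): the
# helpers above round-trip bytes/text and normalise os.PathLike values.
if __name__ == "__main__":
    import pathlib
    assert as_bytes("caf\u00e9") == b"caf\xc3\xa9"
    assert as_text(b"caf\xc3\xa9") == "caf\u00e9"
    assert as_str_any(b"run_1") == "run_1"
    assert path_to_str(pathlib.Path("logs")) == "logs"
    print("compat helpers OK")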

View File

@ -0,0 +1,20 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Set this in tensorboard/compat/tensorflow_stub/__init__.py to eliminate
# any cycles on import
#
# from tensorboard.compat.tensorflow_stub import pywrap_tensorflow # noqa

View File

@ -0,0 +1,692 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Library of dtypes (Tensor element types)."""
import numpy as np
from . import pywrap_tensorflow
from tensorboard.compat.proto import types_pb2
_np_bfloat16 = pywrap_tensorflow.TF_bfloat16_type()
# @tf_export("DType")
class DType:
"""Represents the type of the elements in a `Tensor`.
The following `DType` objects are defined:
* `tf.float16`: 16-bit half-precision floating-point.
* `tf.float32`: 32-bit single-precision floating-point.
* `tf.float64`: 64-bit double-precision floating-point.
* `tf.bfloat16`: 16-bit truncated floating-point.
* `tf.complex64`: 64-bit single-precision complex.
* `tf.complex128`: 128-bit double-precision complex.
* `tf.int8`: 8-bit signed integer.
* `tf.uint8`: 8-bit unsigned integer.
* `tf.uint16`: 16-bit unsigned integer.
* `tf.uint32`: 32-bit unsigned integer.
* `tf.uint64`: 64-bit unsigned integer.
* `tf.int16`: 16-bit signed integer.
* `tf.int32`: 32-bit signed integer.
* `tf.int64`: 64-bit signed integer.
* `tf.bool`: Boolean.
* `tf.string`: String.
* `tf.qint8`: Quantized 8-bit signed integer.
* `tf.quint8`: Quantized 8-bit unsigned integer.
* `tf.qint16`: Quantized 16-bit signed integer.
* `tf.quint16`: Quantized 16-bit unsigned integer.
* `tf.qint32`: Quantized 32-bit signed integer.
* `tf.resource`: Handle to a mutable resource.
* `tf.variant`: Values of arbitrary types.
In addition, variants of these types with the `_ref` suffix are
defined for reference-typed tensors.
The `tf.as_dtype()` function converts numpy types and string type
names to a `DType` object.
"""
def __init__(self, type_enum):
"""Creates a new `DataType`.
NOTE(mrry): In normal circumstances, you should not need to
construct a `DataType` object directly. Instead, use the
`tf.as_dtype()` function.
Args:
type_enum: A `types_pb2.DataType` enum value.
Raises:
          TypeError: If `type_enum` is not a valid `types_pb2.DataType` value.
"""
# TODO(mrry): Make the necessary changes (using __new__) to ensure
# that calling this returns one of the interned values.
type_enum = int(type_enum)
if (
type_enum not in types_pb2.DataType.values()
or type_enum == types_pb2.DT_INVALID
):
raise TypeError(
"type_enum is not a valid types_pb2.DataType: %s" % type_enum
)
self._type_enum = type_enum
@property
def _is_ref_dtype(self):
"""Returns `True` if this `DType` represents a reference type."""
return self._type_enum > 100
@property
def _as_ref(self):
"""Returns a reference `DType` based on this `DType`."""
if self._is_ref_dtype:
return self
else:
return _INTERN_TABLE[self._type_enum + 100]
@property
def base_dtype(self):
"""Returns a non-reference `DType` based on this `DType`."""
if self._is_ref_dtype:
return _INTERN_TABLE[self._type_enum - 100]
else:
return self
@property
def real_dtype(self):
"""Returns the dtype correspond to this dtype's real part."""
base = self.base_dtype
if base == complex64:
return float32
elif base == complex128:
return float64
else:
return self
@property
def is_numpy_compatible(self):
return self._type_enum not in _NUMPY_INCOMPATIBLE
@property
def as_numpy_dtype(self):
"""Returns a `numpy.dtype` based on this `DType`."""
return _TF_TO_NP[self._type_enum]
@property
def as_datatype_enum(self):
"""Returns a `types_pb2.DataType` enum value based on this `DType`."""
return self._type_enum
@property
def is_bool(self):
"""Returns whether this is a boolean data type."""
return self.base_dtype == bool
@property
def is_integer(self):
"""Returns whether this is a (non-quantized) integer type."""
return (
self.is_numpy_compatible
and not self.is_quantized
and np.issubdtype(self.as_numpy_dtype, np.integer)
)
@property
def is_floating(self):
"""Returns whether this is a (non-quantized, real) floating point
type."""
return (
self.is_numpy_compatible
and np.issubdtype(self.as_numpy_dtype, np.floating)
) or self.base_dtype == bfloat16
@property
def is_complex(self):
"""Returns whether this is a complex floating point type."""
return self.base_dtype in (complex64, complex128)
@property
def is_quantized(self):
"""Returns whether this is a quantized data type."""
return self.base_dtype in _QUANTIZED_DTYPES_NO_REF
@property
def is_unsigned(self):
"""Returns whether this type is unsigned.
Non-numeric, unordered, and quantized types are not considered unsigned, and
this function returns `False`.
Returns:
Whether a `DType` is unsigned.
"""
try:
return self.min == 0
except TypeError:
return False
@property
def min(self):
"""Returns the minimum representable value in this data type.
Raises:
TypeError: if this is a non-numeric, unordered, or quantized type.
"""
if self.is_quantized or self.base_dtype in (
bool,
string,
complex64,
complex128,
):
raise TypeError("Cannot find minimum value of %s." % self)
        # There is no simple way to get the minimum value of a dtype; we have
        # to check float and integer types separately.
try:
return np.finfo(self.as_numpy_dtype).min
        except:  # bare except, since the exceptions raised by finfo are not documented
try:
return np.iinfo(self.as_numpy_dtype).min
except:
if self.base_dtype == bfloat16:
return _np_bfloat16(float.fromhex("-0x1.FEp127"))
raise TypeError("Cannot find minimum value of %s." % self)
@property
def max(self):
"""Returns the maximum representable value in this data type.
Raises:
TypeError: if this is a non-numeric, unordered, or quantized type.
"""
if self.is_quantized or self.base_dtype in (
bool,
string,
complex64,
complex128,
):
raise TypeError("Cannot find maximum value of %s." % self)
        # There is no simple way to get the maximum value of a dtype; we have
        # to check float and integer types separately.
try:
return np.finfo(self.as_numpy_dtype).max
        except:  # bare except, since the exceptions raised by finfo are not documented
try:
return np.iinfo(self.as_numpy_dtype).max
except:
if self.base_dtype == bfloat16:
return _np_bfloat16(float.fromhex("0x1.FEp127"))
raise TypeError("Cannot find maximum value of %s." % self)
@property
def limits(self, clip_negative=True):
"""Return intensity limits, i.e. (min, max) tuple, of the dtype.
        Args:
          clip_negative: bool, optional. If True, clip the negative range
            (i.e. return 0 for min intensity) even if the image dtype allows
            negative values.
        Returns:
          min, max: tuple of lower and upper intensity limits.
"""
min, max = dtype_range[
self.as_numpy_dtype
] # pylint: disable=redefined-builtin
if clip_negative:
min = 0 # pylint: disable=redefined-builtin
return min, max
def is_compatible_with(self, other):
"""Returns True if the `other` DType will be converted to this DType.
The conversion rules are as follows:
```python
DType(T) .is_compatible_with(DType(T)) == True
DType(T) .is_compatible_with(DType(T).as_ref) == True
DType(T).as_ref.is_compatible_with(DType(T)) == False
DType(T).as_ref.is_compatible_with(DType(T).as_ref) == True
```
Args:
other: A `DType` (or object that may be converted to a `DType`).
Returns:
True if a Tensor of the `other` `DType` will be implicitly converted to
this `DType`.
"""
other = as_dtype(other)
return self._type_enum in (
other.as_datatype_enum,
other.base_dtype.as_datatype_enum,
)
def __eq__(self, other):
"""Returns True iff this DType refers to the same type as `other`."""
if other is None:
return False
try:
dtype = as_dtype(other).as_datatype_enum
return self._type_enum == dtype # pylint: disable=protected-access
except TypeError:
return False
def __ne__(self, other):
"""Returns True iff self != other."""
return not self.__eq__(other)
@property
def name(self):
"""Returns the string name for this `DType`."""
return _TYPE_TO_STRING[self._type_enum]
def __int__(self):
return self._type_enum
def __str__(self):
return "<dtype: %r>" % self.name
def __repr__(self):
return "tf." + self.name
def __hash__(self):
return self._type_enum
def __reduce__(self):
return as_dtype, (self.name,)
@property
def size(self):
if (
self._type_enum == types_pb2.DT_VARIANT
or self._type_enum == types_pb2.DT_RESOURCE
):
return 1
return np.dtype(self.as_numpy_dtype).itemsize
# Define data type range of numpy dtype
dtype_range = {
np.bool_: (False, True),
np.uint8: (0, 255),
np.uint16: (0, 65535),
np.int8: (-128, 127),
np.int16: (-32768, 32767),
np.int64: (-(2**63), 2**63 - 1),
np.uint64: (0, 2**64 - 1),
np.int32: (-(2**31), 2**31 - 1),
np.uint32: (0, 2**32 - 1),
np.float32: (-1, 1),
np.float64: (-1, 1),
}
# Define standard wrappers for the types_pb2.DataType enum.
resource = DType(types_pb2.DT_RESOURCE)
# tf_export("resource").export_constant(__name__, "resource")
variant = DType(types_pb2.DT_VARIANT)
# tf_export("variant").export_constant(__name__, "variant")
float16 = DType(types_pb2.DT_HALF)
# tf_export("float16").export_constant(__name__, "float16")
half = float16
# tf_export("half").export_constant(__name__, "half")
float32 = DType(types_pb2.DT_FLOAT)
# tf_export("float32").export_constant(__name__, "float32")
float64 = DType(types_pb2.DT_DOUBLE)
# tf_export("float64").export_constant(__name__, "float64")
double = float64
# tf_export("double").export_constant(__name__, "double")
int32 = DType(types_pb2.DT_INT32)
# tf_export("int32").export_constant(__name__, "int32")
uint8 = DType(types_pb2.DT_UINT8)
# tf_export("uint8").export_constant(__name__, "uint8")
uint16 = DType(types_pb2.DT_UINT16)
# tf_export("uint16").export_constant(__name__, "uint16")
uint32 = DType(types_pb2.DT_UINT32)
# tf_export("uint32").export_constant(__name__, "uint32")
uint64 = DType(types_pb2.DT_UINT64)
# tf_export("uint64").export_constant(__name__, "uint64")
int16 = DType(types_pb2.DT_INT16)
# tf_export("int16").export_constant(__name__, "int16")
int8 = DType(types_pb2.DT_INT8)
# tf_export("int8").export_constant(__name__, "int8")
string = DType(types_pb2.DT_STRING)
# tf_export("string").export_constant(__name__, "string")
complex64 = DType(types_pb2.DT_COMPLEX64)
# tf_export("complex64").export_constant(__name__, "complex64")
complex128 = DType(types_pb2.DT_COMPLEX128)
# tf_export("complex128").export_constant(__name__, "complex128")
int64 = DType(types_pb2.DT_INT64)
# tf_export("int64").export_constant(__name__, "int64")
bool = DType(types_pb2.DT_BOOL) # pylint: disable=redefined-builtin
# tf_export("bool").export_constant(__name__, "bool")
qint8 = DType(types_pb2.DT_QINT8)
# tf_export("qint8").export_constant(__name__, "qint8")
quint8 = DType(types_pb2.DT_QUINT8)
# tf_export("quint8").export_constant(__name__, "quint8")
qint16 = DType(types_pb2.DT_QINT16)
# tf_export("qint16").export_constant(__name__, "qint16")
quint16 = DType(types_pb2.DT_QUINT16)
# tf_export("quint16").export_constant(__name__, "quint16")
qint32 = DType(types_pb2.DT_QINT32)
# tf_export("qint32").export_constant(__name__, "qint32")
resource_ref = DType(types_pb2.DT_RESOURCE_REF)
variant_ref = DType(types_pb2.DT_VARIANT_REF)
bfloat16 = DType(types_pb2.DT_BFLOAT16)
# tf_export("bfloat16").export_constant(__name__, "bfloat16")
float16_ref = DType(types_pb2.DT_HALF_REF)
half_ref = float16_ref
float32_ref = DType(types_pb2.DT_FLOAT_REF)
float64_ref = DType(types_pb2.DT_DOUBLE_REF)
double_ref = float64_ref
int32_ref = DType(types_pb2.DT_INT32_REF)
uint32_ref = DType(types_pb2.DT_UINT32_REF)
uint8_ref = DType(types_pb2.DT_UINT8_REF)
uint16_ref = DType(types_pb2.DT_UINT16_REF)
int16_ref = DType(types_pb2.DT_INT16_REF)
int8_ref = DType(types_pb2.DT_INT8_REF)
string_ref = DType(types_pb2.DT_STRING_REF)
complex64_ref = DType(types_pb2.DT_COMPLEX64_REF)
complex128_ref = DType(types_pb2.DT_COMPLEX128_REF)
int64_ref = DType(types_pb2.DT_INT64_REF)
uint64_ref = DType(types_pb2.DT_UINT64_REF)
bool_ref = DType(types_pb2.DT_BOOL_REF)
qint8_ref = DType(types_pb2.DT_QINT8_REF)
quint8_ref = DType(types_pb2.DT_QUINT8_REF)
qint16_ref = DType(types_pb2.DT_QINT16_REF)
quint16_ref = DType(types_pb2.DT_QUINT16_REF)
qint32_ref = DType(types_pb2.DT_QINT32_REF)
bfloat16_ref = DType(types_pb2.DT_BFLOAT16_REF)
_NUMPY_INCOMPATIBLE = frozenset(
[
types_pb2.DT_VARIANT,
types_pb2.DT_VARIANT_REF,
types_pb2.DT_RESOURCE,
types_pb2.DT_RESOURCE_REF,
]
)
# Maintain an intern table so that we don't have to create a large
# number of small objects.
_INTERN_TABLE = {
types_pb2.DT_HALF: float16,
types_pb2.DT_FLOAT: float32,
types_pb2.DT_DOUBLE: float64,
types_pb2.DT_INT32: int32,
types_pb2.DT_UINT8: uint8,
types_pb2.DT_UINT16: uint16,
types_pb2.DT_UINT32: uint32,
types_pb2.DT_UINT64: uint64,
types_pb2.DT_INT16: int16,
types_pb2.DT_INT8: int8,
types_pb2.DT_STRING: string,
types_pb2.DT_COMPLEX64: complex64,
types_pb2.DT_COMPLEX128: complex128,
types_pb2.DT_INT64: int64,
types_pb2.DT_BOOL: bool,
types_pb2.DT_QINT8: qint8,
types_pb2.DT_QUINT8: quint8,
types_pb2.DT_QINT16: qint16,
types_pb2.DT_QUINT16: quint16,
types_pb2.DT_QINT32: qint32,
types_pb2.DT_BFLOAT16: bfloat16,
types_pb2.DT_RESOURCE: resource,
types_pb2.DT_VARIANT: variant,
types_pb2.DT_HALF_REF: float16_ref,
types_pb2.DT_FLOAT_REF: float32_ref,
types_pb2.DT_DOUBLE_REF: float64_ref,
types_pb2.DT_INT32_REF: int32_ref,
types_pb2.DT_UINT32_REF: uint32_ref,
types_pb2.DT_UINT8_REF: uint8_ref,
types_pb2.DT_UINT16_REF: uint16_ref,
types_pb2.DT_INT16_REF: int16_ref,
types_pb2.DT_INT8_REF: int8_ref,
types_pb2.DT_STRING_REF: string_ref,
types_pb2.DT_COMPLEX64_REF: complex64_ref,
types_pb2.DT_COMPLEX128_REF: complex128_ref,
types_pb2.DT_INT64_REF: int64_ref,
types_pb2.DT_UINT64_REF: uint64_ref,
types_pb2.DT_BOOL_REF: bool_ref,
types_pb2.DT_QINT8_REF: qint8_ref,
types_pb2.DT_QUINT8_REF: quint8_ref,
types_pb2.DT_QINT16_REF: qint16_ref,
types_pb2.DT_QUINT16_REF: quint16_ref,
types_pb2.DT_QINT32_REF: qint32_ref,
types_pb2.DT_BFLOAT16_REF: bfloat16_ref,
types_pb2.DT_RESOURCE_REF: resource_ref,
types_pb2.DT_VARIANT_REF: variant_ref,
}
# Standard mappings between types_pb2.DataType values and string names.
_TYPE_TO_STRING = {
types_pb2.DT_HALF: "float16",
types_pb2.DT_FLOAT: "float32",
types_pb2.DT_DOUBLE: "float64",
types_pb2.DT_INT32: "int32",
types_pb2.DT_UINT8: "uint8",
types_pb2.DT_UINT16: "uint16",
types_pb2.DT_UINT32: "uint32",
types_pb2.DT_UINT64: "uint64",
types_pb2.DT_INT16: "int16",
types_pb2.DT_INT8: "int8",
types_pb2.DT_STRING: "string",
types_pb2.DT_COMPLEX64: "complex64",
types_pb2.DT_COMPLEX128: "complex128",
types_pb2.DT_INT64: "int64",
types_pb2.DT_BOOL: "bool",
types_pb2.DT_QINT8: "qint8",
types_pb2.DT_QUINT8: "quint8",
types_pb2.DT_QINT16: "qint16",
types_pb2.DT_QUINT16: "quint16",
types_pb2.DT_QINT32: "qint32",
types_pb2.DT_BFLOAT16: "bfloat16",
types_pb2.DT_RESOURCE: "resource",
types_pb2.DT_VARIANT: "variant",
types_pb2.DT_HALF_REF: "float16_ref",
types_pb2.DT_FLOAT_REF: "float32_ref",
types_pb2.DT_DOUBLE_REF: "float64_ref",
types_pb2.DT_INT32_REF: "int32_ref",
types_pb2.DT_UINT32_REF: "uint32_ref",
types_pb2.DT_UINT8_REF: "uint8_ref",
types_pb2.DT_UINT16_REF: "uint16_ref",
types_pb2.DT_INT16_REF: "int16_ref",
types_pb2.DT_INT8_REF: "int8_ref",
types_pb2.DT_STRING_REF: "string_ref",
types_pb2.DT_COMPLEX64_REF: "complex64_ref",
types_pb2.DT_COMPLEX128_REF: "complex128_ref",
types_pb2.DT_INT64_REF: "int64_ref",
types_pb2.DT_UINT64_REF: "uint64_ref",
types_pb2.DT_BOOL_REF: "bool_ref",
types_pb2.DT_QINT8_REF: "qint8_ref",
types_pb2.DT_QUINT8_REF: "quint8_ref",
types_pb2.DT_QINT16_REF: "qint16_ref",
types_pb2.DT_QUINT16_REF: "quint16_ref",
types_pb2.DT_QINT32_REF: "qint32_ref",
types_pb2.DT_BFLOAT16_REF: "bfloat16_ref",
types_pb2.DT_RESOURCE_REF: "resource_ref",
types_pb2.DT_VARIANT_REF: "variant_ref",
}
_STRING_TO_TF = {
value: _INTERN_TABLE[key] for key, value in _TYPE_TO_STRING.items()
}
# Add non-canonical aliases.
_STRING_TO_TF["half"] = float16
_STRING_TO_TF["half_ref"] = float16_ref
_STRING_TO_TF["float"] = float32
_STRING_TO_TF["float_ref"] = float32_ref
_STRING_TO_TF["double"] = float64
_STRING_TO_TF["double_ref"] = float64_ref
# Numpy representation for quantized dtypes.
#
# These are magic strings that are used in the swig wrapper to identify
# quantized types.
# TODO(mrry,keveman): Investigate Numpy type registration to replace this
# hard-coding of names.
_np_qint8 = np.dtype([("qint8", np.int8)])
_np_quint8 = np.dtype([("quint8", np.uint8)])
_np_qint16 = np.dtype([("qint16", np.int16)])
_np_quint16 = np.dtype([("quint16", np.uint16)])
_np_qint32 = np.dtype([("qint32", np.int32)])
# _np_bfloat16 is defined by a module import.
# Custom struct dtype for directly-fed ResourceHandles of supported type(s).
np_resource = np.dtype([("resource", np.ubyte)])
# Standard mappings between types_pb2.DataType values and numpy.dtypes.
_NP_TO_TF = frozenset(
[
(np.float16, float16),
(np.float32, float32),
(np.float64, float64),
(np.int32, int32),
(np.int64, int64),
(np.uint8, uint8),
(np.uint16, uint16),
(np.uint32, uint32),
(np.uint64, uint64),
(np.int16, int16),
(np.int8, int8),
(np.complex64, complex64),
(np.complex128, complex128),
(np.object_, string),
(np.bool_, bool),
(_np_qint8, qint8),
(_np_quint8, quint8),
(_np_qint16, qint16),
(_np_quint16, quint16),
(_np_qint32, qint32),
# TODO(#1677): _np_bfloat16 is defined as 0. This causes `as_dtype` to
# error. Add below back after we fix `TF_bfloat16_type`.
# (_np_bfloat16, bfloat16),
]
)
_TF_TO_NP = {
types_pb2.DT_HALF: np.float16,
types_pb2.DT_FLOAT: np.float32,
types_pb2.DT_DOUBLE: np.float64,
types_pb2.DT_INT32: np.int32,
types_pb2.DT_UINT8: np.uint8,
types_pb2.DT_UINT16: np.uint16,
types_pb2.DT_UINT32: np.uint32,
types_pb2.DT_UINT64: np.uint64,
types_pb2.DT_INT16: np.int16,
types_pb2.DT_INT8: np.int8,
# NOTE(touts): For strings we use np.object as it supports variable length
# strings.
types_pb2.DT_STRING: np.object_,
types_pb2.DT_COMPLEX64: np.complex64,
types_pb2.DT_COMPLEX128: np.complex128,
types_pb2.DT_INT64: np.int64,
types_pb2.DT_BOOL: np.bool_,
types_pb2.DT_QINT8: _np_qint8,
types_pb2.DT_QUINT8: _np_quint8,
types_pb2.DT_QINT16: _np_qint16,
types_pb2.DT_QUINT16: _np_quint16,
types_pb2.DT_QINT32: _np_qint32,
types_pb2.DT_BFLOAT16: _np_bfloat16,
# Ref types
types_pb2.DT_HALF_REF: np.float16,
types_pb2.DT_FLOAT_REF: np.float32,
types_pb2.DT_DOUBLE_REF: np.float64,
types_pb2.DT_INT32_REF: np.int32,
types_pb2.DT_UINT32_REF: np.uint32,
types_pb2.DT_UINT8_REF: np.uint8,
types_pb2.DT_UINT16_REF: np.uint16,
types_pb2.DT_INT16_REF: np.int16,
types_pb2.DT_INT8_REF: np.int8,
types_pb2.DT_STRING_REF: np.object_,
types_pb2.DT_COMPLEX64_REF: np.complex64,
types_pb2.DT_COMPLEX128_REF: np.complex128,
types_pb2.DT_INT64_REF: np.int64,
types_pb2.DT_UINT64_REF: np.uint64,
types_pb2.DT_BOOL_REF: np.bool_,
types_pb2.DT_QINT8_REF: _np_qint8,
types_pb2.DT_QUINT8_REF: _np_quint8,
types_pb2.DT_QINT16_REF: _np_qint16,
types_pb2.DT_QUINT16_REF: _np_quint16,
types_pb2.DT_QINT32_REF: _np_qint32,
types_pb2.DT_BFLOAT16_REF: _np_bfloat16,
}
_QUANTIZED_DTYPES_NO_REF = frozenset([qint8, quint8, qint16, quint16, qint32])
_QUANTIZED_DTYPES_REF = frozenset(
[qint8_ref, quint8_ref, qint16_ref, quint16_ref, qint32_ref]
)
QUANTIZED_DTYPES = _QUANTIZED_DTYPES_REF.union(_QUANTIZED_DTYPES_NO_REF)
# tf_export("QUANTIZED_DTYPES").export_constant(__name__, "QUANTIZED_DTYPES")
_PYTHON_TO_TF = {float: float32, bool: bool}
# @tf_export("as_dtype")
def as_dtype(type_value):
"""Converts the given `type_value` to a `DType`.
Args:
type_value: A value that can be converted to a `tf.DType` object. This may
currently be a `tf.DType` object, a [`DataType`
enum](https://www.tensorflow.org/code/tensorflow/core/framework/types.proto),
a string type name, or a `numpy.dtype`.
Returns:
A `DType` corresponding to `type_value`.
Raises:
TypeError: If `type_value` cannot be converted to a `DType`.
"""
if isinstance(type_value, DType):
return type_value
try:
return _INTERN_TABLE[type_value]
except KeyError:
pass
try:
return _STRING_TO_TF[type_value]
except KeyError:
pass
try:
return _PYTHON_TO_TF[type_value]
except KeyError:
pass
if isinstance(type_value, np.dtype):
        # The numpy dtype for strings is variable length. We cannot compare it
        # against a single constant (np.string does not exist), so we compare
        # dtype.type instead to be sure it is a string type.
if type_value.type == np.bytes_ or type_value.type == np.str_:
return string
if isinstance(type_value, (type, np.dtype)):
for key, val in _NP_TO_TF:
try:
if key == type_value:
return val
except TypeError as e:
raise TypeError(
"Cannot convert {} to a dtype. {}".format(type_value, e)
)
raise TypeError(
"Cannot convert value %r to a TensorFlow DType." % type_value
)
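# Minimal usage sketch (illustrative only; run e.g. as
# `python -m tensorboard.compat.tensorflow_stub.dtypes`): `as_dtype` accepts
# DType objects, enum values, string names, and numpy types, and the resulting
# DType exposes the properties defined above.
if __name__ == "__main__":
    assert as_dtype("float32") is float32
    assert as_dtype(np.int64) is int64
    assert as_dtype(types_pb2.DT_QINT8) is qint8
    assert float32.is_floating and not float32.is_integer
    assert int32_ref.base_dtype is int32
    assert complex64.real_dtype is float32
    print(string, repr(string))  # <dtype: 'string'> tf.string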

View File

@ -0,0 +1,169 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# Contains error codes defined in tensorflow/core/lib/core/error_codes.proto
# Not an error; returned on success
OK = 0
# The operation was cancelled (typically by the caller).
CANCELLED = 1
"""
Unknown error. An example of where this error may be returned is
if a Status value received from another address space belongs to
an error-space that is not known in this address space. Also
errors raised by APIs that do not return enough error information
may be converted to this error.
"""
UNKNOWN = 2
"""
Client specified an invalid argument. Note that this differs
from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
that are problematic regardless of the state of the system
(e.g., a malformed file name).
"""
INVALID_ARGUMENT = 3
"""
Deadline expired before operation could complete. For operations
that change the state of the system, this error may be returned
even if the operation has completed successfully. For example, a
successful response from a server could have been delayed long
enough for the deadline to expire.
"""
DEADLINE_EXCEEDED = 4
"""
Some requested entity (e.g., file or directory) was not found.
For privacy reasons, this code *may* be returned when the client
does not have the access right to the entity.
"""
NOT_FOUND = 5
"""
Some entity that we attempted to create (e.g., file or directory)
already exists.
"""
ALREADY_EXISTS = 6
"""
The caller does not have permission to execute the specified
operation. PERMISSION_DENIED must not be used for rejections
caused by exhausting some resource (use RESOURCE_EXHAUSTED
instead for those errors). PERMISSION_DENIED must not be
used if the caller can not be identified (use UNAUTHENTICATED
instead for those errors).
"""
PERMISSION_DENIED = 7
"""
Some resource has been exhausted, perhaps a per-user quota, or
perhaps the entire file system is out of space.
"""
RESOURCE_EXHAUSTED = 8
"""
Operation was rejected because the system is not in a state
required for the operation's execution. For example, the directory
to be deleted may be non-empty, an rmdir operation may be applied to
a non-directory, etc.
A litmus test that may help a service implementor in deciding
between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
(a) Use UNAVAILABLE if the client can retry just the failing call.
(b) Use ABORTED if the client should retry at a higher-level
(e.g., restarting a read-modify-write sequence).
(c) Use FAILED_PRECONDITION if the client should not retry until
the system state has been explicitly fixed. E.g., if an "rmdir"
fails because the directory is non-empty, FAILED_PRECONDITION
should be returned since the client should not retry unless
they have first fixed up the directory by deleting files from it.
(d) Use FAILED_PRECONDITION if the client performs conditional
REST Get/Update/Delete on a resource and the resource on the
server does not match the condition. E.g., conflicting
read-modify-write on the same resource.
"""
FAILED_PRECONDITION = 9
"""
The operation was aborted, typically due to a concurrency issue
like sequencer check failures, transaction aborts, etc.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
ABORTED = 10
"""
Operation tried to iterate past the valid input range. E.g., seeking or
reading past end of file.
Unlike INVALID_ARGUMENT, this error indicates a problem that may
be fixed if the system state changes. For example, a 32-bit file
system will generate INVALID_ARGUMENT if asked to read at an
offset that is not in the range [0,2^32-1], but it will generate
OUT_OF_RANGE if asked to read from an offset past the current
file size.
There is a fair bit of overlap between FAILED_PRECONDITION and
OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
error) when it applies so that callers who are iterating through
a space can easily look for an OUT_OF_RANGE error to detect when
they are done.
"""
OUT_OF_RANGE = 11
# Operation is not implemented or not supported/enabled in this service.
UNIMPLEMENTED = 12
"""
Internal errors. Means some invariant expected by the underlying
system has been broken. If you see one of these errors,
something is very broken.
"""
INTERNAL = 13
"""
The service is currently unavailable. This is most likely a
transient condition and may be corrected by retrying with
a backoff.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
UNAVAILABLE = 14
# Unrecoverable data loss or corruption.
DATA_LOSS = 15
"""
The request does not have valid authentication credentials for the
operation.
"""
UNAUTHENTICATED = 16
"""
An extra enum entry to prevent people from writing code that
fails to compile when a new code is added.
Nobody should ever reference this enumeration entry. In particular,
if you write C++ code that switches on this enumeration, add a default:
case instead of a case that mentions this enumeration entry.
Nobody should rely on the value (currently 20) listed here. It
may change in the future.
"""
DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_ = 20
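# Illustrative sketch (not part of the upstream module): the constants above
# are plain ints mirroring error_codes.proto, so a reverse lookup from code to
# name is just a scan of this module's upper-case globals.
if __name__ == "__main__":
    names_by_code = {
        value: name
        for name, value in globals().items()
        if name.isupper() and isinstance(value, int)
    }
    print(names_by_code[FAILED_PRECONDITION])  # FAILED_PRECONDITION
    print(names_by_code[UNAVAILABLE])  # UNAVAILABLE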

View File

@ -0,0 +1,508 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Exception types for TensorFlow errors."""
import traceback
import warnings
from . import error_codes
# @tf_export("OpError", "errors.OpError")
class OpError(Exception):
"""A generic error that is raised when TensorFlow execution fails.
Whenever possible, the session will raise a more specific subclass
of `OpError` from the `tf.errors` module.
"""
def __init__(self, node_def, op, message, error_code):
"""Creates a new `OpError` indicating that a particular op failed.
Args:
node_def: The `node_def_pb2.NodeDef` proto representing the op that
failed, if known; otherwise None.
op: The `ops.Operation` that failed, if known; otherwise None.
message: The message string describing the failure.
error_code: The `error_codes.Code` describing the error.
"""
super().__init__()
self._message = message
self._node_def = node_def
self._op = op
self._error_code = error_code
@property
def message(self):
"""The error message that describes the error."""
return self._message
@property
def op(self):
"""The operation that failed, if known.
*N.B.* If the failed op was synthesized at runtime, e.g. a `Send`
or `Recv` op, there will be no corresponding
@{tf.Operation}
object. In that case, this will return `None`, and you should
instead use the @{tf.OpError.node_def} to
discover information about the op.
Returns:
The `Operation` that failed, or None.
"""
return self._op
@property
def error_code(self):
"""The integer error code that describes the error."""
return self._error_code
@property
def node_def(self):
"""The `NodeDef` proto representing the op that failed."""
return self._node_def
def __str__(self):
if self._op is not None:
output = [
"%s\n\nCaused by op %r, defined at:\n"
% (self.message, self._op.name)
]
curr_traceback_list = traceback.format_list(self._op.traceback)
output.extend(curr_traceback_list)
# pylint: disable=protected-access
original_op = self._op._original_op
# pylint: enable=protected-access
while original_op is not None:
output.append(
"\n...which was originally created as op %r, defined at:\n"
% (original_op.name,)
)
prev_traceback_list = curr_traceback_list
curr_traceback_list = traceback.format_list(
original_op.traceback
)
# Attempt to elide large common subsequences of the subsequent
# stack traces.
#
# TODO(mrry): Consider computing the actual longest common subsequence.
is_eliding = False
elide_count = 0
last_elided_line = None
for line, line_in_prev in zip(
curr_traceback_list, prev_traceback_list
):
if line == line_in_prev:
if is_eliding:
elide_count += 1
last_elided_line = line
else:
output.append(line)
is_eliding = True
elide_count = 0
else:
if is_eliding:
if elide_count > 0:
output.extend(
[
"[elided %d identical lines from previous traceback]\n"
% (elide_count - 1,),
last_elided_line,
]
)
is_eliding = False
output.extend(line)
# pylint: disable=protected-access
original_op = original_op._original_op
# pylint: enable=protected-access
output.append(
"\n%s (see above for traceback): %s\n"
% (type(self).__name__, self.message)
)
return "".join(output)
else:
return self.message
OK = error_codes.OK
# tf_export("errors.OK").export_constant(__name__, "OK")
CANCELLED = error_codes.CANCELLED
# tf_export("errors.CANCELLED").export_constant(__name__, "CANCELLED")
UNKNOWN = error_codes.UNKNOWN
# tf_export("errors.UNKNOWN").export_constant(__name__, "UNKNOWN")
INVALID_ARGUMENT = error_codes.INVALID_ARGUMENT
# tf_export("errors.INVALID_ARGUMENT").export_constant(__name__,
# "INVALID_ARGUMENT")
DEADLINE_EXCEEDED = error_codes.DEADLINE_EXCEEDED
# tf_export("errors.DEADLINE_EXCEEDED").export_constant(__name__,
# "DEADLINE_EXCEEDED")
NOT_FOUND = error_codes.NOT_FOUND
# tf_export("errors.NOT_FOUND").export_constant(__name__, "NOT_FOUND")
ALREADY_EXISTS = error_codes.ALREADY_EXISTS
# tf_export("errors.ALREADY_EXISTS").export_constant(__name__, "ALREADY_EXISTS")
PERMISSION_DENIED = error_codes.PERMISSION_DENIED
# tf_export("errors.PERMISSION_DENIED").export_constant(__name__,
# "PERMISSION_DENIED")
UNAUTHENTICATED = error_codes.UNAUTHENTICATED
# tf_export("errors.UNAUTHENTICATED").export_constant(__name__, "UNAUTHENTICATED")
RESOURCE_EXHAUSTED = error_codes.RESOURCE_EXHAUSTED
# tf_export("errors.RESOURCE_EXHAUSTED").export_constant(__name__,
# "RESOURCE_EXHAUSTED")
FAILED_PRECONDITION = error_codes.FAILED_PRECONDITION
# tf_export("errors.FAILED_PRECONDITION").export_constant(__name__,
# "FAILED_PRECONDITION")
ABORTED = error_codes.ABORTED
# tf_export("errors.ABORTED").export_constant(__name__, "ABORTED")
OUT_OF_RANGE = error_codes.OUT_OF_RANGE
# tf_export("errors.OUT_OF_RANGE").export_constant(__name__, "OUT_OF_RANGE")
UNIMPLEMENTED = error_codes.UNIMPLEMENTED
# tf_export("errors.UNIMPLEMENTED").export_constant(__name__, "UNIMPLEMENTED")
INTERNAL = error_codes.INTERNAL
# tf_export("errors.INTERNAL").export_constant(__name__, "INTERNAL")
UNAVAILABLE = error_codes.UNAVAILABLE
# tf_export("errors.UNAVAILABLE").export_constant(__name__, "UNAVAILABLE")
DATA_LOSS = error_codes.DATA_LOSS
# tf_export("errors.DATA_LOSS").export_constant(__name__, "DATA_LOSS")
# @tf_export("errors.CancelledError")
class CancelledError(OpError):
"""Raised when an operation or step is cancelled.
For example, a long-running operation (e.g.
@{tf.QueueBase.enqueue} may be
cancelled by running another operation (e.g.
@{tf.QueueBase.close},
or by @{tf.Session.close}.
A step that is running such a long-running operation will fail by raising
`CancelledError`.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `CancelledError`."""
super().__init__(node_def, op, message, CANCELLED)
# @tf_export("errors.UnknownError")
class UnknownError(OpError):
"""Unknown error.
An example of where this error may be returned is if a Status value
received from another address space belongs to an error-space that
is not known to this address space. Also errors raised by APIs that
do not return enough error information may be converted to this
error.
@@__init__
"""
def __init__(self, node_def, op, message, error_code=UNKNOWN):
"""Creates an `UnknownError`."""
super().__init__(node_def, op, message, error_code)
# @tf_export("errors.InvalidArgumentError")
class InvalidArgumentError(OpError):
"""Raised when an operation receives an invalid argument.
    This may occur, for example, if an operation receives an input
tensor that has an invalid value or shape. For example, the
@{tf.matmul} op will raise this
error if it receives an input that is not a matrix, and the
@{tf.reshape} op will raise
this error if the new shape does not match the number of elements in the input
tensor.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `InvalidArgumentError`."""
super().__init__(node_def, op, message, INVALID_ARGUMENT)
# @tf_export("errors.DeadlineExceededError")
class DeadlineExceededError(OpError):
"""Raised when a deadline expires before an operation could complete.
This exception is not currently used.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `DeadlineExceededError`."""
super().__init__(node_def, op, message, DEADLINE_EXCEEDED)
# @tf_export("errors.NotFoundError")
class NotFoundError(OpError):
"""Raised when a requested entity (e.g., a file or directory) was not
found.
For example, running the
@{tf.WholeFileReader.read}
operation could raise `NotFoundError` if it receives the name of a file that
does not exist.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `NotFoundError`."""
super().__init__(node_def, op, message, NOT_FOUND)
# @tf_export("errors.AlreadyExistsError")
class AlreadyExistsError(OpError):
"""Raised when an entity that we attempted to create already exists.
For example, running an operation that saves a file
(e.g. @{tf.train.Saver.save})
could potentially raise this exception if an explicit filename for an
existing file was passed.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `AlreadyExistsError`."""
super().__init__(node_def, op, message, ALREADY_EXISTS)
# @tf_export("errors.PermissionDeniedError")
class PermissionDeniedError(OpError):
"""Raised when the caller does not have permission to run an operation.
For example, running the
@{tf.WholeFileReader.read}
operation could raise `PermissionDeniedError` if it receives the name of a
file for which the user does not have the read file permission.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `PermissionDeniedError`."""
super().__init__(node_def, op, message, PERMISSION_DENIED)
# @tf_export("errors.UnauthenticatedError")
class UnauthenticatedError(OpError):
"""The request does not have valid authentication credentials.
This exception is not currently used.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `UnauthenticatedError`."""
super().__init__(node_def, op, message, UNAUTHENTICATED)
# @tf_export("errors.ResourceExhaustedError")
class ResourceExhaustedError(OpError):
"""Some resource has been exhausted.
For example, this error might be raised if a per-user quota is
exhausted, or perhaps the entire file system is out of space.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `ResourceExhaustedError`."""
super().__init__(node_def, op, message, RESOURCE_EXHAUSTED)
# @tf_export("errors.FailedPreconditionError")
class FailedPreconditionError(OpError):
"""Operation was rejected because the system is not in a state to execute
it.
This exception is most commonly raised when running an operation
that reads a @{tf.Variable}
before it has been initialized.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `FailedPreconditionError`."""
super().__init__(node_def, op, message, FAILED_PRECONDITION)
# @tf_export("errors.AbortedError")
class AbortedError(OpError):
"""The operation was aborted, typically due to a concurrent action.
For example, running a
@{tf.QueueBase.enqueue}
operation may raise `AbortedError` if a
@{tf.QueueBase.close} operation
previously ran.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `AbortedError`."""
super().__init__(node_def, op, message, ABORTED)
# @tf_export("errors.OutOfRangeError")
class OutOfRangeError(OpError):
"""Raised when an operation iterates past the valid input range.
This exception is raised in "end-of-file" conditions, such as when a
@{tf.QueueBase.dequeue}
operation is blocked on an empty queue, and a
@{tf.QueueBase.close}
operation executes.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `OutOfRangeError`."""
super().__init__(node_def, op, message, OUT_OF_RANGE)
# @tf_export("errors.UnimplementedError")
class UnimplementedError(OpError):
"""Raised when an operation has not been implemented.
Some operations may raise this error when passed otherwise-valid
    arguments that they do not currently support. For example, running
the @{tf.nn.max_pool} operation
would raise this error if pooling was requested on the batch dimension,
because this is not yet supported.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `UnimplementedError`."""
super().__init__(node_def, op, message, UNIMPLEMENTED)
# @tf_export("errors.InternalError")
class InternalError(OpError):
"""Raised when the system experiences an internal error.
This exception is raised when some invariant expected by the runtime
has been broken. Catching this exception is not recommended.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `InternalError`."""
super().__init__(node_def, op, message, INTERNAL)
# @tf_export("errors.UnavailableError")
class UnavailableError(OpError):
"""Raised when the runtime is currently unavailable.
This exception is not currently used.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates an `UnavailableError`."""
super().__init__(node_def, op, message, UNAVAILABLE)
# @tf_export("errors.DataLossError")
class DataLossError(OpError):
"""Raised when unrecoverable data loss or corruption is encountered.
For example, this may be raised by running a
@{tf.WholeFileReader.read}
operation, if the file is truncated while it is being read.
@@__init__
"""
def __init__(self, node_def, op, message):
"""Creates a `DataLossError`."""
super().__init__(node_def, op, message, DATA_LOSS)
_CODE_TO_EXCEPTION_CLASS = {
CANCELLED: CancelledError,
UNKNOWN: UnknownError,
INVALID_ARGUMENT: InvalidArgumentError,
DEADLINE_EXCEEDED: DeadlineExceededError,
NOT_FOUND: NotFoundError,
ALREADY_EXISTS: AlreadyExistsError,
PERMISSION_DENIED: PermissionDeniedError,
UNAUTHENTICATED: UnauthenticatedError,
RESOURCE_EXHAUSTED: ResourceExhaustedError,
FAILED_PRECONDITION: FailedPreconditionError,
ABORTED: AbortedError,
OUT_OF_RANGE: OutOfRangeError,
UNIMPLEMENTED: UnimplementedError,
INTERNAL: InternalError,
UNAVAILABLE: UnavailableError,
DATA_LOSS: DataLossError,
}
_EXCEPTION_CLASS_TO_CODE = dict(
((class_, code) for (code, class_) in _CODE_TO_EXCEPTION_CLASS.items())
)
# @tf_export("errors.exception_type_from_error_code")
def exception_type_from_error_code(error_code):
return _CODE_TO_EXCEPTION_CLASS[error_code]
# @tf_export("errors.error_code_from_exception_type")
def error_code_from_exception_type(cls):
return _EXCEPTION_CLASS_TO_CODE[cls]
def _make_specific_exception(node_def, op, message, error_code):
try:
exc_type = exception_type_from_error_code(error_code)
return exc_type(node_def, op, message)
except KeyError:
warnings.warn("Unknown error code: %d" % error_code)
return UnknownError(node_def, op, message, error_code)
# Named like a function for backwards compatibility with the
# @tf_contextlib.contextmanager version, which was switched to a class to avoid
# some object creation overhead.
# TODO(b/77295559): expand use of TF_Status* SWIG typemap and deprecate this.
# @tf_export("errors.raise_exception_on_not_ok_status") # pylint: disable=invalid-name
class raise_exception_on_not_ok_status:
"""Context manager to check for C API status."""
def __enter__(self):
return "Status not OK"
def __exit__(self, type_arg, value_arg, traceback_arg):
return False # False values do not suppress exceptions
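# A minimal usage sketch for the code<->exception mapping defined above. The
# helper name and the literal code 12345 are illustrative only and not part of
# the original module; everything else it touches is defined earlier in this file.
def _example_round_trip(code=NOT_FOUND):
    """Map a canonical status code to its exception class and back."""
    exc_type = exception_type_from_error_code(code)
    assert error_code_from_exception_type(exc_type) == code
    # Codes with no dedicated class fall back to UnknownError (with a warning)
    # instead of raising KeyError.
    assert isinstance(
        _make_specific_exception(None, None, "boom", 12345), UnknownError
    )
    return exc_type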

View File

@ -0,0 +1,125 @@
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Import router for absl.flags.
See https://github.com/abseil/abseil-py.
"""
import logging as _logging
import sys as _sys
# go/tf-wildcard-import
from absl.flags import * # pylint: disable=wildcard-import
# Since we wrap absl.flags DEFINE functions, we need to declare this module
# does not affect key flags.
disclaim_key_flags() # pylint: disable=undefined-variable
_RENAMED_ARGUMENTS = {
"flag_name": "name",
"default_value": "default",
"docstring": "help",
}
def _wrap_define_function(original_function):
"""Wraps absl.flags's define functions so tf.flags accepts old names."""
def wrapper(*args, **kwargs):
"""Wrapper function that turns old keyword names to new ones."""
has_old_names = False
        for old_name, new_name in _RENAMED_ARGUMENTS.items():
if old_name in kwargs:
has_old_names = True
value = kwargs.pop(old_name)
kwargs[new_name] = value
if has_old_names:
_logging.warning(
"Use of the keyword argument names (flag_name, default_value, "
"docstring) is deprecated, please use (name, default, help) instead."
)
return original_function(*args, **kwargs)
return wrapper
class _FlagValuesWrapper:
"""Wrapper class for absl.flags.FLAGS.
    The difference is that tf.compat.v1.flags.FLAGS implicitly parses
    sys.argv when a flag value is read before FLAGS has been explicitly
    parsed, whereas absl.flags.FLAGS raises an exception in that case.
"""
def __init__(self, flags_object):
self.__dict__["__wrapped"] = flags_object
def __getattribute__(self, name):
if name == "__dict__":
return super().__getattribute__(name)
elif name == "__class__":
return super.__class__
return self.__dict__["__wrapped"].__getattribute__(name)
def __getattr__(self, name):
wrapped = self.__dict__["__wrapped"]
# To maintain backwards compatibility, implicitly parse flags when reading
# a flag.
if not wrapped.is_parsed():
wrapped(_sys.argv)
return wrapped.__getattr__(name)
def __setattr__(self, name, value):
return self.__dict__["__wrapped"].__setattr__(name, value)
def __delattr__(self, name):
return self.__dict__["__wrapped"].__delattr__(name)
def __dir__(self):
return self.__dict__["__wrapped"].__dir__()
def __getitem__(self, name):
return self.__dict__["__wrapped"].__getitem__(name)
def __setitem__(self, name, flag):
return self.__dict__["__wrapped"].__setitem__(name, flag)
def __len__(self):
return self.__dict__["__wrapped"].__len__()
def __iter__(self):
return self.__dict__["__wrapped"].__iter__()
def __str__(self):
return self.__dict__["__wrapped"].__str__()
def __call__(self, *args, **kwargs):
return self.__dict__["__wrapped"].__call__(*args, **kwargs)
# pylint: disable=invalid-name,used-before-assignment
# absl.flags APIs use `default` as the name of the default value argument.
# Allow the following functions to continue to accept `default_value`.
DEFINE_string = _wrap_define_function(DEFINE_string)
DEFINE_boolean = _wrap_define_function(DEFINE_boolean)
DEFINE_bool = DEFINE_boolean
DEFINE_float = _wrap_define_function(DEFINE_float)
DEFINE_integer = _wrap_define_function(DEFINE_integer)
# pylint: enable=invalid-name,used-before-assignment
FLAGS = _FlagValuesWrapper(FLAGS) # pylint: disable=used-before-assignment
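# A minimal usage sketch for the wrappers above. The helper name and the flag
# "demo_dir" are illustrative only and not part of the original module.
def _example_define_and_read():
    """Define a flag with the deprecated keyword names and read it back.
    The old names (flag_name/default_value/docstring) are rewritten to
    (name/default/help) by _wrap_define_function, and the first read of a
    flag value triggers an implicit FLAGS(sys.argv) parse in
    _FlagValuesWrapper.__getattr__.
    """
    DEFINE_string(
        flag_name="demo_dir",
        default_value="/tmp/demo",
        docstring="Where demo output goes.",
    )
    return FLAGS.demo_dir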

View File

@ -0,0 +1,17 @@
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from . import gfile # noqa

File diff suppressed because it is too large

View File

@ -0,0 +1,287 @@
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""A wrapper for TensorFlow SWIG-generated bindings."""
import array
import struct
from . import errors
from .io import gfile
TFE_DEVICE_PLACEMENT_WARN = 0
TFE_DEVICE_PLACEMENT_SILENT_FOR_INT32 = 0
TFE_DEVICE_PLACEMENT_SILENT = 0
TFE_DEVICE_PLACEMENT_EXPLICIT = 0
def __getattr__(attr):
return 0
def TF_bfloat16_type():
return 0
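# TFRecord framing stores a "masked" CRC-32C of both the length header and the
# payload: the raw checksum is rotated right by 15 bits and offset by the fixed
# constant 0xA282EAD8, so a CRC computed over data that itself embeds CRCs is
# less likely to collide with them.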
def masked_crc32c(data):
x = u32(crc32c(data))
return u32(((x >> 15) | u32(x << 17)) + 0xA282EAD8)
def u32(x):
return x & 0xFFFFFFFF
# fmt: off
CRC_TABLE = (
0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4,
0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB,
0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B,
0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24,
0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B,
0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384,
0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54,
0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B,
0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A,
0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35,
0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5,
0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA,
0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45,
0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A,
0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A,
0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595,
0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48,
0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957,
0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687,
0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198,
0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927,
0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38,
0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8,
0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7,
0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096,
0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789,
0xEB1FCBAD, 0x197448AE, 0x0A24BB5A, 0xF84F3859,
0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46,
0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9,
0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6,
0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36,
0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829,
0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C,
0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93,
0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043,
0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C,
0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3,
0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC,
0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C,
0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033,
0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652,
0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D,
0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D,
0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982,
0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D,
0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622,
0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2,
0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED,
0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530,
0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F,
0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF,
0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0,
0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F,
0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540,
0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90,
0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F,
0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE,
0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1,
0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321,
0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E,
0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81,
0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E,
0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E,
0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351,
)
# fmt: on
CRC_INIT = 0
_MASK = 0xFFFFFFFF
def crc_update(crc, data):
"""Update CRC-32C checksum with data.
Args:
crc: 32-bit checksum to update as long.
data: byte array, string or iterable over bytes.
Returns:
32-bit updated CRC-32C as long.
"""
if type(data) != array.array or data.itemsize != 1:
buf = array.array("B", data)
else:
buf = data
crc ^= _MASK
for b in buf:
table_index = (crc ^ b) & 0xFF
crc = (CRC_TABLE[table_index] ^ (crc >> 8)) & _MASK
return crc ^ _MASK
def crc_finalize(crc):
"""Finalize CRC-32C checksum.
This function should be called as last step of crc calculation.
Args:
crc: 32-bit checksum as long.
Returns:
finalized 32-bit checksum as long
"""
return crc & _MASK
def crc32c(data):
"""Compute CRC-32C checksum of the data.
Args:
data: byte array, string or iterable over bytes.
Returns:
32-bit CRC-32C checksum of data as long.
"""
return crc_finalize(crc_update(CRC_INIT, data))
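# Hedged sanity check for the helpers above: for the standard CRC-32C test
# vector b"123456789", crc32c() returns 0xE3069283, and masked_crc32c() applies
# the rotate-and-offset transform described earlier to that same value.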
class PyRecordReader_New:
def __init__(
self, filename=None, start_offset=0, compression_type=None, status=None
):
if filename is None:
raise errors.NotFoundError(
None, None, "No filename provided, cannot read Events"
)
if not gfile.exists(filename):
raise errors.NotFoundError(
None,
None,
"{} does not point to valid Events file".format(filename),
)
if start_offset:
raise errors.UnimplementedError(
None, None, "start offset not supported by compat reader"
)
if compression_type:
# TODO: Handle gzip and zlib compressed files
raise errors.UnimplementedError(
None, None, "compression not supported by compat reader"
)
self.filename = filename
self.start_offset = start_offset
self.compression_type = compression_type
self.status = status
self.curr_event = None
self.file_handle = gfile.GFile(self.filename, "rb")
# Maintain a buffer of partially read records, so we can recover from
# truncated records upon a retry.
self._buffer = b""
self._buffer_pos = 0
def GetNext(self):
# Each new read should start at the beginning of any partial record.
self._buffer_pos = 0
# Read the header
self.curr_event = None
header_str = self._read(8)
if not header_str:
# Hit EOF so raise and exit
raise errors.OutOfRangeError(None, None, "No more events to read")
if len(header_str) < 8:
raise self._truncation_error("header")
header = struct.unpack("<Q", header_str)
# Read the crc32, which is 4 bytes, and check it against
# the crc32 of the header
crc_header_str = self._read(4)
if len(crc_header_str) < 4:
raise self._truncation_error("header crc")
crc_header = struct.unpack("<I", crc_header_str)
header_crc_calc = masked_crc32c(header_str)
if header_crc_calc != crc_header[0]:
raise errors.DataLossError(
None, None, "{} failed header crc32 check".format(self.filename)
)
# The length of the header tells us how many bytes the Event
# string takes
header_len = int(header[0])
event_str = self._read(header_len)
if len(event_str) < header_len:
raise self._truncation_error("data")
event_crc_calc = masked_crc32c(event_str)
# The next 4 bytes contain the crc32 of the Event string,
# which we check for integrity.
crc_event_str = self._read(4)
if len(crc_event_str) < 4:
raise self._truncation_error("data crc")
crc_event = struct.unpack("<I", crc_event_str)
if event_crc_calc != crc_event[0]:
raise errors.DataLossError(
None,
None,
"{} failed event crc32 check".format(self.filename),
)
# Set the current event to be read later by record() call
self.curr_event = event_str
# Clear the buffered partial record since we're done reading it.
self._buffer = b""
def _read(self, n):
"""Read up to n bytes from the underlying file, with buffering.
Reads are satisfied from a buffer of previous data read starting at
`self._buffer_pos` until the buffer is exhausted, and then from the
actual underlying file. Any new data is added to the buffer, and
`self._buffer_pos` is advanced to the point in the buffer past all
data returned as part of this read.
Args:
n: non-negative number of bytes to read
Returns:
bytestring of data read, up to n bytes
"""
result = self._buffer[self._buffer_pos : self._buffer_pos + n]
self._buffer_pos += len(result)
n -= len(result)
if n > 0:
new_data = self.file_handle.read(n)
result += new_data
self._buffer += new_data
self._buffer_pos += len(new_data)
return result
def _truncation_error(self, section):
return errors.DataLossError(
None,
None,
"{} has truncated record in {}".format(self.filename, section),
)
def record(self):
return self.curr_event
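# A minimal usage sketch for the reader above. The helper name is illustrative
# only and not part of the original module; `path` is whatever events file the
# caller supplies.
def _example_read_all_records(path):
    """Yield every raw serialized event record from `path` until EOF."""
    reader = PyRecordReader_New(filename=path)
    while True:
        try:
            reader.GetNext()
        except errors.OutOfRangeError:
            # GetNext raises OutOfRangeError once the file is exhausted.
            return
        yield reader.record()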

File diff suppressed because it is too large