Tensorflow protobuf stubs (#9873)

Co-authored-by: Mehdi Drissi <mdrissi@snapchat.com>
Co-authored-by: Alex Waygood <Alex.Waygood@Gmail.com>
Co-authored-by: Jelle Zijlstra <jelle.zijlstra@gmail.com>
Mehdi Drissi
2023-03-15 00:51:16 -07:00
committed by GitHub
parent 8877c12cdf
commit 1d15121f1d
84 changed files with 19320 additions and 1 deletion


@@ -64,3 +64,7 @@ tensorflow.keras.layers.Layer.__new__
# disagreements.
tensorflow.keras.layers.*.build
tensorflow.keras.layers.*.compute_output_shape
# The *_pb2.pyi stubs generated by mypy-protobuf diverge from the runtime in many ways.
# These stubs are mainly tested in mypy-protobuf.
.*_pb2.*


@@ -1,6 +1,6 @@
version = "2.11.*"
# requires a version of numpy with a `py.typed` file
-requires = ["numpy>=1.20"]
+requires = ["numpy>=1.20", "types-protobuf"]
[tool.stubtest]
ignore_missing_stub = true

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -0,0 +1,127 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol messages for describing the configuration of the ExampleParserOp."""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class VarLenFeatureProto(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
VALUES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int
INDICES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int
SHAPES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
values_output_tensor_name: builtins.str
indices_output_tensor_name: builtins.str
shapes_output_tensor_name: builtins.str
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
values_output_tensor_name: builtins.str | None = ...,
indices_output_tensor_name: builtins.str | None = ...,
shapes_output_tensor_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "indices_output_tensor_name", b"indices_output_tensor_name", "shapes_output_tensor_name", b"shapes_output_tensor_name", "values_output_tensor_name", b"values_output_tensor_name"]) -> None: ...
global___VarLenFeatureProto = VarLenFeatureProto
@typing_extensions.final
class FixedLenFeatureProto(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DEFAULT_VALUE_FIELD_NUMBER: builtins.int
VALUES_OUTPUT_TENSOR_NAME_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
@property
def default_value(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ...
values_output_tensor_name: builtins.str
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
default_value: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
values_output_tensor_name: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["default_value", b"default_value", "shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["default_value", b"default_value", "dtype", b"dtype", "shape", b"shape", "values_output_tensor_name", b"values_output_tensor_name"]) -> None: ...
global___FixedLenFeatureProto = FixedLenFeatureProto
@typing_extensions.final
class FeatureConfiguration(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FIXED_LEN_FEATURE_FIELD_NUMBER: builtins.int
VAR_LEN_FEATURE_FIELD_NUMBER: builtins.int
@property
def fixed_len_feature(self) -> global___FixedLenFeatureProto: ...
@property
def var_len_feature(self) -> global___VarLenFeatureProto: ...
def __init__(
self,
*,
fixed_len_feature: global___FixedLenFeatureProto | None = ...,
var_len_feature: global___VarLenFeatureProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["config", b"config", "fixed_len_feature", b"fixed_len_feature", "var_len_feature", b"var_len_feature"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["config", b"config"]) -> typing_extensions.Literal["fixed_len_feature", "var_len_feature"] | None: ...
global___FeatureConfiguration = FeatureConfiguration
@typing_extensions.final
class ExampleParserConfiguration(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FeatureMapEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___FeatureConfiguration: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___FeatureConfiguration | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FEATURE_MAP_FIELD_NUMBER: builtins.int
@property
def feature_map(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureConfiguration]: ...
def __init__(
self,
*,
feature_map: collections.abc.Mapping[builtins.str, global___FeatureConfiguration] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["feature_map", b"feature_map"]) -> None: ...
global___ExampleParserConfiguration = ExampleParserConfiguration
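
The `config` oneof in FeatureConfiguration above means each map entry carries either a fixed-length or a variable-length parse plan, never both. A minimal runtime sketch, assuming TensorFlow is installed and that this stub corresponds to tensorflow.core.example.example_parser_configuration_pb2 (the file path is not shown in this diff; the tensor name is illustrative):

from tensorflow.core.example.example_parser_configuration_pb2 import (
    ExampleParserConfiguration,
    VarLenFeatureProto,
)
from tensorflow.core.framework import types_pb2

config = ExampleParserConfiguration()
# MessageMap entries spring into existence on first access;
# submessage fields are filled in via CopyFrom.
config.feature_map["age"].var_len_feature.CopyFrom(
    VarLenFeatureProto(dtype=types_pb2.DT_FLOAT, values_output_tensor_name="age/values")
)
# Setting one member of the "config" oneof clears the other.
assert config.feature_map["age"].WhichOneof("config") == "var_len_feature"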


@@ -0,0 +1,337 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol messages for describing input data Examples for machine learning
model training or inference.
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.example.feature_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class Example(google.protobuf.message.Message):
"""An Example is a mostly-normalized data format for storing data for
training and inference. It contains a key-value store (features); where
each key (string) maps to a Feature message (which is oneof packed BytesList,
FloatList, or Int64List). This flexible and compact format allows the
storage of large amounts of typed data, but requires that the data shape
and use be determined by the configuration files and parsers that are used to
read and write this format. That is, the Example is mostly *not* a
self-describing format. In TensorFlow, Examples are read in row-major
format, so any configuration that describes data with rank-2 or above
should keep this in mind. If you flatten a matrix into a FloatList it should
be stored as [ row 0 ... row 1 ... row M-1 ]
An Example for a movie recommendation application:
features {
feature {
key: "age"
value { float_list {
value: 29.0
}}
}
feature {
key: "movie"
value { bytes_list {
value: "The Shawshank Redemption"
value: "Fight Club"
}}
}
feature {
key: "movie_ratings"
value { float_list {
value: 9.0
value: 9.7
}}
}
feature {
key: "suggestion"
value { bytes_list {
value: "Inception"
}}
}
# Note that this feature exists to be used as a label in training.
# E.g., if training a logistic regression model to predict purchase
# probability in our learning tool we would set the label feature to
# "suggestion_purchased".
feature {
key: "suggestion_purchased"
value { float_list {
value: 1.0
}}
}
# Similar to "suggestion_purchased" above this feature exists to be used
# as a label in training.
# E.g., if training a linear regression model to predict purchase
# price in our learning tool we would set the label feature to
# "purchase_price".
feature {
key: "purchase_price"
value { float_list {
value: 9.99
}}
}
}
A conformant Example data set obeys the following conventions:
- If a Feature K exists in one example with data type T, it must be of
type T in all other examples when present. It may be omitted.
- The number of instances of Feature K list data may vary across examples,
depending on the requirements of the model.
- If a Feature K doesn't exist in an example, a K-specific default will be
used, if configured.
- If a Feature K exists in an example but contains no items, the intent
is considered to be an empty tensor and no default will be used.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FEATURES_FIELD_NUMBER: builtins.int
@property
def features(self) -> tensorflow.core.example.feature_pb2.Features: ...
def __init__(
self,
*,
features: tensorflow.core.example.feature_pb2.Features | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["features", b"features"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["features", b"features"]) -> None: ...
global___Example = Example
@typing_extensions.final
class SequenceExample(google.protobuf.message.Message):
"""A SequenceExample is an Example representing one or more sequences, and
some context. The context contains features which apply to the entire
example. The feature_lists contain a key, value map where each key is
associated with a repeated set of Features (a FeatureList).
A FeatureList thus represents the values of a feature identified by its key
over time / frames.
Below is a SequenceExample for a movie recommendation application recording a
sequence of ratings by a user. The time-independent features ("locale",
"age", "favorites") describing the user are part of the context. The sequence
of movies the user rated are part of the feature_lists. For each movie in the
sequence we have information on its name and actors and the user's rating.
This information is recorded in three separate feature_list(s).
In the example below there are only two movies. All three feature_list(s),
namely "movie_ratings", "movie_names", and "actors" have a feature value for
both movies. Note, that "actors" is itself a bytes_list with multiple
strings per movie.
context: {
feature: {
key : "locale"
value: {
bytes_list: {
value: [ "pt_BR" ]
}
}
}
feature: {
key : "age"
value: {
float_list: {
value: [ 19.0 ]
}
}
}
feature: {
key : "favorites"
value: {
bytes_list: {
value: [ "Majesty Rose", "Savannah Outen", "One Direction" ]
}
}
}
}
feature_lists: {
feature_list: {
key : "movie_ratings"
value: {
feature: {
float_list: {
value: [ 4.5 ]
}
}
feature: {
float_list: {
value: [ 5.0 ]
}
}
}
}
feature_list: {
key : "movie_names"
value: {
feature: {
bytes_list: {
value: [ "The Shawshank Redemption" ]
}
}
feature: {
bytes_list: {
value: [ "Fight Club" ]
}
}
}
}
feature_list: {
key : "actors"
value: {
feature: {
bytes_list: {
value: [ "Tim Robbins", "Morgan Freeman" ]
}
}
feature: {
bytes_list: {
value: [ "Brad Pitt", "Edward Norton", "Helena Bonham Carter" ]
}
}
}
}
}
A conformant SequenceExample data set obeys the following conventions:
Context:
- All conformant context features K must obey the same conventions as
a conformant Example's features (see above).
Feature lists:
- A FeatureList L may be missing in an example; it is up to the
parser configuration to determine if this is allowed or considered
an empty list (zero length).
- If a FeatureList L exists, it may be empty (zero length).
- If a FeatureList L is non-empty, all features within the FeatureList
must have the same data type T. Even across SequenceExamples, the type T
of the FeatureList identified by the same key must be the same. An entry
without any values may serve as an empty feature.
- If a FeatureList L is non-empty, it is up to the parser configuration
to determine if all features within the FeatureList must
have the same size. The same holds for this FeatureList across multiple
examples.
- For sequence modeling, e.g.:
http://colah.github.io/posts/2015-08-Understanding-LSTMs/
https://github.com/tensorflow/nmt
the feature lists represent a sequence of frames.
In this scenario, all FeatureLists in a SequenceExample have the same
number of Feature messages, so that the ith element in each FeatureList
is part of the ith frame (or time step).
Examples of conformant and non-conformant examples' FeatureLists:
Conformant FeatureLists:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
Non-conformant FeatureLists (mismatched types):
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { int64_list: { value: [ 5 ] } } }
} }
Conditionally conformant FeatureLists, the parser configuration determines
if the feature sizes must match:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0, 6.0 ] } } }
} }
Conformant pair of SequenceExample
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
and:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } }
feature: { float_list: { value: [ 2.0 ] } } }
} }
Conformant pair of SequenceExample
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
and:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { }
} }
Conditionally conformant pair of SequenceExample, the parser configuration
determines if the second feature_lists is consistent (zero-length) or
invalid (missing "movie_ratings"):
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
and:
feature_lists: { }
Non-conformant pair of SequenceExample (mismatched types)
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
and:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { int64_list: { value: [ 4 ] } }
feature: { int64_list: { value: [ 5 ] } }
feature: { int64_list: { value: [ 2 ] } } }
} }
Conditionally conformant pair of SequenceExample; the parser configuration
determines if the feature sizes must match:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.5 ] } }
feature: { float_list: { value: [ 5.0 ] } } }
} }
and:
feature_lists: { feature_list: {
key: "movie_ratings"
value: { feature: { float_list: { value: [ 4.0 ] } }
feature: { float_list: { value: [ 5.0, 3.0 ] } } }
} }
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CONTEXT_FIELD_NUMBER: builtins.int
FEATURE_LISTS_FIELD_NUMBER: builtins.int
@property
def context(self) -> tensorflow.core.example.feature_pb2.Features: ...
@property
def feature_lists(self) -> tensorflow.core.example.feature_pb2.FeatureLists: ...
def __init__(
self,
*,
context: tensorflow.core.example.feature_pb2.Features | None = ...,
feature_lists: tensorflow.core.example.feature_pb2.FeatureLists | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["context", b"context", "feature_lists", b"feature_lists"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["context", b"context", "feature_lists", b"feature_lists"]) -> None: ...
global___SequenceExample = SequenceExample
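
A minimal sketch that builds part of the movie-recommendation Example from the docstring above and round-trips it through the wire format, assuming TensorFlow is installed (the example_pb2 module path mirrors the feature_pb2 import in this file):

from tensorflow.core.example.example_pb2 import Example
from tensorflow.core.example.feature_pb2 import BytesList, Feature, Features, FloatList

example = Example(
    features=Features(
        feature={
            "age": Feature(float_list=FloatList(value=[29.0])),
            "movie": Feature(
                bytes_list=BytesList(value=[b"The Shawshank Redemption", b"Fight Club"])
            ),
        }
    )
)
# Serialize and parse back; message equality is field-by-field.
assert Example.FromString(example.SerializeToString()) == example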


@@ -0,0 +1,245 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol messages for describing features for machine learning model
training or inference.
There are three base Feature types:
- bytes
- float
- int64
A Feature contains Lists which may hold zero or more values. These
lists are the base values BytesList, FloatList, Int64List.
Features are organized into categories by name. The Features message
contains the mapping from name to Feature.
Example Features for a movie recommendation application:
feature {
key: "age"
value { float_list {
value: 29.0
}}
}
feature {
key: "movie"
value { bytes_list {
value: "The Shawshank Redemption"
value: "Fight Club"
}}
}
feature {
key: "movie_ratings"
value { float_list {
value: 9.0
value: 9.7
}}
}
feature {
key: "suggestion"
value { bytes_list {
value: "Inception"
}}
}
feature {
key: "suggestion_purchased"
value { int64_list {
value: 1
}}
}
feature {
key: "purchase_price"
value { float_list {
value: 9.99
}}
}
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class BytesList(google.protobuf.message.Message):
"""LINT.IfChange
Containers to hold repeated fundamental values.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
global___BytesList = BytesList
@typing_extensions.final
class FloatList(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.float] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
global___FloatList = FloatList
@typing_extensions.final
class Int64List(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
global___Int64List = Int64List
@typing_extensions.final
class Feature(google.protobuf.message.Message):
"""Containers for non-sequential data."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BYTES_LIST_FIELD_NUMBER: builtins.int
FLOAT_LIST_FIELD_NUMBER: builtins.int
INT64_LIST_FIELD_NUMBER: builtins.int
@property
def bytes_list(self) -> global___BytesList: ...
@property
def float_list(self) -> global___FloatList: ...
@property
def int64_list(self) -> global___Int64List: ...
def __init__(
self,
*,
bytes_list: global___BytesList | None = ...,
float_list: global___FloatList | None = ...,
int64_list: global___Int64List | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["bytes_list", "float_list", "int64_list"] | None: ...
global___Feature = Feature
@typing_extensions.final
class Features(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FeatureEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___Feature: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___Feature | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FEATURE_FIELD_NUMBER: builtins.int
@property
def feature(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___Feature]:
"""Map from feature name to feature."""
def __init__(
self,
*,
feature: collections.abc.Mapping[builtins.str, global___Feature] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["feature", b"feature"]) -> None: ...
global___Features = Features
@typing_extensions.final
class FeatureList(google.protobuf.message.Message):
"""Containers for sequential data.
A FeatureList contains lists of Features. These may hold zero or more
Feature values.
FeatureLists are organized into categories by name. The FeatureLists message
contains the mapping from name to FeatureList.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FEATURE_FIELD_NUMBER: builtins.int
@property
def feature(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Feature]: ...
def __init__(
self,
*,
feature: collections.abc.Iterable[global___Feature] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["feature", b"feature"]) -> None: ...
global___FeatureList = FeatureList
@typing_extensions.final
class FeatureLists(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FeatureListEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___FeatureList: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___FeatureList | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FEATURE_LIST_FIELD_NUMBER: builtins.int
@property
def feature_list(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___FeatureList]:
"""Map from feature name to feature list."""
def __init__(
self,
*,
feature_list: collections.abc.Mapping[builtins.str, global___FeatureList] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["feature_list", b"feature_list"]) -> None: ...
global___FeatureLists = FeatureLists
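
A minimal sketch of the "kind" oneof on Feature, assuming TensorFlow is installed: exactly one of bytes_list / float_list / int64_list is set at a time, and mutating a different member replaces the previous one.

from tensorflow.core.example.feature_pb2 import Feature, FloatList, Int64List

f = Feature(float_list=FloatList(value=[9.0, 9.7]))
assert f.WhichOneof("kind") == "float_list"
f.int64_list.CopyFrom(Int64List(value=[1]))  # switches the oneof
assert f.WhichOneof("kind") == "int64_list"
assert not f.HasField("float_list")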


@@ -0,0 +1,51 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class AllocationDescription(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
REQUESTED_BYTES_FIELD_NUMBER: builtins.int
ALLOCATED_BYTES_FIELD_NUMBER: builtins.int
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
ALLOCATION_ID_FIELD_NUMBER: builtins.int
HAS_SINGLE_REFERENCE_FIELD_NUMBER: builtins.int
PTR_FIELD_NUMBER: builtins.int
requested_bytes: builtins.int
"""Total number of bytes requested"""
allocated_bytes: builtins.int
"""Total number of bytes allocated if known"""
allocator_name: builtins.str
"""Name of the allocator used"""
allocation_id: builtins.int
"""Identifier of the allocated buffer if known"""
has_single_reference: builtins.bool
"""Set if this tensor only has one remaining reference"""
ptr: builtins.int
"""Address of the allocation."""
def __init__(
self,
*,
requested_bytes: builtins.int | None = ...,
allocated_bytes: builtins.int | None = ...,
allocator_name: builtins.str | None = ...,
allocation_id: builtins.int | None = ...,
has_single_reference: builtins.bool | None = ...,
ptr: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["allocated_bytes", b"allocated_bytes", "allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "has_single_reference", b"has_single_reference", "ptr", b"ptr", "requested_bytes", b"requested_bytes"]) -> None: ...
global___AllocationDescription = AllocationDescription
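
Every field of AllocationDescription is optional on the wire; a minimal sketch, assuming TensorFlow is installed (the allocator name is an illustrative value):

from tensorflow.core.framework.allocation_description_pb2 import AllocationDescription

desc = AllocationDescription(requested_bytes=1024, allocator_name="cpu")
desc.ClearField("requested_bytes")
assert desc.requested_bytes == 0  # cleared scalars fall back to the proto default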


@@ -0,0 +1,272 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Defines the text format for including per-op API definition and
overrides for client language op code generators.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ApiDef(google.protobuf.message.Message):
"""Used to specify and override the default API & behavior in the
generated code for client languages, from what you would get from
the OpDef alone. There will be a set of ApiDefs that are common
to all client languages, and another set per client language.
The per-client-language ApiDefs will inherit values from the
common ApiDefs which it can either replace or modify.
We separate the API definition from the OpDef so we can evolve the
API while remaining backwards compatible when interpreting old
graphs. Overrides go in an "api_def.pbtxt" file with a text-format
ApiDefs message.
WARNING: Be *very* careful changing the API for any existing op --
you can change the semantics of existing code. These changes may
need to wait until a major release of TensorFlow to avoid breaking
our compatibility promises.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Visibility:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _VisibilityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ApiDef._Visibility.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT_VISIBILITY: ApiDef._Visibility.ValueType # 0
"""Normally this is "VISIBLE" unless you are inheriting a
different value from another ApiDef.
"""
VISIBLE: ApiDef._Visibility.ValueType # 1
"""Publicly visible in the API."""
SKIP: ApiDef._Visibility.ValueType # 2
"""Do not include this op in the generated API. If visibility is
set to 'SKIP', other fields are ignored for this op.
"""
HIDDEN: ApiDef._Visibility.ValueType # 3
"""Hide this op by putting it into an internal namespace (or whatever
is appropriate in the target language).
"""
class Visibility(_Visibility, metaclass=_VisibilityEnumTypeWrapper): ...
DEFAULT_VISIBILITY: ApiDef.Visibility.ValueType # 0
"""Normally this is "VISIBLE" unless you are inheriting a
different value from another ApiDef.
"""
VISIBLE: ApiDef.Visibility.ValueType # 1
"""Publicly visible in the API."""
SKIP: ApiDef.Visibility.ValueType # 2
"""Do not include this op in the generated API. If visibility is
set to 'SKIP', other fields are ignored for this op.
"""
HIDDEN: ApiDef.Visibility.ValueType # 3
"""Hide this op by putting it into an internal namespace (or whatever
is appropriate in the target language).
"""
@typing_extensions.final
class Endpoint(google.protobuf.message.Message):
"""If you specify any endpoint, this will replace all of the
inherited endpoints. The first endpoint should be the
"canonical" endpoint, and should not be deprecated (unless all
endpoints are deprecated).
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
DEPRECATED_FIELD_NUMBER: builtins.int
DEPRECATION_VERSION_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name should be either like "CamelCaseName" or
"Package.CamelCaseName". Client-language-specific ApiDefs may
use a snake_case convention instead of CamelCase.
"""
deprecated: builtins.bool
"""Set if this endpoint is deprecated. If set to true, a message suggesting
to use a non-deprecated endpoint instead will be printed. If all
endpoints are deprecated, set deprecation_message in ApiDef instead.
"""
deprecation_version: builtins.int
"""Major version when an endpoint will be deleted. For e.g. set this
value to 2 if endpoint should be removed in TensorFlow 2.0 and
deprecated in versions before that.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
deprecated: builtins.bool | None = ...,
deprecation_version: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["deprecated", b"deprecated", "deprecation_version", b"deprecation_version", "name", b"name"]) -> None: ...
@typing_extensions.final
class Arg(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
RENAME_TO_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
name: builtins.str
rename_to: builtins.str
"""Change the name used to access this arg in the API from what
is used in the GraphDef. Note that these names in `backticks`
will also be replaced in the summary & description fields.
"""
description: builtins.str
"""Note: this will replace any inherited arg doc. There is no
current way of modifying arg descriptions (other than replacing
them entirely) as can be done with op descriptions.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
rename_to: builtins.str | None = ...,
description: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "name", b"name", "rename_to", b"rename_to"]) -> None: ...
@typing_extensions.final
class Attr(google.protobuf.message.Message):
"""Description of the graph-construction-time configuration of this
Op. That is to say, this describes the attr fields that will
be specified in the NodeDef.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
RENAME_TO_FIELD_NUMBER: builtins.int
DEFAULT_VALUE_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
name: builtins.str
rename_to: builtins.str
"""Change the name used to access this attr in the API from what
is used in the GraphDef. Note that these names in `backticks`
will also be replaced in the summary & description fields.
"""
@property
def default_value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue:
"""Specify a new default value to use for this attr. This default
will be used when creating new graphs, as opposed to the
default in the OpDef, which will be used when interpreting old
GraphDefs.
"""
description: builtins.str
"""Note: this will replace any inherited attr doc, there is no current
way of modifying attr descriptions as can be done with op descriptions.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
rename_to: builtins.str | None = ...,
default_value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
description: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["default_value", b"default_value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["default_value", b"default_value", "description", b"description", "name", b"name", "rename_to", b"rename_to"]) -> None: ...
GRAPH_OP_NAME_FIELD_NUMBER: builtins.int
DEPRECATION_MESSAGE_FIELD_NUMBER: builtins.int
DEPRECATION_VERSION_FIELD_NUMBER: builtins.int
VISIBILITY_FIELD_NUMBER: builtins.int
ENDPOINT_FIELD_NUMBER: builtins.int
IN_ARG_FIELD_NUMBER: builtins.int
OUT_ARG_FIELD_NUMBER: builtins.int
ARG_ORDER_FIELD_NUMBER: builtins.int
ATTR_FIELD_NUMBER: builtins.int
SUMMARY_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
DESCRIPTION_PREFIX_FIELD_NUMBER: builtins.int
DESCRIPTION_SUFFIX_FIELD_NUMBER: builtins.int
graph_op_name: builtins.str
"""Name of the op (in the OpDef) to specify the API for."""
deprecation_message: builtins.str
"""If this op is deprecated, set deprecation message to the message
that should be logged when this op is used.
The message should indicate alternative op to use, if any.
"""
deprecation_version: builtins.int
"""Major version when the op will be deleted. For e.g. set this
value to 2 if op API should be removed in TensorFlow 2.0 and
deprecated in versions before that.
"""
visibility: global___ApiDef.Visibility.ValueType
@property
def endpoint(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Endpoint]: ...
@property
def in_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Arg]: ...
@property
def out_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Arg]: ...
@property
def arg_order(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""List of original in_arg names to specify new argument order.
Length of arg_order should be either empty to keep current order
or match size of in_arg.
"""
@property
def attr(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef.Attr]: ...
summary: builtins.str
"""One-line human-readable description of what the Op does."""
description: builtins.str
"""Additional, longer human-readable description of what the Op does."""
description_prefix: builtins.str
"""Modify an existing/inherited description by adding text to the beginning
or end.
"""
description_suffix: builtins.str
def __init__(
self,
*,
graph_op_name: builtins.str | None = ...,
deprecation_message: builtins.str | None = ...,
deprecation_version: builtins.int | None = ...,
visibility: global___ApiDef.Visibility.ValueType | None = ...,
endpoint: collections.abc.Iterable[global___ApiDef.Endpoint] | None = ...,
in_arg: collections.abc.Iterable[global___ApiDef.Arg] | None = ...,
out_arg: collections.abc.Iterable[global___ApiDef.Arg] | None = ...,
arg_order: collections.abc.Iterable[builtins.str] | None = ...,
attr: collections.abc.Iterable[global___ApiDef.Attr] | None = ...,
summary: builtins.str | None = ...,
description: builtins.str | None = ...,
description_prefix: builtins.str | None = ...,
description_suffix: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["arg_order", b"arg_order", "attr", b"attr", "deprecation_message", b"deprecation_message", "deprecation_version", b"deprecation_version", "description", b"description", "description_prefix", b"description_prefix", "description_suffix", b"description_suffix", "endpoint", b"endpoint", "graph_op_name", b"graph_op_name", "in_arg", b"in_arg", "out_arg", b"out_arg", "summary", b"summary", "visibility", b"visibility"]) -> None: ...
global___ApiDef = ApiDef
@typing_extensions.final
class ApiDefs(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OP_FIELD_NUMBER: builtins.int
@property
def op(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ApiDef]: ...
def __init__(
self,
*,
op: collections.abc.Iterable[global___ApiDef] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["op", b"op"]) -> None: ...
global___ApiDefs = ApiDefs
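
A minimal sketch of the nested Visibility enum and the repeated Endpoint field, assuming TensorFlow is installed (the endpoint name is illustrative):

from tensorflow.core.framework.api_def_pb2 import ApiDef, ApiDefs

api_def = ApiDef(
    graph_op_name="MatMul",
    visibility=ApiDef.VISIBLE,  # enum members are exposed on the class itself
    endpoint=[ApiDef.Endpoint(name="linalg.matmul")],
)
defs = ApiDefs(op=[api_def])
assert defs.op[0].endpoint[0].name == "linalg.matmul"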


@@ -0,0 +1,187 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class AttrValue(google.protobuf.message.Message):
"""Protocol buffer representing the value for an attr used to configure an Op.
Comment indicates the corresponding attr type. Only the field matching the
attr type may be filled.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ListValue(google.protobuf.message.Message):
"""LINT.IfChange"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
S_FIELD_NUMBER: builtins.int
I_FIELD_NUMBER: builtins.int
F_FIELD_NUMBER: builtins.int
B_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
FUNC_FIELD_NUMBER: builtins.int
@property
def s(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
""""list(string)" """
@property
def i(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
""""list(int)" """
@property
def f(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
""""list(float)" """
@property
def b(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]:
""""list(bool)" """
@property
def type(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]:
""""list(type)" """
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto]:
""""list(shape)" """
@property
def tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]:
""""list(tensor)" """
@property
def func(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NameAttrList]:
""""list(attr)" """
def __init__(
self,
*,
s: collections.abc.Iterable[builtins.bytes] | None = ...,
i: collections.abc.Iterable[builtins.int] | None = ...,
f: collections.abc.Iterable[builtins.float] | None = ...,
b: collections.abc.Iterable[builtins.bool] | None = ...,
type: collections.abc.Iterable[tensorflow.core.framework.types_pb2.DataType.ValueType] | None = ...,
shape: collections.abc.Iterable[tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto] | None = ...,
tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ...,
func: collections.abc.Iterable[global___NameAttrList] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type"]) -> None: ...
S_FIELD_NUMBER: builtins.int
I_FIELD_NUMBER: builtins.int
F_FIELD_NUMBER: builtins.int
B_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
LIST_FIELD_NUMBER: builtins.int
FUNC_FIELD_NUMBER: builtins.int
PLACEHOLDER_FIELD_NUMBER: builtins.int
s: builtins.bytes
""""string" """
i: builtins.int
""""int" """
f: builtins.float
""""float" """
b: builtins.bool
""""bool" """
type: tensorflow.core.framework.types_pb2.DataType.ValueType
""""type" """
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
""""shape" """
@property
def tensor(self) -> tensorflow.core.framework.tensor_pb2.TensorProto:
""""tensor" """
@property
def list(self) -> global___AttrValue.ListValue:
"""any "list(...)" """
@property
def func(self) -> global___NameAttrList:
""""func" represents a function. func.name is a function's name or
a primitive op's name. func.attr.first is the name of an attr
defined for that function. func.attr.second is the value for
that attr in the instantiation.
"""
placeholder: builtins.str
"""This is a placeholder only used in nodes defined inside a
function. It indicates the attr value will be supplied when
the function is instantiated. For example, let us suppose a
node "N" in function "FN". "N" has an attr "A" with value
placeholder = "foo". When FN is instantiated with attr "foo"
set to "bar", the instantiated node N's attr A will have been
given the value "bar".
"""
def __init__(
self,
*,
s: builtins.bytes | None = ...,
i: builtins.int | None = ...,
f: builtins.float | None = ...,
b: builtins.bool | None = ...,
type: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
list: global___AttrValue.ListValue | None = ...,
func: global___NameAttrList | None = ...,
placeholder: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "list", b"list", "placeholder", b"placeholder", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type", "value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["b", b"b", "f", b"f", "func", b"func", "i", b"i", "list", b"list", "placeholder", b"placeholder", "s", b"s", "shape", b"shape", "tensor", b"tensor", "type", b"type", "value", b"value"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["s", "i", "f", "b", "type", "shape", "tensor", "list", "func", "placeholder"] | None: ...
global___AttrValue = AttrValue
@typing_extensions.final
class NameAttrList(google.protobuf.message.Message):
"""A list of attr names and their values. The whole list is attached
with a string name. E.g., MatMul[T=float].
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AttrEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___AttrValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
ATTR_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___AttrValue]: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
attr: collections.abc.Mapping[builtins.str, global___AttrValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["attr", b"attr", "name", b"name"]) -> None: ...
global___NameAttrList = NameAttrList
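
A minimal sketch of the "value" oneof on AttrValue, assuming TensorFlow is installed: scalar members like i and the nested ListValue are mutually exclusive.

from tensorflow.core.framework.attr_value_pb2 import AttrValue

attr = AttrValue(i=3)  # the "int" case
assert attr.WhichOneof("value") == "i"
attr.list.i.extend([1, 2, 3])  # mutating the ListValue switches the oneof to "list"
assert attr.WhichOneof("value") == "list"
assert not attr.HasField("i")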


@@ -0,0 +1,190 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class CostGraphDef(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Node(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class InputInfo(google.protobuf.message.Message):
"""Inputs of this node. They must be executed before this node can be
executed. An input is a particular output of another node, specified
by the node id and the output index.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PRECEDING_NODE_FIELD_NUMBER: builtins.int
PRECEDING_PORT_FIELD_NUMBER: builtins.int
preceding_node: builtins.int
preceding_port: builtins.int
def __init__(
self,
*,
preceding_node: builtins.int | None = ...,
preceding_port: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["preceding_node", b"preceding_node", "preceding_port", b"preceding_port"]) -> None: ...
@typing_extensions.final
class OutputInfo(google.protobuf.message.Message):
"""Outputs of this node."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SIZE_FIELD_NUMBER: builtins.int
ALIAS_INPUT_PORT_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
size: builtins.int
alias_input_port: builtins.int
"""If >= 0, the output is an alias of an input. Note that an alias input
may itself be an alias. The algorithm will therefore need to follow
those pointers.
"""
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
def __init__(
self,
*,
size: builtins.int | None = ...,
alias_input_port: builtins.int | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["alias_input_port", b"alias_input_port", "dtype", b"dtype", "shape", b"shape", "size", b"size"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
DEVICE_FIELD_NUMBER: builtins.int
ID_FIELD_NUMBER: builtins.int
INPUT_INFO_FIELD_NUMBER: builtins.int
OUTPUT_INFO_FIELD_NUMBER: builtins.int
TEMPORARY_MEMORY_SIZE_FIELD_NUMBER: builtins.int
PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int
HOST_TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int
DEVICE_TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int
DEVICE_PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int
COMPUTE_COST_FIELD_NUMBER: builtins.int
COMPUTE_TIME_FIELD_NUMBER: builtins.int
MEMORY_TIME_FIELD_NUMBER: builtins.int
IS_FINAL_FIELD_NUMBER: builtins.int
CONTROL_INPUT_FIELD_NUMBER: builtins.int
INACCURATE_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name of the node. Names are globally unique."""
device: builtins.str
"""The device of the node. Can be empty if the node is mapped to the
default partition or partitioning hasn't been run yet.
"""
id: builtins.int
"""The id of the node. Node ids are only unique inside a partition."""
@property
def input_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.InputInfo]: ...
@property
def output_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node.OutputInfo]: ...
temporary_memory_size: builtins.int
"""Temporary memory used by this node."""
persistent_memory_size: builtins.int
"""Persistent memory used by this node."""
host_temp_memory_size: builtins.int
device_temp_memory_size: builtins.int
device_persistent_memory_size: builtins.int
compute_cost: builtins.int
"""Estimate of the computational cost of this node, in microseconds."""
compute_time: builtins.int
"""Analytical estimate of the computational cost of this node, in
microseconds.
"""
memory_time: builtins.int
"""Analytical estimate of the memory access cost of this node, in
microseconds.
"""
is_final: builtins.bool
"""If true, the output is permanent: it can't be discarded, because this
node is part of the "final output". Nodes may depend on final nodes.
"""
@property
def control_input(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Ids of the control inputs for this node."""
inaccurate: builtins.bool
"""Are the costs inaccurate?"""
def __init__(
self,
*,
name: builtins.str | None = ...,
device: builtins.str | None = ...,
id: builtins.int | None = ...,
input_info: collections.abc.Iterable[global___CostGraphDef.Node.InputInfo] | None = ...,
output_info: collections.abc.Iterable[global___CostGraphDef.Node.OutputInfo] | None = ...,
temporary_memory_size: builtins.int | None = ...,
persistent_memory_size: builtins.int | None = ...,
host_temp_memory_size: builtins.int | None = ...,
device_temp_memory_size: builtins.int | None = ...,
device_persistent_memory_size: builtins.int | None = ...,
compute_cost: builtins.int | None = ...,
compute_time: builtins.int | None = ...,
memory_time: builtins.int | None = ...,
is_final: builtins.bool | None = ...,
control_input: collections.abc.Iterable[builtins.int] | None = ...,
inaccurate: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["compute_cost", b"compute_cost", "compute_time", b"compute_time", "control_input", b"control_input", "device", b"device", "device_persistent_memory_size", b"device_persistent_memory_size", "device_temp_memory_size", b"device_temp_memory_size", "host_temp_memory_size", b"host_temp_memory_size", "id", b"id", "inaccurate", b"inaccurate", "input_info", b"input_info", "is_final", b"is_final", "memory_time", b"memory_time", "name", b"name", "output_info", b"output_info", "persistent_memory_size", b"persistent_memory_size", "temporary_memory_size", b"temporary_memory_size"]) -> None: ...
@typing_extensions.final
class AggregatedCost(google.protobuf.message.Message):
"""Total cost of this graph, typically used for balancing decisions."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
COST_FIELD_NUMBER: builtins.int
DIMENSION_FIELD_NUMBER: builtins.int
cost: builtins.float
"""Aggregated cost value."""
dimension: builtins.str
"""Aggregated cost dimension (e.g. 'memory', 'compute', 'network')."""
def __init__(
self,
*,
cost: builtins.float | None = ...,
dimension: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cost", b"cost", "dimension", b"dimension"]) -> None: ...
NODE_FIELD_NUMBER: builtins.int
COST_FIELD_NUMBER: builtins.int
@property
def node(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.Node]: ...
@property
def cost(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CostGraphDef.AggregatedCost]: ...
def __init__(
self,
*,
node: collections.abc.Iterable[global___CostGraphDef.Node] | None = ...,
cost: collections.abc.Iterable[global___CostGraphDef.AggregatedCost] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cost", b"cost", "node", b"node"]) -> None: ...
global___CostGraphDef = CostGraphDef
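
A minimal sketch of how InputInfo ties nodes together via (preceding_node, preceding_port), assuming TensorFlow is installed (node names and sizes are illustrative):

from tensorflow.core.framework.cost_graph_pb2 import CostGraphDef

graph = CostGraphDef()
producer = graph.node.add(name="producer", id=0)
producer.output_info.add(size=4096)
# The consumer's input is output port 0 of node id 0.
graph.node.add(name="consumer", id=1).input_info.add(preceding_node=0, preceding_port=0)
assert graph.node[1].input_info[0].preceding_node == 0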


@@ -0,0 +1,32 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class Metadata(google.protobuf.message.Message):
"""next: 2"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
name: builtins.bytes
def __init__(
self,
*,
name: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name"]) -> None: ...
global___Metadata = Metadata


@@ -0,0 +1,344 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.model_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _AutoShardPolicy:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _AutoShardPolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutoShardPolicy.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
AUTO: _AutoShardPolicy.ValueType # 0
"""AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding."""
FILE: _AutoShardPolicy.ValueType # 1
"""FILE: Shards by input files (i.e. each worker will get a set of files to
process). When this option is selected, make sure that there is at least as
many files as workers. If there are fewer input files than workers, a
runtime error will be raised.
"""
DATA: _AutoShardPolicy.ValueType # 2
"""DATA: Shards by elements produced by the dataset. Each worker will process
the whole dataset and discard the portion that is not for itself. Note that
for this mode to correctly partitions the dataset elements, the dataset
needs to produce elements in a deterministic order.
"""
HINT: _AutoShardPolicy.ValueType # 3
"""HINT: Looks for the presence of `shard(SHARD_HINT, ...)` which is treated
as a placeholder to replace with `shard(num_workers, worker_index)`.
"""
OFF: _AutoShardPolicy.ValueType # -1
"""OFF: No sharding will be performed."""
class AutoShardPolicy(_AutoShardPolicy, metaclass=_AutoShardPolicyEnumTypeWrapper):
"""Represents the type of auto-sharding we enable."""
AUTO: AutoShardPolicy.ValueType # 0
"""AUTO: Attempts FILE-based sharding, falling back to DATA-based sharding."""
FILE: AutoShardPolicy.ValueType # 1
"""FILE: Shards by input files (i.e. each worker will get a set of files to
process). When this option is selected, make sure that there is at least as
many files as workers. If there are fewer input files than workers, a
runtime error will be raised.
"""
DATA: AutoShardPolicy.ValueType # 2
"""DATA: Shards by elements produced by the dataset. Each worker will process
the whole dataset and discard the portion that is not for itself. Note that
for this mode to correctly partitions the dataset elements, the dataset
needs to produce elements in a deterministic order.
"""
HINT: AutoShardPolicy.ValueType # 3
"""HINT: Looks for the presence of `shard(SHARD_HINT, ...)` which is treated
as a placeholder to replace with `shard(num_workers, worker_index)`.
"""
OFF: AutoShardPolicy.ValueType # -1
"""OFF: No sharding will be performed."""
global___AutoShardPolicy = AutoShardPolicy
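
A minimal sketch of the enum wrapper API (Name and Value come from the _EnumTypeWrapper base typed above), assuming TensorFlow is installed and that this stub corresponds to tensorflow.core.framework.dataset_options_pb2 (the file path is not shown in this diff):

from tensorflow.core.framework import dataset_options_pb2

policy = dataset_options_pb2.AutoShardPolicy.FILE
assert dataset_options_pb2.AutoShardPolicy.Name(policy) == "FILE"
assert dataset_options_pb2.AutoShardPolicy.Value("OFF") == -1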
class _ExternalStatePolicy:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ExternalStatePolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ExternalStatePolicy.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
POLICY_WARN: _ExternalStatePolicy.ValueType # 0
POLICY_IGNORE: _ExternalStatePolicy.ValueType # 1
POLICY_FAIL: _ExternalStatePolicy.ValueType # 2
class ExternalStatePolicy(_ExternalStatePolicy, metaclass=_ExternalStatePolicyEnumTypeWrapper):
"""Represents how to handle external state during serialization."""
POLICY_WARN: ExternalStatePolicy.ValueType # 0
POLICY_IGNORE: ExternalStatePolicy.ValueType # 1
POLICY_FAIL: ExternalStatePolicy.ValueType # 2
global___ExternalStatePolicy = ExternalStatePolicy
@typing_extensions.final
class AutotuneOptions(google.protobuf.message.Message):
"""next: 5"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ENABLED_FIELD_NUMBER: builtins.int
CPU_BUDGET_FIELD_NUMBER: builtins.int
RAM_BUDGET_FIELD_NUMBER: builtins.int
AUTOTUNE_ALGORITHM_FIELD_NUMBER: builtins.int
enabled: builtins.bool
cpu_budget: builtins.int
ram_budget: builtins.int
autotune_algorithm: tensorflow.core.framework.model_pb2.AutotuneAlgorithm.ValueType
def __init__(
self,
*,
enabled: builtins.bool | None = ...,
cpu_budget: builtins.int | None = ...,
ram_budget: builtins.int | None = ...,
autotune_algorithm: tensorflow.core.framework.model_pb2.AutotuneAlgorithm.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["autotune_algorithm", b"autotune_algorithm", "cpu_budget", b"cpu_budget", "enabled", b"enabled", "optional_autotune_algorithm", b"optional_autotune_algorithm", "optional_cpu_budget", b"optional_cpu_budget", "optional_enabled", b"optional_enabled", "optional_ram_budget", b"optional_ram_budget", "ram_budget", b"ram_budget"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["autotune_algorithm", b"autotune_algorithm", "cpu_budget", b"cpu_budget", "enabled", b"enabled", "optional_autotune_algorithm", b"optional_autotune_algorithm", "optional_cpu_budget", b"optional_cpu_budget", "optional_enabled", b"optional_enabled", "optional_ram_budget", b"optional_ram_budget", "ram_budget", b"ram_budget"]) -> None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_autotune_algorithm", b"optional_autotune_algorithm"]) -> typing_extensions.Literal["autotune_algorithm"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_cpu_budget", b"optional_cpu_budget"]) -> typing_extensions.Literal["cpu_budget"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_enabled", b"optional_enabled"]) -> typing_extensions.Literal["enabled"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_ram_budget", b"optional_ram_budget"]) -> typing_extensions.Literal["ram_budget"] | None: ...
global___AutotuneOptions = AutotuneOptions
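# Usage sketch (illustrative only, not part of the generated stub): each scalar
# field above is proto3 `optional`, hence the synthetic `optional_*` oneof groups
# accepted by HasField/WhichOneof. Assuming the same dataset_options_pb2 module:
#
#     from tensorflow.core.framework import dataset_options_pb2 as dopts
#
#     opts = dopts.AutotuneOptions(enabled=True, ram_budget=1 << 30)
#     assert opts.HasField("enabled")
#     assert opts.WhichOneof("optional_enabled") == "enabled"
#     assert not opts.HasField("cpu_budget")  # never set; presence is tracked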
@typing_extensions.final
class CardinalityOptions(google.protobuf.message.Message):
"""next: 2"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _ComputeLevel:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ComputeLevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CardinalityOptions._ComputeLevel.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
CARDINALITY_COMPUTE_UNSPECIFIED: CardinalityOptions._ComputeLevel.ValueType # 0
CARDINALITY_COMPUTE_LOW: CardinalityOptions._ComputeLevel.ValueType # 1
"""Cardinality will only be computed if it can be determined in a cheap
manner (i.e. without reading from file sources). If the cardinality would
be nontrivial to compute, Cardinality() will return UNKNOWN_CARDINALITY.
"""
CARDINALITY_COMPUTE_MODERATE: CardinalityOptions._ComputeLevel.ValueType # 2
"""Moderate effort will be made to determine cardinality, such as reading
index data from source files. If significant work is needed to compute
cardinality (e.g. reading entire source file contents or executing user
defined functions), Cardinality() will return UNKNOWN_CARDINALITY.
"""
class ComputeLevel(_ComputeLevel, metaclass=_ComputeLevelEnumTypeWrapper): ...
CARDINALITY_COMPUTE_UNSPECIFIED: CardinalityOptions.ComputeLevel.ValueType # 0
CARDINALITY_COMPUTE_LOW: CardinalityOptions.ComputeLevel.ValueType # 1
"""Cardinality will only be computed if it can be determined in a cheap
manner (i.e. without reading from file sources). If the cardinality would
be nontrivial to compute, Cardinality() will return UNKNOWN_CARDINALITY.
"""
CARDINALITY_COMPUTE_MODERATE: CardinalityOptions.ComputeLevel.ValueType # 2
"""Moderate effort will be made to determine cardinality, such as reading
index data from source files. If significant work is needed to compute
cardinality (e.g. reading entire source file contents or executing user
defined functions), Cardinality() will return UNKNOWN_CARDINALITY.
"""
COMPUTE_LEVEL_FIELD_NUMBER: builtins.int
compute_level: global___CardinalityOptions.ComputeLevel.ValueType
def __init__(
self,
*,
compute_level: global___CardinalityOptions.ComputeLevel.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["compute_level", b"compute_level"]) -> None: ...
global___CardinalityOptions = CardinalityOptions
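# Usage sketch (illustrative only, not part of the generated stub): nested enum
# values are exposed as attributes of the containing message class:
#
#     from tensorflow.core.framework import dataset_options_pb2 as dopts
#
#     card = dopts.CardinalityOptions(
#         compute_level=dopts.CardinalityOptions.CARDINALITY_COMPUTE_LOW
#     )
#     assert card.compute_level == 1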
@typing_extensions.final
class DistributeOptions(google.protobuf.message.Message):
"""next: 3"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
AUTO_SHARD_POLICY_FIELD_NUMBER: builtins.int
NUM_DEVICES_FIELD_NUMBER: builtins.int
auto_shard_policy: global___AutoShardPolicy.ValueType
num_devices: builtins.int
def __init__(
self,
*,
auto_shard_policy: global___AutoShardPolicy.ValueType | None = ...,
num_devices: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["auto_shard_policy", b"auto_shard_policy", "num_devices", b"num_devices", "optional_num_devices", b"optional_num_devices"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_num_devices", b"optional_num_devices"]) -> typing_extensions.Literal["num_devices"] | None: ...
global___DistributeOptions = DistributeOptions
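# Usage sketch (illustrative only, not part of the generated stub):
# AutoShardPolicy is a file-level enum, so its values are reachable through the
# wrapper class defined above:
#
#     from tensorflow.core.framework import dataset_options_pb2 as dopts
#
#     dist = dopts.DistributeOptions(auto_shard_policy=dopts.AutoShardPolicy.FILE)
#     dist.num_devices = 8
#     assert dist.WhichOneof("optional_num_devices") == "num_devices"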
@typing_extensions.final
class OptimizationOptions(google.protobuf.message.Message):
"""next: 20"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
APPLY_DEFAULT_OPTIMIZATIONS_FIELD_NUMBER: builtins.int
FILTER_FUSION_FIELD_NUMBER: builtins.int
MAP_AND_BATCH_FUSION_FIELD_NUMBER: builtins.int
MAP_AND_FILTER_FUSION_FIELD_NUMBER: builtins.int
MAP_FUSION_FIELD_NUMBER: builtins.int
MAP_PARALLELIZATION_FIELD_NUMBER: builtins.int
NOOP_ELIMINATION_FIELD_NUMBER: builtins.int
PARALLEL_BATCH_FIELD_NUMBER: builtins.int
SHUFFLE_AND_REPEAT_FUSION_FIELD_NUMBER: builtins.int
FILTER_PARALLELIZATION_FIELD_NUMBER: builtins.int
INJECT_PREFETCH_FIELD_NUMBER: builtins.int
apply_default_optimizations: builtins.bool
filter_fusion: builtins.bool
map_and_batch_fusion: builtins.bool
map_and_filter_fusion: builtins.bool
map_fusion: builtins.bool
map_parallelization: builtins.bool
noop_elimination: builtins.bool
parallel_batch: builtins.bool
shuffle_and_repeat_fusion: builtins.bool
filter_parallelization: builtins.bool
inject_prefetch: builtins.bool
def __init__(
self,
*,
apply_default_optimizations: builtins.bool | None = ...,
filter_fusion: builtins.bool | None = ...,
map_and_batch_fusion: builtins.bool | None = ...,
map_and_filter_fusion: builtins.bool | None = ...,
map_fusion: builtins.bool | None = ...,
map_parallelization: builtins.bool | None = ...,
noop_elimination: builtins.bool | None = ...,
parallel_batch: builtins.bool | None = ...,
shuffle_and_repeat_fusion: builtins.bool | None = ...,
filter_parallelization: builtins.bool | None = ...,
inject_prefetch: builtins.bool | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["apply_default_optimizations", b"apply_default_optimizations", "filter_fusion", b"filter_fusion", "filter_parallelization", b"filter_parallelization", "inject_prefetch", b"inject_prefetch", "map_and_batch_fusion", b"map_and_batch_fusion", "map_and_filter_fusion", b"map_and_filter_fusion", "map_fusion", b"map_fusion", "map_parallelization", b"map_parallelization", "noop_elimination", b"noop_elimination", "optional_apply_default_optimizations", b"optional_apply_default_optimizations", "optional_filter_fusion", b"optional_filter_fusion", "optional_filter_parallelization", b"optional_filter_parallelization", "optional_inject_prefetch", b"optional_inject_prefetch", "optional_map_and_batch_fusion", b"optional_map_and_batch_fusion", "optional_map_and_filter_fusion", b"optional_map_and_filter_fusion", "optional_map_fusion", b"optional_map_fusion", "optional_map_parallelization", b"optional_map_parallelization", "optional_noop_elimination", b"optional_noop_elimination", "optional_parallel_batch", b"optional_parallel_batch", "optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion", "parallel_batch", b"parallel_batch", "shuffle_and_repeat_fusion", b"shuffle_and_repeat_fusion"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["apply_default_optimizations", b"apply_default_optimizations", "filter_fusion", b"filter_fusion", "filter_parallelization", b"filter_parallelization", "inject_prefetch", b"inject_prefetch", "map_and_batch_fusion", b"map_and_batch_fusion", "map_and_filter_fusion", b"map_and_filter_fusion", "map_fusion", b"map_fusion", "map_parallelization", b"map_parallelization", "noop_elimination", b"noop_elimination", "optional_apply_default_optimizations", b"optional_apply_default_optimizations", "optional_filter_fusion", b"optional_filter_fusion", "optional_filter_parallelization", b"optional_filter_parallelization", "optional_inject_prefetch", b"optional_inject_prefetch", "optional_map_and_batch_fusion", b"optional_map_and_batch_fusion", "optional_map_and_filter_fusion", b"optional_map_and_filter_fusion", "optional_map_fusion", b"optional_map_fusion", "optional_map_parallelization", b"optional_map_parallelization", "optional_noop_elimination", b"optional_noop_elimination", "optional_parallel_batch", b"optional_parallel_batch", "optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion", "parallel_batch", b"parallel_batch", "shuffle_and_repeat_fusion", b"shuffle_and_repeat_fusion"]) -> None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_apply_default_optimizations", b"optional_apply_default_optimizations"]) -> typing_extensions.Literal["apply_default_optimizations"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_filter_fusion", b"optional_filter_fusion"]) -> typing_extensions.Literal["filter_fusion"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_filter_parallelization", b"optional_filter_parallelization"]) -> typing_extensions.Literal["filter_parallelization"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_inject_prefetch", b"optional_inject_prefetch"]) -> typing_extensions.Literal["inject_prefetch"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_map_and_batch_fusion", b"optional_map_and_batch_fusion"]) -> typing_extensions.Literal["map_and_batch_fusion"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_map_and_filter_fusion", b"optional_map_and_filter_fusion"]) -> typing_extensions.Literal["map_and_filter_fusion"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_map_fusion", b"optional_map_fusion"]) -> typing_extensions.Literal["map_fusion"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_map_parallelization", b"optional_map_parallelization"]) -> typing_extensions.Literal["map_parallelization"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_noop_elimination", b"optional_noop_elimination"]) -> typing_extensions.Literal["noop_elimination"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_parallel_batch", b"optional_parallel_batch"]) -> typing_extensions.Literal["parallel_batch"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_shuffle_and_repeat_fusion", b"optional_shuffle_and_repeat_fusion"]) -> typing_extensions.Literal["shuffle_and_repeat_fusion"] | None: ...
global___OptimizationOptions = OptimizationOptions
@typing_extensions.final
class ThreadingOptions(google.protobuf.message.Message):
"""next: 3"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MAX_INTRA_OP_PARALLELISM_FIELD_NUMBER: builtins.int
PRIVATE_THREADPOOL_SIZE_FIELD_NUMBER: builtins.int
max_intra_op_parallelism: builtins.int
private_threadpool_size: builtins.int
def __init__(
self,
*,
max_intra_op_parallelism: builtins.int | None = ...,
private_threadpool_size: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["max_intra_op_parallelism", b"max_intra_op_parallelism", "optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism", "optional_private_threadpool_size", b"optional_private_threadpool_size", "private_threadpool_size", b"private_threadpool_size"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["max_intra_op_parallelism", b"max_intra_op_parallelism", "optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism", "optional_private_threadpool_size", b"optional_private_threadpool_size", "private_threadpool_size", b"private_threadpool_size"]) -> None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_max_intra_op_parallelism", b"optional_max_intra_op_parallelism"]) -> typing_extensions.Literal["max_intra_op_parallelism"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_private_threadpool_size", b"optional_private_threadpool_size"]) -> typing_extensions.Literal["private_threadpool_size"] | None: ...
global___ThreadingOptions = ThreadingOptions
@typing_extensions.final
class Options(google.protobuf.message.Message):
"""Message stored with Dataset objects to control how datasets are processed and
optimized.
next: 8
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DETERMINISTIC_FIELD_NUMBER: builtins.int
AUTOTUNE_OPTIONS_FIELD_NUMBER: builtins.int
DISTRIBUTE_OPTIONS_FIELD_NUMBER: builtins.int
OPTIMIZATION_OPTIONS_FIELD_NUMBER: builtins.int
SLACK_FIELD_NUMBER: builtins.int
THREADING_OPTIONS_FIELD_NUMBER: builtins.int
EXTERNAL_STATE_POLICY_FIELD_NUMBER: builtins.int
deterministic: builtins.bool
@property
def autotune_options(self) -> global___AutotuneOptions:
"""The distribution strategy options associated with the dataset."""
@property
def distribute_options(self) -> global___DistributeOptions:
"""The distribution strategy options associated with the dataset."""
@property
def optimization_options(self) -> global___OptimizationOptions:
"""The optimization options associated with the dataset."""
slack: builtins.bool
@property
def threading_options(self) -> global___ThreadingOptions:
"""The threading options associated with the dataset."""
external_state_policy: global___ExternalStatePolicy.ValueType
def __init__(
self,
*,
deterministic: builtins.bool | None = ...,
autotune_options: global___AutotuneOptions | None = ...,
distribute_options: global___DistributeOptions | None = ...,
optimization_options: global___OptimizationOptions | None = ...,
slack: builtins.bool | None = ...,
threading_options: global___ThreadingOptions | None = ...,
external_state_policy: global___ExternalStatePolicy.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["autotune_options", b"autotune_options", "deterministic", b"deterministic", "distribute_options", b"distribute_options", "external_state_policy", b"external_state_policy", "optimization_options", b"optimization_options", "optional_deterministic", b"optional_deterministic", "optional_external_state_policy", b"optional_external_state_policy", "optional_slack", b"optional_slack", "slack", b"slack", "threading_options", b"threading_options"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["autotune_options", b"autotune_options", "deterministic", b"deterministic", "distribute_options", b"distribute_options", "external_state_policy", b"external_state_policy", "optimization_options", b"optimization_options", "optional_deterministic", b"optional_deterministic", "optional_external_state_policy", b"optional_external_state_policy", "optional_slack", b"optional_slack", "slack", b"slack", "threading_options", b"threading_options"]) -> None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_deterministic", b"optional_deterministic"]) -> typing_extensions.Literal["deterministic"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_external_state_policy", b"optional_external_state_policy"]) -> typing_extensions.Literal["external_state_policy"] | None: ...
@typing.overload
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_slack", b"optional_slack"]) -> typing_extensions.Literal["slack"] | None: ...
global___Options = Options
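# Usage sketch (illustrative only, not part of the generated stub): submessage
# fields are set by keyword construction or in-place mutation, and HasField
# distinguishes an explicitly set submessage from its default:
#
#     from tensorflow.core.framework import dataset_options_pb2 as dopts
#
#     options = dopts.Options(
#         deterministic=True,
#         autotune_options=dopts.AutotuneOptions(enabled=True),
#         threading_options=dopts.ThreadingOptions(private_threadpool_size=4),
#     )
#     assert options.HasField("autotune_options")
#     assert not options.HasField("distribute_options")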

View File

@@ -0,0 +1,131 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class InterconnectLink(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEVICE_ID_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
STRENGTH_FIELD_NUMBER: builtins.int
device_id: builtins.int
type: builtins.str
strength: builtins.int
def __init__(
self,
*,
device_id: builtins.int | None = ...,
type: builtins.str | None = ...,
strength: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device_id", b"device_id", "strength", b"strength", "type", b"type"]) -> None: ...
global___InterconnectLink = InterconnectLink
@typing_extensions.final
class LocalLinks(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
LINK_FIELD_NUMBER: builtins.int
@property
def link(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InterconnectLink]: ...
def __init__(
self,
*,
link: collections.abc.Iterable[global___InterconnectLink] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["link", b"link"]) -> None: ...
global___LocalLinks = LocalLinks
@typing_extensions.final
class DeviceLocality(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BUS_ID_FIELD_NUMBER: builtins.int
NUMA_NODE_FIELD_NUMBER: builtins.int
LINKS_FIELD_NUMBER: builtins.int
bus_id: builtins.int
"""Optional bus locality of device. Default value of 0 means
no specific locality. Specific localities are indexed from 1.
"""
numa_node: builtins.int
"""Optional NUMA locality of device."""
@property
def links(self) -> global___LocalLinks:
"""Optional local interconnect links to other devices."""
def __init__(
self,
*,
bus_id: builtins.int | None = ...,
numa_node: builtins.int | None = ...,
links: global___LocalLinks | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["links", b"links"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bus_id", b"bus_id", "links", b"links", "numa_node", b"numa_node"]) -> None: ...
global___DeviceLocality = DeviceLocality
@typing_extensions.final
class DeviceAttributes(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
DEVICE_TYPE_FIELD_NUMBER: builtins.int
MEMORY_LIMIT_FIELD_NUMBER: builtins.int
LOCALITY_FIELD_NUMBER: builtins.int
INCARNATION_FIELD_NUMBER: builtins.int
PHYSICAL_DEVICE_DESC_FIELD_NUMBER: builtins.int
XLA_GLOBAL_ID_FIELD_NUMBER: builtins.int
name: builtins.str
"""Fully specified name of the device within a cluster."""
device_type: builtins.str
"""String representation of device_type."""
memory_limit: builtins.int
"""Memory capacity of device in bytes."""
@property
def locality(self) -> global___DeviceLocality:
"""Platform-specific data about device that may be useful
for supporting efficient data transfers.
"""
incarnation: builtins.int
"""A device is assigned a global unique number each time it is
initialized. "incarnation" should never be 0.
"""
physical_device_desc: builtins.str
"""String representation of the physical device that this device maps to."""
xla_global_id: builtins.int
"""A physical device ID for use in XLA DeviceAssignments, unique across
clients in a multi-client setup. Set to -1 if unavailable, non-negative
otherwise.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
device_type: builtins.str | None = ...,
memory_limit: builtins.int | None = ...,
locality: global___DeviceLocality | None = ...,
incarnation: builtins.int | None = ...,
physical_device_desc: builtins.str | None = ...,
xla_global_id: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["locality", b"locality"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["device_type", b"device_type", "incarnation", b"incarnation", "locality", b"locality", "memory_limit", b"memory_limit", "name", b"name", "physical_device_desc", b"physical_device_desc", "xla_global_id", b"xla_global_id"]) -> None: ...
global___DeviceAttributes = DeviceAttributes
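# Usage sketch (illustrative only, not part of the generated stub; all field
# values below are made up), assuming this module is importable as
# tensorflow.core.framework.device_attributes_pb2:
#
#     from tensorflow.core.framework import device_attributes_pb2 as dev
#
#     attrs = dev.DeviceAttributes(
#         name="/job:localhost/replica:0/task:0/device:GPU:0",
#         device_type="GPU",
#         memory_limit=8 << 30,
#         locality=dev.DeviceLocality(
#             bus_id=1,
#             links=dev.LocalLinks(
#                 link=[dev.InterconnectLink(device_id=1, type="NVLink", strength=2)]
#             ),
#         ),
#     )
#     assert attrs.HasField("locality")
#     assert attrs.locality.links.link[0].type == "NVLink"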

View File

@@ -0,0 +1,589 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _FullTypeId:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _FullTypeIdEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_FullTypeId.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
TFT_UNSET: _FullTypeId.ValueType # 0
"""The default represents an uninitialized values."""
TFT_VAR: _FullTypeId.ValueType # 1
"""Type symbols. Used to construct more complex type expressions like
algebraic data types.
Type variables may serve as placeholder for any other type ID in type
templates.
Examples:
TFT_DATASET[TFT_VAR["T"]] is a Dataset returning a type indicated by "T".
TFT_TENSOR[TFT_VAR["T"]] is a Tensor of n element type indicated by "T".
TFT_TENSOR[TFT_VAR["T"]], TFT_TENSOR[TFT_VAR["T"]] are two tensors of
identical element types.
TFT_TENSOR[TFT_VAR["P"]], TFT_TENSOR[TFT_VAR["Q"]] are two tensors of
independent element types.
"""
TFT_ANY: _FullTypeId.ValueType # 2
"""Wildcard type. Describes a parameter of unknown type. In TensorFlow, that
can mean either a "Top" type (accepts any type), or a dynamically typed
object whose type is unknown in context.
Important: "unknown" does not necessarily mean undeterminable!
"""
TFT_PRODUCT: _FullTypeId.ValueType # 3
"""The algebraic product type. This is an algebraic type that may be used just
for logical grouping. Not to be confused with TFT_TUPLE which describes a
concrete object of several elements.
Example:
TFT_DATASET[TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT64]]]
is a Dataset producing two tensors, an integer one and a float one.
"""
TFT_NAMED: _FullTypeId.ValueType # 4
"""Represents a named field, with the name stored in the attribute.
Parametrization:
TFT_NAMED[<type>]{<name>}
* <type> is the type of the field
* <name> is the field name, as string (though can theoretically be an int
as well)
Example:
TFT_RECORD[
TFT_NAMED[TFT_TENSOR[TFT_INT32]]{'foo'},
TFT_NAMED[TFT_TENSOR[TFT_FLOAT32]]{'bar'},
]
is a structure with two fields, an int tensor "foo" and a float tensor
"bar".
"""
TFT_FOR_EACH: _FullTypeId.ValueType # 20
"""Template definition. Expands the variables by repeating a template as
arguments of a container.
Parametrization:
TFT_FOR_EACH[<container_type>, <template>, <expansions>]
* <container_type> is the type of the container that the template will be
expanded into
* <template> is any type definition that potentially contains type
variables
* <expansions> is a TFT_VAR and may include more types in the future
Example:
TFT_FOR_EACH[
TFT_PRODUCT,
TFT_TENSOR[TFT_VAR["t"]],
TFT_VAR["t"]
]
expands to TFT_PRODUCT[TFT_TENSOR[TFT_INT32]] for T = TFT_INT32,
and to TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_INT64]]
for T = (TFT_INT32, TFT_INT64).
"""
TFT_CALLABLE: _FullTypeId.ValueType # 100
"""Callable types describe functions and ops.
Parametrization:
TFT_CALLABLE[<arg type>, <return type>]
* <arg type> is the type of the arguments; TFT_PRODUCT represents
multiple arguments.
* <return type> is the return type; TFT_PRODUCT represents multiple
return values (that means that callables returning multiple things
don't necessarily return a single tuple).
Example:
TFT_CALLABLE[
TFT_ANY,
TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT64]],
]
is a callable with unspecified (for now) input arguments, and
two return values of type tensor.
"""
TFT_TENSOR: _FullTypeId.ValueType # 1000
"""Concrete type IDs, representing "proper" data types that can describe
runtime TensorFlow objects.
The usual Tensor. This is a parametric type.
Parametrization:
TFT_TENSOR[<element type>, <shape type>]
* <element type> is currently limited to one of the element types
defined below.
* <shape type> is not yet defined, and may only be TFT_UNKNOWN for now.
A TFT_SHAPE type will be defined in the future.
Example:
TFT_TENSOR[TFT_INT32, TFT_UNKNOWN]
is a Tensor of int32 element type and unknown shape.
TODO(mdan): Define TFT_SHAPE and add more examples.
"""
TFT_ARRAY: _FullTypeId.ValueType # 1001
"""Array (or tensorflow::TensorList in the variant type registry).
Note: this is not to be confused with the deprecated `TensorArray*` ops
which are not supported by FullType.
This type represents a random-access list whose elements can be
described by a single type. Although immutable, Array is expected to
support efficient mutation semantics (i.e. element update) in the
user-facing API.
The element type may be generic or even TFT_ANY for a heterogeneous list.
Parametrization:
TFT_ARRAY[<element type>]
* <element type> may be any concrete type.
Examples:
TFT_ARRAY[TFT_TENSOR[TFT_INT32]] is a TensorArray holding int32 Tensors
of any shape.
TFT_ARRAY[TFT_TENSOR[TFT_UNKNOWN]] is a TensorArray holding Tensors of
mixed element types.
TFT_ARRAY[TFT_UNKNOWN] is a TensorArray holding any element type.
TFT_ARRAY[] is equivalent to TFT_ARRAY[TFT_UNKNOWN].
TFT_ARRAY[TFT_ARRAY[]] is an array of arrays (of unknown types).
"""
TFT_OPTIONAL: _FullTypeId.ValueType # 1002
"""Optional (or tensorflow::OptionalVariant in the variant type registry).
This type represents a value that may either hold an element of a single
specified type, or nothing at all.
Parametrization:
TFT_OPTIONAL[<element type>]
* <element type> may be any concrete type.
Examples:
TFT_OPTIONAL[TFT_TENSOR[TFT_INT32]] is an Optional holding an int32
Tensor of any shape.
"""
TFT_LITERAL: _FullTypeId.ValueType # 1003
"""Literal types describe compile-time constant values.
Literal types may also participate in dependent types.
Parametrization:
TFT_LITERAL[<value type>]{<value>}
* <value type> may be any concrete type that can hold <value>
* <value> is the type's attribute, and holds the actual literal value
Examples:
TFT_LITERAL[TFT_INT32]{1} is the compile-time constant 1.
"""
TFT_ENCODED: _FullTypeId.ValueType # 1004
"""Encoding types describe a value of a certain type, encoded as a different
type.
Parametrization:
TFT_ENCODED[<encoded type>, <encoding type>]
* <encoded type> may be any type
* <encoding type> may be any type
Examples:
TFT_ENCODING[TFT_INT32, TFT_STRING] is an integer encoded as string.
"""
TFT_BOOL: _FullTypeId.ValueType # 200
"""Type attributes. These always appear in the parametrization of a type,
never alone. For example, there is no such thing as a "bool" TensorFlow
object (for now).
The bool element type.
TODO(mdan): Quantized types, legacy representations (e.g. ref)
"""
TFT_UINT8: _FullTypeId.ValueType # 201
"""Integer element types."""
TFT_UINT16: _FullTypeId.ValueType # 202
TFT_UINT32: _FullTypeId.ValueType # 203
TFT_UINT64: _FullTypeId.ValueType # 204
TFT_INT8: _FullTypeId.ValueType # 205
TFT_INT16: _FullTypeId.ValueType # 206
TFT_INT32: _FullTypeId.ValueType # 207
TFT_INT64: _FullTypeId.ValueType # 208
TFT_HALF: _FullTypeId.ValueType # 209
"""Floating-point element types."""
TFT_FLOAT: _FullTypeId.ValueType # 210
TFT_DOUBLE: _FullTypeId.ValueType # 211
TFT_BFLOAT16: _FullTypeId.ValueType # 215
TFT_COMPLEX64: _FullTypeId.ValueType # 212
"""Complex element types.
TODO(mdan): Represent as TFT_COMPLEX[TFT_DOUBLE] instead?
"""
TFT_COMPLEX128: _FullTypeId.ValueType # 213
TFT_STRING: _FullTypeId.ValueType # 214
"""The string element type."""
TFT_DATASET: _FullTypeId.ValueType # 10102
"""Other types that we don't know yet whether they will become part of the
core type system or be considered third-party (and consequently moved to
user-defined type mechanisms). Presently, they are effectively in the core
type system, because key compilation passes like Placer account for their
existence.
Datasets created by tf.data ops and APIs. Datasets have generator/iterable
semantics, that is, one can construct an iterator from them. Like
Array, they are considered to return elements that can be described
by a single type. Unlike Array, they do not support random access or
mutation, and can potentially produce an infinite number of elements.
A dataset can produce logical structures (e.g. multiple elements). This
is expressed using TFT_PRODUCT.
Parametrization: TFT_DATASET[<element type>].
* <element type> may be a concrete type or a type symbol. It represents
the data type of the elements produced by the dataset.
Examples:
TFT_DATASET[TFT_TENSOR[TFT_INT32]] is a Dataset producing single int32
Tensors of unknown shape.
TFT_DATASET[TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT32]]] is
a Dataset producing pairs of Tensors, one integer and one float.
Note: The high ID number is to prepare for the eventuality that Datasets
will be supported by user types in the future.
"""
TFT_RAGGED: _FullTypeId.ValueType # 10103
"""A ragged tensor created by tf.ragged ops and APIs.
Parametrization: TFT_RAGGED[<element_type>].
"""
TFT_ITERATOR: _FullTypeId.ValueType # 10104
"""Iterators created by tf.data ops and APIs. Very similar to Datasets, except
they are mutable.
Parametrization: TFT_ITERATOR[<element type>].
* <element type> may be a concrete type or a type symbol. It represents
the data type of the elements produced by the iterator.
"""
TFT_MUTEX_LOCK: _FullTypeId.ValueType # 10202
"""A mutex lock tensor, produced by tf.raw_ops.MutexLock.
Unlike strict execution models, where ownership of a lock is denoted by
"running after the lock has been acquired", in non-strict mode, lock
ownership is in the true sense: "the op argument representing the lock is
available".
Mutex locks are the dynamic counterpart of control dependencies.
TODO(mdan): Properly document this thing.
Parametrization: TFT_MUTEX_LOCK[].
"""
TFT_LEGACY_VARIANT: _FullTypeId.ValueType # 10203
"""The equivalent of a Tensor with DT_VARIANT dtype, kept here to simplify
translation. This type should not normally appear after type inference.
Note that LEGACY_VARIANT != ANY: TENSOR[INT32] is a subtype of ANY, but is
not a subtype of LEGACY_VARIANT.
"""
class FullTypeId(_FullTypeId, metaclass=_FullTypeIdEnumTypeWrapper):
"""LINT.IfChange
Experimental. Represents the complete type information of a TensorFlow value.
"""
TFT_UNSET: FullTypeId.ValueType # 0
"""The default represents an uninitialized values."""
TFT_VAR: FullTypeId.ValueType # 1
"""Type symbols. Used to construct more complex type expressions like
algebraic data types.
Type variables may serve as placeholder for any other type ID in type
templates.
Examples:
TFT_DATASET[TFT_VAR["T"]] is a Dataset returning a type indicated by "T".
TFT_TENSOR[TFT_VAR["T"]] is a Tensor of n element type indicated by "T".
TFT_TENSOR[TFT_VAR["T"]], TFT_TENSOR[TFT_VAR["T"]] are two tensors of
identical element types.
TFT_TENSOR[TFT_VAR["P"]], TFT_TENSOR[TFT_VAR["Q"]] are two tensors of
independent element types.
"""
TFT_ANY: FullTypeId.ValueType # 2
"""Wildcard type. Describes a parameter of unknown type. In TensorFlow, that
can mean either a "Top" type (accepts any type), or a dynamically typed
object whose type is unknown in context.
Important: "unknown" does not necessarily mean undeterminable!
"""
TFT_PRODUCT: FullTypeId.ValueType # 3
"""The algebraic product type. This is an algebraic type that may be used just
for logical grouping. Not to be confused with TFT_TUPLE which describes a
concrete object of several elements.
Example:
TFT_DATASET[TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT64]]]
is a Dataset producing two tensors, an integer one and a float one.
"""
TFT_NAMED: FullTypeId.ValueType # 4
"""Represents a named field, with the name stored in the attribute.
Parametrization:
TFT_NAMED[<type>]{<name>}
* <type> is the type of the field
* <name> is the field name, as string (though can theoretically be an int
as well)
Example:
TFT_RECORD[
TFT_NAMED[TFT_TENSOR[TFT_INT32]]{'foo'},
TFT_NAMED[TFT_TENSOR[TFT_FLOAT32]]{'bar'},
]
is a structure with two fields, an int tensor "foo" and a float tensor
"bar".
"""
TFT_FOR_EACH: FullTypeId.ValueType # 20
"""Template definition. Expands the variables by repeating a template as
arguments of a container.
Parametrization:
TFT_FOR_EACH[<container_type>, <template>, <expansions>]
* <container_type> is the type of the container that the template will be
expanded into
* <template> is any type definition that potentially contains type
variables
* <expansions> is a TFT_VAR and may include more types in the future
Example:
TFT_FOR_EACH[
TFT_PRODUCT,
TFT_TENSOR[TFT_VAR["t"]],
TFT_VAR["t"]
]
expands to TFT_PRODUCT[TFT_TENSOR[TFT_INT32]] for T = TFT_INT32,
and to TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_INT64]]
for T = (TFT_INT32, TFT_INT64).
"""
TFT_CALLABLE: FullTypeId.ValueType # 100
"""Callable types describe functions and ops.
Parametrization:
TFT_CALLABLE[<arg type>, <return type>]
* <arg type> is the type of the arguments; TFT_PRODUCT represents
multiple arguments.
* <return type> is the return type; TFT_PRODUCT represents multiple
return values (that means that callables returning multiple things
don't necessarily return a single tuple).
Example:
TFT_CALLABLE[
TFT_ANY,
TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT64]],
]
is a callable with unspecified (for now) input arguments, and
two return values of type tensor.
"""
TFT_TENSOR: FullTypeId.ValueType # 1000
"""Concrete type IDs, representing "proper" data types that can describe
runtime TensorFlow objects.
The usual Tensor. This is a parametric type.
Parametrization:
TFT_TENSOR[<element type>, <shape type>]
* <element type> is currently limited to one of the element types
defined below.
* <shape type> is not yet defined, and may only be TFT_UNKNOWN for now.
A TFT_SHAPE type will be defined in the future.
Example:
TFT_TENSOR[TFT_INT32, TFT_UNKNOWN]
is a Tensor of int32 element type and unknown shape.
TODO(mdan): Define TFT_SHAPE and add more examples.
"""
TFT_ARRAY: FullTypeId.ValueType # 1001
"""Array (or tensorflow::TensorList in the variant type registry).
Note: this is not to be confused with the deprecated `TensorArray*` ops
which are not supported by FullType.
This type represents a random-access list whose elements can be
described by a single type. Although immutable, Array is expected to
support efficient mutation semantics (i.e. element update) in the
user-facing API.
The element type may be generic or even TFT_ANY for a heterogeneous list.
Parametrization:
TFT_ARRAY[<element type>]
* <element type> may be any concrete type.
Examples:
TFT_ARRAY[TFT_TENSOR[TFT_INT32]] is a TensorArray holding int32 Tensors
of any shape.
TFT_ARRAY[TFT_TENSOR[TFT_UNKNOWN]] is a TensorArray holding Tensors of
mixed element types.
TFT_ARRAY[TFT_UNKNOWN] is a TensorArray holding any element type.
TFT_ARRAY[] is equivalent to TFT_ARRAY[TFT_UNKNOWN].
TFT_ARRAY[TFT_ARRAY[]] is an array of arrays (of unknown types).
"""
TFT_OPTIONAL: FullTypeId.ValueType # 1002
"""Optional (or tensorflow::OptionalVariant in the variant type registry).
This type represents a value that may either hold an element of a single
specified type, or nothing at all.
Parametrization:
TFT_OPTIONAL[<element type>]
* <element type> may be any concrete type.
Examples:
TFT_OPTIONAL[TFT_TENSOR[TFT_INT32]] is an Optional holding an int32
Tensor of any shape.
"""
TFT_LITERAL: FullTypeId.ValueType # 1003
"""Literal types describe compile-time constant values.
Literal types may also participate in dependent types.
Parametrization:
TFT_LITERAL[<value type>]{<value>}
* <value type> may be any concrete type that can hold <value>
* <value> is the type's attribute, and holds the actual literal value
Examples:
TFT_LITERAL[TFT_INT32]{1} is the compile-time constant 1.
"""
TFT_ENCODED: FullTypeId.ValueType # 1004
"""Encoding types describe a value of a certain type, encoded as a different
type.
Parametrization:
TFT_ENCODED[<encoded type>, <encoding type>]
* <encoded type> may be any type
* <encoding type> may be any type
Examples:
TFT_ENCODING[TFT_INT32, TFT_STRING] is an integer encoded as string.
"""
TFT_BOOL: FullTypeId.ValueType # 200
"""Type attributes. These always appear in the parametrization of a type,
never alone. For example, there is no such thing as a "bool" TensorFlow
object (for now).
The bool element type.
TODO(mdan): Quantized types, legacy representations (e.g. ref)
"""
TFT_UINT8: FullTypeId.ValueType # 201
"""Integer element types."""
TFT_UINT16: FullTypeId.ValueType # 202
TFT_UINT32: FullTypeId.ValueType # 203
TFT_UINT64: FullTypeId.ValueType # 204
TFT_INT8: FullTypeId.ValueType # 205
TFT_INT16: FullTypeId.ValueType # 206
TFT_INT32: FullTypeId.ValueType # 207
TFT_INT64: FullTypeId.ValueType # 208
TFT_HALF: FullTypeId.ValueType # 209
"""Floating-point element types."""
TFT_FLOAT: FullTypeId.ValueType # 210
TFT_DOUBLE: FullTypeId.ValueType # 211
TFT_BFLOAT16: FullTypeId.ValueType # 215
TFT_COMPLEX64: FullTypeId.ValueType # 212
"""Complex element types.
TODO(mdan): Represent as TFT_COMPLEX[TFT_DOUBLE] instead?
"""
TFT_COMPLEX128: FullTypeId.ValueType # 213
TFT_STRING: FullTypeId.ValueType # 214
"""The string element type."""
TFT_DATASET: FullTypeId.ValueType # 10102
"""Other types that we don't know yet whether they will become part of the
core type system or be considered third-party (and consequently moved to
user-defined type mechanisms). Presently, they are effectively in the core
type system, because key compilation passes like Placer account for their
existence.
Datasets created by tf.data ops and APIs. Datasets have generator/iterable
semantics, that is, one can construct an iterator from them. Like
Array, they are considered to return elements that can be described
by a single type. Unlike Array, they do not support random access or
mutation, and can potentially produce an infinite number of elements.
A dataset can produce logical structures (e.g. multiple elements). This
is expressed using TFT_PRODUCT.
Parametrization: TFT_DATASET[<element type>].
* <element type> may be a concrete type or a type symbol. It represents
the data type of the elements produced by the dataset.
Examples:
TFT_DATASET[TFT_TENSOR[TFT_INT32]] is a Dataset producing single int32
Tensors of unknown shape.
TFT_DATASET[TFT_PRODUCT[TFT_TENSOR[TFT_INT32], TFT_TENSOR[TFT_FLOAT32]]] is
a Dataset producing pairs of Tensors, one integer and one float.
Note: The high ID number is to prepare for the eventuality that Datasets
will be supported by user types in the future.
"""
TFT_RAGGED: FullTypeId.ValueType # 10103
"""A ragged tensor created by tf.ragged ops and APIs.
Parametrization: TFT_RAGGED[<element_type>].
"""
TFT_ITERATOR: FullTypeId.ValueType # 10104
"""Iterators created by tf.data ops and APIs. Very similar to Datasets, except
they are mutable.
Parametrization: TFT_ITERATOR[<element type>].
* <element type> may be a concrete type or a type symbol. It represents
the data type of the elements produced by the iterator.
"""
TFT_MUTEX_LOCK: FullTypeId.ValueType # 10202
"""A mutex lock tensor, produced by tf.raw_ops.MutexLock.
Unlike strict execution models, where ownership of a lock is denoted by
"running after the lock has been acquired", in non-strict mode, lock
ownership is in the true sense: "the op argument representing the lock is
available".
Mutex locks are the dynamic counterpart of control dependencies.
TODO(mdan): Properly document this thing.
Parametrization: TFT_MUTEX_LOCK[].
"""
TFT_LEGACY_VARIANT: FullTypeId.ValueType # 10203
"""The equivalent of a Tensor with DT_VARIANT dtype, kept here to simplify
translation. This type should not normally appear after type inference.
Note that LEGACY_VARIANT != ANY: TENSOR[INT32] is a subtype of ANY, but is
not a subtype of LEGACY_VARIANT.
"""
global___FullTypeId = FullTypeId
@typing_extensions.final
class FullTypeDef(google.protobuf.message.Message):
"""Highly experimental and very likely to change.
This encoding uses tags instead of dedicated messages for regularity. In
particular the encoding imposes no restrictions on what the parameters of any
type should be, which in particular needs to be true for type symbols.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_ID_FIELD_NUMBER: builtins.int
ARGS_FIELD_NUMBER: builtins.int
S_FIELD_NUMBER: builtins.int
I_FIELD_NUMBER: builtins.int
type_id: global___FullTypeId.ValueType
"""The principal type represented by this object. This may be a concrete type
(Tensor, Dataset), a type variable (used for dependent types), or a type
symbol (Any, Union). See FullTypeId for details.
"""
@property
def args(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FullTypeDef]: ...
s: builtins.str
i: builtins.int
"""TODO(mdan): list/tensor, map? Need to reconcile with TFT_RECORD, etc."""
def __init__(
self,
*,
type_id: global___FullTypeId.ValueType | None = ...,
args: collections.abc.Iterable[global___FullTypeDef] | None = ...,
s: builtins.str | None = ...,
i: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["attr", b"attr", "i", b"i", "s", b"s"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["args", b"args", "attr", b"attr", "i", b"i", "s", b"s", "type_id", b"type_id"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["attr", b"attr"]) -> typing_extensions.Literal["s", "i"] | None: ...
global___FullTypeDef = FullTypeDef
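# Usage sketch (illustrative only, not part of the generated stub): the nested
# encoding TFT_TENSOR[TFT_INT32] from the docstrings above is built by nesting
# FullTypeDef messages, assuming this module is importable as
# tensorflow.core.framework.full_type_pb2:
#
#     from tensorflow.core.framework import full_type_pb2 as ft
#
#     t = ft.FullTypeDef(
#         type_id=ft.TFT_TENSOR,
#         args=[ft.FullTypeDef(type_id=ft.TFT_INT32)],
#     )
#     assert t.args[0].type_id == ft.TFT_INT32
#     assert t.WhichOneof("attr") is None  # no literal attribute was set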

View File

@@ -0,0 +1,309 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
import tensorflow.core.framework.node_def_pb2
import tensorflow.core.framework.op_def_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class FunctionDefLibrary(google.protobuf.message.Message):
"""A library is a set of named functions."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_FIELD_NUMBER: builtins.int
GRADIENT_FIELD_NUMBER: builtins.int
REGISTERED_GRADIENTS_FIELD_NUMBER: builtins.int
@property
def function(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___FunctionDef]: ...
@property
def gradient(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GradientDef]: ...
@property
def registered_gradients(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegisteredGradient]: ...
def __init__(
self,
*,
function: collections.abc.Iterable[global___FunctionDef] | None = ...,
gradient: collections.abc.Iterable[global___GradientDef] | None = ...,
registered_gradients: collections.abc.Iterable[global___RegisteredGradient] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["function", b"function", "gradient", b"gradient", "registered_gradients", b"registered_gradients"]) -> None: ...
global___FunctionDefLibrary = FunctionDefLibrary
@typing_extensions.final
class FunctionDef(google.protobuf.message.Message):
"""A function can be instantiated when the runtime can bind every attr
with a value. When a GraphDef has a call to a function, it must
have binding for every attr defined in the signature.
TODO(zhifengc):
* device spec, etc.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AttrEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class ArgAttrs(google.protobuf.message.Message):
"""Attributes for function arguments. These attributes are the same set of
valid attributes as to _Arg nodes.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AttrEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
ATTR_FIELD_NUMBER: builtins.int
@property
def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ...
def __init__(
self,
*,
attr: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["attr", b"attr"]) -> None: ...
@typing_extensions.final
class ArgAttrEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
@property
def value(self) -> global___FunctionDef.ArgAttrs: ...
def __init__(
self,
*,
key: builtins.int | None = ...,
value: global___FunctionDef.ArgAttrs | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class ResourceArgUniqueIdEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
value: builtins.int
def __init__(
self,
*,
key: builtins.int | None = ...,
value: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class RetEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class ControlRetEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
SIGNATURE_FIELD_NUMBER: builtins.int
ATTR_FIELD_NUMBER: builtins.int
ARG_ATTR_FIELD_NUMBER: builtins.int
RESOURCE_ARG_UNIQUE_ID_FIELD_NUMBER: builtins.int
NODE_DEF_FIELD_NUMBER: builtins.int
RET_FIELD_NUMBER: builtins.int
CONTROL_RET_FIELD_NUMBER: builtins.int
@property
def signature(self) -> tensorflow.core.framework.op_def_pb2.OpDef:
"""The definition of the function's name, arguments, return values,
attrs etc.
"""
@property
def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]:
"""Attributes specific to this function definition."""
@property
def arg_attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___FunctionDef.ArgAttrs]: ...
@property
def resource_arg_unique_id(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, builtins.int]:
"""Unique IDs for each resource argument, used to track aliasing resources. If
Argument A and Argument B alias each other, then
resource_arg_unique_ids[A.index] == resource_arg_unique_ids[B.index].
If this field is empty, none of the arguments could alias; otherwise, every
resource argument should have an entry in this field.
When instantiated, the unique IDs will be attached to the _Arg nodes'
"_resource_arg_unique_id" attribute.
"""
@property
def node_def(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]:
"""The body of the function. Unlike the NodeDefs in a GraphDef, attrs
may have values of type `placeholder` and the `input` field uses
the "output" format above.
By convention, "op" in node_def is resolved by consulting with a
user-defined library first. If not resolved, "func" is assumed to
be a builtin op.
"""
@property
def ret(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""A mapping from the output arg names from `signature` to the
outputs from `node_def` that should be returned by the function.
"""
@property
def control_ret(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""A mapping from control output names from `signature` to node names in
`node_def` which should be control outputs of this function.
"""
def __init__(
self,
*,
signature: tensorflow.core.framework.op_def_pb2.OpDef | None = ...,
attr: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ...,
arg_attr: collections.abc.Mapping[builtins.int, global___FunctionDef.ArgAttrs] | None = ...,
resource_arg_unique_id: collections.abc.Mapping[builtins.int, builtins.int] | None = ...,
node_def: collections.abc.Iterable[tensorflow.core.framework.node_def_pb2.NodeDef] | None = ...,
ret: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
control_ret: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["signature", b"signature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["arg_attr", b"arg_attr", "attr", b"attr", "control_ret", b"control_ret", "node_def", b"node_def", "resource_arg_unique_id", b"resource_arg_unique_id", "ret", b"ret", "signature", b"signature"]) -> None: ...
global___FunctionDef = FunctionDef
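# Usage sketch (illustrative only, not part of the generated stub; the names
# below are made up): the map fields on FunctionDef behave like dicts, with
# MessageMap values created on first access:
#
#     from tensorflow.core.framework import function_pb2
#
#     fdef = function_pb2.FunctionDef()
#     fdef.ret["y"] = "mul:z:0"        # ScalarMap[str, str]
#     fdef.attr["_noinline"].b = True  # MessageMap entry auto-created
#     assert fdef.ret["y"] == "mul:z:0"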
@typing_extensions.final
class GradientDef(google.protobuf.message.Message):
"""GradientDef defines the gradient function of a function defined in
a function library.
A gradient function g (specified by gradient_func) for a function f
(specified by function_name) must follow the following:
The function 'f' must be a numerical function which takes N inputs
and produces M outputs. Its gradient function 'g' is a function that
takes N + M inputs and produces N outputs.
I.e. if we have
(y1, y2, ..., y_M) = f(x1, x2, ..., x_N),
then, g is
(dL/dx1, dL/dx2, ..., dL/dx_N) = g(x1, x2, ..., x_N,
dL/dy1, dL/dy2, ..., dL/dy_M),
where L is a scalar-valued function of (x1, x2, ..., xN) (e.g., the
loss function). dL/dx_i is the partial derivative of L with respect
to x_i.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_NAME_FIELD_NUMBER: builtins.int
GRADIENT_FUNC_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""The function name."""
gradient_func: builtins.str
"""The gradient function's name."""
def __init__(
self,
*,
function_name: builtins.str | None = ...,
gradient_func: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["function_name", b"function_name", "gradient_func", b"gradient_func"]) -> None: ...
global___GradientDef = GradientDef
@typing_extensions.final
class RegisteredGradient(google.protobuf.message.Message):
"""RegisteredGradient stores a gradient function that is registered in the
gradients library and used in the ops of a function in the function library.
Unlike GradientDef, these gradients are identified by op type, and not
directly linked to any function.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
GRADIENT_FUNC_FIELD_NUMBER: builtins.int
REGISTERED_OP_TYPE_FIELD_NUMBER: builtins.int
gradient_func: builtins.str
"""The gradient function's name."""
registered_op_type: builtins.str
"""The gradient function's registered op type."""
def __init__(
self,
*,
gradient_func: builtins.str | None = ...,
registered_op_type: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["gradient_func", b"gradient_func", "registered_op_type", b"registered_op_type"]) -> None: ...
global___RegisteredGradient = RegisteredGradient
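# Usage sketch (illustrative only, not part of the generated stub; function and
# op names are made up): a FunctionDefLibrary bundles functions with their
# gradients:
#
#     from tensorflow.core.framework import function_pb2
#
#     lib = function_pb2.FunctionDefLibrary(
#         gradient=[
#             function_pb2.GradientDef(function_name="Foo", gradient_func="FooGrad")
#         ],
#         registered_gradients=[
#             function_pb2.RegisteredGradient(
#                 gradient_func="MatMulGrad", registered_op_type="MatMul"
#             )
#         ],
#     )
#     assert lib.gradient[0].gradient_func == "FooGrad"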

View File

@@ -0,0 +1,85 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.function_pb2
import tensorflow.core.framework.node_def_pb2
import tensorflow.core.framework.versions_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class GraphDef(google.protobuf.message.Message):
"""Represents the graph of operations"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_FIELD_NUMBER: builtins.int
VERSIONS_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
LIBRARY_FIELD_NUMBER: builtins.int
@property
def node(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.node_def_pb2.NodeDef]: ...
@property
def versions(self) -> tensorflow.core.framework.versions_pb2.VersionDef:
"""Compatibility versions of the graph. See core/public/version.h for version
history. The GraphDef version is distinct from the TensorFlow version, and
each release of TensorFlow will support a range of GraphDef versions.
"""
version: builtins.int
"""Deprecated single version field; use versions above instead. Since all
GraphDef changes before "versions" was introduced were forward
compatible, this field is entirely ignored.
"""
@property
def library(self) -> tensorflow.core.framework.function_pb2.FunctionDefLibrary:
""""library" provides user-defined functions.
Naming:
* library.function.name are in a flat namespace.
NOTE: We may need to change it to be hierarchical to support
different orgs. E.g.,
{ "/google/nn", { ... }},
{ "/google/vision", { ... }}
{ "/org_foo/module_bar", { ... }}
map<string, FunctionDefLib> named_lib;
* If node[i].op is the name of one function in "library",
node[i] is deemed as a function call. Otherwise, node[i].op
must be a primitive operation supported by the runtime.
Function call semantics:
* The callee may start execution as soon as some of its inputs
are ready. The caller may want to use the Tuple() mechanism to
ensure all inputs are ready at the same time.
* The consumer of return values may start executing as soon as
the return values the consumer depends on are ready. The
consumer may want to use the Tuple() mechanism to ensure the
consumer does not start until all return values of the callee
function are ready.
"""
def __init__(
self,
*,
node: collections.abc.Iterable[tensorflow.core.framework.node_def_pb2.NodeDef] | None = ...,
versions: tensorflow.core.framework.versions_pb2.VersionDef | None = ...,
version: builtins.int | None = ...,
library: tensorflow.core.framework.function_pb2.FunctionDefLibrary | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["library", b"library", "versions", b"versions"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["library", b"library", "node", b"node", "version", b"version", "versions", b"versions"]) -> None: ...
global___GraphDef = GraphDef
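# Usage sketch (illustrative only, not part of the generated stub; node names
# and ops are made up): GraphDef round-trips through the standard protobuf wire
# format:
#
#     from tensorflow.core.framework import graph_pb2
#
#     g = graph_pb2.GraphDef()
#     g.node.add(name="x", op="Placeholder")
#     data = g.SerializeToString()
#     g2 = graph_pb2.GraphDef.FromString(data)
#     assert g2.node[0].name == "x"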

View File

@@ -0,0 +1,240 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.types_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class GraphTransferNodeInput(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_ID_FIELD_NUMBER: builtins.int
OUTPUT_PORT_FIELD_NUMBER: builtins.int
node_id: builtins.int
output_port: builtins.int
def __init__(
self,
*,
node_id: builtins.int | None = ...,
output_port: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["node_id", b"node_id", "output_port", b"output_port"]) -> None: ...
global___GraphTransferNodeInput = GraphTransferNodeInput
@typing_extensions.final
class GraphTransferNodeInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
NODE_ID_FIELD_NUMBER: builtins.int
TYPE_NAME_FIELD_NUMBER: builtins.int
SOC_OP_ID_FIELD_NUMBER: builtins.int
PADDING_ID_FIELD_NUMBER: builtins.int
INPUT_COUNT_FIELD_NUMBER: builtins.int
OUTPUT_COUNT_FIELD_NUMBER: builtins.int
name: builtins.str
node_id: builtins.int
type_name: builtins.str
soc_op_id: builtins.int
padding_id: builtins.int
input_count: builtins.int
output_count: builtins.int
def __init__(
self,
*,
name: builtins.str | None = ...,
node_id: builtins.int | None = ...,
type_name: builtins.str | None = ...,
soc_op_id: builtins.int | None = ...,
padding_id: builtins.int | None = ...,
input_count: builtins.int | None = ...,
output_count: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["input_count", b"input_count", "name", b"name", "node_id", b"node_id", "output_count", b"output_count", "padding_id", b"padding_id", "soc_op_id", b"soc_op_id", "type_name", b"type_name"]) -> None: ...
global___GraphTransferNodeInfo = GraphTransferNodeInfo
@typing_extensions.final
class GraphTransferConstNodeInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
NODE_ID_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
name: builtins.str
node_id: builtins.int
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
data: builtins.bytes
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
def __init__(
self,
*,
name: builtins.str | None = ...,
node_id: builtins.int | None = ...,
shape: collections.abc.Iterable[builtins.int] | None = ...,
data: builtins.bytes | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "dtype", b"dtype", "name", b"name", "node_id", b"node_id", "shape", b"shape"]) -> None: ...
global___GraphTransferConstNodeInfo = GraphTransferConstNodeInfo
@typing_extensions.final
class GraphTransferNodeInputInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_ID_FIELD_NUMBER: builtins.int
NODE_INPUT_FIELD_NUMBER: builtins.int
node_id: builtins.int
@property
def node_input(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInput]: ...
def __init__(
self,
*,
node_id: builtins.int | None = ...,
node_input: collections.abc.Iterable[global___GraphTransferNodeInput] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["node_id", b"node_id", "node_input", b"node_input"]) -> None: ...
global___GraphTransferNodeInputInfo = GraphTransferNodeInputInfo
@typing_extensions.final
class GraphTransferNodeOutputInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_ID_FIELD_NUMBER: builtins.int
MAX_BYTE_SIZE_FIELD_NUMBER: builtins.int
node_id: builtins.int
@property
def max_byte_size(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
def __init__(
self,
*,
node_id: builtins.int | None = ...,
max_byte_size: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["max_byte_size", b"max_byte_size", "node_id", b"node_id"]) -> None: ...
global___GraphTransferNodeOutputInfo = GraphTransferNodeOutputInfo
@typing_extensions.final
class GraphTransferGraphInputNodeInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
def __init__(
self,
*,
name: builtins.str | None = ...,
shape: collections.abc.Iterable[builtins.int] | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "name", b"name", "shape", b"shape"]) -> None: ...
global___GraphTransferGraphInputNodeInfo = GraphTransferGraphInputNodeInfo
@typing_extensions.final
class GraphTransferGraphOutputNodeInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
def __init__(
self,
*,
name: builtins.str | None = ...,
shape: collections.abc.Iterable[builtins.int] | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "name", b"name", "shape", b"shape"]) -> None: ...
global___GraphTransferGraphOutputNodeInfo = GraphTransferGraphOutputNodeInfo
@typing_extensions.final
class GraphTransferInfo(google.protobuf.message.Message):
"""Protocol buffer representing a handle to a tensorflow resource. Handles are
not valid across executions, but can be serialized back and forth from within
a single run.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Destination:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _DestinationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[GraphTransferInfo._Destination.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
NOP: GraphTransferInfo._Destination.ValueType # 0
HEXAGON: GraphTransferInfo._Destination.ValueType # 1
class Destination(_Destination, metaclass=_DestinationEnumTypeWrapper): ...
NOP: GraphTransferInfo.Destination.ValueType # 0
HEXAGON: GraphTransferInfo.Destination.ValueType # 1
NODE_INFO_FIELD_NUMBER: builtins.int
CONST_NODE_INFO_FIELD_NUMBER: builtins.int
NODE_INPUT_INFO_FIELD_NUMBER: builtins.int
NODE_OUTPUT_INFO_FIELD_NUMBER: builtins.int
GRAPH_INPUT_NODE_INFO_FIELD_NUMBER: builtins.int
GRAPH_OUTPUT_NODE_INFO_FIELD_NUMBER: builtins.int
DESTINATION_FIELD_NUMBER: builtins.int
@property
def node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInfo]: ...
@property
def const_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferConstNodeInfo]: ...
@property
def node_input_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeInputInfo]: ...
@property
def node_output_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferNodeOutputInfo]: ...
@property
def graph_input_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphInputNodeInfo]:
"""Input Node parameters of transferred graph"""
@property
def graph_output_node_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphTransferGraphOutputNodeInfo]: ...
destination: global___GraphTransferInfo.Destination.ValueType
"""Destination of graph transfer"""
def __init__(
self,
*,
node_info: collections.abc.Iterable[global___GraphTransferNodeInfo] | None = ...,
const_node_info: collections.abc.Iterable[global___GraphTransferConstNodeInfo] | None = ...,
node_input_info: collections.abc.Iterable[global___GraphTransferNodeInputInfo] | None = ...,
node_output_info: collections.abc.Iterable[global___GraphTransferNodeOutputInfo] | None = ...,
graph_input_node_info: collections.abc.Iterable[global___GraphTransferGraphInputNodeInfo] | None = ...,
graph_output_node_info: collections.abc.Iterable[global___GraphTransferGraphOutputNodeInfo] | None = ...,
destination: global___GraphTransferInfo.Destination.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["const_node_info", b"const_node_info", "destination", b"destination", "graph_input_node_info", b"graph_input_node_info", "graph_output_node_info", b"graph_output_node_info", "node_info", b"node_info", "node_input_info", b"node_input_info", "node_output_info", b"node_output_info"]) -> None: ...
global___GraphTransferInfo = GraphTransferInfo

View File
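A minimal sketch of populating the GraphTransferInfo message typed above, including its nested Destination enum (illustrative only; it assumes the runtime graph_transfer_info_pb2 module is importable, and the node names, ids, and shape are hypothetical):

from tensorflow.core.framework import graph_transfer_info_pb2

info = graph_transfer_info_pb2.GraphTransferInfo(
    destination=graph_transfer_info_pb2.GraphTransferInfo.HEXAGON,
)
# Hypothetical node metadata, for illustration only.
info.node_info.add(name="conv", node_id=1, type_name="Conv2D", soc_op_id=3)
info.graph_input_node_info.add(name="input", shape=[1, 224, 224, 3])
# Enum wrappers expose Value()/Name() lookups by string.
assert info.destination == graph_transfer_info_pb2.GraphTransferInfo.Destination.Value("HEXAGON")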

@@ -0,0 +1,103 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class KernelDef(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AttrConstraint(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
ALLOWED_VALUES_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of an attr from the Op."""
@property
def allowed_values(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue:
"""A list of values that this kernel supports for this attr.
Like OpDef.AttrDef.allowed_values, except for kernels instead of Ops.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
allowed_values: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["allowed_values", b"allowed_values"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allowed_values", b"allowed_values", "name", b"name"]) -> None: ...
OP_FIELD_NUMBER: builtins.int
DEVICE_TYPE_FIELD_NUMBER: builtins.int
CONSTRAINT_FIELD_NUMBER: builtins.int
HOST_MEMORY_ARG_FIELD_NUMBER: builtins.int
LABEL_FIELD_NUMBER: builtins.int
PRIORITY_FIELD_NUMBER: builtins.int
op: builtins.str
"""Must match the name of an Op."""
device_type: builtins.str
"""Type of device this kernel runs on."""
@property
def constraint(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KernelDef.AttrConstraint]: ...
@property
def host_memory_arg(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Names of the Op's input_/output_args that reside in host memory
instead of device memory.
"""
label: builtins.str
"""This allows experimental kernels to be registered for an op that
won't be used unless the user specifies a "_kernel" attr with
value matching this.
"""
priority: builtins.int
"""Prioritization of kernel amongst different devices. By default we assume
priority is 0. The higher the priority the better. By default (i.e. if
this is not set), we prefer GPU kernels over CPU.
"""
def __init__(
self,
*,
op: builtins.str | None = ...,
device_type: builtins.str | None = ...,
constraint: collections.abc.Iterable[global___KernelDef.AttrConstraint] | None = ...,
host_memory_arg: collections.abc.Iterable[builtins.str] | None = ...,
label: builtins.str | None = ...,
priority: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["constraint", b"constraint", "device_type", b"device_type", "host_memory_arg", b"host_memory_arg", "label", b"label", "op", b"op", "priority", b"priority"]) -> None: ...
global___KernelDef = KernelDef
@typing_extensions.final
class KernelList(google.protobuf.message.Message):
"""A collection of KernelDefs"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KERNEL_FIELD_NUMBER: builtins.int
@property
def kernel(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KernelDef]: ...
def __init__(
self,
*,
kernel: collections.abc.Iterable[global___KernelDef] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["kernel", b"kernel"]) -> None: ...
global___KernelList = KernelList

View File
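A minimal sketch of building a KernelDef with an AttrConstraint, as typed above (illustrative only; it assumes the runtime kernel_def_pb2 and types_pb2 modules are importable):

from tensorflow.core.framework import kernel_def_pb2, types_pb2

kernel = kernel_def_pb2.KernelDef(op="MatMul", device_type="GPU", priority=1)
constraint = kernel.constraint.add(name="T")
# allowed_values is an AttrValue; its list.type holds the permitted DataTypes.
constraint.allowed_values.list.type.extend([types_pb2.DT_FLOAT, types_pb2.DT_HALF])
registry = kernel_def_pb2.KernelList(kernel=[kernel])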

@@ -0,0 +1,192 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_description_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class MemoryLogStep(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STEP_ID_FIELD_NUMBER: builtins.int
HANDLE_FIELD_NUMBER: builtins.int
step_id: builtins.int
"""Process-unique step id."""
handle: builtins.str
"""Handle describing the feeds and fetches of the step."""
def __init__(
self,
*,
step_id: builtins.int | None = ...,
handle: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["handle", b"handle", "step_id", b"step_id"]) -> None: ...
global___MemoryLogStep = MemoryLogStep
@typing_extensions.final
class MemoryLogTensorAllocation(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STEP_ID_FIELD_NUMBER: builtins.int
KERNEL_NAME_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
step_id: builtins.int
"""Process-unique step id."""
kernel_name: builtins.str
"""Name of the kernel making the allocation as set in GraphDef,
e.g., "affine2/weights/Assign".
"""
@property
def tensor(self) -> tensorflow.core.framework.tensor_description_pb2.TensorDescription:
"""Allocated tensor details."""
def __init__(
self,
*,
step_id: builtins.int | None = ...,
kernel_name: builtins.str | None = ...,
tensor: tensorflow.core.framework.tensor_description_pb2.TensorDescription | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor", b"tensor"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"]) -> None: ...
global___MemoryLogTensorAllocation = MemoryLogTensorAllocation
@typing_extensions.final
class MemoryLogTensorDeallocation(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ALLOCATION_ID_FIELD_NUMBER: builtins.int
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
allocation_id: builtins.int
"""Id of the tensor buffer being deallocated, used to match to a
corresponding allocation.
"""
allocator_name: builtins.str
"""Name of the allocator used."""
def __init__(
self,
*,
allocation_id: builtins.int | None = ...,
allocator_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name"]) -> None: ...
global___MemoryLogTensorDeallocation = MemoryLogTensorDeallocation
@typing_extensions.final
class MemoryLogTensorOutput(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STEP_ID_FIELD_NUMBER: builtins.int
KERNEL_NAME_FIELD_NUMBER: builtins.int
INDEX_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
step_id: builtins.int
"""Process-unique step id."""
kernel_name: builtins.str
"""Name of the kernel producing an output as set in GraphDef, e.g.,
"affine2/weights/Assign".
"""
index: builtins.int
"""Index of the output being set."""
@property
def tensor(self) -> tensorflow.core.framework.tensor_description_pb2.TensorDescription:
"""Output tensor details."""
def __init__(
self,
*,
step_id: builtins.int | None = ...,
kernel_name: builtins.str | None = ...,
index: builtins.int | None = ...,
tensor: tensorflow.core.framework.tensor_description_pb2.TensorDescription | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor", b"tensor"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["index", b"index", "kernel_name", b"kernel_name", "step_id", b"step_id", "tensor", b"tensor"]) -> None: ...
global___MemoryLogTensorOutput = MemoryLogTensorOutput
@typing_extensions.final
class MemoryLogRawAllocation(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STEP_ID_FIELD_NUMBER: builtins.int
OPERATION_FIELD_NUMBER: builtins.int
NUM_BYTES_FIELD_NUMBER: builtins.int
PTR_FIELD_NUMBER: builtins.int
ALLOCATION_ID_FIELD_NUMBER: builtins.int
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
step_id: builtins.int
"""Process-unique step id."""
operation: builtins.str
"""Name of the operation making the allocation."""
num_bytes: builtins.int
"""Number of bytes in the allocation."""
ptr: builtins.int
"""Address of the allocation."""
allocation_id: builtins.int
"""Id of the tensor buffer being allocated, used to match to a
corresponding deallocation.
"""
allocator_name: builtins.str
"""Name of the allocator used."""
def __init__(
self,
*,
step_id: builtins.int | None = ...,
operation: builtins.str | None = ...,
num_bytes: builtins.int | None = ...,
ptr: builtins.int | None = ...,
allocation_id: builtins.int | None = ...,
allocator_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "num_bytes", b"num_bytes", "operation", b"operation", "ptr", b"ptr", "step_id", b"step_id"]) -> None: ...
global___MemoryLogRawAllocation = MemoryLogRawAllocation
@typing_extensions.final
class MemoryLogRawDeallocation(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STEP_ID_FIELD_NUMBER: builtins.int
OPERATION_FIELD_NUMBER: builtins.int
ALLOCATION_ID_FIELD_NUMBER: builtins.int
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
DEFERRED_FIELD_NUMBER: builtins.int
step_id: builtins.int
"""Process-unique step id."""
operation: builtins.str
"""Name of the operation making the deallocation."""
allocation_id: builtins.int
"""Id of the tensor buffer being deallocated, used to match to a
corresponding allocation.
"""
allocator_name: builtins.str
"""Name of the allocator used."""
deferred: builtins.bool
"""True if the deallocation is queued and will be performed later,
e.g. for GPU lazy freeing of buffers.
"""
def __init__(
self,
*,
step_id: builtins.int | None = ...,
operation: builtins.str | None = ...,
allocation_id: builtins.int | None = ...,
allocator_name: builtins.str | None = ...,
deferred: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["allocation_id", b"allocation_id", "allocator_name", b"allocator_name", "deferred", b"deferred", "operation", b"operation", "step_id", b"step_id"]) -> None: ...
global___MemoryLogRawDeallocation = MemoryLogRawDeallocation

View File
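A minimal serialize/parse round trip for one of the memory-log messages typed above (illustrative only; it assumes the runtime log_memory_pb2 module is importable, and the operation and allocator names are hypothetical):

from tensorflow.core.framework import log_memory_pb2

alloc = log_memory_pb2.MemoryLogRawAllocation(
    step_id=7,
    operation="MatMul",          # hypothetical operation name
    num_bytes=4096,
    allocation_id=42,
    allocator_name="GPU_0_bfc",  # hypothetical allocator name
)
wire = alloc.SerializeToString()
parsed = log_memory_pb2.MemoryLogRawAllocation.FromString(wire)
assert parsed.allocation_id == 42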

@@ -0,0 +1,274 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _NodeClass:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _NodeClassEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_NodeClass.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: _NodeClass.ValueType # 0
INTERLEAVE_MANY: _NodeClass.ValueType # 1
ASYNC_INTERLEAVE_MANY: _NodeClass.ValueType # 2
KNOWN_RATIO: _NodeClass.ValueType # 3
ASYNC_KNOWN_RATIO: _NodeClass.ValueType # 4
UNKNOWN_RATIO: _NodeClass.ValueType # 5
ASYNC_UNKNOWN_RATIO: _NodeClass.ValueType # 6
class NodeClass(_NodeClass, metaclass=_NodeClassEnumTypeWrapper):
"""Class of a node in the performance model."""
UNKNOWN: NodeClass.ValueType # 0
INTERLEAVE_MANY: NodeClass.ValueType # 1
ASYNC_INTERLEAVE_MANY: NodeClass.ValueType # 2
KNOWN_RATIO: NodeClass.ValueType # 3
ASYNC_KNOWN_RATIO: NodeClass.ValueType # 4
UNKNOWN_RATIO: NodeClass.ValueType # 5
ASYNC_UNKNOWN_RATIO: NodeClass.ValueType # 6
global___NodeClass = NodeClass
class _AutotuneAlgorithm:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _AutotuneAlgorithmEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AutotuneAlgorithm.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT: _AutotuneAlgorithm.ValueType # 0
HILL_CLIMB: _AutotuneAlgorithm.ValueType # 1
GRADIENT_DESCENT: _AutotuneAlgorithm.ValueType # 2
MAX_PARALLELISM: _AutotuneAlgorithm.ValueType # 3
STAGE_BASED: _AutotuneAlgorithm.ValueType # 4
class AutotuneAlgorithm(_AutotuneAlgorithm, metaclass=_AutotuneAlgorithmEnumTypeWrapper):
"""Algorithm used for model autotuning optimization."""
DEFAULT: AutotuneAlgorithm.ValueType # 0
HILL_CLIMB: AutotuneAlgorithm.ValueType # 1
GRADIENT_DESCENT: AutotuneAlgorithm.ValueType # 2
MAX_PARALLELISM: AutotuneAlgorithm.ValueType # 3
STAGE_BASED: AutotuneAlgorithm.ValueType # 4
global___AutotuneAlgorithm = AutotuneAlgorithm
@typing_extensions.final
class ModelProto(google.protobuf.message.Message):
"""Protocol buffer representing the data used by the autotuning modeling
framework.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Node(google.protobuf.message.Message):
"""General representation of a node in the model."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Parameter(google.protobuf.message.Message):
"""Represents a node parameter."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
STATE_VALUE_FIELD_NUMBER: builtins.int
MIN_FIELD_NUMBER: builtins.int
MAX_FIELD_NUMBER: builtins.int
TUNABLE_FIELD_NUMBER: builtins.int
name: builtins.str
"""Human-readable name of the parameter."""
value: builtins.float
"""Identifies the model value of the parameter. This can be different from
the actual value (e.g. during optimization search).
"""
state_value: builtins.float
"""The actual value of the parameter."""
min: builtins.float
"""Minimum value of the parameter."""
max: builtins.float
"""Maximum value of the parameter."""
tunable: builtins.bool
"""Identifies whether the parameter should participate in autotuning."""
def __init__(
self,
*,
name: builtins.str | None = ...,
value: builtins.float | None = ...,
state_value: builtins.float | None = ...,
min: builtins.float | None = ...,
max: builtins.float | None = ...,
tunable: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["max", b"max", "min", b"min", "name", b"name", "state_value", b"state_value", "tunable", b"tunable", "value", b"value"]) -> None: ...
ID_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
AUTOTUNE_FIELD_NUMBER: builtins.int
BUFFERED_BYTES_FIELD_NUMBER: builtins.int
BUFFERED_ELEMENTS_FIELD_NUMBER: builtins.int
BYTES_CONSUMED_FIELD_NUMBER: builtins.int
BYTES_PRODUCED_FIELD_NUMBER: builtins.int
NUM_ELEMENTS_FIELD_NUMBER: builtins.int
PROCESSING_TIME_FIELD_NUMBER: builtins.int
RECORD_METRICS_FIELD_NUMBER: builtins.int
PARAMETERS_FIELD_NUMBER: builtins.int
INPUT_PROCESSING_TIME_SUM_FIELD_NUMBER: builtins.int
INPUT_PROCESSING_TIME_COUNT_FIELD_NUMBER: builtins.int
INPUTS_FIELD_NUMBER: builtins.int
NODE_CLASS_FIELD_NUMBER: builtins.int
RATIO_FIELD_NUMBER: builtins.int
MEMORY_RATIO_FIELD_NUMBER: builtins.int
id: builtins.int
"""Unique node ID."""
name: builtins.str
"""Human-readable name of the node."""
autotune: builtins.bool
"""An indication whether autotuning is enabled for this node."""
buffered_bytes: builtins.int
"""The number of bytes stored in this node's buffer."""
buffered_elements: builtins.int
"""The number of elements stored in this node's buffer."""
bytes_consumed: builtins.int
"""The number of bytes consumed by the node."""
bytes_produced: builtins.int
"""The number of bytes produced by the node."""
num_elements: builtins.int
"""The number of elements produced by the node."""
processing_time: builtins.int
"""The aggregate processing time spent in this node in nanoseconds."""
record_metrics: builtins.bool
"""An indication whether this node records metrics about produced and
consumed elements.
"""
@property
def parameters(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ModelProto.Node.Parameter]:
"""Parameters of this node."""
input_processing_time_sum: builtins.float
"""Statistic of inputs processing time history."""
input_processing_time_count: builtins.int
@property
def inputs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""IDs of inputs of this node."""
node_class: global___NodeClass.ValueType
"""Class of this node."""
ratio: builtins.float
"""Ratio of input to output elements. This is only used by KNOWN_RATIO and
ASYNC_KNOWN_RATIO nodes.
"""
memory_ratio: builtins.float
"""Ratio identifies how many parallelism calls are introduced by one
buffered element. This is only used by ASYNC_KNOWN_RATIO nodes.
"""
def __init__(
self,
*,
id: builtins.int | None = ...,
name: builtins.str | None = ...,
autotune: builtins.bool | None = ...,
buffered_bytes: builtins.int | None = ...,
buffered_elements: builtins.int | None = ...,
bytes_consumed: builtins.int | None = ...,
bytes_produced: builtins.int | None = ...,
num_elements: builtins.int | None = ...,
processing_time: builtins.int | None = ...,
record_metrics: builtins.bool | None = ...,
parameters: collections.abc.Iterable[global___ModelProto.Node.Parameter] | None = ...,
input_processing_time_sum: builtins.float | None = ...,
input_processing_time_count: builtins.int | None = ...,
inputs: collections.abc.Iterable[builtins.int] | None = ...,
node_class: global___NodeClass.ValueType | None = ...,
ratio: builtins.float | None = ...,
memory_ratio: builtins.float | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["autotune", b"autotune", "buffered_bytes", b"buffered_bytes", "buffered_elements", b"buffered_elements", "bytes_consumed", b"bytes_consumed", "bytes_produced", b"bytes_produced", "id", b"id", "input_processing_time_count", b"input_processing_time_count", "input_processing_time_sum", b"input_processing_time_sum", "inputs", b"inputs", "memory_ratio", b"memory_ratio", "name", b"name", "node_class", b"node_class", "num_elements", b"num_elements", "parameters", b"parameters", "processing_time", b"processing_time", "ratio", b"ratio", "record_metrics", b"record_metrics"]) -> None: ...
@typing_extensions.final
class NodesEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
@property
def value(self) -> global___ModelProto.Node: ...
def __init__(
self,
*,
key: builtins.int | None = ...,
value: global___ModelProto.Node | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class OptimizationParams(google.protobuf.message.Message):
"""Contains parameters of the model autotuning optimization."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ALGORITHM_FIELD_NUMBER: builtins.int
CPU_BUDGET_FIELD_NUMBER: builtins.int
RAM_BUDGET_FIELD_NUMBER: builtins.int
MODEL_INPUT_TIME_FIELD_NUMBER: builtins.int
algorithm: global___AutotuneAlgorithm.ValueType
"""Algorithm used for autotuning optimization."""
cpu_budget: builtins.int
"""Number of available logical threads."""
ram_budget: builtins.int
"""Amount of available memory in bytes."""
model_input_time: builtins.float
"""Time between two consecutive `GetNext` calls to the iterator represented
by the output node.
"""
def __init__(
self,
*,
algorithm: global___AutotuneAlgorithm.ValueType | None = ...,
cpu_budget: builtins.int | None = ...,
ram_budget: builtins.int | None = ...,
model_input_time: builtins.float | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["algorithm", b"algorithm", "cpu_budget", b"cpu_budget", "model_input_time", b"model_input_time", "ram_budget", b"ram_budget"]) -> None: ...
NODES_FIELD_NUMBER: builtins.int
OUTPUT_FIELD_NUMBER: builtins.int
ID_COUNTER_FIELD_NUMBER: builtins.int
OPTIMIZATION_PARAMS_FIELD_NUMBER: builtins.int
@property
def nodes(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___ModelProto.Node]:
"""Map of node IDs to nodes of this model."""
output: builtins.int
"""ID of the output node of this model."""
id_counter: builtins.int
"""Counter for node IDs of this model."""
@property
def optimization_params(self) -> global___ModelProto.OptimizationParams: ...
def __init__(
self,
*,
nodes: collections.abc.Mapping[builtins.int, global___ModelProto.Node] | None = ...,
output: builtins.int | None = ...,
id_counter: builtins.int | None = ...,
optimization_params: global___ModelProto.OptimizationParams | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["optimization_params", b"optimization_params"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["id_counter", b"id_counter", "nodes", b"nodes", "optimization_params", b"optimization_params", "output", b"output"]) -> None: ...
global___ModelProto = ModelProto

View File
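A minimal sketch of assembling a ModelProto, exercising the nodes map and the OptimizationParams submessage typed above (illustrative only; it assumes the runtime model_pb2 module is importable, and the node name and parameter values are hypothetical):

from tensorflow.core.framework import model_pb2

model = model_pb2.ModelProto(output=1, id_counter=2)
node = model.nodes[1]             # message-valued maps create entries on access
node.name = "ParallelMapDataset"  # hypothetical node name
node.node_class = model_pb2.ASYNC_KNOWN_RATIO
node.parameters.add(name="parallelism", value=4.0, min=1.0, max=16.0, tunable=True)
model.optimization_params.algorithm = model_pb2.AutotuneAlgorithm.Value("GRADIENT_DESCENT")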

@@ -0,0 +1,162 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
import tensorflow.core.framework.full_type_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class NodeDef(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class AttrEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class ExperimentalDebugInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ORIGINAL_NODE_NAMES_FIELD_NUMBER: builtins.int
ORIGINAL_FUNC_NAMES_FIELD_NUMBER: builtins.int
@property
def original_node_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Opaque string inserted into error messages created by the runtime.
This is intended to store the list of names of the nodes from the
original graph that this node was derived. For example if this node, say
C, was result of a fusion of 2 nodes A and B, then 'original_node' would
be {A, B}. This information can be used to map errors originating at the
current node to some top level source code.
"""
@property
def original_func_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""This is intended to store the list of names of the functions from the
original graph that this node was derived. For example if this node, say
C, was result of a fusion of node A in function FA and node B in function
FB, then `original_funcs` would be {FA, FB}. If the node is in the top
level graph, the `original_func` is empty. This information, with the
`original_node_names` can be used to map errors originating at the
current ndoe to some top level source code.
"""
def __init__(
self,
*,
original_node_names: collections.abc.Iterable[builtins.str] | None = ...,
original_func_names: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["original_func_names", b"original_func_names", "original_node_names", b"original_node_names"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
OP_FIELD_NUMBER: builtins.int
INPUT_FIELD_NUMBER: builtins.int
DEVICE_FIELD_NUMBER: builtins.int
ATTR_FIELD_NUMBER: builtins.int
EXPERIMENTAL_DEBUG_INFO_FIELD_NUMBER: builtins.int
EXPERIMENTAL_TYPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name given to this operator. Used for naming inputs,
logging, visualization, etc. Unique within a single GraphDef.
Must match the regexp "[A-Za-z0-9.][A-Za-z0-9_>./]*".
"""
op: builtins.str
"""The operation name. There may be custom parameters in attrs.
Op names starting with an underscore are reserved for internal use.
"""
@property
def input(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Each input is "node:src_output" with "node" being a string name and
"src_output" indicating which output tensor to use from "node". If
"src_output" is 0 the ":0" suffix can be omitted. Regular inputs
may optionally be followed by control inputs that have the format
"^node".
"""
device: builtins.str
"""A (possibly partial) specification for the device on which this
node should be placed.
The expected syntax for this string is as follows:
DEVICE_SPEC ::= PARTIAL_SPEC
PARTIAL_SPEC ::= ("/" CONSTRAINT) *
CONSTRAINT ::= ("job:" JOB_NAME)
| ("replica:" [1-9][0-9]*)
| ("task:" [1-9][0-9]*)
| ("device:" [A-Za-z]* ":" ([1-9][0-9]* | "*") )
Valid values for this string include:
* "/job:worker/replica:0/task:1/device:GPU:3" (full specification)
* "/job:worker/device:GPU:3" (partial specification)
* "" (no specification)
If the constraints do not resolve to a single device (or if this
field is empty or not present), the runtime will attempt to
choose a device automatically.
"""
@property
def attr(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]:
"""Operation-specific graph-construction-time configuration.
Note that this should include all attrs defined in the
corresponding OpDef, including those with a value matching
the default -- this allows the default to change and makes
NodeDefs easier to interpret on their own. However, if
an attr with a default is not specified in this list, the
default will be used.
The "names" (keys) must match the regexp "[a-z][a-z0-9_]+" (and
one of the names from the corresponding OpDef's attr field).
The values must have a type matching the corresponding OpDef
attr's type field.
TODO(josh11b): Add some examples here showing best practices.
"""
@property
def experimental_debug_info(self) -> global___NodeDef.ExperimentalDebugInfo:
"""This stores debug information associated with the node."""
@property
def experimental_type(self) -> tensorflow.core.framework.full_type_pb2.FullTypeDef:
"""The complete type of this node. Experimental and subject to change.
Currently, the field only contains the return types of the node. That will
extend in the future to contain the entire signature of the node, as a
function type.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
op: builtins.str | None = ...,
input: collections.abc.Iterable[builtins.str] | None = ...,
device: builtins.str | None = ...,
attr: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ...,
experimental_debug_info: global___NodeDef.ExperimentalDebugInfo | None = ...,
experimental_type: tensorflow.core.framework.full_type_pb2.FullTypeDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["experimental_debug_info", b"experimental_debug_info", "experimental_type", b"experimental_type"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["attr", b"attr", "device", b"device", "experimental_debug_info", b"experimental_debug_info", "experimental_type", b"experimental_type", "input", b"input", "name", b"name", "op", b"op"]) -> None: ...
global___NodeDef = NodeDef

View File
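A minimal sketch of building a NodeDef, including its attr map of AttrValues, as typed above (illustrative only; it assumes the runtime node_def_pb2 and types_pb2 modules are importable, and the node name and device string are hypothetical):

from tensorflow.core.framework import node_def_pb2, types_pb2

node = node_def_pb2.NodeDef(
    name="weights",   # hypothetical node name
    op="VariableV2",
    device="/job:worker/replica:0/task:1/device:GPU:3",
)
# attr is a MessageMap[str, AttrValue]; missing keys are created on access.
node.attr["dtype"].type = types_pb2.DT_FLOAT
node.attr["shape"].shape.dim.add(size=128)
assert not node.HasField("experimental_debug_info")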

@@ -0,0 +1,317 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
import tensorflow.core.framework.full_type_pb2
import tensorflow.core.framework.resource_handle_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class OpDef(google.protobuf.message.Message):
"""Defines an operation. A NodeDef in a GraphDef specifies an Op by
using the "op" field which should match the name of a OpDef.
LINT.IfChange
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ArgDef(google.protobuf.message.Message):
"""For describing inputs and outputs."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
TYPE_ATTR_FIELD_NUMBER: builtins.int
NUMBER_ATTR_FIELD_NUMBER: builtins.int
TYPE_LIST_ATTR_FIELD_NUMBER: builtins.int
HANDLE_DATA_FIELD_NUMBER: builtins.int
IS_REF_FIELD_NUMBER: builtins.int
EXPERIMENTAL_FULL_TYPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name for the input/output. Should match the regexp "[a-z][a-z0-9_]*"."""
description: builtins.str
"""Human readable description."""
type: tensorflow.core.framework.types_pb2.DataType.ValueType
"""Describes the type of one or more tensors that are accepted/produced
by this input/output arg. The only legal combinations are:
* For a single tensor: either the "type" field is set or the
"type_attr" field is set to the name of an attr with type "type".
* For a sequence of tensors with the same type: the "number_attr"
field will be set to the name of an attr with type "int", and
either the "type" or "type_attr" field will be set as for
single tensors.
* For a sequence of tensors, the "type_list_attr" field will be set
to the name of an attr with type "list(type)".
"""
type_attr: builtins.str
"""if specified, attr must have type "type" """
number_attr: builtins.str
"""if specified, attr must have type "int" """
type_list_attr: builtins.str
"""If specified, attr must have type "list(type)", and none of
type, type_attr, and number_attr may be specified.
"""
@property
def handle_data(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape]:
"""The handle data for resource inputs."""
is_ref: builtins.bool
"""For inputs: if true, the inputs are required to be refs.
By default, inputs can be either refs or non-refs.
For outputs: if true, outputs are refs, otherwise they are not.
"""
@property
def experimental_full_type(self) -> tensorflow.core.framework.full_type_pb2.FullTypeDef:
"""Experimental. Full type declaration for this argument.
The full type specification combines type, type_attr, type_list_attr,
etc. into a unified representation.
            This declaration may contain non-concrete types (for example,
            Tensor<TypeVar<'T'>> is a valid type declaration).
Note: this is a transient field. The long-term aim is to represent the
entire OpDef as a single type: a callable. In that context, this field is
just the type of a single argument.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
description: builtins.str | None = ...,
type: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
type_attr: builtins.str | None = ...,
number_attr: builtins.str | None = ...,
type_list_attr: builtins.str | None = ...,
handle_data: collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto.DtypeAndShape] | None = ...,
is_ref: builtins.bool | None = ...,
experimental_full_type: tensorflow.core.framework.full_type_pb2.FullTypeDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["experimental_full_type", b"experimental_full_type"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["description", b"description", "experimental_full_type", b"experimental_full_type", "handle_data", b"handle_data", "is_ref", b"is_ref", "name", b"name", "number_attr", b"number_attr", "type", b"type", "type_attr", b"type_attr", "type_list_attr", b"type_list_attr"]) -> None: ...
@typing_extensions.final
class AttrDef(google.protobuf.message.Message):
"""Description of the graph-construction-time configuration of this
Op. That is to say, this describes the attr fields that will
be specified in the NodeDef.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
DEFAULT_VALUE_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
HAS_MINIMUM_FIELD_NUMBER: builtins.int
MINIMUM_FIELD_NUMBER: builtins.int
ALLOWED_VALUES_FIELD_NUMBER: builtins.int
name: builtins.str
"""A descriptive name for the argument. May be used, e.g. by the
Python client, as a keyword argument name, and so should match
the regexp "[a-z][a-z0-9_]+".
"""
type: builtins.str
"""One of the type names from attr_value.proto ("string", "list(string)",
"int", etc.).
"""
@property
def default_value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue:
"""A reasonable default for this attribute if the user does not supply
a value. If not specified, the user must supply a value.
"""
description: builtins.str
"""Human-readable description."""
has_minimum: builtins.bool
"""--- Constraints ---
These constraints are only in effect if specified. Default is no
constraints.
For type == "int", this is a minimum value. For "list(___)"
types, this is the minimum length.
"""
minimum: builtins.int
@property
def allowed_values(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue:
"""The set of allowed values. Has type that is the "list" version
of the "type" field above (uses the "list" field of AttrValue).
If type == "type" or "list(type)" above, then the "type" field
of "allowed_values.list" has the set of allowed DataTypes.
If type == "string" or "list(string)", then the "s" field of
"allowed_values.list" has the set of allowed strings.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
type: builtins.str | None = ...,
default_value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
description: builtins.str | None = ...,
has_minimum: builtins.bool | None = ...,
minimum: builtins.int | None = ...,
allowed_values: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["allowed_values", b"allowed_values", "default_value", b"default_value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allowed_values", b"allowed_values", "default_value", b"default_value", "description", b"description", "has_minimum", b"has_minimum", "minimum", b"minimum", "name", b"name", "type", b"type"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
INPUT_ARG_FIELD_NUMBER: builtins.int
OUTPUT_ARG_FIELD_NUMBER: builtins.int
CONTROL_OUTPUT_FIELD_NUMBER: builtins.int
ATTR_FIELD_NUMBER: builtins.int
DEPRECATION_FIELD_NUMBER: builtins.int
SUMMARY_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
IS_COMMUTATIVE_FIELD_NUMBER: builtins.int
IS_AGGREGATE_FIELD_NUMBER: builtins.int
IS_STATEFUL_FIELD_NUMBER: builtins.int
ALLOWS_UNINITIALIZED_INPUT_FIELD_NUMBER: builtins.int
IS_DISTRIBUTED_COMMUNICATION_FIELD_NUMBER: builtins.int
name: builtins.str
"""Op names starting with an underscore are reserved for internal use.
Names should be CamelCase and match the regexp "[A-Z][a-zA-Z0-9>_]*".
"""
@property
def input_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpDef.ArgDef]:
"""Description of the input(s)."""
@property
def output_arg(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpDef.ArgDef]:
"""Description of the output(s)."""
@property
def control_output(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Named control outputs for this operation. Useful only for composite
operations (i.e. functions) which want to name different control outputs.
"""
@property
def attr(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpDef.AttrDef]: ...
@property
def deprecation(self) -> global___OpDeprecation:
"""Optional deprecation based on GraphDef versions."""
summary: builtins.str
"""One-line human-readable description of what the Op does."""
description: builtins.str
"""Additional, longer human-readable description of what the Op does."""
is_commutative: builtins.bool
"""-------------------------------------------------------------------------
Which optimizations this operation can participate in.
True if the operation is commutative ("op(a,b) == op(b,a)" for all inputs)
"""
is_aggregate: builtins.bool
"""If is_aggregate is true, then this operation accepts N >= 2
inputs and produces 1 output all of the same type. Should be
associative and commutative, and produce output with the same
shape as the input. The optimizer may replace an aggregate op
taking input from multiple devices with a tree of aggregate ops
that aggregate locally within each device (and possibly within
groups of nearby devices) before communicating.
TODO(josh11b): Implement that optimization.
    Used for things like add.
"""
is_stateful: builtins.bool
"""-------------------------------------------------------------------------
Optimization constraints.
Ops are marked as stateful if their behavior depends on some state beyond
their input tensors (e.g. variable reading op) or if they have
a side-effect (e.g. printing or asserting ops). Equivalently, stateless ops
must always produce the same output for the same input and have
no side-effects.
By default Ops may be moved between devices. Stateful ops should
either not be moved, or should only be moved if that state can also
be moved (e.g. via some sort of save / restore).
Stateful ops are guaranteed to never be optimized away by Common
Subexpression Elimination (CSE).
    Used for things like variables and queues.
"""
allows_uninitialized_input: builtins.bool
"""-------------------------------------------------------------------------
Non-standard options.
By default, all inputs to an Op must be initialized Tensors. Ops
that may initialize tensors for the first time should set this
field to true, to allow the Op to take an uninitialized Tensor as
input.
    Used for Assign, etc.
"""
is_distributed_communication: builtins.bool
"""Indicates whether the op implementation uses distributed communication.
If True, the op is allowed to return errors for network disconnection and
    trigger TF network failure handling logic.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
input_arg: collections.abc.Iterable[global___OpDef.ArgDef] | None = ...,
output_arg: collections.abc.Iterable[global___OpDef.ArgDef] | None = ...,
control_output: collections.abc.Iterable[builtins.str] | None = ...,
attr: collections.abc.Iterable[global___OpDef.AttrDef] | None = ...,
deprecation: global___OpDeprecation | None = ...,
summary: builtins.str | None = ...,
description: builtins.str | None = ...,
is_commutative: builtins.bool | None = ...,
is_aggregate: builtins.bool | None = ...,
is_stateful: builtins.bool | None = ...,
allows_uninitialized_input: builtins.bool | None = ...,
is_distributed_communication: builtins.bool | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["deprecation", b"deprecation"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allows_uninitialized_input", b"allows_uninitialized_input", "attr", b"attr", "control_output", b"control_output", "deprecation", b"deprecation", "description", b"description", "input_arg", b"input_arg", "is_aggregate", b"is_aggregate", "is_commutative", b"is_commutative", "is_distributed_communication", b"is_distributed_communication", "is_stateful", b"is_stateful", "name", b"name", "output_arg", b"output_arg", "summary", b"summary"]) -> None: ...
global___OpDef = OpDef
@typing_extensions.final
class OpDeprecation(google.protobuf.message.Message):
"""Information about version-dependent deprecation of an op"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VERSION_FIELD_NUMBER: builtins.int
EXPLANATION_FIELD_NUMBER: builtins.int
version: builtins.int
"""First GraphDef version at which the op is disallowed."""
explanation: builtins.str
"""Explanation of why it was deprecated and what to use instead."""
def __init__(
self,
*,
version: builtins.int | None = ...,
explanation: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["explanation", b"explanation", "version", b"version"]) -> None: ...
global___OpDeprecation = OpDeprecation
@typing_extensions.final
class OpList(google.protobuf.message.Message):
"""A collection of OpDefs"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OP_FIELD_NUMBER: builtins.int
@property
def op(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___OpDef]: ...
def __init__(
self,
*,
op: collections.abc.Iterable[global___OpDef] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["op", b"op"]) -> None: ...
global___OpList = OpList

View File
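A minimal sketch of registering a custom op definition with the OpDef/AttrDef/OpList messages typed above (illustrative only; it assumes the runtime op_def_pb2 and types_pb2 modules are importable, and "MyAdd" is a hypothetical op name):

from tensorflow.core.framework import op_def_pb2, types_pb2

op = op_def_pb2.OpDef(name="MyAdd", summary="Adds two tensors.", is_commutative=True)
op.input_arg.add(name="x", type_attr="T")
op.input_arg.add(name="y", type_attr="T")
op.output_arg.add(name="z", type_attr="T")
attr = op.attr.add(name="T", type="type")
# Restrict "T" to a set of allowed DataTypes via the AttrValue list field.
attr.allowed_values.list.type.extend([types_pb2.DT_FLOAT, types_pb2.DT_INT32])
registry = op_def_pb2.OpList(op=[op])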

@@ -0,0 +1,43 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ReaderBaseState(google.protobuf.message.Message):
"""For serializing and restoring the state of ReaderBase, see
reader_base.h for details.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
WORK_STARTED_FIELD_NUMBER: builtins.int
WORK_FINISHED_FIELD_NUMBER: builtins.int
NUM_RECORDS_PRODUCED_FIELD_NUMBER: builtins.int
CURRENT_WORK_FIELD_NUMBER: builtins.int
work_started: builtins.int
work_finished: builtins.int
num_records_produced: builtins.int
current_work: builtins.bytes
def __init__(
self,
*,
work_started: builtins.int | None = ...,
work_finished: builtins.int | None = ...,
num_records_produced: builtins.int | None = ...,
current_work: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["current_work", b"current_work", "num_records_produced", b"num_records_produced", "work_finished", b"work_finished", "work_started", b"work_started"]) -> None: ...
global___ReaderBaseState = ReaderBaseState

View File
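A minimal sketch of the ReaderBaseState message typed above, showing the ClearField behavior the stub declares (illustrative only; it assumes the runtime reader_base_pb2 module is importable, and the work item is hypothetical):

from tensorflow.core.framework import reader_base_pb2

state = reader_base_pb2.ReaderBaseState(
    work_started=1,
    num_records_produced=10,
    current_work=b"/tmp/shard-00000",   # hypothetical work item
)
state.ClearField("num_records_produced")
assert state.num_records_produced == 0  # cleared scalars revert to their defaults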

@@ -0,0 +1,85 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ResourceHandleProto(google.protobuf.message.Message):
"""Protocol buffer representing a handle to a tensorflow resource. Handles are
not valid across executions, but can be serialized back and forth from within
a single run.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class DtypeAndShape(google.protobuf.message.Message):
"""Protocol buffer representing a pair of (data type, tensor shape)."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "shape", b"shape"]) -> None: ...
DEVICE_FIELD_NUMBER: builtins.int
CONTAINER_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
HASH_CODE_FIELD_NUMBER: builtins.int
MAYBE_TYPE_NAME_FIELD_NUMBER: builtins.int
DTYPES_AND_SHAPES_FIELD_NUMBER: builtins.int
device: builtins.str
"""Unique name for the device containing the resource."""
container: builtins.str
"""Container in which this resource is placed."""
name: builtins.str
"""Unique name of this resource."""
hash_code: builtins.int
"""Hash code for the type of the resource. Is only valid in the same device
and in the same execution.
"""
maybe_type_name: builtins.str
"""For debug-only, the name of the type pointed to by this handle, if
available.
"""
@property
def dtypes_and_shapes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceHandleProto.DtypeAndShape]:
"""Data types and shapes for the underlying resource."""
def __init__(
self,
*,
device: builtins.str | None = ...,
container: builtins.str | None = ...,
name: builtins.str | None = ...,
hash_code: builtins.int | None = ...,
maybe_type_name: builtins.str | None = ...,
dtypes_and_shapes: collections.abc.Iterable[global___ResourceHandleProto.DtypeAndShape] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["container", b"container", "device", b"device", "dtypes_and_shapes", b"dtypes_and_shapes", "hash_code", b"hash_code", "maybe_type_name", b"maybe_type_name", "name", b"name"]) -> None: ...
global___ResourceHandleProto = ResourceHandleProto

View File
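A minimal sketch of the ResourceHandleProto message and its nested DtypeAndShape pair, as typed above (illustrative only; it assumes the runtime resource_handle_pb2 and types_pb2 modules are importable, and the device, container, and resource names are hypothetical):

from tensorflow.core.framework import resource_handle_pb2, types_pb2

handle = resource_handle_pb2.ResourceHandleProto(
    device="/job:localhost/replica:0/task:0/device:CPU:0",
    container="localhost",
    name="my_variable",                # hypothetical resource name
    hash_code=123456789,
)
pair = handle.dtypes_and_shapes.add(dtype=types_pb2.DT_FLOAT)
pair.shape.dim.add(size=10)            # mutating .shape creates the submessage
assert pair.HasField("shape")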

@@ -0,0 +1,265 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.allocation_description_pb2
import tensorflow.core.framework.tensor_description_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class AllocationRecord(google.protobuf.message.Message):
"""An allocation/de-allocation operation performed by the allocator."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ALLOC_MICROS_FIELD_NUMBER: builtins.int
ALLOC_BYTES_FIELD_NUMBER: builtins.int
alloc_micros: builtins.int
"""The timestamp of the operation."""
alloc_bytes: builtins.int
"""Number of bytes allocated, or de-allocated if negative."""
def __init__(
self,
*,
alloc_micros: builtins.int | None = ...,
alloc_bytes: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["alloc_bytes", b"alloc_bytes", "alloc_micros", b"alloc_micros"]) -> None: ...
global___AllocationRecord = AllocationRecord
@typing_extensions.final
class AllocatorMemoryUsed(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
TOTAL_BYTES_FIELD_NUMBER: builtins.int
PEAK_BYTES_FIELD_NUMBER: builtins.int
LIVE_BYTES_FIELD_NUMBER: builtins.int
ALLOCATION_RECORDS_FIELD_NUMBER: builtins.int
ALLOCATOR_BYTES_IN_USE_FIELD_NUMBER: builtins.int
allocator_name: builtins.str
total_bytes: builtins.int
"""These are per-node allocator memory stats."""
peak_bytes: builtins.int
live_bytes: builtins.int
"""The bytes that are not deallocated."""
@property
def allocation_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AllocationRecord]:
"""The allocation and deallocation timeline."""
allocator_bytes_in_use: builtins.int
"""These are snapshots of the overall allocator memory stats.
The number of live bytes currently allocated by the allocator.
"""
def __init__(
self,
*,
allocator_name: builtins.str | None = ...,
total_bytes: builtins.int | None = ...,
peak_bytes: builtins.int | None = ...,
live_bytes: builtins.int | None = ...,
allocation_records: collections.abc.Iterable[global___AllocationRecord] | None = ...,
allocator_bytes_in_use: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["allocation_records", b"allocation_records", "allocator_bytes_in_use", b"allocator_bytes_in_use", "allocator_name", b"allocator_name", "live_bytes", b"live_bytes", "peak_bytes", b"peak_bytes", "total_bytes", b"total_bytes"]) -> None: ...
global___AllocatorMemoryUsed = AllocatorMemoryUsed
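# Illustrative sketch, not part of the generated stub: recording an allocation
# timeline, assuming this file is importable as
# tensorflow.core.framework.step_stats_pb2. All numbers are made up.
from tensorflow.core.framework import step_stats_pb2

mem = step_stats_pb2.AllocatorMemoryUsed(
    allocator_name="GPU_0_bfc",
    total_bytes=4096,
    peak_bytes=4096,
    live_bytes=1024,
    allocation_records=[
        # Positive alloc_bytes records an allocation, negative a de-allocation.
        step_stats_pb2.AllocationRecord(alloc_micros=1, alloc_bytes=4096),
        step_stats_pb2.AllocationRecord(alloc_micros=9, alloc_bytes=-3072),
    ],
)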
@typing_extensions.final
class NodeOutput(google.protobuf.message.Message):
"""Output sizes recorded for a single execution of a graph node."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SLOT_FIELD_NUMBER: builtins.int
TENSOR_DESCRIPTION_FIELD_NUMBER: builtins.int
slot: builtins.int
@property
def tensor_description(self) -> tensorflow.core.framework.tensor_description_pb2.TensorDescription: ...
def __init__(
self,
*,
slot: builtins.int | None = ...,
tensor_description: tensorflow.core.framework.tensor_description_pb2.TensorDescription | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor_description", b"tensor_description"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["slot", b"slot", "tensor_description", b"tensor_description"]) -> None: ...
global___NodeOutput = NodeOutput
@typing_extensions.final
class MemoryStats(google.protobuf.message.Message):
"""For memory tracking."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int
PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int
PERSISTENT_TENSOR_ALLOC_IDS_FIELD_NUMBER: builtins.int
DEVICE_TEMP_MEMORY_SIZE_FIELD_NUMBER: builtins.int
DEVICE_PERSISTENT_MEMORY_SIZE_FIELD_NUMBER: builtins.int
DEVICE_PERSISTENT_TENSOR_ALLOC_IDS_FIELD_NUMBER: builtins.int
temp_memory_size: builtins.int
persistent_memory_size: builtins.int
@property
def persistent_tensor_alloc_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
device_temp_memory_size: builtins.int
device_persistent_memory_size: builtins.int
@property
def device_persistent_tensor_alloc_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
def __init__(
self,
*,
temp_memory_size: builtins.int | None = ...,
persistent_memory_size: builtins.int | None = ...,
persistent_tensor_alloc_ids: collections.abc.Iterable[builtins.int] | None = ...,
device_temp_memory_size: builtins.int | None = ...,
device_persistent_memory_size: builtins.int | None = ...,
device_persistent_tensor_alloc_ids: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device_persistent_memory_size", b"device_persistent_memory_size", "device_persistent_tensor_alloc_ids", b"device_persistent_tensor_alloc_ids", "device_temp_memory_size", b"device_temp_memory_size", "persistent_memory_size", b"persistent_memory_size", "persistent_tensor_alloc_ids", b"persistent_tensor_alloc_ids", "temp_memory_size", b"temp_memory_size"]) -> None: ...
global___MemoryStats = MemoryStats
@typing_extensions.final
class NodeExecStats(google.protobuf.message.Message):
"""Time/size stats recorded for a single execution of a graph node."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_NAME_FIELD_NUMBER: builtins.int
ALL_START_MICROS_FIELD_NUMBER: builtins.int
OP_START_REL_MICROS_FIELD_NUMBER: builtins.int
OP_END_REL_MICROS_FIELD_NUMBER: builtins.int
ALL_END_REL_MICROS_FIELD_NUMBER: builtins.int
MEMORY_FIELD_NUMBER: builtins.int
OUTPUT_FIELD_NUMBER: builtins.int
TIMELINE_LABEL_FIELD_NUMBER: builtins.int
SCHEDULED_MICROS_FIELD_NUMBER: builtins.int
THREAD_ID_FIELD_NUMBER: builtins.int
REFERENCED_TENSOR_FIELD_NUMBER: builtins.int
MEMORY_STATS_FIELD_NUMBER: builtins.int
ALL_START_NANOS_FIELD_NUMBER: builtins.int
OP_START_REL_NANOS_FIELD_NUMBER: builtins.int
OP_END_REL_NANOS_FIELD_NUMBER: builtins.int
ALL_END_REL_NANOS_FIELD_NUMBER: builtins.int
SCHEDULED_NANOS_FIELD_NUMBER: builtins.int
node_name: builtins.str
"""TODO(tucker): Use some more compact form of node identity than
the full string name. Either all processes should agree on a
global id (cost_id?) for each node, or we should use a hash of
the name.
"""
all_start_micros: builtins.int
op_start_rel_micros: builtins.int
op_end_rel_micros: builtins.int
all_end_rel_micros: builtins.int
@property
def memory(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AllocatorMemoryUsed]: ...
@property
def output(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NodeOutput]: ...
timeline_label: builtins.str
scheduled_micros: builtins.int
thread_id: builtins.int
@property
def referenced_tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.allocation_description_pb2.AllocationDescription]: ...
@property
def memory_stats(self) -> global___MemoryStats: ...
all_start_nanos: builtins.int
op_start_rel_nanos: builtins.int
op_end_rel_nanos: builtins.int
all_end_rel_nanos: builtins.int
scheduled_nanos: builtins.int
def __init__(
self,
*,
node_name: builtins.str | None = ...,
all_start_micros: builtins.int | None = ...,
op_start_rel_micros: builtins.int | None = ...,
op_end_rel_micros: builtins.int | None = ...,
all_end_rel_micros: builtins.int | None = ...,
memory: collections.abc.Iterable[global___AllocatorMemoryUsed] | None = ...,
output: collections.abc.Iterable[global___NodeOutput] | None = ...,
timeline_label: builtins.str | None = ...,
scheduled_micros: builtins.int | None = ...,
thread_id: builtins.int | None = ...,
referenced_tensor: collections.abc.Iterable[tensorflow.core.framework.allocation_description_pb2.AllocationDescription] | None = ...,
memory_stats: global___MemoryStats | None = ...,
all_start_nanos: builtins.int | None = ...,
op_start_rel_nanos: builtins.int | None = ...,
op_end_rel_nanos: builtins.int | None = ...,
all_end_rel_nanos: builtins.int | None = ...,
scheduled_nanos: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["memory_stats", b"memory_stats"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["all_end_rel_micros", b"all_end_rel_micros", "all_end_rel_nanos", b"all_end_rel_nanos", "all_start_micros", b"all_start_micros", "all_start_nanos", b"all_start_nanos", "memory", b"memory", "memory_stats", b"memory_stats", "node_name", b"node_name", "op_end_rel_micros", b"op_end_rel_micros", "op_end_rel_nanos", b"op_end_rel_nanos", "op_start_rel_micros", b"op_start_rel_micros", "op_start_rel_nanos", b"op_start_rel_nanos", "output", b"output", "referenced_tensor", b"referenced_tensor", "scheduled_micros", b"scheduled_micros", "scheduled_nanos", b"scheduled_nanos", "thread_id", b"thread_id", "timeline_label", b"timeline_label"]) -> None: ...
global___NodeExecStats = NodeExecStats
@typing_extensions.final
class DeviceStepStats(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ThreadNamesEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
value: builtins.str
def __init__(
self,
*,
key: builtins.int | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
DEVICE_FIELD_NUMBER: builtins.int
NODE_STATS_FIELD_NUMBER: builtins.int
THREAD_NAMES_FIELD_NUMBER: builtins.int
device: builtins.str
@property
def node_stats(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NodeExecStats]: ...
@property
def thread_names(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, builtins.str]:
"""Its key is thread id."""
def __init__(
self,
*,
device: builtins.str | None = ...,
node_stats: collections.abc.Iterable[global___NodeExecStats] | None = ...,
thread_names: collections.abc.Mapping[builtins.int, builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device", b"device", "node_stats", b"node_stats", "thread_names", b"thread_names"]) -> None: ...
global___DeviceStepStats = DeviceStepStats
@typing_extensions.final
class StepStats(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEV_STATS_FIELD_NUMBER: builtins.int
@property
def dev_stats(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DeviceStepStats]: ...
def __init__(
self,
*,
dev_stats: collections.abc.Iterable[global___DeviceStepStats] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dev_stats", b"dev_stats"]) -> None: ...
global___StepStats = StepStats
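# Illustrative sketch, not part of the generated stub: assembling per-device
# execution stats bottom-up (same assumed module as above; timings made up).
from tensorflow.core.framework import step_stats_pb2

node = step_stats_pb2.NodeExecStats(
    node_name="MatMul",
    all_start_micros=100,
    op_start_rel_micros=2,
    op_end_rel_micros=40,
    all_end_rel_micros=45,
    thread_id=0,
)
step = step_stats_pb2.StepStats(
    dev_stats=[
        step_stats_pb2.DeviceStepStats(
            device="/device:CPU:0",
            node_stats=[node],
            thread_names={0: "main"},  # keyed by thread id
        )
    ]
)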

View File

@@ -0,0 +1,294 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.tsl.protobuf.histogram_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
from tensorflow.tsl.protobuf.histogram_pb2 import (
HistogramProto as HistogramProto,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _DataClass:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _DataClassEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataClass.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DATA_CLASS_UNKNOWN: _DataClass.ValueType # 0
"""Unknown data class, used (implicitly) for legacy data. Will not be
processed by data ingestion pipelines.
"""
DATA_CLASS_SCALAR: _DataClass.ValueType # 1
"""Scalar time series. Each `Value` for the corresponding tag must have
`tensor` set to a rank-0 tensor of type `DT_FLOAT` (float32).
"""
DATA_CLASS_TENSOR: _DataClass.ValueType # 2
"""Tensor time series. Each `Value` for the corresponding tag must have
`tensor` set. The tensor value is arbitrary, but should be small to
accommodate direct storage in database backends: an upper bound of a few
kilobytes is a reasonable rule of thumb.
"""
DATA_CLASS_BLOB_SEQUENCE: _DataClass.ValueType # 3
"""Blob sequence time series. Each `Value` for the corresponding tag must
have `tensor` set to a rank-1 tensor of bytestring dtype.
"""
class DataClass(_DataClass, metaclass=_DataClassEnumTypeWrapper): ...
DATA_CLASS_UNKNOWN: DataClass.ValueType # 0
"""Unknown data class, used (implicitly) for legacy data. Will not be
processed by data ingestion pipelines.
"""
DATA_CLASS_SCALAR: DataClass.ValueType # 1
"""Scalar time series. Each `Value` for the corresponding tag must have
`tensor` set to a rank-0 tensor of type `DT_FLOAT` (float32).
"""
DATA_CLASS_TENSOR: DataClass.ValueType # 2
"""Tensor time series. Each `Value` for the corresponding tag must have
`tensor` set. The tensor value is arbitrary, but should be small to
accommodate direct storage in database backends: an upper bound of a few
kilobytes is a reasonable rule of thumb.
"""
DATA_CLASS_BLOB_SEQUENCE: DataClass.ValueType # 3
"""Blob sequence time series. Each `Value` for the corresponding tag must
have `tensor` set to a rank-1 tensor of bytestring dtype.
"""
global___DataClass = DataClass
@typing_extensions.final
class SummaryDescription(google.protobuf.message.Message):
"""Metadata associated with a series of Summary data"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_HINT_FIELD_NUMBER: builtins.int
type_hint: builtins.str
"""Hint on how plugins should process the data in this series.
Supported values include "scalar", "histogram", "image", "audio"
"""
def __init__(
self,
*,
type_hint: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["type_hint", b"type_hint"]) -> None: ...
global___SummaryDescription = SummaryDescription
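# Illustrative sketch, not part of the generated stub: a type hint chosen from
# the supported values listed above, assuming this file is importable as
# tensorflow.core.framework.summary_pb2.
from tensorflow.core.framework import summary_pb2

desc = summary_pb2.SummaryDescription(type_hint="scalar")  # or "histogram", "image", "audio"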
@typing_extensions.final
class SummaryMetadata(google.protobuf.message.Message):
"""A SummaryMetadata encapsulates information on which plugins are able to make
use of a certain summary value.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class PluginData(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PLUGIN_NAME_FIELD_NUMBER: builtins.int
CONTENT_FIELD_NUMBER: builtins.int
plugin_name: builtins.str
"""The name of the plugin this data pertains to."""
content: builtins.bytes
"""The content to store for the plugin. The best practice is for this to be
a binary serialized protocol buffer.
"""
def __init__(
self,
*,
plugin_name: builtins.str | None = ...,
content: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["content", b"content", "plugin_name", b"plugin_name"]) -> None: ...
PLUGIN_DATA_FIELD_NUMBER: builtins.int
DISPLAY_NAME_FIELD_NUMBER: builtins.int
SUMMARY_DESCRIPTION_FIELD_NUMBER: builtins.int
DATA_CLASS_FIELD_NUMBER: builtins.int
@property
def plugin_data(self) -> global___SummaryMetadata.PluginData:
"""Data that associates a summary with a certain plugin."""
display_name: builtins.str
"""Display name for viewing in TensorBoard."""
summary_description: builtins.str
"""Longform readable description of the summary sequence. Markdown supported."""
data_class: global___DataClass.ValueType
"""Class of data stored in this time series. Required for compatibility with
TensorBoard's generic data facilities (`DataProvider`, et al.). This value
imposes constraints on the dtype and shape of the corresponding tensor
values. See `DataClass` docs for details.
"""
def __init__(
self,
*,
plugin_data: global___SummaryMetadata.PluginData | None = ...,
display_name: builtins.str | None = ...,
summary_description: builtins.str | None = ...,
data_class: global___DataClass.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["plugin_data", b"plugin_data"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data_class", b"data_class", "display_name", b"display_name", "plugin_data", b"plugin_data", "summary_description", b"summary_description"]) -> None: ...
global___SummaryMetadata = SummaryMetadata
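# Illustrative sketch, not part of the generated stub: metadata tagging a time
# series as scalar data (same assumed module as above; names are made up).
from tensorflow.core.framework import summary_pb2

metadata = summary_pb2.SummaryMetadata(
    plugin_data=summary_pb2.SummaryMetadata.PluginData(
        plugin_name="scalars", content=b""
    ),
    display_name="loss",
    summary_description="Training loss; *Markdown* supported.",
    data_class=summary_pb2.DATA_CLASS_SCALAR,
)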
@typing_extensions.final
class Summary(google.protobuf.message.Message):
"""A Summary is a set of named values to be displayed by the
visualizer.
Summaries are produced regularly during training, as controlled by
the "summary_interval_secs" attribute of the training operation.
Summaries are also produced at the end of an evaluation.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Image(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
HEIGHT_FIELD_NUMBER: builtins.int
WIDTH_FIELD_NUMBER: builtins.int
COLORSPACE_FIELD_NUMBER: builtins.int
ENCODED_IMAGE_STRING_FIELD_NUMBER: builtins.int
height: builtins.int
"""Dimensions of the image."""
width: builtins.int
colorspace: builtins.int
"""Valid colorspace values are
1 - grayscale
2 - grayscale + alpha
3 - RGB
4 - RGBA
5 - DIGITAL_YUV
6 - BGRA
"""
encoded_image_string: builtins.bytes
"""Image data in encoded format. All image formats supported by
image_codec::CoderUtil can be stored here.
"""
def __init__(
self,
*,
height: builtins.int | None = ...,
width: builtins.int | None = ...,
colorspace: builtins.int | None = ...,
encoded_image_string: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["colorspace", b"colorspace", "encoded_image_string", b"encoded_image_string", "height", b"height", "width", b"width"]) -> None: ...
@typing_extensions.final
class Audio(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SAMPLE_RATE_FIELD_NUMBER: builtins.int
NUM_CHANNELS_FIELD_NUMBER: builtins.int
LENGTH_FRAMES_FIELD_NUMBER: builtins.int
ENCODED_AUDIO_STRING_FIELD_NUMBER: builtins.int
CONTENT_TYPE_FIELD_NUMBER: builtins.int
sample_rate: builtins.float
"""Sample rate of the audio in Hz."""
num_channels: builtins.int
"""Number of channels of audio."""
length_frames: builtins.int
"""Length of the audio in frames (samples per channel)."""
encoded_audio_string: builtins.bytes
"""Encoded audio data and its associated RFC 2045 content type (e.g.
"audio/wav").
"""
content_type: builtins.str
def __init__(
self,
*,
sample_rate: builtins.float | None = ...,
num_channels: builtins.int | None = ...,
length_frames: builtins.int | None = ...,
encoded_audio_string: builtins.bytes | None = ...,
content_type: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["content_type", b"content_type", "encoded_audio_string", b"encoded_audio_string", "length_frames", b"length_frames", "num_channels", b"num_channels", "sample_rate", b"sample_rate"]) -> None: ...
@typing_extensions.final
class Value(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_NAME_FIELD_NUMBER: builtins.int
TAG_FIELD_NUMBER: builtins.int
METADATA_FIELD_NUMBER: builtins.int
SIMPLE_VALUE_FIELD_NUMBER: builtins.int
OBSOLETE_OLD_STYLE_HISTOGRAM_FIELD_NUMBER: builtins.int
IMAGE_FIELD_NUMBER: builtins.int
HISTO_FIELD_NUMBER: builtins.int
AUDIO_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
node_name: builtins.str
"""This field is deprecated and will not be set."""
tag: builtins.str
"""Tag name for the data. Used by TensorBoard plugins to organize data. Tags
are often organized by scope (which contains slashes to convey
hierarchy). For example: foo/bar/0
"""
@property
def metadata(self) -> global___SummaryMetadata:
"""Contains metadata on the summary value such as which plugins may use it.
Take note that many summary values may lack a metadata field. This is
because the FileWriter only keeps a metadata object on the first summary
value with a certain tag for each tag. TensorBoard then remembers which
tags are associated with which plugins. This saves space.
"""
simple_value: builtins.float
obsolete_old_style_histogram: builtins.bytes
@property
def image(self) -> global___Summary.Image: ...
@property
def histo(self) -> tensorflow.tsl.protobuf.histogram_pb2.HistogramProto: ...
@property
def audio(self) -> global___Summary.Audio: ...
@property
def tensor(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ...
def __init__(
self,
*,
node_name: builtins.str | None = ...,
tag: builtins.str | None = ...,
metadata: global___SummaryMetadata | None = ...,
simple_value: builtins.float | None = ...,
obsolete_old_style_histogram: builtins.bytes | None = ...,
image: global___Summary.Image | None = ...,
histo: tensorflow.tsl.protobuf.histogram_pb2.HistogramProto | None = ...,
audio: global___Summary.Audio | None = ...,
tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["audio", b"audio", "histo", b"histo", "image", b"image", "metadata", b"metadata", "obsolete_old_style_histogram", b"obsolete_old_style_histogram", "simple_value", b"simple_value", "tensor", b"tensor", "value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["audio", b"audio", "histo", b"histo", "image", b"image", "metadata", b"metadata", "node_name", b"node_name", "obsolete_old_style_histogram", b"obsolete_old_style_histogram", "simple_value", b"simple_value", "tag", b"tag", "tensor", b"tensor", "value", b"value"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["simple_value", "obsolete_old_style_histogram", "image", "histo", "audio", "tensor"] | None: ...
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Summary.Value]:
"""Set of values for the summary."""
def __init__(
self,
*,
value: collections.abc.Iterable[global___Summary.Value] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
global___Summary = Summary
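# Illustrative sketch, not part of the generated stub: exactly one member of
# the "value" oneof is set per Summary.Value, which WhichOneof reports.
from tensorflow.core.framework import summary_pb2

summary = summary_pb2.Summary(
    value=[summary_pb2.Summary.Value(tag="loss", simple_value=0.25)]
)
assert summary.value[0].WhichOneof("value") == "simple_value"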

View File

@@ -0,0 +1,45 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.framework.allocation_description_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TensorDescription(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
ALLOCATION_DESCRIPTION_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
"""Data type of tensor elements"""
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
"""Shape of the tensor."""
@property
def allocation_description(self) -> tensorflow.core.framework.allocation_description_pb2.AllocationDescription:
"""Information about the size and allocator used for the data"""
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
allocation_description: tensorflow.core.framework.allocation_description_pb2.AllocationDescription | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["allocation_description", b"allocation_description", "shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allocation_description", b"allocation_description", "dtype", b"dtype", "shape", b"shape"]) -> None: ...
global___TensorDescription = TensorDescription

View File

@@ -0,0 +1,168 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.resource_handle_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TensorProto(google.protobuf.message.Message):
"""Protocol buffer representing a tensor."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
TENSOR_SHAPE_FIELD_NUMBER: builtins.int
VERSION_NUMBER_FIELD_NUMBER: builtins.int
TENSOR_CONTENT_FIELD_NUMBER: builtins.int
HALF_VAL_FIELD_NUMBER: builtins.int
FLOAT_VAL_FIELD_NUMBER: builtins.int
DOUBLE_VAL_FIELD_NUMBER: builtins.int
INT_VAL_FIELD_NUMBER: builtins.int
STRING_VAL_FIELD_NUMBER: builtins.int
SCOMPLEX_VAL_FIELD_NUMBER: builtins.int
INT64_VAL_FIELD_NUMBER: builtins.int
BOOL_VAL_FIELD_NUMBER: builtins.int
DCOMPLEX_VAL_FIELD_NUMBER: builtins.int
RESOURCE_HANDLE_VAL_FIELD_NUMBER: builtins.int
VARIANT_VAL_FIELD_NUMBER: builtins.int
UINT32_VAL_FIELD_NUMBER: builtins.int
UINT64_VAL_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def tensor_shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
"""Shape of the tensor. TODO(touts): sort out the 0-rank issues."""
version_number: builtins.int
"""Only one of the representations below is set, one of "tensor_contents" and
the "xxx_val" attributes. We are not using oneof because as oneofs cannot
contain repeated fields it would require another extra set of messages.
Version number.
In version 0, if the "repeated xxx" representations contain only one
element, that element is repeated to fill the shape. This makes it easy
to represent a constant Tensor with a single value.
"""
tensor_content: builtins.bytes
"""Serialized raw tensor content from either Tensor::AsProtoTensorContent or
memcpy in tensorflow::grpc::EncodeTensorToByteBuffer. This representation
can be used for all tensor types. The purpose of this representation is to
reduce serialization overhead during RPC call by avoiding serialization of
many repeated small items.
"""
@property
def half_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Type specific representations that make it easy to create tensor protos in
all languages. Only the representation corresponding to "dtype" can
be set. The values hold the flattened representation of the tensor in
row major order.
DT_HALF, DT_BFLOAT16. Note that since protobuf has no int16 type, we'll
have some pointless zero padding for each value here.
"""
@property
def float_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
"""DT_FLOAT."""
@property
def double_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
"""DT_DOUBLE."""
@property
def int_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""DT_INT32, DT_INT16, DT_UINT16, DT_INT8, DT_UINT8."""
@property
def string_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]:
"""DT_STRING"""
@property
def scomplex_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
"""DT_COMPLEX64. scomplex_val(2*i) and scomplex_val(2*i+1) are real
and imaginary parts of i-th single precision complex.
"""
@property
def int64_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""DT_INT64"""
@property
def bool_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]:
"""DT_BOOL"""
@property
def dcomplex_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
"""DT_COMPLEX128. dcomplex_val(2*i) and dcomplex_val(2*i+1) are real
and imaginary parts of i-th double precision complex.
"""
@property
def resource_handle_val(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto]:
"""DT_RESOURCE"""
@property
def variant_val(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___VariantTensorDataProto]:
"""DT_VARIANT"""
@property
def uint32_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""DT_UINT32"""
@property
def uint64_val(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""DT_UINT64"""
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
tensor_shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
version_number: builtins.int | None = ...,
tensor_content: builtins.bytes | None = ...,
half_val: collections.abc.Iterable[builtins.int] | None = ...,
float_val: collections.abc.Iterable[builtins.float] | None = ...,
double_val: collections.abc.Iterable[builtins.float] | None = ...,
int_val: collections.abc.Iterable[builtins.int] | None = ...,
string_val: collections.abc.Iterable[builtins.bytes] | None = ...,
scomplex_val: collections.abc.Iterable[builtins.float] | None = ...,
int64_val: collections.abc.Iterable[builtins.int] | None = ...,
bool_val: collections.abc.Iterable[builtins.bool] | None = ...,
dcomplex_val: collections.abc.Iterable[builtins.float] | None = ...,
resource_handle_val: collections.abc.Iterable[tensorflow.core.framework.resource_handle_pb2.ResourceHandleProto] | None = ...,
variant_val: collections.abc.Iterable[global___VariantTensorDataProto] | None = ...,
uint32_val: collections.abc.Iterable[builtins.int] | None = ...,
uint64_val: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bool_val", b"bool_val", "dcomplex_val", b"dcomplex_val", "double_val", b"double_val", "dtype", b"dtype", "float_val", b"float_val", "half_val", b"half_val", "int64_val", b"int64_val", "int_val", b"int_val", "resource_handle_val", b"resource_handle_val", "scomplex_val", b"scomplex_val", "string_val", b"string_val", "tensor_content", b"tensor_content", "tensor_shape", b"tensor_shape", "uint32_val", b"uint32_val", "uint64_val", b"uint64_val", "variant_val", b"variant_val", "version_number", b"version_number"]) -> None: ...
global___TensorProto = TensorProto
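# Illustrative sketch, not part of the generated stub: a 2x2 DT_FLOAT tensor
# using the "xxx_val" representation; only the field matching dtype is set.
# Module paths match the imports above.
from tensorflow.core.framework import tensor_pb2, tensor_shape_pb2, types_pb2

proto = tensor_pb2.TensorProto(
    dtype=types_pb2.DT_FLOAT,
    tensor_shape=tensor_shape_pb2.TensorShapeProto(
        dim=[
            tensor_shape_pb2.TensorShapeProto.Dim(size=2),
            tensor_shape_pb2.TensorShapeProto.Dim(size=2),
        ]
    ),
    # In version 0, a single element here would be repeated to fill the shape.
    float_val=[1.0, 2.0, 3.0, 4.0],
)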
@typing_extensions.final
class VariantTensorDataProto(google.protobuf.message.Message):
"""Protocol buffer representing the serialization format of DT_VARIANT tensors."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_NAME_FIELD_NUMBER: builtins.int
METADATA_FIELD_NUMBER: builtins.int
TENSORS_FIELD_NUMBER: builtins.int
type_name: builtins.str
"""Name of the type of objects being serialized."""
metadata: builtins.bytes
"""Portions of the object that are not Tensors."""
@property
def tensors(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorProto]:
"""Tensors contained within objects being serialized."""
def __init__(
self,
*,
type_name: builtins.str | None = ...,
metadata: builtins.bytes | None = ...,
tensors: collections.abc.Iterable[global___TensorProto] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["metadata", b"metadata", "tensors", b"tensors", "type_name", b"type_name"]) -> None: ...
global___VariantTensorDataProto = VariantTensorDataProto

View File

@@ -0,0 +1,81 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol buffer representing the shape of tensors."""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TensorShapeProto(google.protobuf.message.Message):
"""Dimensions of a tensor."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Dim(google.protobuf.message.Message):
"""One dimension of the tensor."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SIZE_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
size: builtins.int
"""Size of the tensor in that dimension.
This value must be >= -1, but values of -1 are reserved for "unknown"
shapes (values of -1 mean "unknown" dimension). Certain wrappers
that work with TensorShapeProto may fail at runtime when deserializing
a TensorShapeProto containing a dim value of -1.
"""
name: builtins.str
"""Optional name of the tensor dimension."""
def __init__(
self,
*,
size: builtins.int | None = ...,
name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "size", b"size"]) -> None: ...
DIM_FIELD_NUMBER: builtins.int
UNKNOWN_RANK_FIELD_NUMBER: builtins.int
@property
def dim(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorShapeProto.Dim]:
"""Dimensions of the tensor, such as {"input", 30}, {"output", 40}
for a 30 x 40 2D tensor. If an entry has size -1, this
corresponds to a dimension of unknown size. The names are
optional.
The order of entries in "dim" matters: It indicates the layout of the
values in the tensor in-memory representation.
The first entry in "dim" is the outermost dimension used to layout the
values, the last entry is the innermost dimension. This matches the
in-memory layout of RowMajor Eigen tensors.
If "dim.size()" > 0, "unknown_rank" must be false.
"""
unknown_rank: builtins.bool
"""If true, the number of dimensions in the shape is unknown.
If true, "dim.size()" must be 0.
"""
def __init__(
self,
*,
dim: collections.abc.Iterable[global___TensorShapeProto.Dim] | None = ...,
unknown_rank: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dim", b"dim", "unknown_rank", b"unknown_rank"]) -> None: ...
global___TensorShapeProto = TensorShapeProto
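# Illustrative sketch, not part of the generated stub: a 30 x 40 shape with
# named dimensions, and a shape of unknown rank (which must carry no dims).
from tensorflow.core.framework import tensor_shape_pb2

known = tensor_shape_pb2.TensorShapeProto(
    dim=[
        tensor_shape_pb2.TensorShapeProto.Dim(size=30, name="input"),
        tensor_shape_pb2.TensorShapeProto.Dim(size=40, name="output"),
    ]
)
unknown = tensor_shape_pb2.TensorShapeProto(unknown_rank=True)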

View File

@@ -0,0 +1,65 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol buffer representing slices of a tensor"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TensorSliceProto(google.protobuf.message.Message):
"""Can only be interpreted if you know the corresponding TensorShape."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class Extent(google.protobuf.message.Message):
"""Extent of the slice in one dimension.
Either both attributes must be set, or neither. When neither is set, it
means: all data in that dimension.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
START_FIELD_NUMBER: builtins.int
LENGTH_FIELD_NUMBER: builtins.int
start: builtins.int
"""Start index of the slice, starting at 0."""
length: builtins.int
def __init__(
self,
*,
start: builtins.int | None = ...,
length: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["has_length", b"has_length", "length", b"length"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["has_length", b"has_length", "length", b"length", "start", b"start"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["has_length", b"has_length"]) -> typing_extensions.Literal["length"] | None: ...
EXTENT_FIELD_NUMBER: builtins.int
@property
def extent(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorSliceProto.Extent]:
"""Extent of the slice in all tensor dimensions.
Must have one entry for each of the dimension of the tensor that this
slice belongs to. The order of sizes is the same as the order of
dimensions in the TensorShape.
"""
def __init__(
self,
*,
extent: collections.abc.Iterable[global___TensorSliceProto.Extent] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["extent", b"extent"]) -> None: ...
global___TensorSliceProto = TensorSliceProto
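# Illustrative sketch, not part of the generated stub, assuming this file is
# importable as tensorflow.core.framework.tensor_slice_pb2: rows 2..5 of the
# first dimension, all data in the second (an Extent with nothing set).
from tensorflow.core.framework import tensor_slice_pb2

slice_proto = tensor_slice_pb2.TensorSliceProto(
    extent=[
        tensor_slice_pb2.TensorSliceProto.Extent(start=2, length=4),
        tensor_slice_pb2.TensorSliceProto.Extent(),  # full extent in this dim
    ]
)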

View File

@@ -0,0 +1,178 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _DataType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataType.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DT_INVALID: _DataType.ValueType # 0
"""Not a legal value for DataType. Used to indicate a DataType field
has not been set.
"""
DT_FLOAT: _DataType.ValueType # 1
"""Data types that all computation devices are expected to be
capable to support.
"""
DT_DOUBLE: _DataType.ValueType # 2
DT_INT32: _DataType.ValueType # 3
DT_UINT8: _DataType.ValueType # 4
DT_INT16: _DataType.ValueType # 5
DT_INT8: _DataType.ValueType # 6
DT_STRING: _DataType.ValueType # 7
DT_COMPLEX64: _DataType.ValueType # 8
"""Single-precision complex"""
DT_INT64: _DataType.ValueType # 9
DT_BOOL: _DataType.ValueType # 10
DT_QINT8: _DataType.ValueType # 11
"""Quantized int8"""
DT_QUINT8: _DataType.ValueType # 12
"""Quantized uint8"""
DT_QINT32: _DataType.ValueType # 13
"""Quantized int32"""
DT_BFLOAT16: _DataType.ValueType # 14
"""Float32 truncated to 16 bits. Only for cast ops."""
DT_QINT16: _DataType.ValueType # 15
"""Quantized int16"""
DT_QUINT16: _DataType.ValueType # 16
"""Quantized uint16"""
DT_UINT16: _DataType.ValueType # 17
DT_COMPLEX128: _DataType.ValueType # 18
"""Double-precision complex"""
DT_HALF: _DataType.ValueType # 19
DT_RESOURCE: _DataType.ValueType # 20
DT_VARIANT: _DataType.ValueType # 21
"""Arbitrary C++ data types"""
DT_UINT32: _DataType.ValueType # 22
DT_UINT64: _DataType.ValueType # 23
DT_FLOAT_REF: _DataType.ValueType # 101
"""Do not use! These are only for parameters. Every enum above
should have a corresponding value below (verified by types_test).
"""
DT_DOUBLE_REF: _DataType.ValueType # 102
DT_INT32_REF: _DataType.ValueType # 103
DT_UINT8_REF: _DataType.ValueType # 104
DT_INT16_REF: _DataType.ValueType # 105
DT_INT8_REF: _DataType.ValueType # 106
DT_STRING_REF: _DataType.ValueType # 107
DT_COMPLEX64_REF: _DataType.ValueType # 108
DT_INT64_REF: _DataType.ValueType # 109
DT_BOOL_REF: _DataType.ValueType # 110
DT_QINT8_REF: _DataType.ValueType # 111
DT_QUINT8_REF: _DataType.ValueType # 112
DT_QINT32_REF: _DataType.ValueType # 113
DT_BFLOAT16_REF: _DataType.ValueType # 114
DT_QINT16_REF: _DataType.ValueType # 115
DT_QUINT16_REF: _DataType.ValueType # 116
DT_UINT16_REF: _DataType.ValueType # 117
DT_COMPLEX128_REF: _DataType.ValueType # 118
DT_HALF_REF: _DataType.ValueType # 119
DT_RESOURCE_REF: _DataType.ValueType # 120
DT_VARIANT_REF: _DataType.ValueType # 121
DT_UINT32_REF: _DataType.ValueType # 122
DT_UINT64_REF: _DataType.ValueType # 123
class DataType(_DataType, metaclass=_DataTypeEnumTypeWrapper):
"""(== suppress_warning documentation-presence ==)
LINT.IfChange
"""
DT_INVALID: DataType.ValueType # 0
"""Not a legal value for DataType. Used to indicate a DataType field
has not been set.
"""
DT_FLOAT: DataType.ValueType # 1
"""Data types that all computation devices are expected to be
capable to support.
"""
DT_DOUBLE: DataType.ValueType # 2
DT_INT32: DataType.ValueType # 3
DT_UINT8: DataType.ValueType # 4
DT_INT16: DataType.ValueType # 5
DT_INT8: DataType.ValueType # 6
DT_STRING: DataType.ValueType # 7
DT_COMPLEX64: DataType.ValueType # 8
"""Single-precision complex"""
DT_INT64: DataType.ValueType # 9
DT_BOOL: DataType.ValueType # 10
DT_QINT8: DataType.ValueType # 11
"""Quantized int8"""
DT_QUINT8: DataType.ValueType # 12
"""Quantized uint8"""
DT_QINT32: DataType.ValueType # 13
"""Quantized int32"""
DT_BFLOAT16: DataType.ValueType # 14
"""Float32 truncated to 16 bits. Only for cast ops."""
DT_QINT16: DataType.ValueType # 15
"""Quantized int16"""
DT_QUINT16: DataType.ValueType # 16
"""Quantized uint16"""
DT_UINT16: DataType.ValueType # 17
DT_COMPLEX128: DataType.ValueType # 18
"""Double-precision complex"""
DT_HALF: DataType.ValueType # 19
DT_RESOURCE: DataType.ValueType # 20
DT_VARIANT: DataType.ValueType # 21
"""Arbitrary C++ data types"""
DT_UINT32: DataType.ValueType # 22
DT_UINT64: DataType.ValueType # 23
DT_FLOAT_REF: DataType.ValueType # 101
"""Do not use! These are only for parameters. Every enum above
should have a corresponding value below (verified by types_test).
"""
DT_DOUBLE_REF: DataType.ValueType # 102
DT_INT32_REF: DataType.ValueType # 103
DT_UINT8_REF: DataType.ValueType # 104
DT_INT16_REF: DataType.ValueType # 105
DT_INT8_REF: DataType.ValueType # 106
DT_STRING_REF: DataType.ValueType # 107
DT_COMPLEX64_REF: DataType.ValueType # 108
DT_INT64_REF: DataType.ValueType # 109
DT_BOOL_REF: DataType.ValueType # 110
DT_QINT8_REF: DataType.ValueType # 111
DT_QUINT8_REF: DataType.ValueType # 112
DT_QINT32_REF: DataType.ValueType # 113
DT_BFLOAT16_REF: DataType.ValueType # 114
DT_QINT16_REF: DataType.ValueType # 115
DT_QUINT16_REF: DataType.ValueType # 116
DT_UINT16_REF: DataType.ValueType # 117
DT_COMPLEX128_REF: DataType.ValueType # 118
DT_HALF_REF: DataType.ValueType # 119
DT_RESOURCE_REF: DataType.ValueType # 120
DT_VARIANT_REF: DataType.ValueType # 121
DT_UINT32_REF: DataType.ValueType # 122
DT_UINT64_REF: DataType.ValueType # 123
global___DataType = DataType
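# Illustrative sketch, not part of the generated stub: every non-ref dtype has
# a parameter-only _REF twin offset by exactly 100, as the comments above note.
from tensorflow.core.framework import types_pb2

assert types_pb2.DT_FLOAT == 1
assert types_pb2.DT_FLOAT_REF == types_pb2.DT_FLOAT + 100
assert types_pb2.DataType.Name(types_pb2.DT_FLOAT) == "DT_FLOAT"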
@typing_extensions.final
class SerializedDType(google.protobuf.message.Message):
"""Represents a serialized tf.dtypes.Dtype"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DATATYPE_FIELD_NUMBER: builtins.int
datatype: global___DataType.ValueType
def __init__(
self,
*,
datatype: global___DataType.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["datatype", b"datatype"]) -> None: ...
global___SerializedDType = SerializedDType

View File

@@ -0,0 +1,192 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _VariableSynchronization:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _VariableSynchronizationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableSynchronization.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
VARIABLE_SYNCHRONIZATION_AUTO: _VariableSynchronization.ValueType # 0
"""`AUTO`: Indicates that the synchronization will be determined by the
current `DistributionStrategy` (e.g. with `MirroredStrategy` this would be
`ON_WRITE`).
"""
VARIABLE_SYNCHRONIZATION_NONE: _VariableSynchronization.ValueType # 1
"""`NONE`: Indicates that there will only be one copy of the variable, so
there is no need to sync.
"""
VARIABLE_SYNCHRONIZATION_ON_WRITE: _VariableSynchronization.ValueType # 2
"""`ON_WRITE`: Indicates that the variable will be updated across devices
every time it is written.
"""
VARIABLE_SYNCHRONIZATION_ON_READ: _VariableSynchronization.ValueType # 3
"""`ON_READ`: Indicates that the variable will be aggregated across devices
when it is read (e.g. when checkpointing or when evaluating an op that uses
the variable).
"""
class VariableSynchronization(_VariableSynchronization, metaclass=_VariableSynchronizationEnumTypeWrapper):
"""Indicates when a distributed variable will be synced."""
VARIABLE_SYNCHRONIZATION_AUTO: VariableSynchronization.ValueType # 0
"""`AUTO`: Indicates that the synchronization will be determined by the
current `DistributionStrategy` (e.g. with `MirroredStrategy` this would be
`ON_WRITE`).
"""
VARIABLE_SYNCHRONIZATION_NONE: VariableSynchronization.ValueType # 1
"""`NONE`: Indicates that there will only be one copy of the variable, so
there is no need to sync.
"""
VARIABLE_SYNCHRONIZATION_ON_WRITE: VariableSynchronization.ValueType # 2
"""`ON_WRITE`: Indicates that the variable will be updated across devices
every time it is written.
"""
VARIABLE_SYNCHRONIZATION_ON_READ: VariableSynchronization.ValueType # 3
"""`ON_READ`: Indicates that the variable will be aggregated across devices
when it is read (e.g. when checkpointing or when evaluating an op that uses
the variable).
"""
global___VariableSynchronization = VariableSynchronization
class _VariableAggregation:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _VariableAggregationEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_VariableAggregation.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
VARIABLE_AGGREGATION_NONE: _VariableAggregation.ValueType # 0
"""`NONE`: This is the default, giving an error if you use a
variable-update operation with multiple replicas.
"""
VARIABLE_AGGREGATION_SUM: _VariableAggregation.ValueType # 1
"""`SUM`: Add the updates across replicas."""
VARIABLE_AGGREGATION_MEAN: _VariableAggregation.ValueType # 2
"""`MEAN`: Take the arithmetic mean ("average") of the updates across
replicas.
"""
VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA: _VariableAggregation.ValueType # 3
"""`ONLY_FIRST_REPLICA`: This is for when every replica is performing the same
update, but we only want to perform the update once. Used, e.g., for the
global step counter.
"""
class VariableAggregation(_VariableAggregation, metaclass=_VariableAggregationEnumTypeWrapper):
"""Indicates how a distributed variable will be aggregated."""
VARIABLE_AGGREGATION_NONE: VariableAggregation.ValueType # 0
"""`NONE`: This is the default, giving an error if you use a
variable-update operation with multiple replicas.
"""
VARIABLE_AGGREGATION_SUM: VariableAggregation.ValueType # 1
"""`SUM`: Add the updates across replicas."""
VARIABLE_AGGREGATION_MEAN: VariableAggregation.ValueType # 2
"""`MEAN`: Take the arithmetic mean ("average") of the updates across
replicas.
"""
VARIABLE_AGGREGATION_ONLY_FIRST_REPLICA: VariableAggregation.ValueType # 3
"""`ONLY_FIRST_REPLICA`: This is for when every replica is performing the same
update, but we only want to perform the update once. Used, e.g., for the
global step counter.
"""
global___VariableAggregation = VariableAggregation
@typing_extensions.final
class VariableDef(google.protobuf.message.Message):
"""Protocol buffer representing a Variable."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VARIABLE_NAME_FIELD_NUMBER: builtins.int
INITIAL_VALUE_NAME_FIELD_NUMBER: builtins.int
INITIALIZER_NAME_FIELD_NUMBER: builtins.int
SNAPSHOT_NAME_FIELD_NUMBER: builtins.int
SAVE_SLICE_INFO_DEF_FIELD_NUMBER: builtins.int
IS_RESOURCE_FIELD_NUMBER: builtins.int
TRAINABLE_FIELD_NUMBER: builtins.int
SYNCHRONIZATION_FIELD_NUMBER: builtins.int
AGGREGATION_FIELD_NUMBER: builtins.int
variable_name: builtins.str
"""Name of the variable tensor."""
initial_value_name: builtins.str
"""Name of the tensor holding the variable's initial value."""
initializer_name: builtins.str
"""Name of the initializer op."""
snapshot_name: builtins.str
"""Name of the snapshot tensor."""
@property
def save_slice_info_def(self) -> global___SaveSliceInfoDef:
"""Support for saving variables as slices of a larger variable."""
is_resource: builtins.bool
"""Whether to represent this as a ResourceVariable."""
trainable: builtins.bool
"""Whether this variable should be trained."""
synchronization: global___VariableSynchronization.ValueType
"""Indicates when a distributed variable will be synced."""
aggregation: global___VariableAggregation.ValueType
"""Indicates how a distributed variable will be aggregated."""
def __init__(
self,
*,
variable_name: builtins.str | None = ...,
initial_value_name: builtins.str | None = ...,
initializer_name: builtins.str | None = ...,
snapshot_name: builtins.str | None = ...,
save_slice_info_def: global___SaveSliceInfoDef | None = ...,
is_resource: builtins.bool | None = ...,
trainable: builtins.bool | None = ...,
synchronization: global___VariableSynchronization.ValueType | None = ...,
aggregation: global___VariableAggregation.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["save_slice_info_def", b"save_slice_info_def"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["aggregation", b"aggregation", "initial_value_name", b"initial_value_name", "initializer_name", b"initializer_name", "is_resource", b"is_resource", "save_slice_info_def", b"save_slice_info_def", "snapshot_name", b"snapshot_name", "synchronization", b"synchronization", "trainable", b"trainable", "variable_name", b"variable_name"]) -> None: ...
global___VariableDef = VariableDef
@typing_extensions.final
class SaveSliceInfoDef(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FULL_NAME_FIELD_NUMBER: builtins.int
FULL_SHAPE_FIELD_NUMBER: builtins.int
VAR_OFFSET_FIELD_NUMBER: builtins.int
VAR_SHAPE_FIELD_NUMBER: builtins.int
full_name: builtins.str
"""Name of the full variable of which this is a slice."""
@property
def full_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Shape of the full variable."""
@property
def var_offset(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Offset of this variable into the full variable."""
@property
def var_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Shape of this variable."""
def __init__(
self,
*,
full_name: builtins.str | None = ...,
full_shape: collections.abc.Iterable[builtins.int] | None = ...,
var_offset: collections.abc.Iterable[builtins.int] | None = ...,
var_shape: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["full_name", b"full_name", "full_shape", b"full_shape", "var_offset", b"var_offset", "var_shape", b"var_shape"]) -> None: ...
global___SaveSliceInfoDef = SaveSliceInfoDef
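# Illustrative sketch, not part of the generated stub, assuming this file is
# importable as tensorflow.core.framework.variable_pb2: a trainable resource
# variable stored as a 10x5 slice of a 100x5 full variable. Names are made up.
from tensorflow.core.framework import variable_pb2

var = variable_pb2.VariableDef(
    variable_name="dense/kernel:0",
    initializer_name="dense/kernel/Assign",
    snapshot_name="dense/kernel/Read/ReadVariableOp:0",
    is_resource=True,
    trainable=True,
    synchronization=variable_pb2.VARIABLE_SYNCHRONIZATION_AUTO,
    aggregation=variable_pb2.VARIABLE_AGGREGATION_NONE,
    save_slice_info_def=variable_pb2.SaveSliceInfoDef(
        full_name="dense/kernel",
        full_shape=[100, 5],
        var_offset=[0, 0],
        var_shape=[10, 5],
    ),
)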

View File

@@ -0,0 +1,56 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class VersionDef(google.protobuf.message.Message):
"""Version information for a piece of serialized data
There are different types of versions for each type of data
(GraphDef, etc.), but they all have the same common shape
described here.
Each consumer has "consumer" and "min_producer" versions (specified
elsewhere). A consumer is allowed to consume this data if
producer >= min_producer
consumer >= min_consumer
consumer not in bad_consumers
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PRODUCER_FIELD_NUMBER: builtins.int
MIN_CONSUMER_FIELD_NUMBER: builtins.int
BAD_CONSUMERS_FIELD_NUMBER: builtins.int
producer: builtins.int
"""The version of the code that produced this data."""
min_consumer: builtins.int
"""Any consumer below this version is not allowed to consume this data."""
@property
def bad_consumers(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Specific consumer versions which are disallowed (e.g. due to bugs)."""
def __init__(
self,
*,
producer: builtins.int | None = ...,
min_consumer: builtins.int | None = ...,
bad_consumers: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"]) -> None: ...
global___VersionDef = VersionDef
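# Illustrative sketch, not part of the generated stub: the compatibility rule
# from the docstring written out, assuming this file is importable as
# tensorflow.core.framework.versions_pb2. The numbers are made up.
from tensorflow.core.framework import versions_pb2

versions = versions_pb2.VersionDef(producer=1205, min_consumer=12, bad_consumers=[26, 27])

def is_compatible(consumer: int, min_producer: int) -> bool:
    # producer >= min_producer, consumer >= min_consumer, consumer not bad.
    return (
        versions.producer >= min_producer
        and consumer >= versions.min_consumer
        and consumer not in versions.bad_consumers
    )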

View File

@@ -0,0 +1,162 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class MemAllocatorStats(google.protobuf.message.Message):
"""Some of the data from AllocatorStats"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NUM_ALLOCS_FIELD_NUMBER: builtins.int
BYTES_IN_USE_FIELD_NUMBER: builtins.int
PEAK_BYTES_IN_USE_FIELD_NUMBER: builtins.int
LARGEST_ALLOC_SIZE_FIELD_NUMBER: builtins.int
FRAGMENTATION_METRIC_FIELD_NUMBER: builtins.int
num_allocs: builtins.int
bytes_in_use: builtins.int
peak_bytes_in_use: builtins.int
largest_alloc_size: builtins.int
fragmentation_metric: builtins.float
def __init__(
self,
*,
num_allocs: builtins.int | None = ...,
bytes_in_use: builtins.int | None = ...,
peak_bytes_in_use: builtins.int | None = ...,
largest_alloc_size: builtins.int | None = ...,
fragmentation_metric: builtins.float | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bytes_in_use", b"bytes_in_use", "fragmentation_metric", b"fragmentation_metric", "largest_alloc_size", b"largest_alloc_size", "num_allocs", b"num_allocs", "peak_bytes_in_use", b"peak_bytes_in_use"]) -> None: ...
global___MemAllocatorStats = MemAllocatorStats
@typing_extensions.final
class MemChunk(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ADDRESS_FIELD_NUMBER: builtins.int
SIZE_FIELD_NUMBER: builtins.int
REQUESTED_SIZE_FIELD_NUMBER: builtins.int
BIN_FIELD_NUMBER: builtins.int
OP_NAME_FIELD_NUMBER: builtins.int
FREED_AT_COUNT_FIELD_NUMBER: builtins.int
ACTION_COUNT_FIELD_NUMBER: builtins.int
IN_USE_FIELD_NUMBER: builtins.int
STEP_ID_FIELD_NUMBER: builtins.int
address: builtins.int
size: builtins.int
requested_size: builtins.int
bin: builtins.int
op_name: builtins.str
freed_at_count: builtins.int
action_count: builtins.int
in_use: builtins.bool
step_id: builtins.int
def __init__(
self,
*,
address: builtins.int | None = ...,
size: builtins.int | None = ...,
requested_size: builtins.int | None = ...,
bin: builtins.int | None = ...,
op_name: builtins.str | None = ...,
freed_at_count: builtins.int | None = ...,
action_count: builtins.int | None = ...,
in_use: builtins.bool | None = ...,
step_id: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["action_count", b"action_count", "address", b"address", "bin", b"bin", "freed_at_count", b"freed_at_count", "in_use", b"in_use", "op_name", b"op_name", "requested_size", b"requested_size", "size", b"size", "step_id", b"step_id"]) -> None: ...
global___MemChunk = MemChunk
@typing_extensions.final
class BinSummary(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BIN_FIELD_NUMBER: builtins.int
TOTAL_BYTES_IN_USE_FIELD_NUMBER: builtins.int
TOTAL_BYTES_IN_BIN_FIELD_NUMBER: builtins.int
TOTAL_CHUNKS_IN_USE_FIELD_NUMBER: builtins.int
TOTAL_CHUNKS_IN_BIN_FIELD_NUMBER: builtins.int
bin: builtins.int
total_bytes_in_use: builtins.int
total_bytes_in_bin: builtins.int
total_chunks_in_use: builtins.int
total_chunks_in_bin: builtins.int
def __init__(
self,
*,
bin: builtins.int | None = ...,
total_bytes_in_use: builtins.int | None = ...,
total_bytes_in_bin: builtins.int | None = ...,
total_chunks_in_use: builtins.int | None = ...,
total_chunks_in_bin: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bin", b"bin", "total_bytes_in_bin", b"total_bytes_in_bin", "total_bytes_in_use", b"total_bytes_in_use", "total_chunks_in_bin", b"total_chunks_in_bin", "total_chunks_in_use", b"total_chunks_in_use"]) -> None: ...
global___BinSummary = BinSummary
@typing_extensions.final
class SnapShot(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ACTION_COUNT_FIELD_NUMBER: builtins.int
SIZE_FIELD_NUMBER: builtins.int
action_count: builtins.int
size: builtins.int
def __init__(
self,
*,
action_count: builtins.int | None = ...,
size: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["action_count", b"action_count", "size", b"size"]) -> None: ...
global___SnapShot = SnapShot
@typing_extensions.final
class MemoryDump(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ALLOCATOR_NAME_FIELD_NUMBER: builtins.int
BIN_SUMMARY_FIELD_NUMBER: builtins.int
CHUNK_FIELD_NUMBER: builtins.int
SNAP_SHOT_FIELD_NUMBER: builtins.int
STATS_FIELD_NUMBER: builtins.int
allocator_name: builtins.str
@property
def bin_summary(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BinSummary]: ...
@property
def chunk(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemChunk]: ...
@property
def snap_shot(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SnapShot]: ...
@property
def stats(self) -> global___MemAllocatorStats: ...
def __init__(
self,
*,
allocator_name: builtins.str | None = ...,
bin_summary: collections.abc.Iterable[global___BinSummary] | None = ...,
chunk: collections.abc.Iterable[global___MemChunk] | None = ...,
snap_shot: collections.abc.Iterable[global___SnapShot] | None = ...,
stats: global___MemAllocatorStats | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["stats", b"stats"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allocator_name", b"allocator_name", "bin_summary", b"bin_summary", "chunk", b"chunk", "snap_shot", b"snap_shot", "stats", b"stats"]) -> None: ...
global___MemoryDump = MemoryDump
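Together these messages describe a BFC-allocator memory dump: per-chunk records (MemChunk), per-bin rollups (BinSummary), allocation-timeline snapshots (SnapShot) and aggregate counters (MemAllocatorStats), all collected under MemoryDump. A minimal usage sketch against these stubs follows; the runtime module path (assumed here to be tensorflow.core.protobuf.bfc_memory_map_pb2) is not shown in this diff.
# Hypothetical sketch; the module path is an assumption, not part of this diff.
from tensorflow.core.protobuf.bfc_memory_map_pb2 import MemChunk, MemoryDump

dump = MemoryDump(
    allocator_name="GPU_0_bfc",
    chunk=[MemChunk(address=0x7F00, size=1024, requested_size=1000, in_use=True)],
)
# Per the stub, only the singular submessage field `stats` supports HasField;
# scalar and repeated fields do not track presence.
if dump.HasField("stats"):
    print(dump.stats.peak_bytes_in_use)
bytes_in_use = sum(c.size for c in dump.chunk if c.in_use)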

View File

@@ -0,0 +1,136 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class JobDef(google.protobuf.message.Message):
"""This file contains protos to be used when defining a TensorFlow
cluster.
EXAMPLES
--------
1. A single-process cluster, containing "/job:local/task:0".
Cluster:
job { name: 'local' tasks { key: 0 value: 'localhost:2222' } }
Server:
cluster { $CLUSTER } job_name: 'local' task_index: 0
2. A two-process cluster, containing "/job:local/task:{0,1}".
Cluster:
job { name: 'local' tasks { key: 0 value: 'localhost:2222' }
tasks { key: 1 value: 'localhost:2223' } }
Servers:
cluster { $CLUSTER } job_name: 'local' task_index: 0
cluster { $CLUSTER } job_name: 'local' task_index: 1
3. A two-job cluster, containing "/job:worker/task:{0,1,2}" and
"/job:ps/task:{0,1}".
Cluster:
job { name: 'worker' tasks { key: 0 value: 'worker1:2222' }
tasks { key: 1 value: 'worker2:2222' }
tasks { key: 2 value: 'worker3:2222' } }
job { name: 'ps' tasks { key: 0 value: 'ps0:2222' }
tasks { key: 1 value: 'ps1:2222' } }
Servers:
cluster { $CLUSTER } job_name: 'worker' task_index: 0
cluster { $CLUSTER } job_name: 'worker' task_index: 1
cluster { $CLUSTER } job_name: 'worker' task_index: 2
cluster { $CLUSTER } job_name: 'ps' task_index: 0
cluster { $CLUSTER } job_name: 'ps' task_index: 1
Defines a single job in a TensorFlow cluster.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class TasksEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
value: builtins.str
def __init__(
self,
*,
key: builtins.int | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
TASKS_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name of this job."""
@property
def tasks(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, builtins.str]:
"""Mapping from task ID to "hostname:port" string.
If the `name` field contains "worker", and the `tasks` map contains a
mapping from 7 to "example.org:2222", then the device prefix
"/job:worker/task:7" will be assigned to "example.org:2222".
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
tasks: collections.abc.Mapping[builtins.int, builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "tasks", b"tasks"]) -> None: ...
global___JobDef = JobDef
@typing_extensions.final
class ClusterDef(google.protobuf.message.Message):
"""Defines a TensorFlow cluster as a set of jobs."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
JOB_FIELD_NUMBER: builtins.int
@property
def job(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___JobDef]:
"""The jobs that comprise the cluster."""
def __init__(
self,
*,
job: collections.abc.Iterable[global___JobDef] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["job", b"job"]) -> None: ...
global___ClusterDef = ClusterDef
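The textproto examples in the docstring map directly onto these stubs. A hedged sketch of example 3's two-job cluster, assuming the runtime module is tensorflow.core.protobuf.cluster_pb2 (the file path is not visible in this diff):
# Hypothetical sketch; the import path is an assumption.
from tensorflow.core.protobuf.cluster_pb2 import ClusterDef, JobDef

cluster = ClusterDef(
    job=[
        JobDef(name="worker", tasks={0: "worker1:2222",
                                     1: "worker2:2222",
                                     2: "worker3:2222"}),
        JobDef(name="ps", tasks={0: "ps0:2222", 1: "ps1:2222"}),
    ]
)
# `tasks` maps task index to "hostname:port": task 0 of job "ps" backs the
# device prefix "/job:ps/task:0".
assert cluster.job[1].tasks[0] == "ps0:2222"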

View File

@@ -0,0 +1,40 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.protobuf.struct_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class CompositeTensorVariantMetadata(google.protobuf.message.Message):
"""Metadata for CompositeTensorVariant, used when serializing as Variant.
We define a new message here (rather than directly using TypeSpecProto for
the metadata string) to retain flexibility to change the metadata encoding
to support additional features.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_SPEC_PROTO_FIELD_NUMBER: builtins.int
@property
def type_spec_proto(self) -> tensorflow.core.protobuf.struct_pb2.TypeSpecProto: ...
def __init__(
self,
*,
type_spec_proto: tensorflow.core.protobuf.struct_pb2.TypeSpecProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["type_spec_proto", b"type_spec_proto"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["type_spec_proto", b"type_spec_proto"]) -> None: ...
global___CompositeTensorVariantMetadata = CompositeTensorVariantMetadata
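Because `type_spec_proto` is a singular submessage, its presence is observable via HasField, exactly as the stub advertises. A small sketch, assuming the runtime module is tensorflow.core.protobuf.composite_tensor_variant_pb2:
# Hypothetical sketch; the first import path is an assumption.
from tensorflow.core.protobuf.composite_tensor_variant_pb2 import (
    CompositeTensorVariantMetadata,
)
from tensorflow.core.protobuf.struct_pb2 import TypeSpecProto

meta = CompositeTensorVariantMetadata(type_spec_proto=TypeSpecProto())
assert meta.HasField("type_spec_proto")
meta.ClearField("type_spec_proto")
assert not meta.HasField("type_spec_proto")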

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,194 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ValuesDef(google.protobuf.message.Message):
"""Control flow context related protocol buffers.
Protocol buffer representing the values in ControlFlowContext.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ExternalValuesEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
VALUES_FIELD_NUMBER: builtins.int
EXTERNAL_VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Value names that have been seen in this context."""
@property
def external_values(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""Value names referenced by but external to this context."""
def __init__(
self,
*,
values: collections.abc.Iterable[builtins.str] | None = ...,
external_values: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["external_values", b"external_values", "values", b"values"]) -> None: ...
global___ValuesDef = ValuesDef
@typing_extensions.final
class ControlFlowContextDef(google.protobuf.message.Message):
"""Container for any kind of control flow context. Any other control flow
contexts that are added below should also be added here.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
COND_CTXT_FIELD_NUMBER: builtins.int
WHILE_CTXT_FIELD_NUMBER: builtins.int
@property
def cond_ctxt(self) -> global___CondContextDef: ...
@property
def while_ctxt(self) -> global___WhileContextDef: ...
def __init__(
self,
*,
cond_ctxt: global___CondContextDef | None = ...,
while_ctxt: global___WhileContextDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["cond_ctxt", b"cond_ctxt", "ctxt", b"ctxt", "while_ctxt", b"while_ctxt"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["ctxt", b"ctxt"]) -> typing_extensions.Literal["cond_ctxt", "while_ctxt"] | None: ...
global___ControlFlowContextDef = ControlFlowContextDef
@typing_extensions.final
class CondContextDef(google.protobuf.message.Message):
"""Protocol buffer representing a CondContext object."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CONTEXT_NAME_FIELD_NUMBER: builtins.int
PRED_NAME_FIELD_NUMBER: builtins.int
PIVOT_NAME_FIELD_NUMBER: builtins.int
BRANCH_FIELD_NUMBER: builtins.int
VALUES_DEF_FIELD_NUMBER: builtins.int
NESTED_CONTEXTS_FIELD_NUMBER: builtins.int
context_name: builtins.str
"""Name of the context."""
pred_name: builtins.str
"""Name of the pred tensor."""
pivot_name: builtins.str
"""Name of the pivot tensor."""
branch: builtins.int
"""Branch prediction. 0 or 1."""
@property
def values_def(self) -> global___ValuesDef:
"""Values and external values in control flow context."""
@property
def nested_contexts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]:
"""Contexts contained inside this context (e.g. nested conds)."""
def __init__(
self,
*,
context_name: builtins.str | None = ...,
pred_name: builtins.str | None = ...,
pivot_name: builtins.str | None = ...,
branch: builtins.int | None = ...,
values_def: global___ValuesDef | None = ...,
nested_contexts: collections.abc.Iterable[global___ControlFlowContextDef] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["values_def", b"values_def"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["branch", b"branch", "context_name", b"context_name", "nested_contexts", b"nested_contexts", "pivot_name", b"pivot_name", "pred_name", b"pred_name", "values_def", b"values_def"]) -> None: ...
global___CondContextDef = CondContextDef
@typing_extensions.final
class WhileContextDef(google.protobuf.message.Message):
"""Protocol buffer representing a WhileContext object."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CONTEXT_NAME_FIELD_NUMBER: builtins.int
PARALLEL_ITERATIONS_FIELD_NUMBER: builtins.int
BACK_PROP_FIELD_NUMBER: builtins.int
SWAP_MEMORY_FIELD_NUMBER: builtins.int
PIVOT_NAME_FIELD_NUMBER: builtins.int
PIVOT_FOR_PRED_NAME_FIELD_NUMBER: builtins.int
PIVOT_FOR_BODY_NAME_FIELD_NUMBER: builtins.int
LOOP_EXIT_NAMES_FIELD_NUMBER: builtins.int
LOOP_ENTER_NAMES_FIELD_NUMBER: builtins.int
VALUES_DEF_FIELD_NUMBER: builtins.int
MAXIMUM_ITERATIONS_NAME_FIELD_NUMBER: builtins.int
NESTED_CONTEXTS_FIELD_NUMBER: builtins.int
context_name: builtins.str
"""Name of the context."""
parallel_iterations: builtins.int
"""The number of iterations allowed to run in parallel."""
back_prop: builtins.bool
"""Whether backprop is enabled for this while loop."""
swap_memory: builtins.bool
"""Whether GPU-CPU memory swap is enabled for this loop."""
pivot_name: builtins.str
"""Name of the pivot tensor."""
pivot_for_pred_name: builtins.str
"""Name of the pivot_for_pred tensor."""
pivot_for_body_name: builtins.str
"""Name of the pivot_for_body tensor."""
@property
def loop_exit_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""List of names for exit tensors."""
@property
def loop_enter_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""List of names for enter tensors."""
@property
def values_def(self) -> global___ValuesDef:
"""Values and external values in control flow context."""
maximum_iterations_name: builtins.str
"""Optional name of the maximum_iterations tensor."""
@property
def nested_contexts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ControlFlowContextDef]:
"""Contexts contained inside this context (e.g. nested whiles)."""
def __init__(
self,
*,
context_name: builtins.str | None = ...,
parallel_iterations: builtins.int | None = ...,
back_prop: builtins.bool | None = ...,
swap_memory: builtins.bool | None = ...,
pivot_name: builtins.str | None = ...,
pivot_for_pred_name: builtins.str | None = ...,
pivot_for_body_name: builtins.str | None = ...,
loop_exit_names: collections.abc.Iterable[builtins.str] | None = ...,
loop_enter_names: collections.abc.Iterable[builtins.str] | None = ...,
values_def: global___ValuesDef | None = ...,
maximum_iterations_name: builtins.str | None = ...,
nested_contexts: collections.abc.Iterable[global___ControlFlowContextDef] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["values_def", b"values_def"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["back_prop", b"back_prop", "context_name", b"context_name", "loop_enter_names", b"loop_enter_names", "loop_exit_names", b"loop_exit_names", "maximum_iterations_name", b"maximum_iterations_name", "nested_contexts", b"nested_contexts", "parallel_iterations", b"parallel_iterations", "pivot_for_body_name", b"pivot_for_body_name", "pivot_for_pred_name", b"pivot_for_pred_name", "pivot_name", b"pivot_name", "swap_memory", b"swap_memory", "values_def", b"values_def"]) -> None: ...
global___WhileContextDef = WhileContextDef
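ControlFlowContextDef is a tagged union over the `ctxt` oneof, so a context is either a cond or a while, never both; nesting is expressed through `nested_contexts`. A hedged sketch, assuming the runtime module is tensorflow.core.protobuf.control_flow_pb2:
# Hypothetical sketch; the import path is an assumption.
from tensorflow.core.protobuf.control_flow_pb2 import (
    CondContextDef, ControlFlowContextDef, ValuesDef, WhileContextDef,
)

cond = CondContextDef(
    context_name="cond/ctx",
    pred_name="cond/pred:0",
    branch=1,
    values_def=ValuesDef(values=["cond/pred:0"]),
)
loop = WhileContextDef(
    context_name="while/ctx",
    parallel_iterations=10,
    nested_contexts=[ControlFlowContextDef(cond_ctxt=cond)],
)
# Setting cond_ctxt on the wrapper leaves while_ctxt unset:
assert loop.nested_contexts[0].WhichOneof("ctxt") == "cond_ctxt"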

View File

@@ -0,0 +1,112 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class CoordinatedJob(google.protobuf.message.Message):
"""Represents a job type and the number of tasks under this job.
For example, ("worker", 20) implies that there will be 20 worker tasks.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
NUM_TASKS_FIELD_NUMBER: builtins.int
name: builtins.str
num_tasks: builtins.int
def __init__(
self,
*,
name: builtins.str | None = ...,
num_tasks: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "num_tasks", b"num_tasks"]) -> None: ...
global___CoordinatedJob = CoordinatedJob
@typing_extensions.final
class CoordinationServiceConfig(google.protobuf.message.Message):
"""Coordination service configuration parameters.
The system picks appropriate values for fields that are not set.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SERVICE_TYPE_FIELD_NUMBER: builtins.int
SERVICE_LEADER_FIELD_NUMBER: builtins.int
ENABLE_HEALTH_CHECK_FIELD_NUMBER: builtins.int
CLUSTER_REGISTER_TIMEOUT_IN_MS_FIELD_NUMBER: builtins.int
HEARTBEAT_TIMEOUT_IN_MS_FIELD_NUMBER: builtins.int
COORDINATED_JOB_LIST_FIELD_NUMBER: builtins.int
SHUTDOWN_BARRIER_TIMEOUT_IN_MS_FIELD_NUMBER: builtins.int
AGENT_DESTRUCTION_WITHOUT_SHUTDOWN_FIELD_NUMBER: builtins.int
RECOVERABLE_JOBS_FIELD_NUMBER: builtins.int
service_type: builtins.str
"""Type of coordination service implementation to enable.
For example, setting the service type as "standalone" starts a service
instance on the leader task to provide the coordination services such as
heartbeats and consistent key-value store.
"""
service_leader: builtins.str
"""Address where the coordination service instance is hosted."""
enable_health_check: builtins.bool
"""Whether to enable the health check mechanism."""
cluster_register_timeout_in_ms: builtins.int
"""Maximum wait time for all members in the cluster to be registered."""
heartbeat_timeout_in_ms: builtins.int
"""Heartbeat timeout, if a task does not record heartbeat in this time
window, it will be considered disconnected.
Note: This is also used as a grace period to accept any heartbeats after
the agent has disconnected, to account for the lag time between the service
recording the state change and the agent stopping heartbeats.
"""
@property
def coordinated_job_list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___CoordinatedJob]: ...
shutdown_barrier_timeout_in_ms: builtins.int
"""Denotes how long to wait for all coordination agents to reach the barriers
(after the first shutdown request) before disconnecting together. If
set to 0, no barrier is imposed upon shutdown and each worker can
disconnect individually.
"""
agent_destruction_without_shutdown: builtins.bool
"""If set, agents do not make an explicit Shutdown() call. Service will only
find out about the disconnected agent via stale heartbeats. Used for
testing.
"""
@property
def recoverable_jobs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""The list of jobs which are recoverable. If a task in this list fails,
it will not propagate errors to other tasks.
If empty, no jobs will be recoverable and every task failure will cause
error propagation to other tasks.
"""
def __init__(
self,
*,
service_type: builtins.str | None = ...,
service_leader: builtins.str | None = ...,
enable_health_check: builtins.bool | None = ...,
cluster_register_timeout_in_ms: builtins.int | None = ...,
heartbeat_timeout_in_ms: builtins.int | None = ...,
coordinated_job_list: collections.abc.Iterable[global___CoordinatedJob] | None = ...,
shutdown_barrier_timeout_in_ms: builtins.int | None = ...,
agent_destruction_without_shutdown: builtins.bool | None = ...,
recoverable_jobs: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["agent_destruction_without_shutdown", b"agent_destruction_without_shutdown", "cluster_register_timeout_in_ms", b"cluster_register_timeout_in_ms", "coordinated_job_list", b"coordinated_job_list", "enable_health_check", b"enable_health_check", "heartbeat_timeout_in_ms", b"heartbeat_timeout_in_ms", "recoverable_jobs", b"recoverable_jobs", "service_leader", b"service_leader", "service_type", b"service_type", "shutdown_barrier_timeout_in_ms", b"shutdown_barrier_timeout_in_ms"]) -> None: ...
global___CoordinationServiceConfig = CoordinationServiceConfig
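A sketch of filling in this config; the field values are illustrative and the module path (tensorflow.core.protobuf.coordination_config_pb2) is assumed, not shown in this diff:
# Hypothetical sketch; import path and values are assumptions.
from tensorflow.core.protobuf.coordination_config_pb2 import (
    CoordinatedJob, CoordinationServiceConfig,
)

config = CoordinationServiceConfig(
    service_type="standalone",  # per the docstring above
    service_leader="/job:worker/replica:0/task:0",  # illustrative address
    heartbeat_timeout_in_ms=10_000,
    coordinated_job_list=[CoordinatedJob(name="worker", num_tasks=20)],
)
# Unset fields keep proto3 defaults; per the docstring, the system picks
# appropriate values for fields that are not set.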

View File

@@ -0,0 +1,66 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ErrorSourceProto(google.protobuf.message.Message):
"""If included as a payload, this message contains the error source information
where the error was raised.
URI: "type.googleapis.com/tensorflow.core.platform.ErrorSourceProto"
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _ErrorSource:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ErrorSourceEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ErrorSourceProto._ErrorSource.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: ErrorSourceProto._ErrorSource.ValueType # 0
TPU_COMPILE_OP: ErrorSourceProto._ErrorSource.ValueType # 1
TF_XLA_BRIDGE: ErrorSourceProto._ErrorSource.ValueType # 2
"""Old bridge."""
MLIR_BRIDGE_PHASE_1: ErrorSourceProto._ErrorSource.ValueType # 3
"""TPUBridge."""
MLIR_BRIDGE_PHASE_2: ErrorSourceProto._ErrorSource.ValueType # 4
"""LegalizeToHlo."""
EAGER_REMOTE_MGR: ErrorSourceProto._ErrorSource.ValueType # 5
"""eager::RemoteMgr."""
class ErrorSource(_ErrorSource, metaclass=_ErrorSourceEnumTypeWrapper): ...
UNKNOWN: ErrorSourceProto.ErrorSource.ValueType # 0
TPU_COMPILE_OP: ErrorSourceProto.ErrorSource.ValueType # 1
TF_XLA_BRIDGE: ErrorSourceProto.ErrorSource.ValueType # 2
"""Old bridge."""
MLIR_BRIDGE_PHASE_1: ErrorSourceProto.ErrorSource.ValueType # 3
"""TPUBridge."""
MLIR_BRIDGE_PHASE_2: ErrorSourceProto.ErrorSource.ValueType # 4
"""LegalizeToHlo."""
EAGER_REMOTE_MGR: ErrorSourceProto.ErrorSource.ValueType # 5
"""eager::RemoteMgr."""
ERROR_SOURCE_FIELD_NUMBER: builtins.int
error_source: global___ErrorSourceProto.ErrorSource.ValueType
def __init__(
self,
*,
error_source: global___ErrorSourceProto.ErrorSource.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["error_source", b"error_source"]) -> None: ...
global___ErrorSourceProto = ErrorSourceProto
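The nested `_ErrorSourceEnumTypeWrapper` means the usual protobuf enum helpers are available on `ErrorSourceProto.ErrorSource`. A sketch, assuming the runtime module is tensorflow.core.protobuf.core_platform_payloads_pb2 (the path is not shown in this diff):
# Hypothetical sketch; the import path is an assumption.
from tensorflow.core.protobuf.core_platform_payloads_pb2 import ErrorSourceProto

payload = ErrorSourceProto(error_source=ErrorSourceProto.TPU_COMPILE_OP)
# Name()/Value() round-trip between the int ValueType and its label.
assert ErrorSourceProto.ErrorSource.Name(payload.error_source) == "TPU_COMPILE_OP"
assert ErrorSourceProto.ErrorSource.Value("TF_XLA_BRIDGE") == 2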

View File

@@ -0,0 +1,231 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _DeploymentMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _DeploymentModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DeploymentMode.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEPLOYMENT_MODE_UNSPECIFIED: _DeploymentMode.ValueType # 0
DEPLOYMENT_MODE_COLOCATED: _DeploymentMode.ValueType # 1
"""tf.data service workers colocate with TF workers."""
DEPLOYMENT_MODE_REMOTE: _DeploymentMode.ValueType # 2
"""tf.data service workers run in dedicated tf.data hosts."""
DEPLOYMENT_MODE_HYBRID: _DeploymentMode.ValueType # 3
"""tf.data service workers run in colocated TF hosts and dedicated tf.data
hosts.
"""
class DeploymentMode(_DeploymentMode, metaclass=_DeploymentModeEnumTypeWrapper):
"""tf.data service deployment mode."""
DEPLOYMENT_MODE_UNSPECIFIED: DeploymentMode.ValueType # 0
DEPLOYMENT_MODE_COLOCATED: DeploymentMode.ValueType # 1
"""tf.data service workers colocate with TF workers."""
DEPLOYMENT_MODE_REMOTE: DeploymentMode.ValueType # 2
"""tf.data service workers run in dedicated tf.data hosts."""
DEPLOYMENT_MODE_HYBRID: DeploymentMode.ValueType # 3
"""tf.data service workers run in colocated TF hosts and dedicated tf.data
hosts.
"""
global___DeploymentMode = DeploymentMode
@typing_extensions.final
class ProcessingModeDef(google.protobuf.message.Message):
"""Next tag: 2"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _ShardingPolicy:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ShardingPolicyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ProcessingModeDef._ShardingPolicy.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
OFF: ProcessingModeDef._ShardingPolicy.ValueType # 0
"""No sharding will be performed. Each worker produces the entire dataset
without any sharding. With this mode, the best practice is to shuffle the
dataset nondeterministically so that workers process the dataset in
different orders.
"""
DYNAMIC: ProcessingModeDef._ShardingPolicy.ValueType # 1
"""The input dataset is dynamically split among workers at runtime. Each
worker gets the next split when it reads data from the dispatcher. There
is no fixed sharding with this mode.
"""
FILE: ProcessingModeDef._ShardingPolicy.ValueType # 2
"""The following are static sharding policies. The semantics are similar to
`tf.data.experimental.AutoShardPolicy`. These policies require:
* The tf.data service cluster has a fixed size, and you need to specify
the workers in DispatcherConfig.
* Each client only reads from the local tf.data service worker.
Shards by input files (each worker will get a set of files to process).
When this option is selected, make sure that there are at least as many
files as workers. If there are fewer input files than workers, a runtime
error will be raised.
"""
DATA: ProcessingModeDef._ShardingPolicy.ValueType # 3
"""Shards by elements produced by the dataset. Each worker will process the
whole dataset and discard the portion that is not for itself. Note that
for this mode to correctly partition the dataset elements, the dataset
needs to produce elements in a deterministic order.
"""
FILE_OR_DATA: ProcessingModeDef._ShardingPolicy.ValueType # 4
"""Attempts FILE-based sharding, falling back to DATA-based sharding on
failures.
"""
HINT: ProcessingModeDef._ShardingPolicy.ValueType # 5
"""Looks for the presence of `shard(SHARD_HINT, ...)` which is treated as a
placeholder to replace with `shard(num_workers, worker_index)`.
"""
class ShardingPolicy(_ShardingPolicy, metaclass=_ShardingPolicyEnumTypeWrapper):
"""Specifies how data is sharded among tf.data service workers."""
OFF: ProcessingModeDef.ShardingPolicy.ValueType # 0
"""No sharding will be performed. Each worker produces the entire dataset
without any sharding. With this mode, the best practice is to shuffle the
dataset nondeterministically so that workers process the dataset in
different orders.
"""
DYNAMIC: ProcessingModeDef.ShardingPolicy.ValueType # 1
"""The input dataset is dynamically split among workers at runtime. Each
worker gets the next split when it reads data from the dispatcher. There
is no fixed sharding with this mode.
"""
FILE: ProcessingModeDef.ShardingPolicy.ValueType # 2
"""The following are static sharding policies. The semantics are similar to
`tf.data.experimental.AutoShardPolicy`. These policies require:
* The tf.data service cluster has a fixed size, and you need to specify
the workers in DispatcherConfig.
* Each client only reads from the local tf.data service worker.
Shards by input files (each worker will get a set of files to process).
When this option is selected, make sure that there are at least as many
files as workers. If there are fewer input files than workers, a runtime
error will be raised.
"""
DATA: ProcessingModeDef.ShardingPolicy.ValueType # 3
"""Shards by elements produced by the dataset. Each worker will process the
whole dataset and discard the portion that is not for itself. Note that
for this mode to correctly partition the dataset elements, the dataset
needs to produce elements in a deterministic order.
"""
FILE_OR_DATA: ProcessingModeDef.ShardingPolicy.ValueType # 4
"""Attempts FILE-based sharding, falling back to DATA-based sharding on
failures.
"""
HINT: ProcessingModeDef.ShardingPolicy.ValueType # 5
"""Looks for the presence of `shard(SHARD_HINT, ...)` which is treated as a
placeholder to replace with `shard(num_workers, worker_index)`.
"""
SHARDING_POLICY_FIELD_NUMBER: builtins.int
sharding_policy: global___ProcessingModeDef.ShardingPolicy.ValueType
def __init__(
self,
*,
sharding_policy: global___ProcessingModeDef.ShardingPolicy.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["sharding_policy", b"sharding_policy"]) -> None: ...
global___ProcessingModeDef = ProcessingModeDef
@typing_extensions.final
class DataServiceMetadata(google.protobuf.message.Message):
"""Metadata related to tf.data service datasets.
Next tag: 4
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Compression:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _CompressionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DataServiceMetadata._Compression.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
COMPRESSION_UNSPECIFIED: DataServiceMetadata._Compression.ValueType # 0
COMPRESSION_OFF: DataServiceMetadata._Compression.ValueType # 1
"""No compression."""
COMPRESSION_SNAPPY: DataServiceMetadata._Compression.ValueType # 2
"""Snappy compression as defined in tensorflow/core/platform/snappy.h."""
class Compression(_Compression, metaclass=_CompressionEnumTypeWrapper): ...
COMPRESSION_UNSPECIFIED: DataServiceMetadata.Compression.ValueType # 0
COMPRESSION_OFF: DataServiceMetadata.Compression.ValueType # 1
"""No compression."""
COMPRESSION_SNAPPY: DataServiceMetadata.Compression.ValueType # 2
"""Snappy compression as defined in tensorflow/core/platform/snappy.h."""
ELEMENT_SPEC_FIELD_NUMBER: builtins.int
COMPRESSION_FIELD_NUMBER: builtins.int
CARDINALITY_FIELD_NUMBER: builtins.int
element_spec: builtins.bytes
"""Serialized element spec."""
compression: global___DataServiceMetadata.Compression.ValueType
cardinality: builtins.int
"""Cardinality of the dataset."""
def __init__(
self,
*,
element_spec: builtins.bytes | None = ...,
compression: global___DataServiceMetadata.Compression.ValueType | None = ...,
cardinality: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["element_spec", b"element_spec", "optional_element_spec", b"optional_element_spec"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["cardinality", b"cardinality", "compression", b"compression", "element_spec", b"element_spec", "optional_element_spec", b"optional_element_spec"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["optional_element_spec", b"optional_element_spec"]) -> typing_extensions.Literal["element_spec"] | None: ...
global___DataServiceMetadata = DataServiceMetadata
@typing_extensions.final
class CrossTrainerCacheOptions(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TRAINER_ID_FIELD_NUMBER: builtins.int
trainer_id: builtins.str
def __init__(
self,
*,
trainer_id: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["trainer_id", b"trainer_id"]) -> None: ...
global___CrossTrainerCacheOptions = CrossTrainerCacheOptions
@typing_extensions.final
class DataServiceConfig(google.protobuf.message.Message):
"""Data service config available to the client through GetDataServiceConfig RPC.
Next tag: 2
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEPLOYMENT_MODE_FIELD_NUMBER: builtins.int
deployment_mode: global___DeploymentMode.ValueType
def __init__(
self,
*,
deployment_mode: global___DeploymentMode.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["deployment_mode", b"deployment_mode"]) -> None: ...
global___DataServiceConfig = DataServiceConfig
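Note that `element_spec` sits inside the `optional_element_spec` oneof, so unlike ordinary proto3 scalars its presence is tracked. A hedged sketch, assuming the runtime module is tensorflow.core.protobuf.data_service_pb2:
# Hypothetical sketch; the import path and byte payload are assumptions.
from tensorflow.core.protobuf.data_service_pb2 import (
    DataServiceMetadata, ProcessingModeDef,
)

mode = ProcessingModeDef(sharding_policy=ProcessingModeDef.DYNAMIC)

meta = DataServiceMetadata(
    element_spec=b"\x08\x01",  # placeholder bytes for a serialized element spec
    compression=DataServiceMetadata.COMPRESSION_SNAPPY,
)
assert meta.WhichOneof("optional_element_spec") == "element_spec"
# A fresh message has no element_spec set, and HasField reflects that:
assert not DataServiceMetadata().HasField("element_spec")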

View File

@@ -0,0 +1,575 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.protobuf.graph_debug_info_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _TensorDebugMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _TensorDebugModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_TensorDebugMode.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNSPECIFIED: _TensorDebugMode.ValueType # 0
NO_TENSOR: _TensorDebugMode.ValueType # 1
"""Only records what tensors are computed, eagerly or in graphs.
No information regarding the value of the tensor is available.
"""
CURT_HEALTH: _TensorDebugMode.ValueType # 2
"""A minimalist health summary for float-type tensors.
Contains information only about the presence/absence of pathological
values including Infinity and NaN.
Applicable only to float dtypes.
"""
CONCISE_HEALTH: _TensorDebugMode.ValueType # 3
"""A concise health summary for float-type tensors.
Contains more information than CURT_HEALTH.
Infinity and NaN are treated differently.
Applicable only to float and integer dtypes.
"""
FULL_HEALTH: _TensorDebugMode.ValueType # 4
"""A detailed health summary.
Contains more detailed information than `CONCISE_HEALTH`.
Information about device, dtype and shape is included.
Counts for various types of values (Infinity, NaN, negative, zero,
positive) are included.
Applicable to float, integer and boolean dtypes.
"""
SHAPE: _TensorDebugMode.ValueType # 5
"""Provides full runtime shape information, up to a maximum rank, beyond
which the dimension sizes are truncated.
"""
FULL_NUMERICS: _TensorDebugMode.ValueType # 6
"""Full numeric summary.
Including device, dtype, shape, counts of various types of values
(Infinity, NaN, negative, zero, positive), and summary statistics
(minimum, maximum, mean and variance).
Applicable to float, integer and boolean dtypes.
"""
FULL_TENSOR: _TensorDebugMode.ValueType # 7
"""Full tensor value."""
REDUCE_INF_NAN_THREE_SLOTS: _TensorDebugMode.ValueType # 8
"""Reduce the elements of a tensor to a rank-1 tensor of shape [3], in which
- the 1st element is -inf if any element of the tensor is -inf,
or zero otherwise.
- the 2nd element is +inf if any element of the tensor is +inf,
or zero otherwise.
- the 3rd element is nan if any element of the tensor is nan, or zero
otherwise.
"""
class TensorDebugMode(_TensorDebugMode, metaclass=_TensorDebugModeEnumTypeWrapper):
"""Available modes for extracting debugging information from a Tensor.
TODO(cais): Document the detailed column names and semantics in a separate
markdown file once the implementation settles.
"""
UNSPECIFIED: TensorDebugMode.ValueType # 0
NO_TENSOR: TensorDebugMode.ValueType # 1
"""Only records what tensors are computed, eagerly or in graphs.
No information regarding the value of the tensor is available.
"""
CURT_HEALTH: TensorDebugMode.ValueType # 2
"""A minimalist health summary for float-type tensors.
Contains information only about the presence/absence of pathological
values including Infinity and NaN.
Applicable only to float dtypes.
"""
CONCISE_HEALTH: TensorDebugMode.ValueType # 3
"""A concise health summary for float-type tensors.
Contains more information than CURT_HEALTH.
Infinity and NaN are treated differently.
Applicable only to float and integer dtypes.
"""
FULL_HEALTH: TensorDebugMode.ValueType # 4
"""A detailed health summary.
Contains more detailed information than `CONCISE_HEALTH`.
Information about device, dtype and shape is included.
Counts for various types of values (Infinity, NaN, negative, zero,
positive) are included.
Applicable to float, integer and boolean dtypes.
"""
SHAPE: TensorDebugMode.ValueType # 5
"""Provides full runtime shape information, up to a maximum rank, beyond
which the dimension sizes are truncated.
"""
FULL_NUMERICS: TensorDebugMode.ValueType # 6
"""Full numeric summary.
Including device, dtype, shape, counts of various types of values
(Infinity, NaN, negative, zero, positive), and summary statistics
(minimum, maximum, mean and variance).
Applicable to float, integer and boolean dtypes.
"""
FULL_TENSOR: TensorDebugMode.ValueType # 7
"""Full tensor value."""
REDUCE_INF_NAN_THREE_SLOTS: TensorDebugMode.ValueType # 8
"""Reduce the elements of a tensor to a rank-1 tensor of shape [3], in which
- the 1st element is -inf if any element of the tensor is -inf,
or zero otherwise.
- the 2nd element is +inf if any element of the tensor is +inf,
or zero otherwise.
- the 3rd element is nan if any element of the tensor is nan, or zero
otherwise.
"""
global___TensorDebugMode = TensorDebugMode
@typing_extensions.final
class DebugEvent(google.protobuf.message.Message):
"""An Event related to the debugging of a TensorFlow program."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
WALL_TIME_FIELD_NUMBER: builtins.int
STEP_FIELD_NUMBER: builtins.int
DEBUG_METADATA_FIELD_NUMBER: builtins.int
SOURCE_FILE_FIELD_NUMBER: builtins.int
STACK_FRAME_WITH_ID_FIELD_NUMBER: builtins.int
GRAPH_OP_CREATION_FIELD_NUMBER: builtins.int
DEBUGGED_GRAPH_FIELD_NUMBER: builtins.int
EXECUTION_FIELD_NUMBER: builtins.int
GRAPH_EXECUTION_TRACE_FIELD_NUMBER: builtins.int
GRAPH_ID_FIELD_NUMBER: builtins.int
DEBUGGED_DEVICE_FIELD_NUMBER: builtins.int
wall_time: builtins.float
"""Timestamp in seconds (with microsecond precision)."""
step: builtins.int
"""Step of training (if available)."""
@property
def debug_metadata(self) -> global___DebugMetadata:
"""Metadata related to this debugging data."""
@property
def source_file(self) -> global___SourceFile:
"""The content of a source file."""
@property
def stack_frame_with_id(self) -> global___StackFrameWithId:
"""A stack frame (filename, line number and column number, function name and
code string) with ID.
"""
@property
def graph_op_creation(self) -> global___GraphOpCreation:
"""The creation of an op within a graph (e.g., a FuncGraph compiled from
a Python function).
"""
@property
def debugged_graph(self) -> global___DebuggedGraph:
"""Information about a debugged graph."""
@property
def execution(self) -> global___Execution:
"""Execution of an op or a Graph (e.g., a tf.function)."""
@property
def graph_execution_trace(self) -> global___GraphExecutionTrace:
"""A graph execution trace: Contains information about the intermediate
tensors computed during the graph execution.
"""
graph_id: builtins.str
"""The ID of the graph (i.e., FuncGraph) executed here: applicable only
to the execution of a FuncGraph.
"""
@property
def debugged_device(self) -> global___DebuggedDevice:
"""A device on which debugger-instrumented ops and/or tensors reside."""
def __init__(
self,
*,
wall_time: builtins.float | None = ...,
step: builtins.int | None = ...,
debug_metadata: global___DebugMetadata | None = ...,
source_file: global___SourceFile | None = ...,
stack_frame_with_id: global___StackFrameWithId | None = ...,
graph_op_creation: global___GraphOpCreation | None = ...,
debugged_graph: global___DebuggedGraph | None = ...,
execution: global___Execution | None = ...,
graph_execution_trace: global___GraphExecutionTrace | None = ...,
graph_id: builtins.str | None = ...,
debugged_device: global___DebuggedDevice | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["debug_metadata", b"debug_metadata", "debugged_device", b"debugged_device", "debugged_graph", b"debugged_graph", "execution", b"execution", "graph_execution_trace", b"graph_execution_trace", "graph_id", b"graph_id", "graph_op_creation", b"graph_op_creation", "source_file", b"source_file", "stack_frame_with_id", b"stack_frame_with_id", "what", b"what"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["debug_metadata", b"debug_metadata", "debugged_device", b"debugged_device", "debugged_graph", b"debugged_graph", "execution", b"execution", "graph_execution_trace", b"graph_execution_trace", "graph_id", b"graph_id", "graph_op_creation", b"graph_op_creation", "source_file", b"source_file", "stack_frame_with_id", b"stack_frame_with_id", "step", b"step", "wall_time", b"wall_time", "what", b"what"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["what", b"what"]) -> typing_extensions.Literal["debug_metadata", "source_file", "stack_frame_with_id", "graph_op_creation", "debugged_graph", "execution", "graph_execution_trace", "graph_id", "debugged_device"] | None: ...
global___DebugEvent = DebugEvent
@typing_extensions.final
class DebugMetadata(google.protobuf.message.Message):
"""Metadata about the debugger and the debugged TensorFlow program."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSORFLOW_VERSION_FIELD_NUMBER: builtins.int
FILE_VERSION_FIELD_NUMBER: builtins.int
TFDBG_RUN_ID_FIELD_NUMBER: builtins.int
tensorflow_version: builtins.str
"""Version of TensorFlow."""
file_version: builtins.str
"""Version of the DebugEvent file format.
Has a format of "debug.Event:<number>", e.g., "debug.Event:1".
"""
tfdbg_run_id: builtins.str
"""A unique ID for the current run of tfdbg.
A run of tfdbg is defined as a TensorFlow job instrumented by tfdbg.
Multiple hosts in a distributed TensorFlow job instrumented by tfdbg
have the same ID.
"""
def __init__(
self,
*,
tensorflow_version: builtins.str | None = ...,
file_version: builtins.str | None = ...,
tfdbg_run_id: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["file_version", b"file_version", "tensorflow_version", b"tensorflow_version", "tfdbg_run_id", b"tfdbg_run_id"]) -> None: ...
global___DebugMetadata = DebugMetadata
@typing_extensions.final
class SourceFile(google.protobuf.message.Message):
"""Content of a source file involved in the execution of the debugged TensorFlow
program.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FILE_PATH_FIELD_NUMBER: builtins.int
HOST_NAME_FIELD_NUMBER: builtins.int
LINES_FIELD_NUMBER: builtins.int
file_path: builtins.str
"""Path to the file."""
host_name: builtins.str
"""Name of the host on which the file is located."""
@property
def lines(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Line-by-line content of the file."""
def __init__(
self,
*,
file_path: builtins.str | None = ...,
host_name: builtins.str | None = ...,
lines: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["file_path", b"file_path", "host_name", b"host_name", "lines", b"lines"]) -> None: ...
global___SourceFile = SourceFile
@typing_extensions.final
class StackFrameWithId(google.protobuf.message.Message):
"""A stack frame with ID."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ID_FIELD_NUMBER: builtins.int
FILE_LINE_COL_FIELD_NUMBER: builtins.int
id: builtins.str
"""A unique ID for the stack frame: A UUID-like string."""
@property
def file_line_col(self) -> tensorflow.core.protobuf.graph_debug_info_pb2.GraphDebugInfo.FileLineCol:
"""Stack frame, i.e., a frame of a stack trace, containing information
regarding the file name, line number, function name, code content
of the line, and column number (if available).
"""
def __init__(
self,
*,
id: builtins.str | None = ...,
file_line_col: tensorflow.core.protobuf.graph_debug_info_pb2.GraphDebugInfo.FileLineCol | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["file_line_col", b"file_line_col"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["file_line_col", b"file_line_col", "id", b"id"]) -> None: ...
global___StackFrameWithId = StackFrameWithId
@typing_extensions.final
class CodeLocation(google.protobuf.message.Message):
"""Code location information: A stack trace with host-name information.
Instead of encoding the detailed stack trace, this proto refers to IDs of
stack frames stored as `StackFrameWithId` protos.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
HOST_NAME_FIELD_NUMBER: builtins.int
STACK_FRAME_IDS_FIELD_NUMBER: builtins.int
host_name: builtins.str
"""Host name on which the source files are located."""
@property
def stack_frame_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""ID to a stack frame, each of which is pointed to
by a unique ID. The ordering of the frames is consistent with Python's
`traceback.extract_tb()`.
"""
def __init__(
self,
*,
host_name: builtins.str | None = ...,
stack_frame_ids: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["host_name", b"host_name", "stack_frame_ids", b"stack_frame_ids"]) -> None: ...
global___CodeLocation = CodeLocation
@typing_extensions.final
class GraphOpCreation(google.protobuf.message.Message):
"""The creation of an op in a TensorFlow Graph (e.g., FuncGraph in TF2)."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OP_TYPE_FIELD_NUMBER: builtins.int
OP_NAME_FIELD_NUMBER: builtins.int
GRAPH_NAME_FIELD_NUMBER: builtins.int
GRAPH_ID_FIELD_NUMBER: builtins.int
DEVICE_NAME_FIELD_NUMBER: builtins.int
INPUT_NAMES_FIELD_NUMBER: builtins.int
NUM_OUTPUTS_FIELD_NUMBER: builtins.int
CODE_LOCATION_FIELD_NUMBER: builtins.int
OUTPUT_TENSOR_IDS_FIELD_NUMBER: builtins.int
op_type: builtins.str
"""Type of the op (e.g., "MatMul")."""
op_name: builtins.str
"""Name of the op (e.g., "Dense/MatMul_1")."""
graph_name: builtins.str
"""Name of the graph that the op is a part of (if available)."""
graph_id: builtins.str
"""Unique ID of the graph (generated by debugger).
This is the ID of the immediately-enclosing graph.
"""
device_name: builtins.str
"""Name of the device that the op is assigned to (if available)."""
@property
def input_names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Names of the input tensors to the op."""
num_outputs: builtins.int
"""Number of output tensors emitted by the op."""
@property
def code_location(self) -> global___CodeLocation:
"""The unique ID for code location (stack trace) of the op's creation."""
@property
def output_tensor_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Unique IDs for the output tensors of this op."""
def __init__(
self,
*,
op_type: builtins.str | None = ...,
op_name: builtins.str | None = ...,
graph_name: builtins.str | None = ...,
graph_id: builtins.str | None = ...,
device_name: builtins.str | None = ...,
input_names: collections.abc.Iterable[builtins.str] | None = ...,
num_outputs: builtins.int | None = ...,
code_location: global___CodeLocation | None = ...,
output_tensor_ids: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["code_location", b"code_location"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["code_location", b"code_location", "device_name", b"device_name", "graph_id", b"graph_id", "graph_name", b"graph_name", "input_names", b"input_names", "num_outputs", b"num_outputs", "op_name", b"op_name", "op_type", b"op_type", "output_tensor_ids", b"output_tensor_ids"]) -> None: ...
global___GraphOpCreation = GraphOpCreation
@typing_extensions.final
class DebuggedGraph(google.protobuf.message.Message):
"""A debugger-instrumented graph."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
GRAPH_ID_FIELD_NUMBER: builtins.int
GRAPH_NAME_FIELD_NUMBER: builtins.int
INSTRUMENTED_OPS_FIELD_NUMBER: builtins.int
ORIGINAL_GRAPH_DEF_FIELD_NUMBER: builtins.int
INSTRUMENTED_GRAPH_DEF_FIELD_NUMBER: builtins.int
OUTER_CONTEXT_ID_FIELD_NUMBER: builtins.int
graph_id: builtins.str
"""An ID for the graph.
This can be used to look up graph names. Generated by the debugger.
"""
graph_name: builtins.str
"""Name of the graph (if available)."""
@property
def instrumented_ops(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Names of the instrumented ops. This can be used to look up op name
based on the numeric-summary tensors (2nd column).
"""
original_graph_def: builtins.bytes
"""Original (uninstrumented) GraphDef (if available)."""
instrumented_graph_def: builtins.bytes
"""An encoded version of a GraphDef.
This graph may include the debugger-inserted ops.
"""
outer_context_id: builtins.str
"""IDs of the immediate enclosing context (graph), if any."""
def __init__(
self,
*,
graph_id: builtins.str | None = ...,
graph_name: builtins.str | None = ...,
instrumented_ops: collections.abc.Iterable[builtins.str] | None = ...,
original_graph_def: builtins.bytes | None = ...,
instrumented_graph_def: builtins.bytes | None = ...,
outer_context_id: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["graph_id", b"graph_id", "graph_name", b"graph_name", "instrumented_graph_def", b"instrumented_graph_def", "instrumented_ops", b"instrumented_ops", "original_graph_def", b"original_graph_def", "outer_context_id", b"outer_context_id"]) -> None: ...
global___DebuggedGraph = DebuggedGraph
@typing_extensions.final
class DebuggedDevice(google.protobuf.message.Message):
"""A device on which ops and/or tensors are instrumented by the debugger."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEVICE_NAME_FIELD_NUMBER: builtins.int
DEVICE_ID_FIELD_NUMBER: builtins.int
device_name: builtins.str
"""Name of the device."""
device_id: builtins.int
"""A debugger-generated ID for the device. Guaranteed to be unique within
the scope of the debugged TensorFlow program, including single-host and
multi-host settings.
TODO(cais): Test the uniqueness guarantee in multi-host settings.
"""
def __init__(
self,
*,
device_name: builtins.str | None = ...,
device_id: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device_id", b"device_id", "device_name", b"device_name"]) -> None: ...
global___DebuggedDevice = DebuggedDevice
@typing_extensions.final
class Execution(google.protobuf.message.Message):
"""Data relating to the eager execution of an op or a Graph.
For an op that generates N output tensors (N >= 0), only one
Execution proto will be used to describe the execution event.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OP_TYPE_FIELD_NUMBER: builtins.int
NUM_OUTPUTS_FIELD_NUMBER: builtins.int
GRAPH_ID_FIELD_NUMBER: builtins.int
INPUT_TENSOR_IDS_FIELD_NUMBER: builtins.int
OUTPUT_TENSOR_IDS_FIELD_NUMBER: builtins.int
TENSOR_DEBUG_MODE_FIELD_NUMBER: builtins.int
TENSOR_PROTOS_FIELD_NUMBER: builtins.int
CODE_LOCATION_FIELD_NUMBER: builtins.int
OUTPUT_TENSOR_DEVICE_IDS_FIELD_NUMBER: builtins.int
op_type: builtins.str
"""Op type (e.g., "MatMul").
In the case of a Graph, this is the name of the Graph.
"""
num_outputs: builtins.int
"""Number of output tensors."""
graph_id: builtins.str
"""The graph that's executed: applicable only to the eager
execution of a FuncGraph.
"""
@property
def input_tensor_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""IDs of the input tensors (if available)."""
@property
def output_tensor_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""IDs of the output tensors (if availbable).
If specified, must have the same length as tensor_protos.
"""
tensor_debug_mode: global___TensorDebugMode.ValueType
"""Type of the tensor value encapsulated in this proto."""
@property
def tensor_protos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]:
"""Output Tensor values in the type described by `tensor_value_type`.
The length of this should match `num_outputs`.
"""
@property
def code_location(self) -> global___CodeLocation:
"""Stack trace of the eager execution."""
@property
def output_tensor_device_ids(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Debugged-generated IDs of the devices on which the output tensors reside.
To look up details about the device (e.g., name), cross-reference this
field with the DebuggedDevice messages.
"""
def __init__(
self,
*,
op_type: builtins.str | None = ...,
num_outputs: builtins.int | None = ...,
graph_id: builtins.str | None = ...,
input_tensor_ids: collections.abc.Iterable[builtins.int] | None = ...,
output_tensor_ids: collections.abc.Iterable[builtins.int] | None = ...,
tensor_debug_mode: global___TensorDebugMode.ValueType | None = ...,
tensor_protos: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ...,
code_location: global___CodeLocation | None = ...,
output_tensor_device_ids: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["code_location", b"code_location"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["code_location", b"code_location", "graph_id", b"graph_id", "input_tensor_ids", b"input_tensor_ids", "num_outputs", b"num_outputs", "op_type", b"op_type", "output_tensor_device_ids", b"output_tensor_device_ids", "output_tensor_ids", b"output_tensor_ids", "tensor_debug_mode", b"tensor_debug_mode", "tensor_protos", b"tensor_protos"]) -> None: ...
global___Execution = Execution
@typing_extensions.final
class GraphExecutionTrace(google.protobuf.message.Message):
"""Data relating to an execution of a Graph (e.g., an eager execution of a
FuncGraph).
The values of the intermediate tensors computed in the graph are recorded
in this proto. A graph execution may correspond to one or more pieces of
`GraphExecutionTrace`, depending on whether the instrumented tensor values
are summarized in an aggregated or separate fashion.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TFDBG_CONTEXT_ID_FIELD_NUMBER: builtins.int
OP_NAME_FIELD_NUMBER: builtins.int
OUTPUT_SLOT_FIELD_NUMBER: builtins.int
TENSOR_DEBUG_MODE_FIELD_NUMBER: builtins.int
TENSOR_PROTO_FIELD_NUMBER: builtins.int
DEVICE_NAME_FIELD_NUMBER: builtins.int
tfdbg_context_id: builtins.str
"""Unique ID of the context that the executed op(s) belong to (e.g., a
compiled concrete tf.function).
"""
op_name: builtins.str
"""Name of the op (applicable only in the case of the `FULL_TENSOR` trace
level).
"""
output_slot: builtins.int
"""Output slot of the tensor (applicable only in the case of the `FULL_TENSOR`
trace level).
"""
tensor_debug_mode: global___TensorDebugMode.ValueType
"""Type of the tensor value encapsulated in this proto."""
@property
def tensor_proto(self) -> tensorflow.core.framework.tensor_pb2.TensorProto:
"""Tensor value in the type described by `tensor_value_type`.
This tensor may summarize the value of a single intermediate op of the
graph, or those of multiple intermediate tensors.
"""
device_name: builtins.str
"""Name of the device that the op belongs to."""
def __init__(
self,
*,
tfdbg_context_id: builtins.str | None = ...,
op_name: builtins.str | None = ...,
output_slot: builtins.int | None = ...,
tensor_debug_mode: global___TensorDebugMode.ValueType | None = ...,
tensor_proto: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
device_name: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor_proto", b"tensor_proto"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["device_name", b"device_name", "op_name", b"op_name", "output_slot", b"output_slot", "tensor_debug_mode", b"tensor_debug_mode", "tensor_proto", b"tensor_proto", "tfdbg_context_id", b"tfdbg_context_id"]) -> None: ...
global___GraphExecutionTrace = GraphExecutionTrace
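Each DebugEvent carries exactly one payload from the `what` oneof alongside the common wall_time/step header. A sketch, assuming the runtime module is tensorflow.core.protobuf.debug_event_pb2:
# Hypothetical sketch; the import path and values are assumptions.
from tensorflow.core.protobuf.debug_event_pb2 import DebugEvent, DebugMetadata

event = DebugEvent(
    wall_time=1678838400.5,  # seconds with microsecond precision, illustrative
    step=0,
    debug_metadata=DebugMetadata(
        tensorflow_version="2.11.0",
        file_version="debug.Event:1",  # format documented in DebugMetadata
        tfdbg_run_id="run-0",
    ),
)
# Setting debug_metadata claims the `what` oneof; assigning another payload
# (e.g. source_file) would clear it.
assert event.WhichOneof("what") == "debug_metadata"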

View File

@@ -0,0 +1,170 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class DebugTensorWatch(google.protobuf.message.Message):
"""Option for watching a node in TensorFlow Debugger (tfdbg)."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_NAME_FIELD_NUMBER: builtins.int
OUTPUT_SLOT_FIELD_NUMBER: builtins.int
DEBUG_OPS_FIELD_NUMBER: builtins.int
DEBUG_URLS_FIELD_NUMBER: builtins.int
TOLERATE_DEBUG_OP_CREATION_FAILURES_FIELD_NUMBER: builtins.int
node_name: builtins.str
"""Name of the node to watch.
Use "*" for wildcard. But note: currently, regex is not supported in
general.
"""
output_slot: builtins.int
"""Output slot to watch.
The semantics of output_slot == -1 is that all outputs of the node
will be watched (i.e., a wildcard).
Other negative values of output_slot are invalid and will lead to
errors currently.
"""
@property
def debug_ops(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Name(s) of the debugging op(s).
One or more probes on a tensor.
e.g., {"DebugIdentity", "DebugNanCount"}
"""
@property
def debug_urls(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""URL(s) for debug targets(s).
Supported URL formats are:
- file:///foo/tfdbg_dump: Writes out Event content to file
/foo/tfdbg_dump. Assumes all directories can be created if they don't
already exist.
- grpc://localhost:11011: Sends an RPC request to an EventListener
service running at localhost:11011 with the event.
- memcbk:///event_key: Routes tensors to clients using the
callback registered with the DebugCallbackRegistry for event_key.
Each debug op listed in debug_ops will publish its output tensor (debug
signal) to all URLs in debug_urls.
N.B. Session::Run() supports concurrent invocations of the same inputs
(feed keys), outputs and target nodes. If such concurrent invocations
are to be debugged, the callers of Session::Run() must use distinct
debug_urls to make sure that the streamed or dumped events do not overlap
among the invocations.
TODO(cais): More visible documentation of this in g3docs.
"""
tolerate_debug_op_creation_failures: builtins.bool
"""Do not error out if debug op creation fails (e.g., due to dtype
incompatibility). Instead, just log the failure.
"""
def __init__(
self,
*,
node_name: builtins.str | None = ...,
output_slot: builtins.int | None = ...,
debug_ops: collections.abc.Iterable[builtins.str] | None = ...,
debug_urls: collections.abc.Iterable[builtins.str] | None = ...,
tolerate_debug_op_creation_failures: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["debug_ops", b"debug_ops", "debug_urls", b"debug_urls", "node_name", b"node_name", "output_slot", b"output_slot", "tolerate_debug_op_creation_failures", b"tolerate_debug_op_creation_failures"]) -> None: ...
global___DebugTensorWatch = DebugTensorWatch
@typing_extensions.final
class DebugOptions(google.protobuf.message.Message):
"""Options for initializing DebuggerState in TensorFlow Debugger (tfdbg)."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEBUG_TENSOR_WATCH_OPTS_FIELD_NUMBER: builtins.int
GLOBAL_STEP_FIELD_NUMBER: builtins.int
RESET_DISK_BYTE_USAGE_FIELD_NUMBER: builtins.int
@property
def debug_tensor_watch_opts(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DebugTensorWatch]:
"""Debugging options"""
global_step: builtins.int
"""Caller-specified global step count.
Note that this is distinct from the session run count and the executor
step count.
"""
reset_disk_byte_usage: builtins.bool
"""Whether the total disk usage of tfdbg is to be reset to zero
in this Session.run call. This is used by wrappers and hooks
such as the local CLI ones to indicate that the dumped tensors
are cleaned up from the disk after each Session.run.
"""
def __init__(
self,
*,
debug_tensor_watch_opts: collections.abc.Iterable[global___DebugTensorWatch] | None = ...,
global_step: builtins.int | None = ...,
reset_disk_byte_usage: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["debug_tensor_watch_opts", b"debug_tensor_watch_opts", "global_step", b"global_step", "reset_disk_byte_usage", b"reset_disk_byte_usage"]) -> None: ...
global___DebugOptions = DebugOptions
@typing_extensions.final
class DebuggedSourceFile(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
HOST_FIELD_NUMBER: builtins.int
FILE_PATH_FIELD_NUMBER: builtins.int
LAST_MODIFIED_FIELD_NUMBER: builtins.int
BYTES_FIELD_NUMBER: builtins.int
LINES_FIELD_NUMBER: builtins.int
host: builtins.str
"""The host name on which a source code file is located."""
file_path: builtins.str
"""Path to the source code file."""
last_modified: builtins.int
"""The timestamp at which the source code file is last modified."""
bytes: builtins.int
"""Byte size of the file."""
@property
def lines(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Line-by-line content of the source code file."""
def __init__(
self,
*,
host: builtins.str | None = ...,
file_path: builtins.str | None = ...,
last_modified: builtins.int | None = ...,
bytes: builtins.int | None = ...,
lines: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bytes", b"bytes", "file_path", b"file_path", "host", b"host", "last_modified", b"last_modified", "lines", b"lines"]) -> None: ...
global___DebuggedSourceFile = DebuggedSourceFile
@typing_extensions.final
class DebuggedSourceFiles(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SOURCE_FILES_FIELD_NUMBER: builtins.int
@property
def source_files(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___DebuggedSourceFile]:
"""A collection of source code files."""
def __init__(
self,
*,
source_files: collections.abc.Iterable[global___DebuggedSourceFile] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["source_files", b"source_files"]) -> None: ...
global___DebuggedSourceFiles = DebuggedSourceFiles
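
A usage sketch for the watch options above, exercising the documented -1
output-slot wildcard and the file:// and grpc:// URL formats. The import path
`tensorflow.core.protobuf.debug_pb2` is an assumption; node and URL values
are illustrative.

from tensorflow.core.protobuf import debug_pb2  # assumed runtime module

watch = debug_pb2.DebugTensorWatch(
    node_name="dense/MatMul",
    output_slot=-1,  # documented wildcard: watch all outputs of the node
    debug_ops=["DebugIdentity", "DebugNanCount"],
    debug_urls=["file:///tmp/tfdbg_dump", "grpc://localhost:11011"],
    tolerate_debug_op_creation_failures=True,
)
opts = debug_pb2.DebugOptions(debug_tensor_watch_opts=[watch], global_step=0)
assert opts.debug_tensor_watch_opts[0].output_slot == -1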


@@ -0,0 +1,139 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2020 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TaskDeviceFilters(google.protobuf.message.Message):
"""This file contains protos to be used when defining a TensorFlow
cluster.
Configure device filters for remote tasks in the cluster. When associated
with a ClusterDef in setting up the cluster, a remote task will ignore all
devices which do not match any of its filters. Device filters must be
configured at cluster startup and cannot be updated once the cluster is
up and running.
EXAMPLES
--------
A two-job cluster with the following ClusterDef:
Cluster:
job { name: 'worker' tasks { key: 0 value: 'worker1:2222' }
tasks { key: 1 value: 'worker2:2222' } }
job { name: 'ps' tasks { key: 0 value: 'ps0:2222' }
tasks { key: 1 value: 'ps1:2222' } }
Set device filters to isolate worker tasks:
ClusterDeviceFilters:
job { name: 'worker' tasks { key: 0
value: device_filter '/job:ps'
device_filter '/job:worker/task:0' }
tasks { key: 1
value: device_filter '/job:ps'
device_filter '/job:worker/task:1' } }
Defines the device filters for a remote task.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEVICE_FILTERS_FIELD_NUMBER: builtins.int
@property
def device_filters(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
def __init__(
self,
*,
device_filters: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device_filters", b"device_filters"]) -> None: ...
global___TaskDeviceFilters = TaskDeviceFilters
@typing_extensions.final
class JobDeviceFilters(google.protobuf.message.Message):
"""Defines the device filters for tasks in a job."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class TasksEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
@property
def value(self) -> global___TaskDeviceFilters: ...
def __init__(
self,
*,
key: builtins.int | None = ...,
value: global___TaskDeviceFilters | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
TASKS_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name of this job."""
@property
def tasks(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, global___TaskDeviceFilters]:
"""Mapping from task ID to task device filters."""
def __init__(
self,
*,
name: builtins.str | None = ...,
tasks: collections.abc.Mapping[builtins.int, global___TaskDeviceFilters] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "tasks", b"tasks"]) -> None: ...
global___JobDeviceFilters = JobDeviceFilters
@typing_extensions.final
class ClusterDeviceFilters(google.protobuf.message.Message):
"""Defines the device filters for jobs in a cluster."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
JOBS_FIELD_NUMBER: builtins.int
@property
def jobs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___JobDeviceFilters]: ...
def __init__(
self,
*,
jobs: collections.abc.Iterable[global___JobDeviceFilters] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["jobs", b"jobs"]) -> None: ...
global___ClusterDeviceFilters = ClusterDeviceFilters
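
The textproto example in the TaskDeviceFilters docstring can be reproduced in
Python roughly as follows; a sketch assuming the generated module is
importable as `tensorflow.core.protobuf.device_filters_pb2`.

from tensorflow.core.protobuf import device_filters_pb2  # assumed runtime module

worker = device_filters_pb2.JobDeviceFilters(
    name="worker",
    tasks={  # task ID -> per-task device filters
        0: device_filters_pb2.TaskDeviceFilters(
            device_filters=["/job:ps", "/job:worker/task:0"]),
        1: device_filters_pb2.TaskDeviceFilters(
            device_filters=["/job:ps", "/job:worker/task:1"]),
    },
)
cluster_filters = device_filters_pb2.ClusterDeviceFilters(jobs=[worker])
assert cluster_filters.jobs[0].tasks[0].device_filters[0] == "/job:ps"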


@@ -0,0 +1,136 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class DeviceProperties(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class EnvironmentEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
TYPE_FIELD_NUMBER: builtins.int
VENDOR_FIELD_NUMBER: builtins.int
MODEL_FIELD_NUMBER: builtins.int
FREQUENCY_FIELD_NUMBER: builtins.int
NUM_CORES_FIELD_NUMBER: builtins.int
ENVIRONMENT_FIELD_NUMBER: builtins.int
NUM_REGISTERS_FIELD_NUMBER: builtins.int
L1_CACHE_SIZE_FIELD_NUMBER: builtins.int
L2_CACHE_SIZE_FIELD_NUMBER: builtins.int
L3_CACHE_SIZE_FIELD_NUMBER: builtins.int
SHARED_MEMORY_SIZE_PER_MULTIPROCESSOR_FIELD_NUMBER: builtins.int
MEMORY_SIZE_FIELD_NUMBER: builtins.int
BANDWIDTH_FIELD_NUMBER: builtins.int
type: builtins.str
"""Device type (CPU, GPU, ...)"""
vendor: builtins.str
"""Vendor (Intel, nvidia, ...)"""
model: builtins.str
"""Model (Haswell, K40, ...)"""
frequency: builtins.int
"""Core Frequency in Mhz"""
num_cores: builtins.int
"""Number of cores"""
@property
def environment(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""Version of the tools and libraries used with this device (e.g. gcc 4.9,
cudnn 5.1)
"""
num_registers: builtins.int
"""Number of registers per core."""
l1_cache_size: builtins.int
"""L1 cache size in bytes"""
l2_cache_size: builtins.int
"""L2 cache size in bytes"""
l3_cache_size: builtins.int
"""L3 cache size in bytes"""
shared_memory_size_per_multiprocessor: builtins.int
"""Shared memory size per multiprocessor in bytes. This field is
applicable to GPUs only.
"""
memory_size: builtins.int
"""Memory size in bytes"""
bandwidth: builtins.int
"""Memory bandwidth in KB/s"""
def __init__(
self,
*,
type: builtins.str | None = ...,
vendor: builtins.str | None = ...,
model: builtins.str | None = ...,
frequency: builtins.int | None = ...,
num_cores: builtins.int | None = ...,
environment: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
num_registers: builtins.int | None = ...,
l1_cache_size: builtins.int | None = ...,
l2_cache_size: builtins.int | None = ...,
l3_cache_size: builtins.int | None = ...,
shared_memory_size_per_multiprocessor: builtins.int | None = ...,
memory_size: builtins.int | None = ...,
bandwidth: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bandwidth", b"bandwidth", "environment", b"environment", "frequency", b"frequency", "l1_cache_size", b"l1_cache_size", "l2_cache_size", b"l2_cache_size", "l3_cache_size", b"l3_cache_size", "memory_size", b"memory_size", "model", b"model", "num_cores", b"num_cores", "num_registers", b"num_registers", "shared_memory_size_per_multiprocessor", b"shared_memory_size_per_multiprocessor", "type", b"type", "vendor", b"vendor"]) -> None: ...
global___DeviceProperties = DeviceProperties
@typing_extensions.final
class NamedDevice(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
PROPERTIES_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def properties(self) -> global___DeviceProperties: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
properties: global___DeviceProperties | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["properties", b"properties"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "properties", b"properties"]) -> None: ...
global___NamedDevice = NamedDevice
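
A construction sketch for the device description above; the import path
`tensorflow.core.protobuf.device_properties_pb2` is an assumption and the
hardware numbers are illustrative only.

from tensorflow.core.protobuf import device_properties_pb2  # assumed module

gpu = device_properties_pb2.DeviceProperties(
    type="GPU",
    vendor="NVIDIA",
    model="K40",
    frequency=745,  # MHz, per the field comment
    num_cores=15,
    environment={"cuda": "11.2", "cudnn": "8.1"},  # tool/library versions
    memory_size=12 * 1024**3,  # bytes
    bandwidth=288 * 1024**2,   # KB/s, per the field comment
)
named = device_properties_pb2.NamedDevice(name="/device:GPU:0", properties=gpu)
assert named.HasField("properties")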


@@ -0,0 +1,85 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class GrpcPayloadContainer(google.protobuf.message.Message):
"""Used to serialize and transmit tensorflow::Status payloads through
grpc::Status `error_details`, since grpc::Status lacks a payload API.
TODO(b/204231601): Use GRPC API once supported.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class PayloadsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.bytes
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
PAYLOADS_FIELD_NUMBER: builtins.int
@property
def payloads(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.bytes]: ...
def __init__(
self,
*,
payloads: collections.abc.Mapping[builtins.str, builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["payloads", b"payloads"]) -> None: ...
global___GrpcPayloadContainer = GrpcPayloadContainer
@typing_extensions.final
class GrpcPayloadsLost(google.protobuf.message.Message):
"""If included as a payload, this message flags the Status to have lost payloads
during the GRPC transmission.
URI: "type.googleapis.com/tensorflow.distributed_runtime.GrpcPayloadsLost"
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
def __init__(
self,
) -> None: ...
global___GrpcPayloadsLost = GrpcPayloadsLost
@typing_extensions.final
class WorkerPossiblyRestarted(google.protobuf.message.Message):
"""If included as a payload, this message flags the Status to be a possible
outcome of a worker restart.
URI:
"type.googleapis.com/tensorflow.distributed_runtime.WorkerPossiblyRestarted"
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
def __init__(
self,
) -> None: ...
global___WorkerPossiblyRestarted = WorkerPossiblyRestarted
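
A round-trip sketch for the payload container above, using the standard
protobuf wire-format API. The import path
`tensorflow.core.protobuf.distributed_runtime_payloads_pb2` is an assumption,
and the payload key is hypothetical.

from tensorflow.core.protobuf import distributed_runtime_payloads_pb2 as payloads_pb2

container = payloads_pb2.GrpcPayloadContainer(
    payloads={"example.payload.key": b"\x08\x01"}  # hypothetical key/value
)
wire = container.SerializeToString()
assert payloads_pb2.GrpcPayloadContainer.FromString(wire) == container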


@@ -0,0 +1,32 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Add a dummy package name. Having no package, like
core/lib/core/error_codes.proto, or having tensorflow.error, like
tsl/protobuf/error_codes.proto, results in name collision errors in generated
code for some users that use JS through J2CL.
"""
import google.protobuf.descriptor
from tensorflow.tsl.protobuf.error_codes_pb2 import (
ABORTED as ABORTED,
ALREADY_EXISTS as ALREADY_EXISTS,
CANCELLED as CANCELLED,
Code as Code,
DATA_LOSS as DATA_LOSS,
DEADLINE_EXCEEDED as DEADLINE_EXCEEDED,
DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_ as DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_,
FAILED_PRECONDITION as FAILED_PRECONDITION,
INTERNAL as INTERNAL,
INVALID_ARGUMENT as INVALID_ARGUMENT,
NOT_FOUND as NOT_FOUND,
OK as OK,
OUT_OF_RANGE as OUT_OF_RANGE,
PERMISSION_DENIED as PERMISSION_DENIED,
RESOURCE_EXHAUSTED as RESOURCE_EXHAUSTED,
UNAUTHENTICATED as UNAUTHENTICATED,
UNAVAILABLE as UNAVAILABLE,
UNIMPLEMENTED as UNIMPLEMENTED,
UNKNOWN as UNKNOWN,
)
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
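
Since this module only re-exports the `Code` enum, typical use is name/value
lookup through the standard protobuf enum-wrapper API. A sketch assuming the
re-export is importable as `tensorflow.core.protobuf.error_codes_pb2`:

from tensorflow.core.protobuf import error_codes_pb2  # assumed runtime module

assert error_codes_pb2.OK == 0  # canonical gRPC status-code values
assert error_codes_pb2.Code.Name(error_codes_pb2.NOT_FOUND) == "NOT_FOUND"
assert error_codes_pb2.Code.Value("ABORTED") == error_codes_pb2.ABORTED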


@@ -0,0 +1,60 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.framework.versions_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class FingerprintDef(google.protobuf.message.Message):
"""Protocol buffer representing a SavedModel Fingerprint.
If there are multiple MetaGraphDefs in the SavedModel, the FingerprintDef
corresponds to the first one.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
GRAPH_DEF_CHECKSUM_FIELD_NUMBER: builtins.int
GRAPH_DEF_PROGRAM_HASH_FIELD_NUMBER: builtins.int
SIGNATURE_DEF_HASH_FIELD_NUMBER: builtins.int
SAVED_OBJECT_GRAPH_HASH_FIELD_NUMBER: builtins.int
CHECKPOINT_HASH_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
graph_def_checksum: builtins.int
"""Hash of the graph_def, referred to as a "checksum"."""
graph_def_program_hash: builtins.int
"""Hash of regularized graph_def."""
signature_def_hash: builtins.int
"""Hash of the regularized (sorted) SignatureDefs."""
saved_object_graph_hash: builtins.int
"""Hash of the regularized SavedObjectGraph."""
checkpoint_hash: builtins.int
"""Hash of the checkpoint."""
@property
def version(self) -> tensorflow.core.framework.versions_pb2.VersionDef:
"""Version specification of the fingerprint."""
def __init__(
self,
*,
graph_def_checksum: builtins.int | None = ...,
graph_def_program_hash: builtins.int | None = ...,
signature_def_hash: builtins.int | None = ...,
saved_object_graph_hash: builtins.int | None = ...,
checkpoint_hash: builtins.int | None = ...,
version: tensorflow.core.framework.versions_pb2.VersionDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["version", b"version"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["checkpoint_hash", b"checkpoint_hash", "graph_def_checksum", b"graph_def_checksum", "graph_def_program_hash", b"graph_def_program_hash", "saved_object_graph_hash", b"saved_object_graph_hash", "signature_def_hash", b"signature_def_hash", "version", b"version"]) -> None: ...
global___FingerprintDef = FingerprintDef
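
A construction sketch for the fingerprint above; the import path
`tensorflow.core.protobuf.fingerprint_pb2` is an assumption and the hash
values are placeholders, not real fingerprints.

from tensorflow.core.framework import versions_pb2
from tensorflow.core.protobuf import fingerprint_pb2  # assumed runtime module

fp = fingerprint_pb2.FingerprintDef(
    graph_def_checksum=0xDEADBEEF,  # placeholder hashes for illustration
    graph_def_program_hash=1,
    signature_def_hash=2,
    saved_object_graph_hash=3,
    checkpoint_hash=4,
    version=versions_pb2.VersionDef(producer=1),
)
assert fp.HasField("version")  # submessage presence is tracked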


@@ -0,0 +1,120 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class GraphDebugInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FileLineCol(google.protobuf.message.Message):
"""This represents a file/line location in the source code."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FILE_INDEX_FIELD_NUMBER: builtins.int
LINE_FIELD_NUMBER: builtins.int
COL_FIELD_NUMBER: builtins.int
FUNC_FIELD_NUMBER: builtins.int
CODE_FIELD_NUMBER: builtins.int
file_index: builtins.int
"""File name index, which can be used to retrieve the file name string from
`files`. The value should be between 0 and (len(files)-1)
"""
line: builtins.int
"""Line number in the file."""
col: builtins.int
"""Col number in the file line."""
func: builtins.str
"""Name of function contains the file line."""
code: builtins.str
"""Source code contained in this file line."""
def __init__(
self,
*,
file_index: builtins.int | None = ...,
line: builtins.int | None = ...,
col: builtins.int | None = ...,
func: builtins.str | None = ...,
code: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["code", b"code", "col", b"col", "file_index", b"file_index", "func", b"func", "line", b"line"]) -> None: ...
@typing_extensions.final
class StackTrace(google.protobuf.message.Message):
"""This represents a stack trace which is a ordered list of `FileLineCol`."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FILE_LINE_COLS_FIELD_NUMBER: builtins.int
@property
def file_line_cols(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___GraphDebugInfo.FileLineCol]:
"""Each line in the stack trace."""
def __init__(
self,
*,
file_line_cols: collections.abc.Iterable[global___GraphDebugInfo.FileLineCol] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["file_line_cols", b"file_line_cols"]) -> None: ...
@typing_extensions.final
class TracesEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___GraphDebugInfo.StackTrace: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___GraphDebugInfo.StackTrace | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FILES_FIELD_NUMBER: builtins.int
TRACES_FIELD_NUMBER: builtins.int
@property
def files(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""This stores all the source code file names and can be indexed by the
`file_index`.
"""
@property
def traces(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___GraphDebugInfo.StackTrace]:
"""This maps a node name to a stack trace in the source code.
The map key is a mangling of the containing function and op name with
syntax:
op.name '@' func_name
For ops in the top-level graph, the func_name is the empty string.
Note that op names are restricted to a small number of characters which
exclude '@', making it impossible to collide keys of this form. Function
names accept a much wider set of characters.
It would be preferable to avoid mangling and use a tuple key of (op.name,
func_name), but this is not supported with protocol buffers.
"""
def __init__(
self,
*,
files: collections.abc.Iterable[builtins.str] | None = ...,
traces: collections.abc.Mapping[builtins.str, global___GraphDebugInfo.StackTrace] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["files", b"files", "traces", b"traces"]) -> None: ...
global___GraphDebugInfo = GraphDebugInfo
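
A sketch of the documented key mangling (`op.name '@' func_name`) in use,
assuming the generated module is importable as
`tensorflow.core.protobuf.graph_debug_info_pb2`; names and locations are
illustrative.

from tensorflow.core.protobuf import graph_debug_info_pb2  # assumed module

info = graph_debug_info_pb2.GraphDebugInfo(
    files=["model.py"],  # indexed by FileLineCol.file_index
    traces={
        # Empty func_name after '@' means the op is in the top-level graph.
        "dense/MatMul@": graph_debug_info_pb2.GraphDebugInfo.StackTrace(
            file_line_cols=[
                graph_debug_info_pb2.GraphDebugInfo.FileLineCol(
                    file_index=0, line=42, col=8, func="build_model")
            ]
        )
    },
)
assert info.files[info.traces["dense/MatMul@"].file_line_cols[0].file_index] == "model.py"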


@@ -0,0 +1,669 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.any_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.graph_pb2
import tensorflow.core.framework.op_def_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
import tensorflow.core.protobuf.saved_object_graph_pb2
import tensorflow.core.protobuf.saver_pb2
import tensorflow.core.protobuf.struct_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class MetaGraphDef(google.protobuf.message.Message):
"""Protocol buffer containing the following which are necessary to restart
training, run inference. It can be used to serialize/de-serialize memory
objects necessary for running computation in a graph when crossing the
process boundary. It can be used for long term storage of graphs,
cross-language execution of graphs, etc.
MetaInfoDef
GraphDef
SaverDef
CollectionDef
TensorInfo
SignatureDef
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class MetaInfoDef(google.protobuf.message.Message):
"""Meta information regarding the graph to be exported. To be used by users
of this protocol buffer to encode information regarding their meta graph.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FunctionAliasesEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
META_GRAPH_VERSION_FIELD_NUMBER: builtins.int
STRIPPED_OP_LIST_FIELD_NUMBER: builtins.int
ANY_INFO_FIELD_NUMBER: builtins.int
TAGS_FIELD_NUMBER: builtins.int
TENSORFLOW_VERSION_FIELD_NUMBER: builtins.int
TENSORFLOW_GIT_VERSION_FIELD_NUMBER: builtins.int
STRIPPED_DEFAULT_ATTRS_FIELD_NUMBER: builtins.int
FUNCTION_ALIASES_FIELD_NUMBER: builtins.int
meta_graph_version: builtins.str
"""User specified Version string. Can be the name of the model and revision,
steps this model has been trained to, etc.
"""
@property
def stripped_op_list(self) -> tensorflow.core.framework.op_def_pb2.OpList:
"""A copy of the OpDefs used by the producer of this graph_def.
Descriptions and Ops not used in graph_def are stripped out.
"""
@property
def any_info(self) -> google.protobuf.any_pb2.Any:
"""A serialized protobuf. Can be the time this meta graph is created, or
modified, or name of the model.
"""
@property
def tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""User supplied tag(s) on the meta_graph and included graph_def.
MetaGraphDefs should be tagged with their capabilities or use-cases.
Examples: "train", "serve", "gpu", "tpu", etc.
These tags enable loaders to access the MetaGraph(s) appropriate for a
specific use-case or runtime environment.
"""
tensorflow_version: builtins.str
"""The __version__ string of the tensorflow build used to write this graph.
This will be populated by the framework, which will overwrite any user
supplied value.
"""
tensorflow_git_version: builtins.str
"""The __git_version__ string of the tensorflow build used to write this
graph. This will be populated by the framework, which will overwrite any
user supplied value.
"""
stripped_default_attrs: builtins.bool
"""A flag to denote whether default-valued attrs have been stripped from
the nodes in this graph_def.
"""
@property
def function_aliases(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""FunctionDef name to aliases mapping."""
def __init__(
self,
*,
meta_graph_version: builtins.str | None = ...,
stripped_op_list: tensorflow.core.framework.op_def_pb2.OpList | None = ...,
any_info: google.protobuf.any_pb2.Any | None = ...,
tags: collections.abc.Iterable[builtins.str] | None = ...,
tensorflow_version: builtins.str | None = ...,
tensorflow_git_version: builtins.str | None = ...,
stripped_default_attrs: builtins.bool | None = ...,
function_aliases: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["any_info", b"any_info", "stripped_op_list", b"stripped_op_list"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["any_info", b"any_info", "function_aliases", b"function_aliases", "meta_graph_version", b"meta_graph_version", "stripped_default_attrs", b"stripped_default_attrs", "stripped_op_list", b"stripped_op_list", "tags", b"tags", "tensorflow_git_version", b"tensorflow_git_version", "tensorflow_version", b"tensorflow_version"]) -> None: ...
@typing_extensions.final
class CollectionDefEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___CollectionDef: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___CollectionDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class SignatureDefEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___SignatureDef: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___SignatureDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
META_INFO_DEF_FIELD_NUMBER: builtins.int
GRAPH_DEF_FIELD_NUMBER: builtins.int
SAVER_DEF_FIELD_NUMBER: builtins.int
COLLECTION_DEF_FIELD_NUMBER: builtins.int
SIGNATURE_DEF_FIELD_NUMBER: builtins.int
ASSET_FILE_DEF_FIELD_NUMBER: builtins.int
OBJECT_GRAPH_DEF_FIELD_NUMBER: builtins.int
@property
def meta_info_def(self) -> global___MetaGraphDef.MetaInfoDef: ...
@property
def graph_def(self) -> tensorflow.core.framework.graph_pb2.GraphDef:
"""GraphDef."""
@property
def saver_def(self) -> tensorflow.core.protobuf.saver_pb2.SaverDef:
"""SaverDef."""
@property
def collection_def(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___CollectionDef]:
"""collection_def: Map from collection name to collections.
See CollectionDef section for details.
"""
@property
def signature_def(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___SignatureDef]:
"""signature_def: Map from user supplied key for a signature to a single
SignatureDef.
"""
@property
def asset_file_def(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AssetFileDef]:
"""Asset file def to be used with the defined graph."""
@property
def object_graph_def(self) -> tensorflow.core.protobuf.saved_object_graph_pb2.SavedObjectGraph:
"""Extra information about the structure of functions and stateful objects."""
def __init__(
self,
*,
meta_info_def: global___MetaGraphDef.MetaInfoDef | None = ...,
graph_def: tensorflow.core.framework.graph_pb2.GraphDef | None = ...,
saver_def: tensorflow.core.protobuf.saver_pb2.SaverDef | None = ...,
collection_def: collections.abc.Mapping[builtins.str, global___CollectionDef] | None = ...,
signature_def: collections.abc.Mapping[builtins.str, global___SignatureDef] | None = ...,
asset_file_def: collections.abc.Iterable[global___AssetFileDef] | None = ...,
object_graph_def: tensorflow.core.protobuf.saved_object_graph_pb2.SavedObjectGraph | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["graph_def", b"graph_def", "meta_info_def", b"meta_info_def", "object_graph_def", b"object_graph_def", "saver_def", b"saver_def"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["asset_file_def", b"asset_file_def", "collection_def", b"collection_def", "graph_def", b"graph_def", "meta_info_def", b"meta_info_def", "object_graph_def", b"object_graph_def", "saver_def", b"saver_def", "signature_def", b"signature_def"]) -> None: ...
global___MetaGraphDef = MetaGraphDef
@typing_extensions.final
class CollectionDef(google.protobuf.message.Message):
"""CollectionDef should cover most collections.
To add a user-defined collection, do one of the following:
1. For simple data types, such as string, int, float:
tf.add_to_collection("your_collection_name", your_simple_value)
Strings will be stored as bytes_list.
2. For Protobuf types, there are three ways to add them:
1) tf.add_to_collection("your_collection_name",
your_proto.SerializeToString())
collection_def {
key: "user_defined_bytes_collection"
value {
bytes_list {
value: "queue_name: \\"test_queue\\"\\n"
}
}
}
or
2) tf.add_to_collection("your_collection_name", str(your_proto))
collection_def {
key: "user_defined_string_collection"
value {
bytes_list {
value: "\\n\\ntest_queue"
}
}
}
or
3) any_buf = any_pb2.Any()
tf.add_to_collection("your_collection_name",
any_buf.Pack(your_proto))
collection_def {
key: "user_defined_any_collection"
value {
any_list {
value {
type_url: "type.googleapis.com/tensorflow.QueueRunnerDef"
value: "\\n\\ntest_queue"
}
}
}
}
3. For Python objects, implement to_proto() and from_proto(), and register
them in the following manner:
ops.register_proto_function("your_collection_name",
proto_type,
to_proto=YourPythonObject.to_proto,
from_proto=YourPythonObject.from_proto)
These functions will be invoked to serialize and de-serialize the
collection. For example,
ops.register_proto_function(ops.GraphKeys.GLOBAL_VARIABLES,
proto_type=variable_pb2.VariableDef,
to_proto=Variable.to_proto,
from_proto=Variable.from_proto)
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class NodeList(google.protobuf.message.Message):
"""NodeList is used for collecting nodes in graph. For example
collection_def {
key: "summaries"
value {
node_list {
value: "input_producer/ScalarSummary:0"
value: "shuffle_batch/ScalarSummary:0"
value: "ImageSummary:0"
}
}
}
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
@typing_extensions.final
class BytesList(google.protobuf.message.Message):
"""BytesList is used for collecting strings and serialized protobufs. For
example:
collection_def {
key: "trainable_variables"
value {
bytes_list {
value: "\\n\\017conv1/weights:0\\022\\024conv1/weights/Assign
\\032\\024conv1/weights/read:0"
value: "\\n\\016conv1/biases:0\\022\\023conv1/biases/Assign\\032
\\023conv1/biases/read:0"
}
}
}
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
@typing_extensions.final
class Int64List(google.protobuf.message.Message):
"""Int64List is used for collecting int, int64 and long values."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
@typing_extensions.final
class FloatList(google.protobuf.message.Message):
"""FloatList is used for collecting float values."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[builtins.float] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
@typing_extensions.final
class AnyList(google.protobuf.message.Message):
"""AnyList is used for collecting Any protos."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
@property
def value(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]: ...
def __init__(
self,
*,
value: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["value", b"value"]) -> None: ...
NODE_LIST_FIELD_NUMBER: builtins.int
BYTES_LIST_FIELD_NUMBER: builtins.int
INT64_LIST_FIELD_NUMBER: builtins.int
FLOAT_LIST_FIELD_NUMBER: builtins.int
ANY_LIST_FIELD_NUMBER: builtins.int
@property
def node_list(self) -> global___CollectionDef.NodeList: ...
@property
def bytes_list(self) -> global___CollectionDef.BytesList: ...
@property
def int64_list(self) -> global___CollectionDef.Int64List: ...
@property
def float_list(self) -> global___CollectionDef.FloatList: ...
@property
def any_list(self) -> global___CollectionDef.AnyList: ...
def __init__(
self,
*,
node_list: global___CollectionDef.NodeList | None = ...,
bytes_list: global___CollectionDef.BytesList | None = ...,
int64_list: global___CollectionDef.Int64List | None = ...,
float_list: global___CollectionDef.FloatList | None = ...,
any_list: global___CollectionDef.AnyList | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["any_list", b"any_list", "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind", "node_list", b"node_list"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["any_list", b"any_list", "bytes_list", b"bytes_list", "float_list", b"float_list", "int64_list", b"int64_list", "kind", b"kind", "node_list", b"node_list"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["node_list", "bytes_list", "int64_list", "float_list", "any_list"] | None: ...
global___CollectionDef = CollectionDef
@typing_extensions.final
class TensorInfo(google.protobuf.message.Message):
"""Information about a Tensor necessary for feeding or retrieval."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class CooSparse(google.protobuf.message.Message):
"""For sparse tensors, The COO encoding stores a triple of values, indices,
and shape.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUES_TENSOR_NAME_FIELD_NUMBER: builtins.int
INDICES_TENSOR_NAME_FIELD_NUMBER: builtins.int
DENSE_SHAPE_TENSOR_NAME_FIELD_NUMBER: builtins.int
values_tensor_name: builtins.str
"""The shape of the values Tensor is [?]. Its dtype must be the dtype of
the SparseTensor as a whole, given in the enclosing TensorInfo.
"""
indices_tensor_name: builtins.str
"""The indices Tensor must have dtype int64 and shape [?, ?]."""
dense_shape_tensor_name: builtins.str
"""The dynamic logical shape represented by the SparseTensor is recorded in
the Tensor referenced here. It must have dtype int64 and shape [?].
"""
def __init__(
self,
*,
values_tensor_name: builtins.str | None = ...,
indices_tensor_name: builtins.str | None = ...,
dense_shape_tensor_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["dense_shape_tensor_name", b"dense_shape_tensor_name", "indices_tensor_name", b"indices_tensor_name", "values_tensor_name", b"values_tensor_name"]) -> None: ...
@typing_extensions.final
class CompositeTensor(google.protobuf.message.Message):
"""Generic encoding for composite tensors."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TYPE_SPEC_FIELD_NUMBER: builtins.int
COMPONENTS_FIELD_NUMBER: builtins.int
@property
def type_spec(self) -> tensorflow.core.protobuf.struct_pb2.TypeSpecProto:
"""The serialized TypeSpec for the composite tensor."""
@property
def components(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorInfo]:
"""A TensorInfo for each flattened component tensor."""
def __init__(
self,
*,
type_spec: tensorflow.core.protobuf.struct_pb2.TypeSpecProto | None = ...,
components: collections.abc.Iterable[global___TensorInfo] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["type_spec", b"type_spec"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["components", b"components", "type_spec", b"type_spec"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
COO_SPARSE_FIELD_NUMBER: builtins.int
COMPOSITE_TENSOR_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
TENSOR_SHAPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""For dense `Tensor`s, the name of the tensor in the graph."""
@property
def coo_sparse(self) -> global___TensorInfo.CooSparse:
"""There are many possible encodings of sparse matrices
(https://en.wikipedia.org/wiki/Sparse_matrix). Currently, TensorFlow
uses only the COO encoding. This is supported and documented in the
SparseTensor Python class.
"""
@property
def composite_tensor(self) -> global___TensorInfo.CompositeTensor:
"""Generic encoding for CompositeTensors."""
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def tensor_shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
"""The static shape should be recorded here, to the extent that it can
be known in advance. In the case of a SparseTensor, this field describes
the logical shape of the represented tensor (aka dense_shape).
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
coo_sparse: global___TensorInfo.CooSparse | None = ...,
composite_tensor: global___TensorInfo.CompositeTensor | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
tensor_shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["composite_tensor", b"composite_tensor", "coo_sparse", b"coo_sparse", "encoding", b"encoding", "name", b"name", "tensor_shape", b"tensor_shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["composite_tensor", b"composite_tensor", "coo_sparse", b"coo_sparse", "dtype", b"dtype", "encoding", b"encoding", "name", b"name", "tensor_shape", b"tensor_shape"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["encoding", b"encoding"]) -> typing_extensions.Literal["name", "coo_sparse", "composite_tensor"] | None: ...
global___TensorInfo = TensorInfo
@typing_extensions.final
class SignatureDef(google.protobuf.message.Message):
"""SignatureDef defines the signature of a computation supported by a TensorFlow
graph.
For example, a model with two loss computations, sharing a single input,
might have the following signature_def map, in a MetaGraphDef message.
Note that across the two SignatureDefs "loss_A" and "loss_B", the input key,
output key, and method_name are identical, and will be used by system(s) that
implement or rely upon this particular loss method. The output tensor names
differ, demonstrating how different outputs can exist for the same method.
signature_def {
key: "loss_A"
value {
inputs {
key: "input"
value {
name: "input:0"
dtype: DT_STRING
tensor_shape: ...
}
}
outputs {
key: "loss_output"
value {
name: "loss_output_A:0"
dtype: DT_FLOAT
tensor_shape: ...
}
}
method_name: "some/package/compute_loss"
}
...
}
signature_def {
key: "loss_B"
value {
inputs {
key: "input"
value {
name: "input:0"
dtype: DT_STRING
tensor_shape: ...
}
}
outputs {
key: "loss_output"
value {
name: "loss_output_B:0"
dtype: DT_FLOAT
tensor_shape: ...
}
}
method_name: "some/package/compute_loss"
}
...
}
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class InputsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___TensorInfo: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___TensorInfo | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
@typing_extensions.final
class OutputsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___TensorInfo: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___TensorInfo | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
INPUTS_FIELD_NUMBER: builtins.int
OUTPUTS_FIELD_NUMBER: builtins.int
METHOD_NAME_FIELD_NUMBER: builtins.int
@property
def inputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___TensorInfo]:
"""Named input parameters."""
@property
def outputs(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___TensorInfo]:
"""Named output parameters."""
method_name: builtins.str
"""Extensible method_name information enabling third-party users to mark a
SignatureDef as supporting a particular method. This enables producers and
consumers of SignatureDefs, e.g. a model definition library and a serving
library, to have a clear hand-off regarding the semantics of a computation.
Note that multiple SignatureDefs in a single MetaGraphDef may have the same
method_name. This is commonly used to support multi-headed computation,
where a single graph computation may return multiple results.
"""
def __init__(
self,
*,
inputs: collections.abc.Mapping[builtins.str, global___TensorInfo] | None = ...,
outputs: collections.abc.Mapping[builtins.str, global___TensorInfo] | None = ...,
method_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["inputs", b"inputs", "method_name", b"method_name", "outputs", b"outputs"]) -> None: ...
global___SignatureDef = SignatureDef
@typing_extensions.final
class AssetFileDef(google.protobuf.message.Message):
"""An asset file def for a single file or a set of sharded files with the same
name.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_INFO_FIELD_NUMBER: builtins.int
FILENAME_FIELD_NUMBER: builtins.int
@property
def tensor_info(self) -> global___TensorInfo:
"""The tensor to bind the asset filename to."""
filename: builtins.str
"""The filename within an assets directory. Note: does not include the path
prefix, i.e. directories. For an asset at /tmp/path/vocab.txt, the filename
would be "vocab.txt".
"""
def __init__(
self,
*,
tensor_info: global___TensorInfo | None = ...,
filename: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor_info", b"tensor_info"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["filename", b"filename", "tensor_info", b"tensor_info"]) -> None: ...
global___AssetFileDef = AssetFileDef
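
The "loss_A" signature from the SignatureDef docstring can be assembled in
Python roughly as below; a sketch assuming the generated module is importable
as `tensorflow.core.protobuf.meta_graph_pb2`.

from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import meta_graph_pb2  # assumed runtime module

sig = meta_graph_pb2.SignatureDef(
    inputs={"input": meta_graph_pb2.TensorInfo(
        name="input:0", dtype=types_pb2.DT_STRING)},
    outputs={"loss_output": meta_graph_pb2.TensorInfo(
        name="loss_output_A:0", dtype=types_pb2.DT_FLOAT)},
    method_name="some/package/compute_loss",
)
meta = meta_graph_pb2.MetaGraphDef(signature_def={"loss_A": sig})
# A plain tensor name selects the "name" member of TensorInfo's oneof:
assert meta.signature_def["loss_A"].inputs["input"].WhichOneof("encoding") == "name"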


@@ -0,0 +1,46 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class NamedTensorProto(google.protobuf.message.Message):
"""A pair of tensor name and tensor values."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
TENSOR_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of the tensor."""
@property
def tensor(self) -> tensorflow.core.framework.tensor_pb2.TensorProto:
"""The client can populate a TensorProto using a tensorflow::Tensor`, or
directly using the protobuf field accessors.
The client specifies whether the returned tensor values should be
filled tensor fields (float_val, int_val, etc.) or encoded in a
compact form in tensor.tensor_content.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
tensor: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor", b"tensor"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "tensor", b"tensor"]) -> None: ...
global___NamedTensorProto = NamedTensorProto
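
A population sketch for the pair above, filling the tensor via the explicit
`float_val` field accessors mentioned in the comment; the import path
`tensorflow.core.protobuf.named_tensor_pb2` is an assumption.

from tensorflow.core.framework import tensor_pb2, types_pb2
from tensorflow.core.protobuf import named_tensor_pb2  # assumed runtime module

named = named_tensor_pb2.NamedTensorProto(
    name="logits",
    tensor=tensor_pb2.TensorProto(
        dtype=types_pb2.DT_FLOAT,
        float_val=[0.1, 0.9],  # filled tensor fields, not tensor_content
    ),
)
assert named.HasField("tensor")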


@@ -0,0 +1,56 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.tsl.protobuf.error_codes_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class QueueRunnerDef(google.protobuf.message.Message):
"""Protocol buffer representing a QueueRunner."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
QUEUE_NAME_FIELD_NUMBER: builtins.int
ENQUEUE_OP_NAME_FIELD_NUMBER: builtins.int
CLOSE_OP_NAME_FIELD_NUMBER: builtins.int
CANCEL_OP_NAME_FIELD_NUMBER: builtins.int
QUEUE_CLOSED_EXCEPTION_TYPES_FIELD_NUMBER: builtins.int
queue_name: builtins.str
"""Queue name."""
@property
def enqueue_op_name(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""A list of enqueue operations."""
close_op_name: builtins.str
"""The operation to run to close the queue."""
cancel_op_name: builtins.str
"""The operation to run to cancel the queue."""
@property
def queue_closed_exception_types(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType]:
"""A list of exception types considered to signal a safely closed queue
if raised during enqueue operations.
"""
def __init__(
self,
*,
queue_name: builtins.str | None = ...,
enqueue_op_name: collections.abc.Iterable[builtins.str] | None = ...,
close_op_name: builtins.str | None = ...,
cancel_op_name: builtins.str | None = ...,
queue_closed_exception_types: collections.abc.Iterable[tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cancel_op_name", b"cancel_op_name", "close_op_name", b"close_op_name", "enqueue_op_name", b"enqueue_op_name", "queue_closed_exception_types", b"queue_closed_exception_types", "queue_name", b"queue_name"]) -> None: ...
global___QueueRunnerDef = QueueRunnerDef
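
A construction sketch for the queue runner above. The import paths are
assumptions, OUT_OF_RANGE is shown because it is the usual closed-queue
signal in TensorFlow, and the op names are illustrative.

from tensorflow.core.protobuf import queue_runner_pb2  # assumed runtime module
from tensorflow.tsl.protobuf import error_codes_pb2    # assumed runtime module

qr = queue_runner_pb2.QueueRunnerDef(
    queue_name="input_queue",
    enqueue_op_name=["input_queue/enqueue"],  # repeated: one per enqueue op
    close_op_name="input_queue/close",
    cancel_op_name="input_queue/cancel",
    queue_closed_exception_types=[error_codes_pb2.OUT_OF_RANGE],
)
assert qr.queue_closed_exception_types[0] == error_codes_pb2.OUT_OF_RANGE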


@@ -0,0 +1,80 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ResourceDtypeAndShape(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "shape", b"shape"]) -> None: ...
global___ResourceDtypeAndShape = ResourceDtypeAndShape
@typing_extensions.final
class RemoteTensorHandle(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OP_ID_FIELD_NUMBER: builtins.int
OUTPUT_NUM_FIELD_NUMBER: builtins.int
DEVICE_FIELD_NUMBER: builtins.int
OP_DEVICE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
RESOURCE_DTYPES_AND_SHAPES_FIELD_NUMBER: builtins.int
op_id: builtins.int
"""The ID of the operation that produced this tensor."""
output_num: builtins.int
"""The index into the outputs of the operation that produced this tensor."""
device: builtins.str
"""Device where the tensor is located. Cannot be empty.
For multi-device functions, it's the default device passed to the placer.
"""
op_device: builtins.str
"""Device of the operation producing this tensor. Can be empty if the
operation producing this tensor is a multi-device function.
"""
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
"""Tensor type."""
@property
def resource_dtypes_and_shapes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceDtypeAndShape]:
"""Optional data types and shapes of a remote resource variable."""
def __init__(
self,
*,
op_id: builtins.int | None = ...,
output_num: builtins.int | None = ...,
device: builtins.str | None = ...,
op_device: builtins.str | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
resource_dtypes_and_shapes: collections.abc.Iterable[global___ResourceDtypeAndShape] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device", b"device", "dtype", b"dtype", "op_device", b"op_device", "op_id", b"op_id", "output_num", b"output_num", "resource_dtypes_and_shapes", b"resource_dtypes_and_shapes"]) -> None: ...
global___RemoteTensorHandle = RemoteTensorHandle
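
A construction sketch for a resource-typed handle, combining both messages
above; the import path `tensorflow.core.protobuf.remote_tensor_handle_pb2`
is an assumption and the device strings are illustrative.

from tensorflow.core.framework import tensor_shape_pb2, types_pb2
from tensorflow.core.protobuf import remote_tensor_handle_pb2  # assumed module

handle = remote_tensor_handle_pb2.RemoteTensorHandle(
    op_id=7,
    output_num=0,
    device="/job:worker/replica:0/task:1/device:CPU:0",  # must not be empty
    dtype=types_pb2.DT_RESOURCE,
    resource_dtypes_and_shapes=[
        remote_tensor_handle_pb2.ResourceDtypeAndShape(
            dtype=types_pb2.DT_FLOAT,
            shape=tensor_shape_pb2.TensorShapeProto(
                dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=2)]),
        )
    ],
)
assert handle.resource_dtypes_and_shapes[0].shape.dim[0].size == 2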


@@ -0,0 +1,486 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.attr_value_pb2
import tensorflow.core.protobuf.verifier_config_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class AutoParallelOptions(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ENABLE_FIELD_NUMBER: builtins.int
NUM_REPLICAS_FIELD_NUMBER: builtins.int
enable: builtins.bool
num_replicas: builtins.int
def __init__(
self,
*,
enable: builtins.bool | None = ...,
num_replicas: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["enable", b"enable", "num_replicas", b"num_replicas"]) -> None: ...
global___AutoParallelOptions = AutoParallelOptions
@typing_extensions.final
class ScopedAllocatorOptions(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ENABLE_OP_FIELD_NUMBER: builtins.int
@property
def enable_op(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""If present, only perform optimization for these ops."""
def __init__(
self,
*,
enable_op: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["enable_op", b"enable_op"]) -> None: ...
global___ScopedAllocatorOptions = ScopedAllocatorOptions
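
A brief sketch of the two option messages above (module path assumed to be tensorflow.core.protobuf.rewriter_config_pb2, matching the proto layout); the enable_op entry is an illustrative op name:

from tensorflow.core.protobuf.rewriter_config_pb2 import AutoParallelOptions, ScopedAllocatorOptions

auto_parallel = AutoParallelOptions(enable=True, num_replicas=2)
scoped_alloc = ScopedAllocatorOptions(enable_op=["CollectiveReduce"])  # illustrative op name

Both plug into RewriterConfig below via its auto_parallel and scoped_allocator_opts fields.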
@typing_extensions.final
class RewriterConfig(google.protobuf.message.Message):
"""Graph rewriting is experimental and subject to change, not covered by any
API stability guarantees.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Toggle:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ToggleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._Toggle.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT: RewriterConfig._Toggle.ValueType # 0
ON: RewriterConfig._Toggle.ValueType # 1
OFF: RewriterConfig._Toggle.ValueType # 2
AGGRESSIVE: RewriterConfig._Toggle.ValueType # 3
"""Enable some aggressive optimizations that use assumptions that TF graphs
may break. For example, assume the shape of a placeholder matches its
actual feed.
"""
EXPERIMENTAL_MLIR: RewriterConfig._Toggle.ValueType # 4
"""Run MLIR pass if there's one implemented in TFG, do nothing otherwise.
I.e., if there's no corresponding TFG pass, it's an OFF. This is supposed
to be mapped with `ON` and there's no `AGGRESSIVE` in MLIR pass now.
"""
EXPERIMENTAL_BOTH: RewriterConfig._Toggle.ValueType # 5
"""Run both MLIR and Grappler passes consecutively and MLIR pass will come
first.
"""
class Toggle(_Toggle, metaclass=_ToggleEnumTypeWrapper):
"""Configuration options for the meta-optimizer. Unless otherwise noted, these
configuration options do not apply to explicitly triggered optimization
passes in the optimizers field.
"""
DEFAULT: RewriterConfig.Toggle.ValueType # 0
ON: RewriterConfig.Toggle.ValueType # 1
OFF: RewriterConfig.Toggle.ValueType # 2
AGGRESSIVE: RewriterConfig.Toggle.ValueType # 3
"""Enable some aggressive optimizations that use assumptions that TF graphs
may break. For example, assume the shape of a placeholder matches its
actual feed.
"""
EXPERIMENTAL_MLIR: RewriterConfig.Toggle.ValueType # 4
"""Run MLIR pass if there's one implemented in TFG, do nothing otherwise.
I.e., if there's no corresponding TFG pass, it's an OFF. This is supposed
to be mapped with `ON` and there's no `AGGRESSIVE` in MLIR pass now.
"""
EXPERIMENTAL_BOTH: RewriterConfig.Toggle.ValueType # 5
"""Run both MLIR and Grappler passes consecutively and MLIR pass will come
first.
"""
class _CpuLayout:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _CpuLayoutEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._CpuLayout.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
NO_CONVERSION_ON_CPU: RewriterConfig._CpuLayout.ValueType # 0
NCHW_TO_NHWC: RewriterConfig._CpuLayout.ValueType # 1
NHWC_TO_NCHW: RewriterConfig._CpuLayout.ValueType # 2
class CpuLayout(_CpuLayout, metaclass=_CpuLayoutEnumTypeWrapper):
"""Enum for layout conversion between NCHW and NHWC on CPU. Default is OFF."""
NO_CONVERSION_ON_CPU: RewriterConfig.CpuLayout.ValueType # 0
NCHW_TO_NHWC: RewriterConfig.CpuLayout.ValueType # 1
NHWC_TO_NCHW: RewriterConfig.CpuLayout.ValueType # 2
class _NumIterationsType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _NumIterationsTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._NumIterationsType.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT_NUM_ITERS: RewriterConfig._NumIterationsType.ValueType # 0
ONE: RewriterConfig._NumIterationsType.ValueType # 1
TWO: RewriterConfig._NumIterationsType.ValueType # 2
class NumIterationsType(_NumIterationsType, metaclass=_NumIterationsTypeEnumTypeWrapper):
"""Enum controlling the number of times to run optimizers. The default is to
run them twice.
"""
DEFAULT_NUM_ITERS: RewriterConfig.NumIterationsType.ValueType # 0
ONE: RewriterConfig.NumIterationsType.ValueType # 1
TWO: RewriterConfig.NumIterationsType.ValueType # 2
class _MemOptType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _MemOptTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[RewriterConfig._MemOptType.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT_MEM_OPT: RewriterConfig._MemOptType.ValueType # 0
"""The default setting (SCHEDULING and SWAPPING HEURISTICS only)"""
NO_MEM_OPT: RewriterConfig._MemOptType.ValueType # 1
"""Disabled in the meta-optimizer."""
MANUAL: RewriterConfig._MemOptType.ValueType # 2
"""Driven by manual op-level annotations."""
SWAPPING_HEURISTICS: RewriterConfig._MemOptType.ValueType # 4
"""Driven by heuristics. The behavior of these heuristics is subject to
change. Currently includes an experimental recomputation and swapping
heuristics. Manual annotations are respected, but additional nodes are
selected automatically.
Swapping heuristic will move a tensor from the GPU to the CPU and move
it back when needed to reduce peak memory usage.
"""
RECOMPUTATION_HEURISTICS: RewriterConfig._MemOptType.ValueType # 5
"""Recomputation heuristics will recompute ops (such as Relu activation)
during backprop instead of storing them, reducing peak memory usage.
"""
SCHEDULING_HEURISTICS: RewriterConfig._MemOptType.ValueType # 6
"""Scheduling will split big ops such as AddN and try to enforce a schedule
of the new computations that decreases peak memory usage.
"""
HEURISTICS: RewriterConfig._MemOptType.ValueType # 3
"""Use any combination of swapping and recomputation heuristics."""
class MemOptType(_MemOptType, metaclass=_MemOptTypeEnumTypeWrapper): ...
DEFAULT_MEM_OPT: RewriterConfig.MemOptType.ValueType # 0
"""The default setting (SCHEDULING and SWAPPING HEURISTICS only)"""
NO_MEM_OPT: RewriterConfig.MemOptType.ValueType # 1
"""Disabled in the meta-optimizer."""
MANUAL: RewriterConfig.MemOptType.ValueType # 2
"""Driven by manual op-level annotations."""
SWAPPING_HEURISTICS: RewriterConfig.MemOptType.ValueType # 4
"""Driven by heuristics. The behavior of these heuristics is subject to
change. Currently includes an experimental recomputation and swapping
heuristics. Manual annotations are respected, but additional nodes are
selected automatically.
Swapping heuristic will move a tensor from the GPU to the CPU and move
it back when needed to reduce peak memory usage.
"""
RECOMPUTATION_HEURISTICS: RewriterConfig.MemOptType.ValueType # 5
"""Recomputation heuristics will recompute ops (such as Relu activation)
during backprop instead of storing them, reducing peak memory usage.
"""
SCHEDULING_HEURISTICS: RewriterConfig.MemOptType.ValueType # 6
"""Scheduling will split big ops such as AddN and try to enforce a schedule
of the new computations that decreases peak memory usage.
"""
HEURISTICS: RewriterConfig.MemOptType.ValueType # 3
"""Use any combination of swapping and recomputation heuristics."""
@typing_extensions.final
class CustomGraphOptimizer(google.protobuf.message.Message):
"""Message to describe custom graph optimizer and its parameters"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ParameterMapEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> tensorflow.core.framework.attr_value_pb2.AttrValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: tensorflow.core.framework.attr_value_pb2.AttrValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
PARAMETER_MAP_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def parameter_map(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue]: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
parameter_map: collections.abc.Mapping[builtins.str, tensorflow.core.framework.attr_value_pb2.AttrValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "parameter_map", b"parameter_map"]) -> None: ...
CPU_LAYOUT_CONVERSION_FIELD_NUMBER: builtins.int
LAYOUT_OPTIMIZER_FIELD_NUMBER: builtins.int
CONSTANT_FOLDING_FIELD_NUMBER: builtins.int
SHAPE_OPTIMIZATION_FIELD_NUMBER: builtins.int
REMAPPING_FIELD_NUMBER: builtins.int
COMMON_SUBGRAPH_ELIMINATION_FIELD_NUMBER: builtins.int
ARITHMETIC_OPTIMIZATION_FIELD_NUMBER: builtins.int
DEPENDENCY_OPTIMIZATION_FIELD_NUMBER: builtins.int
LOOP_OPTIMIZATION_FIELD_NUMBER: builtins.int
FUNCTION_OPTIMIZATION_FIELD_NUMBER: builtins.int
DEBUG_STRIPPER_FIELD_NUMBER: builtins.int
DISABLE_MODEL_PRUNING_FIELD_NUMBER: builtins.int
SCOPED_ALLOCATOR_OPTIMIZATION_FIELD_NUMBER: builtins.int
PIN_TO_HOST_OPTIMIZATION_FIELD_NUMBER: builtins.int
IMPLEMENTATION_SELECTOR_FIELD_NUMBER: builtins.int
AUTO_MIXED_PRECISION_FIELD_NUMBER: builtins.int
AUTO_MIXED_PRECISION_MKL_FIELD_NUMBER: builtins.int
AUTO_MIXED_PRECISION_ONEDNN_BFLOAT16_FIELD_NUMBER: builtins.int
AUTO_MIXED_PRECISION_CPU_FIELD_NUMBER: builtins.int
DISABLE_META_OPTIMIZER_FIELD_NUMBER: builtins.int
USE_PLUGIN_OPTIMIZERS_FIELD_NUMBER: builtins.int
EXPERIMENTAL_CONDITIONAL_CODE_MOTION_FIELD_NUMBER: builtins.int
META_OPTIMIZER_ITERATIONS_FIELD_NUMBER: builtins.int
MIN_GRAPH_NODES_FIELD_NUMBER: builtins.int
EXPERIMENTAL_DISABLE_COMPRESSED_TENSOR_OPTIMIZATION_FIELD_NUMBER: builtins.int
EXPERIMENTAL_DISABLE_FOLDING_QUANTIZATION_EMULATION_FIELD_NUMBER: builtins.int
MEMORY_OPTIMIZATION_FIELD_NUMBER: builtins.int
MEMORY_OPTIMIZER_TARGET_NODE_NAME_SCOPE_FIELD_NUMBER: builtins.int
META_OPTIMIZER_TIMEOUT_MS_FIELD_NUMBER: builtins.int
AUTO_PARALLEL_FIELD_NUMBER: builtins.int
FAIL_ON_OPTIMIZER_ERRORS_FIELD_NUMBER: builtins.int
SCOPED_ALLOCATOR_OPTS_FIELD_NUMBER: builtins.int
OPTIMIZERS_FIELD_NUMBER: builtins.int
CUSTOM_OPTIMIZERS_FIELD_NUMBER: builtins.int
INTER_OPTIMIZER_VERIFIER_CONFIG_FIELD_NUMBER: builtins.int
POST_OPTIMIZATION_VERIFIER_CONFIG_FIELD_NUMBER: builtins.int
cpu_layout_conversion: global___RewriterConfig.CpuLayout.ValueType
"""CPU Conversion settings between NHCW and NCHW."""
layout_optimizer: global___RewriterConfig.Toggle.ValueType
"""Optimize tensor layouts (default is ON)
e.g. This will try to use NCHW layout on GPU which is faster.
"""
constant_folding: global___RewriterConfig.Toggle.ValueType
"""Fold constants (default is ON)
Statically infer the value of tensors when possible, and materialize the
result using constants.
"""
shape_optimization: global___RewriterConfig.Toggle.ValueType
"""Shape optimizations (default is ON)
Simplify computations made on shapes.
"""
remapping: global___RewriterConfig.Toggle.ValueType
"""Remapping (default is ON)
Remap subgraphs onto more efficient implementations.
"""
common_subgraph_elimination: global___RewriterConfig.Toggle.ValueType
"""Common subgraph elimination (default is ON)
e.g. Simplify arithmetic ops; merge ops with same value (like constants).
"""
arithmetic_optimization: global___RewriterConfig.Toggle.ValueType
"""Arithmetic optimizations (default is ON)
e.g. Simplify arithmetic ops; merge ops with same value (like constants).
"""
dependency_optimization: global___RewriterConfig.Toggle.ValueType
"""Control dependency optimizations (default is ON).
Remove redundant control dependencies, which may enable other optimizations.
"""
loop_optimization: global___RewriterConfig.Toggle.ValueType
"""Loop optimizations (default is ON)."""
function_optimization: global___RewriterConfig.Toggle.ValueType
"""Function optimizations (default is ON)."""
debug_stripper: global___RewriterConfig.Toggle.ValueType
"""Strips debug-related nodes from the graph (off by default)."""
disable_model_pruning: builtins.bool
"""If true, don't remove unnecessary ops from the graph"""
scoped_allocator_optimization: global___RewriterConfig.Toggle.ValueType
"""Try to allocate some independent Op outputs contiguously in order to
merge or eliminate downstream Ops (off by default).
"""
pin_to_host_optimization: global___RewriterConfig.Toggle.ValueType
"""Force small ops onto the CPU (default is OFF)."""
implementation_selector: global___RewriterConfig.Toggle.ValueType
"""Enable the swap of kernel implementations based on the device placement
(default is ON).
"""
auto_mixed_precision: global___RewriterConfig.Toggle.ValueType
"""Optimize data types for CUDA (default is OFF).
This will try to use float16 on GPU which is faster.
Note that this can change the numerical stability of the graph and may
require the use of loss scaling to maintain model convergence.
"""
auto_mixed_precision_mkl: global___RewriterConfig.Toggle.ValueType
"""Optimize data types for oneDNN (default is OFF).
This will try to use bfloat16 on CPUs, which is faster.
Note that this can change the numerical stability of the graph.
Note: this is deprecated.
It is replaced by auto_mixed_precision_onednn_bfloat16
"""
auto_mixed_precision_onednn_bfloat16: global___RewriterConfig.Toggle.ValueType
"""Optimize data types for oneDNN (default is OFF).
This will try to use bfloat16 on CPUs, which is faster.
Note that this can change the numerical stability of the graph.
Note: this is equivalent to the deprecated option auto_mixed_precision_mkl
"""
auto_mixed_precision_cpu: global___RewriterConfig.Toggle.ValueType
"""Emulate a model using data type float16 on CPU (default is OFF).
This will try to emulate the float16 inputs and outputs of an operator
on CPU to have better correlation with float16 on GPU; however the
computation in the operator is based on float32.
Note that this can change the numerical stability of the graph.
"""
disable_meta_optimizer: builtins.bool
"""Disable the entire meta optimizer (off by default)."""
use_plugin_optimizers: global___RewriterConfig.Toggle.ValueType
"""Optimizers registered by plugin (default is ON)"""
experimental_conditional_code_motion: global___RewriterConfig.Toggle.ValueType
"""Conditional code motion (default is ON)."""
meta_optimizer_iterations: global___RewriterConfig.NumIterationsType.ValueType
"""Controls how many times we run the optimizers in meta optimizer (default
is once).
"""
min_graph_nodes: builtins.int
"""The minimum number of nodes in a graph to optimizer. For smaller graphs,
optimization is skipped.
0 means the system picks an appropriate number.
< 0 means do not skip optimization.
"""
experimental_disable_compressed_tensor_optimization: builtins.bool
"""Disable optimizations that assume compressed tensors. Note that this flag
is experimental and may be removed in the future.
"""
experimental_disable_folding_quantization_emulation: builtins.bool
"""Disable folding quantization emulation ops such as FakeQuantWithMinMax* and
QuantizeAndDequantize*. Some compilers (e.g. the TF-to-tflite converter)
have to extract quantization configs (e.g. min/max range, number of bits,
and per-channel) from the quantization emulation ops. Note that this flag
is experimental and may be removed in the future. See b/174138564 for more
details.
"""
memory_optimization: global___RewriterConfig.MemOptType.ValueType
"""Configures memory optimization passes through the meta-optimizer. Has no
effect on manually requested memory optimization passes in the optimizers
field.
"""
memory_optimizer_target_node_name_scope: builtins.str
"""A node name scope for node names which are valid outputs of recomputations.
Inputs to nodes that match this scope may be recomputed (subject either to
manual annotation of those input nodes or to manual annotation and
heuristics depending on memory_optimization), but the nodes themselves will
not be recomputed. This matches any sub-scopes as well, meaning the scope
can appear not just as a top-level scope. For example, with the default
value "gradients/", it will match the node names "gradients/foo" and
"foo/gradients/bar", but not "foo_gradients/".
"""
meta_optimizer_timeout_ms: builtins.int
"""Maximum number of milliseconds to spend optimizing a single graph before
timing out. If less than or equal to 0 (default value) the optimizer will
never time out.
"""
@property
def auto_parallel(self) -> global___AutoParallelOptions:
"""Configures AutoParallel optimization passes either through the
meta-optimizer or when manually specified through the optimizers field.
"""
fail_on_optimizer_errors: builtins.bool
"""If true, any optimization pass failing will cause the MetaOptimizer to
stop with an error. By default, or when set to false, failing passes are
skipped silently.
"""
@property
def scoped_allocator_opts(self) -> global___ScopedAllocatorOptions: ...
@property
def optimizers(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""If non-empty, will use this as an alternative way to specify a list of
optimizations to turn on and the order of the optimizations (replacing the
meta-optimizer).
Of the RewriterConfig options, only the AutoParallel configuration options
(the auto_parallel field) apply to manually requested optimization passes
("autoparallel"). Memory optimization passes ("memory") invoked here are
not configurable (in contrast to memory optimization passes through the
meta-optimizer) and act only on manual op annotations.
Custom optimizers (see custom_optimizers) that are not part of this
schedule will be run after - in the order that they were specified.
"""
@property
def custom_optimizers(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RewriterConfig.CustomGraphOptimizer]:
"""list of CustomGraphOptimizers to apply."""
@property
def inter_optimizer_verifier_config(self) -> tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig:
"""VerifierConfig specifying the verifiers to be run after every optimizer."""
@property
def post_optimization_verifier_config(self) -> tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig:
"""VerifierConfig specifying the verifiers to be run at the end, after all
optimizers have run.
"""
def __init__(
self,
*,
cpu_layout_conversion: global___RewriterConfig.CpuLayout.ValueType | None = ...,
layout_optimizer: global___RewriterConfig.Toggle.ValueType | None = ...,
constant_folding: global___RewriterConfig.Toggle.ValueType | None = ...,
shape_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
remapping: global___RewriterConfig.Toggle.ValueType | None = ...,
common_subgraph_elimination: global___RewriterConfig.Toggle.ValueType | None = ...,
arithmetic_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
dependency_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
loop_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
function_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
debug_stripper: global___RewriterConfig.Toggle.ValueType | None = ...,
disable_model_pruning: builtins.bool | None = ...,
scoped_allocator_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
pin_to_host_optimization: global___RewriterConfig.Toggle.ValueType | None = ...,
implementation_selector: global___RewriterConfig.Toggle.ValueType | None = ...,
auto_mixed_precision: global___RewriterConfig.Toggle.ValueType | None = ...,
auto_mixed_precision_mkl: global___RewriterConfig.Toggle.ValueType | None = ...,
auto_mixed_precision_onednn_bfloat16: global___RewriterConfig.Toggle.ValueType | None = ...,
auto_mixed_precision_cpu: global___RewriterConfig.Toggle.ValueType | None = ...,
disable_meta_optimizer: builtins.bool | None = ...,
use_plugin_optimizers: global___RewriterConfig.Toggle.ValueType | None = ...,
experimental_conditional_code_motion: global___RewriterConfig.Toggle.ValueType | None = ...,
meta_optimizer_iterations: global___RewriterConfig.NumIterationsType.ValueType | None = ...,
min_graph_nodes: builtins.int | None = ...,
experimental_disable_compressed_tensor_optimization: builtins.bool | None = ...,
experimental_disable_folding_quantization_emulation: builtins.bool | None = ...,
memory_optimization: global___RewriterConfig.MemOptType.ValueType | None = ...,
memory_optimizer_target_node_name_scope: builtins.str | None = ...,
meta_optimizer_timeout_ms: builtins.int | None = ...,
auto_parallel: global___AutoParallelOptions | None = ...,
fail_on_optimizer_errors: builtins.bool | None = ...,
scoped_allocator_opts: global___ScopedAllocatorOptions | None = ...,
optimizers: collections.abc.Iterable[builtins.str] | None = ...,
custom_optimizers: collections.abc.Iterable[global___RewriterConfig.CustomGraphOptimizer] | None = ...,
inter_optimizer_verifier_config: tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig | None = ...,
post_optimization_verifier_config: tensorflow.core.protobuf.verifier_config_pb2.VerifierConfig | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["auto_parallel", b"auto_parallel", "inter_optimizer_verifier_config", b"inter_optimizer_verifier_config", "post_optimization_verifier_config", b"post_optimization_verifier_config", "scoped_allocator_opts", b"scoped_allocator_opts"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["arithmetic_optimization", b"arithmetic_optimization", "auto_mixed_precision", b"auto_mixed_precision", "auto_mixed_precision_cpu", b"auto_mixed_precision_cpu", "auto_mixed_precision_mkl", b"auto_mixed_precision_mkl", "auto_mixed_precision_onednn_bfloat16", b"auto_mixed_precision_onednn_bfloat16", "auto_parallel", b"auto_parallel", "common_subgraph_elimination", b"common_subgraph_elimination", "constant_folding", b"constant_folding", "cpu_layout_conversion", b"cpu_layout_conversion", "custom_optimizers", b"custom_optimizers", "debug_stripper", b"debug_stripper", "dependency_optimization", b"dependency_optimization", "disable_meta_optimizer", b"disable_meta_optimizer", "disable_model_pruning", b"disable_model_pruning", "experimental_conditional_code_motion", b"experimental_conditional_code_motion", "experimental_disable_compressed_tensor_optimization", b"experimental_disable_compressed_tensor_optimization", "experimental_disable_folding_quantization_emulation", b"experimental_disable_folding_quantization_emulation", "fail_on_optimizer_errors", b"fail_on_optimizer_errors", "function_optimization", b"function_optimization", "implementation_selector", b"implementation_selector", "inter_optimizer_verifier_config", b"inter_optimizer_verifier_config", "layout_optimizer", b"layout_optimizer", "loop_optimization", b"loop_optimization", "memory_optimization", b"memory_optimization", "memory_optimizer_target_node_name_scope", b"memory_optimizer_target_node_name_scope", "meta_optimizer_iterations", b"meta_optimizer_iterations", "meta_optimizer_timeout_ms", b"meta_optimizer_timeout_ms", "min_graph_nodes", b"min_graph_nodes", "optimizers", b"optimizers", "pin_to_host_optimization", b"pin_to_host_optimization", "post_optimization_verifier_config", b"post_optimization_verifier_config", "remapping", b"remapping", "scoped_allocator_optimization", b"scoped_allocator_optimization", "scoped_allocator_opts", b"scoped_allocator_opts", "shape_optimization", b"shape_optimization", "use_plugin_optimizers", b"use_plugin_optimizers"]) -> None: ...
global___RewriterConfig = RewriterConfig
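
A minimal usage sketch of the message above: in generated protobuf code the nested enum values (Toggle, MemOptType, NumIterationsType) are also exposed as attributes of RewriterConfig itself.

from tensorflow.core.protobuf.rewriter_config_pb2 import RewriterConfig

opts = RewriterConfig(
    constant_folding=RewriterConfig.OFF,                     # Toggle value
    arithmetic_optimization=RewriterConfig.AGGRESSIVE,       # Toggle value
    memory_optimization=RewriterConfig.SWAPPING_HEURISTICS,  # MemOptType value
    min_graph_nodes=-1,  # < 0 disables the small-graph skip described above
)
assert opts.meta_optimizer_iterations == RewriterConfig.DEFAULT_NUM_ITERS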

View File

@@ -0,0 +1,46 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.protobuf.meta_graph_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SavedModel(google.protobuf.message.Message):
"""SavedModel is the high level serialization format for TensorFlow Models.
See [todo: doc links, similar to session_bundle] for more information.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SAVED_MODEL_SCHEMA_VERSION_FIELD_NUMBER: builtins.int
META_GRAPHS_FIELD_NUMBER: builtins.int
saved_model_schema_version: builtins.int
"""The schema version of the SavedModel instance. Used for versioning when
making future changes to the specification/implementation. Initial value
at release will be 1.
"""
@property
def meta_graphs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.meta_graph_pb2.MetaGraphDef]:
"""One or more MetaGraphs."""
def __init__(
self,
*,
saved_model_schema_version: builtins.int | None = ...,
meta_graphs: collections.abc.Iterable[tensorflow.core.protobuf.meta_graph_pb2.MetaGraphDef] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["meta_graphs", b"meta_graphs", "saved_model_schema_version", b"saved_model_schema_version"]) -> None: ...
global___SavedModel = SavedModel
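
An illustrative read of a serialized SavedModel (the file path is hypothetical): SavedModel inherits ParseFromString from google.protobuf.message.Message.

from tensorflow.core.protobuf.saved_model_pb2 import SavedModel

sm = SavedModel()
with open("export/saved_model.pb", "rb") as f:  # hypothetical path
    sm.ParseFromString(f.read())
# saved_model_schema_version starts at 1; meta_graphs is a repeated field.
print(sm.saved_model_schema_version, len(sm.meta_graphs))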

View File

@@ -0,0 +1,565 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.any_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
import tensorflow.core.framework.variable_pb2
import tensorflow.core.framework.versions_pb2
import tensorflow.core.protobuf.struct_pb2
import tensorflow.core.protobuf.trackable_object_graph_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SavedObjectGraph(google.protobuf.message.Message):
"""SavedObjectGraph shares some structure with TrackableObjectGraph, but
SavedObjectGraph belongs to the MetaGraph and contains pointers to functions
and type information, while TrackableObjectGraph lives in the checkpoint
and contains pointers only to variable values.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ConcreteFunctionsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___SavedConcreteFunction: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___SavedConcreteFunction | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NODES_FIELD_NUMBER: builtins.int
CONCRETE_FUNCTIONS_FIELD_NUMBER: builtins.int
@property
def nodes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedObject]:
"""Flattened list of objects in the object graph.
The position of the object in this list indicates its id.
Nodes[0] is considered the root node.
"""
@property
def concrete_functions(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___SavedConcreteFunction]:
"""Information about captures and output structures in concrete functions.
Referenced from SavedBareConcreteFunction and SavedFunction.
"""
def __init__(
self,
*,
nodes: collections.abc.Iterable[global___SavedObject] | None = ...,
concrete_functions: collections.abc.Mapping[builtins.str, global___SavedConcreteFunction] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["concrete_functions", b"concrete_functions", "nodes", b"nodes"]) -> None: ...
global___SavedObjectGraph = SavedObjectGraph
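
A sketch of walking the flattened object graph above: an object's position in nodes is its id, nodes[0] is the root, and child edges index back into nodes.

from tensorflow.core.protobuf.saved_object_graph_pb2 import SavedObjectGraph

graph = SavedObjectGraph()  # typically parsed from a MetaGraphDef's object graph
if graph.nodes:
    root = graph.nodes[0]
    for ref in root.children:
        child = graph.nodes[ref.node_id]  # follow a named edge
        print(ref.local_name, child.WhichOneof("kind"))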
@typing_extensions.final
class SavedObject(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class SaveableObjectsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___SaveableObject: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___SaveableObject | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
CHILDREN_FIELD_NUMBER: builtins.int
DEPENDENCIES_FIELD_NUMBER: builtins.int
SLOT_VARIABLES_FIELD_NUMBER: builtins.int
USER_OBJECT_FIELD_NUMBER: builtins.int
ASSET_FIELD_NUMBER: builtins.int
FUNCTION_FIELD_NUMBER: builtins.int
VARIABLE_FIELD_NUMBER: builtins.int
BARE_CONCRETE_FUNCTION_FIELD_NUMBER: builtins.int
CONSTANT_FIELD_NUMBER: builtins.int
RESOURCE_FIELD_NUMBER: builtins.int
CAPTURED_TENSOR_FIELD_NUMBER: builtins.int
SAVEABLE_OBJECTS_FIELD_NUMBER: builtins.int
REGISTERED_NAME_FIELD_NUMBER: builtins.int
SERIALIZED_USER_PROTO_FIELD_NUMBER: builtins.int
REGISTERED_SAVER_FIELD_NUMBER: builtins.int
@property
def children(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference]:
"""Objects which this object depends on: named edges in the dependency
graph.
Note: All kinds of SavedObject may have children, except
"constant" and "captured_tensor".
"""
@property
def dependencies(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference]:
"""Ordered list of dependencies that must be loaded before this object.
SavedModel loads with the bottom-up approach, by first creating all objects
(in the order defined by the dependencies), then connecting the edges.
"""
@property
def slot_variables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference]:
"""Slot variables owned by this object. This describes the three-way
(optimizer, variable, slot variable) relationship; none of the three
depend on the others directly.
Note: currently only valid if kind == "user_object".
"""
@property
def user_object(self) -> global___SavedUserObject: ...
@property
def asset(self) -> global___SavedAsset: ...
@property
def function(self) -> global___SavedFunction: ...
@property
def variable(self) -> global___SavedVariable: ...
@property
def bare_concrete_function(self) -> global___SavedBareConcreteFunction: ...
@property
def constant(self) -> global___SavedConstant: ...
@property
def resource(self) -> global___SavedResource: ...
@property
def captured_tensor(self) -> global___CapturedTensor: ...
@property
def saveable_objects(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___SaveableObject]:
"""Stores the functions used to save and restore this object. At most one of
`saveable_objects` or `registered_saver` is defined for each SavedObject.
See the comment below for the difference between SaveableObject and
registered savers.
"""
registered_name: builtins.str
"""The fields below are filled when the user serializes a registered Trackable
class or an object with a registered saver function.
Registered classes may save additional metadata and supersede the
default loading process where nodes are recreated from the proto.
If the registered class cannot be found, then the object will load as one
of the default trackable objects: AutoTrackable (a class similar to
tf.Module), tf.function, or tf.Variable.
Unlike SaveableObjects, which store the functions for saving and restoring
from tensors, registered savers allow Trackables to write checkpoint shards
directly (e.g. for performance or coordination reasons).
*All registered savers must be available when loading the SavedModel.*
The name of the registered class of the form "{package}.{class_name}".
This field is used to search for the registered class at loading time.
"""
@property
def serialized_user_proto(self) -> google.protobuf.any_pb2.Any:
"""The user-generated proto storing metadata for this object, to be passed to
the registered class's _deserialize_from_proto method when this object is
loaded from the SavedModel.
"""
registered_saver: builtins.str
"""String name of the registered saver. At most one of `saveable_objects` or
`registered_saver` is defined for each SavedObject.
"""
def __init__(
self,
*,
children: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference] | None = ...,
dependencies: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.ObjectReference] | None = ...,
slot_variables: collections.abc.Iterable[tensorflow.core.protobuf.trackable_object_graph_pb2.TrackableObjectGraph.TrackableObject.SlotVariableReference] | None = ...,
user_object: global___SavedUserObject | None = ...,
asset: global___SavedAsset | None = ...,
function: global___SavedFunction | None = ...,
variable: global___SavedVariable | None = ...,
bare_concrete_function: global___SavedBareConcreteFunction | None = ...,
constant: global___SavedConstant | None = ...,
resource: global___SavedResource | None = ...,
captured_tensor: global___CapturedTensor | None = ...,
saveable_objects: collections.abc.Mapping[builtins.str, global___SaveableObject] | None = ...,
registered_name: builtins.str | None = ...,
serialized_user_proto: google.protobuf.any_pb2.Any | None = ...,
registered_saver: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["asset", b"asset", "bare_concrete_function", b"bare_concrete_function", "captured_tensor", b"captured_tensor", "constant", b"constant", "function", b"function", "kind", b"kind", "resource", b"resource", "serialized_user_proto", b"serialized_user_proto", "user_object", b"user_object", "variable", b"variable"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["asset", b"asset", "bare_concrete_function", b"bare_concrete_function", "captured_tensor", b"captured_tensor", "children", b"children", "constant", b"constant", "dependencies", b"dependencies", "function", b"function", "kind", b"kind", "registered_name", b"registered_name", "registered_saver", b"registered_saver", "resource", b"resource", "saveable_objects", b"saveable_objects", "serialized_user_proto", b"serialized_user_proto", "slot_variables", b"slot_variables", "user_object", b"user_object", "variable", b"variable"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["user_object", "asset", "function", "variable", "bare_concrete_function", "constant", "resource", "captured_tensor"] | None: ...
global___SavedObject = SavedObject
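
A small sketch of dispatching on the `kind` oneof typed above; WhichOneof returns a Literal-narrowed string, or None when no kind is set.

from tensorflow.core.protobuf.saved_object_graph_pb2 import SavedObject, SavedVariable

obj = SavedObject(variable=SavedVariable(name="w"))
kind = obj.WhichOneof("kind")  # "variable" here
if kind == "variable":
    print(obj.variable.name)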
@typing_extensions.final
class SavedUserObject(google.protobuf.message.Message):
"""A SavedUserObject is an object (in the object-oriented language of the
TensorFlow program) of some user- or framework-defined class other than
those handled specifically by the other kinds of SavedObjects.
This object cannot be evaluated as a tensor, and therefore cannot be bound
to an input of a function.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
IDENTIFIER_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
METADATA_FIELD_NUMBER: builtins.int
identifier: builtins.str
"""Corresponds to a registration of the type to use in the loading program."""
@property
def version(self) -> tensorflow.core.framework.versions_pb2.VersionDef:
"""Version information from the producer of this SavedUserObject."""
metadata: builtins.str
"""Metadata for deserializing this object.
Deprecated! At the time of deprecation, Keras was the only user of this
field, and its saving and loading code will be updated shortly.
Please save your application-specific metadata to a separate file.
"""
def __init__(
self,
*,
identifier: builtins.str | None = ...,
version: tensorflow.core.framework.versions_pb2.VersionDef | None = ...,
metadata: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["version", b"version"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["identifier", b"identifier", "metadata", b"metadata", "version", b"version"]) -> None: ...
global___SavedUserObject = SavedUserObject
@typing_extensions.final
class SavedAsset(google.protobuf.message.Message):
"""A SavedAsset points to an asset in the MetaGraph.
When bound to a function this object evaluates to a tensor with the absolute
filename. Users should not depend on a particular part of the filename to
remain stable (e.g. basename could be changed).
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ASSET_FILE_DEF_INDEX_FIELD_NUMBER: builtins.int
asset_file_def_index: builtins.int
"""Index into `MetaGraphDef.asset_file_def[]` that describes the Asset.
Only the field `AssetFileDef.filename` is used. Other fields, such as
`AssetFileDef.tensor_info`, MUST be ignored.
"""
def __init__(
self,
*,
asset_file_def_index: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["asset_file_def_index", b"asset_file_def_index"]) -> None: ...
global___SavedAsset = SavedAsset
@typing_extensions.final
class SavedFunction(google.protobuf.message.Message):
"""A function with multiple signatures, possibly with non-Tensor arguments."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CONCRETE_FUNCTIONS_FIELD_NUMBER: builtins.int
FUNCTION_SPEC_FIELD_NUMBER: builtins.int
@property
def concrete_functions(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
@property
def function_spec(self) -> global___FunctionSpec: ...
def __init__(
self,
*,
concrete_functions: collections.abc.Iterable[builtins.str] | None = ...,
function_spec: global___FunctionSpec | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["function_spec", b"function_spec"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["concrete_functions", b"concrete_functions", "function_spec", b"function_spec"]) -> None: ...
global___SavedFunction = SavedFunction
@typing_extensions.final
class CapturedTensor(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
CONCRETE_FUNCTION_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of captured tensor"""
concrete_function: builtins.str
"""Name of concrete function which contains the computed graph tensor."""
def __init__(
self,
*,
name: builtins.str | None = ...,
concrete_function: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["concrete_function", b"concrete_function", "name", b"name"]) -> None: ...
global___CapturedTensor = CapturedTensor
@typing_extensions.final
class SavedConcreteFunction(google.protobuf.message.Message):
"""Stores low-level information about a concrete function. Referenced in either
a SavedFunction or a SavedBareConcreteFunction.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BOUND_INPUTS_FIELD_NUMBER: builtins.int
CANONICALIZED_INPUT_SIGNATURE_FIELD_NUMBER: builtins.int
OUTPUT_SIGNATURE_FIELD_NUMBER: builtins.int
@property
def bound_inputs(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
@property
def canonicalized_input_signature(self) -> tensorflow.core.protobuf.struct_pb2.StructuredValue:
"""Input in canonicalized form that was received to create this concrete
function.
"""
@property
def output_signature(self) -> tensorflow.core.protobuf.struct_pb2.StructuredValue:
"""Output that was the return value of this function after replacing all
Tensors with TensorSpecs. This can be an arbitrary nested structure and will
be used to reconstruct the full structure from pure tensors.
"""
def __init__(
self,
*,
bound_inputs: collections.abc.Iterable[builtins.int] | None = ...,
canonicalized_input_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ...,
output_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["canonicalized_input_signature", b"canonicalized_input_signature", "output_signature", b"output_signature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bound_inputs", b"bound_inputs", "canonicalized_input_signature", b"canonicalized_input_signature", "output_signature", b"output_signature"]) -> None: ...
global___SavedConcreteFunction = SavedConcreteFunction
@typing_extensions.final
class SavedBareConcreteFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CONCRETE_FUNCTION_NAME_FIELD_NUMBER: builtins.int
ARGUMENT_KEYWORDS_FIELD_NUMBER: builtins.int
ALLOWED_POSITIONAL_ARGUMENTS_FIELD_NUMBER: builtins.int
FUNCTION_SPEC_FIELD_NUMBER: builtins.int
concrete_function_name: builtins.str
"""Identifies a SavedConcreteFunction."""
@property
def argument_keywords(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""A sequence of unique strings, one per Tensor argument."""
allowed_positional_arguments: builtins.int
"""The prefix of `argument_keywords` which may be identified by position."""
@property
def function_spec(self) -> global___FunctionSpec:
"""The spec of the function that this ConcreteFunction is traced from. This
allows the ConcreteFunction to be called with nested structure inputs. This
field may not be populated. If this field is absent, the concrete function
can only be called with flat inputs.
TODO(b/169361281): support calling saved ConcreteFunction with structured
inputs in C++ SavedModel API.
"""
def __init__(
self,
*,
concrete_function_name: builtins.str | None = ...,
argument_keywords: collections.abc.Iterable[builtins.str] | None = ...,
allowed_positional_arguments: builtins.int | None = ...,
function_spec: global___FunctionSpec | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["function_spec", b"function_spec"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["allowed_positional_arguments", b"allowed_positional_arguments", "argument_keywords", b"argument_keywords", "concrete_function_name", b"concrete_function_name", "function_spec", b"function_spec"]) -> None: ...
global___SavedBareConcreteFunction = SavedBareConcreteFunction
@typing_extensions.final
class SavedConstant(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OPERATION_FIELD_NUMBER: builtins.int
operation: builtins.str
"""An Operation name for a ConstantOp in this SavedObjectGraph's MetaGraph."""
def __init__(
self,
*,
operation: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["operation", b"operation"]) -> None: ...
global___SavedConstant = SavedConstant
@typing_extensions.final
class SavedVariable(google.protobuf.message.Message):
"""Represents a Variable that is initialized by loading the contents from the
checkpoint.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
TRAINABLE_FIELD_NUMBER: builtins.int
SYNCHRONIZATION_FIELD_NUMBER: builtins.int
AGGREGATION_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
DEVICE_FIELD_NUMBER: builtins.int
EXPERIMENTAL_DISTRIBUTED_VARIABLE_COMPONENTS_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
trainable: builtins.bool
synchronization: tensorflow.core.framework.variable_pb2.VariableSynchronization.ValueType
aggregation: tensorflow.core.framework.variable_pb2.VariableAggregation.ValueType
name: builtins.str
device: builtins.str
@property
def experimental_distributed_variable_components(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedVariable]:
"""List of component variables for a distributed variable.
When this field is non-empty, the SavedVariable will be assumed
to be a distributed variable defined by the components listed here.
This is only supported by experimental loaders at the moment.
"""
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
trainable: builtins.bool | None = ...,
synchronization: tensorflow.core.framework.variable_pb2.VariableSynchronization.ValueType | None = ...,
aggregation: tensorflow.core.framework.variable_pb2.VariableAggregation.ValueType | None = ...,
name: builtins.str | None = ...,
device: builtins.str | None = ...,
experimental_distributed_variable_components: collections.abc.Iterable[global___SavedVariable] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["aggregation", b"aggregation", "device", b"device", "dtype", b"dtype", "experimental_distributed_variable_components", b"experimental_distributed_variable_components", "name", b"name", "shape", b"shape", "synchronization", b"synchronization", "trainable", b"trainable"]) -> None: ...
global___SavedVariable = SavedVariable
@typing_extensions.final
class FunctionSpec(google.protobuf.message.Message):
"""Represents `FunctionSpec` used in `Function`. This represents a
function that has been wrapped as a TensorFlow `Function`.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _JitCompile:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _JitCompileEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[FunctionSpec._JitCompile.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT: FunctionSpec._JitCompile.ValueType # 0
ON: FunctionSpec._JitCompile.ValueType # 1
OFF: FunctionSpec._JitCompile.ValueType # 2
class JitCompile(_JitCompile, metaclass=_JitCompileEnumTypeWrapper):
"""Whether the function should be compiled by XLA.
The public interface to `tf.function` uses an optional boolean to
represent three distinct states for this field. Unfortunately, proto3
removes the ability to explicitly check for the presence or absence of a
field, so we instead map to an enum.
See `tf.function` for details.
"""
DEFAULT: FunctionSpec.JitCompile.ValueType # 0
ON: FunctionSpec.JitCompile.ValueType # 1
OFF: FunctionSpec.JitCompile.ValueType # 2
FULLARGSPEC_FIELD_NUMBER: builtins.int
IS_METHOD_FIELD_NUMBER: builtins.int
INPUT_SIGNATURE_FIELD_NUMBER: builtins.int
JIT_COMPILE_FIELD_NUMBER: builtins.int
@property
def fullargspec(self) -> tensorflow.core.protobuf.struct_pb2.StructuredValue:
"""Full arg spec from inspect.getfullargspec()."""
is_method: builtins.bool
"""Whether this represents a class method."""
@property
def input_signature(self) -> tensorflow.core.protobuf.struct_pb2.StructuredValue:
"""The input signature, if specified."""
jit_compile: global___FunctionSpec.JitCompile.ValueType
def __init__(
self,
*,
fullargspec: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ...,
is_method: builtins.bool | None = ...,
input_signature: tensorflow.core.protobuf.struct_pb2.StructuredValue | None = ...,
jit_compile: global___FunctionSpec.JitCompile.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["fullargspec", b"fullargspec", "input_signature", b"input_signature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["fullargspec", b"fullargspec", "input_signature", b"input_signature", "is_method", b"is_method", "jit_compile", b"jit_compile"]) -> None: ...
global___FunctionSpec = FunctionSpec
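
A short sketch of the three-state JitCompile enum above, which stands in for the optional bool that proto3 cannot represent directly:

from tensorflow.core.protobuf.saved_object_graph_pb2 import FunctionSpec

spec = FunctionSpec(jit_compile=FunctionSpec.ON)
# DEFAULT (0) plays the role of "unset", mirroring jit_compile=None in tf.function.
assert spec.jit_compile != FunctionSpec.DEFAULT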
@typing_extensions.final
class SavedResource(google.protobuf.message.Message):
"""A SavedResource represents a TF object that holds state during its lifetime.
An object of this type can have a reference to a create_resource() and an
initialize() function.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DEVICE_FIELD_NUMBER: builtins.int
device: builtins.str
"""A device specification indicating a required placement for the resource
creation function, e.g. "CPU". An empty string allows the user to select a
device.
"""
def __init__(
self,
*,
device: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["device", b"device"]) -> None: ...
global___SavedResource = SavedResource
@typing_extensions.final
class SaveableObject(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SAVE_FUNCTION_FIELD_NUMBER: builtins.int
RESTORE_FUNCTION_FIELD_NUMBER: builtins.int
save_function: builtins.int
"""Node ids of concrete functions for saving and loading from a checkpoint.
These functions save and restore directly from tensors.
"""
restore_function: builtins.int
def __init__(
self,
*,
save_function: builtins.int | None = ...,
restore_function: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["restore_function", b"restore_function", "save_function", b"save_function"]) -> None: ...
global___SaveableObject = SaveableObject

View File

@@ -0,0 +1,91 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SaverDef(google.protobuf.message.Message):
"""Protocol buffer representing the configuration of a Saver."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _CheckpointFormatVersion:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _CheckpointFormatVersionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SaverDef._CheckpointFormatVersion.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
LEGACY: SaverDef._CheckpointFormatVersion.ValueType # 0
"""Internal legacy format."""
V1: SaverDef._CheckpointFormatVersion.ValueType # 1
"""Deprecated format: tf.Saver() which works with tensorflow::table::Table."""
V2: SaverDef._CheckpointFormatVersion.ValueType # 2
"""Current format: more efficient."""
class CheckpointFormatVersion(_CheckpointFormatVersion, metaclass=_CheckpointFormatVersionEnumTypeWrapper):
"""A version number that identifies a different on-disk checkpoint format.
Usually, each subclass of BaseSaverBuilder works with a particular
version/format. However, it is possible that the same builder may be
upgraded to support a newer checkpoint format in the future.
"""
LEGACY: SaverDef.CheckpointFormatVersion.ValueType # 0
"""Internal legacy format."""
V1: SaverDef.CheckpointFormatVersion.ValueType # 1
"""Deprecated format: tf.Saver() which works with tensorflow::table::Table."""
V2: SaverDef.CheckpointFormatVersion.ValueType # 2
"""Current format: more efficient."""
FILENAME_TENSOR_NAME_FIELD_NUMBER: builtins.int
SAVE_TENSOR_NAME_FIELD_NUMBER: builtins.int
RESTORE_OP_NAME_FIELD_NUMBER: builtins.int
MAX_TO_KEEP_FIELD_NUMBER: builtins.int
SHARDED_FIELD_NUMBER: builtins.int
KEEP_CHECKPOINT_EVERY_N_HOURS_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
filename_tensor_name: builtins.str
"""The name of the tensor in which to specify the filename when saving or
restoring a model checkpoint.
"""
save_tensor_name: builtins.str
"""The operation to run when saving a model checkpoint."""
restore_op_name: builtins.str
"""The operation to run when restoring a model checkpoint."""
max_to_keep: builtins.int
"""Maximum number of checkpoints to keep. If 0, no checkpoints are deleted."""
sharded: builtins.bool
"""Shard the save files, one per device that has Variable nodes."""
keep_checkpoint_every_n_hours: builtins.float
"""How often to keep an additional checkpoint. If not specified, only the last
"max_to_keep" checkpoints are kept; if specified, in addition to keeping
the last "max_to_keep" checkpoints, an additional checkpoint will be kept
for every n hours of training.
"""
version: global___SaverDef.CheckpointFormatVersion.ValueType
def __init__(
self,
*,
filename_tensor_name: builtins.str | None = ...,
save_tensor_name: builtins.str | None = ...,
restore_op_name: builtins.str | None = ...,
max_to_keep: builtins.int | None = ...,
sharded: builtins.bool | None = ...,
keep_checkpoint_every_n_hours: builtins.float | None = ...,
version: global___SaverDef.CheckpointFormatVersion.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["filename_tensor_name", b"filename_tensor_name", "keep_checkpoint_every_n_hours", b"keep_checkpoint_every_n_hours", "max_to_keep", b"max_to_keep", "restore_op_name", b"restore_op_name", "save_tensor_name", b"save_tensor_name", "sharded", b"sharded", "version", b"version"]) -> None: ...
global___SaverDef = SaverDef
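
An illustrative SaverDef using the CheckpointFormatVersion enum above (nested enum values are also reachable as SaverDef.V2 in generated code):

from tensorflow.core.protobuf.saver_pb2 import SaverDef

saver = SaverDef(
    max_to_keep=5,
    keep_checkpoint_every_n_hours=2.0,  # keep an extra checkpoint every 2 hours
    sharded=True,                       # one shard per device with Variable nodes
    version=SaverDef.V2,                # current, more efficient on-disk format
)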

View File

@@ -0,0 +1,176 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.protobuf.data_service_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class DispatcherConfig(google.protobuf.message.Message):
"""Configuration for a tf.data service DispatchServer.
Next id: 10
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PORT_FIELD_NUMBER: builtins.int
PROTOCOL_FIELD_NUMBER: builtins.int
WORK_DIR_FIELD_NUMBER: builtins.int
FAULT_TOLERANT_MODE_FIELD_NUMBER: builtins.int
WORKER_ADDRESSES_FIELD_NUMBER: builtins.int
DEPLOYMENT_MODE_FIELD_NUMBER: builtins.int
JOB_GC_CHECK_INTERVAL_MS_FIELD_NUMBER: builtins.int
JOB_GC_TIMEOUT_MS_FIELD_NUMBER: builtins.int
CLIENT_TIMEOUT_MS_FIELD_NUMBER: builtins.int
port: builtins.int
"""The port for the dispatcher to bind to. A value of 0 indicates that the
dispatcher may bind to any available port.
"""
protocol: builtins.str
"""The protocol for the dispatcher to use when connecting to workers."""
work_dir: builtins.str
"""A work directory to use for storing dispatcher state, and for recovering
during restarts. The empty string indicates not to use any work directory.
"""
fault_tolerant_mode: builtins.bool
"""Whether to run in fault tolerant mode, where dispatcher state is saved
across restarts. Requires that `work_dir` is nonempty.
"""
@property
def worker_addresses(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""(Optional.) If the job uses auto-sharding, it needs to specify a fixed list
of worker addresses that will register with the dispatcher. The worker
addresses should be in the format "host" or "host:port", where "port" is an
integer, a named port, or %port% to match any port.
"""
deployment_mode: tensorflow.core.protobuf.data_service_pb2.DeploymentMode.ValueType
"""(Optional.) tf.data service deployment mode. Supported values are "REMOTE",
"COLOCATED", and "HYBRID". If unspecified, it is assumed to be "REMOTE".
"""
job_gc_check_interval_ms: builtins.int
"""How often the dispatcher should scan through to delete old and unused
jobs. A value of 0 indicates that the decision should be left up to the
runtime.
"""
job_gc_timeout_ms: builtins.int
"""How long a job needs to be unused before it becomes a candidate for garbage
collection. A value of -1 indicates that jobs should never be garbage
collected. A value of 0 indicates that the decision should be left up to
the runtime.
"""
client_timeout_ms: builtins.int
"""How long to wait before garbage-collecting a client that hasn't
heartbeated to the dispatcher. A value of 0 indicates that the timeout
should be left to the runtime.
"""
def __init__(
self,
*,
port: builtins.int | None = ...,
protocol: builtins.str | None = ...,
work_dir: builtins.str | None = ...,
fault_tolerant_mode: builtins.bool | None = ...,
worker_addresses: collections.abc.Iterable[builtins.str] | None = ...,
deployment_mode: tensorflow.core.protobuf.data_service_pb2.DeploymentMode.ValueType | None = ...,
job_gc_check_interval_ms: builtins.int | None = ...,
job_gc_timeout_ms: builtins.int | None = ...,
client_timeout_ms: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["client_timeout_ms", b"client_timeout_ms", "deployment_mode", b"deployment_mode", "fault_tolerant_mode", b"fault_tolerant_mode", "job_gc_check_interval_ms", b"job_gc_check_interval_ms", "job_gc_timeout_ms", b"job_gc_timeout_ms", "port", b"port", "protocol", b"protocol", "work_dir", b"work_dir", "worker_addresses", b"worker_addresses"]) -> None: ...
global___DispatcherConfig = DispatcherConfig
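A minimal construction sketch for the message above, assuming the runtime module is tensorflow.core.protobuf.service_config_pb2 (where the tf.data service configs are conventionally generated):

# Sketch only; the module path is an assumption.
from tensorflow.core.protobuf import service_config_pb2

config = service_config_pb2.DispatcherConfig(
    port=5050,
    protocol="grpc",
    work_dir="/tmp/dispatcher",
    fault_tolerant_mode=True,  # per the field docs, requires a nonempty work_dir
)
config.worker_addresses.extend(["host1:%port%", "host2:%port%"])  # repeated scalar field
assert list(config.worker_addresses) == ["host1:%port%", "host2:%port%"]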
@typing_extensions.final
class WorkerConfig(google.protobuf.message.Message):
"""Configuration for a tf.data service WorkerServer.
Next id: 12
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PORT_FIELD_NUMBER: builtins.int
PROTOCOL_FIELD_NUMBER: builtins.int
DISPATCHER_ADDRESS_FIELD_NUMBER: builtins.int
WORKER_ADDRESS_FIELD_NUMBER: builtins.int
WORKER_TAGS_FIELD_NUMBER: builtins.int
HEARTBEAT_INTERVAL_MS_FIELD_NUMBER: builtins.int
DISPATCHER_TIMEOUT_MS_FIELD_NUMBER: builtins.int
DATA_TRANSFER_PROTOCOL_FIELD_NUMBER: builtins.int
DATA_TRANSFER_ADDRESS_FIELD_NUMBER: builtins.int
CROSS_TRAINER_CACHE_SIZE_BYTES_FIELD_NUMBER: builtins.int
SHUTDOWN_QUIET_PERIOD_MS_FIELD_NUMBER: builtins.int
port: builtins.int
"""The port for the worker to bind to. A value of 0 indicates that the
worker may bind to any available port.
"""
protocol: builtins.str
"""The protocol for the worker to use when connecting to the dispatcher."""
dispatcher_address: builtins.str
"""The address of the dispatcher to register with."""
worker_address: builtins.str
"""The address of the worker server. The substring "%port%", if specified,
will be replaced with the worker's bound port. This is useful when the port
is set to `0`.
"""
@property
def worker_tags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Tags attached to the worker. This allows reading from selected workers.
For example, by applying a "COLOCATED" tag, tf.data service is able to read
from the local tf.data worker if one exists, then from off-TF-host workers,
to avoid cross-TF-host reads.
"""
heartbeat_interval_ms: builtins.int
"""How often the worker should heartbeat to the master. A value of 0 indicates
that the decision should be left up to the runtime.
"""
dispatcher_timeout_ms: builtins.int
"""How long to retry requests to the dispatcher before giving up and reporting
an error. A value of 0 indicates that the decision should be left up to the
runtime.
"""
data_transfer_protocol: builtins.str
"""The protocol for the worker to use when transferring data to clients."""
data_transfer_address: builtins.str
"""The data transfer address of the worker server. The substring "%port%", if
specified, will be replaced with the worker's bound port. This is useful
when the port is set to `0`.
"""
cross_trainer_cache_size_bytes: builtins.int
"""Maximum size of the cross-trainer cache in bytes. If enabled, make sure
your training job provides sufficient memory resources.
"""
shutdown_quiet_period_ms: builtins.int
"""When shutting down a worker, how long to wait for the gRPC server to
process the final requests. This is used to achieve clean shutdown in unit
tests.
"""
def __init__(
self,
*,
port: builtins.int | None = ...,
protocol: builtins.str | None = ...,
dispatcher_address: builtins.str | None = ...,
worker_address: builtins.str | None = ...,
worker_tags: collections.abc.Iterable[builtins.str] | None = ...,
heartbeat_interval_ms: builtins.int | None = ...,
dispatcher_timeout_ms: builtins.int | None = ...,
data_transfer_protocol: builtins.str | None = ...,
data_transfer_address: builtins.str | None = ...,
cross_trainer_cache_size_bytes: builtins.int | None = ...,
shutdown_quiet_period_ms: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cross_trainer_cache_size_bytes", b"cross_trainer_cache_size_bytes", "data_transfer_address", b"data_transfer_address", "data_transfer_protocol", b"data_transfer_protocol", "dispatcher_address", b"dispatcher_address", "dispatcher_timeout_ms", b"dispatcher_timeout_ms", "heartbeat_interval_ms", b"heartbeat_interval_ms", "port", b"port", "protocol", b"protocol", "shutdown_quiet_period_ms", b"shutdown_quiet_period_ms", "worker_address", b"worker_address", "worker_tags", b"worker_tags"]) -> None: ...
global___WorkerConfig = WorkerConfig
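And the companion WorkerConfig, under the same assumed module; note that ClearField on a repeated field empties it:

from tensorflow.core.protobuf import service_config_pb2

worker = service_config_pb2.WorkerConfig(
    port=0,                                # 0 lets the worker bind any free port
    dispatcher_address="dispatcher:5050",
    worker_address="localhost:%port%",     # %port% is replaced with the bound port
    worker_tags=["COLOCATED"],
)
worker.ClearField("worker_tags")           # repeated fields clear back to empty
assert len(worker.worker_tags) == 0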

View File

@@ -0,0 +1,126 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SnapshotRecord(google.protobuf.message.Message):
"""Each SnapshotRecord represents one batch of pre-processed input data. A batch
consists of a list of tensors that we encode as TensorProtos. This message
doesn't store the structure of the batch.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_FIELD_NUMBER: builtins.int
@property
def tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_pb2.TensorProto]: ...
def __init__(
self,
*,
tensor: collections.abc.Iterable[tensorflow.core.framework.tensor_pb2.TensorProto] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["tensor", b"tensor"]) -> None: ...
global___SnapshotRecord = SnapshotRecord
@typing_extensions.final
class SnapshotMetadataRecord(google.protobuf.message.Message):
"""This stores the metadata information present in each snapshot record."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
GRAPH_HASH_FIELD_NUMBER: builtins.int
RUN_ID_FIELD_NUMBER: builtins.int
CREATION_TIMESTAMP_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
NUM_ELEMENTS_FIELD_NUMBER: builtins.int
FINALIZED_FIELD_NUMBER: builtins.int
graph_hash: builtins.str
"""Stores the fingerprint of the graph that describes the dataset that is
snapshotted.
"""
run_id: builtins.str
"""Run ID that this snapshot corresponds to."""
creation_timestamp: builtins.int
"""Time when we started creating this snapshot."""
version: builtins.int
"""Version of the snapshot data file format."""
@property
def dtype(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[tensorflow.core.framework.types_pb2.DataType.ValueType]:
"""A list of tensor dtype corresponding to each element of the snapshot."""
num_elements: builtins.int
"""The number of elements in the snapshot."""
finalized: builtins.bool
def __init__(
self,
*,
graph_hash: builtins.str | None = ...,
run_id: builtins.str | None = ...,
creation_timestamp: builtins.int | None = ...,
version: builtins.int | None = ...,
dtype: collections.abc.Iterable[tensorflow.core.framework.types_pb2.DataType.ValueType] | None = ...,
num_elements: builtins.int | None = ...,
finalized: builtins.bool | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["creation_timestamp", b"creation_timestamp", "dtype", b"dtype", "finalized", b"finalized", "graph_hash", b"graph_hash", "num_elements", b"num_elements", "run_id", b"run_id", "version", b"version"]) -> None: ...
global___SnapshotMetadataRecord = SnapshotMetadataRecord
@typing_extensions.final
class TensorMetadata(google.protobuf.message.Message):
"""Metadata for a single tensor in the Snapshot Record."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_SHAPE_FIELD_NUMBER: builtins.int
TENSOR_SIZE_BYTES_FIELD_NUMBER: builtins.int
@property
def tensor_shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
tensor_size_bytes: builtins.int
"""Number of uncompressed bytes used to store the tensor representation."""
def __init__(
self,
*,
tensor_shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
tensor_size_bytes: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tensor_shape", b"tensor_shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["tensor_shape", b"tensor_shape", "tensor_size_bytes", b"tensor_size_bytes"]) -> None: ...
global___TensorMetadata = TensorMetadata
@typing_extensions.final
class SnapshotTensorMetadata(google.protobuf.message.Message):
"""Metadata for all the tensors in a Snapshot Record."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_METADATA_FIELD_NUMBER: builtins.int
@property
def tensor_metadata(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TensorMetadata]: ...
def __init__(
self,
*,
tensor_metadata: collections.abc.Iterable[global___TensorMetadata] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["tensor_metadata", b"tensor_metadata"]) -> None: ...
global___SnapshotTensorMetadata = SnapshotTensorMetadata
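A sketch of the snapshot metadata messages above, assuming the runtime module tensorflow.core.protobuf.snapshot_pb2; note how HasField distinguishes an unset submessage from a default one:

# Sketch only; the module path is an assumption.
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import snapshot_pb2

meta = snapshot_pb2.SnapshotMetadataRecord(
    run_id="run-0",
    version=2,
    dtype=[types_pb2.DT_FLOAT, types_pb2.DT_INT64],  # repeated enum field
    num_elements=100,
)
tm = snapshot_pb2.TensorMetadata(tensor_size_bytes=400)
assert not tm.HasField("tensor_shape")   # submessage not set yet
tm.tensor_shape.dim.add(size=10)         # touching it creates (and sets) the submessage
assert tm.HasField("tensor_shape")
record_meta = snapshot_pb2.SnapshotTensorMetadata(tensor_metadata=[tm])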

View File

@@ -0,0 +1,420 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.types_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class StructuredValue(google.protobuf.message.Message):
"""`StructuredValue` represents a dynamically typed value representing various
data structures that are inspired by Python data structures typically used in
TensorFlow functions as inputs and outputs.
For example when saving a Layer there may be a `training` argument. If the
user passes a boolean True/False, that switches between two concrete
TensorFlow functions. In order to switch between them in the same way after
loading the SavedModel, we need to represent "True" and "False".
A more advanced example might be a function which takes a list of
dictionaries mapping from strings to Tensors. In order to map from
user-specified arguments `[{"a": tf.constant(1.)}, {"q": tf.constant(3.)}]`
after load to the right saved TensorFlow function, we need to represent the
nested structure and the strings, recording that we have a trace for anything
matching `[{"a": tf.TensorSpec(None, tf.float32)}, {"q": tf.TensorSpec([],
tf.float64)}]` as an example.
Likewise functions may return nested structures of Tensors, for example
returning a dictionary mapping from strings to Tensors. In order for the
loaded function to return the same structure we need to serialize it.
This is an ergonomic aid for working with loaded SavedModels, not a promise
to serialize all possible function signatures. For example we do not expect
to pickle generic Python objects, and ideally we'd stay language-agnostic.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NONE_VALUE_FIELD_NUMBER: builtins.int
FLOAT64_VALUE_FIELD_NUMBER: builtins.int
INT64_VALUE_FIELD_NUMBER: builtins.int
STRING_VALUE_FIELD_NUMBER: builtins.int
BOOL_VALUE_FIELD_NUMBER: builtins.int
TENSOR_SHAPE_VALUE_FIELD_NUMBER: builtins.int
TENSOR_DTYPE_VALUE_FIELD_NUMBER: builtins.int
TENSOR_SPEC_VALUE_FIELD_NUMBER: builtins.int
TYPE_SPEC_VALUE_FIELD_NUMBER: builtins.int
BOUNDED_TENSOR_SPEC_VALUE_FIELD_NUMBER: builtins.int
LIST_VALUE_FIELD_NUMBER: builtins.int
TUPLE_VALUE_FIELD_NUMBER: builtins.int
DICT_VALUE_FIELD_NUMBER: builtins.int
NAMED_TUPLE_VALUE_FIELD_NUMBER: builtins.int
@property
def none_value(self) -> global___NoneValue:
"""Represents None."""
float64_value: builtins.float
"""Represents a double-precision floating-point value (a Python `float`)."""
int64_value: builtins.int
"""Represents a signed integer value, limited to 64 bits.
Larger values from Python's arbitrary-precision integers are unsupported.
"""
string_value: builtins.str
"""Represents a string of Unicode characters stored in a Python `str`.
In Python 3, this is exactly what type `str` is.
In Python 2, this is the UTF-8 encoding of the characters.
For strings with ASCII characters only (as often used in TensorFlow code)
there is effectively no difference between the language versions.
The obsolescent `unicode` type of Python 2 is not supported here.
"""
bool_value: builtins.bool
"""Represents a boolean value."""
@property
def tensor_shape_value(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
"""Represents a TensorShape."""
tensor_dtype_value: tensorflow.core.framework.types_pb2.DataType.ValueType
"""Represents an enum value for dtype."""
@property
def tensor_spec_value(self) -> global___TensorSpecProto:
"""Represents a value for tf.TensorSpec."""
@property
def type_spec_value(self) -> global___TypeSpecProto:
"""Represents a value for tf.TypeSpec."""
@property
def bounded_tensor_spec_value(self) -> global___BoundedTensorSpecProto:
"""Represents a value for tf.BoundedTensorSpec."""
@property
def list_value(self) -> global___ListValue:
"""Represents a list of `Value`."""
@property
def tuple_value(self) -> global___TupleValue:
"""Represents a tuple of `Value`."""
@property
def dict_value(self) -> global___DictValue:
"""Represents a dict `Value`."""
@property
def named_tuple_value(self) -> global___NamedTupleValue:
"""Represents Python's namedtuple."""
def __init__(
self,
*,
none_value: global___NoneValue | None = ...,
float64_value: builtins.float | None = ...,
int64_value: builtins.int | None = ...,
string_value: builtins.str | None = ...,
bool_value: builtins.bool | None = ...,
tensor_shape_value: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
tensor_dtype_value: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
tensor_spec_value: global___TensorSpecProto | None = ...,
type_spec_value: global___TypeSpecProto | None = ...,
bounded_tensor_spec_value: global___BoundedTensorSpecProto | None = ...,
list_value: global___ListValue | None = ...,
tuple_value: global___TupleValue | None = ...,
dict_value: global___DictValue | None = ...,
named_tuple_value: global___NamedTupleValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["bool_value", b"bool_value", "bounded_tensor_spec_value", b"bounded_tensor_spec_value", "dict_value", b"dict_value", "float64_value", b"float64_value", "int64_value", b"int64_value", "kind", b"kind", "list_value", b"list_value", "named_tuple_value", b"named_tuple_value", "none_value", b"none_value", "string_value", b"string_value", "tensor_dtype_value", b"tensor_dtype_value", "tensor_shape_value", b"tensor_shape_value", "tensor_spec_value", b"tensor_spec_value", "tuple_value", b"tuple_value", "type_spec_value", b"type_spec_value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bool_value", b"bool_value", "bounded_tensor_spec_value", b"bounded_tensor_spec_value", "dict_value", b"dict_value", "float64_value", b"float64_value", "int64_value", b"int64_value", "kind", b"kind", "list_value", b"list_value", "named_tuple_value", b"named_tuple_value", "none_value", b"none_value", "string_value", b"string_value", "tensor_dtype_value", b"tensor_dtype_value", "tensor_shape_value", b"tensor_shape_value", "tensor_spec_value", b"tensor_spec_value", "tuple_value", b"tuple_value", "type_spec_value", b"type_spec_value"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["none_value", "float64_value", "int64_value", "string_value", "bool_value", "tensor_shape_value", "tensor_dtype_value", "tensor_spec_value", "type_spec_value", "bounded_tensor_spec_value", "list_value", "tuple_value", "dict_value", "named_tuple_value"] | None: ...
global___StructuredValue = StructuredValue
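Since all of these fields live in the oneof group "kind", setting one member clears the previous one; a sketch, assuming the runtime module tensorflow.core.protobuf.struct_pb2:

from tensorflow.core.protobuf import struct_pb2

value = struct_pb2.StructuredValue(int64_value=42)
assert value.WhichOneof("kind") == "int64_value"
value.string_value = "hello"                  # switches the oneof
assert value.WhichOneof("kind") == "string_value"
assert not value.HasField("int64_value")      # previous member was cleared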
@typing_extensions.final
class NoneValue(google.protobuf.message.Message):
"""Represents None."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
def __init__(
self,
) -> None: ...
global___NoneValue = NoneValue
@typing_extensions.final
class ListValue(google.protobuf.message.Message):
"""Represents a Python list."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StructuredValue]: ...
def __init__(
self,
*,
values: collections.abc.Iterable[global___StructuredValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
global___ListValue = ListValue
@typing_extensions.final
class TupleValue(google.protobuf.message.Message):
"""Represents a Python tuple."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___StructuredValue]: ...
def __init__(
self,
*,
values: collections.abc.Iterable[global___StructuredValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
global___TupleValue = TupleValue
@typing_extensions.final
class DictValue(google.protobuf.message.Message):
"""Represents a Python dict keyed by `str`.
The comment on Unicode from Value.string_value applies analogously.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class FieldsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___StructuredValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___StructuredValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FIELDS_FIELD_NUMBER: builtins.int
@property
def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___StructuredValue]: ...
def __init__(
self,
*,
fields: collections.abc.Mapping[builtins.str, global___StructuredValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields"]) -> None: ...
global___DictValue = DictValue
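For the message-valued map above, indexing fields creates the entry on first access (direct assignment of a message is not allowed); same assumed module as the previous sketch:

d = struct_pb2.DictValue()
d.fields["training"].bool_value = True   # auto-creates the StructuredValue entry
d.fields["rate"].float64_value = 0.1
assert d.fields["training"].WhichOneof("kind") == "bool_value"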
@typing_extensions.final
class PairValue(google.protobuf.message.Message):
"""Represents a (key, value) pair."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___StructuredValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___StructuredValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
global___PairValue = PairValue
@typing_extensions.final
class NamedTupleValue(google.protobuf.message.Message):
"""Represents Python's namedtuple."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
VALUES_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___PairValue]: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
values: collections.abc.Iterable[global___PairValue] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "values", b"values"]) -> None: ...
global___NamedTupleValue = NamedTupleValue
@typing_extensions.final
class TensorSpecProto(google.protobuf.message.Message):
"""A protobuf to represent tf.TensorSpec."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
def __init__(
self,
*,
name: builtins.str | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "name", b"name", "shape", b"shape"]) -> None: ...
global___TensorSpecProto = TensorSpecProto
@typing_extensions.final
class BoundedTensorSpecProto(google.protobuf.message.Message):
"""A protobuf to represent tf.BoundedTensorSpec."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
DTYPE_FIELD_NUMBER: builtins.int
MINIMUM_FIELD_NUMBER: builtins.int
MAXIMUM_FIELD_NUMBER: builtins.int
name: builtins.str
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
@property
def minimum(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ...
@property
def maximum(self) -> tensorflow.core.framework.tensor_pb2.TensorProto: ...
def __init__(
self,
*,
name: builtins.str | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
minimum: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
maximum: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["maximum", b"maximum", "minimum", b"minimum", "shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["dtype", b"dtype", "maximum", b"maximum", "minimum", b"minimum", "name", b"name", "shape", b"shape"]) -> None: ...
global___BoundedTensorSpecProto = BoundedTensorSpecProto
@typing_extensions.final
class TypeSpecProto(google.protobuf.message.Message):
"""Represents a tf.TypeSpec"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _TypeSpecClass:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _TypeSpecClassEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TypeSpecProto._TypeSpecClass.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: TypeSpecProto._TypeSpecClass.ValueType # 0
SPARSE_TENSOR_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 1
"""tf.SparseTensorSpec"""
INDEXED_SLICES_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 2
"""tf.IndexedSlicesSpec"""
RAGGED_TENSOR_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 3
"""tf.RaggedTensorSpec"""
TENSOR_ARRAY_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 4
"""tf.TensorArraySpec"""
DATA_DATASET_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 5
"""tf.data.DatasetSpec"""
DATA_ITERATOR_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 6
"""IteratorSpec from data/ops/iterator_ops.py"""
OPTIONAL_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 7
"""tf.OptionalSpec"""
PER_REPLICA_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 8
"""PerReplicaSpec from distribute/values.py"""
VARIABLE_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 9
"""tf.VariableSpec"""
ROW_PARTITION_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 10
"""RowPartitionSpec from ragged/row_partition.py"""
REGISTERED_TYPE_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 12
"""The type registered as type_spec_class_name."""
EXTENSION_TYPE_SPEC: TypeSpecProto._TypeSpecClass.ValueType # 13
"""Subclasses of tf.ExtensionType"""
class TypeSpecClass(_TypeSpecClass, metaclass=_TypeSpecClassEnumTypeWrapper): ...
UNKNOWN: TypeSpecProto.TypeSpecClass.ValueType # 0
SPARSE_TENSOR_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 1
"""tf.SparseTensorSpec"""
INDEXED_SLICES_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 2
"""tf.IndexedSlicesSpec"""
RAGGED_TENSOR_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 3
"""tf.RaggedTensorSpec"""
TENSOR_ARRAY_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 4
"""tf.TensorArraySpec"""
DATA_DATASET_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 5
"""tf.data.DatasetSpec"""
DATA_ITERATOR_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 6
"""IteratorSpec from data/ops/iterator_ops.py"""
OPTIONAL_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 7
"""tf.OptionalSpec"""
PER_REPLICA_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 8
"""PerReplicaSpec from distribute/values.py"""
VARIABLE_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 9
"""tf.VariableSpec"""
ROW_PARTITION_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 10
"""RowPartitionSpec from ragged/row_partition.py"""
REGISTERED_TYPE_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 12
"""The type registered as type_spec_class_name."""
EXTENSION_TYPE_SPEC: TypeSpecProto.TypeSpecClass.ValueType # 13
"""Subclasses of tf.ExtensionType"""
TYPE_SPEC_CLASS_FIELD_NUMBER: builtins.int
TYPE_STATE_FIELD_NUMBER: builtins.int
TYPE_SPEC_CLASS_NAME_FIELD_NUMBER: builtins.int
NUM_FLAT_COMPONENTS_FIELD_NUMBER: builtins.int
type_spec_class: global___TypeSpecProto.TypeSpecClass.ValueType
@property
def type_state(self) -> global___StructuredValue:
"""The value returned by TypeSpec._serialize()."""
type_spec_class_name: builtins.str
"""The name of the TypeSpec class.
* If type_spec_class == REGISTERED_TYPE_SPEC, the TypeSpec class is
the one registered under this name. For types registered outside
core TensorFlow by an add-on library, that library must be loaded
before this value can be deserialized by nested_structure_coder.
* If type_spec_class specifies a particular TypeSpec class, this field is
redundant with the type_spec_class enum, and is only used for error
reporting in older binaries that do not know the type_spec_class enum.
"""
num_flat_components: builtins.int
"""The number of flat tensor components required by this TypeSpec."""
def __init__(
self,
*,
type_spec_class: global___TypeSpecProto.TypeSpecClass.ValueType | None = ...,
type_state: global___StructuredValue | None = ...,
type_spec_class_name: builtins.str | None = ...,
num_flat_components: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["type_state", b"type_state"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["num_flat_components", b"num_flat_components", "type_spec_class", b"type_spec_class", "type_spec_class_name", b"type_spec_class_name", "type_state", b"type_state"]) -> None: ...
global___TypeSpecProto = TypeSpecProto
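The nested enum wrapper above supports the usual Name/Value lookups; a sketch, same assumed module:

spec = struct_pb2.TypeSpecProto(
    type_spec_class=struct_pb2.TypeSpecProto.RAGGED_TENSOR_SPEC,
    num_flat_components=2,
)
# Enum values behave as plain ints; Name() recovers the symbolic label.
name = struct_pb2.TypeSpecProto.TypeSpecClass.Name(spec.type_spec_class)
assert name == "RAGGED_TENSOR_SPEC"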

View File

@@ -0,0 +1,130 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.tensor_slice_pb2
import tensorflow.core.framework.types_pb2
import tensorflow.core.framework.versions_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class BundleHeaderProto(google.protobuf.message.Message):
"""Protos used in the tensor bundle module (tf/core/util/tensor_bundle/).
Special header that is associated with a bundle.
TODO(zongheng,zhifengc): maybe in the future, we can add information about
which binary produced this checkpoint, timestamp, etc. Sometimes, these can be
valuable debugging information. And if needed, these can be used as defensive
information ensuring that the reader (binary version) of the checkpoint and the
writer (binary version) match within a certain range, etc.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Endianness:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _EndiannessEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[BundleHeaderProto._Endianness.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
LITTLE: BundleHeaderProto._Endianness.ValueType # 0
BIG: BundleHeaderProto._Endianness.ValueType # 1
class Endianness(_Endianness, metaclass=_EndiannessEnumTypeWrapper):
"""An enum indicating the endianness of the platform that produced this
bundle. A bundle can only be read by a platform with matching endianness.
Defaults to LITTLE, as most modern platforms are little-endian.
Affects the binary tensor data bytes only, not the metadata in protobufs.
"""
LITTLE: BundleHeaderProto.Endianness.ValueType # 0
BIG: BundleHeaderProto.Endianness.ValueType # 1
NUM_SHARDS_FIELD_NUMBER: builtins.int
ENDIANNESS_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
num_shards: builtins.int
"""Number of data files in the bundle."""
endianness: global___BundleHeaderProto.Endianness.ValueType
@property
def version(self) -> tensorflow.core.framework.versions_pb2.VersionDef:
"""Versioning of the tensor bundle format."""
def __init__(
self,
*,
num_shards: builtins.int | None = ...,
endianness: global___BundleHeaderProto.Endianness.ValueType | None = ...,
version: tensorflow.core.framework.versions_pb2.VersionDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["version", b"version"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["endianness", b"endianness", "num_shards", b"num_shards", "version", b"version"]) -> None: ...
global___BundleHeaderProto = BundleHeaderProto
@typing_extensions.final
class BundleEntryProto(google.protobuf.message.Message):
"""Describes the metadata related to a checkpointed tensor."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DTYPE_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
SHARD_ID_FIELD_NUMBER: builtins.int
OFFSET_FIELD_NUMBER: builtins.int
SIZE_FIELD_NUMBER: builtins.int
CRC32C_FIELD_NUMBER: builtins.int
SLICES_FIELD_NUMBER: builtins.int
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType
"""The tensor dtype and shape."""
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto: ...
shard_id: builtins.int
"""The binary content of the tensor lies in:
File "shard_id": bytes [offset, offset + size).
"""
offset: builtins.int
size: builtins.int
crc32c: builtins.int
"""The CRC32C checksum of the tensor bytes."""
@property
def slices(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto]:
"""Iff present, this entry represents a partitioned tensor. The previous
fields are interpreted as follows:
"dtype", "shape": describe the full tensor.
"shard_id", "offset", "size", "crc32c": all IGNORED.
The information for each slice can be looked up in its own
BundleEntryProto, keyed by each "slice_name".
"""
def __init__(
self,
*,
dtype: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
shard_id: builtins.int | None = ...,
offset: builtins.int | None = ...,
size: builtins.int | None = ...,
crc32c: builtins.int | None = ...,
slices: collections.abc.Iterable[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["crc32c", b"crc32c", "dtype", b"dtype", "offset", b"offset", "shape", b"shape", "shard_id", b"shard_id", "size", b"size", "slices", b"slices"]) -> None: ...
global___BundleEntryProto = BundleEntryProto
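A sketch for the bundle entries above, assuming the runtime module tensorflow.core.protobuf.tensor_bundle_pb2; an entry with empty slices describes an unpartitioned tensor:

# Sketch only; the module path is an assumption.
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import tensor_bundle_pb2

entry = tensor_bundle_pb2.BundleEntryProto(
    dtype=types_pb2.DT_FLOAT,
    shard_id=0,
    offset=0,
    size=4096,       # bytes [offset, offset + size) in data file "shard_id"
)
entry.shape.dim.add(size=32)
entry.shape.dim.add(size=32)
assert entry.HasField("shape")
assert len(entry.slices) == 0    # no slices: not a partitioned tensor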

View File

@@ -0,0 +1,91 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
import tensorflow.core.protobuf.cluster_pb2
import tensorflow.core.protobuf.config_pb2
import tensorflow.core.protobuf.device_filters_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class ServerDef(google.protobuf.message.Message):
"""Defines the configuration of a single TensorFlow server."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CLUSTER_FIELD_NUMBER: builtins.int
JOB_NAME_FIELD_NUMBER: builtins.int
TASK_INDEX_FIELD_NUMBER: builtins.int
DEFAULT_SESSION_CONFIG_FIELD_NUMBER: builtins.int
PROTOCOL_FIELD_NUMBER: builtins.int
PORT_FIELD_NUMBER: builtins.int
CLUSTER_DEVICE_FILTERS_FIELD_NUMBER: builtins.int
@property
def cluster(self) -> tensorflow.core.protobuf.cluster_pb2.ClusterDef:
"""The cluster of which this server is a member."""
job_name: builtins.str
"""The name of the job of which this server is a member.
NOTE(mrry): The `cluster` field must contain a `JobDef` with a `name` field
that matches this name.
"""
task_index: builtins.int
"""The task index of this server in its job.
NOTE: The `cluster` field must contain a `JobDef` with a matching `name`
and a mapping in its `tasks` field for this index.
"""
@property
def default_session_config(self) -> tensorflow.core.protobuf.config_pb2.ConfigProto:
"""The default configuration for sessions that run on this server."""
protocol: builtins.str
"""The protocol to be used by this server.
Acceptable values include: "grpc", "grpc+verbs".
"""
port: builtins.int
"""The server port. If not set, then we identify the port from the job_name."""
@property
def cluster_device_filters(self) -> tensorflow.core.protobuf.device_filters_pb2.ClusterDeviceFilters:
"""Device filters for remote tasks in the cluster.
NOTE: This is an experimental feature and only effective in TensorFlow 2.x.
"""
def __init__(
self,
*,
cluster: tensorflow.core.protobuf.cluster_pb2.ClusterDef | None = ...,
job_name: builtins.str | None = ...,
task_index: builtins.int | None = ...,
default_session_config: tensorflow.core.protobuf.config_pb2.ConfigProto | None = ...,
protocol: builtins.str | None = ...,
port: builtins.int | None = ...,
cluster_device_filters: tensorflow.core.protobuf.device_filters_pb2.ClusterDeviceFilters | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["cluster", b"cluster", "cluster_device_filters", b"cluster_device_filters", "default_session_config", b"default_session_config"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["cluster", b"cluster", "cluster_device_filters", b"cluster_device_filters", "default_session_config", b"default_session_config", "job_name", b"job_name", "port", b"port", "protocol", b"protocol", "task_index", b"task_index"]) -> None: ...
global___ServerDef = ServerDef
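A construction sketch, assuming the runtime module tensorflow.core.protobuf.tensorflow_server_pb2; per the field docs, the cluster must name a job matching job_name, with a task entry for task_index:

from tensorflow.core.protobuf import tensorflow_server_pb2

server_def = tensorflow_server_pb2.ServerDef(
    job_name="worker",
    task_index=0,
    protocol="grpc",
)
job = server_def.cluster.job.add(name="worker")   # JobDef inside ClusterDef
job.tasks[0] = "localhost:2222"                   # map<int32, string> of task addresses
assert server_def.HasField("cluster")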

View File

@@ -0,0 +1,66 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.compiler.xla.service.hlo_pb2
import tensorflow.tsl.protobuf.error_codes_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class CompilationResultProto(google.protobuf.message.Message):
"""Describes the result of a TPU compilation. This is also used as TPU
compilation result status payload.
URI: "type.googleapis.com/tensorflow.tpu.CompilationResultProto"
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _ErrorCode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ErrorCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[CompilationResultProto._ErrorCode.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: CompilationResultProto._ErrorCode.ValueType # 0
OUT_OF_MEMORY: CompilationResultProto._ErrorCode.ValueType # 1
class ErrorCode(_ErrorCode, metaclass=_ErrorCodeEnumTypeWrapper): ...
UNKNOWN: CompilationResultProto.ErrorCode.ValueType # 0
OUT_OF_MEMORY: CompilationResultProto.ErrorCode.ValueType # 1
STATUS_CODE_FIELD_NUMBER: builtins.int
STATUS_ERROR_MESSAGE_FIELD_NUMBER: builtins.int
HLO_PROTOS_FIELD_NUMBER: builtins.int
ERROR_CODE_FIELD_NUMBER: builtins.int
status_code: tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType
"""The error message, if any, returned during compilation."""
status_error_message: builtins.str
@property
def hlo_protos(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.compiler.xla.service.hlo_pb2.HloProto]:
"""HLO proto."""
error_code: global___CompilationResultProto.ErrorCode.ValueType
def __init__(
self,
*,
status_code: tensorflow.tsl.protobuf.error_codes_pb2.Code.ValueType | None = ...,
status_error_message: builtins.str | None = ...,
hlo_protos: collections.abc.Iterable[tensorflow.compiler.xla.service.hlo_pb2.HloProto] | None = ...,
error_code: global___CompilationResultProto.ErrorCode.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["error_code", b"error_code", "hlo_protos", b"hlo_protos", "status_code", b"status_code", "status_error_message", b"status_error_message"]) -> None: ...
global___CompilationResultProto = CompilationResultProto

View File

@@ -0,0 +1,45 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class PaddingMap(google.protobuf.message.Message):
"""A mapping between the dynamic shape dimension of an input and the arg that
represents the real shape.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ARG_INDEX_FIELD_NUMBER: builtins.int
SHAPE_INDEX_FIELD_NUMBER: builtins.int
PADDING_ARG_INDEX_FIELD_NUMBER: builtins.int
arg_index: builtins.int
"""Input arg index with dynamic shapes."""
shape_index: builtins.int
"""The dynamic shape dimension index."""
padding_arg_index: builtins.int
"""The arg index that dynamic dimension maps to, which represents the value
of the real shape.
"""
def __init__(
self,
*,
arg_index: builtins.int | None = ...,
shape_index: builtins.int | None = ...,
padding_arg_index: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["arg_index", b"arg_index", "padding_arg_index", b"padding_arg_index", "shape_index", b"shape_index"]) -> None: ...
global___PaddingMap = PaddingMap
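A tiny sketch for the message above; the runtime module path (assumed here to be tensorflow.core.protobuf.tpu.dynamic_padding_pb2) is inferred from the package layout:

from tensorflow.core.protobuf.tpu import dynamic_padding_pb2

pm = dynamic_padding_pb2.PaddingMap(arg_index=0, shape_index=1, padding_arg_index=3)
pm.ClearField("shape_index")      # scalar fields reset to their default
assert pm.shape_index == 0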

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,119 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TPUHardwareFeature(google.protobuf.message.Message):
"""Describes features of a tpu."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _EmbeddingFeature:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _EmbeddingFeatureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUHardwareFeature._EmbeddingFeature.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNSUPPORTED: TPUHardwareFeature._EmbeddingFeature.ValueType # 0
"""No embedding lookup accelerator available on the tpu."""
V1: TPUHardwareFeature._EmbeddingFeature.ValueType # 1
"""Embedding lookup accelerator V1. The embedding lookup operation can only
be placed at the beginning of the computation. Only one instance of the
embedding lookup layer is allowed.
"""
V2: TPUHardwareFeature._EmbeddingFeature.ValueType # 2
"""Embedding lookup accelerator V2. The embedding lookup operation can be
placed anywhere in the computation. Multiple instances of the embedding
lookup layer are allowed.
"""
class EmbeddingFeature(_EmbeddingFeature, metaclass=_EmbeddingFeatureEnumTypeWrapper):
"""Embedding feature of a tpu."""
UNSUPPORTED: TPUHardwareFeature.EmbeddingFeature.ValueType # 0
"""No embedding lookup accelerator available on the tpu."""
V1: TPUHardwareFeature.EmbeddingFeature.ValueType # 1
"""Embedding lookup accelerator V1. The embedding lookup operation can only
be placed at the beginning of the computation. Only one instance of the
embedding lookup layer is allowed.
"""
V2: TPUHardwareFeature.EmbeddingFeature.ValueType # 2
"""Embedding lookup accelerator V2. The embedding lookup operation can be
placed anywhere in the computation. Multiple instances of the embedding
lookup layer are allowed.
"""
EMBEDDING_FEATURE_FIELD_NUMBER: builtins.int
embedding_feature: global___TPUHardwareFeature.EmbeddingFeature.ValueType
def __init__(
self,
*,
embedding_feature: global___TPUHardwareFeature.EmbeddingFeature.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["embedding_feature", b"embedding_feature"]) -> None: ...
global___TPUHardwareFeature = TPUHardwareFeature
@typing_extensions.final
class TopologyProto(google.protobuf.message.Message):
"""Describes the geometry of a TPU mesh."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MESH_SHAPE_FIELD_NUMBER: builtins.int
NUM_TASKS_FIELD_NUMBER: builtins.int
NUM_TPU_DEVICES_PER_TASK_FIELD_NUMBER: builtins.int
DEVICE_COORDINATES_FIELD_NUMBER: builtins.int
TPU_HARDWARE_FEATURE_FIELD_NUMBER: builtins.int
@property
def mesh_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""The dimensions of the TPU topology, in cores. Typically, this is a 4D
topology [x, y, z, core], where the major dimensions correspond to TPU
chips, and the minor dimension describes the number of cores on a multicore
chip.
"""
num_tasks: builtins.int
"""Number of TensorFlow tasks in the cluster."""
num_tpu_devices_per_task: builtins.int
"""Number of TPU devices per task."""
@property
def device_coordinates(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""A flattened rank 3 int32 array with shape
[num_tasks, num_tpu_devices_per_task, len(mesh_shape)].
`tasks` is the number of tasks in the TPU cluster, `devices` is the number
of TPU devices per task, and the minor dimension corresponds to a position
in the TPU mesh topology. Each entry [task, device, axis] gives the
`axis`-th coordinate in the topology of a task/device pair.
"""
@property
def tpu_hardware_feature(self) -> global___TPUHardwareFeature:
"""TPU supported features."""
def __init__(
self,
*,
mesh_shape: collections.abc.Iterable[builtins.int] | None = ...,
num_tasks: builtins.int | None = ...,
num_tpu_devices_per_task: builtins.int | None = ...,
device_coordinates: collections.abc.Iterable[builtins.int] | None = ...,
tpu_hardware_feature: global___TPUHardwareFeature | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["tpu_hardware_feature", b"tpu_hardware_feature"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["device_coordinates", b"device_coordinates", "mesh_shape", b"mesh_shape", "num_tasks", b"num_tasks", "num_tpu_devices_per_task", b"num_tpu_devices_per_task", "tpu_hardware_feature", b"tpu_hardware_feature"]) -> None: ...
global___TopologyProto = TopologyProto
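A sketch of the topology messages above, assuming the runtime module tensorflow.core.protobuf.tpu.topology_pb2:

from tensorflow.core.protobuf.tpu import topology_pb2

topology = topology_pb2.TopologyProto(
    mesh_shape=[2, 2, 1, 2],        # [x, y, z, core], in cores
    num_tasks=1,
    num_tpu_devices_per_task=8,
)
topology.tpu_hardware_feature.embedding_feature = (
    topology_pb2.TPUHardwareFeature.EmbeddingFeature.V2
)
# device_coordinates, when filled, is flattened to
# num_tasks * num_tpu_devices_per_task * len(mesh_shape) ints.
assert len(topology.mesh_shape) == 4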

View File

@@ -0,0 +1,271 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.protobuf.tpu.optimization_parameters_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TPUEmbeddingConfiguration(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Mode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._Mode.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNSPECIFIED: TPUEmbeddingConfiguration._Mode.ValueType # 0
INFERENCE: TPUEmbeddingConfiguration._Mode.ValueType # 1
TRAINING: TPUEmbeddingConfiguration._Mode.ValueType # 2
BACKWARD_PASS_ONLY: TPUEmbeddingConfiguration._Mode.ValueType # 3
class Mode(_Mode, metaclass=_ModeEnumTypeWrapper):
"""Mode. Should the embedding layer program be run for inference (just forward
pass), training (both forward and backward pass) or just the backward_pass.
"""
UNSPECIFIED: TPUEmbeddingConfiguration.Mode.ValueType # 0
INFERENCE: TPUEmbeddingConfiguration.Mode.ValueType # 1
TRAINING: TPUEmbeddingConfiguration.Mode.ValueType # 2
BACKWARD_PASS_ONLY: TPUEmbeddingConfiguration.Mode.ValueType # 3
class _ShardingStrategy:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ShardingStrategyEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TPUEmbeddingConfiguration._ShardingStrategy.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DIV_DEFAULT: TPUEmbeddingConfiguration._ShardingStrategy.ValueType # 0
MOD: TPUEmbeddingConfiguration._ShardingStrategy.ValueType # 1
class ShardingStrategy(_ShardingStrategy, metaclass=_ShardingStrategyEnumTypeWrapper):
"""Sharding strategy of the embedding tables among the hosts.
If the sharding_strategy is "mod", each id is assigned to host
"id % num_hosts". For instance, 13 ids are split across 5 hosts as:
[[0, 5, 10], [1, 6, 11], [2, 7, 12], [3, 8], [4, 9]].
If the sharding_strategy is "div", ids are assigned to hosts in a
contiguous manner. In this case, 13 ids are split across 5 hosts as:
[[0, 1, 2], [3, 4, 5], [6, 7, 8], [9, 10], [11, 12]].
In both strategies, if the id space does not evenly divide the number
of hosts, each of the first "table_descriptor.vocabulary_size % num_hosts"
hosts will be assigned one more id.
This partitioning strategy exactly follows that in the embedding_lookup
TensorFlow function at tensorflow/python/ops/embedding_ops.py.
"""
DIV_DEFAULT: TPUEmbeddingConfiguration.ShardingStrategy.ValueType # 0
MOD: TPUEmbeddingConfiguration.ShardingStrategy.ValueType # 1
@typing_extensions.final
class TableDescriptor(google.protobuf.message.Message):
"""Description of the various embedding tables."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
VOCABULARY_SIZE_FIELD_NUMBER: builtins.int
DIMENSION_FIELD_NUMBER: builtins.int
NUM_FEATURES_FIELD_NUMBER: builtins.int
OPTIMIZATION_PARAMETERS_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of the table."""
vocabulary_size: builtins.int
"""Size of the vocabulary (i.e., number of rows) in the table."""
dimension: builtins.int
"""The embedding dimension (i.e., the width of the embedding table)."""
num_features: builtins.int
"""Number of features mapped to this table."""
@property
def optimization_parameters(self) -> tensorflow.core.protobuf.tpu.optimization_parameters_pb2.OptimizationParameters:
"""Details of the learning algorithm used to update the embedding
parameters.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
vocabulary_size: builtins.int | None = ...,
dimension: builtins.int | None = ...,
num_features: builtins.int | None = ...,
optimization_parameters: tensorflow.core.protobuf.tpu.optimization_parameters_pb2.OptimizationParameters | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["optimization_parameters", b"optimization_parameters"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["dimension", b"dimension", "name", b"name", "num_features", b"num_features", "optimization_parameters", b"optimization_parameters", "vocabulary_size", b"vocabulary_size"]) -> None: ...
@typing_extensions.final
class FeatureDescriptor(google.protobuf.message.Message):
"""Description of different input features."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
TABLE_ID_FIELD_NUMBER: builtins.int
INPUT_SHAPE_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of the input feature."""
table_id: builtins.int
"""Index of the corresponding table in the TableDescriptor list."""
@property
def input_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Static shape of the inputs (excluding the reduction axis). Note that
the shape of the actual inputs provided using the infeed op must be
strictly smaller than input_shape. The outputs received at the TensorCore
will have rank = input_shape.size() + 1. The innermost axis corresponds
to the embedding dimension. If the input has shape [m, n, k] (excluding
the reduction axis) and the embedding dimension is d, the output received
at the TensorCore will have shape [m, n, k, d].
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
table_id: builtins.int | None = ...,
input_shape: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["input_shape", b"input_shape", "name", b"name", "table_id", b"table_id"]) -> None: ...
@typing_extensions.final
class SpmdSharding(google.protobuf.message.Message):
"""SPMD (Single Program Multiple Data) sharding configuration for
TPUEmbedding. When model parallelism is used on the TensorCore, the number
of cores per replica must be passed to TPUEmbedding so that the right
shapes can be computed in the TF/XLA bridge.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ENABLED_FIELD_NUMBER: builtins.int
NUM_CORES_PER_REPLICA_FIELD_NUMBER: builtins.int
enabled: builtins.bool
"""Whether SPMD sharding is enabled."""
num_cores_per_replica: builtins.int
"""Number of cores per replica."""
def __init__(
self,
*,
enabled: builtins.bool | None = ...,
num_cores_per_replica: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["enabled", b"enabled", "num_cores_per_replica", b"num_cores_per_replica"]) -> None: ...
TABLE_DESCRIPTOR_FIELD_NUMBER: builtins.int
MODE_FIELD_NUMBER: builtins.int
BATCH_SIZE_PER_TENSOR_CORE_FIELD_NUMBER: builtins.int
NUM_HOSTS_FIELD_NUMBER: builtins.int
NUM_TENSOR_CORES_FIELD_NUMBER: builtins.int
SHARDING_STRATEGY_FIELD_NUMBER: builtins.int
PIPELINE_EXECUTION_WITH_TENSOR_CORE_FIELD_NUMBER: builtins.int
PROFILE_DATA_DIRECTORY_FIELD_NUMBER: builtins.int
FEATURE_DESCRIPTOR_FIELD_NUMBER: builtins.int
SPMD_SHARDING_FIELD_NUMBER: builtins.int
@property
def table_descriptor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TPUEmbeddingConfiguration.TableDescriptor]: ...
mode: global___TPUEmbeddingConfiguration.Mode.ValueType
batch_size_per_tensor_core: builtins.int
"""Number of samples in each batch of embedding layer activations sent to
the TensorCore.
"""
num_hosts: builtins.int
"""Number of TPU hosts used for inference/training."""
num_tensor_cores: builtins.int
"""Number of TensorCore used for inference/training."""
sharding_strategy: global___TPUEmbeddingConfiguration.ShardingStrategy.ValueType
pipeline_execution_with_tensor_core: builtins.bool
"""This parameter determines if the execution of the sparse core will be
pipelined with that of the TensorCore. This parameter only affects results
when mode=TRAINING. If mode=INFERENCE or BACKWARD_PASS_ONLY, this parameter
does not affect execution and hence is ignored.
false: The execution of the sparse core is not pipelined with that of the
TensorCore. The forward pass of every step on the sparse core is executed
only after the backward pass of the previous step is complete. And the
backward pass on the sparse core is executed only after the embedding
gradients have been computed on the TensorCore on every step. This ensures
that the activations on every step observe the gradient updates from the
previous step on both the sparse core and the TensorCore.
true: The execution of the sparse core is pipelined with that of the
TensorCore. The forward pass of every step on the sparse core can be
executed after the forward pass of the previous step is complete without
waiting for the backward pass. This improves the utilization of the sparse
core allowing it to process step N+1 while the embedding gradients for step
N are computed on the TensorCore. The backward pass of every step on the
sparse core is executed directly after the forward pass for the next step
is complete. The drawback is that embedding activations for step N+1 do not
observe the embedding gradient updates from step N. This could affect model
quality if step N and N+1 involve the same set of embedding IDs. However,
since the embedding updates are sparse, this is generally not considered a
problem.
"""
profile_data_directory: builtins.str
"""Directory where embedding lookup statistics are stored. These statistics
summarize information about the inputs to the embedding lookup
operation, in particular, the average number of embedding IDs per example
and how well the embedding IDs are load balanced across the system. The
lookup statistics are used during TPU initialization for embedding table
partitioning. Collection of lookup statistics is done at runtime by
profiling the embedding inputs: only 3% of input samples are profiled to
minimize host CPU overhead. Once a suitable number of samples are
profiled, the lookup statistics are saved to table-specific files in the
profile data directory generally at the end of a TPU training loop. The
filename corresponding to each table is obtained by hashing table specific
parameters (e.g., table name and number of features) and global
configuration parameters (e.g., sharding strategy and TPU worker task
count). The same profile data directory can be shared amongst several
models to reuse embedding lookup statistics.
"""
@property
def feature_descriptor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TPUEmbeddingConfiguration.FeatureDescriptor]:
"""If the feature_descriptor field is populated, the model should NOT populate
TableDescriptor.num_features and batch_size_per_tensor_core. These two
fields will be auto-populated by the TPUEmbedding rewrite passes.
"""
@property
def spmd_sharding(self) -> global___TPUEmbeddingConfiguration.SpmdSharding: ...
def __init__(
self,
*,
table_descriptor: collections.abc.Iterable[global___TPUEmbeddingConfiguration.TableDescriptor] | None = ...,
mode: global___TPUEmbeddingConfiguration.Mode.ValueType | None = ...,
batch_size_per_tensor_core: builtins.int | None = ...,
num_hosts: builtins.int | None = ...,
num_tensor_cores: builtins.int | None = ...,
sharding_strategy: global___TPUEmbeddingConfiguration.ShardingStrategy.ValueType | None = ...,
pipeline_execution_with_tensor_core: builtins.bool | None = ...,
profile_data_directory: builtins.str | None = ...,
feature_descriptor: collections.abc.Iterable[global___TPUEmbeddingConfiguration.FeatureDescriptor] | None = ...,
spmd_sharding: global___TPUEmbeddingConfiguration.SpmdSharding | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["spmd_sharding", b"spmd_sharding"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["batch_size_per_tensor_core", b"batch_size_per_tensor_core", "feature_descriptor", b"feature_descriptor", "mode", b"mode", "num_hosts", b"num_hosts", "num_tensor_cores", b"num_tensor_cores", "pipeline_execution_with_tensor_core", b"pipeline_execution_with_tensor_core", "profile_data_directory", b"profile_data_directory", "sharding_strategy", b"sharding_strategy", "spmd_sharding", b"spmd_sharding", "table_descriptor", b"table_descriptor"]) -> None: ...
global___TPUEmbeddingConfiguration = TPUEmbeddingConfiguration
@typing_extensions.final
class TPUEmbeddingError(google.protobuf.message.Message):
"""A placeholder message that is used to define a unique Status payload
URL for TPU embedding errors.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
def __init__(
self,
) -> None: ...
global___TPUEmbeddingError = TPUEmbeddingError
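
To sanity-check how these TPU embedding stubs read in practice, here is a minimal sketch of keyword-only construction; the module path (tensorflow.core.protobuf.tpu.tpu_embedding_configuration_pb2), the TRAINING enum name, and all field values are assumptions for illustration, not something this PR pins down.

from tensorflow.core.protobuf.tpu.tpu_embedding_configuration_pb2 import TPUEmbeddingConfiguration

# All fields are keyword-only and optional, matching the stub's __init__.
config = TPUEmbeddingConfiguration(
    mode=TPUEmbeddingConfiguration.TRAINING,   # nested Mode enum value (assumed name)
    batch_size_per_tensor_core=128,            # illustrative value
    num_hosts=4,
    num_tensor_cores=32,
    pipeline_execution_with_tensor_core=True,  # pipeline the sparse core with the TensorCore
)
# Repeated composite fields such as table_descriptor are grown with .add().
table = config.table_descriptor.add()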

View File

@@ -0,0 +1,176 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import google.protobuf.wrappers_pb2
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class TrackableObjectGraph(google.protobuf.message.Message):
"""A TensorBundle addition which saves extra information about the objects which
own variables, allowing for more robust checkpoint loading into modified
programs.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class TrackableObject(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ObjectReference(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_ID_FIELD_NUMBER: builtins.int
LOCAL_NAME_FIELD_NUMBER: builtins.int
node_id: builtins.int
"""An index into `TrackableObjectGraph.nodes`, indicating the object
being referenced.
"""
local_name: builtins.str
"""A user-provided name for the edge."""
def __init__(
self,
*,
node_id: builtins.int | None = ...,
local_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["local_name", b"local_name", "node_id", b"node_id"]) -> None: ...
@typing_extensions.final
class SerializedTensor(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
FULL_NAME_FIELD_NUMBER: builtins.int
CHECKPOINT_KEY_FIELD_NUMBER: builtins.int
name: builtins.str
"""A name for the Tensor. Simple variables have only one
`SerializedTensor` named "VARIABLE_VALUE" by convention. This value may
be restored on object creation as an optimization.
"""
full_name: builtins.str
"""The full name of the variable/tensor, if applicable. Used to allow
name-based loading of checkpoints which were saved using an
object-based API. Should match the checkpoint key which would have been
assigned by tf.train.Saver.
"""
checkpoint_key: builtins.str
"""The generated name of the Tensor in the checkpoint."""
def __init__(
self,
*,
name: builtins.str | None = ...,
full_name: builtins.str | None = ...,
checkpoint_key: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["checkpoint_key", b"checkpoint_key", "full_name", b"full_name", "name", b"name"]) -> None: ...
@typing_extensions.final
class SlotVariableReference(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ORIGINAL_VARIABLE_NODE_ID_FIELD_NUMBER: builtins.int
SLOT_NAME_FIELD_NUMBER: builtins.int
SLOT_VARIABLE_NODE_ID_FIELD_NUMBER: builtins.int
original_variable_node_id: builtins.int
"""An index into `TrackableObjectGraph.nodes`, indicating the
variable object this slot was created for.
"""
slot_name: builtins.str
"""The name of the slot (e.g. "m"/"v")."""
slot_variable_node_id: builtins.int
"""An index into `TrackableObjectGraph.nodes`, indicating the
`Object` with the value of the slot variable.
"""
def __init__(
self,
*,
original_variable_node_id: builtins.int | None = ...,
slot_name: builtins.str | None = ...,
slot_variable_node_id: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["original_variable_node_id", b"original_variable_node_id", "slot_name", b"slot_name", "slot_variable_node_id", b"slot_variable_node_id"]) -> None: ...
CHILDREN_FIELD_NUMBER: builtins.int
ATTRIBUTES_FIELD_NUMBER: builtins.int
SLOT_VARIABLES_FIELD_NUMBER: builtins.int
REGISTERED_SAVER_FIELD_NUMBER: builtins.int
HAS_CHECKPOINT_VALUES_FIELD_NUMBER: builtins.int
@property
def children(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.ObjectReference]:
"""Objects which this object depends on."""
@property
def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.SerializedTensor]:
"""Serialized data specific to this object."""
@property
def slot_variables(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject.SlotVariableReference]:
"""Slot variables owned by this object."""
@property
def registered_saver(self) -> global___RegisteredSaver:
"""The registered saver used to save this object. If this saver is not
present when loading the checkpoint, then loading will fail.
"""
@property
def has_checkpoint_values(self) -> google.protobuf.wrappers_pb2.BoolValue:
"""Whether this object has checkpoint values or descendants with checkpoint
values. This is computed at save time to avoid traversing the entire
object graph proto when restoring (which also has to traverse the live
object graph).
"""
def __init__(
self,
*,
children: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.ObjectReference] | None = ...,
attributes: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.SerializedTensor] | None = ...,
slot_variables: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject.SlotVariableReference] | None = ...,
registered_saver: global___RegisteredSaver | None = ...,
has_checkpoint_values: google.protobuf.wrappers_pb2.BoolValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["has_checkpoint_values", b"has_checkpoint_values", "registered_saver", b"registered_saver"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "children", b"children", "has_checkpoint_values", b"has_checkpoint_values", "registered_saver", b"registered_saver", "slot_variables", b"slot_variables"]) -> None: ...
NODES_FIELD_NUMBER: builtins.int
@property
def nodes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___TrackableObjectGraph.TrackableObject]: ...
def __init__(
self,
*,
nodes: collections.abc.Iterable[global___TrackableObjectGraph.TrackableObject] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["nodes", b"nodes"]) -> None: ...
global___TrackableObjectGraph = TrackableObjectGraph
@typing_extensions.final
class RegisteredSaver(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
OBJECT_NAME_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name of the registered saver/restore function."""
object_name: builtins.str
"""Unique auto-generated name of the object."""
def __init__(
self,
*,
name: builtins.str | None = ...,
object_name: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "object_name", b"object_name"]) -> None: ...
global___RegisteredSaver = RegisteredSaver
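
A quick illustration of the TrackableObjectGraph stubs above, assuming the usual tensorflow.core.protobuf.trackable_object_graph_pb2 module path; the two-node layout and the checkpoint key are made up for the example.

from tensorflow.core.protobuf.trackable_object_graph_pb2 import TrackableObjectGraph

graph = TrackableObjectGraph()
root = graph.nodes.add()       # node 0: illustrative root object
variable = graph.nodes.add()   # node 1: an object owning a variable
# ObjectReference.node_id indexes into graph.nodes, per the docstring above.
root.children.add(node_id=1, local_name="kernel")
variable.attributes.add(
    name="VARIABLE_VALUE",     # the convention noted on SerializedTensor.name
    checkpoint_key="layer/kernel/.ATTRIBUTES/VARIABLE_VALUE",  # made-up key
)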

View File

@@ -0,0 +1,35 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class RecvBufRespExtra(google.protobuf.message.Message):
"""Extra data needed on a non-RDMA RecvBufResponse."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_CONTENT_FIELD_NUMBER: builtins.int
@property
def tensor_content(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bytes]: ...
def __init__(
self,
*,
tensor_content: collections.abc.Iterable[builtins.bytes] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["tensor_content", b"tensor_content"]) -> None: ...
global___RecvBufRespExtra = RecvBufRespExtra

View File

@@ -0,0 +1,56 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class VerifierConfig(google.protobuf.message.Message):
"""The config for graph verifiers."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Toggle:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ToggleEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[VerifierConfig._Toggle.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT: VerifierConfig._Toggle.ValueType # 0
ON: VerifierConfig._Toggle.ValueType # 1
OFF: VerifierConfig._Toggle.ValueType # 2
class Toggle(_Toggle, metaclass=_ToggleEnumTypeWrapper): ...
DEFAULT: VerifierConfig.Toggle.ValueType # 0
ON: VerifierConfig.Toggle.ValueType # 1
OFF: VerifierConfig.Toggle.ValueType # 2
VERIFICATION_TIMEOUT_IN_MS_FIELD_NUMBER: builtins.int
STRUCTURE_VERIFIER_FIELD_NUMBER: builtins.int
verification_timeout_in_ms: builtins.int
"""Deadline for completion of all verification i.e. all the Toggle ON
verifiers must complete execution within this time.
"""
structure_verifier: global___VerifierConfig.Toggle.ValueType
"""Perform structural validation on a tensorflow graph. Default is OFF."""
def __init__(
self,
*,
verification_timeout_in_ms: builtins.int | None = ...,
structure_verifier: global___VerifierConfig.Toggle.ValueType | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["structure_verifier", b"structure_verifier", "verification_timeout_in_ms", b"verification_timeout_in_ms"]) -> None: ...
global___VerifierConfig = VerifierConfig
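
The Toggle enum machinery above is the standard mypy-protobuf pattern; a short sketch of how it is consumed, assuming the tensorflow.core.protobuf.verifier_config_pb2 module path:

from tensorflow.core.protobuf.verifier_config_pb2 import VerifierConfig

config = VerifierConfig(
    verification_timeout_in_ms=60_000,     # illustrative deadline
    structure_verifier=VerifierConfig.ON,  # nested Toggle value
)
# The enum wrapper exposes the usual Name()/Value() helpers.
assert VerifierConfig.Toggle.Name(config.structure_verifier) == "ON"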

View File

@@ -0,0 +1,331 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import sys
import tensorflow.core.framework.summary_pb2
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _WorkerHealth:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _WorkerHealthEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerHealth.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
OK: _WorkerHealth.ValueType # 0
"""By default a worker is healthy."""
RECEIVED_SHUTDOWN_SIGNAL: _WorkerHealth.ValueType # 1
INTERNAL_ERROR: _WorkerHealth.ValueType # 2
SHUTTING_DOWN: _WorkerHealth.ValueType # 3
"""Worker has been instructed to shutdown after a timeout."""
class WorkerHealth(_WorkerHealth, metaclass=_WorkerHealthEnumTypeWrapper):
"""Worker heartbeat messages. Support for these operations is currently
internal and expected to change.
Current health status of a worker.
"""
OK: WorkerHealth.ValueType # 0
"""By default a worker is healthy."""
RECEIVED_SHUTDOWN_SIGNAL: WorkerHealth.ValueType # 1
INTERNAL_ERROR: WorkerHealth.ValueType # 2
SHUTTING_DOWN: WorkerHealth.ValueType # 3
"""Worker has been instructed to shutdown after a timeout."""
global___WorkerHealth = WorkerHealth
class _WorkerShutdownMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _WorkerShutdownModeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_WorkerShutdownMode.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
DEFAULT: _WorkerShutdownMode.ValueType # 0
NOT_CONFIGURED: _WorkerShutdownMode.ValueType # 1
WAIT_FOR_COORDINATOR: _WorkerShutdownMode.ValueType # 2
SHUTDOWN_AFTER_TIMEOUT: _WorkerShutdownMode.ValueType # 3
class WorkerShutdownMode(_WorkerShutdownMode, metaclass=_WorkerShutdownModeEnumTypeWrapper):
"""Indicates the behavior of the worker when an internal error or shutdown
signal is received.
"""
DEFAULT: WorkerShutdownMode.ValueType # 0
NOT_CONFIGURED: WorkerShutdownMode.ValueType # 1
WAIT_FOR_COORDINATOR: WorkerShutdownMode.ValueType # 2
SHUTDOWN_AFTER_TIMEOUT: WorkerShutdownMode.ValueType # 3
global___WorkerShutdownMode = WorkerShutdownMode
@typing_extensions.final
class Event(google.protobuf.message.Message):
"""Protocol buffer representing an event that happened during
the execution of a Brain model.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
WALL_TIME_FIELD_NUMBER: builtins.int
STEP_FIELD_NUMBER: builtins.int
FILE_VERSION_FIELD_NUMBER: builtins.int
GRAPH_DEF_FIELD_NUMBER: builtins.int
SUMMARY_FIELD_NUMBER: builtins.int
LOG_MESSAGE_FIELD_NUMBER: builtins.int
SESSION_LOG_FIELD_NUMBER: builtins.int
TAGGED_RUN_METADATA_FIELD_NUMBER: builtins.int
META_GRAPH_DEF_FIELD_NUMBER: builtins.int
wall_time: builtins.float
"""Timestamp of the event."""
step: builtins.int
"""Global step of the event."""
file_version: builtins.str
"""An event file was started, with the specified version.
This is used to identify the contents of the record IO files
easily. Current version is "brain.Event:2". All versions
start with "brain.Event:".
"""
graph_def: builtins.bytes
"""An encoded version of a GraphDef."""
@property
def summary(self) -> tensorflow.core.framework.summary_pb2.Summary:
"""A summary was generated."""
@property
def log_message(self) -> global___LogMessage:
"""The user output a log message. This was theoretically used by the defunct
tensorboard_logging module, which has since been removed; this field is
now deprecated and should not be used.
"""
@property
def session_log(self) -> global___SessionLog:
"""The state of the session which can be used for restarting after crashes."""
@property
def tagged_run_metadata(self) -> global___TaggedRunMetadata:
"""The metadata returned by running a session.run() call."""
meta_graph_def: builtins.bytes
"""An encoded version of a MetaGraphDef."""
def __init__(
self,
*,
wall_time: builtins.float | None = ...,
step: builtins.int | None = ...,
file_version: builtins.str | None = ...,
graph_def: builtins.bytes | None = ...,
summary: tensorflow.core.framework.summary_pb2.Summary | None = ...,
log_message: global___LogMessage | None = ...,
session_log: global___SessionLog | None = ...,
tagged_run_metadata: global___TaggedRunMetadata | None = ...,
meta_graph_def: builtins.bytes | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["file_version", b"file_version", "graph_def", b"graph_def", "log_message", b"log_message", "meta_graph_def", b"meta_graph_def", "session_log", b"session_log", "summary", b"summary", "tagged_run_metadata", b"tagged_run_metadata", "what", b"what"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["file_version", b"file_version", "graph_def", b"graph_def", "log_message", b"log_message", "meta_graph_def", b"meta_graph_def", "session_log", b"session_log", "step", b"step", "summary", b"summary", "tagged_run_metadata", b"tagged_run_metadata", "wall_time", b"wall_time", "what", b"what"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["what", b"what"]) -> typing_extensions.Literal["file_version", "graph_def", "summary", "log_message", "session_log", "tagged_run_metadata", "meta_graph_def"] | None: ...
global___Event = Event
@typing_extensions.final
class LogMessage(google.protobuf.message.Message):
"""Protocol buffer used for logging messages to the events file.
This was theoretically used by the defunct tensorboard_logging module, which
has been removed; this message is now deprecated and should not be used.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _Level:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _LevelEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[LogMessage._Level.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: LogMessage._Level.ValueType # 0
DEBUGGING: LogMessage._Level.ValueType # 10
"""Note: The logging level 10 cannot be named DEBUG. Some software
projects compile their C/C++ code with -DDEBUG in debug builds. So the
C++ code generated from this file should not have an identifier named
DEBUG.
"""
INFO: LogMessage._Level.ValueType # 20
WARN: LogMessage._Level.ValueType # 30
ERROR: LogMessage._Level.ValueType # 40
FATAL: LogMessage._Level.ValueType # 50
class Level(_Level, metaclass=_LevelEnumTypeWrapper): ...
UNKNOWN: LogMessage.Level.ValueType # 0
DEBUGGING: LogMessage.Level.ValueType # 10
"""Note: The logging level 10 cannot be named DEBUG. Some software
projects compile their C/C++ code with -DDEBUG in debug builds. So the
C++ code generated from this file should not have an identifier named
DEBUG.
"""
INFO: LogMessage.Level.ValueType # 20
WARN: LogMessage.Level.ValueType # 30
ERROR: LogMessage.Level.ValueType # 40
FATAL: LogMessage.Level.ValueType # 50
LEVEL_FIELD_NUMBER: builtins.int
MESSAGE_FIELD_NUMBER: builtins.int
level: global___LogMessage.Level.ValueType
message: builtins.str
def __init__(
self,
*,
level: global___LogMessage.Level.ValueType | None = ...,
message: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["level", b"level", "message", b"message"]) -> None: ...
global___LogMessage = LogMessage
@typing_extensions.final
class SessionLog(google.protobuf.message.Message):
"""Protocol buffer used for logging session state."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _SessionStatus:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _SessionStatusEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SessionLog._SessionStatus.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
STATUS_UNSPECIFIED: SessionLog._SessionStatus.ValueType # 0
START: SessionLog._SessionStatus.ValueType # 1
STOP: SessionLog._SessionStatus.ValueType # 2
CHECKPOINT: SessionLog._SessionStatus.ValueType # 3
class SessionStatus(_SessionStatus, metaclass=_SessionStatusEnumTypeWrapper): ...
STATUS_UNSPECIFIED: SessionLog.SessionStatus.ValueType # 0
START: SessionLog.SessionStatus.ValueType # 1
STOP: SessionLog.SessionStatus.ValueType # 2
CHECKPOINT: SessionLog.SessionStatus.ValueType # 3
STATUS_FIELD_NUMBER: builtins.int
CHECKPOINT_PATH_FIELD_NUMBER: builtins.int
MSG_FIELD_NUMBER: builtins.int
status: global___SessionLog.SessionStatus.ValueType
checkpoint_path: builtins.str
"""This checkpoint_path contains both the path and filename."""
msg: builtins.str
def __init__(
self,
*,
status: global___SessionLog.SessionStatus.ValueType | None = ...,
checkpoint_path: builtins.str | None = ...,
msg: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["checkpoint_path", b"checkpoint_path", "msg", b"msg", "status", b"status"]) -> None: ...
global___SessionLog = SessionLog
@typing_extensions.final
class TaggedRunMetadata(google.protobuf.message.Message):
"""For logging the metadata output for a single session.run() call."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TAG_FIELD_NUMBER: builtins.int
RUN_METADATA_FIELD_NUMBER: builtins.int
tag: builtins.str
"""Tag name associated with this metadata."""
run_metadata: builtins.bytes
"""Byte-encoded version of the `RunMetadata` proto in order to allow lazy
deserialization.
"""
def __init__(
self,
*,
tag: builtins.str | None = ...,
run_metadata: builtins.bytes | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["run_metadata", b"run_metadata", "tag", b"tag"]) -> None: ...
global___TaggedRunMetadata = TaggedRunMetadata
@typing_extensions.final
class WatchdogConfig(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TIMEOUT_MS_FIELD_NUMBER: builtins.int
timeout_ms: builtins.int
def __init__(
self,
*,
timeout_ms: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["timeout_ms", b"timeout_ms"]) -> None: ...
global___WatchdogConfig = WatchdogConfig
@typing_extensions.final
class RequestedExitCode(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
EXIT_CODE_FIELD_NUMBER: builtins.int
exit_code: builtins.int
def __init__(
self,
*,
exit_code: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["exit_code", b"exit_code"]) -> None: ...
global___RequestedExitCode = RequestedExitCode
@typing_extensions.final
class WorkerHeartbeatRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SHUTDOWN_MODE_FIELD_NUMBER: builtins.int
WATCHDOG_CONFIG_FIELD_NUMBER: builtins.int
EXIT_CODE_FIELD_NUMBER: builtins.int
shutdown_mode: global___WorkerShutdownMode.ValueType
@property
def watchdog_config(self) -> global___WatchdogConfig: ...
@property
def exit_code(self) -> global___RequestedExitCode: ...
def __init__(
self,
*,
shutdown_mode: global___WorkerShutdownMode.ValueType | None = ...,
watchdog_config: global___WatchdogConfig | None = ...,
exit_code: global___RequestedExitCode | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["exit_code", b"exit_code", "watchdog_config", b"watchdog_config"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["exit_code", b"exit_code", "shutdown_mode", b"shutdown_mode", "watchdog_config", b"watchdog_config"]) -> None: ...
global___WorkerHeartbeatRequest = WorkerHeartbeatRequest
@typing_extensions.final
class WorkerHeartbeatResponse(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
HEALTH_STATUS_FIELD_NUMBER: builtins.int
WORKER_LOG_FIELD_NUMBER: builtins.int
HOSTNAME_FIELD_NUMBER: builtins.int
health_status: global___WorkerHealth.ValueType
@property
def worker_log(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Event]: ...
hostname: builtins.str
def __init__(
self,
*,
health_status: global___WorkerHealth.ValueType | None = ...,
worker_log: collections.abc.Iterable[global___Event] | None = ...,
hostname: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["health_status", b"health_status", "hostname", b"hostname", "worker_log", b"worker_log"]) -> None: ...
global___WorkerHeartbeatResponse = WorkerHeartbeatResponse
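
Since Event routes file_version, summary, graph_def, and friends through the `what` oneof, the HasField/WhichOneof signatures above are the interesting part. A minimal sketch, assuming the conventional tensorflow.core.util.event_pb2 module path (top-level enums such as WorkerHealth surface their values as module attributes) and invented field values:

from tensorflow.core.util import event_pb2

event = event_pb2.Event(wall_time=1678.0, step=42, file_version="brain.Event:2")
# Only one member of the `what` oneof can be set at a time.
assert event.WhichOneof("what") == "file_version"

response = event_pb2.WorkerHeartbeatResponse(
    health_status=event_pb2.OK,  # module-level WorkerHealth value
    worker_log=[event],          # any Iterable[Event] is accepted
)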

View File

@@ -0,0 +1,72 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class MemmappedFileSystemDirectoryElement(google.protobuf.message.Message):
"""A message that describes one region of memmapped file."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OFFSET_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
LENGTH_FIELD_NUMBER: builtins.int
offset: builtins.int
name: builtins.str
length: builtins.int
def __init__(
self,
*,
offset: builtins.int | None = ...,
name: builtins.str | None = ...,
length: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["length", b"length", "name", b"name", "offset", b"offset"]) -> None: ...
global___MemmappedFileSystemDirectoryElement = MemmappedFileSystemDirectoryElement
@typing_extensions.final
class MemmappedFileSystemDirectory(google.protobuf.message.Message):
"""A directory of regions in a memmapped file."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ELEMENT_FIELD_NUMBER: builtins.int
@property
def element(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MemmappedFileSystemDirectoryElement]: ...
def __init__(
self,
*,
element: collections.abc.Iterable[global___MemmappedFileSystemDirectoryElement] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["element", b"element"]) -> None: ...
global___MemmappedFileSystemDirectory = MemmappedFileSystemDirectory
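
A tiny usage sketch for the memmapped-file directory above, assuming the tensorflow.core.util.memmapped_file_system_pb2 module path; region names and sizes are invented:

from tensorflow.core.util.memmapped_file_system_pb2 import MemmappedFileSystemDirectory

directory = MemmappedFileSystemDirectory()
# One element per memmapped region of the file.
directory.element.add(name="region0", offset=0, length=4096)
directory.element.add(name="region1", offset=4096, length=1024)
payload = directory.SerializeToString()  # inherited from Message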

View File

@@ -0,0 +1,165 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
A checkpoint file is an sstable. The value for each record is a serialized
SavedTensorSlices message (defined below).
Each checkpoint file has a record with the empty key (""), which corresponds
to a SavedTensorSlices message that contains a "meta" entry, which serves as a
table of contents for all the tensor slices saved in this file. Since the key
is "", it's always the first record in each file.
Each of the rest of the records in a checkpoint stores the raw data of a
particular tensor slice, in SavedSlice format. The corresponding key is an
ordered code that encodes the name of the tensor and the slice
information. The name is also stored in the SavedSlice message for ease of
debugging and manual examination.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.core.framework.tensor_pb2
import tensorflow.core.framework.tensor_shape_pb2
import tensorflow.core.framework.tensor_slice_pb2
import tensorflow.core.framework.types_pb2
import tensorflow.core.framework.versions_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SavedSliceMeta(google.protobuf.message.Message):
"""Metadata describing the set of slices of the same tensor saved in a
checkpoint file.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SHAPE_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
SLICE_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of the tensor."""
@property
def shape(self) -> tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto:
"""Shape of the tensor"""
type: tensorflow.core.framework.types_pb2.DataType.ValueType
"""Type of the tensor"""
@property
def slice(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto]:
"""Explicit list of slices saved in the checkpoint file."""
def __init__(
self,
*,
name: builtins.str | None = ...,
shape: tensorflow.core.framework.tensor_shape_pb2.TensorShapeProto | None = ...,
type: tensorflow.core.framework.types_pb2.DataType.ValueType | None = ...,
slice: collections.abc.Iterable[tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto] | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["shape", b"shape"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["name", b"name", "shape", b"shape", "slice", b"slice", "type", b"type"]) -> None: ...
global___SavedSliceMeta = SavedSliceMeta
@typing_extensions.final
class SavedTensorSliceMeta(google.protobuf.message.Message):
"""Metadata describing the set of tensor slices saved in a checkpoint file.
It is always stored at the beginning of each checkpoint file.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_FIELD_NUMBER: builtins.int
VERSIONS_FIELD_NUMBER: builtins.int
@property
def tensor(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedSliceMeta]:
"""Each SavedSliceMeta describes the slices for one tensor."""
@property
def versions(self) -> tensorflow.core.framework.versions_pb2.VersionDef:
"""Compatibility version of this checkpoint. See core/public/version.h
for version history.
"""
def __init__(
self,
*,
tensor: collections.abc.Iterable[global___SavedSliceMeta] | None = ...,
versions: tensorflow.core.framework.versions_pb2.VersionDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["versions", b"versions"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["tensor", b"tensor", "versions", b"versions"]) -> None: ...
global___SavedTensorSliceMeta = SavedTensorSliceMeta
@typing_extensions.final
class SavedSlice(google.protobuf.message.Message):
"""Saved tensor slice: it stores the name of the tensors, the slice, and the
raw data.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
SLICE_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
name: builtins.str
"""Name of the tensor that this slice belongs to. This must be identical to
the name used to encode the key for this record.
"""
@property
def slice(self) -> tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto:
"""Extent of the slice. Must have one entry for each of the dimension of the
tensor that this slice belongs to.
"""
@property
def data(self) -> tensorflow.core.framework.tensor_pb2.TensorProto:
"""The raw data of the slice is stored as a TensorProto. Only raw data are
stored (we don't fill in fields such as dtype or tensor_shape).
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
slice: tensorflow.core.framework.tensor_slice_pb2.TensorSliceProto | None = ...,
data: tensorflow.core.framework.tensor_pb2.TensorProto | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["data", b"data", "slice", b"slice"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "name", b"name", "slice", b"slice"]) -> None: ...
global___SavedSlice = SavedSlice
@typing_extensions.final
class SavedTensorSlices(google.protobuf.message.Message):
"""Each record in a v3 checkpoint file is a serialized SavedTensorSlices
message.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
META_FIELD_NUMBER: builtins.int
DATA_FIELD_NUMBER: builtins.int
@property
def meta(self) -> global___SavedTensorSliceMeta:
"""This is only present at the first item of each checkpoint file and serves
as a table of contents, listing all the tensor slices saved in this file.
"""
@property
def data(self) -> global___SavedSlice:
"""This exists in all but the first item of each checkpoint file."""
def __init__(
self,
*,
meta: global___SavedTensorSliceMeta | None = ...,
data: global___SavedSlice | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["data", b"data", "meta", b"meta"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["data", b"data", "meta", b"meta"]) -> None: ...
global___SavedTensorSlices = SavedTensorSlices
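
To make the table-of-contents layout described in the module docstring concrete, here is a sketch (the tensorflow.core.util.saved_tensor_slice_pb2 module path and the tensor name are assumptions):

from tensorflow.core.util.saved_tensor_slice_pb2 import SavedTensorSlices

# First record of a checkpoint file: metadata only, keyed by "".
toc = SavedTensorSlices()
toc.meta.tensor.add(name="embedding/weights")

# Every later record carries the raw data for a single slice.
record = SavedTensorSlices()
record.data.name = "embedding/weights"
assert record.HasField("data") and not record.HasField("meta")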

View File

@@ -0,0 +1,575 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protocol messages for describing the results of benchmarks and unit tests."""
import builtins
import collections.abc
import google.protobuf.any_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import google.protobuf.wrappers_pb2
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class EntryValue(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
DOUBLE_VALUE_FIELD_NUMBER: builtins.int
STRING_VALUE_FIELD_NUMBER: builtins.int
double_value: builtins.float
string_value: builtins.str
def __init__(
self,
*,
double_value: builtins.float | None = ...,
string_value: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["double_value", b"double_value", "kind", b"kind", "string_value", b"string_value"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["double_value", "string_value"] | None: ...
global___EntryValue = EntryValue
@typing_extensions.final
class MetricEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
MIN_VALUE_FIELD_NUMBER: builtins.int
MAX_VALUE_FIELD_NUMBER: builtins.int
name: builtins.str
"""Metric name"""
value: builtins.float
"""Metric value"""
@property
def min_value(self) -> google.protobuf.wrappers_pb2.DoubleValue:
"""The minimum acceptable value for the metric if specified"""
@property
def max_value(self) -> google.protobuf.wrappers_pb2.DoubleValue:
"""The maximum acceptable value for the metric if specified"""
def __init__(
self,
*,
name: builtins.str | None = ...,
value: builtins.float | None = ...,
min_value: google.protobuf.wrappers_pb2.DoubleValue | None = ...,
max_value: google.protobuf.wrappers_pb2.DoubleValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["max_value", b"max_value", "min_value", b"min_value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["max_value", b"max_value", "min_value", b"min_value", "name", b"name", "value", b"value"]) -> None: ...
global___MetricEntry = MetricEntry
@typing_extensions.final
class BenchmarkEntry(google.protobuf.message.Message):
"""Each unit test or benchmark in a test or benchmark run provides
some set of information. Here we provide some reasonable keys
one would expect to see, with optional key/value pairs for things
we haven't considered.
This BenchmarkEntry should be emitted by each unit test or benchmark
reporter.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class ExtrasEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
@property
def value(self) -> global___EntryValue: ...
def __init__(
self,
*,
key: builtins.str | None = ...,
value: global___EntryValue | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NAME_FIELD_NUMBER: builtins.int
ITERS_FIELD_NUMBER: builtins.int
CPU_TIME_FIELD_NUMBER: builtins.int
WALL_TIME_FIELD_NUMBER: builtins.int
THROUGHPUT_FIELD_NUMBER: builtins.int
EXTRAS_FIELD_NUMBER: builtins.int
METRICS_FIELD_NUMBER: builtins.int
name: builtins.str
"""The name of the specific benchmark or test
(e.g. BM_AdjustContrast_gpu_B_W_H)
"""
iters: builtins.int
"""If a benchmark, how many iterations it was run for"""
cpu_time: builtins.float
"""Total cpu time used for all iterations (in seconds)"""
wall_time: builtins.float
"""Total wall time used for all iterations (in seconds)"""
throughput: builtins.float
"""Throughput (in MB/s)"""
@property
def extras(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, global___EntryValue]:
"""Generic map from result key to value."""
@property
def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricEntry]:
"""Metric name, value and expected range. This can include accuracy metrics
typically used to determine whether the accuracy test has passed.
"""
def __init__(
self,
*,
name: builtins.str | None = ...,
iters: builtins.int | None = ...,
cpu_time: builtins.float | None = ...,
wall_time: builtins.float | None = ...,
throughput: builtins.float | None = ...,
extras: collections.abc.Mapping[builtins.str, global___EntryValue] | None = ...,
metrics: collections.abc.Iterable[global___MetricEntry] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cpu_time", b"cpu_time", "extras", b"extras", "iters", b"iters", "metrics", b"metrics", "name", b"name", "throughput", b"throughput", "wall_time", b"wall_time"]) -> None: ...
global___BenchmarkEntry = BenchmarkEntry
@typing_extensions.final
class BenchmarkEntries(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ENTRY_FIELD_NUMBER: builtins.int
@property
def entry(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___BenchmarkEntry]: ...
def __init__(
self,
*,
entry: collections.abc.Iterable[global___BenchmarkEntry] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["entry", b"entry"]) -> None: ...
global___BenchmarkEntries = BenchmarkEntries
@typing_extensions.final
class BuildConfiguration(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MODE_FIELD_NUMBER: builtins.int
CC_FLAGS_FIELD_NUMBER: builtins.int
OPTS_FIELD_NUMBER: builtins.int
mode: builtins.str
"""opt, dbg, etc"""
@property
def cc_flags(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""CC compiler flags, if known"""
@property
def opts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Bazel compilation options, if known"""
def __init__(
self,
*,
mode: builtins.str | None = ...,
cc_flags: collections.abc.Iterable[builtins.str] | None = ...,
opts: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cc_flags", b"cc_flags", "mode", b"mode", "opts", b"opts"]) -> None: ...
global___BuildConfiguration = BuildConfiguration
@typing_extensions.final
class CommitId(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CHANGELIST_FIELD_NUMBER: builtins.int
HASH_FIELD_NUMBER: builtins.int
SNAPSHOT_FIELD_NUMBER: builtins.int
PENDING_CHANGELIST_FIELD_NUMBER: builtins.int
changelist: builtins.int
"""Submitted changelist."""
hash: builtins.str
snapshot: builtins.str
"""Hash of intermediate change between hash/changelist and what was tested.
Not used if the build is from a commit without modifications.
"""
pending_changelist: builtins.int
"""Changelist tested if the change list is not already submitted."""
def __init__(
self,
*,
changelist: builtins.int | None = ...,
hash: builtins.str | None = ...,
snapshot: builtins.str | None = ...,
pending_changelist: builtins.int | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["changelist", b"changelist", "hash", b"hash", "kind", b"kind", "pending_changelist", b"pending_changelist", "snapshot", b"snapshot"]) -> None: ...
def WhichOneof(self, oneof_group: typing_extensions.Literal["kind", b"kind"]) -> typing_extensions.Literal["changelist", "hash"] | None: ...
global___CommitId = CommitId
@typing_extensions.final
class CPUInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class CacheSizeEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.int
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
NUM_CORES_FIELD_NUMBER: builtins.int
NUM_CORES_ALLOWED_FIELD_NUMBER: builtins.int
MHZ_PER_CPU_FIELD_NUMBER: builtins.int
CPU_INFO_FIELD_NUMBER: builtins.int
CPU_GOVERNOR_FIELD_NUMBER: builtins.int
CACHE_SIZE_FIELD_NUMBER: builtins.int
num_cores: builtins.int
num_cores_allowed: builtins.int
mhz_per_cpu: builtins.float
"""How fast are these cpus?"""
cpu_info: builtins.str
"""Additional cpu information. For example,
Intel Ivybridge with HyperThreading (24 cores) dL1:32KB dL2:256KB dL3:30MB
"""
cpu_governor: builtins.str
"""What kind of cpu scaling is enabled on the host.
Examples include "performance", "ondemand", "conservative", "mixed".
"""
@property
def cache_size(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]:
"""Cache sizes (in bytes), e.g. "L2": 262144 (for 256KB)"""
def __init__(
self,
*,
num_cores: builtins.int | None = ...,
num_cores_allowed: builtins.int | None = ...,
mhz_per_cpu: builtins.float | None = ...,
cpu_info: builtins.str | None = ...,
cpu_governor: builtins.str | None = ...,
cache_size: collections.abc.Mapping[builtins.str, builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["cache_size", b"cache_size", "cpu_governor", b"cpu_governor", "cpu_info", b"cpu_info", "mhz_per_cpu", b"mhz_per_cpu", "num_cores", b"num_cores", "num_cores_allowed", b"num_cores_allowed"]) -> None: ...
global___CPUInfo = CPUInfo
@typing_extensions.final
class MemoryInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TOTAL_FIELD_NUMBER: builtins.int
AVAILABLE_FIELD_NUMBER: builtins.int
total: builtins.int
"""Total virtual memory in bytes"""
available: builtins.int
"""Immediately available memory in bytes"""
def __init__(
self,
*,
total: builtins.int | None = ...,
available: builtins.int | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["available", b"available", "total", b"total"]) -> None: ...
global___MemoryInfo = MemoryInfo
@typing_extensions.final
class GPUInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MODEL_FIELD_NUMBER: builtins.int
UUID_FIELD_NUMBER: builtins.int
BUS_ID_FIELD_NUMBER: builtins.int
model: builtins.str
"""e.g. "Tesla K40c" """
uuid: builtins.str
"""Final entry in output of "nvidia-smi -L" """
bus_id: builtins.str
"""e.g. "0000:04:00.0" """
def __init__(
self,
*,
model: builtins.str | None = ...,
uuid: builtins.str | None = ...,
bus_id: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bus_id", b"bus_id", "model", b"model", "uuid", b"uuid"]) -> None: ...
global___GPUInfo = GPUInfo
@typing_extensions.final
class PlatformInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BITS_FIELD_NUMBER: builtins.int
LINKAGE_FIELD_NUMBER: builtins.int
MACHINE_FIELD_NUMBER: builtins.int
RELEASE_FIELD_NUMBER: builtins.int
SYSTEM_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
bits: builtins.str
"""e.g. '64bit'"""
linkage: builtins.str
"""e.g. 'ELF'"""
machine: builtins.str
"""e.g. 'i386'"""
release: builtins.str
"""e.g. '3.13.0-76-generic'"""
system: builtins.str
"""e.g. 'Linux'"""
version: builtins.str
"""e.g. '#120-Ubuntu SMP Mon Jan 18 15:59:10 UTC 2016'"""
def __init__(
self,
*,
bits: builtins.str | None = ...,
linkage: builtins.str | None = ...,
machine: builtins.str | None = ...,
release: builtins.str | None = ...,
system: builtins.str | None = ...,
version: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bits", b"bits", "linkage", b"linkage", "machine", b"machine", "release", b"release", "system", b"system", "version", b"version"]) -> None: ...
global___PlatformInfo = PlatformInfo
@typing_extensions.final
class AvailableDeviceInfo(google.protobuf.message.Message):
"""Matches DeviceAttributes"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
MEMORY_LIMIT_FIELD_NUMBER: builtins.int
PHYSICAL_DESCRIPTION_FIELD_NUMBER: builtins.int
name: builtins.str
"""Device name."""
type: builtins.str
"""Device type, e.g. 'CPU' or 'GPU'."""
memory_limit: builtins.int
"""Memory capacity in bytes."""
physical_description: builtins.str
"""The physical description of this device."""
def __init__(
self,
*,
name: builtins.str | None = ...,
type: builtins.str | None = ...,
memory_limit: builtins.int | None = ...,
physical_description: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["memory_limit", b"memory_limit", "name", b"name", "physical_description", b"physical_description", "type", b"type"]) -> None: ...
global___AvailableDeviceInfo = AvailableDeviceInfo
@typing_extensions.final
class MachineConfiguration(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
HOSTNAME_FIELD_NUMBER: builtins.int
SERIAL_IDENTIFIER_FIELD_NUMBER: builtins.int
PLATFORM_INFO_FIELD_NUMBER: builtins.int
CPU_INFO_FIELD_NUMBER: builtins.int
DEVICE_INFO_FIELD_NUMBER: builtins.int
AVAILABLE_DEVICE_INFO_FIELD_NUMBER: builtins.int
MEMORY_INFO_FIELD_NUMBER: builtins.int
hostname: builtins.str
"""Host name of machine that ran the benchmark."""
serial_identifier: builtins.str
"""Unique serial number of the machine."""
@property
def platform_info(self) -> global___PlatformInfo:
"""Additional platform information."""
@property
def cpu_info(self) -> global___CPUInfo:
"""CPU Information."""
@property
def device_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[google.protobuf.any_pb2.Any]:
"""Other devices that are attached and relevant (e.g. GPUInfo)."""
@property
def available_device_info(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AvailableDeviceInfo]:
"""Devices accessible to the test (e.g. as given by list_local_devices)."""
@property
def memory_info(self) -> global___MemoryInfo: ...
def __init__(
self,
*,
hostname: builtins.str | None = ...,
serial_identifier: builtins.str | None = ...,
platform_info: global___PlatformInfo | None = ...,
cpu_info: global___CPUInfo | None = ...,
device_info: collections.abc.Iterable[google.protobuf.any_pb2.Any] | None = ...,
available_device_info: collections.abc.Iterable[global___AvailableDeviceInfo] | None = ...,
memory_info: global___MemoryInfo | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["cpu_info", b"cpu_info", "memory_info", b"memory_info", "platform_info", b"platform_info"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["available_device_info", b"available_device_info", "cpu_info", b"cpu_info", "device_info", b"device_info", "hostname", b"hostname", "memory_info", b"memory_info", "platform_info", b"platform_info", "serial_identifier", b"serial_identifier"]) -> None: ...
global___MachineConfiguration = MachineConfiguration
@typing_extensions.final
class RunConfiguration(google.protobuf.message.Message):
"""Run-specific items such as arguments to the test / benchmark."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
@typing_extensions.final
class EnvVarsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
value: builtins.str
def __init__(
self,
*,
key: builtins.str | None = ...,
value: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
ARGUMENT_FIELD_NUMBER: builtins.int
ENV_VARS_FIELD_NUMBER: builtins.int
@property
def argument(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
@property
def env_vars(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
"""Environment variables used to run the test/benchmark."""
def __init__(
self,
*,
argument: collections.abc.Iterable[builtins.str] | None = ...,
env_vars: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["argument", b"argument", "env_vars", b"env_vars"]) -> None: ...
global___RunConfiguration = RunConfiguration
@typing_extensions.final
class TestResults(google.protobuf.message.Message):
"""The output of one benchmark / test run. Each run contains a list of
tests or benchmarks, stored as BenchmarkEntry messages.
This message should be emitted by the reporter, which runs the
test / benchmark in a subprocess, reads the emitted BenchmarkEntry messages
(usually from a serialized JSON file), and finally collects them along
with additional information about the test run.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _BenchmarkType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _BenchmarkTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[TestResults._BenchmarkType.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN: TestResults._BenchmarkType.ValueType # 0
"""Fallback for protos written before Type was introduced."""
CPP_MICROBENCHMARK: TestResults._BenchmarkType.ValueType # 1
PYTHON_BENCHMARK: TestResults._BenchmarkType.ValueType # 2
ANDROID_BENCHMARK: TestResults._BenchmarkType.ValueType # 3
EDGE_BENCHMARK: TestResults._BenchmarkType.ValueType # 4
IOS_BENCHMARK: TestResults._BenchmarkType.ValueType # 5
class BenchmarkType(_BenchmarkType, metaclass=_BenchmarkTypeEnumTypeWrapper):
"""The type of benchmark."""
UNKNOWN: TestResults.BenchmarkType.ValueType # 0
"""Fallback for protos written before Type was introduced."""
CPP_MICROBENCHMARK: TestResults.BenchmarkType.ValueType # 1
PYTHON_BENCHMARK: TestResults.BenchmarkType.ValueType # 2
ANDROID_BENCHMARK: TestResults.BenchmarkType.ValueType # 3
EDGE_BENCHMARK: TestResults.BenchmarkType.ValueType # 4
IOS_BENCHMARK: TestResults.BenchmarkType.ValueType # 5
TARGET_FIELD_NUMBER: builtins.int
ENTRIES_FIELD_NUMBER: builtins.int
BUILD_CONFIGURATION_FIELD_NUMBER: builtins.int
COMMIT_ID_FIELD_NUMBER: builtins.int
START_TIME_FIELD_NUMBER: builtins.int
RUN_TIME_FIELD_NUMBER: builtins.int
MACHINE_CONFIGURATION_FIELD_NUMBER: builtins.int
RUN_CONFIGURATION_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
BENCHMARK_TYPE_FIELD_NUMBER: builtins.int
RUN_MODE_FIELD_NUMBER: builtins.int
TF_VERSION_FIELD_NUMBER: builtins.int
target: builtins.str
"""The target of the run, e.g.:
//tensorflow/core:kernels_adjust_contrast_op_benchmark_test
"""
@property
def entries(self) -> global___BenchmarkEntries:
"""The list of tests or benchmarks in this run."""
@property
def build_configuration(self) -> global___BuildConfiguration:
"""The configuration of the build (compiled opt? with cuda? any copts?)"""
@property
def commit_id(self) -> global___CommitId:
"""The commit id (git hash or changelist)"""
start_time: builtins.int
"""The time the run started (in seconds of UTC time since Unix epoch)"""
run_time: builtins.float
"""The amount of time the total run took (wall time in seconds)"""
@property
def machine_configuration(self) -> global___MachineConfiguration:
"""Machine-specific parameters (Platform and CPU info)"""
@property
def run_configuration(self) -> global___RunConfiguration:
"""Run-specific parameters (arguments, etc)"""
name: builtins.str
"""Benchmark target identifier."""
benchmark_type: global___TestResults.BenchmarkType.ValueType
run_mode: builtins.str
"""Used for differentiating between continuous and debug builds.
Must be one of:
* cbuild: results from continuous build.
* presubmit: results from oneshot requests.
* culprit: results from culprit finder rerun.
"""
tf_version: builtins.str
"""TensorFlow version this benchmark runs against.
This can be either set to full version or just the major version.
"""
def __init__(
self,
*,
target: builtins.str | None = ...,
entries: global___BenchmarkEntries | None = ...,
build_configuration: global___BuildConfiguration | None = ...,
commit_id: global___CommitId | None = ...,
start_time: builtins.int | None = ...,
run_time: builtins.float | None = ...,
machine_configuration: global___MachineConfiguration | None = ...,
run_configuration: global___RunConfiguration | None = ...,
name: builtins.str | None = ...,
benchmark_type: global___TestResults.BenchmarkType.ValueType | None = ...,
run_mode: builtins.str | None = ...,
tf_version: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["build_configuration", b"build_configuration", "commit_id", b"commit_id", "entries", b"entries", "machine_configuration", b"machine_configuration", "run_configuration", b"run_configuration"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["benchmark_type", b"benchmark_type", "build_configuration", b"build_configuration", "commit_id", b"commit_id", "entries", b"entries", "machine_configuration", b"machine_configuration", "name", b"name", "run_configuration", b"run_configuration", "run_mode", b"run_mode", "run_time", b"run_time", "start_time", b"start_time", "target", b"target", "tf_version", b"tf_version"]) -> None: ...
global___TestResults = TestResults
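
The extras map and the EntryValue `kind` oneof above are where these stubs differ most from plain attribute access, so a short sketch may help; it assumes the tensorflow.core.util.test_log_pb2 module path and invented benchmark names/values:

from tensorflow.core.util.test_log_pb2 import BenchmarkEntry, TestResults

entry = BenchmarkEntry(name="BM_Example", iters=1000, wall_time=2.5)
# extras is a map<string, EntryValue>: indexing a missing key inserts a
# default EntryValue, and assigning double_value selects the `kind` oneof.
entry.extras["peak_rss_mb"].double_value = 512.0
assert entry.extras["peak_rss_mb"].WhichOneof("kind") == "double_value"

results = TestResults(target="//some:benchmark_test")  # invented target
results.entries.entry.append(entry)                    # copies the message in
results.benchmark_type = TestResults.PYTHON_BENCHMARK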

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def __getattr__(name: str) -> Incomplete: ...

View File

@@ -0,0 +1,3 @@
from _typeshed import Incomplete
def __getattr__(name: str) -> Incomplete: ...

View File

@@ -0,0 +1,105 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
This file is a copy of the TensorBoard ProjectorConfig proto.
Keep this file in sync with the source proto definition at
https://github.com/tensorflow/tensorboard/blob/master/tensorboard/plugins/projector/projector_config.proto
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SpriteMetadata(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
IMAGE_PATH_FIELD_NUMBER: builtins.int
SINGLE_IMAGE_DIM_FIELD_NUMBER: builtins.int
image_path: builtins.str
@property
def single_image_dim(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""[width, height] of a single image in the sprite."""
def __init__(
self,
*,
image_path: builtins.str | None = ...,
single_image_dim: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["image_path", b"image_path", "single_image_dim", b"single_image_dim"]) -> None: ...
global___SpriteMetadata = SpriteMetadata
@typing_extensions.final
class EmbeddingInfo(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
TENSOR_NAME_FIELD_NUMBER: builtins.int
METADATA_PATH_FIELD_NUMBER: builtins.int
BOOKMARKS_PATH_FIELD_NUMBER: builtins.int
TENSOR_SHAPE_FIELD_NUMBER: builtins.int
SPRITE_FIELD_NUMBER: builtins.int
TENSOR_PATH_FIELD_NUMBER: builtins.int
tensor_name: builtins.str
metadata_path: builtins.str
bookmarks_path: builtins.str
@property
def tensor_shape(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Shape of the 2D tensor [N x D]. If missing, it will be inferred from the
model checkpoint.
"""
@property
def sprite(self) -> global___SpriteMetadata: ...
tensor_path: builtins.str
"""Path to the TSV file holding the tensor values. If missing, the tensor
is assumed to be stored in the model checkpoint.
"""
def __init__(
self,
*,
tensor_name: builtins.str | None = ...,
metadata_path: builtins.str | None = ...,
bookmarks_path: builtins.str | None = ...,
tensor_shape: collections.abc.Iterable[builtins.int] | None = ...,
sprite: global___SpriteMetadata | None = ...,
tensor_path: builtins.str | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["sprite", b"sprite"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["bookmarks_path", b"bookmarks_path", "metadata_path", b"metadata_path", "sprite", b"sprite", "tensor_name", b"tensor_name", "tensor_path", b"tensor_path", "tensor_shape", b"tensor_shape"]) -> None: ...
global___EmbeddingInfo = EmbeddingInfo
@typing_extensions.final
class ProjectorConfig(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MODEL_CHECKPOINT_PATH_FIELD_NUMBER: builtins.int
EMBEDDINGS_FIELD_NUMBER: builtins.int
MODEL_CHECKPOINT_DIR_FIELD_NUMBER: builtins.int
model_checkpoint_path: builtins.str
"""Path to the checkpoint file. Use either this or model_checkpoint_dir."""
@property
def embeddings(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___EmbeddingInfo]: ...
model_checkpoint_dir: builtins.str
"""Path to the checkpoint directory. The directory will be scanned for the
latest checkpoint file.
"""
def __init__(
self,
*,
model_checkpoint_path: builtins.str | None = ...,
embeddings: collections.abc.Iterable[global___EmbeddingInfo] | None = ...,
model_checkpoint_dir: builtins.str | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["embeddings", b"embeddings", "model_checkpoint_dir", b"model_checkpoint_dir", "model_checkpoint_path", b"model_checkpoint_path"]) -> None: ...
global___ProjectorConfig = ProjectorConfig
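
A short sketch of how these three messages nest (illustrative; the projector_config_pb2 module path is an assumption, since this view omits the file name):

# Sketch only: the module path below is assumed, not confirmed by this diff.
from tensorflow.python.keras.protobuf.projector_config_pb2 import (
    EmbeddingInfo,
    ProjectorConfig,
    SpriteMetadata,
)

config = ProjectorConfig(
    model_checkpoint_dir="/tmp/ckpt",  # use either this or model_checkpoint_path
    embeddings=[
        EmbeddingInfo(
            tensor_name="embedding",
            metadata_path="metadata.tsv",
            sprite=SpriteMetadata(image_path="sprite.png", single_image_dim=[28, 28]),
        )
    ],
)
assert config.embeddings[0].HasField("sprite")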

View File

@@ -0,0 +1,80 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Protobuf containing the metadata for each Keras object saved in a SavedModel."""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
import tensorflow.python.keras.protobuf.versions_pb2
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class SavedMetadata(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODES_FIELD_NUMBER: builtins.int
@property
def nodes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SavedObject]:
"""Nodes represent trackable objects in the SavedModel. The data for every
Keras object is stored.
"""
def __init__(
self,
*,
nodes: collections.abc.Iterable[global___SavedObject] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["nodes", b"nodes"]) -> None: ...
global___SavedMetadata = SavedMetadata
@typing_extensions.final
class SavedObject(google.protobuf.message.Message):
"""Metadata of an individual Keras object."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NODE_ID_FIELD_NUMBER: builtins.int
NODE_PATH_FIELD_NUMBER: builtins.int
IDENTIFIER_FIELD_NUMBER: builtins.int
METADATA_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
node_id: builtins.int
"""Index of the node in the SavedModel SavedObjectGraph."""
node_path: builtins.str
"""String path from root (e.g. "root.child_layer")"""
identifier: builtins.str
"""Identifier to determine loading function.
Must be one of:
_tf_keras_input_layer, _tf_keras_layer, _tf_keras_metric,
_tf_keras_model, _tf_keras_network, _tf_keras_rnn_layer,
_tf_keras_sequential
"""
metadata: builtins.str
"""Metadata containing a JSON-serialized object with the non-TensorFlow
attributes for this Keras object.
"""
@property
def version(self) -> tensorflow.python.keras.protobuf.versions_pb2.VersionDef:
"""Version defined by the code serializing this Keras object."""
def __init__(
self,
*,
node_id: builtins.int | None = ...,
node_path: builtins.str | None = ...,
identifier: builtins.str | None = ...,
metadata: builtins.str | None = ...,
version: tensorflow.python.keras.protobuf.versions_pb2.VersionDef | None = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["version", b"version"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["identifier", b"identifier", "metadata", b"metadata", "node_id", b"node_id", "node_path", b"node_path", "version", b"version"]) -> None: ...
global___SavedObject = SavedObject
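
A sketch of building the two messages together (illustrative; the saved_metadata_pb2 module path is an assumption, while the versions_pb2 path matches the import in the stub above):

# Sketch only: saved_metadata_pb2 path assumed; versions_pb2 matches the stub's import.
from tensorflow.python.keras.protobuf import versions_pb2
from tensorflow.python.keras.protobuf.saved_metadata_pb2 import SavedMetadata, SavedObject

node = SavedObject(
    node_id=0,
    node_path="root",
    identifier="_tf_keras_model",        # must be one of the identifiers listed above
    metadata='{"class_name": "Model"}',  # JSON-serialized non-TensorFlow attributes
    version=versions_pb2.VersionDef(producer=2, min_consumer=1),
)
saved = SavedMetadata(nodes=[node])
assert saved.nodes[0].HasField("version")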

View File

@@ -0,0 +1,62 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class VersionDef(google.protobuf.message.Message):
"""This file is a copy of the TensorFlow Versions proto.
Keep this file in sync with the source proto definition at
https://github.com/tensorflow/tensorflow/blob/master/tensorflow/core/framework/versions.proto
    Version information for a piece of serialized data.
There are different types of versions for each type of data
(GraphDef, etc.), but they all have the same common shape
described here.
Each consumer has "consumer" and "min_producer" versions (specified
elsewhere). A consumer is allowed to consume this data if
producer >= min_producer
consumer >= min_consumer
consumer not in bad_consumers
LINT.IfChange
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PRODUCER_FIELD_NUMBER: builtins.int
MIN_CONSUMER_FIELD_NUMBER: builtins.int
BAD_CONSUMERS_FIELD_NUMBER: builtins.int
producer: builtins.int
"""The version of the code that produced this data."""
min_consumer: builtins.int
"""Any consumer below this version is not allowed to consume this data."""
@property
def bad_consumers(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
"""Specific consumer versions which are disallowed (e.g. due to bugs)."""
def __init__(
self,
*,
producer: builtins.int | None = ...,
min_consumer: builtins.int | None = ...,
bad_consumers: collections.abc.Iterable[builtins.int] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bad_consumers", b"bad_consumers", "min_consumer", b"min_consumer", "producer", b"producer"]) -> None: ...
global___VersionDef = VersionDef
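
The consumer-side rule quoted in the class docstring maps directly onto these three fields; a minimal sketch (the can_consume helper is illustrative, not a TensorFlow API):

from tensorflow.python.keras.protobuf.versions_pb2 import VersionDef

def can_consume(data: VersionDef, consumer: int, min_producer: int) -> bool:
    # Applies the rule from the docstring above: a consumer may consume
    # this data iff all three conditions hold.
    return (
        data.producer >= min_producer
        and consumer >= data.min_consumer
        and consumer not in data.bad_consumers
    )

versions = VersionDef(producer=26, min_consumer=12, bad_consumers=[13])
assert can_consume(versions, consumer=24, min_producer=0)
assert not can_consume(versions, consumer=13, min_producer=0)  # blocked: bad consumer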

View File

@@ -0,0 +1,289 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
TODO(b/247876220): Change package and java_package once we figure out how to
migrate.
"""
import builtins
import google.protobuf.descriptor
import google.protobuf.internal.enum_type_wrapper
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _Code:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _CodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_Code.ValueType], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
OK: _Code.ValueType # 0
"""Not an error; returned on success"""
CANCELLED: _Code.ValueType # 1
"""The operation was cancelled (typically by the caller)."""
UNKNOWN: _Code.ValueType # 2
"""Unknown error. An example of where this error may be returned is
if a Status value received from another address space belongs to
an error-space that is not known in this address space. Also
errors raised by APIs that do not return enough error information
may be converted to this error.
"""
INVALID_ARGUMENT: _Code.ValueType # 3
"""Client specified an invalid argument. Note that this differs
from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
that are problematic regardless of the state of the system
(e.g., a malformed file name).
"""
DEADLINE_EXCEEDED: _Code.ValueType # 4
"""Deadline expired before operation could complete. For operations
that change the state of the system, this error may be returned
even if the operation has completed successfully. For example, a
successful response from a server could have been delayed long
enough for the deadline to expire.
"""
NOT_FOUND: _Code.ValueType # 5
"""Some requested entity (e.g., file or directory) was not found.
For privacy reasons, this code *may* be returned when the client
does not have the access right to the entity.
"""
ALREADY_EXISTS: _Code.ValueType # 6
"""Some entity that we attempted to create (e.g., file or directory)
already exists.
"""
PERMISSION_DENIED: _Code.ValueType # 7
"""The caller does not have permission to execute the specified
operation. PERMISSION_DENIED must not be used for rejections
caused by exhausting some resource (use RESOURCE_EXHAUSTED
instead for those errors). PERMISSION_DENIED must not be
    used if the caller cannot be identified (use UNAUTHENTICATED
instead for those errors).
"""
UNAUTHENTICATED: _Code.ValueType # 16
"""The request does not have valid authentication credentials for the
operation.
"""
RESOURCE_EXHAUSTED: _Code.ValueType # 8
"""Some resource has been exhausted, perhaps a per-user quota, or
perhaps the entire file system is out of space.
"""
FAILED_PRECONDITION: _Code.ValueType # 9
"""Operation was rejected because the system is not in a state
    required for the operation's execution. For example, the directory
    to be deleted may be non-empty, an rmdir operation may be applied to
    a non-directory, etc.
A litmus test that may help a service implementor in deciding
between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
(a) Use UNAVAILABLE if the client can retry just the failing call.
(b) Use ABORTED if the client should retry at a higher-level
(e.g., restarting a read-modify-write sequence).
(c) Use FAILED_PRECONDITION if the client should not retry until
the system state has been explicitly fixed. E.g., if an "rmdir"
fails because the directory is non-empty, FAILED_PRECONDITION
should be returned since the client should not retry unless
they have first fixed up the directory by deleting files from it.
(d) Use FAILED_PRECONDITION if the client performs conditional
REST Get/Update/Delete on a resource and the resource on the
server does not match the condition. E.g., conflicting
read-modify-write on the same resource.
"""
ABORTED: _Code.ValueType # 10
"""The operation was aborted, typically due to a concurrency issue
like sequencer check failures, transaction aborts, etc.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
OUT_OF_RANGE: _Code.ValueType # 11
"""Operation tried to iterate past the valid input range. E.g., seeking or
reading past end of file.
Unlike INVALID_ARGUMENT, this error indicates a problem that may
be fixed if the system state changes. For example, a 32-bit file
system will generate INVALID_ARGUMENT if asked to read at an
offset that is not in the range [0,2^32-1], but it will generate
OUT_OF_RANGE if asked to read from an offset past the current
file size.
There is a fair bit of overlap between FAILED_PRECONDITION and
OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
error) when it applies so that callers who are iterating through
a space can easily look for an OUT_OF_RANGE error to detect when
they are done.
"""
UNIMPLEMENTED: _Code.ValueType # 12
"""Operation is not implemented or not supported/enabled in this service."""
INTERNAL: _Code.ValueType # 13
"""Internal errors. Means some invariant expected by the underlying
system has been broken. If you see one of these errors,
something is very broken.
"""
UNAVAILABLE: _Code.ValueType # 14
"""The service is currently unavailable. This is a most likely a
transient condition and may be corrected by retrying with
a backoff.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
DATA_LOSS: _Code.ValueType # 15
"""Unrecoverable data loss or corruption."""
DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_: _Code.ValueType # 20
"""An extra enum entry to prevent people from writing code that
fails to compile when a new code is added.
Nobody should ever reference this enumeration entry. In particular,
if you write C++ code that switches on this enumeration, add a default:
case instead of a case that mentions this enumeration entry.
Nobody should rely on the value (currently 20) listed here. It
may change in the future.
"""
class Code(_Code, metaclass=_CodeEnumTypeWrapper):
"""The canonical error codes for TensorFlow APIs.
Warnings:
- Do not change any numeric assignments.
- Changes to this list should only be made if there is a compelling
need that can't be satisfied in another way. Such changes
must be approved by at least two OWNERS.
- These error codes must match gRPC and protobuf error codes (except for
DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_).
Sometimes multiple error codes may apply. Services should return
the most specific error code that applies. For example, prefer
OUT_OF_RANGE over FAILED_PRECONDITION if both codes apply.
Similarly prefer NOT_FOUND or ALREADY_EXISTS over FAILED_PRECONDITION.
"""
OK: Code.ValueType # 0
"""Not an error; returned on success"""
CANCELLED: Code.ValueType # 1
"""The operation was cancelled (typically by the caller)."""
UNKNOWN: Code.ValueType # 2
"""Unknown error. An example of where this error may be returned is
if a Status value received from another address space belongs to
an error-space that is not known in this address space. Also
errors raised by APIs that do not return enough error information
may be converted to this error.
"""
INVALID_ARGUMENT: Code.ValueType # 3
"""Client specified an invalid argument. Note that this differs
from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
that are problematic regardless of the state of the system
(e.g., a malformed file name).
"""
DEADLINE_EXCEEDED: Code.ValueType # 4
"""Deadline expired before operation could complete. For operations
that change the state of the system, this error may be returned
even if the operation has completed successfully. For example, a
successful response from a server could have been delayed long
enough for the deadline to expire.
"""
NOT_FOUND: Code.ValueType # 5
"""Some requested entity (e.g., file or directory) was not found.
For privacy reasons, this code *may* be returned when the client
does not have the access right to the entity.
"""
ALREADY_EXISTS: Code.ValueType # 6
"""Some entity that we attempted to create (e.g., file or directory)
already exists.
"""
PERMISSION_DENIED: Code.ValueType # 7
"""The caller does not have permission to execute the specified
operation. PERMISSION_DENIED must not be used for rejections
caused by exhausting some resource (use RESOURCE_EXHAUSTED
instead for those errors). PERMISSION_DENIED must not be
    used if the caller cannot be identified (use UNAUTHENTICATED
instead for those errors).
"""
UNAUTHENTICATED: Code.ValueType # 16
"""The request does not have valid authentication credentials for the
operation.
"""
RESOURCE_EXHAUSTED: Code.ValueType # 8
"""Some resource has been exhausted, perhaps a per-user quota, or
perhaps the entire file system is out of space.
"""
FAILED_PRECONDITION: Code.ValueType # 9
"""Operation was rejected because the system is not in a state
    required for the operation's execution. For example, the directory
    to be deleted may be non-empty, an rmdir operation may be applied to
    a non-directory, etc.
A litmus test that may help a service implementor in deciding
between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
(a) Use UNAVAILABLE if the client can retry just the failing call.
(b) Use ABORTED if the client should retry at a higher-level
(e.g., restarting a read-modify-write sequence).
(c) Use FAILED_PRECONDITION if the client should not retry until
the system state has been explicitly fixed. E.g., if an "rmdir"
fails because the directory is non-empty, FAILED_PRECONDITION
should be returned since the client should not retry unless
they have first fixed up the directory by deleting files from it.
(d) Use FAILED_PRECONDITION if the client performs conditional
REST Get/Update/Delete on a resource and the resource on the
server does not match the condition. E.g., conflicting
read-modify-write on the same resource.
"""
ABORTED: Code.ValueType # 10
"""The operation was aborted, typically due to a concurrency issue
like sequencer check failures, transaction aborts, etc.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
OUT_OF_RANGE: Code.ValueType # 11
"""Operation tried to iterate past the valid input range. E.g., seeking or
reading past end of file.
Unlike INVALID_ARGUMENT, this error indicates a problem that may
be fixed if the system state changes. For example, a 32-bit file
system will generate INVALID_ARGUMENT if asked to read at an
offset that is not in the range [0,2^32-1], but it will generate
OUT_OF_RANGE if asked to read from an offset past the current
file size.
There is a fair bit of overlap between FAILED_PRECONDITION and
OUT_OF_RANGE. We recommend using OUT_OF_RANGE (the more specific
error) when it applies so that callers who are iterating through
a space can easily look for an OUT_OF_RANGE error to detect when
they are done.
"""
UNIMPLEMENTED: Code.ValueType # 12
"""Operation is not implemented or not supported/enabled in this service."""
INTERNAL: Code.ValueType # 13
"""Internal errors. Means some invariant expected by the underlying
system has been broken. If you see one of these errors,
something is very broken.
"""
UNAVAILABLE: Code.ValueType # 14
"""The service is currently unavailable. This is a most likely a
transient condition and may be corrected by retrying with
a backoff.
See litmus test above for deciding between FAILED_PRECONDITION,
ABORTED, and UNAVAILABLE.
"""
DATA_LOSS: Code.ValueType # 15
"""Unrecoverable data loss or corruption."""
DO_NOT_USE_RESERVED_FOR_FUTURE_EXPANSION_USE_DEFAULT_IN_SWITCH_INSTEAD_: Code.ValueType # 20
"""An extra enum entry to prevent people from writing code that
fails to compile when a new code is added.
Nobody should ever reference this enumeration entry. In particular,
if you write C++ code that switches on this enumeration, add a default:
case instead of a case that mentions this enumeration entry.
Nobody should rely on the value (currently 20) listed here. It
may change in the future.
"""
global___Code = Code
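
The wrapper metaclass provides the usual protobuf enum helpers; a short sketch (the error_codes_pb2 module path is an assumption, since this view omits the file name):

# Sketch only: the module path below is assumed, not confirmed by this diff.
from tensorflow.tsl.protobuf.error_codes_pb2 import Code

status = Code.NOT_FOUND  # typed as Code.ValueType by these stubs
print(Code.Name(status))        # -> "NOT_FOUND"
print(Code.Value("NOT_FOUND"))  # -> 5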

View File

@@ -0,0 +1,62 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys
if sys.version_info >= (3, 8):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
@typing_extensions.final
class HistogramProto(google.protobuf.message.Message):
"""Serialization format for histogram module in
tsl/lib/histogram/histogram.h
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MIN_FIELD_NUMBER: builtins.int
MAX_FIELD_NUMBER: builtins.int
NUM_FIELD_NUMBER: builtins.int
SUM_FIELD_NUMBER: builtins.int
SUM_SQUARES_FIELD_NUMBER: builtins.int
BUCKET_LIMIT_FIELD_NUMBER: builtins.int
BUCKET_FIELD_NUMBER: builtins.int
min: builtins.float
max: builtins.float
num: builtins.float
sum: builtins.float
sum_squares: builtins.float
@property
def bucket_limit(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
"""Parallel arrays encoding the bucket boundaries and the bucket values.
bucket(i) is the count for the bucket i. The range for
a bucket is:
i == 0: -DBL_MAX .. bucket_limit(0)
i != 0: bucket_limit(i-1) .. bucket_limit(i)
"""
@property
def bucket(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
def __init__(
self,
*,
min: builtins.float | None = ...,
max: builtins.float | None = ...,
num: builtins.float | None = ...,
sum: builtins.float | None = ...,
sum_squares: builtins.float | None = ...,
bucket_limit: collections.abc.Iterable[builtins.float] | None = ...,
bucket: collections.abc.Iterable[builtins.float] | None = ...,
) -> None: ...
def ClearField(self, field_name: typing_extensions.Literal["bucket", b"bucket", "bucket_limit", b"bucket_limit", "max", b"max", "min", b"min", "num", b"num", "sum", b"sum", "sum_squares", b"sum_squares"]) -> None: ...
global___HistogramProto = HistogramProto
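
To make the parallel-array encoding concrete, a sketch that rebuilds the bucket ranges described in the bucket_limit docstring (the histogram_pb2 module path is an assumption):

# Sketch only: the module path below is assumed, not confirmed by this diff.
from tensorflow.tsl.protobuf.histogram_pb2 import HistogramProto

h = HistogramProto(
    min=0.5,
    max=2.5,
    num=4.0,                       # four samples: 0.5, 1.5, 1.5, 2.5
    sum=6.0,
    sum_squares=11.0,
    bucket_limit=[1.0, 2.0, 3.0],  # upper bound of each bucket
    bucket=[1.0, 2.0, 1.0],        # per-bucket counts, parallel to bucket_limit
)

lower = float("-inf")  # -DBL_MAX in the proto comment
for limit, count in zip(h.bucket_limit, h.bucket):
    print(f"[{lower}, {limit}): {count}")
    lower = limit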