Bump protobuf to 3.20.1 (#8609)

There are even newer versions available (4.*), but I found a bug in it
that prevented it from working with mypy-protobuf. For now,
sticking with 3.20.1.
Authored by Nipunn Koorapati on 2022-08-25 05:28:08 -07:00, committed by GitHub
parent 1c56148ce9
commit c17c0d5607
9 changed files with 50 additions and 22 deletions

View File

@@ -11,7 +11,7 @@ set -ex -o pipefail
# followed by committing the changes to typeshed
#
# Update these two variables when rerunning script
PROTOBUF_VERSION=3.19.3
PROTOBUF_VERSION=3.20.1
MYPY_PROTOBUF_VERSION=v3.2.0
if uname -a | grep Darwin; then
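
After bumping these variables, a quick sanity check (a minimal sketch, not part of the script) is to confirm the locally installed protobuf runtime matches the version the stubs are generated against:

import google.protobuf  # the runtime package exposes __version__
assert google.protobuf.__version__.startswith("3.20."), google.protobuf.__version__
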
@@ -54,11 +54,11 @@ find "$REPO_ROOT/stubs/protobuf/" -name '*_pb2.pyi' -delete
# Roughly reproduce the subset of .proto files on the public interface as described
# by find_package_modules in the protobuf setup.py.
# The logic (as of 3.14.0) can roughly be described as an allowlist of .proto files
# The logic (as of 3.20.1) can roughly be described as an allowlist of .proto files
# further limited to exclude *test* and internal/
# https://github.com/protocolbuffers/protobuf/blob/master/python/setup.py
PROTO_FILES=$(grep "generate_proto.*google" $PYTHON_PROTOBUF_DIR/python/setup.py | \
cut -d\" -f2 | \
PROTO_FILES=$(grep "GenProto.*google" $PYTHON_PROTOBUF_DIR/python/setup.py | \
cut -d\' -f2 | \
grep -v "test" | \
grep -v google/protobuf/internal/ | \
grep -v google/protobuf/pyext/python.proto | \
@@ -69,6 +69,7 @@ PROTO_FILES=$(grep "generate_proto.*google" $PYTHON_PROTOBUF_DIR/python/setup.py
)
# And regenerate!
# shellcheck disable=SC2086
protoc_install/bin/protoc --proto_path="$PYTHON_PROTOBUF_DIR/src" --mypy_out="relax_strict_optional_primitives:$REPO_ROOT/stubs/protobuf" $PROTO_FILES
isort "$REPO_ROOT/stubs/protobuf"

View File

@@ -32,3 +32,7 @@ google.protobuf.descriptor.Descriptor.__init__
google.protobuf.descriptor.Descriptor.__new__
google.protobuf.descriptor.ServiceDescriptor.__init__
google.protobuf.descriptor.ServiceDescriptor.__new__
# Set to None at runtime - which doesn't match the Sequence base class.
# It's a hack - just allow it.
google.protobuf.internal.containers.BaseContainer.__hash__
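
The new allowlist entry is needed because the runtime intentionally disables hashing on the container, which stubtest cannot reconcile with the Sequence-based stub. A minimal sketch of the runtime pattern:

class UnhashableContainer:
    # Mirrors what the runtime does: assigning None disables hashing entirely.
    __hash__ = None  # type: ignore[assignment]

try:
    hash(UnhashableContainer())
except TypeError as exc:
    print(exc)  # unhashable type: 'UnhashableContainer'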

View File

@@ -1,2 +1,2 @@
version = "3.19.*"
version = "3.20.*"
extra_description = "Generated with aid from mypy-protobuf v3.2.0"

View File

@@ -37,7 +37,7 @@ class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_t
foo = any.unpack(Foo.class);
}
Example 3: Pack and unpack a message in Python.
foo = Foo(...)
any = Any()
@@ -47,7 +47,7 @@ class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_t
any.Unpack(foo)
...
Example 4: Pack and unpack a message in Go
foo := &pb.Foo{...}
any, err := anypb.New(foo)
@@ -68,7 +68,7 @@ class Any(google.protobuf.message.Message, google.protobuf.internal.well_known_t
JSON
====
The JSON representation of an `Any` value uses the regular
representation of the deserialized, embedded message, with an
additional field `@type` which contains the type URL. Example:
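
A runnable version of the docstring's Python example, using the well-known Struct message in place of the illustrative Foo:

from google.protobuf.any_pb2 import Any
from google.protobuf.struct_pb2 import Struct

foo = Struct()
foo.fields["value"].number_value = 42.0

container = Any()
container.Pack(foo)

unpacked = Struct()
assert container.Unpack(unpacked)       # returns False on a type mismatch
assert container.Is(Struct.DESCRIPTOR)  # type-URL check, as in the docstring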

View File

@@ -240,6 +240,8 @@ class MethodDescriptor(DescriptorBase):
containing_service,
input_type,
output_type,
client_streaming=...,
server_streaming=...,
options=...,
serialized_options=...,
create_key=...,
@@ -250,6 +252,8 @@ class MethodDescriptor(DescriptorBase):
containing_service: Any
input_type: Any
output_type: Any
client_streaming: bool
server_streaming: bool
def __init__(
self,
name,
@@ -258,6 +262,8 @@ class MethodDescriptor(DescriptorBase):
containing_service,
input_type,
output_type,
client_streaming=...,
server_streaming=...,
options=...,
serialized_options=...,
create_key=...,
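
The newly exposed client_streaming/server_streaming attributes can be read off a generated service descriptor. A hedged sketch, where my_service_pb2 and MyService are hypothetical names for a compiled .proto that defines a service:

from my_service_pb2 import DESCRIPTOR  # hypothetical generated module

service = DESCRIPTOR.services_by_name["MyService"]  # hypothetical service name
for method in service.methods:
    # Both attributes are plain bools, matching the stub above.
    print(method.name, method.client_streaming, method.server_streaming)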

View File

@@ -373,7 +373,6 @@ class FieldDescriptorProto(google.protobuf.message.Message):
For booleans, "true" or "false".
For strings, contains the default text contents (not escaped in any way).
For bytes, contains the C escaped value. All bytes >= 128 are escaped.
TODO(kenton): Base-64 encode?
"""
oneof_index: builtins.int
@@ -956,6 +955,7 @@ class FieldOptions(google.protobuf.message.Message):
PACKED_FIELD_NUMBER: builtins.int
JSTYPE_FIELD_NUMBER: builtins.int
LAZY_FIELD_NUMBER: builtins.int
UNVERIFIED_LAZY_FIELD_NUMBER: builtins.int
DEPRECATED_FIELD_NUMBER: builtins.int
WEAK_FIELD_NUMBER: builtins.int
UNINTERPRETED_OPTION_FIELD_NUMBER: builtins.int
@@ -1017,6 +1017,18 @@ class FieldOptions(google.protobuf.message.Message):
implementation must either *always* check its required fields, or *never*
check its required fields, regardless of whether or not the message has
been parsed.
As of 2021, lazy does no correctness checks on the byte stream during
parsing. This may lead to crashes if and when an invalid byte stream is
finally parsed upon access.
TODO(b/211906113): Enable validation on lazy fields.
"""
unverified_lazy: builtins.bool
"""unverified_lazy does no correctness checks on the byte stream. This should
only be used where lazy with verification is prohibitive for performance
reasons.
"""
deprecated: builtins.bool
@@ -1039,12 +1051,13 @@ class FieldOptions(google.protobuf.message.Message):
packed: typing.Optional[builtins.bool] = ...,
jstype: typing.Optional[global___FieldOptions.JSType.ValueType] = ...,
lazy: typing.Optional[builtins.bool] = ...,
unverified_lazy: typing.Optional[builtins.bool] = ...,
deprecated: typing.Optional[builtins.bool] = ...,
weak: typing.Optional[builtins.bool] = ...,
uninterpreted_option: typing.Optional[typing.Iterable[global___UninterpretedOption]] = ...,
) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","weak",b"weak"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","uninterpreted_option",b"uninterpreted_option","weak",b"weak"]) -> None: ...
def HasField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","unverified_lazy",b"unverified_lazy","weak",b"weak"]) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["ctype",b"ctype","deprecated",b"deprecated","jstype",b"jstype","lazy",b"lazy","packed",b"packed","uninterpreted_option",b"uninterpreted_option","unverified_lazy",b"unverified_lazy","weak",b"weak"]) -> None: ...
global___FieldOptions = FieldOptions
class OneofOptions(google.protobuf.message.Message):
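
For illustration, the new unverified_lazy option constructed directly (in practice it is set in a .proto file and only surfaces here through descriptor_pb2):

from google.protobuf.descriptor_pb2 import FieldOptions

opts = FieldOptions(unverified_lazy=True)
assert opts.HasField("unverified_lazy")
opts.ClearField("unverified_lazy")
assert not opts.HasField("unverified_lazy")
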
@@ -1287,8 +1300,8 @@ class SourceCodeInfo(google.protobuf.message.Message):
location.
Each element is a field number or an index. They form a path from
the root FileDescriptorProto to the place where the definition. For
example, this path:
the root FileDescriptorProto to the place where the definition occurs.
For example, this path:
[ 4, 3, 2, 7, 1 ]
refers to:
file.message_type(3) // 4, 3

View File

@@ -37,10 +37,7 @@ class RepeatedScalarFieldContainer(BaseContainer[_ScalarV]):
def __setitem__(self, key: int, value: _ScalarV) -> None: ...
@overload
def __setitem__(self, key: slice, value: Iterable[_ScalarV]) -> None: ...
def __getslice__(self, start: int, stop: int) -> list[_ScalarV]: ...
def __setslice__(self, start: int, stop: int, values: Iterable[_ScalarV]) -> None: ...
def __delitem__(self, key: int | slice) -> None: ...
def __delslice__(self, start: int, stop: int) -> None: ...
def __eq__(self, other: object) -> bool: ...
class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]):
@@ -52,9 +49,7 @@ class RepeatedCompositeFieldContainer(BaseContainer[_MessageV]):
def MergeFrom(self: _M, other: _M) -> None: ...
def remove(self, elem: _MessageV) -> None: ...
def pop(self, key: int = ...) -> _MessageV: ...
def __getslice__(self, start: int, stop: int) -> list[_MessageV]: ...
def __delitem__(self, key: int | slice) -> None: ...
def __delslice__(self, start: int, stop: int) -> None: ...
def __eq__(self, other: object) -> bool: ...
class ScalarMap(MutableMapping[_K, _ScalarV]):
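
The removed __getslice__/__setslice__/__delslice__ stubs were Python 2 remnants; in Python 3, slicing is routed through __getitem__/__setitem__/__delitem__ with a slice object, as a small sketch shows:

class Demo(list):
    def __getitem__(self, key):
        print("got key:", key)  # for d[1:3] this prints slice(1, 3, None)
        return super().__getitem__(key)

d = Demo([1, 2, 3, 4])
d[1:3]      # handled by __getitem__ with a slice, no __getslice__ involved
del d[0:2]  # likewise handled by list.__delitem__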

View File

@@ -1,4 +1,4 @@
from datetime import datetime, timedelta
from datetime import datetime, timedelta, tzinfo
from typing import Any as tAny
class Any:
@@ -23,7 +23,7 @@ class Timestamp:
def FromMicroseconds(self, micros: int) -> None: ...
def FromMilliseconds(self, millis: int) -> None: ...
def FromSeconds(self, seconds: int) -> None: ...
def ToDatetime(self) -> datetime: ...
def ToDatetime(self, tzinfo: tzinfo | None = ...) -> datetime: ...
def FromDatetime(self, dt: datetime) -> None: ...
class Duration:
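
The updated ToDatetime signature accepts an optional tzinfo so callers can get an aware datetime; a short illustration:

from datetime import timezone
from google.protobuf.timestamp_pb2 import Timestamp

ts = Timestamp()
ts.GetCurrentTime()
naive_utc = ts.ToDatetime()                     # naive datetime, implicitly UTC
aware_utc = ts.ToDatetime(tzinfo=timezone.utc)  # timezone-aware datetime
assert aware_utc.tzinfo is timezone.utc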

View File

@@ -18,6 +18,7 @@ def MessageToJson(
use_integers_for_enums: bool = ...,
descriptor_pool: DescriptorPool | None = ...,
float_precision: int | None = ...,
ensure_ascii: bool = ...,
) -> str: ...
def MessageToDict(
message: Message,
@@ -28,8 +29,16 @@ def MessageToDict(
float_precision: int | None = ...,
) -> dict[str, Any]: ...
def Parse(
text: bytes | str, message: _MessageT, ignore_unknown_fields: bool = ..., descriptor_pool: DescriptorPool | None = ...
text: bytes | str,
message: _MessageT,
ignore_unknown_fields: bool = ...,
descriptor_pool: DescriptorPool | None = ...,
max_recursion_depth: int = ...,
) -> _MessageT: ...
def ParseDict(
js_dict: Any, message: _MessageT, ignore_unknown_fields: bool = ..., descriptor_pool: DescriptorPool | None = ...
js_dict: Any,
message: _MessageT,
ignore_unknown_fields: bool = ...,
descriptor_pool: DescriptorPool | None = ...,
max_recursion_depth: int = ...,
) -> _MessageT: ...
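
Both additions in one short illustration: ensure_ascii=False keeps non-ASCII text unescaped in MessageToJson output, and max_recursion_depth caps how deeply Parse/ParseDict will recurse into nested messages:

from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct

msg = Struct()
msg.fields["greeting"].string_value = "héllo"

print(json_format.MessageToJson(msg, ensure_ascii=False))  # keeps "héllo" as-is
parsed = json_format.Parse(
    '{"greeting": "héllo"}',
    Struct(),
    max_recursion_depth=50,  # tighter bound than the runtime default
)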