mirror of
https://github.com/CCOSTAN/Home-AssistantConfig.git
synced 2025-11-06 09:45:07 +00:00
Initial Configuration Push
This commit is contained in:
33
deps/google/protobuf/__init__.py
vendored
Normal file
33
deps/google/protobuf/__init__.py
vendored
Normal file
@@ -0,0 +1,33 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# Copyright 2007 Google Inc. All Rights Reserved.
|
||||
|
||||
__version__ = '3.0.0b2'
|
||||
BIN
deps/google/protobuf/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/any_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/any_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/api_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/api_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/descriptor.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/descriptor.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/descriptor_database.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/descriptor_database.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/descriptor_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/descriptor_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/descriptor_pool.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/descriptor_pool.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/duration_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/duration_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/empty_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/empty_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/field_mask_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/field_mask_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/json_format.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/json_format.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/map_unittest_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/map_unittest_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/message.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/message.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/message_factory.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/message_factory.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/proto_builder.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/proto_builder.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/reflection.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/reflection.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/service.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/service.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/service_reflection.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/service_reflection.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/source_context_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/source_context_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/struct_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/struct_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/symbol_database.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/symbol_database.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/text_encoding.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/text_encoding.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/text_format.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/text_format.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/timestamp_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/timestamp_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/type_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/type_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_arena_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_arena_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_custom_options_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_custom_options_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_import_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_import_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_import_public_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_import_public_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_mset_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_mset_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_mset_wire_format_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_mset_wire_format_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_no_arena_import_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_no_arena_import_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_no_arena_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_no_arena_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_no_generic_services_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_no_generic_services_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/unittest_proto3_arena_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/unittest_proto3_arena_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/__pycache__/wrappers_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/__pycache__/wrappers_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
78
deps/google/protobuf/any_pb2.py
vendored
Normal file
78
deps/google/protobuf/any_pb2.py
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/any.proto
|
||||
|
||||
import sys
|
||||
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/protobuf/any.proto',
|
||||
package='google.protobuf',
|
||||
syntax='proto3',
|
||||
serialized_pb=_b('\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42K\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
||||
)
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
|
||||
_ANY = _descriptor.Descriptor(
|
||||
name='Any',
|
||||
full_name='google.protobuf.Any',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='type_url', full_name='google.protobuf.Any.type_url', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='value', full_name='google.protobuf.Any.value', index=1,
|
||||
number=2, type=12, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b(""),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=46,
|
||||
serialized_end=84,
|
||||
)
|
||||
|
||||
DESCRIPTOR.message_types_by_name['Any'] = _ANY
|
||||
|
||||
Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), dict(
|
||||
DESCRIPTOR = _ANY,
|
||||
__module__ = 'google.protobuf.any_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.Any)
|
||||
))
|
||||
_sym_db.RegisterMessage(Any)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010AnyProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
250
deps/google/protobuf/api_pb2.py
vendored
Normal file
250
deps/google/protobuf/api_pb2.py
vendored
Normal file
@@ -0,0 +1,250 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/api.proto
|
||||
|
||||
import sys
|
||||
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
|
||||
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/protobuf/api.proto',
|
||||
package='google.protobuf',
|
||||
syntax='proto3',
|
||||
serialized_pb=_b('\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBK\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
|
||||
,
|
||||
dependencies=[google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,google_dot_protobuf_dot_type__pb2.DESCRIPTOR,])
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
|
||||
_API = _descriptor.Descriptor(
|
||||
name='Api',
|
||||
full_name='google.protobuf.Api',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.protobuf.Api.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='methods', full_name='google.protobuf.Api.methods', index=1,
|
||||
number=2, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='options', full_name='google.protobuf.Api.options', index=2,
|
||||
number=3, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='version', full_name='google.protobuf.Api.version', index=3,
|
||||
number=4, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='source_context', full_name='google.protobuf.Api.source_context', index=4,
|
||||
number=5, type=11, cpp_type=10, label=1,
|
||||
has_default_value=False, default_value=None,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='mixins', full_name='google.protobuf.Api.mixins', index=5,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='syntax', full_name='google.protobuf.Api.syntax', index=6,
|
||||
number=7, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=113,
|
||||
serialized_end=370,
|
||||
)
|
||||
|
||||
|
||||
_METHOD = _descriptor.Descriptor(
|
||||
name='Method',
|
||||
full_name='google.protobuf.Method',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.protobuf.Method.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='request_type_url', full_name='google.protobuf.Method.request_type_url', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='request_streaming', full_name='google.protobuf.Method.request_streaming', index=2,
|
||||
number=3, type=8, cpp_type=7, label=1,
|
||||
has_default_value=False, default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='response_type_url', full_name='google.protobuf.Method.response_type_url', index=3,
|
||||
number=4, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='response_streaming', full_name='google.protobuf.Method.response_streaming', index=4,
|
||||
number=5, type=8, cpp_type=7, label=1,
|
||||
has_default_value=False, default_value=False,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='options', full_name='google.protobuf.Method.options', index=5,
|
||||
number=6, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='syntax', full_name='google.protobuf.Method.syntax', index=6,
|
||||
number=7, type=14, cpp_type=8, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=373,
|
||||
serialized_end=586,
|
||||
)
|
||||
|
||||
|
||||
_MIXIN = _descriptor.Descriptor(
|
||||
name='Mixin',
|
||||
full_name='google.protobuf.Mixin',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.protobuf.Mixin.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='root', full_name='google.protobuf.Mixin.root', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto3',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=588,
|
||||
serialized_end=623,
|
||||
)
|
||||
|
||||
_API.fields_by_name['methods'].message_type = _METHOD
|
||||
_API.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
|
||||
_API.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT
|
||||
_API.fields_by_name['mixins'].message_type = _MIXIN
|
||||
_API.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
|
||||
_METHOD.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
|
||||
_METHOD.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
|
||||
DESCRIPTOR.message_types_by_name['Api'] = _API
|
||||
DESCRIPTOR.message_types_by_name['Method'] = _METHOD
|
||||
DESCRIPTOR.message_types_by_name['Mixin'] = _MIXIN
|
||||
|
||||
Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), dict(
|
||||
DESCRIPTOR = _API,
|
||||
__module__ = 'google.protobuf.api_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.Api)
|
||||
))
|
||||
_sym_db.RegisterMessage(Api)
|
||||
|
||||
Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), dict(
|
||||
DESCRIPTOR = _METHOD,
|
||||
__module__ = 'google.protobuf.api_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.Method)
|
||||
))
|
||||
_sym_db.RegisterMessage(Method)
|
||||
|
||||
Mixin = _reflection.GeneratedProtocolMessageType('Mixin', (_message.Message,), dict(
|
||||
DESCRIPTOR = _MIXIN,
|
||||
__module__ = 'google.protobuf.api_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.Mixin)
|
||||
))
|
||||
_sym_db.RegisterMessage(Mixin)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\010ApiProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
0
deps/google/protobuf/compiler/__init__.py
vendored
Normal file
0
deps/google/protobuf/compiler/__init__.py
vendored
Normal file
BIN
deps/google/protobuf/compiler/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/compiler/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/compiler/__pycache__/plugin_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
188
deps/google/protobuf/compiler/plugin_pb2.py
vendored
Normal file
188
deps/google/protobuf/compiler/plugin_pb2.py
vendored
Normal file
@@ -0,0 +1,188 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: google/protobuf/compiler/plugin.proto
|
||||
|
||||
import sys
|
||||
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import message as _message
|
||||
from google.protobuf import reflection as _reflection
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf import descriptor_pb2
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor.FileDescriptor(
|
||||
name='google/protobuf/compiler/plugin.proto',
|
||||
package='google.protobuf.compiler',
|
||||
syntax='proto2',
|
||||
serialized_pb=_b('\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"}\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\"\xaa\x01\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\tB7\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ\tplugin_go')
|
||||
,
|
||||
dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
|
||||
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
|
||||
|
||||
|
||||
|
||||
|
||||
_CODEGENERATORREQUEST = _descriptor.Descriptor(
|
||||
name='CodeGeneratorRequest',
|
||||
full_name='google.protobuf.compiler.CodeGeneratorRequest',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
|
||||
number=1, type=9, cpp_type=9, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
|
||||
number=15, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto2',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=101,
|
||||
serialized_end=226,
|
||||
)
|
||||
|
||||
|
||||
_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
|
||||
name='File',
|
||||
full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
|
||||
number=2, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
|
||||
number=15, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto2',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=337,
|
||||
serialized_end=399,
|
||||
)
|
||||
|
||||
_CODEGENERATORRESPONSE = _descriptor.Descriptor(
|
||||
name='CodeGeneratorResponse',
|
||||
full_name='google.protobuf.compiler.CodeGeneratorResponse',
|
||||
filename=None,
|
||||
file=DESCRIPTOR,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
_descriptor.FieldDescriptor(
|
||||
name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
|
||||
number=1, type=9, cpp_type=9, label=1,
|
||||
has_default_value=False, default_value=_b("").decode('utf-8'),
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
_descriptor.FieldDescriptor(
|
||||
name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=1,
|
||||
number=15, type=11, cpp_type=10, label=3,
|
||||
has_default_value=False, default_value=[],
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None,
|
||||
options=None),
|
||||
],
|
||||
extensions=[
|
||||
],
|
||||
nested_types=[_CODEGENERATORRESPONSE_FILE, ],
|
||||
enum_types=[
|
||||
],
|
||||
options=None,
|
||||
is_extendable=False,
|
||||
syntax='proto2',
|
||||
extension_ranges=[],
|
||||
oneofs=[
|
||||
],
|
||||
serialized_start=229,
|
||||
serialized_end=399,
|
||||
)
|
||||
|
||||
_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEDESCRIPTORPROTO
|
||||
_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE
|
||||
_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
|
||||
DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
|
||||
DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
|
||||
|
||||
CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), dict(
|
||||
DESCRIPTOR = _CODEGENERATORREQUEST,
|
||||
__module__ = 'google.protobuf.compiler.plugin_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
|
||||
))
|
||||
_sym_db.RegisterMessage(CodeGeneratorRequest)
|
||||
|
||||
CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), dict(
|
||||
|
||||
File = _reflection.GeneratedProtocolMessageType('File', (_message.Message,), dict(
|
||||
DESCRIPTOR = _CODEGENERATORRESPONSE_FILE,
|
||||
__module__ = 'google.protobuf.compiler.plugin_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
|
||||
))
|
||||
,
|
||||
DESCRIPTOR = _CODEGENERATORRESPONSE,
|
||||
__module__ = 'google.protobuf.compiler.plugin_pb2'
|
||||
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
|
||||
))
|
||||
_sym_db.RegisterMessage(CodeGeneratorResponse)
|
||||
_sym_db.RegisterMessage(CodeGeneratorResponse.File)
|
||||
|
||||
|
||||
DESCRIPTOR.has_options = True
|
||||
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\034com.google.protobuf.compilerB\014PluginProtosZ\tplugin_go'))
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
966
deps/google/protobuf/descriptor.py
vendored
Normal file
966
deps/google/protobuf/descriptor.py
vendored
Normal file
@@ -0,0 +1,966 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Descriptors essentially contain exactly the information found in a .proto
|
||||
file, in types that make this information accessible in Python.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import six
|
||||
|
||||
from google.protobuf.internal import api_implementation
|
||||
|
||||
if api_implementation.Type() == 'cpp':
  # Used by MakeDescriptor in cpp mode
  import os
  import uuid
  from google.protobuf.pyext import _message
  # The extension module advertises whether C++ descriptor objects should
  # back the Python classes in this module.
  _USE_C_DESCRIPTORS = getattr(_message, '_USE_C_DESCRIPTORS', False)
else:
  _USE_C_DESCRIPTORS = False
|
||||
|
||||
|
||||
class Error(Exception):
  """Base exception for all errors raised by this module."""
|
||||
|
||||
|
||||
class TypeTransformationError(Error):
  """Raised when a Python proto type cannot be mapped to its C++ type."""
|
||||
|
||||
|
||||
if _USE_C_DESCRIPTORS:
  # With the C++ implementation, descriptor objects may be instances of the
  # extension types defined in _message.  Overriding __instancecheck__ lets
  # isinstance(obj, FieldDescriptor) (and friends) accept those C++ objects
  # in addition to pure-Python instances.
  class DescriptorMetaclass(type):

    def __instancecheck__(cls, obj):
      # Accept either a genuine instance of this Python class, or an
      # instance of the corresponding C++ extension type.
      return (super(DescriptorMetaclass, cls).__instancecheck__(obj) or
              isinstance(obj, cls._C_DESCRIPTOR_CLASS))
else:
  # Pure-Python implementation: the plain `type` metaclass suffices.
  DescriptorMetaclass = type
|
||||
|
||||
|
||||
class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):

  """Descriptors base class.

  This class is the base of all descriptor classes.  It provides common
  options-related functionality.

  Attributes:
    has_options: True if the descriptor has non-default options.  Usually
      it is not necessary to read this -- just call GetOptions(), which
      happily returns the default instance.  It is however sometimes
      useful for efficiency, and also inside the protobuf implementation
      to avoid some bootstrapping issues.
  """

  if _USE_C_DESCRIPTORS:
    # The class, or tuple of classes, considered "virtual subclasses" of
    # this descriptor class by DescriptorMetaclass.__instancecheck__.
    _C_DESCRIPTOR_CLASS = ()

  def __init__(self, options, options_class_name):
    """Initializes the descriptor.

    Args:
      options: The options message, or None (GetOptions() will then create
        a default instance on demand).
      options_class_name: (str) Name of the options message class; needed
        so a default instance can be created when `options` is None.
    """
    self._options = options
    self._options_class_name = options_class_name

    # Does this descriptor have non-default options?
    self.has_options = options is not None

  def _SetOptions(self, options, options_class_name):
    """Sets the descriptor's options.

    This function is used in generated proto2 files to update descriptor
    options.  It must not be used outside proto2.
    """
    self._options = options
    self._options_class_name = options_class_name

    # Does this descriptor have non-default options?
    self.has_options = options is not None

  def GetOptions(self):
    """Retrieves descriptor options.

    Returns the options set on this descriptor, creating (and caching) a
    default options instance when none were set.
    """
    if not self._options:
      # Imported locally -- presumably to avoid an import cycle with
      # descriptor_pb2, which itself depends on this module (TODO confirm).
      from google.protobuf import descriptor_pb2
      try:
        options_class = getattr(descriptor_pb2, self._options_class_name)
      except AttributeError:
        raise RuntimeError('Unknown options class name %s!' %
                           (self._options_class_name))
      self._options = options_class()
    return self._options
|
||||
|
||||
|
||||
class _NestedDescriptorBase(DescriptorBase):
  """Common class for descriptors that can be nested."""

  def __init__(self, options, options_class_name, name, full_name,
               file, containing_type, serialized_start=None,
               serialized_end=None):
    """Constructor.

    Args:
      options: Protocol message options, or None to use default options.
      options_class_name: (str) The class name of the above options.
      name: (str) Name of this protocol message type.
      full_name: (str) Fully-qualified name of this protocol message type,
        including the protocol "package" name and the name of any
        enclosing types.
      file: (FileDescriptor) Reference to file info.
      containing_type: If provided, this is a nested descriptor with this
        descriptor as parent; otherwise None.
      serialized_start: Start index (inclusive) of the block within
        file.serialized_pb that describes this descriptor.
      serialized_end: End index (exclusive) of the block within
        file.serialized_pb that describes this descriptor.
    """
    super(_NestedDescriptorBase, self).__init__(
        options, options_class_name)

    self.name = name
    # TODO(falk): Add function to calculate full_name instead of having it in
    # memory?
    self.full_name = full_name
    self.file = file
    self.containing_type = containing_type

    self._serialized_start = serialized_start
    self._serialized_end = serialized_end

  def GetTopLevelContainingType(self):
    """Returns the outermost enclosing type, or self if already top-level."""
    desc = self
    while desc.containing_type is not None:
      desc = desc.containing_type
    return desc

  def CopyToProto(self, proto):
    """Copies this to the matching proto in descriptor_pb2.

    Args:
      proto: An empty proto instance from descriptor_pb2.

    Raises:
      Error: If self could not be serialized, e.g. because the serialized
        range was not supplied at construction time.
    """
    # Guard clause: without the file and serialized range we cannot
    # reconstruct the wire-format bytes for this descriptor.
    if (self.file is None or
        self._serialized_start is None or
        self._serialized_end is None):
      raise Error('Descriptor does not contain serialization.')
    proto.ParseFromString(self.file.serialized_pb[
        self._serialized_start:self._serialized_end])
|
||||
|
||||
|
||||
class Descriptor(_NestedDescriptorBase):

  """Descriptor for a protocol message type.

  A Descriptor instance has the following attributes:

    name: (str) Name of this protocol message type.
    full_name: (str) Fully-qualified name of this protocol message type,
      including the protocol "package" name and the name of any enclosing
      types.

    containing_type: (Descriptor) Descriptor of the type containing us,
      or None if this is top-level.

    fields: (list of FieldDescriptor) Field descriptors for all fields in
      this type, in declaration order.
    fields_by_number: (dict int -> FieldDescriptor) The same objects as in
      |fields|, indexed by tag number.
    fields_by_name: (dict str -> FieldDescriptor) The same objects as in
      |fields|, indexed by field name.
    fields_by_camelcase_name: (dict str -> FieldDescriptor) The same
      objects as in |fields|, indexed by camelcase name (built lazily).

    nested_types: (list of Descriptor) Descriptors for all message types
      nested within this one.
    nested_types_by_name: (dict str -> Descriptor) The same objects as in
      |nested_types|, indexed by name.

    enum_types: (list of EnumDescriptor) Descriptors for all enums
      contained within this type.
    enum_types_by_name: (dict str -> EnumDescriptor) The same objects as
      in |enum_types|, indexed by name.
    enum_values_by_name: (dict str -> EnumValueDescriptor) Maps enum value
      names to their descriptors, across all nested enums.

    extensions: (list of FieldDescriptor) All extensions defined directly
      within this message type (NOT within a nested type).
    extensions_by_name: (dict str -> FieldDescriptor) The same objects as
      |extensions|, indexed by name.

    is_extendable: Does this type define any extension ranges?

    oneofs: (list of OneofDescriptor) Descriptors for oneof fields in
      this message.
    oneofs_by_name: (dict str -> OneofDescriptor) The same objects as in
      |oneofs|, indexed by name.

    file: (FileDescriptor) Reference to file descriptor.
    syntax: (str) Either "proto2" or "proto3".
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.Descriptor

    def __new__(cls, name, full_name, filename, containing_type, fields,
                nested_types, enum_types, extensions, options=None,
                is_extendable=True, extension_ranges=None, oneofs=None,
                file=None, serialized_start=None, serialized_end=None,
                syntax=None):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindMessageTypeByName(full_name)

  # NOTE(tmarek): The file argument redefining a builtin is nothing we can
  # fix right now since we don't know how many clients already rely on the
  # name of the argument.
  def __init__(self, name, full_name, filename, containing_type, fields,
               nested_types, enum_types, extensions, options=None,
               is_extendable=True, extension_ranges=None, oneofs=None,
               file=None, serialized_start=None, serialized_end=None,
               syntax=None):  # pylint:disable=redefined-builtin
    """Arguments are as described in the class docstring above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    super(Descriptor, self).__init__(
        options, 'MessageOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    # We keep |fields| in addition to the lookup dicts so that clients can
    # both index fields by declaration order and iterate them tersely
    # (for f in descriptor.fields: ...).
    self.fields = fields
    for field in self.fields:
      field.containing_type = self
    self.fields_by_number = {f.number: f for f in fields}
    self.fields_by_name = {f.name: f for f in fields}
    # The camelcase index is populated lazily by the property below.
    self._fields_by_camelcase_name = None

    self.nested_types = nested_types
    for nested_type in nested_types:
      nested_type.containing_type = self
    self.nested_types_by_name = {t.name: t for t in nested_types}

    self.enum_types = enum_types
    for enum_type in self.enum_types:
      enum_type.containing_type = self
    self.enum_types_by_name = {t.name: t for t in enum_types}
    self.enum_values_by_name = {
        v.name: v for t in enum_types for v in t.values}

    self.extensions = extensions
    for extension in self.extensions:
      extension.extension_scope = self
    self.extensions_by_name = {f.name: f for f in extensions}
    self.is_extendable = is_extendable
    self.extension_ranges = extension_ranges
    self.oneofs = oneofs if oneofs is not None else []
    self.oneofs_by_name = {o.name: o for o in self.oneofs}
    for oneof in self.oneofs:
      oneof.containing_type = self
    self.syntax = syntax or "proto2"

  @property
  def fields_by_camelcase_name(self):
    """The same FieldDescriptor objects as |fields|, keyed by camelcase name."""
    if self._fields_by_camelcase_name is None:
      self._fields_by_camelcase_name = {
          f.camelcase_name: f for f in self.fields}
    return self._fields_by_camelcase_name

  def EnumValueName(self, enum, value):
    """Returns the string name of an enum value.

    This is just a small helper method to simplify a common operation.

    Args:
      enum: string name of the Enum.
      value: int, value of the enum.

    Returns:
      string name of the enum value.

    Raises:
      KeyError if either the Enum doesn't exist or the value is not a valid
        value for the enum.
    """
    return self.enum_types_by_name[enum].values_by_number[value].name

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.DescriptorProto.

    Args:
      proto: An empty descriptor_pb2.DescriptorProto.
    """
    # Overridden only to carry a more specific doc comment.
    super(Descriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
# TODO(robinson): We should have aggressive checking here,
|
||||
# for example:
|
||||
# * If you specify a repeated field, you should not be allowed
|
||||
# to specify a default value.
|
||||
# * [Other examples here as needed].
|
||||
#
|
||||
# TODO(robinson): for this and other *Descriptor classes, we
|
||||
# might also want to lock things down aggressively (e.g.,
|
||||
# prevent clients from setting the attributes). Having
|
||||
# stronger invariants here in general will reduce the number
|
||||
# of runtime checks we must do in reflection.py...
|
||||
class FieldDescriptor(DescriptorBase):

  """Descriptor for a single field in a .proto file.

  A FieldDescriptor instance has the following attributes:

    name: (str) Name of this field, exactly as it appears in .proto.
    full_name: (str) Name of this field, including containing scope.  This
      is particularly relevant for extensions.
    camelcase_name: (str) Camelcase name of this field.
    index: (int) Dense, 0-indexed index giving the order that this field
      textually appears within its message in the .proto file.
    number: (int) Tag number declared for this field in the .proto file.

    type: (One of the TYPE_* constants below) Declared type.
    cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
      represent this field.

    label: (One of the LABEL_* constants below) Tells whether this field
      is optional, required, or repeated.
    has_default_value: (bool) True if this field has a default value
      defined, otherwise false.
    default_value: (Varies) Default value of this field.  Only meaningful
      for non-repeated scalar fields.  Repeated fields should always set
      this to [], and non-repeated composite fields should always set
      this to None.

    containing_type: (Descriptor) Descriptor of the protocol message type
      that contains this field.  Set by the Descriptor constructor if
      we're passed into one.
      Somewhat confusingly, for extension fields, this is the descriptor
      of the EXTENDED message, not the descriptor of the message
      containing this field.  (See is_extension and extension_scope
      below.)
    message_type: (Descriptor) If a composite field, a descriptor of the
      message type contained in this field.  Otherwise, this is None.
    enum_type: (EnumDescriptor) If this field contains an enum, a
      descriptor of that enum.  Otherwise, this is None.

    is_extension: True iff this describes an extension field.
    extension_scope: (Descriptor) Only meaningful if is_extension is True.
      Gives the message that immediately contains this extension field.
      Will be None iff we're a top-level (file-level) extension field.

    options: (descriptor_pb2.FieldOptions) Protocol message field options
      or None to use default field options.

    containing_oneof: (OneofDescriptor) If the field is a member of a
      oneof union, contains its descriptor.  Otherwise, None.
  """

  # Must be consistent with C++ FieldDescriptor::Type enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  TYPE_DOUBLE = 1
  TYPE_FLOAT = 2
  TYPE_INT64 = 3
  TYPE_UINT64 = 4
  TYPE_INT32 = 5
  TYPE_FIXED64 = 6
  TYPE_FIXED32 = 7
  TYPE_BOOL = 8
  TYPE_STRING = 9
  TYPE_GROUP = 10
  TYPE_MESSAGE = 11
  TYPE_BYTES = 12
  TYPE_UINT32 = 13
  TYPE_ENUM = 14
  TYPE_SFIXED32 = 15
  TYPE_SFIXED64 = 16
  TYPE_SINT32 = 17
  TYPE_SINT64 = 18
  MAX_TYPE = 18

  # Must be consistent with C++ FieldDescriptor::CppType enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  CPPTYPE_INT32 = 1
  CPPTYPE_INT64 = 2
  CPPTYPE_UINT32 = 3
  CPPTYPE_UINT64 = 4
  CPPTYPE_DOUBLE = 5
  CPPTYPE_FLOAT = 6
  CPPTYPE_BOOL = 7
  CPPTYPE_ENUM = 8
  CPPTYPE_STRING = 9
  CPPTYPE_MESSAGE = 10
  MAX_CPPTYPE = 10

  # Maps each declared TYPE_* to the CPPTYPE_* used to represent it.
  _PYTHON_TO_CPP_PROTO_TYPE_MAP = {
      TYPE_DOUBLE: CPPTYPE_DOUBLE,
      TYPE_FLOAT: CPPTYPE_FLOAT,
      TYPE_ENUM: CPPTYPE_ENUM,
      TYPE_INT64: CPPTYPE_INT64,
      TYPE_SINT64: CPPTYPE_INT64,
      TYPE_SFIXED64: CPPTYPE_INT64,
      TYPE_UINT64: CPPTYPE_UINT64,
      TYPE_FIXED64: CPPTYPE_UINT64,
      TYPE_INT32: CPPTYPE_INT32,
      TYPE_SFIXED32: CPPTYPE_INT32,
      TYPE_SINT32: CPPTYPE_INT32,
      TYPE_UINT32: CPPTYPE_UINT32,
      TYPE_FIXED32: CPPTYPE_UINT32,
      TYPE_BYTES: CPPTYPE_STRING,
      TYPE_STRING: CPPTYPE_STRING,
      TYPE_BOOL: CPPTYPE_BOOL,
      TYPE_MESSAGE: CPPTYPE_MESSAGE,
      TYPE_GROUP: CPPTYPE_MESSAGE,
  }

  # Must be consistent with C++ FieldDescriptor::Label enum in
  # descriptor.h.
  #
  # TODO(robinson): Find a way to eliminate this repetition.
  LABEL_OPTIONAL = 1
  LABEL_REQUIRED = 2
  LABEL_REPEATED = 3
  MAX_LABEL = 3

  # Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
  # and kLastReservedNumber in descriptor.h
  MAX_FIELD_NUMBER = (1 << 29) - 1
  FIRST_RESERVED_FIELD_NUMBER = 19000
  LAST_RESERVED_FIELD_NUMBER = 19999

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FieldDescriptor

    def __new__(cls, name, full_name, index, number, type, cpp_type, label,
                default_value, message_type, enum_type, containing_type,
                is_extension, extension_scope, options=None,
                has_default_value=True, containing_oneof=None):
      _message.Message._CheckCalledFromGeneratedFile()
      if is_extension:
        return _message.default_pool.FindExtensionByName(full_name)
      return _message.default_pool.FindFieldByName(full_name)

  def __init__(self, name, full_name, index, number, type, cpp_type, label,
               default_value, message_type, enum_type, containing_type,
               is_extension, extension_scope, options=None,
               has_default_value=True, containing_oneof=None):
    """Arguments are as described in the class docstring above.

    Note that containing_type may be None, and may be set later if necessary
    (to deal with circular references between message types, for example).
    Likewise for extension_scope.
    """
    super(FieldDescriptor, self).__init__(options, 'FieldOptions')
    self.name = name
    self.full_name = full_name
    # The camelcase name is derived lazily by the property below.
    self._camelcase_name = None
    self.index = index
    self.number = number
    self.type = type
    self.cpp_type = cpp_type
    self.label = label
    self.has_default_value = has_default_value
    self.default_value = default_value
    self.containing_type = containing_type
    self.message_type = message_type
    self.enum_type = enum_type
    self.is_extension = is_extension
    self.extension_scope = extension_scope
    self.containing_oneof = containing_oneof
    if api_implementation.Type() == 'cpp':
      pool = _message.default_pool
      if is_extension:
        self._cdescriptor = pool.FindExtensionByName(full_name)
      else:
        self._cdescriptor = pool.FindFieldByName(full_name)
    else:
      self._cdescriptor = None

  @property
  def camelcase_name(self):
    """Camelcase form of this field's name (computed on first access)."""
    if self._camelcase_name is None:
      self._camelcase_name = _ToCamelCase(self.name)
    return self._camelcase_name

  @staticmethod
  def ProtoTypeToCppProtoType(proto_type):
    """Converts from a Python proto type to a C++ Proto Type.

    The Python ProtocolBuffer classes specify both the 'Python' datatype and
    the 'C++' datatype - and they're not the same.  This helper method should
    translate from one to another.

    Args:
      proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
    Returns:
      descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
    Raises:
      TypeTransformationError: when the Python proto type isn't known.
    """
    try:
      return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
    except KeyError:
      raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
|
||||
|
||||
|
||||
class EnumDescriptor(_NestedDescriptorBase):

  """Descriptor for an enum defined in a .proto file.

  An EnumDescriptor instance has the following attributes:

    name: (str) Name of the enum type.
    full_name: (str) Full name of the type, including package name and
      any enclosing type(s).

    values: (list of EnumValueDescriptor) The values in this enum.
    values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
      but indexed by the "name" field of each EnumValueDescriptor.
    values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
      but indexed by the "number" field of each EnumValueDescriptor.
    containing_type: (Descriptor) Descriptor of the immediate containing
      type of this enum, or None if this is an enum defined at the top
      level in a .proto file.  Set by Descriptor's constructor if we're
      passed into one.
    file: (FileDescriptor) Reference to file descriptor.
    options: (descriptor_pb2.EnumOptions) Enum options message or None to
      use default enum options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumDescriptor

    def __new__(cls, name, full_name, filename, values,
                containing_type=None, options=None, file=None,
                serialized_start=None, serialized_end=None):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindEnumTypeByName(full_name)

  def __init__(self, name, full_name, filename, values,
               containing_type=None, options=None, file=None,
               serialized_start=None, serialized_end=None):
    """Arguments are as described in the class docstring above.

    Note that filename is an obsolete argument, that is not used anymore.
    Please use file.name to access this as an attribute.
    """
    super(EnumDescriptor, self).__init__(
        options, 'EnumOptions', name, full_name, file,
        containing_type, serialized_start=serialized_start,
        serialized_end=serialized_end)

    # Back-reference each value to this enum before building the indexes.
    self.values = values
    for value in self.values:
      value.type = self
    self.values_by_name = {v.name: v for v in values}
    self.values_by_number = {v.number: v for v in values}

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.EnumDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.EnumDescriptorProto.
    """
    # Overridden only to carry a more specific doc comment.
    super(EnumDescriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
class EnumValueDescriptor(DescriptorBase):

  """Descriptor for a single value within an enum.

    name: (str) Name of this value.
    index: (int) Dense, 0-indexed index giving the order that this value
      appears textually within its enum in the .proto file.
    number: (int) Actual number assigned to this enum value.
    type: (EnumDescriptor) EnumDescriptor to which this value belongs.
      Set by EnumDescriptor's constructor if we're passed into one.
    options: (descriptor_pb2.EnumValueOptions) Enum value options message
      or None to use default enum value options options.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor

    def __new__(cls, name, index, number, type=None, options=None):
      _message.Message._CheckCalledFromGeneratedFile()
      # A complete EnumValueDescriptor cannot be built from these arguments
      # alone (the name of the Enum is not known, for example).  Generated
      # files just pass the result to the EnumDescriptor() constructor,
      # which ignores it, so returning None is good enough.
      return None

  def __init__(self, name, index, number, type=None, options=None):
    """Arguments are as described in the attribute description above."""
    super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
    self.name = name
    self.index = index
    self.number = number
    self.type = type
|
||||
|
||||
|
||||
class OneofDescriptor(object):
  """Descriptor for a oneof field.

    name: (str) Name of the oneof field.
    full_name: (str) Full name of the oneof field, including package name.
    index: (int) 0-based index giving the order of the oneof field inside
      its containing type.
    containing_type: (Descriptor) Descriptor of the protocol message type
      that contains this field.  Set by the Descriptor constructor if
      we're passed into one.
    fields: (list of FieldDescriptor) The field descriptors this oneof
      can contain.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.OneofDescriptor

    def __new__(cls, name, full_name, index, containing_type, fields):
      _message.Message._CheckCalledFromGeneratedFile()
      return _message.default_pool.FindOneofByName(full_name)

  def __init__(self, name, full_name, index, containing_type, fields):
    """Arguments are as described in the attribute description above."""
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_type = containing_type
    self.fields = fields
|
||||
|
||||
|
||||
class ServiceDescriptor(_NestedDescriptorBase):

  """Descriptor for a service.

    name: (str) Name of the service.
    full_name: (str) Full name of the service, including package name.
    index: (int) 0-indexed index giving the order that this service
      definition appears within the .proto file.
    methods: (list of MethodDescriptor) The methods provided by this
      service.
    options: (descriptor_pb2.ServiceOptions) Service options message or
      None to use default service options.
    file: (FileDescriptor) Reference to file info.
  """

  def __init__(self, name, full_name, index, methods, options=None, file=None,
               serialized_start=None, serialized_end=None):
    """Arguments are as described in the attribute description above."""
    super(ServiceDescriptor, self).__init__(
        options, 'ServiceOptions', name, full_name, file,
        None, serialized_start=serialized_start,
        serialized_end=serialized_end)
    self.index = index
    self.methods = methods
    # Set the containing service for each method in this service.
    for method in self.methods:
      method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor."""
    # Linear scan; returns None when no method carries the given name.
    return next((m for m in self.methods if m.name == name), None)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.ServiceDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.ServiceDescriptorProto.
    """
    # Overridden only to carry a more specific doc comment.
    super(ServiceDescriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

    name: (str) Name of the method within the service.
    full_name: (str) Full name of method.
    index: (int) 0-indexed index of the method inside the service.
    containing_service: (ServiceDescriptor) The service that contains this
      method.
    input_type: The descriptor of the message that this method accepts.
    output_type: The descriptor of the message that this method returns.
    options: (descriptor_pb2.MethodOptions) Method options message or
      None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """Arguments are as described in the attribute description above.

    Note that containing_service may be None, and may be set later if
    necessary.
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.containing_service = containing_service
    self.input_type = input_type
    self.output_type = output_type
|
||||
|
||||
|
||||
class FileDescriptor(DescriptorBase):
  """Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.

  Note that enum_types_by_name, extensions_by_name, and dependencies
  fields are only set by the message_factory module, and not by the
  generated proto code.

  name: name of file, relative to root of source tree.
  package: name of the package
  syntax: string indicating syntax of the file (can be "proto2" or "proto3")
  serialized_pb: (str) Byte string of serialized
    descriptor_pb2.FileDescriptorProto.
  dependencies: List of other FileDescriptors this FileDescriptor depends on.
  message_types_by_name: Dict of message names of their descriptors.
  enum_types_by_name: Dict of enum names and their descriptors.
  extensions_by_name: Dict of extension names and their descriptors.
  pool: the DescriptorPool this descriptor belongs to.  When not passed to the
    constructor, the global default pool is used.
  """

  if _USE_C_DESCRIPTORS:
    _C_DESCRIPTOR_CLASS = _message.FileDescriptor

    def __new__(cls, name, package, options=None, serialized_pb=None,
                dependencies=None, syntax=None, pool=None):
      # FileDescriptor() is called from various places, not only from generated
      # files, to register dynamic proto files and messages.
      if serialized_pb:
        # TODO(amauryfa): use the pool passed as argument. This will work only
        # for C++-implemented DescriptorPools.
        # NOTE: returning here means __init__ below still runs on the
        # C-implemented object that AddSerializedFile returns.
        return _message.default_pool.AddSerializedFile(serialized_pb)
      else:
        return super(FileDescriptor, cls).__new__(cls)

  def __init__(self, name, package, options=None, serialized_pb=None,
               dependencies=None, syntax=None, pool=None):
    """Constructor.

    Arguments are as described in the class docstring.  When pool is None
    the global default DescriptorPool is used.
    """
    super(FileDescriptor, self).__init__(options, 'FileOptions')

    if pool is None:
      # Imported here rather than at module level to avoid a circular import
      # between descriptor.py and descriptor_pool.py.
      from google.protobuf import descriptor_pool
      pool = descriptor_pool.Default()
    self.pool = pool
    self.message_types_by_name = {}
    self.name = name
    self.package = package
    # An unspecified syntax defaults to proto2 semantics.
    self.syntax = syntax or "proto2"
    self.serialized_pb = serialized_pb

    self.enum_types_by_name = {}
    self.extensions_by_name = {}
    self.dependencies = (dependencies or [])

    # With the C++ implementation, also register the serialized proto in the
    # C++ default pool so both implementations agree on the definitions.
    if (api_implementation.Type() == 'cpp' and
        self.serialized_pb is not None):
      _message.default_pool.AddSerializedFile(self.serialized_pb)

  def CopyToProto(self, proto):
    """Copies this to a descriptor_pb2.FileDescriptorProto.

    Args:
      proto: An empty descriptor_pb2.FileDescriptorProto.
    """
    # The serialized form was captured at construction time, so copying is
    # just a re-parse.
    proto.ParseFromString(self.serialized_pb)
|
||||
|
||||
|
||||
def _ParseOptions(message, string):
|
||||
"""Parses serialized options.
|
||||
|
||||
This helper function is used to parse serialized options in generated
|
||||
proto2 files. It must not be used outside proto2.
|
||||
"""
|
||||
message.ParseFromString(string)
|
||||
return message
|
||||
|
||||
|
||||
def _ToCamelCase(name):
|
||||
"""Converts name to camel-case and returns it."""
|
||||
capitalize_next = False
|
||||
result = []
|
||||
|
||||
for c in name:
|
||||
if c == '_':
|
||||
if result:
|
||||
capitalize_next = True
|
||||
elif capitalize_next:
|
||||
result.append(c.upper())
|
||||
capitalize_next = False
|
||||
else:
|
||||
result += c
|
||||
|
||||
# Lower-case the first letter.
|
||||
if result and result[0].isupper():
|
||||
result[0] = result[0].lower()
|
||||
return ''.join(result)
|
||||
|
||||
|
||||
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
                   syntax=None):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
    syntax: The syntax/semantics that should be used.  Set to "proto3" to get
      proto3 field presence semantics.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with any
    # imported ones. We need to specify a file name so the descriptor pool
    # accepts our FileDescriptorProto, but it is not important what that file
    # name is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    _message.default_pool.Add(file_descriptor_proto)
    result = _message.default_pool.FindFileByName(file_descriptor_proto.name)

    if _USE_C_DESCRIPTORS:
      return result.message_types_by_name[desc_proto.name]

  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
        enum_proto.name, full_name, None, [
            EnumValueDescriptor(enum_val.name, ii, enum_val.number)
            for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False,
                                 syntax=syntax)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      type_name = field_proto.type_name
      # Only the last component of type_name is used: composite fields can
      # only be resolved when defined in the same scope (see docstring).
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    # Positional arguments follow FieldDescriptor.__init__'s signature; the
    # index is derived from the field number (number - 1), not the position
    # in desc_proto.field.
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        options=field_proto.options, has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    list(nested_types.values()), list(enum_types.values()), [],
                    options=desc_proto.options)
|
||||
141
deps/google/protobuf/descriptor_database.py
vendored
Normal file
141
deps/google/protobuf/descriptor_database.py
vendored
Normal file
@@ -0,0 +1,141 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides a container for DescriptorProtos."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
|
||||
class Error(Exception):
  """Base class for all errors raised by this module."""
  pass
|
||||
|
||||
|
||||
# Raised by DescriptorDatabase.Add when a file is re-added under the same
# name with a different FileDescriptorProto.
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor."""
|
||||
|
||||
|
||||
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # Maps file name -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Maps fully-qualified symbol name -> FileDescriptorProto defining it.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Re-adding a byte-identical proto under the same name is a no-op for the
    file index (the symbol index is refreshed either way).

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    if proto_name not in self._file_desc_protos_by_file:
      self._file_desc_protos_by_file[proto_name] = file_desc_proto
    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
      # Conflict check happens before the symbol index is touched, so a
      # conflicting Add leaves the database unchanged.
      raise DescriptorDatabaseConflictingDefinitionError(
          '%s already added, but with different descriptor.' % proto_name)

    # Add the top-level Message, Enum and Extension descriptors to the index.
    package = file_desc_proto.package
    for message in file_desc_proto.message_type:
      self._file_desc_protos_by_symbol.update(
          (name, file_desc_proto) for name in _ExtractSymbols(message, package))
    for enum in file_desc_proto.enum_type:
      self._file_desc_protos_by_symbol[
          '.'.join((package, enum.name))] = file_desc_proto
    for extension in file_desc_proto.extension:
      self._file_desc_protos_by_symbol[
          '.'.join((package, extension.name))] = file_desc_proto

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """

    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """

    return self._file_desc_protos_by_symbol[symbol]
|
||||
|
||||
|
||||
def _ExtractSymbols(desc_proto, package):
|
||||
"""Pulls out all the symbols from a descriptor proto.
|
||||
|
||||
Args:
|
||||
desc_proto: The proto to extract symbols from.
|
||||
package: The package containing the descriptor type.
|
||||
|
||||
Yields:
|
||||
The fully qualified name found in the descriptor.
|
||||
"""
|
||||
|
||||
message_name = '.'.join((package, desc_proto.name))
|
||||
yield message_name
|
||||
for nested_type in desc_proto.nested_type:
|
||||
for symbol in _ExtractSymbols(nested_type, message_name):
|
||||
yield symbol
|
||||
for enum_type in desc_proto.enum_type:
|
||||
yield '.'.join((message_name, enum_type.name))
|
||||
1704
deps/google/protobuf/descriptor_pb2.py
vendored
Normal file
1704
deps/google/protobuf/descriptor_pb2.py
vendored
Normal file
File diff suppressed because one or more lines are too long
749
deps/google/protobuf/descriptor_pool.py
vendored
Normal file
749
deps/google/protobuf/descriptor_pool.py
vendored
Normal file
@@ -0,0 +1,749 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides DescriptorPool to use as a container for proto2 descriptors.
|
||||
|
||||
The DescriptorPool is used in conjection with a DescriptorDatabase to maintain
|
||||
a collection of protocol buffer descriptors for use when dynamically creating
|
||||
message types at runtime.
|
||||
|
||||
For most applications protocol buffers should be used via modules generated by
|
||||
the protocol buffer compiler tool. This should only be used when the type of
|
||||
protocol buffers used in an application or library cannot be predetermined.
|
||||
|
||||
Below is a straightforward example on how to use this class:
|
||||
|
||||
pool = DescriptorPool()
|
||||
file_descriptor_protos = [ ... ]
|
||||
for file_descriptor_proto in file_descriptor_protos:
|
||||
pool.Add(file_descriptor_proto)
|
||||
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
|
||||
|
||||
The message descriptor can be used in conjunction with the message_factory
|
||||
module in order to create a protocol buffer class that can be encoded and
|
||||
decoded.
|
||||
|
||||
If you want to get a Python class for the specified proto, use the
|
||||
helper functions inside google.protobuf.message_factory
|
||||
directly instead of this class.
|
||||
"""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import text_encoding
|
||||
|
||||
|
||||
_USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS
|
||||
|
||||
|
||||
def _NormalizeFullyQualifiedName(name):
|
||||
"""Remove leading period from fully-qualified type name.
|
||||
|
||||
Due to b/13860351 in descriptor_database.py, types in the root namespace are
|
||||
generated with a leading period. This function removes that prefix.
|
||||
|
||||
Args:
|
||||
name: A str, the fully-qualified symbol name.
|
||||
|
||||
Returns:
|
||||
A str, the normalized fully-qualified symbol name.
|
||||
"""
|
||||
return name.lstrip('.')
|
||||
|
||||
|
||||
class DescriptorPool(object):
|
||||
"""A collection of protobufs dynamically constructed by descriptor protos."""
|
||||
|
||||
  if _USE_C_DESCRIPTORS:

    def __new__(cls, descriptor_db=None):
      # With the C++ implementation active, constructing a DescriptorPool
      # returns the C-accelerated pool object instead of a Python instance.
      # pylint: disable=protected-access
      return descriptor._message.DescriptorPool(descriptor_db)
|
||||
|
||||
def __init__(self, descriptor_db=None):
|
||||
"""Initializes a Pool of proto buffs.
|
||||
|
||||
The descriptor_db argument to the constructor is provided to allow
|
||||
specialized file descriptor proto lookup code to be triggered on demand. An
|
||||
example would be an implementation which will read and compile a file
|
||||
specified in a call to FindFileByName() and not require the call to Add()
|
||||
at all. Results from this database will be cached internally here as well.
|
||||
|
||||
Args:
|
||||
descriptor_db: A secondary source of file descriptors.
|
||||
"""
|
||||
|
||||
self._internal_db = descriptor_database.DescriptorDatabase()
|
||||
self._descriptor_db = descriptor_db
|
||||
self._descriptors = {}
|
||||
self._enum_descriptors = {}
|
||||
self._file_descriptors = {}
|
||||
|
||||
  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    """
    # Conversion to FileDescriptor objects is lazy: the proto is only
    # indexed here and built when first looked up.
    self._internal_db.Add(file_desc_proto)
|
||||
|
||||
def AddSerializedFile(self, serialized_file_desc_proto):
|
||||
"""Adds the FileDescriptorProto and its types to this pool.
|
||||
|
||||
Args:
|
||||
serialized_file_desc_proto: A bytes string, serialization of the
|
||||
FileDescriptorProto to add.
|
||||
"""
|
||||
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf import descriptor_pb2
|
||||
file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
serialized_file_desc_proto)
|
||||
self.Add(file_desc_proto)
|
||||
|
||||
def AddDescriptor(self, desc):
|
||||
"""Adds a Descriptor to the pool, non-recursively.
|
||||
|
||||
If the Descriptor contains nested messages or enums, the caller must
|
||||
explicitly register them. This method also registers the FileDescriptor
|
||||
associated with the message.
|
||||
|
||||
Args:
|
||||
desc: A Descriptor.
|
||||
"""
|
||||
if not isinstance(desc, descriptor.Descriptor):
|
||||
raise TypeError('Expected instance of descriptor.Descriptor.')
|
||||
|
||||
self._descriptors[desc.full_name] = desc
|
||||
self.AddFileDescriptor(desc.file)
|
||||
|
||||
def AddEnumDescriptor(self, enum_desc):
|
||||
"""Adds an EnumDescriptor to the pool.
|
||||
|
||||
This method also registers the FileDescriptor associated with the message.
|
||||
|
||||
Args:
|
||||
enum_desc: An EnumDescriptor.
|
||||
"""
|
||||
|
||||
if not isinstance(enum_desc, descriptor.EnumDescriptor):
|
||||
raise TypeError('Expected instance of descriptor.EnumDescriptor.')
|
||||
|
||||
self._enum_descriptors[enum_desc.full_name] = enum_desc
|
||||
self.AddFileDescriptor(enum_desc.file)
|
||||
|
||||
def AddFileDescriptor(self, file_desc):
|
||||
"""Adds a FileDescriptor to the pool, non-recursively.
|
||||
|
||||
If the FileDescriptor contains messages or enums, the caller must explicitly
|
||||
register them.
|
||||
|
||||
Args:
|
||||
file_desc: A FileDescriptor.
|
||||
"""
|
||||
|
||||
if not isinstance(file_desc, descriptor.FileDescriptor):
|
||||
raise TypeError('Expected instance of descriptor.FileDescriptor.')
|
||||
self._file_descriptors[file_desc.name] = file_desc
|
||||
|
||||
  def FindFileByName(self, file_name):
    """Gets a FileDescriptor by file name.

    Args:
      file_name: The path to the file to get a descriptor for.

    Returns:
      A FileDescriptor for the named file.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    # Fast path: the file was already converted and cached.
    try:
      return self._file_descriptors[file_name]
    except KeyError:
      pass

    # Otherwise fetch the raw proto: first from the internal database, then
    # from the optional secondary descriptor_db.  If neither has it, the
    # original KeyError propagates.
    try:
      file_proto = self._internal_db.FindFileByName(file_name)
    except KeyError as error:
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileByName(file_name)
      else:
        raise error
    # A descriptor_db may return a falsy value instead of raising.
    if not file_proto:
      raise KeyError('Cannot find a file named %s' % file_name)
    return self._ConvertFileProtoToFileDescriptor(file_proto)
|
||||
|
||||
  def FindFileContainingSymbol(self, symbol):
    """Gets the FileDescriptor for the file containing the specified symbol.

    Args:
      symbol: The name of the symbol to search for.

    Returns:
      A FileDescriptor that contains the specified symbol.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    symbol = _NormalizeFullyQualifiedName(symbol)
    # Fast paths: the symbol may already be cached as a message or enum.
    try:
      return self._descriptors[symbol].file
    except KeyError:
      pass

    try:
      return self._enum_descriptors[symbol].file
    except KeyError:
      pass

    # Fall back to the raw proto databases: internal first, then the
    # optional secondary descriptor_db.
    try:
      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
    except KeyError as error:
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
      else:
        raise error
    # A descriptor_db may return a falsy value instead of raising.
    if not file_proto:
      raise KeyError('Cannot find a file containing %s' % symbol)
    return self._ConvertFileProtoToFileDescriptor(file_proto)
|
||||
|
||||
def FindMessageTypeByName(self, full_name):
|
||||
"""Loads the named descriptor from the pool.
|
||||
|
||||
Args:
|
||||
full_name: The full name of the descriptor to load.
|
||||
|
||||
Returns:
|
||||
The descriptor for the named type.
|
||||
"""
|
||||
|
||||
full_name = _NormalizeFullyQualifiedName(full_name)
|
||||
if full_name not in self._descriptors:
|
||||
self.FindFileContainingSymbol(full_name)
|
||||
return self._descriptors[full_name]
|
||||
|
||||
def FindEnumTypeByName(self, full_name):
|
||||
"""Loads the named enum descriptor from the pool.
|
||||
|
||||
Args:
|
||||
full_name: The full name of the enum descriptor to load.
|
||||
|
||||
Returns:
|
||||
The enum descriptor for the named type.
|
||||
"""
|
||||
|
||||
full_name = _NormalizeFullyQualifiedName(full_name)
|
||||
if full_name not in self._enum_descriptors:
|
||||
self.FindFileContainingSymbol(full_name)
|
||||
return self._enum_descriptors[full_name]
|
||||
|
||||
def FindFieldByName(self, full_name):
|
||||
"""Loads the named field descriptor from the pool.
|
||||
|
||||
Args:
|
||||
full_name: The full name of the field descriptor to load.
|
||||
|
||||
Returns:
|
||||
The field descriptor for the named field.
|
||||
"""
|
||||
full_name = _NormalizeFullyQualifiedName(full_name)
|
||||
message_name, _, field_name = full_name.rpartition('.')
|
||||
message_descriptor = self.FindMessageTypeByName(message_name)
|
||||
return message_descriptor.fields_by_name[field_name]
|
||||
|
||||
def FindExtensionByName(self, full_name):
|
||||
"""Loads the named extension descriptor from the pool.
|
||||
|
||||
Args:
|
||||
full_name: The full name of the extension descriptor to load.
|
||||
|
||||
Returns:
|
||||
A FieldDescriptor, describing the named extension.
|
||||
"""
|
||||
full_name = _NormalizeFullyQualifiedName(full_name)
|
||||
message_name, _, extension_name = full_name.rpartition('.')
|
||||
try:
|
||||
# Most extensions are nested inside a message.
|
||||
scope = self.FindMessageTypeByName(message_name)
|
||||
except KeyError:
|
||||
# Some extensions are defined at file scope.
|
||||
scope = self.FindFileContainingSymbol(full_name)
|
||||
return scope.extensions_by_name[extension_name]
|
||||
|
||||
  def _ConvertFileProtoToFileDescriptor(self, file_proto):
    """Creates a FileDescriptor from a proto or returns a cached copy.

    This method also has the side effect of loading all the symbols found in
    the file into the appropriate dictionaries in the pool.

    Args:
      file_proto: The proto to convert.

    Returns:
      A FileDescriptor matching the passed in proto.
    """

    if file_proto.name not in self._file_descriptors:
      # Dependencies must be converted first so their symbols are available
      # when resolving this file's types.
      built_deps = list(self._GetDeps(file_proto.dependency))
      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]

      file_descriptor = descriptor.FileDescriptor(
          pool=self,
          name=file_proto.name,
          package=file_proto.package,
          syntax=file_proto.syntax,
          options=file_proto.options,
          serialized_pb=file_proto.SerializeToString(),
          dependencies=direct_deps)
      if _USE_C_DESCRIPTORS:
        # When using C++ descriptors, all objects defined in the file were added
        # to the C++ database when the FileDescriptor was built above.
        # Just add them to this descriptor pool.
        def _AddMessageDescriptor(message_desc):
          self._descriptors[message_desc.full_name] = message_desc
          for nested in message_desc.nested_types:
            _AddMessageDescriptor(nested)
          for enum_type in message_desc.enum_types:
            _AddEnumDescriptor(enum_type)
        def _AddEnumDescriptor(enum_desc):
          self._enum_descriptors[enum_desc.full_name] = enum_desc
        for message_type in file_descriptor.message_types_by_name.values():
          _AddMessageDescriptor(message_type)
        for enum_type in file_descriptor.enum_types_by_name.values():
          _AddEnumDescriptor(enum_type)
      else:
        scope = {}

        # This loop extracts all the message and enum types from all the
        # dependencies of the file_proto. This is necessary to create the
        # scope of available message types when defining the passed in
        # file proto.
        for dependency in built_deps:
          scope.update(self._ExtractSymbols(
              dependency.message_types_by_name.values()))
          scope.update((_PrefixWithDot(enum.full_name), enum)
                       for enum in dependency.enum_types_by_name.values())

        # First pass: build message and enum descriptors (field types are
        # resolved in a second pass below, after all types exist).
        for message_type in file_proto.message_type:
          message_desc = self._ConvertMessageDescriptor(
              message_type, file_proto.package, file_descriptor, scope,
              file_proto.syntax)
          file_descriptor.message_types_by_name[message_desc.name] = (
              message_desc)

        for enum_type in file_proto.enum_type:
          file_descriptor.enum_types_by_name[enum_type.name] = (
              self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                          file_descriptor, None, scope))

        for index, extension_proto in enumerate(file_proto.extension):
          extension_desc = self._MakeFieldDescriptor(
              extension_proto, file_proto.package, index, is_extension=True)
          extension_desc.containing_type = self._GetTypeFromScope(
              file_descriptor.package, extension_proto.extendee, scope)
          self._SetFieldType(extension_proto, extension_desc,
                             file_descriptor.package, scope)
          file_descriptor.extensions_by_name[extension_desc.name] = (
              extension_desc)

        # Second pass: now that every type is in scope, resolve field types.
        for desc_proto in file_proto.message_type:
          self._SetAllFieldTypes(file_proto.package, desc_proto, scope)

        if file_proto.package:
          desc_proto_prefix = _PrefixWithDot(file_proto.package)
        else:
          desc_proto_prefix = ''

        for desc_proto in file_proto.message_type:
          desc = self._GetTypeFromScope(
              desc_proto_prefix, desc_proto.name, scope)
          file_descriptor.message_types_by_name[desc_proto.name] = desc

      self.Add(file_proto)
      self._file_descriptors[file_proto.name] = file_descriptor

    return self._file_descriptors[file_proto.name]
|
||||
|
||||
  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None, syntax=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.
      syntax: The syntax ("proto2"/"proto3") propagated to nested types.

    Returns:
      The added descriptor.
    """

    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    # Recursively convert nested messages first; the current message's full
    # name acts as the package for its children.
    nested = [
        self._ConvertMessageDescriptor(
            nested, desc_name, file_desc, scope, syntax)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
        for enum in desc_proto.enum_type]
    fields = [self._MakeFieldDescriptor(field, desc_name, index)
              for index, field in enumerate(desc_proto.field)]
    extensions = [
        self._MakeFieldDescriptor(extension, desc_name, index,
                                  is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    # Oneofs start with an empty field list; members are appended below once
    # the field descriptors exist.
    oneofs = [
        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
                                   index, None, [])
        for index, desc in enumerate(desc_proto.oneof_decl)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        oneofs=oneofs,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=desc_proto.options,
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None,
        syntax=syntax)
    # Back-link children to the descriptor just built.
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    # Wire oneof membership both ways (oneof -> fields, field -> oneof).
    for field_index, field_desc in enumerate(desc_proto.field):
      if field_desc.HasField('oneof_index'):
        oneof_index = field_desc.oneof_index
        oneofs[oneof_index].fields.append(fields[field_index])
        fields[field_index].containing_oneof = oneofs[oneof_index]

    # Register under the dot-prefixed name in the local scope and under the
    # plain full name in the pool cache.
    scope[_PrefixWithDot(desc_name)] = desc
    self._descriptors[desc_name] = desc
    return desc
|
||||
|
||||
def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                           containing_type=None, scope=None):
  """Builds a protobuf EnumDescriptor from an EnumDescriptorProto.

  Args:
    enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
    package: Optional package name for the new message EnumDescriptor.
    file_desc: The file containing the enum descriptor.
    containing_type: The type containing this enum.
    scope: Scope containing available types.

  Returns:
    The added descriptor.
  """
  full_name = ('.'.join((package, enum_proto.name)) if package
               else enum_proto.name)
  file_name = None if file_desc is None else file_desc.name

  value_descs = [
      self._MakeEnumValueDescriptor(value_proto, value_index)
      for value_index, value_proto in enumerate(enum_proto.value)]

  enum_desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                        full_name=full_name,
                                        filename=file_name,
                                        file=file_desc,
                                        values=value_descs,
                                        containing_type=containing_type,
                                        options=enum_proto.options)
  # Register under the fully-dotted scope key and in the pool's enum index.
  scope['.%s' % full_name] = enum_desc
  self._enum_descriptors[full_name] = enum_desc
  return enum_desc
|
||||
|
||||
def _MakeFieldDescriptor(self, field_proto, message_name, index,
                         is_extension=False):
  """Creates a field descriptor from a FieldDescriptorProto.

  For message and enum type fields, this method will do a look up
  in the pool for the appropriate descriptor for that type. If it
  is unavailable, it will fall back to the _source function to
  create it. If this type is still unavailable, construction will
  fail.

  Args:
    field_proto: The proto describing the field.
    message_name: The name of the containing message.
    index: Index of the field.
    is_extension: Indication that this field is for an extension.

  Returns:
    An initialized FieldDescriptor object.
  """
  full_name = ('.'.join((message_name, field_proto.name)) if message_name
               else field_proto.name)

  # Cross-references (cpp_type, message_type, enum_type, default_value)
  # are deliberately left unset here; _SetFieldType fills them in later
  # once all types are known.
  return descriptor.FieldDescriptor(
      name=field_proto.name,
      full_name=full_name,
      index=index,
      number=field_proto.number,
      type=field_proto.type,
      cpp_type=None,
      message_type=None,
      enum_type=None,
      containing_type=None,
      label=field_proto.label,
      has_default_value=False,
      default_value=None,
      is_extension=is_extension,
      extension_scope=None,
      options=field_proto.options)
|
||||
|
||||
def _SetAllFieldTypes(self, package, desc_proto, scope):
  """Sets all the descriptor's fields's types.

  This method also sets the containing types on any extensions.

  Args:
    package: The current package of desc_proto.
    desc_proto: The message descriptor to update.
    scope: Enclosing scope of available types.
  """
  package = _PrefixWithDot(package)
  main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)

  # Children of this message live under '<package>.<name>'; the root
  # package ('.') must not produce a double leading dot.
  nested_package = (_PrefixWithDot(desc_proto.name) if package == '.'
                    else '.'.join([package, desc_proto.name]))

  for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
    self._SetFieldType(field_proto, field_desc, nested_package, scope)

  for ext_proto, ext_desc in zip(desc_proto.extension,
                                 main_desc.extensions):
    ext_desc.containing_type = self._GetTypeFromScope(
        nested_package, ext_proto.extendee, scope)
    self._SetFieldType(ext_proto, ext_desc, nested_package, scope)

  for nested_proto in desc_proto.nested_type:
    self._SetAllFieldTypes(nested_package, nested_proto, scope)
|
||||
|
||||
def _SetFieldType(self, field_proto, field_desc, package, scope):
  """Sets the field's type, cpp_type, message_type and enum_type.

  Args:
    field_proto: Data about the field in proto format.
    field_desc: The descriptor to modify.
    package: The package the field's container is in.
    scope: Enclosing scope of available types.
  """
  # Resolve the referenced message/enum descriptor, if the field names one.
  if field_proto.type_name:
    desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
  else:
    desc = None

  # NOTE: this mutates field_proto -- when the proto carries no explicit
  # type, infer it from the kind of descriptor that type_name resolved to.
  if not field_proto.HasField('type'):
    if isinstance(desc, descriptor.Descriptor):
      field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
    else:
      field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

  field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
      field_proto.type)

  if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
      or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
    field_desc.message_type = desc

  if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
    field_desc.enum_type = desc

  if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
    # Repeated fields always default to an empty list, never a user default.
    field_desc.has_default_value = False
    field_desc.default_value = []
  elif field_proto.HasField('default_value'):
    # Explicit default: convert the textual default into a Python value.
    field_desc.has_default_value = True
    if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
        field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
      field_desc.default_value = float(field_proto.default_value)
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
      field_desc.default_value = field_proto.default_value
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
      field_desc.default_value = field_proto.default_value.lower() == 'true'
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      # Enum defaults are given by value name; look up the numeric value.
      field_desc.default_value = field_desc.enum_type.values_by_name[
          field_proto.default_value].number
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
      # Bytes defaults are C-escaped strings in the proto text.
      field_desc.default_value = text_encoding.CUnescape(
          field_proto.default_value)
    else:
      # All other types are of the "int" type.
      field_desc.default_value = int(field_proto.default_value)
  else:
    # No explicit default: use the zero value for the field's type.
    field_desc.has_default_value = False
    if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
        field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
      field_desc.default_value = 0.0
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
      field_desc.default_value = u''
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
      field_desc.default_value = False
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      # First declared enum value is the proto default.
      field_desc.default_value = field_desc.enum_type.values[0].number
    elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
      field_desc.default_value = b''
    else:
      # All other types are of the "int" type.
      field_desc.default_value = 0

  field_desc.type = field_proto.type
|
||||
|
||||
def _MakeEnumValueDescriptor(self, value_proto, index):
  """Creates an enum value descriptor object from an enum value proto.

  Args:
    value_proto: The proto describing the enum value.
    index: The index of the enum value.

  Returns:
    An initialized EnumValueDescriptor object.
  """
  # 'type' is filled in later, once the enclosing EnumDescriptor exists.
  value_desc = descriptor.EnumValueDescriptor(
      name=value_proto.name, index=index, number=value_proto.number,
      options=value_proto.options, type=None)
  return value_desc
|
||||
|
||||
def _ExtractSymbols(self, descriptors):
  """Pulls out all the symbols from descriptor protos.

  Args:
    descriptors: The messages to extract descriptors from.
  Yields:
    A two element tuple of the type name and descriptor object.
  """
  for message_desc in descriptors:
    yield (_PrefixWithDot(message_desc.full_name), message_desc)
    # Recurse into nested message types before listing sibling enums.
    for nested_symbol in self._ExtractSymbols(message_desc.nested_types):
      yield nested_symbol
    for enum_desc in message_desc.enum_types:
      yield (_PrefixWithDot(enum_desc.full_name), enum_desc)
|
||||
|
||||
def _GetDeps(self, dependencies):
|
||||
"""Recursively finds dependencies for file protos.
|
||||
|
||||
Args:
|
||||
dependencies: The names of the files being depended on.
|
||||
|
||||
Yields:
|
||||
Each direct and indirect dependency.
|
||||
"""
|
||||
|
||||
for dependency in dependencies:
|
||||
dep_desc = self.FindFileByName(dependency)
|
||||
yield dep_desc
|
||||
for parent_dep in dep_desc.dependencies:
|
||||
yield parent_dep
|
||||
|
||||
def _GetTypeFromScope(self, package, type_name, scope):
|
||||
"""Finds a given type name in the current scope.
|
||||
|
||||
Args:
|
||||
package: The package the proto should be located in.
|
||||
type_name: The name of the type to be found in the scope.
|
||||
scope: Dict mapping short and full symbols to message and enum types.
|
||||
|
||||
Returns:
|
||||
The descriptor for the requested type.
|
||||
"""
|
||||
if type_name not in scope:
|
||||
components = _PrefixWithDot(package).split('.')
|
||||
while components:
|
||||
possible_match = '.'.join(components + [type_name])
|
||||
if possible_match in scope:
|
||||
type_name = possible_match
|
||||
break
|
||||
else:
|
||||
components.pop(-1)
|
||||
return scope[type_name]
|
||||
|
||||
|
||||
def _PrefixWithDot(name):
|
||||
return name if name.startswith('.') else '.%s' % name
|
||||
|
||||
|
||||
if _USE_C_DESCRIPTORS:
  # Reuse the C++ implementation's default pool so Python- and C++-backed
  # generated code share a single descriptor registry.
  # TODO(amauryfa): This pool could be constructed from Python code, when we
  # support a flag like 'use_cpp_generated_pool=True'.
  # pylint: disable=protected-access
  _DEFAULT = descriptor._message.default_pool
else:
  # Pure-Python fallback: one process-wide DescriptorPool instance.
  _DEFAULT = DescriptorPool()
|
||||
|
||||
|
||||
def Default():
  """Returns the module-level default DescriptorPool instance."""
  return _DEFAULT
|
||||
78
deps/google/protobuf/duration_pb2.py
vendored
Normal file
78
deps/google/protobuf/duration_pb2.py
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
# NOTE(review): vendored, machine-generated module. Regenerate with protoc
# rather than editing by hand.

import sys
# _b: identity on Python 2, latin-1 encode on Python 3 (serialized_pb bytes).
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File-level descriptor parsed from the serialized duration.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/duration.proto',
  package='google.protobuf',
  syntax='proto3',
  serialized_pb=_b('\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42P\n\x13\x63om.google.protobufB\rDurationProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


# Descriptor for the google.protobuf.Duration message (seconds/nanos pair).
_DURATION = _descriptor.Descriptor(
  name='Duration',
  full_name='google.protobuf.Duration',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='seconds', full_name='google.protobuf.Duration.seconds', index=0,
      number=1, type=3, cpp_type=2, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='nanos', full_name='google.protobuf.Duration.nanos', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=51,
  serialized_end=93,
)

DESCRIPTOR.message_types_by_name['Duration'] = _DURATION

# Concrete message class backed by the descriptor above.
Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), dict(
  DESCRIPTOR = _DURATION,
  __module__ = 'google.protobuf.duration_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.Duration)
  ))
_sym_db.RegisterMessage(Duration)


DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\rDurationProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
# @@protoc_insertion_point(module_scope)
|
||||
64
deps/google/protobuf/empty_pb2.py
vendored
Normal file
64
deps/google/protobuf/empty_pb2.py
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
# NOTE(review): vendored, machine-generated module. Regenerate with protoc
# rather than editing by hand.

import sys
# _b: identity on Python 2, latin-1 encode on Python 3 (serialized_pb bytes).
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File-level descriptor parsed from the serialized empty.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/empty.proto',
  package='google.protobuf',
  syntax='proto3',
  serialized_pb=_b('\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyBP\n\x13\x63om.google.protobufB\nEmptyProtoP\x01\xa0\x01\x01\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


# Descriptor for google.protobuf.Empty (a message with no fields).
_EMPTY = _descriptor.Descriptor(
  name='Empty',
  full_name='google.protobuf.Empty',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=48,
  serialized_end=55,
)

DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY

# Concrete message class backed by the descriptor above.
Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), dict(
  DESCRIPTOR = _EMPTY,
  __module__ = 'google.protobuf.empty_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.Empty)
  ))
_sym_db.RegisterMessage(Empty)


DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\nEmptyProtoP\001\240\001\001\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
# @@protoc_insertion_point(module_scope)
|
||||
71
deps/google/protobuf/field_mask_pb2.py
vendored
Normal file
71
deps/google/protobuf/field_mask_pb2.py
vendored
Normal file
@@ -0,0 +1,71 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
# NOTE(review): vendored, machine-generated module. Regenerate with protoc
# rather than editing by hand.

import sys
# _b: identity on Python 2, latin-1 encode on Python 3 (serialized_pb bytes).
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


# File-level descriptor parsed from the serialized field_mask.proto.
DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/field_mask.proto',
  package='google.protobuf',
  syntax='proto3',
  serialized_pb=_b('\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tBQ\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01\xa0\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)


# Descriptor for google.protobuf.FieldMask (a repeated string 'paths' field).
_FIELDMASK = _descriptor.Descriptor(
  name='FieldMask',
  full_name='google.protobuf.FieldMask',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='paths', full_name='google.protobuf.FieldMask.paths', index=0,
      number=1, type=9, cpp_type=9, label=3,
      has_default_value=False, default_value=[],
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=53,
  serialized_end=79,
)

DESCRIPTOR.message_types_by_name['FieldMask'] = _FIELDMASK

# Concrete message class backed by the descriptor above.
FieldMask = _reflection.GeneratedProtocolMessageType('FieldMask', (_message.Message,), dict(
  DESCRIPTOR = _FIELDMASK,
  __module__ = 'google.protobuf.field_mask_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.FieldMask)
  ))
_sym_db.RegisterMessage(FieldMask)


DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\023com.google.protobufB\016FieldMaskProtoP\001\240\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'))
# @@protoc_insertion_point(module_scope)
|
||||
0
deps/google/protobuf/internal/__init__.py
vendored
Normal file
0
deps/google/protobuf/internal/__init__.py
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/__init__.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/_parameterized.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/_parameterized.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/any_test_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/any_test_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/api_implementation.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/api_implementation.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/containers.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/containers.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/decoder.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/decoder.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/descriptor_database_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/descriptor_database_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test1_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test1_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test2_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/descriptor_pool_test2_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/descriptor_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/descriptor_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/encoder.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/encoder.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/enum_type_wrapper.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/factory_test1_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/factory_test1_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/factory_test2_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/factory_test2_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/generator_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/generator_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/json_format_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/json_format_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/message_factory_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/message_factory_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/message_listener.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/message_listener.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/message_set_extensions_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/message_set_extensions_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/message_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/message_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/missing_enum_values_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/missing_enum_values_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/more_extensions_dynamic_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/more_extensions_dynamic_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/more_extensions_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/more_extensions_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/more_messages_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/more_messages_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/packed_field_test_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/packed_field_test_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/proto_builder_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/proto_builder_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/python_message.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/python_message.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/reflection_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/reflection_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/service_reflection_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/service_reflection_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/symbol_database_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/symbol_database_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/test_bad_identifiers_pb2.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/test_bad_identifiers_pb2.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/test_util.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/test_util.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/text_encoding_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/text_encoding_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/text_format_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/text_format_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/type_checkers.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/type_checkers.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/unknown_fields_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/unknown_fields_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/well_known_types.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/well_known_types.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/well_known_types_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/well_known_types_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/wire_format.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/wire_format.cpython-34.pyc
vendored
Normal file
Binary file not shown.
BIN
deps/google/protobuf/internal/__pycache__/wire_format_test.cpython-34.pyc
vendored
Normal file
BIN
deps/google/protobuf/internal/__pycache__/wire_format_test.cpython-34.pyc
vendored
Normal file
Binary file not shown.
443
deps/google/protobuf/internal/_parameterized.py
vendored
Normal file
443
deps/google/protobuf/internal/_parameterized.py
vendored
Normal file
@@ -0,0 +1,443 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Adds support for parameterized tests to Python's unittest TestCase class.
|
||||
|
||||
A parameterized test is a method in a test case that is invoked with different
|
||||
argument tuples.
|
||||
|
||||
A simple example:
|
||||
|
||||
class AdditionExample(parameterized.ParameterizedTestCase):
|
||||
@parameterized.Parameters(
|
||||
(1, 2, 3),
|
||||
(4, 5, 9),
|
||||
(1, 1, 3))
|
||||
def testAddition(self, op1, op2, result):
|
||||
self.assertEqual(result, op1 + op2)
|
||||
|
||||
|
||||
Each invocation is a separate test case and properly isolated just
|
||||
like a normal test method, with its own setUp/tearDown cycle. In the
|
||||
example above, there are three separate testcases, one of which will
|
||||
fail due to an assertion error (1 + 1 != 3).
|
||||
|
||||
Parameters for individual test cases can be tuples (with positional parameters)
|
||||
or dictionaries (with named parameters):
|
||||
|
||||
class AdditionExample(parameterized.ParameterizedTestCase):
|
||||
@parameterized.Parameters(
|
||||
{'op1': 1, 'op2': 2, 'result': 3},
|
||||
{'op1': 4, 'op2': 5, 'result': 9},
|
||||
)
|
||||
def testAddition(self, op1, op2, result):
|
||||
self.assertEqual(result, op1 + op2)
|
||||
|
||||
If a parameterized test fails, the error message will show the
|
||||
original test name (which is modified internally) and the arguments
|
||||
for the specific invocation, which are part of the string returned by
|
||||
the shortDescription() method on test cases.
|
||||
|
||||
The id method of the test, used internally by the unittest framework,
|
||||
is also modified to show the arguments. To make sure that test names
|
||||
stay the same across several invocations, object representations like
|
||||
|
||||
>>> class Foo(object):
|
||||
... pass
|
||||
>>> repr(Foo())
|
||||
'<__main__.Foo object at 0x23d8610>'
|
||||
|
||||
are turned into '<__main__.Foo>'. For even more descriptive names,
|
||||
especially in test logs, you can use the NamedParameters decorator. In
|
||||
this case, only tuples are supported, and the first parameters has to
|
||||
be a string (or an object that returns an apt name when converted via
|
||||
str()):
|
||||
|
||||
class NamedExample(parameterized.ParameterizedTestCase):
|
||||
@parameterized.NamedParameters(
|
||||
('Normal', 'aa', 'aaa', True),
|
||||
('EmptyPrefix', '', 'abc', True),
|
||||
('BothEmpty', '', '', True))
|
||||
def testStartsWith(self, prefix, string, result):
|
||||
self.assertEqual(result, string.startswith(prefix))
|
||||
|
||||
Named tests also have the benefit that they can be run individually
|
||||
from the command line:
|
||||
|
||||
$ testmodule.py NamedExample.testStartsWithNormal
|
||||
.
|
||||
--------------------------------------------------------------------
|
||||
Ran 1 test in 0.000s
|
||||
|
||||
OK
|
||||
|
||||
Parameterized Classes
|
||||
=====================
|
||||
If invocation arguments are shared across test methods in a single
|
||||
ParameterizedTestCase class, instead of decorating all test methods
|
||||
individually, the class itself can be decorated:
|
||||
|
||||
@parameterized.Parameters(
|
||||
(1, 2, 3),
(4, 5, 9))
|
||||
class ArithmeticTest(parameterized.ParameterizedTestCase):
|
||||
def testAdd(self, arg1, arg2, result):
|
||||
self.assertEqual(arg1 + arg2, result)
|
||||
|
||||
def testSubtract(self, arg1, arg2, result):
|
||||
self.assertEqual(result - arg1, arg2)
|
||||
|
||||
Inputs from Iterables
|
||||
=====================
|
||||
If parameters should be shared across several test cases, or are dynamically
|
||||
created from other sources, a single non-tuple iterable can be passed into
|
||||
the decorator. This iterable will be used to obtain the test cases:
|
||||
|
||||
class AdditionExample(parameterized.ParameterizedTestCase):
|
||||
@parameterized.Parameters(
|
||||
(c.op1, c.op2, c.result) for c in testcases
|
||||
)
|
||||
def testAddition(self, op1, op2, result):
|
||||
self.assertEqual(result, op1 + op2)
|
||||
|
||||
|
||||
Single-Argument Test Methods
|
||||
============================
|
||||
If a test method takes only one argument, the single argument does not need to
|
||||
be wrapped into a tuple:
|
||||
|
||||
class NegativeNumberExample(parameterized.ParameterizedTestCase):
|
||||
@parameterized.Parameters(
|
||||
-1, -3, -4, -5
|
||||
)
|
||||
def testIsNegative(self, arg):
|
||||
self.assertTrue(IsNegative(arg))
|
||||
"""
|
||||
|
||||
__author__ = 'tmarek@google.com (Torsten Marek)'
|
||||
|
||||
import collections
|
||||
import functools
|
||||
import re
|
||||
import types
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
import uuid
|
||||
|
||||
import six
|
||||
|
||||
ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
|
||||
_SEPARATOR = uuid.uuid1().hex
|
||||
_FIRST_ARG = object()
|
||||
_ARGUMENT_REPR = object()
|
||||
|
||||
|
||||
def _CleanRepr(obj):
|
||||
return ADDR_RE.sub(r'<\1>', repr(obj))
|
||||
|
||||
|
||||
# Helper function formerly from the unittest module, removed from it in
|
||||
# Python 2.7.
|
||||
def _StrClass(cls):
|
||||
return '%s.%s' % (cls.__module__, cls.__name__)
|
||||
|
||||
|
||||
def _NonStringIterable(obj):
  """Returns True iff obj is an iterable but not a string type.

  Args:
    obj: Any object.

  Returns:
    True if obj is iterable and is not an instance of a string type.
  """
  # The ABC aliases on the collections module moved to collections.abc and
  # were removed from the collections namespace in Python 3.10.
  try:
    from collections.abc import Iterable as _Iterable
  except ImportError:  # Python 2
    _Iterable = collections.Iterable
  return (isinstance(obj, _Iterable) and not
          isinstance(obj, six.string_types))
|
||||
|
||||
|
||||
def _FormatParameterList(testcase_params):
  """Formats one testcase's parameters for use in a generated test name.

  Args:
    testcase_params: A dict (named parameters), a non-string iterable
        (positional parameters), or a single bare value.

  Returns:
    A comma-separated string of cleaned reprs; dict entries are rendered
    as 'name=value'.
  """
  # collections.Mapping moved to collections.abc and was removed from the
  # collections namespace in Python 3.10.
  try:
    from collections.abc import Mapping as _Mapping
  except ImportError:  # Python 2
    _Mapping = collections.Mapping
  if isinstance(testcase_params, _Mapping):
    return ', '.join('%s=%s' % (argname, _CleanRepr(value))
                     for argname, value in testcase_params.items())
  elif _NonStringIterable(testcase_params):
    return ', '.join(map(_CleanRepr, testcase_params))
  else:
    # A single bare value: wrap it so the iterable branch formats it.
    return _FormatParameterList((testcase_params,))
|
||||
|
||||
|
||||
class _ParameterizedTestIter(object):
  """Callable and iterable class for producing new test cases."""

  def __init__(self, test_method, testcases, naming_type):
    """Returns concrete test functions for a test and a list of parameters.

    The naming_type is used to determine the name of the concrete
    functions as reported by the unittest framework. If naming_type is
    _FIRST_ARG, the testcases must be tuples, and the first element must
    have a string representation that is a valid Python identifier.

    Args:
      test_method: The decorated test method.
      testcases: (list of tuple/dict) A list of parameter
                 tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
    """
    self._test_method = test_method
    self.testcases = testcases
    self._naming_type = naming_type

  def __call__(self, *args, **kwargs):
    # A decorated method was invoked directly, which means the metaclass
    # never expanded it into concrete tests.
    raise RuntimeError('You appear to be running a parameterized test case '
                       'without having inherited from parameterized.'
                       'ParameterizedTestCase. This is bad because none of '
                       'your test cases are actually being run.')

  def __iter__(self):
    test_method = self._test_method
    naming_type = self._naming_type

    def MakeBoundParamTest(testcase_params):
      # Builds one concrete test function bound to a single parameter set.
      @functools.wraps(test_method)
      def BoundParamTest(self):
        # collections.Mapping moved to collections.abc and was removed from
        # the collections namespace in Python 3.10.
        try:
          from collections.abc import Mapping as _Mapping
        except ImportError:  # Python 2
          _Mapping = collections.Mapping
        if isinstance(testcase_params, _Mapping):
          test_method(self, **testcase_params)
        elif _NonStringIterable(testcase_params):
          test_method(self, *testcase_params)
        else:
          test_method(self, testcase_params)

      if naming_type is _FIRST_ARG:
        # Signal the metaclass that the name of the test function is unique
        # and descriptive.
        BoundParamTest.__x_use_name__ = True
        BoundParamTest.__name__ += str(testcase_params[0])
        testcase_params = testcase_params[1:]
      elif naming_type is _ARGUMENT_REPR:
        # __x_extra_id__ is used to pass naming information to the __new__
        # method of TestGeneratorMetaclass.
        # The metaclass will make sure to create a unique, but nondescriptive
        # name for this test.
        BoundParamTest.__x_extra_id__ = '(%s)' % (
            _FormatParameterList(testcase_params),)
      else:
        raise RuntimeError('%s is not a valid naming type.' % (naming_type,))

      BoundParamTest.__doc__ = '%s(%s)' % (
          BoundParamTest.__name__, _FormatParameterList(testcase_params))
      if test_method.__doc__:
        BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
      return BoundParamTest
    return (MakeBoundParamTest(c) for c in self.testcases)
|
||||
|
||||
|
||||
def _IsSingletonList(testcases):
|
||||
"""True iff testcases contains only a single non-tuple element."""
|
||||
return len(testcases) == 1 and not isinstance(testcases[0], tuple)
|
||||
|
||||
|
||||
def _ModifyClass(class_object, testcases, naming_type):
  """Replaces each test method on class_object with parameterized versions.

  Every attribute whose name starts with the unittest test-method prefix
  and is a plain function is removed from the class and re-added once per
  entry in testcases via _UpdateClassDictForParamTestCase.

  Args:
    class_object: The class being decorated; mutated in place.
    testcases: Iterable of parameter tuples/dicts, one per generated test.
    naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
  """
  # A class may be parameterized only once: the metaclass stores naming data
  # in _id_suffix, and a second pass would clobber it.
  assert not getattr(class_object, '_id_suffix', None), (
      'Cannot add parameters to %s,'
      ' which already has parameterized methods.' % (class_object,))
  class_object._id_suffix = id_suffix = {}
  # We change the size of __dict__ while we iterate over it,
  # which Python 3.x will complain about, so use copy().
  for name, obj in class_object.__dict__.copy().items():
    if (name.startswith(unittest.TestLoader.testMethodPrefix)
        and isinstance(obj, types.FunctionType)):
      delattr(class_object, name)
      methods = {}
      _UpdateClassDictForParamTestCase(
          methods, id_suffix, name,
          _ParameterizedTestIter(obj, testcases, naming_type))
      # Re-attach the freshly generated per-parameter test methods.
      for name, meth in methods.items():
        setattr(class_object, name, meth)
|
||||
|
||||
|
||||
def _ParameterDecorator(naming_type, testcases):
  """Implementation of the parameterization decorators.

  Args:
    naming_type: The naming type, either _FIRST_ARG or _ARGUMENT_REPR.
    testcases: Testcase parameters.

  Returns:
    A function for modifying the decorated object.
  """
  def _Apply(obj):
    if isinstance(obj, type):
      # collections.Sequence moved to collections.abc and was removed from
      # the collections namespace in Python 3.10.
      try:
        from collections.abc import Sequence as _Sequence
      except ImportError:  # Python 2
        _Sequence = collections.Sequence
      # Materialize one-shot iterables: the class case iterates the
      # testcases once per test method.
      _ModifyClass(
          obj,
          list(testcases) if not isinstance(testcases, _Sequence)
          else testcases,
          naming_type)
      return obj
    else:
      return _ParameterizedTestIter(obj, testcases, naming_type)

  if _IsSingletonList(testcases):
    assert _NonStringIterable(testcases[0]), (
        'Single parameter argument must be a non-string iterable')
    testcases = testcases[0]

  return _Apply
|
||||
|
||||
|
||||
def Parameters(*testcases):
  """Decorates a test method (or class) to run once per parameter set.

  See the module docstring for a usage example.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples/dicts/objects (for tests
                with only one argument).

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  # _ARGUMENT_REPR naming produces unique but nondescriptive test names.
  decorator = _ParameterDecorator(_ARGUMENT_REPR, testcases)
  return decorator
|
||||
|
||||
|
||||
def NamedParameters(*testcases):
  """Decorates a test method to run once per explicitly named parameter tuple.

  See the module docstring for a usage example. The first element of
  each parameter tuple should be a string and will be appended to the
  name of the test method.

  Args:
    *testcases: Parameters for the decorated method, either a single
                iterable, or a list of tuples.

  Returns:
    A test generator to be handled by TestGeneratorMetaclass.
  """
  # _FIRST_ARG naming makes each generated test individually addressable
  # from the command line.
  decorator = _ParameterDecorator(_FIRST_ARG, testcases)
  return decorator
|
||||
|
||||
|
||||
class TestGeneratorMetaclass(type):
  """Metaclass for test cases with test generators.

  A test generator is an iterable in a testcase that produces callables. These
  callables must be single-argument methods. These methods are injected into
  the class namespace and the original iterable is removed. If the name of the
  iterable conforms to the test pattern, the injected methods will be picked
  up as tests by the unittest framework.

  In general, it is supposed to be used in conjunction with the
  Parameters decorator.
  """

  def __new__(mcs, class_name, bases, dct):
    dct['_id_suffix'] = id_suffix = {}
    # Iterate over a snapshot of the items: the loop body pops the generator
    # attribute and inserts the generated methods, and mutating dct while
    # iterating dct.items() raises RuntimeError on Python 3.
    for name, obj in list(dct.items()):
      if (name.startswith(unittest.TestLoader.testMethodPrefix) and
          _NonStringIterable(obj)):
        iterator = iter(obj)
        dct.pop(name)
        _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)

    return type.__new__(mcs, class_name, bases, dct)
|
||||
|
||||
|
||||
def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
  """Injects the generated test functions for one generator into dct.

  Args:
    dct: The target dictionary.
    id_suffix: The dictionary for mapping names to test IDs.
    name: The original name of the test case.
    iterator: The iterator generating the individual test cases.
  """
  for idx, func in enumerate(iterator):
    assert callable(func), 'Test generators must yield callables, got %r' % (
        func,)
    # _FIRST_ARG tests carry a descriptive name of their own; otherwise a
    # unique name is synthesized from the original name and the index.
    has_own_name = getattr(func, '__x_use_name__', False)
    new_name = (func.__name__ if has_own_name
                else '%s%s%d' % (name, _SEPARATOR, idx))
    assert new_name not in dct, (
        'Name of parameterized test case "%s" not unique' % (new_name,))
    dct[new_name] = func
    id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
|
||||
|
||||
|
||||
class ParameterizedTestCase(unittest.TestCase):
  """Base class for test cases using the Parameters decorator."""
  # NOTE(review): the __metaclass__ attribute is only honored by Python 2;
  # Python 3 requires `metaclass=` in the class statement, so generator
  # expansion may not happen there — confirm the intended Python support.
  __metaclass__ = TestGeneratorMetaclass

  def _OriginalName(self):
    # Generated names are '<original><_SEPARATOR><index>'; strip the
    # generated suffix to recover the author-written method name.
    return self._testMethodName.split(_SEPARATOR)[0]

  def __str__(self):
    return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))

  def id(self):  # pylint: disable=invalid-name
    """Returns the descriptive ID of the test.

    This is used internally by the unittesting framework to get a name
    for the test to be used in reports.

    Returns:
      The test id.
    """
    # _id_suffix is populated by TestGeneratorMetaclass/_ModifyClass and maps
    # generated method names to a '(param, ...)' display suffix.
    return '%s.%s%s' % (_StrClass(self.__class__),
                        self._OriginalName(),
                        self._id_suffix.get(self._testMethodName, ''))
|
||||
|
||||
|
||||
def CoopParameterizedTestCase(other_base_class):
  """Returns a new base class with a cooperative metaclass base.

  This enables the ParameterizedTestCase to be used in combination
  with other base classes that have custom metaclasses, such as
  mox.MoxTestBase.

  Only works with metaclasses that do not override type.__new__.

  Example:

    import google3
    import mox

    from google3.testing.pybase import parameterized

    class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)):
      ...

  Args:
    other_base_class: (class) A test case base class.

  Returns:
    A new class object.
  """
  # Derive a metaclass combining the other base's metaclass with ours, then
  # use it to build a base class inheriting from both test case classes.
  coop_metaclass = type(
      'CoopMetaclass',
      (other_base_class.__metaclass__, TestGeneratorMetaclass),
      {})
  coop_bases = (other_base_class, ParameterizedTestCase)
  return coop_metaclass('CoopParameterizedTestCase', coop_bases, {})
|
||||
79
deps/google/protobuf/internal/any_test_pb2.py
vendored
Normal file
79
deps/google/protobuf/internal/any_test_pb2.py
vendored
Normal file
@@ -0,0 +1,79 @@
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/any_test.proto
# NOTE(review): machine-generated module — change the .proto and regenerate
# with protoc rather than editing this file by hand.

import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2


DESCRIPTOR = _descriptor.FileDescriptor(
  name='google/protobuf/internal/any_test.proto',
  package='google.protobuf.internal',
  syntax='proto3',
  serialized_pb=_b('\n\'google/protobuf/internal/any_test.proto\x12\x18google.protobuf.internal\x1a\x19google/protobuf/any.proto\"A\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tint_value\x18\x02 \x01(\x05\x62\x06proto3')
  ,
  dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)




_TESTANY = _descriptor.Descriptor(
  name='TestAny',
  full_name='google.protobuf.internal.TestAny',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  fields=[
    _descriptor.FieldDescriptor(
      name='value', full_name='google.protobuf.internal.TestAny.value', index=0,
      number=1, type=11, cpp_type=10, label=1,
      has_default_value=False, default_value=None,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
    _descriptor.FieldDescriptor(
      name='int_value', full_name='google.protobuf.internal.TestAny.int_value', index=1,
      number=2, type=5, cpp_type=1, label=1,
      has_default_value=False, default_value=0,
      message_type=None, enum_type=None, containing_type=None,
      is_extension=False, extension_scope=None,
      options=None),
  ],
  extensions=[
  ],
  nested_types=[],
  enum_types=[
  ],
  options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[
  ],
  serialized_start=96,
  serialized_end=161,
)

_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY

TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), dict(
  DESCRIPTOR = _TESTANY,
  __module__ = 'google.protobuf.internal.any_test_pb2'
  # @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny)
  ))
_sym_db.RegisterMessage(TestAny)


# @@protoc_insertion_point(module_scope)
|
||||
107
deps/google/protobuf/internal/api_implementation.py
vendored
Normal file
107
deps/google/protobuf/internal/api_implementation.py
vendored
Normal file
@@ -0,0 +1,107 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Determine which implementation of the protobuf API is used in this process.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
try:
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf.internal import _api_implementation
|
||||
# The compile-time constants in the _api_implementation module can be used to
|
||||
# switch to a certain implementation of the Python API at build time.
|
||||
_api_version = _api_implementation.api_version
|
||||
_proto_extension_modules_exist_in_build = True
|
||||
except ImportError:
|
||||
_api_version = -1 # Unspecified by compiler flags.
|
||||
_proto_extension_modules_exist_in_build = False
|
||||
|
||||
if _api_version == 1:
|
||||
raise ValueError('api_version=1 is no longer supported.')
|
||||
if _api_version < 0: # Still unspecified?
|
||||
try:
|
||||
# The presence of this module in a build allows the proto implementation to
|
||||
# be upgraded merely via build deps rather than a compiler flag or the
|
||||
# runtime environment variable.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf import _use_fast_cpp_protos
|
||||
# Work around a known issue in the classic bootstrap .par import hook.
|
||||
if not _use_fast_cpp_protos:
|
||||
raise ImportError('_use_fast_cpp_protos import succeeded but was None')
|
||||
del _use_fast_cpp_protos
|
||||
_api_version = 2
|
||||
except ImportError:
|
||||
if _proto_extension_modules_exist_in_build:
|
||||
if sys.version_info[0] >= 3: # Python 3 defaults to C++ impl v2.
|
||||
_api_version = 2
|
||||
# TODO(b/17427486): Make Python 2 default to C++ impl v2.
|
||||
|
||||
_default_implementation_type = (
|
||||
'python' if _api_version <= 0 else 'cpp')
|
||||
|
||||
# This environment variable can be used to switch to a certain implementation
|
||||
# of the Python API, overriding the compile-time constants in the
|
||||
# _api_implementation module. Right now only 'python' and 'cpp' are valid
|
||||
# values. Any other value will be ignored.
|
||||
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
|
||||
_default_implementation_type)
|
||||
|
||||
if _implementation_type != 'python':
|
||||
_implementation_type = 'cpp'
|
||||
|
||||
# This environment variable can be used to switch between the two
|
||||
# 'cpp' implementations, overriding the compile-time constants in the
|
||||
# _api_implementation module. Right now only '2' is supported. Any other
|
||||
# value will cause an error to be raised.
|
||||
_implementation_version_str = os.getenv(
|
||||
'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', '2')
|
||||
|
||||
if _implementation_version_str != '2':
|
||||
raise ValueError(
|
||||
'unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: "' +
|
||||
_implementation_version_str + '" (supported versions: 2)'
|
||||
)
|
||||
|
||||
_implementation_version = int(_implementation_version_str)
|
||||
|
||||
|
||||
# Usage of this function is discouraged. Clients shouldn't care which
|
||||
# implementation of the API is in use. Note that there is no guarantee
|
||||
# that differences between APIs will be maintained.
|
||||
# Please don't use this function if possible.
|
||||
def Type():
|
||||
return _implementation_type
|
||||
|
||||
|
||||
# See comment on 'Type' above.
|
||||
def Version():
|
||||
return _implementation_version
|
||||
611
deps/google/protobuf/internal/containers.py
vendored
Normal file
611
deps/google/protobuf/internal/containers.py
vendored
Normal file
@@ -0,0 +1,611 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains container classes to represent different protocol buffer types.
|
||||
|
||||
This file defines container classes which represent categories of protocol
|
||||
buffer field types which need extra maintenance. Currently these categories
|
||||
are:
|
||||
- Repeated scalar fields - These are all repeated fields which aren't
|
||||
composite (e.g. they are of simple types like int32, string, etc).
|
||||
- Repeated composite fields - Repeated fields which are composite. This
|
||||
includes groups and nested messages.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
import collections
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] < 3:
  # We would use collections.MutableMapping all the time, but in Python 2 it
  # doesn't define __slots__. This causes two significant problems:
  #
  # 1. we can't disallow arbitrary attribute assignment, even if our derived
  #    classes *do* define __slots__.
  #
  # 2. we can't safely derive a C type from it without __slots__ defined (the
  #    interpreter expects to find a dict at tp_dictoffset, which we can't
  #    robustly provide. And we don't want an instance dict anyway.
  #
  # So this is the Python 2.7 definition of Mapping/MutableMapping functions
  # verbatim, except that:
  # 1. We declare __slots__.
  # 2. We don't declare this as a virtual base class. The classes defined
  #    in collections are the interesting base classes, not us.
  #
  # Note: deriving from object is critical. It is the only thing that makes
  # this a true type, allowing us to derive from it in C++ cleanly and making
  # __slots__ properly disallow arbitrary element assignment.

  class Mapping(object):
    __slots__ = ()

    def get(self, key, default=None):
      try:
        return self[key]
      except KeyError:
        return default

    def __contains__(self, key):
      try:
        self[key]
      except KeyError:
        return False
      else:
        return True

    def iterkeys(self):
      return iter(self)

    def itervalues(self):
      for key in self:
        yield self[key]

    def iteritems(self):
      for key in self:
        yield (key, self[key])

    def keys(self):
      return list(self)

    def items(self):
      return [(key, self[key]) for key in self]

    def values(self):
      return [self[key] for key in self]

    # Mappings are not hashable by default, but subclasses can change this
    __hash__ = None

    def __eq__(self, other):
      if not isinstance(other, collections.Mapping):
        return NotImplemented
      return dict(self.items()) == dict(other.items())

    def __ne__(self, other):
      return not (self == other)

  class MutableMapping(Mapping):
    __slots__ = ()

    # Private sentinel so pop() can distinguish "no default" from default=None.
    __marker = object()

    def pop(self, key, default=__marker):
      try:
        value = self[key]
      except KeyError:
        if default is self.__marker:
          raise
        return default
      else:
        del self[key]
        return value

    def popitem(self):
      try:
        key = next(iter(self))
      except StopIteration:
        raise KeyError
      value = self[key]
      del self[key]
      return key, value

    def clear(self):
      try:
        while True:
          self.popitem()
      except KeyError:
        pass

    def update(*args, **kwds):
      if len(args) > 2:
        raise TypeError("update() takes at most 2 positional "
                        "arguments ({} given)".format(len(args)))
      elif not args:
        raise TypeError("update() takes at least 1 argument (0 given)")
      self = args[0]
      other = args[1] if len(args) >= 2 else ()

      if isinstance(other, Mapping):
        for key in other:
          self[key] = other[key]
      elif hasattr(other, "keys"):
        for key in other.keys():
          self[key] = other[key]
      else:
        for key, value in other:
          self[key] = value
      for key, value in kwds.items():
        self[key] = value

    def setdefault(self, key, default=None):
      try:
        return self[key]
      except KeyError:
        self[key] = default
      return default

  collections.Mapping.register(Mapping)
  collections.MutableMapping.register(MutableMapping)

else:
  # In Python 3 we can just use MutableMapping directly, because it defines
  # __slots__. Import it from collections.abc: the alias on the top-level
  # collections module was removed in Python 3.10.
  from collections.abc import MutableMapping
|
||||
|
||||
|
||||
class BaseContainer(object):

  """Base container class."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  def __getitem__(self, key):
    """Returns the element stored under the given key/index."""
    return self._values[key]

  def __len__(self):
    """Returns how many elements the container currently holds."""
    return len(self._values)

  def __ne__(self, other):
    """Checks if another instance isn't equal to this one."""
    # The concrete classes should define __eq__.
    return not self == other

  def __hash__(self):
    raise TypeError('unhashable object')

  def __repr__(self):
    return repr(self._values)

  def sort(self, *args, **kwargs):
    """Sorts the underlying values in place, like list.sort()."""
    # Continue to support the old sort_function keyword argument by
    # translating it to the cmp keyword. A sentinel distinguishes "absent"
    # from an explicitly passed value.
    missing = object()
    legacy_cmp = kwargs.pop('sort_function', missing)
    if legacy_cmp is not missing:
      kwargs['cmp'] = legacy_cmp
    self._values.sort(*args, **kwargs)
|
||||
|
||||
|
||||
class RepeatedScalarFieldContainer(BaseContainer):
|
||||
|
||||
"""Simple, type-checked, list-like container for holding repeated scalars."""
|
||||
|
||||
# Disallows assignment to other attributes.
|
||||
__slots__ = ['_type_checker']
|
||||
|
||||
def __init__(self, message_listener, type_checker):
|
||||
"""
|
||||
Args:
|
||||
message_listener: A MessageListener implementation.
|
||||
The RepeatedScalarFieldContainer will call this object's
|
||||
Modified() method when it is modified.
|
||||
type_checker: A type_checkers.ValueChecker instance to run on elements
|
||||
inserted into this container.
|
||||
"""
|
||||
super(RepeatedScalarFieldContainer, self).__init__(message_listener)
|
||||
self._type_checker = type_checker
|
||||
|
||||
def append(self, value):
|
||||
"""Appends an item to the list. Similar to list.append()."""
|
||||
self._values.append(self._type_checker.CheckValue(value))
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
def insert(self, key, value):
|
||||
"""Inserts the item at the specified position. Similar to list.insert()."""
|
||||
self._values.insert(key, self._type_checker.CheckValue(value))
|
||||
if not self._message_listener.dirty:
|
||||
self._message_listener.Modified()
|
||||
|
||||
def extend(self, elem_seq):
|
||||
"""Extends by appending the given iterable. Similar to list.extend()."""
|
||||
|
||||
if elem_seq is None:
|
||||
return
|
||||
try:
|
||||
elem_seq_iter = iter(elem_seq)
|
||||
except TypeError:
|
||||
if not elem_seq:
|
||||
# silently ignore falsy inputs :-/.
|
||||
# TODO(ptucker): Deprecate this behavior. b/18413862
|
||||
return
|
||||
raise
|
||||
|
||||
new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
|
||||
if new_values:
|
||||
self._values.extend(new_values)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def MergeFrom(self, other):
|
||||
"""Appends the contents of another repeated field of the same type to this
|
||||
one. We do not check the types of the individual fields.
|
||||
"""
|
||||
self._values.extend(other._values)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def remove(self, elem):
|
||||
"""Removes an item from the list. Similar to list.remove()."""
|
||||
self._values.remove(elem)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def pop(self, key=-1):
|
||||
"""Removes and returns an item at a given index. Similar to list.pop()."""
|
||||
value = self._values[key]
|
||||
self.__delitem__(key)
|
||||
return value
|
||||
|
||||
def __setitem__(self, key, value):
|
||||
"""Sets the item on the specified position."""
|
||||
if isinstance(key, slice): # PY3
|
||||
if key.step is not None:
|
||||
raise ValueError('Extended slices not supported')
|
||||
self.__setslice__(key.start, key.stop, value)
|
||||
else:
|
||||
self._values[key] = self._type_checker.CheckValue(value)
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __getslice__(self, start, stop):
|
||||
"""Retrieves the subset of items from between the specified indices."""
|
||||
return self._values[start:stop]
|
||||
|
||||
def __setslice__(self, start, stop, values):
|
||||
"""Sets the subset of items from between the specified indices."""
|
||||
new_values = []
|
||||
for value in values:
|
||||
new_values.append(self._type_checker.CheckValue(value))
|
||||
self._values[start:stop] = new_values
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __delitem__(self, key):
|
||||
"""Deletes the item at the specified position."""
|
||||
del self._values[key]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __delslice__(self, start, stop):
|
||||
"""Deletes the subset of items from between the specified indices."""
|
||||
del self._values[start:stop]
|
||||
self._message_listener.Modified()
|
||||
|
||||
def __eq__(self, other):
|
||||
"""Compares the current instance with another one."""
|
||||
if self is other:
|
||||
return True
|
||||
# Special case for the same type which should be common and fast.
|
||||
if isinstance(other, self.__class__):
|
||||
return other._values == self._values
|
||||
# We are presumably comparing against some other sequence type.
|
||||
return other == self._values
|
||||
|
||||
# Register BaseContainer as a virtual subclass so isinstance() checks against
# MutableSequence succeed without actual inheritance.
# NOTE(review): collections.MutableSequence moved to collections.abc in
# Python 3.3 and was removed from the collections namespace in 3.10 --
# confirm the targeted runtime versions.
collections.MutableSequence.register(BaseContainer)
|
||||
|
||||
|
||||
class RepeatedCompositeFieldContainer(BaseContainer):

  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener, message_descriptor):
    """
    Note that we pass in a descriptor instead of the generated class directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container.  We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs):
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    new_element = self._message_descriptor._concrete_class(**kwargs)
    # Wire the child to the same listener so nested mutations propagate.
    new_element._SetListener(self._message_listener)
    self._values.append(new_element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return new_element

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for message in elem_seq:
      new_element = message_class()
      new_element._SetListener(listener)
      new_element.MergeFrom(message)
      values.append(new_element)
    # Notified unconditionally, even for an empty elem_seq.
    listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other._values)

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def pop(self, key=-1):
    """Removes and returns an item at a given index. Similar to list.pop()."""
    value = self._values[key]
    # Deleting via __delitem__ notifies the listener.
    self.__delitem__(key)
    return value

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Unlike the scalar container, comparing against arbitrary sequences is
    # rejected rather than delegated.
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
|
||||
|
||||
|
||||
class ScalarMap(MutableMapping):

  """Simple, type-checked, dict-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener']

  def __init__(self, message_listener, key_checker, value_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The ScalarMap will call this object's Modified() method when it
        is modified.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      value_checker: A type_checkers.ValueChecker instance to run on values
        inserted into this container.
    """
    self._message_listener = message_listener
    self._key_checker = key_checker
    self._value_checker = value_checker
    self._values = {}

  def __getitem__(self, key):
    try:
      return self._values[key]
    except KeyError:
      # defaultdict-like: a missing key is type-checked, then materialized
      # with the value type's default.
      key = self._key_checker.CheckValue(key)
      val = self._value_checker.DefaultValue()
      self._values[key] = val
      return val

  def __contains__(self, item):
    # We check the key's type to match the strong-typing flavor of the API.
    # Also this makes it easier to match the behavior of the C++ implementation.
    self._key_checker.CheckValue(item)
    return item in self._values

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __setitem__(self, key, value):
    checked_key = self._key_checker.CheckValue(key)
    checked_value = self._value_checker.CheckValue(value)
    self._values[checked_key] = checked_value
    self._message_listener.Modified()

  def __delitem__(self, key):
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self):
    return len(self._values)

  def __iter__(self):
    return iter(self._values)

  def __repr__(self):
    return repr(self._values)

  def MergeFrom(self, other):
    # Overlapping keys take the value from `other`.
    self._values.update(other._values)
    self._message_listener.Modified()

  def InvalidateIterators(self):
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self):
    self._values.clear()
    self._message_listener.Modified()
|
||||
|
||||
|
||||
class MessageMap(MutableMapping):

  """Simple, type-checked, dict-like container with submessage values."""

  # Disallows assignment to other attributes.
  __slots__ = ['_key_checker', '_values', '_message_listener',
               '_message_descriptor']

  def __init__(self, message_listener, message_descriptor, key_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        of the submessage values; its _concrete_class is instantiated when a
        missing key is accessed.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
    """
    self._message_listener = message_listener
    self._message_descriptor = message_descriptor
    self._key_checker = key_checker
    self._values = {}

  def __getitem__(self, key):
    try:
      return self._values[key]
    except KeyError:
      # defaultdict-like: a missing key creates a new empty submessage wired
      # to our listener, and counts as a modification.
      key = self._key_checker.CheckValue(key)
      new_element = self._message_descriptor._concrete_class()
      new_element._SetListener(self._message_listener)
      self._values[key] = new_element
      self._message_listener.Modified()

      return new_element

  def get_or_create(self, key):
    """get_or_create() is an alias for getitem (ie. map[key]).

    Args:
      key: The key to get or create in the map.

    This is useful in cases where you want to be explicit that the call is
    mutating the map.  This can avoid lint errors for statements like this
    that otherwise would appear to be pointless statements:

      msg.my_map[key]
    """
    return self[key]

  # We need to override this explicitly, because our defaultdict-like behavior
  # will make the default implementation (from our base class) always insert
  # the key.
  def get(self, key, default=None):
    if key in self:
      return self[key]
    else:
      return default

  def __contains__(self, item):
    # NOTE(review): unlike ScalarMap.__contains__, the key is not type-checked
    # here -- confirm whether that asymmetry is intentional.
    return item in self._values

  def __setitem__(self, key, value):
    # Submessage values may only be mutated in place, never replaced.
    raise ValueError('May not set values directly, call my_map[key].foo = 5')

  def __delitem__(self, key):
    del self._values[key]
    self._message_listener.Modified()

  def __len__(self):
    return len(self._values)

  def __iter__(self):
    return iter(self._values)

  def __repr__(self):
    return repr(self._values)

  def MergeFrom(self, other):
    for key in other:
      # self[key] creates the entry on demand; MergeFrom copies the content.
      self[key].MergeFrom(other[key])
    # self._message_listener.Modified() not required here, because
    # mutations to submessages already propagate.

  def InvalidateIterators(self):
    # It appears that the only way to reliably invalidate iterators to
    # self._values is to ensure that its size changes.
    original = self._values
    self._values = original.copy()
    original[None] = None

  # This is defined in the abstract base, but we can do it much more cheaply.
  def clear(self):
    self._values.clear()
    self._message_listener.Modified()
|
||||
854
deps/google/protobuf/internal/decoder.py
vendored
Normal file
854
deps/google/protobuf/internal/decoder.py
vendored
Normal file
@@ -0,0 +1,854 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Code for decoding protocol buffer primitives.
|
||||
|
||||
This code is very similar to encoder.py -- read the docs for that module first.
|
||||
|
||||
A "decoder" is a function with the signature:
|
||||
Decode(buffer, pos, end, message, field_dict)
|
||||
The arguments are:
|
||||
buffer: The string containing the encoded message.
|
||||
pos: The current position in the string.
|
||||
end: The position in the string where the current message ends. May be
|
||||
less than len(buffer) if we're reading a sub-message.
|
||||
message: The message object into which we're parsing.
|
||||
field_dict: message._fields (avoids a hashtable lookup).
|
||||
The decoder reads the field and stores it into field_dict, returning the new
|
||||
buffer position. A decoder for a repeated field may proactively decode all of
|
||||
the elements of that field, if they appear consecutively.
|
||||
|
||||
Note that decoders may throw any of the following:
|
||||
IndexError: Indicates a truncated message.
|
||||
struct.error: Unpacking of a fixed-width field failed.
|
||||
message.DecodeError: Other errors.
|
||||
|
||||
Decoders are expected to raise an exception if they are called with pos > end.
|
||||
This allows callers to be lax about bounds checking: it's fine to read past
|
||||
"end" as long as you are sure that someone else will notice and throw an
|
||||
exception later on.
|
||||
|
||||
Something up the call stack is expected to catch IndexError and struct.error
|
||||
and convert them to message.DecodeError.
|
||||
|
||||
Decoders are constructed using decoder constructors with the signature:
|
||||
MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
|
||||
The arguments are:
|
||||
field_number: The field number of the field we want to decode.
|
||||
is_repeated: Is the field a repeated field? (bool)
|
||||
is_packed: Is the field a packed field? (bool)
|
||||
key: The key to use when looking up the field within field_dict.
|
||||
(This is actually the FieldDescriptor but nothing in this
|
||||
file should depend on that.)
|
||||
new_default: A function which takes a message object as a parameter and
|
||||
returns a new instance of the default value for this field.
|
||||
(This is called for repeated fields and sub-messages, when an
|
||||
instance does not already exist.)
|
||||
|
||||
As with encoders, we define a decoder constructor for every type of field.
|
||||
Then, for every field of every message class we construct an actual decoder.
|
||||
That decoder goes into a dict indexed by tag, so when we decode a message
|
||||
we repeatedly read a tag, look up the corresponding decoder, and invoke it.
|
||||
"""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import struct
|
||||
|
||||
import six
|
||||
|
||||
if six.PY3:
|
||||
long = int
|
||||
|
||||
from google.protobuf.internal import encoder
|
||||
from google.protobuf.internal import wire_format
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
# This will overflow and thus become IEEE-754 "infinity". We would use
|
||||
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
|
||||
_POS_INF = 1e10000
|
||||
_NEG_INF = -_POS_INF
|
||||
_NAN = _POS_INF * 0
|
||||
|
||||
|
||||
# This is not for optimization, but rather to avoid conflicts with local
|
||||
# variables named "message".
|
||||
_DecodeError = message.DecodeError
|
||||
|
||||
|
||||
def _VarintDecoder(mask, result_type):
  """Return an encoder for a basic varint value (does not include tag).

  Decoded values will be bitwise-anded with the given mask before being
  returned, e.g. to limit them to 32 bits.  The returned decoder does not
  take the usual "end" parameter -- the caller is expected to do bounds checking
  after the fact (often the caller can defer such checking until later).  The
  decoder returns a (value, new_pos) pair.
  """

  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      # Each byte contributes its low 7 bits, little-endian; the high bit
      # is the continuation flag.
      b = six.indexbytes(buffer, pos)
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        result &= mask
        result = result_type(result)
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint
|
||||
|
||||
|
||||
def _SignedVarintDecoder(mask, result_type):
  """Like _VarintDecoder() but decodes signed values."""

  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      b = six.indexbytes(buffer, pos)
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        # Raw values above 2**63 - 1 encode negatives in two's complement:
        # sign-extend by subtracting 2**64 and OR-ing in the mask complement.
        if result > 0x7fffffffffffffff:
          result -= (1 << 64)
          result |= ~mask
        else:
          result &= mask
        result = result_type(result)
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint
|
||||
|
||||
# We force 32-bit values to int and 64-bit values to long to make
# alternate implementations where the distinction is more significant
# (e.g. the C++ implementation) simpler.

_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int)
|
||||
|
||||
|
||||
def ReadTag(buffer, pos):
  """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.

  We return the raw bytes of the tag rather than decoding them.  The raw
  bytes can then be used to look up the proper decoder.  This effectively allows
  us to trade some work that would be done in pure-python (decoding a varint)
  for work that is done in C (searching for a byte string in a hash table).
  In a low-level language it would be much cheaper to decode the varint and
  use that, but not in Python.
  """

  start = pos
  # Skip continuation bytes (high bit set), then include the terminating byte.
  while six.indexbytes(buffer, pos) & 0x80:
    pos += 1
  pos += 1
  return (buffer[start:pos], pos)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type: The field's wire type.
      decode_value: A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Packed fields are length-prefixed: read the byte count, then decode
        # elements until that endpoint is reached.
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        (field_dict[key], pos) = decode_value(buffer, pos)
        if pos > end:
          del field_dict[key]  # Discard corrupt value.
          raise _DecodeError('Truncated message.')
        return pos
      return DecodeField

  return SpecificDecoder
|
||||
|
||||
|
||||
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like _SimpleDecoder, but additionally runs modify_value over each decoded
  value before it is stored.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  def InnerDecode(buffer, pos):
    raw_value, new_pos = decode_value(buffer, pos)
    return (modify_value(raw_value), new_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
|
||||
|
||||
|
||||
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type: The field's wire type.
      format: The format string to pass to struct.unpack().
  """

  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    end_pos = pos + value_size
    (decoded,) = local_unpack(format, buffer[pos:end_pos])
    return (decoded, end_pos)

  return _SimpleDecoder(wire_type, InnerDecode)
|
||||
|
||||
|
||||
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    # (The "in" below is a bytes subsequence test: it matches a top byte of
    # \x7F or \xFF, i.e. exponent bits set under either sign.)
    if (float_bytes[3:4] in b'\x7F\xFF' and float_bytes[2:3] >= b'\x80'):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b'\x00\x00\x80':
        return (_NAN, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b'\xFF':
        return (_NEG_INF, new_pos)
      return (_POS_INF, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
|
||||
|
||||
|
||||
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack

  def InnerDecode(buffer, pos):
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
    # (The "in" below is a bytes subsequence test matching a top byte of
    # \x7F or \xFF.)
    if ((double_bytes[7:8] in b'\x7F\xFF')
        and (double_bytes[6:7] >= b'\xF0')
        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
      return (_NAN, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
|
||||
|
||||
|
||||
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for an enum field.

  Values not present in the enum's values_by_number are preserved as
  unknown fields on the message rather than stored in the field.
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Length-prefixed packed encoding: decode until the endpoint.
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unrecognized enum value: keep its raw bytes as an unknown field.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)
          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos]))
      if pos > endpoint:
        # Roll back whichever list received the last (corrupt) element.
        if element in enum_type.values_by_number:
          del value[-1]   # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
        raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unrecognized enum value: keep its raw bytes as an unknown field.
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        # Unrecognized enum value: keep its raw bytes as an unknown field.
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos]))
      return pos
    return DecodeField
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
# Decoder constructors for every scalar field type.  Varint-based types share
# WIRETYPE_VARINT; fixed-width types use the matching little-endian struct
# format.

Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 are zigzag-encoded varints.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
|
||||
|
||||
|
||||
def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a string field.

  Strings are length-delimited on the wire and decoded from UTF-8; a bad
  byte sequence raises UnicodeDecodeError annotated with the field name.
  """

  local_DecodeVarint = _DecodeVarint
  local_unicode = six.text_type

  def _ConvertToUnicode(byte_str):
    try:
      return local_unicode(byte_str, 'utf-8')
    except UnicodeDecodeError as e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

  # Strings are never packed, even when repeated.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a bytes field.

  Args:
    field_number: Wire-format field number of this field.
    is_repeated: True if the field has label LABEL_REPEATED.
    is_packed: Must be False; bytes fields are never packed.
    key: FieldDescriptor used as the key into the message's field_dict.
    new_default: Callable producing the field's default value.

  Returns:
    A decode function with signature (buffer, pos, end, message, field_dict)
    returning the new position after the decoded value(s).
  """

  local_DecodeVarint = _DecodeVarint  # local alias for speed in the loop

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      # Decode consecutive occurrences of this repeated bytes field.
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        # Bytes are stored raw; no UTF-8 decoding as with string fields.
        value.append(buffer[pos:new_pos])
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      # Decode a single (optional/required) bytes value.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = buffer[pos:new_pos]
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  Groups are delimited by explicit START_GROUP/END_GROUP tags rather than a
  length prefix; the sub-message is parsed until its END_GROUP tag is found.

  Args:
    field_number: Wire-format field number of this group.
    is_repeated: True if the field has label LABEL_REPEATED.
    is_packed: Must be False; groups are never packed.
    key: FieldDescriptor used as the key into the message's field_dict.
    new_default: Callable producing the field's default value.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): this lookup duplicates the one above; it appears
        # redundant but is preserved as-is since it is behaviorally harmless.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field.

  Embedded messages are length-delimited: a varint size followed by that many
  bytes of serialized sub-message.

  Args:
    field_number: Wire-format field number of this field.
    is_repeated: True if the field has label LABEL_REPEATED.
    is_packed: Must be False; message fields are never packed.
    key: FieldDescriptor used as the key into the message's field_dict.
    new_default: Callable producing the field's default value.
  """

  local_DecodeVarint = _DecodeVarint  # local alias for speed in the loop

  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
# Tag prefix used when re-serializing an unrecognized MessageSet item into
# the message's unknown fields (field 1, START_GROUP wire type).
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)
|
||||
|
||||
def MessageSetItemDecoder(extensions_by_number):
  """Returns a decoder for a MessageSet item.

  The parameter is the _extensions_by_number map for the message class.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  # Bind module-level helpers as locals for faster lookup inside DecodeItem.
  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Decodes one MessageSet item; returns the position after the item."""
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Fix: call the prebound local_SkipField alias; previously the alias
        # was bound above but never used and the module global was called,
        # inconsistent with every other prebound local in this module.
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = extensions_by_number.get(type_id)
    if extension is not None:
      # Known extension: parse the payload into the extension's message.
      value = field_dict.get(extension)
      if value is None:
        value = field_dict.setdefault(
            extension, extension.message_type._concrete_class())
      if value._InternalParse(buffer, message_start,message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unknown extension: preserve the raw item bytes as an unknown field.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
                                      buffer[message_set_item_start:pos]))

    return pos

  return DecodeItem
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
def MapDecoder(field_descriptor, new_default, is_message_map):
  """Returns a decoder for a map field.

  Each map entry arrives on the wire as a length-delimited entry sub-message
  with 'key' and 'value' fields; entries are merged into the map container.

  Args:
    field_descriptor: FieldDescriptor of the map field (also used as the
      field_dict key).
    new_default: Callable producing the map container default.
    is_message_map: True if the map's value type is itself a message, in which
      case values are merged rather than assigned.
  """

  key = field_descriptor
  tag_bytes = encoder.TagBytes(field_descriptor.number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)
  local_DecodeVarint = _DecodeVarint
  # Can't read _concrete_class yet; might not be initialized.
  message_type = field_descriptor.message_type

  def DecodeMap(buffer, pos, end, message, field_dict):
    # One scratch entry message is reused (and Clear()ed) for every entry.
    submsg = message_type._concrete_class()
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while 1:
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      submsg.Clear()
      if submsg._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it
        # encountered an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')

      if is_message_map:
        value[submsg.key].MergeFrom(submsg.value)
      else:
        value[submsg.key] = submsg.value

      # Predict that the next tag is another copy of the same repeated field.
      pos = new_pos + tag_len
      if buffer[new_pos:pos] != tag_bytes or new_pos == end:
        # Prediction failed.  Return.
        return new_pos

  return DecodeMap
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Optimization is not as heavy here because calls to SkipField() are rare,
|
||||
# except for handling end-group tags.
|
||||
|
||||
def _SkipVarint(buffer, pos, end):
|
||||
"""Skip a varint value. Returns the new position."""
|
||||
# Previously ord(buffer[pos]) raised IndexError when pos is out of range.
|
||||
# With this code, ord(b'') raises TypeError. Both are handled in
|
||||
# python_message.py to generate a 'Truncated message' error.
|
||||
while ord(buffer[pos:pos+1]) & 0x80:
|
||||
pos += 1
|
||||
pos += 1
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _SkipFixed64(buffer, pos, end):
|
||||
"""Skip a fixed64 value. Returns the new position."""
|
||||
|
||||
pos += 8
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value.  Returns the new position."""

  # The varint length prefix is read first, then that many payload bytes
  # are skipped without being inspected.
  (size, pos) = _DecodeVarint(buffer, pos)
  pos += size
  if pos > end:
    raise _DecodeError('Truncated message.')
  return pos
|
||||
|
||||
def _SkipGroup(buffer, pos, end):
  """Skip sub-group.  Returns the new position."""

  # Repeatedly skip fields until SkipField reports -1, which signals that an
  # END_GROUP tag was consumed (see _EndGroup below).
  while 1:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    new_pos = SkipField(buffer, pos, end, tag_bytes)
    if new_pos == -1:
      return pos
    pos = new_pos
|
||||
|
||||
def _EndGroup(buffer, pos, end):
|
||||
"""Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
|
||||
|
||||
return -1
|
||||
|
||||
def _SkipFixed32(buffer, pos, end):
|
||||
"""Skip a fixed32 value. Returns the new position."""
|
||||
|
||||
pos += 4
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types.  Raises an exception."""

  # Wire types 6 and 7 are unassigned in the protobuf wire format; reaching
  # this entry in the skipper table always indicates a malformed tag.
  raise _DecodeError('Tag had invalid wire type.')
|
||||
|
||||
def _FieldSkipper():
  """Constructs the SkipField function."""

  # Indexed by wire type (the low 3 bits of a tag byte); entries 6 and 7 are
  # unassigned wire types and always raise.
  WIRETYPE_TO_SKIPPER = [
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
      ]

  wiretype_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
        The new position (after the tag value), or -1 if the tag is an end-group
        tag (in which case the calling loop should break).
    """

    # The wire type is always in the first byte since varints are little-endian.
    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
    return WIRETYPE_TO_SKIPPER[wire_type](buffer, pos, end)

  return SkipField


# Module-level SkipField used throughout this decoder.
SkipField = _FieldSkipper()
|
||||
68
deps/google/protobuf/internal/descriptor_database_test.py
vendored
Normal file
68
deps/google/protobuf/internal/descriptor_database_test.py
vendored
Normal file
@@ -0,0 +1,68 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.descriptor_database."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf.internal import factory_test2_pb2
|
||||
from google.protobuf import descriptor_database
|
||||
|
||||
|
||||
class DescriptorDatabaseTest(unittest.TestCase):
  """Tests for the pure-Python descriptor_database.DescriptorDatabase."""

  def testAdd(self):
    # Register the serialized FileDescriptorProto for factory_test2 and check
    # it is retrievable by file name and by every symbol it defines,
    # including nested messages/enums.
    db = descriptor_database.DescriptorDatabase()
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto)

    self.assertEqual(file_desc_proto, db.FindFileByName(
        'google/protobuf/internal/factory_test2.proto'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum'))
|
||||
|
||||
# Allow running this test module directly from the command line.
if __name__ == '__main__':
  unittest.main()
|
||||
763
deps/google/protobuf/internal/descriptor_pool_test.py
vendored
Normal file
763
deps/google/protobuf/internal/descriptor_pool_test.py
vendored
Normal file
@@ -0,0 +1,763 @@
|
||||
#! /usr/bin/env python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.descriptor_pool."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
import os
|
||||
|
||||
try:
|
||||
import unittest2 as unittest
|
||||
except ImportError:
|
||||
import unittest
|
||||
from google.protobuf import unittest_import_pb2
|
||||
from google.protobuf import unittest_import_public_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import descriptor_pool_test1_pb2
|
||||
from google.protobuf.internal import descriptor_pool_test2_pb2
|
||||
from google.protobuf.internal import factory_test1_pb2
|
||||
from google.protobuf.internal import factory_test2_pb2
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import descriptor_pool
|
||||
from google.protobuf import message_factory
|
||||
from google.protobuf import symbol_database
|
||||
|
||||
|
||||
class DescriptorPoolTest(unittest.TestCase):
|
||||
|
||||
def CreatePool(self):
|
||||
return descriptor_pool.DescriptorPool()
|
||||
|
||||
def setUp(self):
|
||||
self.pool = self.CreatePool()
|
||||
self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test2_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.pool.Add(self.factory_test1_fd)
|
||||
self.pool.Add(self.factory_test2_fd)
|
||||
|
||||
def testFindFileByName(self):
|
||||
name1 = 'google/protobuf/internal/factory_test1.proto'
|
||||
file_desc1 = self.pool.FindFileByName(name1)
|
||||
self.assertIsInstance(file_desc1, descriptor.FileDescriptor)
|
||||
self.assertEqual(name1, file_desc1.name)
|
||||
self.assertEqual('google.protobuf.python.internal', file_desc1.package)
|
||||
self.assertIn('Factory1Message', file_desc1.message_types_by_name)
|
||||
|
||||
name2 = 'google/protobuf/internal/factory_test2.proto'
|
||||
file_desc2 = self.pool.FindFileByName(name2)
|
||||
self.assertIsInstance(file_desc2, descriptor.FileDescriptor)
|
||||
self.assertEqual(name2, file_desc2.name)
|
||||
self.assertEqual('google.protobuf.python.internal', file_desc2.package)
|
||||
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
|
||||
|
||||
def testFindFileByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFileByName('Does not exist')
|
||||
|
||||
def testFindFileContainingSymbol(self):
|
||||
file_desc1 = self.pool.FindFileContainingSymbol(
|
||||
'google.protobuf.python.internal.Factory1Message')
|
||||
self.assertIsInstance(file_desc1, descriptor.FileDescriptor)
|
||||
self.assertEqual('google/protobuf/internal/factory_test1.proto',
|
||||
file_desc1.name)
|
||||
self.assertEqual('google.protobuf.python.internal', file_desc1.package)
|
||||
self.assertIn('Factory1Message', file_desc1.message_types_by_name)
|
||||
|
||||
file_desc2 = self.pool.FindFileContainingSymbol(
|
||||
'google.protobuf.python.internal.Factory2Message')
|
||||
self.assertIsInstance(file_desc2, descriptor.FileDescriptor)
|
||||
self.assertEqual('google/protobuf/internal/factory_test2.proto',
|
||||
file_desc2.name)
|
||||
self.assertEqual('google.protobuf.python.internal', file_desc2.package)
|
||||
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
|
||||
|
||||
def testFindFileContainingSymbolFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFileContainingSymbol('Does not exist')
|
||||
|
||||
def testFindMessageTypeByName(self):
|
||||
msg1 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Message')
|
||||
self.assertIsInstance(msg1, descriptor.Descriptor)
|
||||
self.assertEqual('Factory1Message', msg1.name)
|
||||
self.assertEqual('google.protobuf.python.internal.Factory1Message',
|
||||
msg1.full_name)
|
||||
self.assertEqual(None, msg1.containing_type)
|
||||
|
||||
nested_msg1 = msg1.nested_types[0]
|
||||
self.assertEqual('NestedFactory1Message', nested_msg1.name)
|
||||
self.assertEqual(msg1, nested_msg1.containing_type)
|
||||
|
||||
nested_enum1 = msg1.enum_types[0]
|
||||
self.assertEqual('NestedFactory1Enum', nested_enum1.name)
|
||||
self.assertEqual(msg1, nested_enum1.containing_type)
|
||||
|
||||
self.assertEqual(nested_msg1, msg1.fields_by_name[
|
||||
'nested_factory_1_message'].message_type)
|
||||
self.assertEqual(nested_enum1, msg1.fields_by_name[
|
||||
'nested_factory_1_enum'].enum_type)
|
||||
|
||||
msg2 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message')
|
||||
self.assertIsInstance(msg2, descriptor.Descriptor)
|
||||
self.assertEqual('Factory2Message', msg2.name)
|
||||
self.assertEqual('google.protobuf.python.internal.Factory2Message',
|
||||
msg2.full_name)
|
||||
self.assertIsNone(msg2.containing_type)
|
||||
|
||||
nested_msg2 = msg2.nested_types[0]
|
||||
self.assertEqual('NestedFactory2Message', nested_msg2.name)
|
||||
self.assertEqual(msg2, nested_msg2.containing_type)
|
||||
|
||||
nested_enum2 = msg2.enum_types[0]
|
||||
self.assertEqual('NestedFactory2Enum', nested_enum2.name)
|
||||
self.assertEqual(msg2, nested_enum2.containing_type)
|
||||
|
||||
self.assertEqual(nested_msg2, msg2.fields_by_name[
|
||||
'nested_factory_2_message'].message_type)
|
||||
self.assertEqual(nested_enum2, msg2.fields_by_name[
|
||||
'nested_factory_2_enum'].enum_type)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['int_with_default'].has_default_value)
|
||||
self.assertEqual(
|
||||
1776, msg2.fields_by_name['int_with_default'].default_value)
|
||||
|
||||
self.assertTrue(
|
||||
msg2.fields_by_name['double_with_default'].has_default_value)
|
||||
self.assertEqual(
|
||||
9.99, msg2.fields_by_name['double_with_default'].default_value)
|
||||
|
||||
self.assertTrue(
|
||||
msg2.fields_by_name['string_with_default'].has_default_value)
|
||||
self.assertEqual(
|
||||
'hello world', msg2.fields_by_name['string_with_default'].default_value)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['bool_with_default'].has_default_value)
|
||||
self.assertFalse(msg2.fields_by_name['bool_with_default'].default_value)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['enum_with_default'].has_default_value)
|
||||
self.assertEqual(
|
||||
1, msg2.fields_by_name['enum_with_default'].default_value)
|
||||
|
||||
msg3 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message.NestedFactory2Message')
|
||||
self.assertEqual(nested_msg2, msg3)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['bytes_with_default'].has_default_value)
|
||||
self.assertEqual(
|
||||
b'a\xfb\x00c',
|
||||
msg2.fields_by_name['bytes_with_default'].default_value)
|
||||
|
||||
self.assertEqual(1, len(msg2.oneofs))
|
||||
self.assertEqual(1, len(msg2.oneofs_by_name))
|
||||
self.assertEqual(2, len(msg2.oneofs[0].fields))
|
||||
for name in ['oneof_int', 'oneof_string']:
|
||||
self.assertEqual(msg2.oneofs[0],
|
||||
msg2.fields_by_name[name].containing_oneof)
|
||||
self.assertIn(msg2.fields_by_name[name], msg2.oneofs[0].fields)
|
||||
|
||||
def testFindMessageTypeByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindMessageTypeByName('Does not exist')
|
||||
|
||||
def testFindEnumTypeByName(self):
|
||||
enum1 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Enum')
|
||||
self.assertIsInstance(enum1, descriptor.EnumDescriptor)
|
||||
self.assertEqual(0, enum1.values_by_name['FACTORY_1_VALUE_0'].number)
|
||||
self.assertEqual(1, enum1.values_by_name['FACTORY_1_VALUE_1'].number)
|
||||
|
||||
nested_enum1 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Message.NestedFactory1Enum')
|
||||
self.assertIsInstance(nested_enum1, descriptor.EnumDescriptor)
|
||||
self.assertEqual(
|
||||
0, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_0'].number)
|
||||
self.assertEqual(
|
||||
1, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_1'].number)
|
||||
|
||||
enum2 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Enum')
|
||||
self.assertIsInstance(enum2, descriptor.EnumDescriptor)
|
||||
self.assertEqual(0, enum2.values_by_name['FACTORY_2_VALUE_0'].number)
|
||||
self.assertEqual(1, enum2.values_by_name['FACTORY_2_VALUE_1'].number)
|
||||
|
||||
nested_enum2 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum')
|
||||
self.assertIsInstance(nested_enum2, descriptor.EnumDescriptor)
|
||||
self.assertEqual(
|
||||
0, nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_0'].number)
|
||||
self.assertEqual(
|
||||
1, nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_1'].number)
|
||||
|
||||
def testFindEnumTypeByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindEnumTypeByName('Does not exist')
|
||||
|
||||
def testFindFieldByName(self):
|
||||
field = self.pool.FindFieldByName(
|
||||
'google.protobuf.python.internal.Factory1Message.list_value')
|
||||
self.assertEqual(field.name, 'list_value')
|
||||
self.assertEqual(field.label, field.LABEL_REPEATED)
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFieldByName('Does not exist')
|
||||
|
||||
def testFindExtensionByName(self):
|
||||
# An extension defined in a message.
|
||||
extension = self.pool.FindExtensionByName(
|
||||
'google.protobuf.python.internal.Factory2Message.one_more_field')
|
||||
self.assertEqual(extension.name, 'one_more_field')
|
||||
# An extension defined at file scope.
|
||||
extension = self.pool.FindExtensionByName(
|
||||
'google.protobuf.python.internal.another_field')
|
||||
self.assertEqual(extension.name, 'another_field')
|
||||
self.assertEqual(extension.number, 1002)
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFieldByName('Does not exist')
|
||||
|
||||
def testExtensionsAreNotFields(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFieldByName('google.protobuf.python.internal.another_field')
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFieldByName(
|
||||
'google.protobuf.python.internal.Factory2Message.one_more_field')
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindExtensionByName(
|
||||
'google.protobuf.python.internal.Factory1Message.list_value')
|
||||
|
||||
def testUserDefinedDB(self):
|
||||
db = descriptor_database.DescriptorDatabase()
|
||||
self.pool = descriptor_pool.DescriptorPool(db)
|
||||
db.Add(self.factory_test1_fd)
|
||||
db.Add(self.factory_test2_fd)
|
||||
self.testFindMessageTypeByName()
|
||||
|
||||
def testAddSerializedFile(self):
|
||||
self.pool = descriptor_pool.DescriptorPool()
|
||||
self.pool.AddSerializedFile(self.factory_test1_fd.SerializeToString())
|
||||
self.pool.AddSerializedFile(self.factory_test2_fd.SerializeToString())
|
||||
self.testFindMessageTypeByName()
|
||||
|
||||
def testComplexNesting(self):
|
||||
test1_desc = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
descriptor_pool_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
test2_desc = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
descriptor_pool_test2_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.pool.Add(test1_desc)
|
||||
self.pool.Add(test2_desc)
|
||||
TEST1_FILE.CheckFile(self, self.pool)
|
||||
TEST2_FILE.CheckFile(self, self.pool)
|
||||
|
||||
|
||||
def testEnumDefaultValue(self):
|
||||
"""Test the default value of enums which don't start at zero."""
|
||||
def _CheckDefaultValue(file_descriptor):
|
||||
default_value = (file_descriptor
|
||||
.message_types_by_name['DescriptorPoolTest1']
|
||||
.fields_by_name['nested_enum']
|
||||
.default_value)
|
||||
self.assertEqual(default_value,
|
||||
descriptor_pool_test1_pb2.DescriptorPoolTest1.BETA)
|
||||
# First check what the generated descriptor contains.
|
||||
_CheckDefaultValue(descriptor_pool_test1_pb2.DESCRIPTOR)
|
||||
# Then check the generated pool. Normally this is the same descriptor.
|
||||
file_descriptor = symbol_database.Default().pool.FindFileByName(
|
||||
'google/protobuf/internal/descriptor_pool_test1.proto')
|
||||
self.assertIs(file_descriptor, descriptor_pool_test1_pb2.DESCRIPTOR)
|
||||
_CheckDefaultValue(file_descriptor)
|
||||
|
||||
# Then check the dynamic pool and its internal DescriptorDatabase.
|
||||
descriptor_proto = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
descriptor_pool_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.pool.Add(descriptor_proto)
|
||||
# And do the same check as above
|
||||
file_descriptor = self.pool.FindFileByName(
|
||||
'google/protobuf/internal/descriptor_pool_test1.proto')
|
||||
_CheckDefaultValue(file_descriptor)
|
||||
|
||||
def testDefaultValueForCustomMessages(self):
  """Check the value returned by non-existent fields."""

  def _AssertValueAndType(value, expected_value, expected_type):
    self.assertEqual(value, expected_value)
    self.assertIsInstance(value, expected_type)

  def _AssertDefaultValues(msg):
    # 'long' and 'unicode' only exist on Python 2; fall back to the
    # Python 3 equivalents when they are undefined.
    try:
      int64 = long
    except NameError:  # Python3
      int64 = int
    try:
      unicode_type = unicode
    except NameError:  # Python3
      unicode_type = str
    _AssertValueAndType(msg.optional_int32, 0, int)
    _AssertValueAndType(msg.optional_uint64, 0, (int64, int))
    _AssertValueAndType(msg.optional_float, 0, (float, int))
    _AssertValueAndType(msg.optional_double, 0, (float, int))
    _AssertValueAndType(msg.optional_bool, False, bool)
    _AssertValueAndType(msg.optional_string, u'', unicode_type)
    _AssertValueAndType(msg.optional_bytes, b'', bytes)
    _AssertValueAndType(msg.optional_nested_enum, msg.FOO, int)

  # First for the generated message.
  _AssertDefaultValues(unittest_pb2.TestAllTypes())

  # Then for a message built with from the DescriptorPool.
  # Dependencies must be added before the files that import them.
  pool = descriptor_pool.DescriptorPool()
  for generated_module in (unittest_import_public_pb2, unittest_import_pb2,
                           unittest_pb2):
    pool.Add(descriptor_pb2.FileDescriptorProto.FromString(
        generated_module.DESCRIPTOR.serialized_pb))
  message_class = message_factory.MessageFactory(pool).GetPrototype(
      pool.FindMessageTypeByName(
          unittest_pb2.TestAllTypes.DESCRIPTOR.full_name))
  _AssertDefaultValues(message_class())
@unittest.skipIf(api_implementation.Type() != 'cpp',
                 'explicit tests of the C++ implementation')
class CppDescriptorPoolTest(DescriptorPoolTest):
  """Re-runs every DescriptorPoolTest case against the C++ descriptor pool."""
  # TODO(amauryfa): remove when descriptor_pool.DescriptorPool() creates true
  # C++ descriptor pool object for C++ implementation.

  def CreatePool(self):
    """Return a descriptor pool backed by the C++ extension module."""
    # pylint: disable=g-import-not-at-top
    from google.protobuf.pyext import _message
    return _message.DescriptorPool()
class ProtoFile(object):
  """Expected content of one .proto file, verifiable against a pool.

  *messages* maps top-level message names to MessageType expectations.
  """

  def __init__(self, name, package, messages, dependencies=None):
    self.name = name
    self.package = package
    self.messages = messages
    self.dependencies = dependencies or []

  def CheckFile(self, test, pool):
    """Assert that *pool* resolves this file exactly as described."""
    found = pool.FindFileByName(self.name)
    test.assertEqual(self.name, found.name)
    test.assertEqual(self.package, found.package)
    test.assertEqual(self.dependencies,
                     [dep.name for dep in found.dependencies])
    for msg_name, expected_type in self.messages.items():
      expected_type.CheckType(test, None, msg_name, found)
class EnumType(object):
  """Expected enum: an ordered list of (value_name, number) pairs."""

  def __init__(self, values):
    self.values = values

  def CheckType(self, test, msg_desc, name, file_desc):
    """Assert the enum nested in *msg_desc* matches this expectation."""
    enum_desc = msg_desc.enum_types_by_name[name]
    test.assertEqual(name, enum_desc.name)
    test.assertEqual('.'.join([msg_desc.full_name, name]),
                     enum_desc.full_name)
    test.assertEqual(msg_desc, enum_desc.containing_type)
    test.assertEqual(file_desc, enum_desc.file)
    for position, (value_name, value_number) in enumerate(self.values):
      value_desc = enum_desc.values_by_name[value_name]
      test.assertEqual(value_name, value_desc.name)
      test.assertEqual(position, value_desc.index)
      test.assertEqual(value_number, value_desc.number)
      test.assertEqual(enum_desc, value_desc.type)
      # Enum values are also visible in the enclosing message scope.
      test.assertIn(value_name, msg_desc.enum_values_by_name)
class MessageType(object):
  """Expected message: nested types, ordered fields, and extensions."""

  def __init__(self, type_dict, field_list, is_extendable=False,
               extensions=None):
    # type_dict maps nested type name -> MessageType/EnumType expectation;
    # field_list and extensions are ordered (name, field expectation) pairs.
    self.type_dict = type_dict
    self.field_list = field_list
    self.is_extendable = is_extendable
    self.extensions = extensions or []

  def CheckType(self, test, containing_type_desc, name, file_desc):
    """Assert message *name* matches, recursing into nested types."""
    if containing_type_desc is None:
      # Top-level message: looked up on the file itself.
      desc = file_desc.message_types_by_name[name]
      expected_full_name = '.'.join([file_desc.package, name])
    else:
      desc = containing_type_desc.nested_types_by_name[name]
      expected_full_name = '.'.join([containing_type_desc.full_name, name])

    test.assertEqual(name, desc.name)
    test.assertEqual(expected_full_name, desc.full_name)
    test.assertEqual(containing_type_desc, desc.containing_type)
    test.assertEqual(desc.file, file_desc)
    test.assertEqual(self.is_extendable, desc.is_extendable)

    for subtype_name, subtype in self.type_dict.items():
      subtype.CheckType(test, desc, subtype_name, file_desc)

    for field_index, (field_name, field) in enumerate(self.field_list):
      field.CheckField(test, desc, field_name, field_index)

    for ext_index, (ext_name, ext_field) in enumerate(self.extensions):
      ext_field.CheckField(test, desc, ext_name, ext_index)
class EnumField(object):
  """Expected enum-typed field whose default is named, not numeric."""

  def __init__(self, number, type_name, default_value):
    self.number = number
    self.type_name = type_name
    self.default_value = default_value

  def CheckField(self, test, msg_desc, name, index):
    """Assert field *name* of *msg_desc* is this enum field."""
    field_desc = msg_desc.fields_by_name[name]
    enum_desc = msg_desc.enum_types_by_name[self.type_name]
    test.assertEqual(name, field_desc.name)
    test.assertEqual('.'.join([msg_desc.full_name, name]),
                     field_desc.full_name)
    test.assertEqual(index, field_desc.index)
    test.assertEqual(self.number, field_desc.number)
    test.assertEqual(descriptor.FieldDescriptor.TYPE_ENUM, field_desc.type)
    test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_ENUM,
                     field_desc.cpp_type)
    test.assertTrue(field_desc.has_default_value)
    # The expectation names the default; compare its numeric value.
    test.assertEqual(enum_desc.values_by_name[self.default_value].number,
                     field_desc.default_value)
    test.assertEqual(msg_desc, field_desc.containing_type)
    test.assertEqual(enum_desc, field_desc.enum_type)
class MessageField(object):
  """Expected message-typed field referencing a sibling nested type."""

  def __init__(self, number, type_name):
    self.number = number
    self.type_name = type_name

  def CheckField(self, test, msg_desc, name, index):
    """Assert field *name* of *msg_desc* is this message field."""
    field_desc = msg_desc.fields_by_name[name]
    field_type_desc = msg_desc.nested_types_by_name[self.type_name]
    test.assertEqual(name, field_desc.name)
    test.assertEqual('.'.join([msg_desc.full_name, name]),
                     field_desc.full_name)
    test.assertEqual(index, field_desc.index)
    test.assertEqual(self.number, field_desc.number)
    test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type)
    test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE,
                     field_desc.cpp_type)
    # Message-typed fields never carry an explicit default.
    test.assertFalse(field_desc.has_default_value)
    test.assertEqual(msg_desc, field_desc.containing_type)
    test.assertEqual(field_type_desc, field_desc.message_type)
class StringField(object):
  """Expected string field with an explicit default value."""

  def __init__(self, number, default_value):
    self.number = number
    self.default_value = default_value

  def CheckField(self, test, msg_desc, name, index):
    """Assert field *name* of *msg_desc* is this string field."""
    field_desc = msg_desc.fields_by_name[name]
    test.assertEqual(name, field_desc.name)
    test.assertEqual('.'.join([msg_desc.full_name, name]),
                     field_desc.full_name)
    test.assertEqual(index, field_desc.index)
    test.assertEqual(self.number, field_desc.number)
    test.assertEqual(descriptor.FieldDescriptor.TYPE_STRING, field_desc.type)
    test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_STRING,
                     field_desc.cpp_type)
    test.assertTrue(field_desc.has_default_value)
    test.assertEqual(self.default_value, field_desc.default_value)
class ExtensionField(object):
  """Expected message-typed extension declared inside another message."""

  def __init__(self, number, extended_type):
    self.number = number
    self.extended_type = extended_type

  def CheckField(self, test, msg_desc, name, index):
    """Assert extension *name* declared in *msg_desc* matches."""
    field_desc = msg_desc.extensions_by_name[name]
    test.assertEqual(name, field_desc.name)
    test.assertEqual('.'.join([msg_desc.full_name, name]),
                     field_desc.full_name)
    test.assertEqual(self.number, field_desc.number)
    test.assertEqual(index, field_desc.index)
    test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type)
    test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE,
                     field_desc.cpp_type)
    test.assertFalse(field_desc.has_default_value)
    test.assertTrue(field_desc.is_extension)
    # Declared inside msg_desc, with msg_desc itself as the value type,
    # and extending the message named by self.extended_type.
    test.assertEqual(msg_desc, field_desc.extension_scope)
    test.assertEqual(msg_desc, field_desc.message_type)
    test.assertEqual(self.extended_type, field_desc.containing_type.name)
class AddDescriptorTest(unittest.TestCase):
  """Tests registering generated descriptors directly with a DescriptorPool.

  Exercises AddDescriptor / AddEnumDescriptor / AddFileDescriptor, which
  are non-recursive, and verifies symbol look-up with and without a
  leading '.' prefix.
  """

  def _TestMessage(self, prefix):
    """Check message registration and look-up using *prefix* ('' or '.')."""
    pool = descriptor_pool.DescriptorPool()
    pool.AddDescriptor(unittest_pb2.TestAllTypes.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes',
        pool.FindMessageTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes').full_name)

    # AddDescriptor is not recursive.
    with self.assertRaises(KeyError):
      pool.FindMessageTypeByName(
          prefix + 'protobuf_unittest.TestAllTypes.NestedMessage')

    pool.AddDescriptor(unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes.NestedMessage',
        pool.FindMessageTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').full_name)

    # Files are implicitly also indexed when messages are added.
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName(
            'google/protobuf/unittest.proto').name)

    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileContainingSymbol(
            prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').name)

  @unittest.skipIf(api_implementation.Type() == 'cpp',
                   'With the cpp implementation, Add() must be called first')
  def testMessage(self):
    """Message look-up must work both with and without a leading '.'."""
    self._TestMessage('')
    self._TestMessage('.')

  def _TestEnum(self, prefix):
    """Check enum registration and look-up using *prefix* ('' or '.')."""
    pool = descriptor_pool.DescriptorPool()
    pool.AddEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.ForeignEnum',
        pool.FindEnumTypeByName(
            prefix + 'protobuf_unittest.ForeignEnum').full_name)

    # AddEnumDescriptor is not recursive.
    with self.assertRaises(KeyError):
      pool.FindEnumTypeByName(
          prefix + 'protobuf_unittest.ForeignEnum.NestedEnum')

    pool.AddEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
    self.assertEqual(
        'protobuf_unittest.TestAllTypes.NestedEnum',
        pool.FindEnumTypeByName(
            prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').full_name)

    # Files are implicitly also indexed when enums are added.
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName(
            'google/protobuf/unittest.proto').name)

    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileContainingSymbol(
            prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').name)

  @unittest.skipIf(api_implementation.Type() == 'cpp',
                   'With the cpp implementation, Add() must be called first')
  def testEnum(self):
    """Enum look-up must work both with and without a leading '.'."""
    self._TestEnum('')
    self._TestEnum('.')

  @unittest.skipIf(api_implementation.Type() == 'cpp',
                   'With the cpp implementation, Add() must be called first')
  def testFile(self):
    """AddFileDescriptor indexes the file but not its contained symbols."""
    pool = descriptor_pool.DescriptorPool()
    pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR)
    self.assertEqual(
        'google/protobuf/unittest.proto',
        pool.FindFileByName(
            'google/protobuf/unittest.proto').name)

    # AddFileDescriptor is not recursive; messages and enums within files must
    # be explicitly registered.
    with self.assertRaises(KeyError):
      pool.FindFileContainingSymbol(
          'protobuf_unittest.TestAllTypes')

  def _GetDescriptorPoolClass(self):
    """Return the pool class matching the active implementation."""
    # Test with both implementations of descriptor pools.
    if api_implementation.Type() == 'cpp':
      # pylint: disable=g-import-not-at-top
      from google.protobuf.pyext import _message
      return _message.DescriptorPool
    else:
      return descriptor_pool.DescriptorPool

  def testEmptyDescriptorPool(self):
    """A fresh pool knows nothing until a file is explicitly added."""
    # Check that an empty DescriptorPool() contains no message.
    pool = self._GetDescriptorPoolClass()()
    proto_file_name = descriptor_pb2.DESCRIPTOR.name
    self.assertRaises(KeyError, pool.FindFileByName, proto_file_name)
    # Add the above file to the pool
    file_descriptor = descriptor_pb2.FileDescriptorProto()
    descriptor_pb2.DESCRIPTOR.CopyToProto(file_descriptor)
    pool.Add(file_descriptor)
    # Now it exists.
    self.assertTrue(pool.FindFileByName(proto_file_name))

  def testCustomDescriptorPool(self):
    """A hand-built FileDescriptorProto can be added and resolved."""
    # Create a new pool, and add a file descriptor.
    pool = self._GetDescriptorPoolClass()()
    file_desc = descriptor_pb2.FileDescriptorProto(
        name='some/file.proto', package='package')
    file_desc.message_type.add(name='Message')
    pool.Add(file_desc)
    self.assertEqual(pool.FindFileByName('some/file.proto').name,
                     'some/file.proto')
    self.assertEqual(pool.FindMessageTypeByName('package.Message').name,
                     'Message')
@unittest.skipIf(
    api_implementation.Type() != 'cpp',
    'default_pool is only supported by the C++ implementation')
class DefaultPoolTest(unittest.TestCase):
  """Tests the process-wide default pool of the C++ implementation."""

  def testFindMethods(self):
    """Every Find* method must return the exact generated descriptor."""
    # pylint: disable=g-import-not-at-top
    from google.protobuf.pyext import _message
    pool = _message.default_pool
    self.assertIs(
        pool.FindFileByName('google/protobuf/unittest.proto'),
        unittest_pb2.DESCRIPTOR)
    self.assertIs(
        pool.FindMessageTypeByName('protobuf_unittest.TestAllTypes'),
        unittest_pb2.TestAllTypes.DESCRIPTOR)
    self.assertIs(
        pool.FindFieldByName('protobuf_unittest.TestAllTypes.optional_int32'),
        unittest_pb2.TestAllTypes.DESCRIPTOR.fields_by_name['optional_int32'])
    self.assertIs(
        pool.FindExtensionByName('protobuf_unittest.optional_int32_extension'),
        unittest_pb2.DESCRIPTOR.extensions_by_name['optional_int32_extension'])
    self.assertIs(
        pool.FindEnumTypeByName('protobuf_unittest.ForeignEnum'),
        unittest_pb2.ForeignEnum.DESCRIPTOR)
    self.assertIs(
        pool.FindOneofByName('protobuf_unittest.TestAllTypes.oneof_field'),
        unittest_pb2.TestAllTypes.DESCRIPTOR.oneofs_by_name['oneof_field'])

  def testAddFileDescriptor(self):
    """Adding a new file (by proto or serialized form) must not raise."""
    # pylint: disable=g-import-not-at-top
    from google.protobuf.pyext import _message
    pool = _message.default_pool
    file_desc = descriptor_pb2.FileDescriptorProto(name='some/file.proto')
    pool.Add(file_desc)
    pool.AddSerializedFile(file_desc.SerializeToString())
# Expected structure of descriptor_pool_test1.proto, checked by the pool
# tests above via ProtoFile.CheckFile. Enum numbers deliberately do not
# start at zero so default-value handling is exercised.
TEST1_FILE = ProtoFile(
    'google/protobuf/internal/descriptor_pool_test1.proto',
    'google.protobuf.python.internal',
    {
        'DescriptorPoolTest1': MessageType({
            'NestedEnum': EnumType([('ALPHA', 1), ('BETA', 2)]),
            'NestedMessage': MessageType({
                'NestedEnum': EnumType([('EPSILON', 5), ('ZETA', 6)]),
                'DeepNestedMessage': MessageType({
                    'NestedEnum': EnumType([('ETA', 7), ('THETA', 8)]),
                }, [
                    ('nested_enum', EnumField(1, 'NestedEnum', 'ETA')),
                    ('nested_field', StringField(2, 'theta')),
                ]),
            }, [
                ('nested_enum', EnumField(1, 'NestedEnum', 'ZETA')),
                ('nested_field', StringField(2, 'beta')),
                ('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
            ])
        }, [
            ('nested_enum', EnumField(1, 'NestedEnum', 'BETA')),
            ('nested_message', MessageField(2, 'NestedMessage')),
        ], is_extendable=True),

        'DescriptorPoolTest2': MessageType({
            'NestedEnum': EnumType([('GAMMA', 3), ('DELTA', 4)]),
            'NestedMessage': MessageType({
                'NestedEnum': EnumType([('IOTA', 9), ('KAPPA', 10)]),
                'DeepNestedMessage': MessageType({
                    'NestedEnum': EnumType([('LAMBDA', 11), ('MU', 12)]),
                }, [
                    ('nested_enum', EnumField(1, 'NestedEnum', 'MU')),
                    ('nested_field', StringField(2, 'lambda')),
                ]),
            }, [
                ('nested_enum', EnumField(1, 'NestedEnum', 'IOTA')),
                ('nested_field', StringField(2, 'delta')),
                ('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
            ])
        }, [
            ('nested_enum', EnumField(1, 'NestedEnum', 'GAMMA')),
            ('nested_message', MessageField(2, 'NestedMessage')),
        ]),
    })
# Expected structure of descriptor_pool_test2.proto, which depends on
# (and declares an extension of a message from) descriptor_pool_test1.proto.
TEST2_FILE = ProtoFile(
    'google/protobuf/internal/descriptor_pool_test2.proto',
    'google.protobuf.python.internal',
    {
        'DescriptorPoolTest3': MessageType({
            'NestedEnum': EnumType([('NU', 13), ('XI', 14)]),
            'NestedMessage': MessageType({
                'NestedEnum': EnumType([('OMICRON', 15), ('PI', 16)]),
                'DeepNestedMessage': MessageType({
                    'NestedEnum': EnumType([('RHO', 17), ('SIGMA', 18)]),
                }, [
                    ('nested_enum', EnumField(1, 'NestedEnum', 'RHO')),
                    ('nested_field', StringField(2, 'sigma')),
                ]),
            }, [
                ('nested_enum', EnumField(1, 'NestedEnum', 'PI')),
                ('nested_field', StringField(2, 'nu')),
                ('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
            ])
        }, [
            ('nested_enum', EnumField(1, 'NestedEnum', 'XI')),
            ('nested_message', MessageField(2, 'NestedMessage')),
        ], extensions=[
            ('descriptor_pool_test',
             ExtensionField(1001, 'DescriptorPoolTest1')),
        ]),
    },
    dependencies=['google/protobuf/internal/descriptor_pool_test1.proto'])
|
||||
# Allow running this module directly as a test script.
if __name__ == '__main__':
  unittest.main()
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user