parent 7c1b0271cc
commit 63abde6afa

139 changed files with 16860 additions and 876 deletions
							
								
								
									
lib/google/__init__.py (new normal file, +4 lines)

@@ -0,0 +1,4 @@
+import os
+import sys
+
+sys.path.insert(1, os.path.join(os.path.dirname(__file__)))
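This four-line shim is what lets the vendored copy be imported under its canonical `google.protobuf` name once `lib/` is on `sys.path`. A minimal usage sketch; the caller location and the `lib/` path are assumptions for illustration, not part of this diff:

    # Hypothetical caller: put the repository's lib/ directory on sys.path so
    # that `google.protobuf` resolves to the copy vendored by this commit.
    import os
    import sys

    sys.path.insert(0, os.path.join(os.path.dirname(__file__), "lib"))

    from google.protobuf import timestamp_pb2  # served from lib/google/protobuf/

    ts = timestamp_pb2.Timestamp()
    ts.GetCurrentTime()       # well-known-type helper provided by the vendored package
    print(ts.ToJsonString())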
							
								
								
									
lib/google/protobuf/any_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/any.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _ANY._serialized_start=46 | ||||||
|  |   _ANY._serialized_end=84 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/api_pb2.py (new normal file, +32 lines)
							|  | @ -0,0 +1,32 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/api.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 | ||||||
|  | from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _API._serialized_start=113 | ||||||
|  |   _API._serialized_end=370 | ||||||
|  |   _METHOD._serialized_start=373 | ||||||
|  |   _METHOD._serialized_end=586 | ||||||
|  |   _MIXIN._serialized_start=588 | ||||||
|  |   _MIXIN._serialized_end=623 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/compiler/plugin_pb2.py (new normal file, +35 lines)
							|  | @ -0,0 +1,35 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/compiler/plugin.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' | ||||||
|  |   _VERSION._serialized_start=101 | ||||||
|  |   _VERSION._serialized_end=171 | ||||||
|  |   _CODEGENERATORREQUEST._serialized_start=174 | ||||||
|  |   _CODEGENERATORREQUEST._serialized_end=360 | ||||||
|  |   _CODEGENERATORRESPONSE._serialized_start=363 | ||||||
|  |   _CODEGENERATORRESPONSE._serialized_end=684 | ||||||
|  |   _CODEGENERATORRESPONSE_FILE._serialized_start=499 | ||||||
|  |   _CODEGENERATORRESPONSE_FILE._serialized_end=626 | ||||||
|  |   _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 | ||||||
|  |   _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
@@ -37,14 +37,14 @@ __author__ = 'robinson@google.com (Will Robinson)'
 import threading
 import warnings
 
-from protobuf.internal import api_implementation
+from google.protobuf.internal import api_implementation
 
 _USE_C_DESCRIPTORS = False
 if api_implementation.Type() == 'cpp':
   # Used by MakeDescriptor in cpp mode
   import binascii
   import os
-  from protobuf.pyext import _message
+  from google.protobuf.pyext import _message
   _USE_C_DESCRIPTORS = True
 
 
@@ -163,7 +163,7 @@ class DescriptorBase(metaclass=DescriptorMetaclass):
     if self._options:
       return self._options
 
-    from protobuf import descriptor_pb2
+    from google.protobuf import descriptor_pb2
     try:
       options_class = getattr(descriptor_pb2,
                               self._options_class_name)
@@ -965,7 +965,7 @@ class MethodDescriptor(DescriptorBase):
         arguments.
     """
     if self.containing_service is not None:
-      from protobuf import descriptor_pb2
+      from google.protobuf import descriptor_pb2
       service_proto = descriptor_pb2.ServiceDescriptorProto()
       self.containing_service.CopyToProto(service_proto)
       proto.CopyFrom(service_proto.method[self.index])
@@ -978,7 +978,7 @@ class FileDescriptor(DescriptorBase):
 
   Note that :attr:`enum_types_by_name`, :attr:`extensions_by_name`, and
   :attr:`dependencies` fields are only set by the
-  :py:mod:`protobuf.message_factory` module, and not by the generated
+  :py:mod:`google.protobuf.message_factory` module, and not by the generated
   proto code.
 
   Attributes:
@@ -1037,7 +1037,7 @@ class FileDescriptor(DescriptorBase):
         options, serialized_options, 'FileOptions')
 
     if pool is None:
-      from protobuf import descriptor_pool
+      from google.protobuf import descriptor_pool
       pool = descriptor_pool.Default()
     self.pool = pool
     self.message_types_by_name = {}
@@ -1140,7 +1140,7 @@ def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True,
     # definition in the C++ descriptor pool. To do this, we build a
     # FileDescriptorProto with the same definition as this descriptor and build
     # it into the pool.
-    from protobuf import descriptor_pb2
+    from google.protobuf import descriptor_pb2
     file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
     file_descriptor_proto.message_type.add().MergeFrom(desc_proto)
 
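The modified-file hunks in this commit all apply the same mechanical change: imports and full type names that referred to a flat `protobuf` package now use the canonical `google.protobuf` namespace of the vendored copy. A rough sketch of that rewrite as a script, purely illustrative (the glob, regex, and in-place editing are assumptions, not how the commit was necessarily produced):

    # Hypothetical rewrite helper: prefix bare `protobuf` imports with `google.`.
    # String literals such as 'protobuf.Any' were updated in the same spirit,
    # but are not covered by this simple regex.
    import pathlib
    import re

    IMPORT_RE = re.compile(r"\b(from|import) protobuf([.\s])")

    def rewrite(path: pathlib.Path) -> None:
        text = path.read_text()
        fixed = IMPORT_RE.sub(lambda m: f"{m.group(1)} google.protobuf{m.group(2)}", text)
        if fixed != text:
            path.write_text(fixed)

    for py_file in pathlib.Path("lib/google/protobuf").rglob("*.py"):
        rewrite(py_file)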
										
											
(File diff suppressed because one or more lines are too long)
@@ -51,7 +51,7 @@ module in order to create a protocol buffer class that can be encoded and
 decoded.
 
 If you want to get a Python class for the specified proto, use the
-helper functions inside protobuf.message_factory
+helper functions inside google.protobuf.message_factory
 directly instead of this class.
 """
 
@@ -60,9 +60,9 @@ __author__ = 'matthewtoia@google.com (Matt Toia)'
 import collections
 import warnings
 
-from protobuf import descriptor
-from protobuf import descriptor_database
-from protobuf import text_encoding
+from google.protobuf import descriptor
+from google.protobuf import descriptor_database
+from google.protobuf import text_encoding
 
 
 _USE_C_DESCRIPTORS = descriptor._USE_C_DESCRIPTORS  # pylint: disable=protected-access
@@ -213,7 +213,7 @@ class DescriptorPool(object):
     """
 
     # pylint: disable=g-import-not-at-top
-    from protobuf import descriptor_pb2
+    from google.protobuf import descriptor_pb2
     file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
         serialized_file_desc_proto)
     file_desc = self._ConvertFileProtoToFileDescriptor(file_desc_proto)
							
								
								
									
lib/google/protobuf/duration_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/duration.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _DURATION._serialized_start=51 | ||||||
|  |   _DURATION._serialized_end=93 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/empty_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/empty.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _EMPTY._serialized_start=48 | ||||||
|  |   _EMPTY._serialized_end=55 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/field_mask_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/field_mask.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _FIELDMASK._serialized_start=53 | ||||||
|  |   _FIELDMASK._serialized_end=79 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
@@ -37,7 +37,7 @@ import warnings
 
 try:
   # pylint: disable=g-import-not-at-top
-  from protobuf.internal import _api_implementation
+  from google.protobuf.internal import _api_implementation
   # The compile-time constants in the _api_implementation module can be used to
   # switch to a certain implementation of the Python API at build time.
   _api_version = _api_implementation.api_version
@@ -82,7 +82,7 @@ try:
   # this boolean outside of this module.
   #
   # pylint: disable=g-import-not-at-top,unused-import
-  from protobuf import enable_deterministic_proto_serialization
+  from google.protobuf import enable_deterministic_proto_serialization
   _python_deterministic_proto_serialization = True
 except ImportError:
   _python_deterministic_proto_serialization = False
@@ -37,10 +37,10 @@ in generated code.
 
 __author__ = 'jieluo@google.com (Jie Luo)'
 
-from protobuf.internal import enum_type_wrapper
-from protobuf import message as _message
-from protobuf import reflection as _reflection
-from protobuf import symbol_database as _symbol_database
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
 
 _sym_db = _symbol_database.Default()
 
@@ -117,8 +117,8 @@ def BuildServices(file_des, module_name, module):
     module: Generated _pb2 module
   """
   # pylint: disable=g-import-not-at-top
-  from protobuf import service as _service
-  from protobuf import service_reflection
+  from google.protobuf import service as _service
+  from google.protobuf import service_reflection
   # pylint: enable=g-import-not-at-top
   for (name, service) in file_des.services_by_name.items():
     module[name] = service_reflection.GeneratedServiceType(
@@ -83,10 +83,10 @@ __author__ = 'kenton@google.com (Kenton Varda)'
 import math
 import struct
 
-from protobuf.internal import containers
-from protobuf.internal import encoder
-from protobuf.internal import wire_format
-from protobuf import message
+from google.protobuf.internal import containers
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import message
 
 
 # This is not for optimization, but rather to avoid conflicts with local
@@ -68,7 +68,7 @@ __author__ = 'kenton@google.com (Kenton Varda)'
 
 import struct
 
-from protobuf.internal import wire_format
+from google.protobuf.internal import wire_format
 
 
 # This will overflow and thus become IEEE-754 "infinity".  We would use
@@ -31,8 +31,8 @@
 """Contains _ExtensionDict class to represent extensions.
 """
 
-from protobuf.internal import type_checkers
-from protobuf.descriptor import FieldDescriptor
+from google.protobuf.internal import type_checkers
+from google.protobuf.descriptor import FieldDescriptor
 
 
 def _VerifyExtensionHandle(message, extension_handle):
@@ -56,22 +56,22 @@ import sys
 import weakref
 
 # We use "as" to avoid name collisions with variables.
-from protobuf.internal import api_implementation
-from protobuf.internal import containers
-from protobuf.internal import decoder
-from protobuf.internal import encoder
-from protobuf.internal import enum_type_wrapper
-from protobuf.internal import extension_dict
-from protobuf.internal import message_listener as message_listener_mod
-from protobuf.internal import type_checkers
-from protobuf.internal import well_known_types
-from protobuf.internal import wire_format
-from protobuf import descriptor as descriptor_mod
-from protobuf import message as message_mod
-from protobuf import text_format
+from google.protobuf.internal import api_implementation
+from google.protobuf.internal import containers
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf.internal import extension_dict
+from google.protobuf.internal import message_listener as message_listener_mod
+from google.protobuf.internal import type_checkers
+from google.protobuf.internal import well_known_types
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor as descriptor_mod
+from google.protobuf import message as message_mod
+from google.protobuf import text_format
 
 _FieldDescriptor = descriptor_mod.FieldDescriptor
-_AnyFullTypeName = 'protobuf.Any'
+_AnyFullTypeName = 'google.protobuf.Any'
 _ExtensionDict = extension_dict._ExtensionDict
 
 class GeneratedProtocolMessageType(type):
@@ -952,7 +952,7 @@ def _InternalUnpackAny(msg):
   # To make Any work with custom factories, use the message factory of the
   # parent message.
   # pylint: disable=g-import-not-at-top
-  from protobuf import symbol_database
+  from google.protobuf import symbol_database
   factory = symbol_database.Default()
 
   type_url = msg.type_url
@@ -48,10 +48,10 @@ __author__ = 'robinson@google.com (Will Robinson)'
 import ctypes
 import numbers
 
-from protobuf.internal import decoder
-from protobuf.internal import encoder
-from protobuf.internal import wire_format
-from protobuf import descriptor
+from google.protobuf.internal import decoder
+from google.protobuf.internal import encoder
+from google.protobuf.internal import wire_format
+from google.protobuf import descriptor
 
 _FieldDescriptor = descriptor.FieldDescriptor
 
@@ -44,7 +44,7 @@ import calendar
 import collections.abc
 import datetime
 
-from protobuf.descriptor import FieldDescriptor
+from google.protobuf.descriptor import FieldDescriptor
 
 _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
 _NANOS_PER_SECOND = 1000000000
@@ -869,10 +869,10 @@ collections.abc.MutableSequence.register(ListValue)
 
 
 WKTBASES = {
-    'protobuf.Any': Any,
-    'protobuf.Duration': Duration,
-    'protobuf.FieldMask': FieldMask,
-    'protobuf.ListValue': ListValue,
-    'protobuf.Struct': Struct,
-    'protobuf.Timestamp': Timestamp,
+    'google.protobuf.Any': Any,
+    'google.protobuf.Duration': Duration,
+    'google.protobuf.FieldMask': FieldMask,
+    'google.protobuf.ListValue': ListValue,
+    'google.protobuf.Struct': Struct,
+    'google.protobuf.Timestamp': Timestamp,
 }
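WKTBASES maps descriptor full names to the well-known-type mixin classes, so the keys must match the `google.protobuf.*` full names used by the vendored generated modules. A small check of the expected behaviour, assuming the vendored package is importable as `google.protobuf` (see the `lib/google/__init__.py` shim above):

    # Assumed behaviour check, not part of the diff: the Any mixin's Pack/Is/
    # Unpack helpers work against the vendored well-known types.
    from google.protobuf import any_pb2, timestamp_pb2

    ts = timestamp_pb2.Timestamp(seconds=1)
    box = any_pb2.Any()
    box.Pack(ts)  # type_url ends with "google.protobuf.Timestamp"

    unpacked = timestamp_pb2.Timestamp()
    assert box.Is(timestamp_pb2.Timestamp.DESCRIPTOR)
    assert box.Unpack(unpacked) and unpacked.seconds == 1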
@@ -33,8 +33,8 @@
 __author__ = 'robinson@google.com (Will Robinson)'
 
 import struct
-from protobuf import descriptor
-from protobuf import message
+from google.protobuf import descriptor
+from google.protobuf import message
 
 
 TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
@@ -51,9 +51,9 @@ from operator import methodcaller
 import re
 import sys
 
-from protobuf.internal import type_checkers
-from protobuf import descriptor
-from protobuf import symbol_database
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import symbol_database
 
 
 _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
@@ -280,7 +280,7 @@ class _Printer(object):
     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
       if self.use_integers_for_enums:
         return value
-      if field.enum_type.full_name == 'protobuf.NullValue':
+      if field.enum_type.full_name == 'google.protobuf.NullValue':
         return None
       enum_value = field.enum_type.values_by_number.get(value, None)
       if enum_value is not None:
@@ -564,11 +564,11 @@ class _Parser(object):
 
         if value is None:
           if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
-              and field.message_type.full_name == 'protobuf.Value'):
+              and field.message_type.full_name == 'google.protobuf.Value'):
             sub_message = getattr(message, field.name)
             sub_message.null_value = 0
           elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM
-                and field.enum_type.full_name == 'protobuf.NullValue'):
+                and field.enum_type.full_name == 'google.protobuf.NullValue'):
             setattr(message, field.name, 0)
           else:
             message.ClearField(field.name)
@@ -590,7 +590,7 @@ class _Parser(object):
               sub_message = getattr(message, field.name).add()
               # None is a null_value in Value.
               if (item is None and
-                  sub_message.DESCRIPTOR.full_name != 'protobuf.Value'):
+                  sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
                 raise ParseError('null is not allowed to be used as an element'
                                  ' in a repeated field at {0}.{1}[{2}]'.format(
                                      path, name, index))
@@ -895,18 +895,18 @@ def _ConvertBool(value, require_str):
   return value
 
 _WKTJSONMETHODS = {
-    'protobuf.Any': ['_AnyMessageToJsonObject',
+    'google.protobuf.Any': ['_AnyMessageToJsonObject',
                             '_ConvertAnyMessage'],
-    'protobuf.Duration': ['_GenericMessageToJsonObject',
+    'google.protobuf.Duration': ['_GenericMessageToJsonObject',
                                  '_ConvertGenericMessage'],
-    'protobuf.FieldMask': ['_GenericMessageToJsonObject',
+    'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
                                   '_ConvertGenericMessage'],
-    'protobuf.ListValue': ['_ListValueMessageToJsonObject',
+    'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
                                   '_ConvertListValueMessage'],
-    'protobuf.Struct': ['_StructMessageToJsonObject',
+    'google.protobuf.Struct': ['_StructMessageToJsonObject',
                                '_ConvertStructMessage'],
-    'protobuf.Timestamp': ['_GenericMessageToJsonObject',
+    'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
                                   '_ConvertGenericMessage'],
-    'protobuf.Value': ['_ValueMessageToJsonObject',
+    'google.protobuf.Value': ['_ValueMessageToJsonObject',
                               '_ConvertValueMessage']
 }
@@ -74,7 +74,7 @@ class Message(object):
 
   __slots__ = []
 
-  #: The :class:`protobuf.descriptor.Descriptor` for this message type.
+  #: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
   DESCRIPTOR = None
 
   def __deepcopy__(self, memo=None):
@@ -419,6 +419,6 @@ class Message(object):
 
 def _InternalConstructMessage(full_name):
   """Constructs a nested message."""
-  from protobuf import symbol_database  # pylint:disable=g-import-not-at-top
+  from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top
 
   return symbol_database.Default().GetSymbol(full_name)()
@@ -39,14 +39,14 @@ my_proto_instance = message_classes['some.proto.package.MessageName']()
 
 __author__ = 'matthewtoia@google.com (Matt Toia)'
 
-from protobuf.internal import api_implementation
-from protobuf import descriptor_pool
-from protobuf import message
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message
 
 if api_implementation.Type() == 'cpp':
-  from protobuf.pyext import cpp_message as message_impl
+  from google.protobuf.pyext import cpp_message as message_impl
 else:
-  from protobuf.internal import python_message as message_impl
+  from google.protobuf.internal import python_message as message_impl
 
 
 # The type of all Message classes.
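This hunk keeps the existing switch between the C++ accelerated backend (`google.protobuf.pyext`) and the pure-Python one (`google.protobuf.internal.python_message`). If the vendored tree does not ship the compiled `_message` extension, the pure-Python backend can be forced explicitly; a small, assumed example using protobuf's standard environment variable:

    # Select the pure-Python implementation before importing any google.protobuf module.
    import os
    os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"

    from google.protobuf.internal import api_implementation
    print(api_implementation.Type())  # expected to report "python"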
@@ -34,9 +34,9 @@ from collections import OrderedDict
 import hashlib
 import os
 
-from protobuf import descriptor_pb2
-from protobuf import descriptor
-from protobuf import message_factory
+from google.protobuf import descriptor_pb2
+from google.protobuf import descriptor
+from google.protobuf import message_factory
 
 
 def _GetMessageFromFactory(factory, full_name):
@@ -36,7 +36,7 @@ Descriptor objects at runtime backed by the protocol buffer C++ API.
 
 __author__ = 'tibell@google.com (Johan Tibell)'
 
-from protobuf.pyext import _message
+from google.protobuf.pyext import _message
 
 
 class GeneratedProtocolMessageType(_message.MessageMeta):
@@ -48,8 +48,8 @@ this file*.
 __author__ = 'robinson@google.com (Will Robinson)'
 
 
-from protobuf import message_factory
-from protobuf import symbol_database
+from google.protobuf import message_factory
+from google.protobuf import symbol_database
 
 # The type of all Message classes.
 # Part of the public interface, but normally only used by message factories.
							
								
								
									
lib/google/protobuf/source_context_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/source_context.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SOURCECONTEXT._serialized_start=57 | ||||||
|  |   _SOURCECONTEXT._serialized_end=91 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/struct_pb2.py (new normal file, +36 lines)
							|  | @ -0,0 +1,36 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/struct.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _STRUCT_FIELDSENTRY._options = None | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_options = b'8\001' | ||||||
|  |   _NULLVALUE._serialized_start=474 | ||||||
|  |   _NULLVALUE._serialized_end=501 | ||||||
|  |   _STRUCT._serialized_start=50 | ||||||
|  |   _STRUCT._serialized_end=182 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_start=113 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_end=182 | ||||||
|  |   _VALUE._serialized_start=185 | ||||||
|  |   _VALUE._serialized_end=419 | ||||||
|  |   _LISTVALUE._serialized_start=421 | ||||||
|  |   _LISTVALUE._serialized_end=472 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
@@ -58,9 +58,9 @@ Example usage::
 """
 
 
-from protobuf.internal import api_implementation
-from protobuf import descriptor_pool
-from protobuf import message_factory
+from google.protobuf.internal import api_implementation
+from google.protobuf import descriptor_pool
+from google.protobuf import message_factory
 
 
 class SymbolDatabase(message_factory.MessageFactory):
@@ -72,7 +72,7 @@ class SymbolDatabase(message_factory.MessageFactory):
     Calls to GetSymbol() and GetMessages() will return messages registered here.
 
     Args:
-      message: A :class:`protobuf.message.Message` subclass (or
+      message: A :class:`google.protobuf.message.Message` subclass (or
        instance); its descriptor will be registered.
 
     Returns:
@@ -49,10 +49,10 @@ import io
 import math
 import re
 
-from protobuf.internal import decoder
-from protobuf.internal import type_checkers
-from protobuf import descriptor
-from protobuf import text_encoding
+from google.protobuf.internal import decoder
+from google.protobuf.internal import type_checkers
+from google.protobuf import descriptor
+from google.protobuf import text_encoding
 
 # pylint: disable=g-import-not-at-top
 __all__ = ['MessageToString', 'Parse', 'PrintMessage', 'PrintField',
@@ -65,7 +65,7 @@ _INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
 _FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?$', re.IGNORECASE)
 _FLOAT_NAN = re.compile('nanf?$', re.IGNORECASE)
 _QUOTES = frozenset(("'", '"'))
-_ANY_FULL_TYPE_NAME = 'protobuf.Any'
+_ANY_FULL_TYPE_NAME = 'google.protobuf.Any'
 
 
 class Error(Exception):
@@ -300,9 +300,9 @@ def _BuildMessageFromTypeName(type_name, descriptor_pool):
   """
   # pylint: disable=g-import-not-at-top
   if descriptor_pool is None:
-    from protobuf import descriptor_pool as pool_mod
+    from google.protobuf import descriptor_pool as pool_mod
     descriptor_pool = pool_mod.Default()
-  from protobuf import symbol_database
+  from google.protobuf import symbol_database
   database = symbol_database.Default()
   try:
     message_descriptor = descriptor_pool.FindMessageTypeByName(type_name)
@@ -391,7 +391,7 @@ class _Printer(object):
     self.force_colon = force_colon
 
   def _TryPrintAsAnyMessage(self, message):
-    """Serializes if message is a protobuf.Any field."""
+    """Serializes if message is a google.protobuf.Any field."""
     if '/' not in message.type_url:
       return False
     packed_message = _BuildMessageFromTypeName(message.TypeName(),
@@ -977,7 +977,7 @@
 
 
   def _ConsumeAnyTypeUrl(self, tokenizer):
-    """Consumes a protobuf.Any type URL and returns the type name."""
+    """Consumes a google.protobuf.Any type URL and returns the type name."""
     # Consume "type.googleapis.com/".
     prefix = [tokenizer.ConsumeIdentifier()]
     tokenizer.Consume('.')
							
								
								
									
lib/google/protobuf/timestamp_pb2.py (new normal file, +26 lines)
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/timestamp.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _TIMESTAMP._serialized_start=52 | ||||||
|  |   _TIMESTAMP._serialized_end=95 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
lib/google/protobuf/type_pb2.py (new normal file, +42 lines)
							|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/type.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 | ||||||
|  | from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SYNTAX._serialized_start=1413 | ||||||
|  |   _SYNTAX._serialized_end=1459 | ||||||
|  |   _TYPE._serialized_start=113 | ||||||
|  |   _TYPE._serialized_end=328 | ||||||
|  |   _FIELD._serialized_start=331 | ||||||
|  |   _FIELD._serialized_end=1056 | ||||||
|  |   _FIELD_KIND._serialized_start=610 | ||||||
|  |   _FIELD_KIND._serialized_end=938 | ||||||
|  |   _FIELD_CARDINALITY._serialized_start=940 | ||||||
|  |   _FIELD_CARDINALITY._serialized_end=1056 | ||||||
|  |   _ENUM._serialized_start=1059 | ||||||
|  |   _ENUM._serialized_end=1265 | ||||||
|  |   _ENUMVALUE._serialized_start=1267 | ||||||
|  |   _ENUMVALUE._serialized_end=1350 | ||||||
|  |   _OPTION._serialized_start=1352 | ||||||
|  |   _OPTION._serialized_end=1411 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
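Note: the generated type_pb2 module above exposes the Type/Field/Enum messages plus the top-level Syntax enum. A minimal, illustrative sketch of using it (names are made up; assumes the vendored google.protobuf package is importable):

from google.protobuf import type_pb2

# Top-level enum values are exported as module-level constants by the builder.
assert type_pb2.SYNTAX_PROTO3 == 1

# Describe a hypothetical message with a single optional string field.
t = type_pb2.Type(name='example.Greeting', syntax=type_pb2.SYNTAX_PROTO3)
field = t.fields.add()
field.name = 'text'
field.kind = type_pb2.Field.TYPE_STRING                  # nested Kind enum value
field.cardinality = type_pb2.Field.CARDINALITY_OPTIONAL  # nested Cardinality enum value
field.number = 1
print(t)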
|  | @ -2,10 +2,10 @@ | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # source: google/protobuf/util/json_format.proto | # source: google/protobuf/util/json_format.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| from protobuf.internal import builder as _builder | from google.protobuf.internal import builder as _builder | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import descriptor_pool as _descriptor_pool | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
| _sym_db = _symbol_database.Default() | _sym_db = _symbol_database.Default() | ||||||
|  | @ -16,7 +16,7 @@ _sym_db = _symbol_database.Default() | ||||||
| DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 
\x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') | ||||||
| 
 | 
 | ||||||
| _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
| _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'protobuf.util.json_format_pb2', globals()) | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) | ||||||
| if _descriptor._USE_C_DESCRIPTORS == False: | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|   TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) |   TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) | ||||||
| 
 | 
 | ||||||
							
								
								
									
129  lib/google/protobuf/util/json_format_proto3_pb2.py  Normal file  (File diff suppressed because one or more lines are too long)
42  lib/google/protobuf/wrappers_pb2.py  Normal file
|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/wrappers.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _DOUBLEVALUE._serialized_start=51 | ||||||
|  |   _DOUBLEVALUE._serialized_end=79 | ||||||
|  |   _FLOATVALUE._serialized_start=81 | ||||||
|  |   _FLOATVALUE._serialized_end=108 | ||||||
|  |   _INT64VALUE._serialized_start=110 | ||||||
|  |   _INT64VALUE._serialized_end=137 | ||||||
|  |   _UINT64VALUE._serialized_start=139 | ||||||
|  |   _UINT64VALUE._serialized_end=167 | ||||||
|  |   _INT32VALUE._serialized_start=169 | ||||||
|  |   _INT32VALUE._serialized_end=196 | ||||||
|  |   _UINT32VALUE._serialized_start=198 | ||||||
|  |   _UINT32VALUE._serialized_end=226 | ||||||
|  |   _BOOLVALUE._serialized_start=228 | ||||||
|  |   _BOOLVALUE._serialized_end=254 | ||||||
|  |   _STRINGVALUE._serialized_start=256 | ||||||
|  |   _STRINGVALUE._serialized_end=284 | ||||||
|  |   _BYTESVALUE._serialized_start=286 | ||||||
|  |   _BYTESVALUE._serialized_end=313 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
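Note: the wrapper types are plain one-field messages, so the generated module can be exercised directly; a small round-trip sketch (assumes the vendored google.protobuf package is importable):

from google.protobuf import wrappers_pb2

# As message fields, wrappers distinguish "unset" from the default value;
# standalone they behave like ordinary single-field messages.
v = wrappers_pb2.Int32Value(value=42)
data = v.SerializeToString()

parsed = wrappers_pb2.Int32Value()
parsed.ParseFromString(data)
assert parsed.value == 42

s = wrappers_pb2.StringValue(value='hello')
print(s.value)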
							
								
								
									
33  lib/protobuf/__init__.py  Normal file
|  | @ -0,0 +1,33 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | # Copyright 2007 Google Inc. All Rights Reserved. | ||||||
|  | 
 | ||||||
|  | __version__ = '3.20.1' | ||||||
							
								
								
									
26  lib/protobuf/any_pb2.py  Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/any.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _ANY._serialized_start=46 | ||||||
|  |   _ANY._serialized_end=84 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
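Note: Any stores an arbitrary message as a type_url plus serialized bytes, and the Python well-known-type helpers Pack/Unpack/Is wrap that. A brief sketch (duration_pb2 is used here only as a convenient payload type):

from google.protobuf import any_pb2
from google.protobuf import duration_pb2

payload = duration_pb2.Duration(seconds=90)

holder = any_pb2.Any()
holder.Pack(payload)                # fills in type_url and value
print(holder.type_url)              # type.googleapis.com/google.protobuf.Duration

out = duration_pb2.Duration()
if holder.Is(duration_pb2.Duration.DESCRIPTOR):
    holder.Unpack(out)
assert out.seconds == 90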
							
								
								
									
32  lib/protobuf/api_pb2.py  Normal file
|  | @ -0,0 +1,32 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/api.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 | ||||||
|  | from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _API._serialized_start=113 | ||||||
|  |   _API._serialized_end=370 | ||||||
|  |   _METHOD._serialized_start=373 | ||||||
|  |   _METHOD._serialized_end=586 | ||||||
|  |   _MIXIN._serialized_start=588 | ||||||
|  |   _MIXIN._serialized_end=623 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
0  lib/protobuf/compiler/__init__.py  Normal file
35  lib/protobuf/compiler/plugin_pb2.py  Normal file
|  | @ -0,0 +1,35 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/compiler/plugin.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb' | ||||||
|  |   _VERSION._serialized_start=101 | ||||||
|  |   _VERSION._serialized_end=171 | ||||||
|  |   _CODEGENERATORREQUEST._serialized_start=174 | ||||||
|  |   _CODEGENERATORREQUEST._serialized_end=360 | ||||||
|  |   _CODEGENERATORRESPONSE._serialized_start=363 | ||||||
|  |   _CODEGENERATORRESPONSE._serialized_end=684 | ||||||
|  |   _CODEGENERATORRESPONSE_FILE._serialized_start=499 | ||||||
|  |   _CODEGENERATORRESPONSE_FILE._serialized_end=626 | ||||||
|  |   _CODEGENERATORRESPONSE_FEATURE._serialized_start=628 | ||||||
|  |   _CODEGENERATORRESPONSE_FEATURE._serialized_end=684 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
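Note: plugin.proto defines the protoc plugin contract: protoc writes a serialized CodeGeneratorRequest to the plugin's stdin and reads a CodeGeneratorResponse from its stdout. A minimal, hypothetical plugin skeleton on top of the generated module (file names and output content are illustrative):

#!/usr/bin/env python3
"""Toy protoc plugin: emits one text file per .proto listed in the request."""
import sys

from google.protobuf.compiler import plugin_pb2


def main() -> None:
    # protoc sends the serialized request on stdin.
    request = plugin_pb2.CodeGeneratorRequest.FromString(sys.stdin.buffer.read())

    response = plugin_pb2.CodeGeneratorResponse()
    # Advertise proto3-optional support.
    response.supported_features = plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL

    for name in request.file_to_generate:
        out = response.file.add()
        out.name = name + '.listing.txt'
        out.content = 'generated from %s\n' % name

    # protoc reads the serialized response from stdout.
    sys.stdout.buffer.write(response.SerializeToString())


if __name__ == '__main__':
    main()

Such a script would be wired up as, for example, protoc --plugin=protoc-gen-toy=./toy_plugin.py --toy_out=out/ foo.proto (plugin name and paths are illustrative).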
							
								
								
									
1224  lib/protobuf/descriptor.py  Normal file  (File diff suppressed because it is too large)
177  lib/protobuf/descriptor_database.py  Normal file
|  | @ -0,0 +1,177 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Provides a container for DescriptorProtos.""" | ||||||
|  | 
 | ||||||
|  | __author__ = 'matthewtoia@google.com (Matt Toia)' | ||||||
|  | 
 | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Error(Exception): | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DescriptorDatabaseConflictingDefinitionError(Error): | ||||||
|  |   """Raised when a proto is added with the same name & different descriptor.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DescriptorDatabase(object): | ||||||
|  |   """A container accepting FileDescriptorProtos and maps DescriptorProtos.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self): | ||||||
|  |     self._file_desc_protos_by_file = {} | ||||||
|  |     self._file_desc_protos_by_symbol = {} | ||||||
|  | 
 | ||||||
|  |   def Add(self, file_desc_proto): | ||||||
|  |     """Adds the FileDescriptorProto and its types to this database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       file_desc_proto: The FileDescriptorProto to add. | ||||||
|  |     Raises: | ||||||
|  |       DescriptorDatabaseConflictingDefinitionError: if an attempt is made to | ||||||
|  |         add a proto with the same name but different definition than an | ||||||
|  |         existing proto in the database. | ||||||
|  |     """ | ||||||
|  |     proto_name = file_desc_proto.name | ||||||
|  |     if proto_name not in self._file_desc_protos_by_file: | ||||||
|  |       self._file_desc_protos_by_file[proto_name] = file_desc_proto | ||||||
|  |     elif self._file_desc_protos_by_file[proto_name] != file_desc_proto: | ||||||
|  |       raise DescriptorDatabaseConflictingDefinitionError( | ||||||
|  |           '%s already added, but with different descriptor.' % proto_name) | ||||||
|  |     else: | ||||||
|  |       return | ||||||
|  | 
 | ||||||
|  |     # Add all the top-level descriptors to the index. | ||||||
|  |     package = file_desc_proto.package | ||||||
|  |     for message in file_desc_proto.message_type: | ||||||
|  |       for name in _ExtractSymbols(message, package): | ||||||
|  |         self._AddSymbol(name, file_desc_proto) | ||||||
|  |     for enum in file_desc_proto.enum_type: | ||||||
|  |       self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto) | ||||||
|  |       for enum_value in enum.value: | ||||||
|  |         self._file_desc_protos_by_symbol[ | ||||||
|  |             '.'.join((package, enum_value.name))] = file_desc_proto | ||||||
|  |     for extension in file_desc_proto.extension: | ||||||
|  |       self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto) | ||||||
|  |     for service in file_desc_proto.service: | ||||||
|  |       self._AddSymbol(('.'.join((package, service.name))), file_desc_proto) | ||||||
|  | 
 | ||||||
|  |   def FindFileByName(self, name): | ||||||
|  |     """Finds the file descriptor proto by file name. | ||||||
|  | 
 | ||||||
|  |     Typically the file name is a relative path ending in a .proto file. The | ||||||
|  |     proto with the given name will have to have been added to this database | ||||||
|  |     using the Add method or else an error will be raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: The file name to find. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       The file descriptor proto matching the name. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError if no file by the given name was added. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return self._file_desc_protos_by_file[name] | ||||||
|  | 
 | ||||||
|  |   def FindFileContainingSymbol(self, symbol): | ||||||
|  |     """Finds the file descriptor proto containing the specified symbol. | ||||||
|  | 
 | ||||||
|  |     The symbol should be a fully qualified name including the file descriptor's | ||||||
|  |     package and any containing messages. Some examples: | ||||||
|  | 
 | ||||||
|  |     'some.package.name.Message' | ||||||
|  |     'some.package.name.Message.NestedEnum' | ||||||
|  |     'some.package.name.Message.some_field' | ||||||
|  | 
 | ||||||
|  |     The file descriptor proto containing the specified symbol must be added to | ||||||
|  |     this database using the Add method or else an error will be raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       symbol: The fully qualified symbol name. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       The file descriptor proto containing the symbol. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError if no file contains the specified symbol. | ||||||
|  |     """ | ||||||
|  |     try: | ||||||
|  |       return self._file_desc_protos_by_symbol[symbol] | ||||||
|  |     except KeyError: | ||||||
|  |       # Fields, enum values, and nested extensions are not in | ||||||
|  |       # _file_desc_protos_by_symbol. Try to find the top level | ||||||
|  |       # descriptor. Non-existent nested symbol under a valid top level | ||||||
|  |       # descriptor can also be found. The behavior is the same with | ||||||
|  |       # protobuf C++. | ||||||
|  |       top_level, _, _ = symbol.rpartition('.') | ||||||
|  |       try: | ||||||
|  |         return self._file_desc_protos_by_symbol[top_level] | ||||||
|  |       except KeyError: | ||||||
|  |         # Raise the original symbol as a KeyError for better diagnostics. | ||||||
|  |         raise KeyError(symbol) | ||||||
|  | 
 | ||||||
|  |   def FindFileContainingExtension(self, extendee_name, extension_number): | ||||||
|  |     # TODO(jieluo): implement this API. | ||||||
|  |     return None | ||||||
|  | 
 | ||||||
|  |   def FindAllExtensionNumbers(self, extendee_name): | ||||||
|  |     # TODO(jieluo): implement this API. | ||||||
|  |     return [] | ||||||
|  | 
 | ||||||
|  |   def _AddSymbol(self, name, file_desc_proto): | ||||||
|  |     if name in self._file_desc_protos_by_symbol: | ||||||
|  |       warn_msg = ('Conflict register for file "' + file_desc_proto.name + | ||||||
|  |                   '": ' + name + | ||||||
|  |                   ' is already defined in file "' + | ||||||
|  |                   self._file_desc_protos_by_symbol[name].name + '"') | ||||||
|  |       warnings.warn(warn_msg, RuntimeWarning) | ||||||
|  |     self._file_desc_protos_by_symbol[name] = file_desc_proto | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ExtractSymbols(desc_proto, package): | ||||||
|  |   """Pulls out all the symbols from a descriptor proto. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     desc_proto: The proto to extract symbols from. | ||||||
|  |     package: The package containing the descriptor type. | ||||||
|  | 
 | ||||||
|  |   Yields: | ||||||
|  |     The fully qualified name found in the descriptor. | ||||||
|  |   """ | ||||||
|  |   message_name = package + '.' + desc_proto.name if package else desc_proto.name | ||||||
|  |   yield message_name | ||||||
|  |   for nested_type in desc_proto.nested_type: | ||||||
|  |     for symbol in _ExtractSymbols(nested_type, message_name): | ||||||
|  |       yield symbol | ||||||
|  |   for enum_type in desc_proto.enum_type: | ||||||
|  |     yield '.'.join((message_name, enum_type.name)) | ||||||
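Note: DescriptorDatabase above is a passive index: callers feed it FileDescriptorProto messages and later look them up by file name or by fully qualified symbol. A small usage sketch with a hand-built proto (names are illustrative):

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_database

db = descriptor_database.DescriptorDatabase()

# Build a FileDescriptorProto by hand; normally these come from protoc output.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/greeting.proto'
file_proto.package = 'example'
message = file_proto.message_type.add()
message.name = 'Greeting'

db.Add(file_proto)

assert db.FindFileByName('example/greeting.proto') is file_proto
assert db.FindFileContainingSymbol('example.Greeting') is file_proto
# Unknown leaf symbols fall back to the containing top-level symbol.
assert db.FindFileContainingSymbol('example.Greeting.text') is file_proto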
							
								
								
									
1925  lib/protobuf/descriptor_pb2.py  Normal file  (File diff suppressed because one or more lines are too long)
1295  lib/protobuf/descriptor_pool.py  Normal file  (File diff suppressed because it is too large)
26  lib/protobuf/duration_pb2.py  Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/duration.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _DURATION._serialized_start=51 | ||||||
|  |   _DURATION._serialized_end=93 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
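Note: Duration is one of the well-known types that gets extra Python helper methods mixed in; a brief sketch converting to and from datetime.timedelta:

import datetime

from google.protobuf import duration_pb2

d = duration_pb2.Duration()
d.FromTimedelta(datetime.timedelta(minutes=2, milliseconds=500))
print(d.seconds, d.nanos)        # 120 500000000

assert d.ToTimedelta() == datetime.timedelta(seconds=120, milliseconds=500)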
							
								
								
									
26  lib/protobuf/empty_pb2.py  Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/empty.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _EMPTY._serialized_start=48 | ||||||
|  |   _EMPTY._serialized_end=55 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
26  lib/protobuf/field_mask_pb2.py  Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/field_mask.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _FIELDMASK._serialized_start=53 | ||||||
|  |   _FIELDMASK._serialized_end=79 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
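Note: FieldMask is just a repeated list of paths; the Python well-known-type helpers add conversion to and from the canonical JSON string form. A brief sketch:

from google.protobuf import field_mask_pb2

mask = field_mask_pb2.FieldMask(paths=['user.display_name', 'user.email'])

# The JSON form is a single comma-separated, camelCased string.
print(mask.ToJsonString())       # user.displayName,user.email

round_trip = field_mask_pb2.FieldMask()
round_trip.FromJsonString('user.displayName,user.email')
assert list(round_trip.paths) == ['user.display_name', 'user.email']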
							
								
								
									
0  lib/protobuf/internal/__init__.py  Normal file
BIN  lib/protobuf/internal/_api_implementation.cpython-310-x86_64-linux-gnu.so  Executable file  (Binary file not shown)
112  lib/protobuf/internal/api_implementation.py  Normal file
|  | @ -0,0 +1,112 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Determine which implementation of the protobuf API is used in this process. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | import os | ||||||
|  | import sys | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | try: | ||||||
|  |   # pylint: disable=g-import-not-at-top | ||||||
|  |   from google.protobuf.internal import _api_implementation | ||||||
|  |   # The compile-time constants in the _api_implementation module can be used to | ||||||
|  |   # switch to a certain implementation of the Python API at build time. | ||||||
|  |   _api_version = _api_implementation.api_version | ||||||
|  | except ImportError: | ||||||
|  |   _api_version = -1  # Unspecified by compiler flags. | ||||||
|  | 
 | ||||||
|  | if _api_version == 1: | ||||||
|  |   raise ValueError('api_version=1 is no longer supported.') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _default_implementation_type = ('cpp' if _api_version > 0 else 'python') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # This environment variable can be used to switch to a certain implementation | ||||||
|  | # of the Python API, overriding the compile-time constants in the | ||||||
|  | # _api_implementation module. Right now only 'python' and 'cpp' are valid | ||||||
|  | # values. Any other value will be ignored. | ||||||
|  | _implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', | ||||||
|  |                                  _default_implementation_type) | ||||||
|  | 
 | ||||||
|  | if _implementation_type != 'python': | ||||||
|  |   _implementation_type = 'cpp' | ||||||
|  | 
 | ||||||
|  | if 'PyPy' in sys.version and _implementation_type == 'cpp': | ||||||
|  |   warnings.warn('PyPy does not work yet with cpp protocol buffers. ' | ||||||
|  |                 'Falling back to the python implementation.') | ||||||
|  |   _implementation_type = 'python' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Detect if serialization should be deterministic by default | ||||||
|  | try: | ||||||
|  |   # The presence of this module in a build allows the proto implementation to | ||||||
|  |   # be upgraded merely via build deps. | ||||||
|  |   # | ||||||
|  |   # NOTE: Merely importing this automatically enables deterministic proto | ||||||
|  |   # serialization for C++ code, but we still need to export it as a boolean so | ||||||
|  |   # that we can do the same for `_implementation_type == 'python'`. | ||||||
|  |   # | ||||||
|  |   # NOTE2: It is possible for C++ code to enable deterministic serialization by | ||||||
|  |   # default _without_ affecting Python code, if the C++ implementation is not in | ||||||
|  |   # use by this module.  That is intended behavior, so we don't actually expose | ||||||
|  |   # this boolean outside of this module. | ||||||
|  |   # | ||||||
|  |   # pylint: disable=g-import-not-at-top,unused-import | ||||||
|  |   from google.protobuf import enable_deterministic_proto_serialization | ||||||
|  |   _python_deterministic_proto_serialization = True | ||||||
|  | except ImportError: | ||||||
|  |   _python_deterministic_proto_serialization = False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Usage of this function is discouraged. Clients shouldn't care which | ||||||
|  | # implementation of the API is in use. Note that there is no guarantee | ||||||
|  | # that differences between APIs will be maintained. | ||||||
|  | # Please don't use this function if possible. | ||||||
|  | def Type(): | ||||||
|  |   return _implementation_type | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SetType(implementation_type): | ||||||
|  |   """Never use! Only for protobuf benchmark.""" | ||||||
|  |   global _implementation_type | ||||||
|  |   _implementation_type = implementation_type | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # See comment on 'Type' above. | ||||||
|  | def Version(): | ||||||
|  |   return 2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # For internal use only | ||||||
|  | def IsPythonDefaultSerializationDeterministic(): | ||||||
|  |   return _python_deterministic_proto_serialization | ||||||
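Note: this module is normally consulted indirectly, but the selection can be observed, and the environment override described in the comments above only takes effect if it is set before google.protobuf is first imported. A short sketch:

import os

# Must be set before the first google.protobuf import to have any effect.
os.environ.setdefault('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', 'python')

from google.protobuf.internal import api_implementation

print(api_implementation.Type())      # 'python' or 'cpp'
print(api_implementation.Version())   # always 2 in this copy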
							
								
								
									
130  lib/protobuf/internal/builder.py  Normal file
|  | @ -0,0 +1,130 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Builds descriptors, message classes and services for generated _pb2.py. | ||||||
|  | 
 | ||||||
|  | This file is only called in python generated _pb2.py files. It builds | ||||||
|  | descriptors, message classes and services that users can directly use | ||||||
|  | in generated code. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'jieluo@google.com (Jie Luo)' | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import enum_type_wrapper | ||||||
|  | from google.protobuf import message as _message | ||||||
|  | from google.protobuf import reflection as _reflection | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BuildMessageAndEnumDescriptors(file_des, module): | ||||||
|  |   """Builds message and enum descriptors. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     file_des: FileDescriptor of the .proto file | ||||||
|  |     module: Generated _pb2 module | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def BuildNestedDescriptors(msg_des, prefix): | ||||||
|  |     for (name, nested_msg) in msg_des.nested_types_by_name.items(): | ||||||
|  |       module_name = prefix + name.upper() | ||||||
|  |       module[module_name] = nested_msg | ||||||
|  |       BuildNestedDescriptors(nested_msg, module_name + '_') | ||||||
|  |     for enum_des in msg_des.enum_types: | ||||||
|  |       module[prefix + enum_des.name.upper()] = enum_des | ||||||
|  | 
 | ||||||
|  |   for (name, msg_des) in file_des.message_types_by_name.items(): | ||||||
|  |     module_name = '_' + name.upper() | ||||||
|  |     module[module_name] = msg_des | ||||||
|  |     BuildNestedDescriptors(msg_des, module_name + '_') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BuildTopDescriptorsAndMessages(file_des, module_name, module): | ||||||
|  |   """Builds top level descriptors and message classes. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     file_des: FileDescriptor of the .proto file | ||||||
|  |     module_name: str, the name of generated _pb2 module | ||||||
|  |     module: Generated _pb2 module | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def BuildMessage(msg_des): | ||||||
|  |     create_dict = {} | ||||||
|  |     for (name, nested_msg) in msg_des.nested_types_by_name.items(): | ||||||
|  |       create_dict[name] = BuildMessage(nested_msg) | ||||||
|  |     create_dict['DESCRIPTOR'] = msg_des | ||||||
|  |     create_dict['__module__'] = module_name | ||||||
|  |     message_class = _reflection.GeneratedProtocolMessageType( | ||||||
|  |         msg_des.name, (_message.Message,), create_dict) | ||||||
|  |     _sym_db.RegisterMessage(message_class) | ||||||
|  |     return message_class | ||||||
|  | 
 | ||||||
|  |   # top level enums | ||||||
|  |   for (name, enum_des) in file_des.enum_types_by_name.items(): | ||||||
|  |     module['_' + name.upper()] = enum_des | ||||||
|  |     module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des) | ||||||
|  |     for enum_value in enum_des.values: | ||||||
|  |       module[enum_value.name] = enum_value.number | ||||||
|  | 
 | ||||||
|  |   # top level extensions | ||||||
|  |   for (name, extension_des) in file_des.extensions_by_name.items(): | ||||||
|  |     module[name.upper() + '_FIELD_NUMBER'] = extension_des.number | ||||||
|  |     module[name] = extension_des | ||||||
|  | 
 | ||||||
|  |   # services | ||||||
|  |   for (name, service) in file_des.services_by_name.items(): | ||||||
|  |     module['_' + name.upper()] = service | ||||||
|  | 
 | ||||||
|  |   # Build messages. | ||||||
|  |   for (name, msg_des) in file_des.message_types_by_name.items(): | ||||||
|  |     module[name] = BuildMessage(msg_des) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BuildServices(file_des, module_name, module): | ||||||
|  |   """Builds services classes and services stub class. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     file_des: FileDescriptor of the .proto file | ||||||
|  |     module_name: str, the name of generated _pb2 module | ||||||
|  |     module: Generated _pb2 module | ||||||
|  |   """ | ||||||
|  |   # pylint: disable=g-import-not-at-top | ||||||
|  |   from google.protobuf import service as _service | ||||||
|  |   from google.protobuf import service_reflection | ||||||
|  |   # pylint: enable=g-import-not-at-top | ||||||
|  |   for (name, service) in file_des.services_by_name.items(): | ||||||
|  |     module[name] = service_reflection.GeneratedServiceType( | ||||||
|  |         name, (_service.Service,), | ||||||
|  |         dict(DESCRIPTOR=service, __module__=module_name)) | ||||||
|  |     stub_name = name + '_Stub' | ||||||
|  |     module[stub_name] = service_reflection.GeneratedServiceStubType( | ||||||
|  |         stub_name, (module[name],), | ||||||
|  |         dict(DESCRIPTOR=service, __module__=module_name)) | ||||||
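As a hedged illustration of what these three helpers leave behind in a generated module's namespace (the .proto file, module name and message names below are invented for this sketch and are not part of the commit):

    # Suppose person_pb2 was generated from a file defining
    #   message Person { string name = 1; }   and   enum Color { RED = 0; }
    import person_pb2                          # hypothetical generated module

    msg = person_pb2.Person()                  # message class produced by BuildMessage()
    desc = person_pb2._PERSON                  # Descriptor stored by BuildMessageAndEnumDescriptors()
    color = person_pb2.Color                   # EnumTypeWrapper for the top-level enum
    assert person_pb2.RED == color.RED == 0    # enum values are also exported as module constants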
							
								
								
									
										710
								lib/protobuf/internal/containers.py
										Normal file
									
								
							|  | @ -0,0 +1,710 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains container classes to represent different protocol buffer types. | ||||||
|  | 
 | ||||||
|  | This file defines container classes which represent categories of protocol | ||||||
|  | buffer field types which need extra maintenance. Currently these categories | ||||||
|  | are: | ||||||
|  | 
 | ||||||
|  | -   Repeated scalar fields - These are all repeated fields which aren't | ||||||
|  |     composite (e.g. they are of simple types like int32, string, etc). | ||||||
|  | -   Repeated composite fields - Repeated fields which are composite. This | ||||||
|  |     includes groups and nested messages. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | import collections.abc | ||||||
|  | import copy | ||||||
|  | import pickle | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Iterable, | ||||||
|  |     Iterator, | ||||||
|  |     List, | ||||||
|  |     MutableMapping, | ||||||
|  |     MutableSequence, | ||||||
|  |     NoReturn, | ||||||
|  |     Optional, | ||||||
|  |     Sequence, | ||||||
|  |     TypeVar, | ||||||
|  |     Union, | ||||||
|  |     overload, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _T = TypeVar('_T') | ||||||
|  | _K = TypeVar('_K') | ||||||
|  | _V = TypeVar('_V') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BaseContainer(Sequence[_T]): | ||||||
|  |   """Base container class.""" | ||||||
|  | 
 | ||||||
|  |   # Minimizes memory usage and disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_message_listener', '_values'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, message_listener: Any) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The RepeatedScalarFieldContainer will call this object's | ||||||
|  |         Modified() method when it is modified. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._values = [] | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __getitem__(self, key: int) -> _T: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __getitem__(self, key: slice) -> List[_T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key): | ||||||
|  |     """Retrieves item by the specified key.""" | ||||||
|  |     return self._values[key] | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     """Returns the number of elements in the container.""" | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other: Any) -> bool: | ||||||
|  |     """Checks if another instance isn't equal to this one.""" | ||||||
|  |     # The concrete classes should define __eq__. | ||||||
|  |     return not self == other | ||||||
|  | 
 | ||||||
|  |   __hash__ = None | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def sort(self, *args, **kwargs) -> None: | ||||||
|  |     # Continue to support the old sort_function keyword argument. | ||||||
|  |     # This is expected to be a rare occurrence, so use LBYL to avoid | ||||||
|  |     # the overhead of actually catching KeyError. | ||||||
|  |     if 'sort_function' in kwargs: | ||||||
|  |       kwargs['cmp'] = kwargs.pop('sort_function') | ||||||
|  |     self._values.sort(*args, **kwargs) | ||||||
|  | 
 | ||||||
|  |   def reverse(self) -> None: | ||||||
|  |     self._values.reverse() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO(slebedev): Remove this. BaseContainer does *not* conform to | ||||||
|  | # MutableSequence, only its subclasses do. | ||||||
|  | collections.abc.MutableSequence.register(BaseContainer) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): | ||||||
|  |   """Simple, type-checked, list-like container for holding repeated scalars.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_type_checker'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       type_checker: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """Args: | ||||||
|  | 
 | ||||||
|  |       message_listener: A MessageListener implementation. The | ||||||
|  |       RepeatedScalarFieldContainer will call this object's Modified() method | ||||||
|  |       when it is modified. | ||||||
|  |       type_checker: A type_checkers.ValueChecker instance to run on elements | ||||||
|  |       inserted into this container. | ||||||
|  |     """ | ||||||
|  |     super().__init__(message_listener) | ||||||
|  |     self._type_checker = type_checker | ||||||
|  | 
 | ||||||
|  |   def append(self, value: _T) -> None: | ||||||
|  |     """Appends an item to the list. Similar to list.append().""" | ||||||
|  |     self._values.append(self._type_checker.CheckValue(value)) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def insert(self, key: int, value: _T) -> None: | ||||||
|  |     """Inserts the item at the specified position. Similar to list.insert().""" | ||||||
|  |     self._values.insert(key, self._type_checker.CheckValue(value)) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq: Iterable[_T]) -> None: | ||||||
|  |     """Extends by appending the given iterable. Similar to list.extend().""" | ||||||
|  |     if elem_seq is None: | ||||||
|  |       return | ||||||
|  |     try: | ||||||
|  |       elem_seq_iter = iter(elem_seq) | ||||||
|  |     except TypeError: | ||||||
|  |       if not elem_seq: | ||||||
|  |         # silently ignore falsy inputs :-/. | ||||||
|  |         # TODO(ptucker): Deprecate this behavior. b/18413862 | ||||||
|  |         return | ||||||
|  |       raise | ||||||
|  | 
 | ||||||
|  |     new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] | ||||||
|  |     if new_values: | ||||||
|  |       self._values.extend(new_values) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def MergeFrom( | ||||||
|  |       self, | ||||||
|  |       other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], | ||||||
|  |   ) -> None: | ||||||
|  |     """Appends the contents of another repeated field of the same type to this | ||||||
|  |     one. We do not check the types of the individual fields. | ||||||
|  |     """ | ||||||
|  |     self._values.extend(other) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def remove(self, elem: _T): | ||||||
|  |     """Removes an item from the list. Similar to list.remove().""" | ||||||
|  |     self._values.remove(elem) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def pop(self, key: Optional[int] = -1) -> _T: | ||||||
|  |     """Removes and returns an item at a given index. Similar to list.pop().""" | ||||||
|  |     value = self._values[key] | ||||||
|  |     self.__delitem__(key) | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: int, value: _T) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: slice, value: Iterable[_T]) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value) -> None: | ||||||
|  |     """Sets the item on the specified position.""" | ||||||
|  |     if isinstance(key, slice): | ||||||
|  |       if key.step is not None: | ||||||
|  |         raise ValueError('Extended slices not supported') | ||||||
|  |       self._values[key] = map(self._type_checker.CheckValue, value) | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |     else: | ||||||
|  |       self._values[key] = self._type_checker.CheckValue(value) | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: Union[int, slice]) -> None: | ||||||
|  |     """Deletes the item at the specified position.""" | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other: Any) -> bool: | ||||||
|  |     """Compares the current instance with another one.""" | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # Special case for the same type which should be common and fast. | ||||||
|  |     if isinstance(other, self.__class__): | ||||||
|  |       return other._values == self._values | ||||||
|  |     # We are presumably comparing against some other sequence type. | ||||||
|  |     return other == self._values | ||||||
|  | 
 | ||||||
|  |   def __deepcopy__( | ||||||
|  |       self, | ||||||
|  |       unused_memo: Any = None, | ||||||
|  |   ) -> 'RepeatedScalarFieldContainer[_T]': | ||||||
|  |     clone = RepeatedScalarFieldContainer( | ||||||
|  |         copy.deepcopy(self._message_listener), self._type_checker) | ||||||
|  |     clone.MergeFrom(self) | ||||||
|  |     return clone | ||||||
|  | 
 | ||||||
|  |   def __reduce__(self, **kwargs) -> NoReturn: | ||||||
|  |     raise pickle.PickleError( | ||||||
|  |         "Can't pickle repeated scalar fields, convert to list first") | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
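To make the type checking and pickling behavior above concrete, a small hedged sketch; sample_pb2.Sample and its `repeated int32 numbers = 1;` field are assumptions for illustration only:

    import pickle
    import sample_pb2                      # hypothetical generated module

    msg = sample_pb2.Sample()
    msg.numbers.append(3)                  # CheckValue() accepts ints
    msg.numbers.extend([4, 5])             # every element is type-checked
    msg.numbers[0] = 7                     # item assignment also runs CheckValue()

    try:
        msg.numbers.append('not an int')   # rejected by the ValueChecker
    except TypeError:
        pass

    try:
        pickle.dumps(msg.numbers)          # __reduce__ above forbids pickling the container
    except pickle.PickleError:
        plain = list(msg.numbers)          # convert to a plain list instead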
|  | # TODO(slebedev): Constrain T to be a subtype of Message. | ||||||
|  | class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): | ||||||
|  |   """Simple, list-like container for holding repeated composite fields.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_message_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, message_listener: Any, message_descriptor: Any) -> None: | ||||||
|  |     """ | ||||||
|  |     Note that we pass in a descriptor instead of the generated class directly, | ||||||
|  |     since at the time we construct a _RepeatedCompositeFieldContainer we | ||||||
|  |     haven't yet necessarily initialized the type that will be contained in the | ||||||
|  |     container. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The RepeatedCompositeFieldContainer will call this object's | ||||||
|  |         Modified() method when it is modified. | ||||||
|  |       message_descriptor: A Descriptor instance describing the protocol type | ||||||
|  |         that should be present in this container.  We'll use the | ||||||
|  |         _concrete_class field of this descriptor when the client calls add(). | ||||||
|  |     """ | ||||||
|  |     super().__init__(message_listener) | ||||||
|  |     self._message_descriptor = message_descriptor | ||||||
|  | 
 | ||||||
|  |   def add(self, **kwargs: Any) -> _T: | ||||||
|  |     """Adds a new element at the end of the list and returns it. Keyword | ||||||
|  |     arguments may be used to initialize the element. | ||||||
|  |     """ | ||||||
|  |     new_element = self._message_descriptor._concrete_class(**kwargs) | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     self._values.append(new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |     return new_element | ||||||
|  | 
 | ||||||
|  |   def append(self, value: _T) -> None: | ||||||
|  |     """Appends one element by copying the message.""" | ||||||
|  |     new_element = self._message_descriptor._concrete_class() | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     new_element.CopyFrom(value) | ||||||
|  |     self._values.append(new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def insert(self, key: int, value: _T) -> None: | ||||||
|  |     """Inserts the item at the specified position by copying.""" | ||||||
|  |     new_element = self._message_descriptor._concrete_class() | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     new_element.CopyFrom(value) | ||||||
|  |     self._values.insert(key, new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq: Iterable[_T]) -> None: | ||||||
|  |     """Extends by appending the given sequence of elements of the same type | ||||||
|  | 
 | ||||||
|  |     as this one, copying each individual message. | ||||||
|  |     """ | ||||||
|  |     message_class = self._message_descriptor._concrete_class | ||||||
|  |     listener = self._message_listener | ||||||
|  |     values = self._values | ||||||
|  |     for message in elem_seq: | ||||||
|  |       new_element = message_class() | ||||||
|  |       new_element._SetListener(listener) | ||||||
|  |       new_element.MergeFrom(message) | ||||||
|  |       values.append(new_element) | ||||||
|  |     listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def MergeFrom( | ||||||
|  |       self, | ||||||
|  |       other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], | ||||||
|  |   ) -> None: | ||||||
|  |     """Appends the contents of another repeated field of the same type to this | ||||||
|  |     one, copying each individual message. | ||||||
|  |     """ | ||||||
|  |     self.extend(other) | ||||||
|  | 
 | ||||||
|  |   def remove(self, elem: _T) -> None: | ||||||
|  |     """Removes an item from the list. Similar to list.remove().""" | ||||||
|  |     self._values.remove(elem) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def pop(self, key: Optional[int] = -1) -> _T: | ||||||
|  |     """Removes and returns an item at a given index. Similar to list.pop().""" | ||||||
|  |     value = self._values[key] | ||||||
|  |     self.__delitem__(key) | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: int, value: _T) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: slice, value: Iterable[_T]) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value): | ||||||
|  |     # This method is implemented to make RepeatedCompositeFieldContainer | ||||||
|  |     # structurally compatible with typing.MutableSequence. It is | ||||||
|  |     # otherwise unsupported and will always raise an error. | ||||||
|  |     raise TypeError( | ||||||
|  |         f'{self.__class__.__name__} object does not support item assignment') | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: Union[int, slice]) -> None: | ||||||
|  |     """Deletes the item at the specified position.""" | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other: Any) -> bool: | ||||||
|  |     """Compares the current instance with another one.""" | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     if not isinstance(other, self.__class__): | ||||||
|  |       raise TypeError('Can only compare repeated composite fields against ' | ||||||
|  |                       'other repeated composite fields.') | ||||||
|  |     return self._values == other._values | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
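A hedged usage sketch for the composite container above, assuming a hypothetical sample_pb2 module whose Sample message has `repeated Item items = 1;` and whose Item message has a `string name` field:

    import sample_pb2                      # hypothetical generated module

    msg = sample_pb2.Sample()
    first = msg.items.add(name='first')    # add() builds the element in place and returns it
    first.name = 'renamed'                 # later mutation still notifies the parent listener

    other = sample_pb2.Item(name='second')
    msg.items.append(other)                # append()/insert()/extend() copy the message in
    other.name = 'changed afterwards'      # ...so this edit does not affect msg.items[1]

    assert msg.items[1].name == 'second'
    assert len(msg.items) == 2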
|  | class ScalarMap(MutableMapping[_K, _V]): | ||||||
|  |   """Simple, type-checked, dict-like container for holding repeated scalars.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', | ||||||
|  |                '_entry_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       key_checker: Any, | ||||||
|  |       value_checker: Any, | ||||||
|  |       entry_descriptor: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The ScalarMap will call this object's Modified() method when it | ||||||
|  |         is modified. | ||||||
|  |       key_checker: A type_checkers.ValueChecker instance to run on keys | ||||||
|  |         inserted into this container. | ||||||
|  |       value_checker: A type_checkers.ValueChecker instance to run on values | ||||||
|  |         inserted into this container. | ||||||
|  |       entry_descriptor: The MessageDescriptor of a map entry: key and value. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._key_checker = key_checker | ||||||
|  |     self._value_checker = value_checker | ||||||
|  |     self._entry_descriptor = entry_descriptor | ||||||
|  |     self._values = {} | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key: _K) -> _V: | ||||||
|  |     try: | ||||||
|  |       return self._values[key] | ||||||
|  |     except KeyError: | ||||||
|  |       key = self._key_checker.CheckValue(key) | ||||||
|  |       val = self._value_checker.DefaultValue() | ||||||
|  |       self._values[key] = val | ||||||
|  |       return val | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item: _K) -> bool: | ||||||
|  |     # We check the key's type to match the strong-typing flavor of the API. | ||||||
|  |     # Also this makes it easier to match the behavior of the C++ implementation. | ||||||
|  |     self._key_checker.CheckValue(item) | ||||||
|  |     return item in self._values | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K) -> Optional[_V]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K, default: _T) -> Union[_V, _T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   # We need to override this explicitly, because our defaultdict-like behavior | ||||||
|  |   # will make the default implementation (from our base class) always insert | ||||||
|  |   # the key. | ||||||
|  |   def get(self, key, default=None): | ||||||
|  |     if key in self: | ||||||
|  |       return self[key] | ||||||
|  |     else: | ||||||
|  |       return default | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key: _K, value: _V) -> None: | ||||||
|  |     checked_key = self._key_checker.CheckValue(key) | ||||||
|  |     checked_value = self._value_checker.CheckValue(value) | ||||||
|  |     self._values[checked_key] = checked_value | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: _K) -> None: | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self) -> Iterator[_K]: | ||||||
|  |     return iter(self._values) | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: | ||||||
|  |     self._values.update(other._values) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def InvalidateIterators(self) -> None: | ||||||
|  |     # It appears that the only way to reliably invalidate iterators to | ||||||
|  |     # self._values is to ensure that its size changes. | ||||||
|  |     original = self._values | ||||||
|  |     self._values = original.copy() | ||||||
|  |     original[None] = None | ||||||
|  | 
 | ||||||
|  |   # This is defined in the abstract base, but we can do it much more cheaply. | ||||||
|  |   def clear(self) -> None: | ||||||
|  |     self._values.clear() | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def GetEntryClass(self) -> Any: | ||||||
|  |     return self._entry_descriptor._concrete_class | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
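The defaultdict-like lookup in ScalarMap.__getitem__ is easy to trip over, so here is a hedged sketch, assuming a hypothetical `map<string, int32> counts = 1;` field on sample_pb2.Sample:

    import sample_pb2                      # hypothetical generated module

    msg = sample_pb2.Sample()
    msg.counts['a'] = 3                    # key and value both pass through their ValueChecker
    missing = msg.counts['missing']        # __getitem__ inserts the default, like defaultdict
    assert missing == 0 and 'missing' in msg.counts

    peek = msg.counts.get('other')         # the overridden get() does NOT insert the key
    assert peek is None and 'other' not in msg.counts

    try:
        msg.counts[42] = 1                 # wrong key type is rejected by the key checker
    except TypeError:
        pass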
|  | class MessageMap(MutableMapping[_K, _V]): | ||||||
|  |   """Simple, type-checked, dict-like container for with submessage values.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_key_checker', '_values', '_message_listener', | ||||||
|  |                '_message_descriptor', '_entry_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       message_descriptor: Any, | ||||||
|  |       key_checker: Any, | ||||||
|  |       entry_descriptor: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The MessageMap will call this object's Modified() method when it | ||||||
|  |         is modified. | ||||||
|  |       message_descriptor: A Descriptor instance describing the protocol type | ||||||
|  |         of the submessage values held in this container. | ||||||
|  |       key_checker: A type_checkers.ValueChecker instance to run on keys | ||||||
|  |         inserted into this container. | ||||||
|  |       entry_descriptor: The MessageDescriptor of a map entry: key and value. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._message_descriptor = message_descriptor | ||||||
|  |     self._key_checker = key_checker | ||||||
|  |     self._entry_descriptor = entry_descriptor | ||||||
|  |     self._values = {} | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key: _K) -> _V: | ||||||
|  |     key = self._key_checker.CheckValue(key) | ||||||
|  |     try: | ||||||
|  |       return self._values[key] | ||||||
|  |     except KeyError: | ||||||
|  |       new_element = self._message_descriptor._concrete_class() | ||||||
|  |       new_element._SetListener(self._message_listener) | ||||||
|  |       self._values[key] = new_element | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |       return new_element | ||||||
|  | 
 | ||||||
|  |   def get_or_create(self, key: _K) -> _V: | ||||||
|  |     """get_or_create() is an alias for getitem (ie. map[key]). | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       key: The key to get or create in the map. | ||||||
|  | 
 | ||||||
|  |     This is useful in cases where you want to be explicit that the call is | ||||||
|  |     mutating the map.  This can avoid lint errors for statements like this | ||||||
|  |     that otherwise would appear to be pointless statements: | ||||||
|  | 
 | ||||||
|  |       msg.my_map[key] | ||||||
|  |     """ | ||||||
|  |     return self[key] | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K) -> Optional[_V]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K, default: _T) -> Union[_V, _T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   # We need to override this explicitly, because our defaultdict-like behavior | ||||||
|  |   # will make the default implementation (from our base class) always insert | ||||||
|  |   # the key. | ||||||
|  |   def get(self, key, default=None): | ||||||
|  |     if key in self: | ||||||
|  |       return self[key] | ||||||
|  |     else: | ||||||
|  |       return default | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item: _K) -> bool: | ||||||
|  |     item = self._key_checker.CheckValue(item) | ||||||
|  |     return item in self._values | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key: _K, value: _V) -> NoReturn: | ||||||
|  |     raise ValueError('May not set values directly, call my_map[key].foo = 5') | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: _K) -> None: | ||||||
|  |     key = self._key_checker.CheckValue(key) | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self) -> Iterator[_K]: | ||||||
|  |     return iter(self._values) | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     for key in other._values: | ||||||
|  |       # According to documentation: "When parsing from the wire or when merging, | ||||||
|  |       # if there are duplicate map keys the last key seen is used". | ||||||
|  |       if key in self: | ||||||
|  |         del self[key] | ||||||
|  |       self[key].CopyFrom(other[key]) | ||||||
|  |     # self._message_listener.Modified() not required here, because | ||||||
|  |     # mutations to submessages already propagate. | ||||||
|  | 
 | ||||||
|  |   def InvalidateIterators(self) -> None: | ||||||
|  |     # It appears that the only way to reliably invalidate iterators to | ||||||
|  |     # self._values is to ensure that its size changes. | ||||||
|  |     original = self._values | ||||||
|  |     self._values = original.copy() | ||||||
|  |     original[None] = None | ||||||
|  | 
 | ||||||
|  |   # This is defined in the abstract base, but we can do it much more cheaply. | ||||||
|  |   def clear(self) -> None: | ||||||
|  |     self._values.clear() | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def GetEntryClass(self) -> Any: | ||||||
|  |     return self._entry_descriptor._concrete_class | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
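For message-valued maps the key difference is that values cannot be assigned directly; a hedged sketch assuming a hypothetical `map<string, Item> items_by_name = 1;` field:

    import sample_pb2                              # hypothetical generated module

    msg = sample_pb2.Sample()
    msg.items_by_name['a'].name = 'first'          # __getitem__ creates the submessage on first access
    entry = msg.items_by_name.get_or_create('b')   # explicit alias for the same mutating lookup
    entry.name = 'second'

    try:
        msg.items_by_name['c'] = sample_pb2.Item() # __setitem__ above always raises
    except ValueError:
        msg.items_by_name['c'].CopyFrom(sample_pb2.Item(name='third'))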
|  | class _UnknownField: | ||||||
|  |   """A parsed unknown field.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_field_number', '_wire_type', '_data'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, field_number, wire_type, data): | ||||||
|  |     self._field_number = field_number | ||||||
|  |     self._wire_type = wire_type | ||||||
|  |     self._data = data | ||||||
|  |     return | ||||||
|  | 
 | ||||||
|  |   def __lt__(self, other): | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._field_number < other._field_number | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return (self._field_number == other._field_number and | ||||||
|  |             self._wire_type == other._wire_type and | ||||||
|  |             self._data == other._data) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnknownFieldRef:  # pylint: disable=missing-class-docstring | ||||||
|  | 
 | ||||||
|  |   def __init__(self, parent, index): | ||||||
|  |     self._parent = parent | ||||||
|  |     self._index = index | ||||||
|  | 
 | ||||||
|  |   def _check_valid(self): | ||||||
|  |     if not self._parent: | ||||||
|  |       raise ValueError('UnknownField does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     if self._index >= len(self._parent): | ||||||
|  |       raise ValueError('UnknownField does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def field_number(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._field_number | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def wire_type(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._wire_type | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def data(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._data | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnknownFieldSet: | ||||||
|  |   """UnknownField container""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_values'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self): | ||||||
|  |     self._values = [] | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, index): | ||||||
|  |     if self._values is None: | ||||||
|  |       raise ValueError('UnknownFields does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     size = len(self._values) | ||||||
|  |     if index < 0: | ||||||
|  |       index += size | ||||||
|  |     if index < 0 or index >= size: | ||||||
|  |       raise IndexError('index %d out of range' % index) | ||||||
|  | 
 | ||||||
|  |     return UnknownFieldRef(self, index) | ||||||
|  | 
 | ||||||
|  |   def _internal_get(self, index): | ||||||
|  |     return self._values[index] | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     if self._values is None: | ||||||
|  |       raise ValueError('UnknownFields does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def _add(self, field_number, wire_type, data): | ||||||
|  |     unknown_field = _UnknownField(field_number, wire_type, data) | ||||||
|  |     self._values.append(unknown_field) | ||||||
|  |     return unknown_field | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     for i in range(len(self)): | ||||||
|  |       yield UnknownFieldRef(self, i) | ||||||
|  | 
 | ||||||
|  |   def _extend(self, other): | ||||||
|  |     if other is None: | ||||||
|  |       return | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     self._values.extend(other._values) | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # Sort unknown fields because their order shouldn't | ||||||
|  |     # affect equality test. | ||||||
|  |     values = list(self._values) | ||||||
|  |     if other is None: | ||||||
|  |       return not values | ||||||
|  |     values.sort() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     other_values = sorted(other._values) | ||||||
|  |     return values == other_values | ||||||
|  | 
 | ||||||
|  |   def _clear(self): | ||||||
|  |     for value in self._values: | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if isinstance(value._data, UnknownFieldSet): | ||||||
|  |         value._data._clear()  # pylint: disable=protected-access | ||||||
|  |     self._values = None | ||||||
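A hedged sketch of how these classes surface to callers in this generation of the pure-Python runtime; the message type below is an assumption, and the accessor used is the runtime's Message.UnknownFields() method:

    import sample_pb2                      # hypothetical generated module

    msg = sample_pb2.Empty()               # assume a message that declares no field number 1
    msg.ParseFromString(b'\x08\x96\x01')   # field 1, wire type 0 (varint), value 150

    for field in msg.UnknownFields():      # an UnknownFieldSet of UnknownFieldRef views
        print(field.field_number, field.wire_type, field.data)

    msg.Clear()                            # clearing drops the set; reading the refs'
                                           # properties afterwards raises ValueError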
							
								
								
									
										1029
								lib/protobuf/internal/decoder.py
										Normal file
										File diff suppressed because it is too large
										829
								lib/protobuf/internal/encoder.py
										Normal file
									
								
							|  | @ -0,0 +1,829 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Code for encoding protocol message primitives. | ||||||
|  | 
 | ||||||
|  | Contains the logic for encoding every logical protocol field type | ||||||
|  | into one of the 5 physical wire types. | ||||||
|  | 
 | ||||||
|  | This code is designed to push the Python interpreter's performance to the | ||||||
|  | limits. | ||||||
|  | 
 | ||||||
|  | The basic idea is that at startup time, for every field (i.e. every | ||||||
|  | FieldDescriptor) we construct two functions:  a "sizer" and an "encoder".  The | ||||||
|  | sizer takes a value of this field's type and computes its byte size.  The | ||||||
|  | encoder takes a writer function and a value.  It encodes the value into byte | ||||||
|  | strings and invokes the writer function to write those strings.  Typically the | ||||||
|  | writer function is the write() method of a BytesIO. | ||||||
|  | 
 | ||||||
|  | We try to do as much work as possible when constructing the writer and the | ||||||
|  | sizer rather than when calling them.  In particular: | ||||||
|  | * We copy any needed global functions to local variables, so that we do not need | ||||||
|  |   to do costly global table lookups at runtime. | ||||||
|  | * Similarly, we try to do any attribute lookups at startup time if possible. | ||||||
|  | * Every field's tag is encoded to bytes at startup, since it can't change at | ||||||
|  |   runtime. | ||||||
|  | * Whatever component of the field size we can compute at startup, we do. | ||||||
|  | * We *avoid* sharing code if doing so would make the code slower and not sharing | ||||||
|  |   does not burden us too much.  For example, encoders for repeated fields do | ||||||
|  |   not just call the encoders for singular fields in a loop because this would | ||||||
|  |   add an extra function call overhead for every loop iteration; instead, we | ||||||
|  |   manually inline the single-value encoder into the loop. | ||||||
|  | * If a Python function lacks a return statement, Python actually generates | ||||||
|  |   instructions to pop the result of the last statement off the stack, push | ||||||
|  |   None onto the stack, and then return that.  If we really don't care what | ||||||
|  |   value is returned, then we can save two instructions by returning the | ||||||
|  |   result of the last statement.  It looks funny but it helps. | ||||||
|  | * We assume that type and bounds checking has happened at a higher level. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'kenton@google.com (Kenton Varda)' | ||||||
|  | 
 | ||||||
|  | import struct | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import wire_format | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # This will overflow and thus become IEEE-754 "infinity".  We would use | ||||||
|  | # "float('inf')" but it doesn't work on Windows pre-Python-2.6. | ||||||
|  | _POS_INF = 1e10000 | ||||||
|  | _NEG_INF = -_POS_INF | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VarintSize(value): | ||||||
|  |   """Compute the size of a varint value.""" | ||||||
|  |   if value <= 0x7f: return 1 | ||||||
|  |   if value <= 0x3fff: return 2 | ||||||
|  |   if value <= 0x1fffff: return 3 | ||||||
|  |   if value <= 0xfffffff: return 4 | ||||||
|  |   if value <= 0x7ffffffff: return 5 | ||||||
|  |   if value <= 0x3ffffffffff: return 6 | ||||||
|  |   if value <= 0x1ffffffffffff: return 7 | ||||||
|  |   if value <= 0xffffffffffffff: return 8 | ||||||
|  |   if value <= 0x7fffffffffffffff: return 9 | ||||||
|  |   return 10 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SignedVarintSize(value): | ||||||
|  |   """Compute the size of a signed varint value.""" | ||||||
|  |   if value < 0: return 10 | ||||||
|  |   if value <= 0x7f: return 1 | ||||||
|  |   if value <= 0x3fff: return 2 | ||||||
|  |   if value <= 0x1fffff: return 3 | ||||||
|  |   if value <= 0xfffffff: return 4 | ||||||
|  |   if value <= 0x7ffffffff: return 5 | ||||||
|  |   if value <= 0x3ffffffffff: return 6 | ||||||
|  |   if value <= 0x1ffffffffffff: return 7 | ||||||
|  |   if value <= 0xffffffffffffff: return 8 | ||||||
|  |   if value <= 0x7fffffffffffffff: return 9 | ||||||
|  |   return 10 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
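A quick, hedged sanity check of the width tables above (these are private helpers, used here purely for illustration):

    from google.protobuf.internal import encoder

    assert encoder._VarintSize(127) == 1        # fits in one 7-bit group
    assert encoder._VarintSize(128) == 2        # needs a continuation byte
    assert encoder._SignedVarintSize(-1) == 10  # negatives are sign-extended to 64 bits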
|  | def _TagSize(field_number): | ||||||
|  |   """Returns the number of bytes required to serialize a tag with this field | ||||||
|  |   number.""" | ||||||
|  |   # Just pass in type 0, since the type won't affect the tag+type size. | ||||||
|  |   return _VarintSize(wire_format.PackTag(field_number, 0)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # In this section we define some generic sizers.  Each of these functions | ||||||
|  | # takes parameters specific to a particular field type, e.g. int32 or fixed64. | ||||||
|  | # It returns another function which in turn takes parameters specific to a | ||||||
|  | # particular field, e.g. the field number and whether it is repeated or packed. | ||||||
|  | # Look at the next section to see how these are used. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SimpleSizer(compute_value_size): | ||||||
|  |   """A sizer which uses the function compute_value_size to compute the size of | ||||||
|  |   each value.  Typically compute_value_size is _VarintSize.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = 0 | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(element) | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         result = tag_size * len(value) | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(element) | ||||||
|  |         return result | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return tag_size + compute_value_size(value) | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ModifiedSizer(compute_value_size, modify_value): | ||||||
|  |   """Like SimpleSizer, but modify_value is invoked on each value before it is | ||||||
|  |   passed to compute_value_size.  modify_value is typically ZigZagEncode.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = 0 | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(modify_value(element)) | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         result = tag_size * len(value) | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(modify_value(element)) | ||||||
|  |         return result | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return tag_size + compute_value_size(modify_value(value)) | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _FixedSizer(value_size): | ||||||
|  |   """Like _SimpleSizer except for a fixed-size field.  The input is the size | ||||||
|  |   of one value.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = len(value) * value_size | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       element_size = value_size + tag_size | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         return len(value) * element_size | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       field_size = value_size + tag_size | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return field_size | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Here we declare a sizer constructor for each field type.  Each "sizer | ||||||
|  | # constructor" is a function that takes (field_number, is_repeated, is_packed) | ||||||
|  | # as parameters and returns a sizer, which in turn takes a field value as | ||||||
|  | # a parameter and returns its encoded size. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize) | ||||||
|  | 
 | ||||||
|  | UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize) | ||||||
|  | 
 | ||||||
|  | SInt32Sizer = SInt64Sizer = _ModifiedSizer( | ||||||
|  |     _SignedVarintSize, wire_format.ZigZagEncode) | ||||||
|  | 
 | ||||||
|  | Fixed32Sizer = SFixed32Sizer = FloatSizer  = _FixedSizer(4) | ||||||
|  | Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8) | ||||||
|  | 
 | ||||||
|  | BoolSizer = _FixedSizer(1) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
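As a concrete check on these sizer constructors (private API, used only for illustration): an int32 field with field number 1 and value 150 serializes to three bytes, one for the tag and two for the varint.

    from google.protobuf.internal import encoder

    size_int32 = encoder.Int32Sizer(field_number=1, is_repeated=False, is_packed=False)
    assert size_int32(150) == 3                 # tag byte 0x08 plus varint 0x96 0x01

    # Packed repeated fields add one tag and one length varint for the whole run.
    size_packed = encoder.UInt32Sizer(field_number=4, is_repeated=True, is_packed=True)
    assert size_packed([1, 2, 3]) == 1 + 1 + 3  # tag + length + three one-byte varints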
|  | def StringSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a string field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = local_len(element.encode('utf-8')) | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = local_len(value.encode('utf-8')) | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BytesSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a bytes field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = local_len(element) | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = local_len(value) | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a group field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) * 2 | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         result += element.ByteSize() | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       return tag_size + value.ByteSize() | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a message field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = element.ByteSize() | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = value.ByteSize() | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # MessageSet is special: it needs custom logic to compute its size properly. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemSizer(field_number): | ||||||
|  |   """Returns a sizer for extensions of MessageSet. | ||||||
|  | 
 | ||||||
|  |   The message set message looks like this: | ||||||
|  |     message MessageSet { | ||||||
|  |       repeated group Item = 1 { | ||||||
|  |         required int32 type_id = 2; | ||||||
|  |         required string message = 3; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   """ | ||||||
|  |   static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) + | ||||||
|  |                  _TagSize(3)) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  | 
 | ||||||
|  |   def FieldSize(value): | ||||||
|  |     l = value.ByteSize() | ||||||
|  |     return static_size + local_VarintSize(l) + l | ||||||
|  | 
 | ||||||
|  |   return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # Map is special: it needs custom logic to compute its size properly. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MapSizer(field_descriptor, is_message_map): | ||||||
|  |   """Returns a sizer for a map field.""" | ||||||
|  | 
 | ||||||
|  |   # Can't look at field_descriptor.message_type._concrete_class because it may | ||||||
|  |   # not have been initialized yet. | ||||||
|  |   message_type = field_descriptor.message_type | ||||||
|  |   message_sizer = MessageSizer(field_descriptor.number, False, False) | ||||||
|  | 
 | ||||||
|  |   def FieldSize(map_value): | ||||||
|  |     total = 0 | ||||||
|  |     for key in map_value: | ||||||
|  |       value = map_value[key] | ||||||
|  |       # It's wasteful to create the messages and throw them away one second | ||||||
|  |       # later since we'll do the same for the actual encode.  But there's not an | ||||||
|  |       # obvious way to avoid this within the current design without tons of code | ||||||
|  |       # duplication. For message map, value.ByteSize() should be called to | ||||||
|  |       # update the status. | ||||||
|  |       entry_msg = message_type._concrete_class(key=key, value=value) | ||||||
|  |       total += message_sizer(entry_msg) | ||||||
|  |       if is_message_map: | ||||||
|  |         value.ByteSize() | ||||||
|  |     return total | ||||||
|  | 
 | ||||||
|  |   return FieldSize | ||||||
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Encoders! | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VarintEncoder(): | ||||||
|  |   """Return an encoder for a basic varint value (does not include tag).""" | ||||||
|  | 
 | ||||||
|  |   local_int2byte = struct.Struct('>B').pack | ||||||
|  | 
 | ||||||
|  |   def EncodeVarint(write, value, unused_deterministic=None): | ||||||
|  |     bits = value & 0x7f | ||||||
|  |     value >>= 7 | ||||||
|  |     while value: | ||||||
|  |       write(local_int2byte(0x80|bits)) | ||||||
|  |       bits = value & 0x7f | ||||||
|  |       value >>= 7 | ||||||
|  |     return write(local_int2byte(bits)) | ||||||
|  | 
 | ||||||
|  |   return EncodeVarint | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SignedVarintEncoder(): | ||||||
|  |   """Return an encoder for a basic signed varint value (does not include | ||||||
|  |   tag).""" | ||||||
|  | 
 | ||||||
|  |   local_int2byte = struct.Struct('>B').pack | ||||||
|  | 
 | ||||||
|  |   def EncodeSignedVarint(write, value, unused_deterministic=None): | ||||||
|  |     if value < 0: | ||||||
|  |       value += (1 << 64) | ||||||
|  |     bits = value & 0x7f | ||||||
|  |     value >>= 7 | ||||||
|  |     while value: | ||||||
|  |       write(local_int2byte(0x80|bits)) | ||||||
|  |       bits = value & 0x7f | ||||||
|  |       value >>= 7 | ||||||
|  |     return write(local_int2byte(bits)) | ||||||
|  | 
 | ||||||
|  |   return EncodeSignedVarint | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _EncodeVarint = _VarintEncoder() | ||||||
|  | _EncodeSignedVarint = _SignedVarintEncoder() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VarintBytes(value): | ||||||
|  |   """Encode the given integer as a varint and return the bytes.  This is only | ||||||
|  |   called at startup time so it doesn't need to be fast.""" | ||||||
|  | 
 | ||||||
|  |   pieces = [] | ||||||
|  |   _EncodeVarint(pieces.append, value, True) | ||||||
|  |   return b"".join(pieces) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TagBytes(field_number, wire_type): | ||||||
|  |   """Encode the given tag and return the bytes.  Only called at startup.""" | ||||||
|  | 
 | ||||||
|  |   return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) | ||||||
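
# The helpers above implement protobuf's base-128 varint (seven payload bits
# per byte, with the high bit set while more bytes follow) and tags packed as
# (field_number << 3) | wire_type.  A minimal standalone sketch of the same
# idea; the helper name below is hypothetical, not part of this module:
def _varint_demo(value):
  out = bytearray()
  while value > 0x7F:
    out.append((value & 0x7F) | 0x80)  # low seven bits, continuation bit set
    value >>= 7
  out.append(value)                    # final byte, continuation bit clear
  return bytes(out)

assert _varint_demo(300) == b'\xac\x02'
assert _varint_demo((1 << 3) | 0) == b'\x08'  # tag for field 1, varint wire type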
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # As with sizers (see above), we have a number of common encoder | ||||||
|  | # implementations. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SimpleEncoder(wire_type, encode_value, compute_value_size): | ||||||
|  |   """Return a constructor for an encoder for fields of a particular type. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       encode_value:  A function which encodes an individual value, e.g. | ||||||
|  |         _EncodeVarint(). | ||||||
|  |       compute_value_size:  A function which computes the size of an individual | ||||||
|  |         value, e.g. _VarintSize(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         size = 0 | ||||||
|  |         for element in value: | ||||||
|  |           size += compute_value_size(element) | ||||||
|  |         local_EncodeVarint(write, size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           encode_value(write, element, deterministic) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           encode_value(write, element, deterministic) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return encode_value(write, value, deterministic) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
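
# _SimpleEncoder produces one of two shapes on the wire.  A hand-worked sketch
# for a hypothetical repeated uint32 field number 4 holding [1, 2, 3]: packed
# writes one length-delimited record, unpacked repeats the tag per element.
_packed_demo = bytes([(4 << 3) | 2, 3, 1, 2, 3])
_unpacked_demo = bytes([(4 << 3) | 0, 1, (4 << 3) | 0, 2, (4 << 3) | 0, 3])
assert _packed_demo == b'\x22\x03\x01\x02\x03'
assert _unpacked_demo == b'\x20\x01\x20\x02\x20\x03'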
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): | ||||||
|  |   """Like SimpleEncoder but additionally invokes modify_value on every value | ||||||
|  |   before passing it to encode_value.  Usually modify_value is ZigZagEncode.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         size = 0 | ||||||
|  |         for element in value: | ||||||
|  |           size += compute_value_size(modify_value(element)) | ||||||
|  |         local_EncodeVarint(write, size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           encode_value(write, modify_value(element), deterministic) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           encode_value(write, modify_value(element), deterministic) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return encode_value(write, modify_value(value), deterministic) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
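
# _ModifiedEncoder is how sint32/sint64 get their compact encoding: values are
# ZigZag-mapped to unsigned integers before the varint step, so small negative
# numbers stay small on the wire.  A sketch equivalent in effect to
# wire_format.ZigZagEncode:
def _zigzag_demo(n):
  return (n << 1) if n >= 0 else ((-n) << 1) - 1

assert [_zigzag_demo(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]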
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _StructPackEncoder(wire_type, format): | ||||||
|  |   """Return a constructor for an encoder for a fixed-width field. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       format:  The format string to pass to struct.pack(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   value_size = struct.calcsize(format) | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     local_struct_pack = struct.pack | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         local_EncodeVarint(write, len(value) * value_size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           write(local_struct_pack(format, element)) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           write(local_struct_pack(format, element)) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return write(local_struct_pack(format, value)) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _FloatingPointEncoder(wire_type, format): | ||||||
|  |   """Return a constructor for an encoder for float fields. | ||||||
|  | 
 | ||||||
|  |   This is like StructPackEncoder, but catches errors that may be due to | ||||||
|  |   passing non-finite floating-point values to struct.pack, and makes a | ||||||
|  |   second attempt to encode those values. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       format:  The format string to pass to struct.pack(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   value_size = struct.calcsize(format) | ||||||
|  |   if value_size == 4: | ||||||
|  |     def EncodeNonFiniteOrRaise(write, value): | ||||||
|  |       # Remember that the serialized form uses little-endian byte order. | ||||||
|  |       if value == _POS_INF: | ||||||
|  |         write(b'\x00\x00\x80\x7F') | ||||||
|  |       elif value == _NEG_INF: | ||||||
|  |         write(b'\x00\x00\x80\xFF') | ||||||
|  |       elif value != value:           # NaN | ||||||
|  |         write(b'\x00\x00\xC0\x7F') | ||||||
|  |       else: | ||||||
|  |         raise | ||||||
|  |   elif value_size == 8: | ||||||
|  |     def EncodeNonFiniteOrRaise(write, value): | ||||||
|  |       if value == _POS_INF: | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') | ||||||
|  |       elif value == _NEG_INF: | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') | ||||||
|  |       elif value != value:                         # NaN | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') | ||||||
|  |       else: | ||||||
|  |         raise | ||||||
|  |   else: | ||||||
|  |     raise ValueError('Can\'t encode floating-point values that are ' | ||||||
|  |                      '%d bytes long (only 4 or 8)' % value_size) | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     local_struct_pack = struct.pack | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         local_EncodeVarint(write, len(value) * value_size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           # This try/except block is going to be faster than any code that | ||||||
|  |           # we could write to check whether element is finite. | ||||||
|  |           try: | ||||||
|  |             write(local_struct_pack(format, element)) | ||||||
|  |           except SystemError: | ||||||
|  |             EncodeNonFiniteOrRaise(write, element) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           try: | ||||||
|  |             write(local_struct_pack(format, element)) | ||||||
|  |           except SystemError: | ||||||
|  |             EncodeNonFiniteOrRaise(write, element) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         try: | ||||||
|  |           write(local_struct_pack(format, value)) | ||||||
|  |         except SystemError: | ||||||
|  |           EncodeNonFiniteOrRaise(write, value) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
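
# The hard-coded byte strings in EncodeNonFiniteOrRaise are simply the
# IEEE-754 little-endian encodings of +inf, -inf and a quiet NaN; on a current
# CPython, struct.pack accepts non-finite floats and produces the same bytes:
import struct
assert struct.pack('<f', float('inf')) == b'\x00\x00\x80\x7f'
assert struct.pack('<f', float('-inf')) == b'\x00\x00\x80\xff'
assert struct.pack('<d', float('inf')) == b'\x00\x00\x00\x00\x00\x00\xf0\x7f'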
|  | 
 | ||||||
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Here we declare an encoder constructor for each field type.  These work | ||||||
|  | # very similarly to sizer constructors, described earlier. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) | ||||||
|  | 
 | ||||||
|  | UInt32Encoder = UInt64Encoder = _SimpleEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) | ||||||
|  | 
 | ||||||
|  | SInt32Encoder = SInt64Encoder = _ModifiedEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, | ||||||
|  |     wire_format.ZigZagEncode) | ||||||
|  | 
 | ||||||
|  | # Note that Python conveniently guarantees that when using the '<' prefix on | ||||||
|  | # formats, they will also have the same size across all platforms (as opposed | ||||||
|  | # to without the prefix, where their sizes depend on the C compiler's basic | ||||||
|  | # type sizes). | ||||||
|  | Fixed32Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I') | ||||||
|  | Fixed64Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q') | ||||||
|  | SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i') | ||||||
|  | SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q') | ||||||
|  | FloatEncoder    = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f') | ||||||
|  | DoubleEncoder   = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BoolEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a boolean field.""" | ||||||
|  | 
 | ||||||
|  |   false_byte = b'\x00' | ||||||
|  |   true_byte = b'\x01' | ||||||
|  |   if is_packed: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |     local_EncodeVarint = _EncodeVarint | ||||||
|  |     def EncodePackedField(write, value, deterministic): | ||||||
|  |       write(tag_bytes) | ||||||
|  |       local_EncodeVarint(write, len(value), deterministic) | ||||||
|  |       for element in value: | ||||||
|  |         if element: | ||||||
|  |           write(true_byte) | ||||||
|  |         else: | ||||||
|  |           write(false_byte) | ||||||
|  |     return EncodePackedField | ||||||
|  |   elif is_repeated: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) | ||||||
|  |     def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag_bytes) | ||||||
|  |         if element: | ||||||
|  |           write(true_byte) | ||||||
|  |         else: | ||||||
|  |           write(false_byte) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) | ||||||
|  |     def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |       write(tag_bytes) | ||||||
|  |       if value: | ||||||
|  |         return write(true_byte) | ||||||
|  |       return write(false_byte) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def StringEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a string field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         encoded = element.encode('utf-8') | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, local_len(encoded), deterministic) | ||||||
|  |         write(encoded) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       encoded = value.encode('utf-8') | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, local_len(encoded), deterministic) | ||||||
|  |       return write(encoded) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BytesEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a bytes field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, local_len(element), deterministic) | ||||||
|  |         write(element) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, local_len(value), deterministic) | ||||||
|  |       return write(value) | ||||||
|  |     return EncodeField | ||||||
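
# Strings and bytes share the length-delimited wire format: tag, varint length,
# then the raw payload.  Worked by hand for a hypothetical string field
# number 2 holding "hi":
_string_demo = bytes([(2 << 3) | 2, 2]) + 'hi'.encode('utf-8')
assert _string_demo == b'\x12\x02hi'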
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a group field.""" | ||||||
|  | 
 | ||||||
|  |   start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP) | ||||||
|  |   end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP) | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(start_tag) | ||||||
|  |         element._InternalSerialize(write, deterministic) | ||||||
|  |         write(end_tag) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(start_tag) | ||||||
|  |       value._InternalSerialize(write, deterministic) | ||||||
|  |       return write(end_tag) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a message field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, element.ByteSize(), deterministic) | ||||||
|  |         element._InternalSerialize(write, deterministic) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, value.ByteSize(), deterministic) | ||||||
|  |       return value._InternalSerialize(write, deterministic) | ||||||
|  |     return EncodeField | ||||||
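
# A message field is also length-delimited: the submessage's ByteSize() is
# written as a varint and its serialization follows.  Worked by hand for a
# hypothetical submessage whose only content is field 1 = 150 (varint bytes
# 0x96 0x01), embedded as field 3 of its parent:
_inner_demo = b'\x08\x96\x01'
_outer_demo = bytes([(3 << 3) | 2, len(_inner_demo)]) + _inner_demo
assert _outer_demo == b'\x1a\x03\x08\x96\x01'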
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # As before, MessageSet is special. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemEncoder(field_number): | ||||||
|  |   """Encoder for extensions of MessageSet. | ||||||
|  | 
 | ||||||
|  |   The message set message looks like this: | ||||||
|  |     message MessageSet { | ||||||
|  |       repeated group Item = 1 { | ||||||
|  |         required int32 type_id = 2; | ||||||
|  |         required string message = 3; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   """ | ||||||
|  |   start_bytes = b"".join([ | ||||||
|  |       TagBytes(1, wire_format.WIRETYPE_START_GROUP), | ||||||
|  |       TagBytes(2, wire_format.WIRETYPE_VARINT), | ||||||
|  |       _VarintBytes(field_number), | ||||||
|  |       TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)]) | ||||||
|  |   end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  | 
 | ||||||
|  |   def EncodeField(write, value, deterministic): | ||||||
|  |     write(start_bytes) | ||||||
|  |     local_EncodeVarint(write, value.ByteSize(), deterministic) | ||||||
|  |     value._InternalSerialize(write, deterministic) | ||||||
|  |     return write(end_bytes) | ||||||
|  | 
 | ||||||
|  |   return EncodeField | ||||||
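
# The precomputed start_bytes/end_bytes above are fixed framing: group 1 start,
# the varint tag for type_id (field 2), the length-delimited tag for message
# (field 3), and finally group 1 end.  Worked by hand for a hypothetical
# extension field number 1000 (varint bytes 0xE8 0x07):
_ms_start_demo = bytes([(1 << 3) | 3, (2 << 3) | 0]) + b'\xe8\x07' + bytes([(3 << 3) | 2])
_ms_end_demo = bytes([(1 << 3) | 4])
assert _ms_start_demo == b'\x0b\x10\xe8\x07\x1a'
assert _ms_end_demo == b'\x0c'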
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # As before, Map is special. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MapEncoder(field_descriptor): | ||||||
|  |   """Encoder for a map field. | ||||||
|  | 
 | ||||||
|  |   Maps always have a wire format like this: | ||||||
|  |     message MapEntry { | ||||||
|  |       key_type key = 1; | ||||||
|  |       value_type value = 2; | ||||||
|  |     } | ||||||
|  |     repeated MapEntry map = N; | ||||||
|  |   """ | ||||||
|  |   # Can't look at field_descriptor.message_type._concrete_class because it may | ||||||
|  |   # not have been initialized yet. | ||||||
|  |   message_type = field_descriptor.message_type | ||||||
|  |   encode_message = MessageEncoder(field_descriptor.number, False, False) | ||||||
|  | 
 | ||||||
|  |   def EncodeField(write, value, deterministic): | ||||||
|  |     value_keys = sorted(value.keys()) if deterministic else value | ||||||
|  |     for key in value_keys: | ||||||
|  |       entry_msg = message_type._concrete_class(key=key, value=value[key]) | ||||||
|  |       encode_message(write, entry_msg, deterministic) | ||||||
|  | 
 | ||||||
|  |   return EncodeField | ||||||
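
# Each map item travels as one MapEntry submessage on the map field's own
# number, which is exactly what the closure above delegates to MessageEncoder.
# Worked by hand for a hypothetical map<string, int32> field number 5 holding
# {"a": 1} (key is entry field 1, value is entry field 2):
_entry_demo = bytes([(1 << 3) | 2, 1]) + b'a' + bytes([(2 << 3) | 0, 1])
_map_demo = bytes([(5 << 3) | 2, len(_entry_demo)]) + _entry_demo
assert _map_demo == b'\x2a\x05\x0a\x01a\x10\x01'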
							
								
								
									
 124  lib/protobuf/internal/enum_type_wrapper.py  (new file)
							|  | @ -0,0 +1,124 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """A simple wrapper around enum types to expose utility functions. | ||||||
|  | 
 | ||||||
|  | Instances are created as properties with the same name as the enum they wrap | ||||||
|  | on proto classes.  For usage, see: | ||||||
|  |   reflection_test.py | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'rabsatt@google.com (Kevin Rabsatt)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EnumTypeWrapper(object): | ||||||
|  |   """A utility for finding the names of enum values.""" | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR = None | ||||||
|  | 
 | ||||||
|  |   # This is a type alias, which mypy typing stubs can type as | ||||||
|  |   # a genericized parameter constrained to an int, allowing subclasses | ||||||
|  |   # to be typed with more constraint in .pyi stubs | ||||||
|  |   # Eg. | ||||||
|  |   # class MyGeneratedEnum(Message): | ||||||
|  |   #   ValueType = NewType('ValueType', int) | ||||||
|  |   #   def Name(self, number: MyGeneratedEnum.ValueType) -> str | ||||||
|  |   ValueType = int | ||||||
|  | 
 | ||||||
|  |   def __init__(self, enum_type): | ||||||
|  |     """Inits EnumTypeWrapper with an EnumDescriptor.""" | ||||||
|  |     self._enum_type = enum_type | ||||||
|  |     self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name | ||||||
|  | 
 | ||||||
|  |   def Name(self, number):  # pylint: disable=invalid-name | ||||||
|  |     """Returns a string containing the name of an enum value.""" | ||||||
|  |     try: | ||||||
|  |       return self._enum_type.values_by_number[number].name | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  | 
 | ||||||
|  |     if not isinstance(number, int): | ||||||
|  |       raise TypeError( | ||||||
|  |           'Enum value for {} must be an int, but got {} {!r}.'.format( | ||||||
|  |               self._enum_type.name, type(number), number)) | ||||||
|  |     else: | ||||||
|  |       # repr here to handle the odd case when you pass in a boolean. | ||||||
|  |       raise ValueError('Enum {} has no name defined for value {!r}'.format( | ||||||
|  |           self._enum_type.name, number)) | ||||||
|  | 
 | ||||||
|  |   def Value(self, name):  # pylint: disable=invalid-name | ||||||
|  |     """Returns the value corresponding to the given enum name.""" | ||||||
|  |     try: | ||||||
|  |       return self._enum_type.values_by_name[name].number | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  |     raise ValueError('Enum {} has no value defined for name {!r}'.format( | ||||||
|  |         self._enum_type.name, name)) | ||||||
|  | 
 | ||||||
|  |   def keys(self): | ||||||
|  |     """Return a list of the string names in the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of strs, in the order they were defined in the .proto file. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return [value_descriptor.name | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def values(self): | ||||||
|  |     """Return a list of the integer values in the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of ints, in the order they were defined in the .proto file. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return [value_descriptor.number | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def items(self): | ||||||
|  |     """Return a list of the (name, value) pairs of the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of (str, int) pairs, in the order they were defined | ||||||
|  |       in the .proto file. | ||||||
|  |     """ | ||||||
|  |     return [(value_descriptor.name, value_descriptor.number) | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def __getattr__(self, name): | ||||||
|  |     """Returns the value corresponding to the given enum name.""" | ||||||
|  |     try: | ||||||
|  |       return super( | ||||||
|  |           EnumTypeWrapper, | ||||||
|  |           self).__getattribute__('_enum_type').values_by_name[name].number | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  |     raise AttributeError('Enum {} has no value defined for name {!r}'.format( | ||||||
|  |         self._enum_type.name, name)) | ||||||
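
# A small usage sketch driven by a hand-rolled stand-in for an EnumDescriptor;
# the stub class and its values below are hypothetical, purely illustrative.
from collections import namedtuple

_StubValue = namedtuple('_StubValue', ['name', 'number'])


class _StubEnumDescriptor(object):
  name = 'Color'
  values = [_StubValue('RED', 0), _StubValue('GREEN', 1)]
  values_by_name = {v.name: v for v in values}
  values_by_number = {v.number: v for v in values}


_color = EnumTypeWrapper(_StubEnumDescriptor())
assert _color.Name(1) == 'GREEN' and _color.Value('RED') == 0
assert _color.items() == [('RED', 0), ('GREEN', 1)]
assert _color.GREEN == 1  # attribute access goes through __getattr__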
							
								
								
									
 213  lib/protobuf/internal/extension_dict.py  (new file)
							|  | @ -0,0 +1,213 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains _ExtensionDict class to represent extensions. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import type_checkers | ||||||
|  | from google.protobuf.descriptor import FieldDescriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VerifyExtensionHandle(message, extension_handle): | ||||||
|  |   """Verify that the given extension handle is valid.""" | ||||||
|  | 
 | ||||||
|  |   if not isinstance(extension_handle, FieldDescriptor): | ||||||
|  |     raise KeyError('HasExtension() expects an extension handle, got: %s' % | ||||||
|  |                    extension_handle) | ||||||
|  | 
 | ||||||
|  |   if not extension_handle.is_extension: | ||||||
|  |     raise KeyError('"%s" is not an extension.' % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |   if not extension_handle.containing_type: | ||||||
|  |     raise KeyError('"%s" is missing a containing_type.' | ||||||
|  |                    % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |   if extension_handle.containing_type is not message.DESCRIPTOR: | ||||||
|  |     raise KeyError('Extension "%s" extends message type "%s", but this ' | ||||||
|  |                    'message is of type "%s".' % | ||||||
|  |                    (extension_handle.full_name, | ||||||
|  |                     extension_handle.containing_type.full_name, | ||||||
|  |                     message.DESCRIPTOR.full_name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO(robinson): Unify error handling of "unknown extension" crap. | ||||||
|  | # TODO(robinson): Support iteritems()-style iteration over all | ||||||
|  | # extensions with the "has" bits turned on? | ||||||
|  | class _ExtensionDict(object): | ||||||
|  | 
 | ||||||
|  |   """Dict-like container for Extension fields on proto instances. | ||||||
|  | 
 | ||||||
|  |   Note that in all cases we expect extension handles to be | ||||||
|  |   FieldDescriptors. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, extended_message): | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       extended_message: Message instance for which we are the Extensions dict. | ||||||
|  |     """ | ||||||
|  |     self._extended_message = extended_message | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, extension_handle): | ||||||
|  |     """Returns the current value of the given extension handle.""" | ||||||
|  | 
 | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     result = self._extended_message._fields.get(extension_handle) | ||||||
|  |     if result is not None: | ||||||
|  |       return result | ||||||
|  | 
 | ||||||
|  |     if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       result = extension_handle._default_constructor(self._extended_message) | ||||||
|  |     elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       message_type = extension_handle.message_type | ||||||
|  |       if not hasattr(message_type, '_concrete_class'): | ||||||
|  |         # pylint: disable=protected-access | ||||||
|  |         self._extended_message._FACTORY.GetPrototype(message_type) | ||||||
|  |       assert getattr(extension_handle.message_type, '_concrete_class', None), ( | ||||||
|  |           'Uninitialized concrete class found for field %r (message type %r)' | ||||||
|  |           % (extension_handle.full_name, | ||||||
|  |              extension_handle.message_type.full_name)) | ||||||
|  |       result = extension_handle.message_type._concrete_class() | ||||||
|  |       try: | ||||||
|  |         result._SetListener(self._extended_message._listener_for_children) | ||||||
|  |       except ReferenceError: | ||||||
|  |         pass | ||||||
|  |     else: | ||||||
|  |       # Singular scalar -- just return the default without inserting into the | ||||||
|  |       # dict. | ||||||
|  |       return extension_handle.default_value | ||||||
|  | 
 | ||||||
|  |     # Atomically check if another thread has preempted us and, if not, swap | ||||||
|  |     # in the new object we just created.  If someone has preempted us, we | ||||||
|  |     # take that object and discard ours. | ||||||
|  |     # WARNING:  We are relying on setdefault() being atomic.  This is true | ||||||
|  |     #   in CPython but we haven't investigated others.  This warning appears | ||||||
|  |     #   in several other locations in this file. | ||||||
|  |     result = self._extended_message._fields.setdefault( | ||||||
|  |         extension_handle, result) | ||||||
|  | 
 | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if not isinstance(other, self.__class__): | ||||||
|  |       return False | ||||||
|  | 
 | ||||||
|  |     my_fields = self._extended_message.ListFields() | ||||||
|  |     other_fields = other._extended_message.ListFields() | ||||||
|  | 
 | ||||||
|  |     # Get rid of non-extension fields. | ||||||
|  |     my_fields = [field for field in my_fields if field.is_extension] | ||||||
|  |     other_fields = [field for field in other_fields if field.is_extension] | ||||||
|  | 
 | ||||||
|  |     return my_fields == other_fields | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other): | ||||||
|  |     return not self == other | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     fields = self._extended_message.ListFields() | ||||||
|  |     # Get rid of non-extension fields. | ||||||
|  |     extension_fields = [field for field in fields if field[0].is_extension] | ||||||
|  |     return len(extension_fields) | ||||||
|  | 
 | ||||||
|  |   def __hash__(self): | ||||||
|  |     raise TypeError('unhashable object') | ||||||
|  | 
 | ||||||
|  |   # Note that this is only meaningful for non-repeated, scalar extension | ||||||
|  |   # fields.  Note also that we may have to call _Modified() when we do | ||||||
|  |   # successfully set a field this way, to set any necessary "has" bits in the | ||||||
|  |   # ancestors of the extended message. | ||||||
|  |   def __setitem__(self, extension_handle, value): | ||||||
|  |     """If extension_handle specifies a non-repeated, scalar extension | ||||||
|  |     field, sets the value of that field. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |         extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): | ||||||
|  |       raise TypeError( | ||||||
|  |           'Cannot assign to extension "%s" because it is a repeated or ' | ||||||
|  |           'composite type.' % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |     # It's slightly wasteful to look up the type checker each time, | ||||||
|  |     # but we expect this to be a vanishingly uncommon case anyway. | ||||||
|  |     type_checker = type_checkers.GetTypeChecker(extension_handle) | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     self._extended_message._fields[extension_handle] = ( | ||||||
|  |         type_checker.CheckValue(value)) | ||||||
|  |     self._extended_message._Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, extension_handle): | ||||||
|  |     self._extended_message.ClearExtension(extension_handle) | ||||||
|  | 
 | ||||||
|  |   def _FindExtensionByName(self, name): | ||||||
|  |     """Tries to find a known extension with the specified name. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Extension full name. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       Extension field descriptor. | ||||||
|  |     """ | ||||||
|  |     return self._extended_message._extensions_by_name.get(name, None) | ||||||
|  | 
 | ||||||
|  |   def _FindExtensionByNumber(self, number): | ||||||
|  |     """Tries to find a known extension with the field number. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       number: Extension field number. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       Extension field descriptor. | ||||||
|  |     """ | ||||||
|  |     return self._extended_message._extensions_by_number.get(number, None) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     # Return a generator over the populated extension fields | ||||||
|  |     return (f[0] for f in self._extended_message.ListFields() | ||||||
|  |             if f[0].is_extension) | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, extension_handle): | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     if extension_handle not in self._extended_message._fields: | ||||||
|  |       return False | ||||||
|  | 
 | ||||||
|  |     if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       return bool(self._extended_message._fields.get(extension_handle)) | ||||||
|  | 
 | ||||||
|  |     if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       value = self._extended_message._fields.get(extension_handle) | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       return value is not None and value._is_present_in_parent | ||||||
|  | 
 | ||||||
|  |     return True | ||||||
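
# The setdefault() call in __getitem__ above is the generic "create
# speculatively, keep whichever object landed first" pattern.  A stripped-down
# sketch of the same idea (the names here are illustrative only):
_shared_demo = {}
_candidate_demo = object()
_winner_demo = _shared_demo.setdefault('some_key', _candidate_demo)  # atomic in CPython
assert _winner_demo is _shared_demo['some_key']  # ours, or another thread's object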
							
								
								
									
  78  lib/protobuf/internal/message_listener.py  (new file)
							|  | @ -0,0 +1,78 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Defines a listener interface for observing certain | ||||||
|  | state transitions on Message objects. | ||||||
|  | 
 | ||||||
|  | Also defines a null implementation of this interface. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MessageListener(object): | ||||||
|  | 
 | ||||||
|  |   """Listens for modifications made to a message.  Meant to be registered via | ||||||
|  |   Message._SetListener(). | ||||||
|  | 
 | ||||||
|  |   Attributes: | ||||||
|  |     dirty:  If True, then calling Modified() would be a no-op.  This can be | ||||||
|  |             used to avoid these calls entirely in the common case. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def Modified(self): | ||||||
|  |     """Called every time the message is modified in such a way that the parent | ||||||
|  |     message may need to be updated.  This currently means either: | ||||||
|  |     (a) The message was modified for the first time, so the parent message | ||||||
|  |         should henceforth mark the message as present. | ||||||
|  |     (b) The message's cached byte size became dirty -- i.e. the message was | ||||||
|  |         modified for the first time after a previous call to ByteSize(). | ||||||
|  |         Therefore the parent should also mark its byte size as dirty. | ||||||
|  |     Note that (a) implies (b), since new objects start out with a client cached | ||||||
|  |     size (zero).  However, we document (a) explicitly because it is important. | ||||||
|  | 
 | ||||||
|  |     Modified() will *only* be called in response to one of these two events -- | ||||||
|  |     not every time the sub-message is modified. | ||||||
|  | 
 | ||||||
|  |     Note that if the listener's |dirty| attribute is true, then calling | ||||||
|  |     Modified at the moment would be a no-op, so it can be skipped.  Performance- | ||||||
|  |     sensitive callers should check this attribute directly before calling since | ||||||
|  |     it will be true most of the time. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class NullMessageListener(object): | ||||||
|  | 
 | ||||||
|  |   """No-op MessageListener implementation.""" | ||||||
|  | 
 | ||||||
|  |   def Modified(self): | ||||||
|  |     pass | ||||||
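
# A minimal concrete listener, just to make the contract above tangible: it
# flips |dirty| on the first notification and honours the documented
# short-circuit.  (Illustrative only; the real implementation used by
# generated messages lives elsewhere in this package.)
class _RecordingListener(MessageListener):

  def __init__(self):
    self.dirty = False

  def Modified(self):
    self.dirty = True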
							
								
								
									
 1539  lib/protobuf/internal/python_message.py  (new file; diff suppressed because it is too large)
  435  lib/protobuf/internal/type_checkers.py  (new file)
							|  | @ -0,0 +1,435 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Provides type checking routines. | ||||||
|  | 
 | ||||||
|  | This module defines type checking utilities in the forms of dictionaries: | ||||||
|  | 
 | ||||||
|  | VALUE_CHECKERS: A dictionary of field types and a value validation object. | ||||||
|  | TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing | ||||||
|  |   function. | ||||||
|  | TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization | ||||||
|  |   function. | ||||||
|  | FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their | ||||||
|  |   corresponding wire types. | ||||||
|  | TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization | ||||||
|  |   function. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | import ctypes | ||||||
|  | import numbers | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import decoder | ||||||
|  | from google.protobuf.internal import encoder | ||||||
|  | from google.protobuf.internal import wire_format | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | 
 | ||||||
|  | _FieldDescriptor = descriptor.FieldDescriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TruncateToFourByteFloat(original): | ||||||
|  |   return ctypes.c_float(original).value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ToShortestFloat(original): | ||||||
|  |   """Returns the shortest float that has same value in wire.""" | ||||||
|  |   # All 4 byte floats have between 6 and 9 significant digits, so we | ||||||
|  |   # start with 6 as the lower bound. | ||||||
|  |   # It has to be iterative because using '.9g' directly cannot get rid | ||||||
|  |   # of the noise for most values.  For example, if a float field is set to | ||||||
|  |   # 0.9, '.9g' will print 0.899999976. | ||||||
|  |   precision = 6 | ||||||
|  |   rounded = float('{0:.{1}g}'.format(original, precision)) | ||||||
|  |   while TruncateToFourByteFloat(rounded) != original: | ||||||
|  |     precision += 1 | ||||||
|  |     rounded = float('{0:.{1}g}'.format(original, precision)) | ||||||
|  |   return rounded | ||||||
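
# A quick illustration of the two helpers above: 0.9 is not exactly
# representable as a 4-byte float, but the shortest decimal that still
# round-trips through float32 is 0.9 itself.
_truncated_demo = TruncateToFourByteFloat(0.9)  # picks up float32 rounding noise
assert _truncated_demo != 0.9
assert ToShortestFloat(_truncated_demo) == 0.9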
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SupportsOpenEnums(field_descriptor): | ||||||
|  |   return field_descriptor.containing_type.syntax == 'proto3' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GetTypeChecker(field): | ||||||
|  |   """Returns a type checker for a message field of the specified types. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field: FieldDescriptor object for this field. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     An instance of TypeChecker which can be used to verify the types | ||||||
|  |     of values assigned to a field of the specified type. | ||||||
|  |   """ | ||||||
|  |   if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and | ||||||
|  |       field.type == _FieldDescriptor.TYPE_STRING): | ||||||
|  |     return UnicodeValueChecker() | ||||||
|  |   if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |     if SupportsOpenEnums(field): | ||||||
|  |       # When open enums are supported, any int32 can be assigned. | ||||||
|  |       return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] | ||||||
|  |     else: | ||||||
|  |       return EnumValueChecker(field.enum_type) | ||||||
|  |   return _VALUE_CHECKERS[field.cpp_type] | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # None of the typecheckers below make any attempt to guard against people | ||||||
|  | # subclassing builtin types and doing weird things.  We're not trying to | ||||||
|  | # protect against malicious clients here, just people accidentally shooting | ||||||
|  | # themselves in the foot in obvious ways. | ||||||
|  | class TypeChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Type checker used to catch type errors as early as possible | ||||||
|  |   when the client is setting scalar fields in protocol messages. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, *acceptable_types): | ||||||
|  |     self._acceptable_types = acceptable_types | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Type check the provided value and return it. | ||||||
|  | 
 | ||||||
|  |     The returned value might have been normalized to another type. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(proposed_value, self._acceptable_types): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), self._acceptable_types)) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class TypeCheckerWithDefault(TypeChecker): | ||||||
|  | 
 | ||||||
|  |   def __init__(self, default_value, *acceptable_types): | ||||||
|  |     TypeChecker.__init__(self, *acceptable_types) | ||||||
|  |     self._default_value = default_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return self._default_value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BoolValueChecker(object): | ||||||
|  |   """Type checker used for bool fields.""" | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not hasattr(proposed_value, '__index__') or ( | ||||||
|  |         type(proposed_value).__module__ == 'numpy' and | ||||||
|  |         type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (bool, int))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return bool(proposed_value) | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # IntValueChecker and its subclasses perform integer type-checks | ||||||
|  | # and bounds-checks. | ||||||
|  | class IntValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for integer fields.  Performs type-check and range check.""" | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not hasattr(proposed_value, '__index__') or ( | ||||||
|  |         type(proposed_value).__module__ == 'numpy' and | ||||||
|  |         type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (int,))) | ||||||
|  |       raise TypeError(message) | ||||||
|  | 
 | ||||||
|  |     if not self._MIN <= int(proposed_value) <= self._MAX: | ||||||
|  |       raise ValueError('Value out of range: %d' % proposed_value) | ||||||
|  |     # We force all values to int to make alternate implementations where the | ||||||
|  |     # distinction is more significant (e.g. the C++ implementation) simpler. | ||||||
|  |     proposed_value = int(proposed_value) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return 0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EnumValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for enum fields.  Performs type-check and range check.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, enum_type): | ||||||
|  |     self._enum_type = enum_type | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not isinstance(proposed_value, numbers.Integral): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (int,))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     if int(proposed_value) not in self._enum_type.values_by_number: | ||||||
|  |       raise ValueError('Unknown enum value: %d' % proposed_value) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return self._enum_type.values[0].number | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnicodeValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for string fields. | ||||||
|  | 
 | ||||||
|  |   Always returns a unicode value, even if the input is of type str. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not isinstance(proposed_value, (bytes, str)): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (bytes, str))) | ||||||
|  |       raise TypeError(message) | ||||||
|  | 
 | ||||||
|  |     # If the value is of type 'bytes' make sure that it is valid UTF-8 data. | ||||||
|  |     if isinstance(proposed_value, bytes): | ||||||
|  |       try: | ||||||
|  |         proposed_value = proposed_value.decode('utf-8') | ||||||
|  |       except UnicodeDecodeError: | ||||||
|  |         raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' | ||||||
|  |                          'encoding. Non-UTF-8 strings must be converted to ' | ||||||
|  |                          'unicode objects before being added.' % | ||||||
|  |                          (proposed_value)) | ||||||
|  |     else: | ||||||
|  |       try: | ||||||
|  |         proposed_value.encode('utf8') | ||||||
|  |       except UnicodeEncodeError: | ||||||
|  |         raise ValueError('%.1024r isn\'t a valid unicode string and ' | ||||||
|  |                          'can\'t be encoded in UTF-8.'% | ||||||
|  |                          (proposed_value)) | ||||||
|  | 
 | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return u"" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Int32ValueChecker(IntValueChecker): | ||||||
|  |   # These literals fit in a machine word; on Python 2 they stayed plain ints | ||||||
|  |   # (rather than longs), which could make the range comparison cheaper. | ||||||
|  |   _MIN = -2147483648 | ||||||
|  |   _MAX = 2147483647 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Uint32ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = 0 | ||||||
|  |   _MAX = (1 << 32) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Int64ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = -(1 << 63) | ||||||
|  |   _MAX = (1 << 63) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Uint64ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = 0 | ||||||
|  |   _MAX = (1 << 64) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The largest finite 4-byte (single-precision) float is about 3.4028234663852886e+38. | ||||||
|  | _FLOAT_MAX = float.fromhex('0x1.fffffep+127') | ||||||
|  | _FLOAT_MIN = -_FLOAT_MAX | ||||||
|  | _INF = float('inf') | ||||||
|  | _NEG_INF = float('-inf') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DoubleValueChecker(object): | ||||||
|  |   """Checker used for double fields. | ||||||
|  | 
 | ||||||
|  |   Performs type-check and range check. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Check and convert proposed_value to float.""" | ||||||
|  |     if (not hasattr(proposed_value, '__float__') and | ||||||
|  |         not hasattr(proposed_value, '__index__')) or ( | ||||||
|  |             type(proposed_value).__module__ == 'numpy' and | ||||||
|  |             type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: int, float' % | ||||||
|  |                  (proposed_value, type(proposed_value))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return float(proposed_value) | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return 0.0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class FloatValueChecker(DoubleValueChecker): | ||||||
|  |   """Checker used for float fields. | ||||||
|  | 
 | ||||||
|  |   Performs type-check and range check. | ||||||
|  | 
 | ||||||
|  |   Values exceeding a 32-bit float will be converted to inf/-inf. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Check and convert proposed_value to float.""" | ||||||
|  |     converted_value = super().CheckValue(proposed_value) | ||||||
|  |     # This inf rounding matches the C++ proto SafeDoubleToFloat logic. | ||||||
|  |     if converted_value > _FLOAT_MAX: | ||||||
|  |       return _INF | ||||||
|  |     if converted_value < _FLOAT_MIN: | ||||||
|  |       return _NEG_INF | ||||||
|  | 
 | ||||||
|  |     return TruncateToFourByteFloat(converted_value) | ||||||
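|  | 
 | ||||||
|  | # Worked example (illustrative sketch): FloatValueChecker saturates values that | ||||||
|  | # overflow a 32-bit float and rounds everything else to single precision: | ||||||
|  | #   FloatValueChecker().CheckValue(3.5e38)   # -> inf (exceeds _FLOAT_MAX) | ||||||
|  | #   FloatValueChecker().CheckValue(-3.5e38)  # -> -inf | ||||||
|  | #   FloatValueChecker().CheckValue(0.1)      # -> 0.10000000149011612 | ||||||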
|  | 
 | ||||||
|  | # Type-checkers for all scalar CPPTYPEs. | ||||||
|  | _VALUE_CHECKERS = { | ||||||
|  |     _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), | ||||||
|  | } | ||||||
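|  | 
 | ||||||
|  | # Usage sketch (illustrative): each checker validates and normalizes a value | ||||||
|  | # before it is stored in a field, e.g. for an unsigned 32-bit field: | ||||||
|  | #   checker = _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_UINT32] | ||||||
|  | #   checker.CheckValue(7)     # -> 7 | ||||||
|  | #   checker.CheckValue(-1)    # raises ValueError (outside [0, 2**32 - 1]) | ||||||
|  | #   checker.CheckValue('7')   # raises TypeError | ||||||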
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Map from field type to a function F, such that F(field_num, value) | ||||||
|  | # gives the total byte size for a value of the given type.  This | ||||||
|  | # byte size includes tag information and any other additional space | ||||||
|  | # associated with serializing "value". | ||||||
|  | TYPE_TO_BYTE_SIZE_FN = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize | ||||||
|  |     } | ||||||
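|  | 
 | ||||||
|  | # Worked example (illustrative): each entry is called as F(field_number, value) | ||||||
|  | # and returns the full on-wire size including the tag.  For an int32 field | ||||||
|  | # with number 1 holding the value 150 (1-byte tag + 2-byte varint): | ||||||
|  | #   TYPE_TO_BYTE_SIZE_FN[_FieldDescriptor.TYPE_INT32](1, 150)  # -> 3 | ||||||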
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field types to encoder constructors. | ||||||
|  | TYPE_TO_ENCODER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field types to sizer constructors. | ||||||
|  | TYPE_TO_SIZER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: encoder.StringSizer, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field type to a decoder constructor. | ||||||
|  | TYPE_TO_DECODER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | # Maps from field type to expected wiretype. | ||||||
|  | FIELD_TYPE_TO_WIRE_TYPE = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     } | ||||||
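|  | 
 | ||||||
|  | # Usage sketch (illustrative): combined with wire_format.PackTag(), this map | ||||||
|  | # yields the tag for a field, e.g. a string field with number 2: | ||||||
|  | #   wire_type = FIELD_TYPE_TO_WIRE_TYPE[_FieldDescriptor.TYPE_STRING] | ||||||
|  | #   wire_format.PackTag(2, wire_type)  # -> 0x12 | ||||||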
878  lib/protobuf/internal/well_known_types.py  Normal file
							|  | @ -0,0 +1,878 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains well known classes. | ||||||
|  | 
 | ||||||
|  | This file defines well-known classes which need extra maintenance, including: | ||||||
|  |   - Any | ||||||
|  |   - Duration | ||||||
|  |   - FieldMask | ||||||
|  |   - Struct | ||||||
|  |   - Timestamp | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'jieluo@google.com (Jie Luo)' | ||||||
|  | 
 | ||||||
|  | import calendar | ||||||
|  | import collections.abc | ||||||
|  | import datetime | ||||||
|  | 
 | ||||||
|  | from google.protobuf.descriptor import FieldDescriptor | ||||||
|  | 
 | ||||||
|  | _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' | ||||||
|  | _NANOS_PER_SECOND = 1000000000 | ||||||
|  | _NANOS_PER_MILLISECOND = 1000000 | ||||||
|  | _NANOS_PER_MICROSECOND = 1000 | ||||||
|  | _MILLIS_PER_SECOND = 1000 | ||||||
|  | _MICROS_PER_SECOND = 1000000 | ||||||
|  | _SECONDS_PER_DAY = 24 * 3600 | ||||||
|  | _DURATION_SECONDS_MAX = 315576000000 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Any(object): | ||||||
|  |   """Class for Any Message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def Pack(self, msg, type_url_prefix='type.googleapis.com/', | ||||||
|  |            deterministic=None): | ||||||
|  |     """Packs the specified message into current Any message.""" | ||||||
|  |     if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': | ||||||
|  |       self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) | ||||||
|  |     else: | ||||||
|  |       self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) | ||||||
|  |     self.value = msg.SerializeToString(deterministic=deterministic) | ||||||
|  | 
 | ||||||
|  |   def Unpack(self, msg): | ||||||
|  |     """Unpacks the current Any message into specified message.""" | ||||||
|  |     descriptor = msg.DESCRIPTOR | ||||||
|  |     if not self.Is(descriptor): | ||||||
|  |       return False | ||||||
|  |     msg.ParseFromString(self.value) | ||||||
|  |     return True | ||||||
|  | 
 | ||||||
|  |   def TypeName(self): | ||||||
|  |     """Returns the protobuf type name of the inner message.""" | ||||||
|  |     # Only last part is to be used: b/25630112 | ||||||
|  |     return self.type_url.split('/')[-1] | ||||||
|  | 
 | ||||||
|  |   def Is(self, descriptor): | ||||||
|  |     """Checks if this Any represents the given protobuf type.""" | ||||||
|  |     return '/' in self.type_url and self.TypeName() == descriptor.full_name | ||||||
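|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated any_pb2 and duration_pb2 | ||||||
|  | # modules are available): | ||||||
|  | #   any_msg = any_pb2.Any() | ||||||
|  | #   any_msg.Pack(duration_pb2.Duration(seconds=1)) | ||||||
|  | #   any_msg.type_url   # -> 'type.googleapis.com/google.protobuf.Duration' | ||||||
|  | #   d = duration_pb2.Duration() | ||||||
|  | #   any_msg.Unpack(d)  # -> True, and d.seconds == 1 | ||||||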
|  | 
 | ||||||
|  | 
 | ||||||
|  | _EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) | ||||||
|  | _EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( | ||||||
|  |     0, tz=datetime.timezone.utc) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Timestamp(object): | ||||||
|  |   """Class for Timestamp message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts Timestamp to RFC 3339 date string format. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A string converted from timestamp. The string is always Z-normalized | ||||||
|  |       and uses 3, 6 or 9 fractional digits as required to represent the | ||||||
|  |       exact time. Example of the return format: '1972-01-01T10:00:20.021Z' | ||||||
|  |     """ | ||||||
|  |     nanos = self.nanos % _NANOS_PER_SECOND | ||||||
|  |     total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND | ||||||
|  |     seconds = total_sec % _SECONDS_PER_DAY | ||||||
|  |     days = (total_sec - seconds) // _SECONDS_PER_DAY | ||||||
|  |     dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) | ||||||
|  | 
 | ||||||
|  |     result = dt.isoformat() | ||||||
|  |     if (nanos % 1e9) == 0: | ||||||
|  |       # If there are 0 fractional digits, the fractional | ||||||
|  |       # point '.' should be omitted when serializing. | ||||||
|  |       return result + 'Z' | ||||||
|  |     if (nanos % 1e6) == 0: | ||||||
|  |       # Serialize 3 fractional digits. | ||||||
|  |       return result + '.%03dZ' % (nanos / 1e6) | ||||||
|  |     if (nanos % 1e3) == 0: | ||||||
|  |       # Serialize 6 fractional digits. | ||||||
|  |       return result + '.%06dZ' % (nanos / 1e3) | ||||||
|  |     # Serialize 9 fractional digits. | ||||||
|  |     return result + '.%09dZ' % nanos | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Parse a RFC 3339 date string format to Timestamp. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A date string. Any fractional digits (or none) and any offset are | ||||||
|  |           accepted as long as they fit into nanosecond precision. | ||||||
|  |           Example of accepted format: '1972-01-01T10:00:20.021-05:00' | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: On parsing problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) | ||||||
|  |     timezone_offset = value.find('Z') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       timezone_offset = value.find('+') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       timezone_offset = value.rfind('-') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Failed to parse timestamp: missing valid timezone offset.') | ||||||
|  |     time_value = value[0:timezone_offset] | ||||||
|  |     # Parse datetime and nanos. | ||||||
|  |     point_position = time_value.find('.') | ||||||
|  |     if point_position == -1: | ||||||
|  |       second_value = time_value | ||||||
|  |       nano_value = '' | ||||||
|  |     else: | ||||||
|  |       second_value = time_value[:point_position] | ||||||
|  |       nano_value = time_value[point_position + 1:] | ||||||
|  |     if 't' in second_value: | ||||||
|  |       raise ValueError( | ||||||
|  |           'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' | ||||||
|  |           'lowercase \'t\' is not accepted'.format(second_value)) | ||||||
|  |     date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) | ||||||
|  |     td = date_object - datetime.datetime(1970, 1, 1) | ||||||
|  |     seconds = td.seconds + td.days * _SECONDS_PER_DAY | ||||||
|  |     if len(nano_value) > 9: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Failed to parse Timestamp: nanos {0} more than ' | ||||||
|  |           '9 fractional digits.'.format(nano_value)) | ||||||
|  |     if nano_value: | ||||||
|  |       nanos = round(float('0.' + nano_value) * 1e9) | ||||||
|  |     else: | ||||||
|  |       nanos = 0 | ||||||
|  |     # Parse timezone offsets. | ||||||
|  |     if value[timezone_offset] == 'Z': | ||||||
|  |       if len(value) != timezone_offset + 1: | ||||||
|  |         raise ValueError('Failed to parse timestamp: invalid trailing' | ||||||
|  |                          ' data {0}.'.format(value)) | ||||||
|  |     else: | ||||||
|  |       timezone = value[timezone_offset:] | ||||||
|  |       pos = timezone.find(':') | ||||||
|  |       if pos == -1: | ||||||
|  |         raise ValueError( | ||||||
|  |             'Invalid timezone offset value: {0}.'.format(timezone)) | ||||||
|  |       if timezone[0] == '+': | ||||||
|  |         seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 | ||||||
|  |       else: | ||||||
|  |         seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 | ||||||
|  |     # Set seconds and nanos | ||||||
|  |     self.seconds = int(seconds) | ||||||
|  |     self.nanos = int(nanos) | ||||||
|  | 
 | ||||||
|  |   def GetCurrentTime(self): | ||||||
|  |     """Get the current UTC into Timestamp.""" | ||||||
|  |     self.FromDatetime(datetime.datetime.utcnow()) | ||||||
|  | 
 | ||||||
|  |   def ToNanoseconds(self): | ||||||
|  |     """Converts Timestamp to nanoseconds since epoch.""" | ||||||
|  |     return self.seconds * _NANOS_PER_SECOND + self.nanos | ||||||
|  | 
 | ||||||
|  |   def ToMicroseconds(self): | ||||||
|  |     """Converts Timestamp to microseconds since epoch.""" | ||||||
|  |     return (self.seconds * _MICROS_PER_SECOND + | ||||||
|  |             self.nanos // _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def ToMilliseconds(self): | ||||||
|  |     """Converts Timestamp to milliseconds since epoch.""" | ||||||
|  |     return (self.seconds * _MILLIS_PER_SECOND + | ||||||
|  |             self.nanos // _NANOS_PER_MILLISECOND) | ||||||
|  | 
 | ||||||
|  |   def ToSeconds(self): | ||||||
|  |     """Converts Timestamp to seconds since epoch.""" | ||||||
|  |     return self.seconds | ||||||
|  | 
 | ||||||
|  |   def FromNanoseconds(self, nanos): | ||||||
|  |     """Converts nanoseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = nanos // _NANOS_PER_SECOND | ||||||
|  |     self.nanos = nanos % _NANOS_PER_SECOND | ||||||
|  | 
 | ||||||
|  |   def FromMicroseconds(self, micros): | ||||||
|  |     """Converts microseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = micros // _MICROS_PER_SECOND | ||||||
|  |     self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND | ||||||
|  | 
 | ||||||
|  |   def FromMilliseconds(self, millis): | ||||||
|  |     """Converts milliseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = millis // _MILLIS_PER_SECOND | ||||||
|  |     self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND | ||||||
|  | 
 | ||||||
|  |   def FromSeconds(self, seconds): | ||||||
|  |     """Converts seconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = 0 | ||||||
|  | 
 | ||||||
|  |   def ToDatetime(self, tzinfo=None): | ||||||
|  |     """Converts Timestamp to a datetime. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       tzinfo: A datetime.tzinfo subclass; defaults to None. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone | ||||||
|  |       information, i.e. not aware that it's UTC). | ||||||
|  | 
 | ||||||
|  |       Otherwise, returns a timezone-aware datetime in the input timezone. | ||||||
|  |     """ | ||||||
|  |     delta = datetime.timedelta( | ||||||
|  |         seconds=self.seconds, | ||||||
|  |         microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) | ||||||
|  |     if tzinfo is None: | ||||||
|  |       return _EPOCH_DATETIME_NAIVE + delta | ||||||
|  |     else: | ||||||
|  |       return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta | ||||||
|  | 
 | ||||||
|  |   def FromDatetime(self, dt): | ||||||
|  |     """Converts datetime to Timestamp. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. | ||||||
|  |     """ | ||||||
|  |     # Using this guide: http://wiki.python.org/moin/WorkingWithTime | ||||||
|  |     # And this conversion guide: http://docs.python.org/library/time.html | ||||||
|  | 
 | ||||||
|  |     # Turn the date parameter into a tuple (struct_time) that can then be | ||||||
|  |     # converted to a count of seconds.  During that conversion the source date | ||||||
|  |     # is treated as UTC, so the correct transformation is calendar.timegm(). | ||||||
|  |     self.seconds = calendar.timegm(dt.utctimetuple()) | ||||||
|  |     self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND | ||||||
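|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated timestamp_pb2 module): | ||||||
|  | #   ts = timestamp_pb2.Timestamp() | ||||||
|  | #   ts.FromJsonString('1972-01-01T10:00:20.021-05:00') | ||||||
|  | #   (ts.seconds, ts.nanos)  # -> (63126020, 21000000) | ||||||
|  | #   ts.ToJsonString()       # -> '1972-01-01T15:00:20.021Z' | ||||||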
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Duration(object): | ||||||
|  |   """Class for Duration message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts Duration to string format. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A string converted from self. The string will contain 3, 6, or 9 | ||||||
|  |       fractional digits, depending on the precision required to | ||||||
|  |       represent the exact Duration value. For example: "1s", "1.010s", | ||||||
|  |       "1.000000100s", "-3.100s" | ||||||
|  |     """ | ||||||
|  |     _CheckDurationValid(self.seconds, self.nanos) | ||||||
|  |     if self.seconds < 0 or self.nanos < 0: | ||||||
|  |       result = '-' | ||||||
|  |       seconds = - self.seconds + int((0 - self.nanos) // 1e9) | ||||||
|  |       nanos = (0 - self.nanos) % 1e9 | ||||||
|  |     else: | ||||||
|  |       result = '' | ||||||
|  |       seconds = self.seconds + int(self.nanos // 1e9) | ||||||
|  |       nanos = self.nanos % 1e9 | ||||||
|  |     result += '%d' % seconds | ||||||
|  |     if (nanos % 1e9) == 0: | ||||||
|  |       # If there are 0 fractional digits, the fractional | ||||||
|  |       # point '.' should be omitted when serializing. | ||||||
|  |       return result + 's' | ||||||
|  |     if (nanos % 1e6) == 0: | ||||||
|  |       # Serialize 3 fractional digits. | ||||||
|  |       return result + '.%03ds' % (nanos / 1e6) | ||||||
|  |     if (nanos % 1e3) == 0: | ||||||
|  |       # Serialize 6 fractional digits. | ||||||
|  |       return result + '.%06ds' % (nanos / 1e3) | ||||||
|  |     # Serialize 9 fractional digits. | ||||||
|  |     return result + '.%09ds' % nanos | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Converts a string to Duration. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A string to be converted. The string must end with 's'. Any | ||||||
|  |           fractional digits (or none) are accepted as long as they fit into | ||||||
|  |           nanosecond precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s". | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: On parsing problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('Duration JSON value not a string: {!r}'.format(value)) | ||||||
|  |     if len(value) < 1 or value[-1] != 's': | ||||||
|  |       raise ValueError( | ||||||
|  |           'Duration must end with letter "s": {0}.'.format(value)) | ||||||
|  |     try: | ||||||
|  |       pos = value.find('.') | ||||||
|  |       if pos == -1: | ||||||
|  |         seconds = int(value[:-1]) | ||||||
|  |         nanos = 0 | ||||||
|  |       else: | ||||||
|  |         seconds = int(value[:pos]) | ||||||
|  |         if value[0] == '-': | ||||||
|  |           nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9)) | ||||||
|  |         else: | ||||||
|  |           nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9)) | ||||||
|  |       _CheckDurationValid(seconds, nanos) | ||||||
|  |       self.seconds = seconds | ||||||
|  |       self.nanos = nanos | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) | ||||||
|  | 
 | ||||||
|  |   def ToNanoseconds(self): | ||||||
|  |     """Converts a Duration to nanoseconds.""" | ||||||
|  |     return self.seconds * _NANOS_PER_SECOND + self.nanos | ||||||
|  | 
 | ||||||
|  |   def ToMicroseconds(self): | ||||||
|  |     """Converts a Duration to microseconds.""" | ||||||
|  |     micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) | ||||||
|  |     return self.seconds * _MICROS_PER_SECOND + micros | ||||||
|  | 
 | ||||||
|  |   def ToMilliseconds(self): | ||||||
|  |     """Converts a Duration to milliseconds.""" | ||||||
|  |     millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) | ||||||
|  |     return self.seconds * _MILLIS_PER_SECOND + millis | ||||||
|  | 
 | ||||||
|  |   def ToSeconds(self): | ||||||
|  |     """Converts a Duration to seconds.""" | ||||||
|  |     return self.seconds | ||||||
|  | 
 | ||||||
|  |   def FromNanoseconds(self, nanos): | ||||||
|  |     """Converts nanoseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration(nanos // _NANOS_PER_SECOND, | ||||||
|  |                             nanos % _NANOS_PER_SECOND) | ||||||
|  | 
 | ||||||
|  |   def FromMicroseconds(self, micros): | ||||||
|  |     """Converts microseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration( | ||||||
|  |         micros // _MICROS_PER_SECOND, | ||||||
|  |         (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def FromMilliseconds(self, millis): | ||||||
|  |     """Converts milliseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration( | ||||||
|  |         millis // _MILLIS_PER_SECOND, | ||||||
|  |         (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) | ||||||
|  | 
 | ||||||
|  |   def FromSeconds(self, seconds): | ||||||
|  |     """Converts seconds to Duration.""" | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = 0 | ||||||
|  | 
 | ||||||
|  |   def ToTimedelta(self): | ||||||
|  |     """Converts Duration to timedelta.""" | ||||||
|  |     return datetime.timedelta( | ||||||
|  |         seconds=self.seconds, microseconds=_RoundTowardZero( | ||||||
|  |             self.nanos, _NANOS_PER_MICROSECOND)) | ||||||
|  | 
 | ||||||
|  |   def FromTimedelta(self, td): | ||||||
|  |     """Converts timedelta to Duration.""" | ||||||
|  |     self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, | ||||||
|  |                             td.microseconds * _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def _NormalizeDuration(self, seconds, nanos): | ||||||
|  |     """Set Duration by seconds and nanos.""" | ||||||
|  |     # Force nanos to be negative if the duration is negative. | ||||||
|  |     if seconds < 0 and nanos > 0: | ||||||
|  |       seconds += 1 | ||||||
|  |       nanos -= _NANOS_PER_SECOND | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = nanos | ||||||
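|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated duration_pb2 module): | ||||||
|  | #   d = duration_pb2.Duration() | ||||||
|  | #   d.FromTimedelta(datetime.timedelta(seconds=1, milliseconds=10)) | ||||||
|  | #   d.ToJsonString()      # -> '1.010s' | ||||||
|  | #   d.FromJsonString('-3.100s') | ||||||
|  | #   (d.seconds, d.nanos)  # -> (-3, -100000000) | ||||||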
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CheckDurationValid(seconds, nanos): | ||||||
|  |   if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Seconds {0} must be in range ' | ||||||
|  |         '[-315576000000, 315576000000].'.format(seconds)) | ||||||
|  |   if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Nanos {0} must be in range ' | ||||||
|  |         '[-999999999, 999999999].'.format(nanos)) | ||||||
|  |   if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Sign mismatch.') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _RoundTowardZero(value, divider): | ||||||
|  |   """Truncates the remainder part after division.""" | ||||||
|  |   # For some languages, the sign of the remainder is implementation | ||||||
|  |   # dependent if any of the operands is negative. Here we enforce | ||||||
|  |   # "rounded toward zero" semantics. For example, for (-5) / 2 an | ||||||
|  |   # implementation may give -3 as the result with the remainder being | ||||||
|  |   # 1. This function ensures we always return -2 (closer to zero). | ||||||
|  |   result = value // divider | ||||||
|  |   remainder = value % divider | ||||||
|  |   if result < 0 and remainder > 0: | ||||||
|  |     return result + 1 | ||||||
|  |   else: | ||||||
|  |     return result | ||||||
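|  | 
 | ||||||
|  | # Worked example (illustrative): | ||||||
|  | #   _RoundTowardZero(-5, 2)  # -> -2 (plain floor division would give -3) | ||||||
|  | #   _RoundTowardZero(5, 2)   # -> 2 | ||||||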
|  | 
 | ||||||
|  | 
 | ||||||
|  | class FieldMask(object): | ||||||
|  |   """Class for FieldMask message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts FieldMask to string according to proto3 JSON spec.""" | ||||||
|  |     camelcase_paths = [] | ||||||
|  |     for path in self.paths: | ||||||
|  |       camelcase_paths.append(_SnakeCaseToCamelCase(path)) | ||||||
|  |     return ','.join(camelcase_paths) | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Converts string to FieldMask according to proto3 JSON spec.""" | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) | ||||||
|  |     self.Clear() | ||||||
|  |     if value: | ||||||
|  |       for path in value.split(','): | ||||||
|  |         self.paths.append(_CamelCaseToSnakeCase(path)) | ||||||
|  | 
 | ||||||
|  |   def IsValidForDescriptor(self, message_descriptor): | ||||||
|  |     """Checks whether the FieldMask is valid for Message Descriptor.""" | ||||||
|  |     for path in self.paths: | ||||||
|  |       if not _IsValidPath(message_descriptor, path): | ||||||
|  |         return False | ||||||
|  |     return True | ||||||
|  | 
 | ||||||
|  |   def AllFieldsFromDescriptor(self, message_descriptor): | ||||||
|  |     """Gets all direct fields of Message Descriptor to FieldMask.""" | ||||||
|  |     self.Clear() | ||||||
|  |     for field in message_descriptor.fields: | ||||||
|  |       self.paths.append(field.name) | ||||||
|  | 
 | ||||||
|  |   def CanonicalFormFromMask(self, mask): | ||||||
|  |     """Converts a FieldMask to the canonical form. | ||||||
|  | 
 | ||||||
|  |     Removes paths that are covered by another path. For example, | ||||||
|  |     "foo.bar" is covered by "foo" and will be removed if "foo" | ||||||
|  |     is also in the FieldMask. Then sorts all paths in alphabetical order. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       mask: The original FieldMask to be converted. | ||||||
|  |     """ | ||||||
|  |     tree = _FieldMaskTree(mask) | ||||||
|  |     tree.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def Union(self, mask1, mask2): | ||||||
|  |     """Merges mask1 and mask2 into this FieldMask.""" | ||||||
|  |     _CheckFieldMaskMessage(mask1) | ||||||
|  |     _CheckFieldMaskMessage(mask2) | ||||||
|  |     tree = _FieldMaskTree(mask1) | ||||||
|  |     tree.MergeFromFieldMask(mask2) | ||||||
|  |     tree.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def Intersect(self, mask1, mask2): | ||||||
|  |     """Intersects mask1 and mask2 into this FieldMask.""" | ||||||
|  |     _CheckFieldMaskMessage(mask1) | ||||||
|  |     _CheckFieldMaskMessage(mask2) | ||||||
|  |     tree = _FieldMaskTree(mask1) | ||||||
|  |     intersection = _FieldMaskTree() | ||||||
|  |     for path in mask2.paths: | ||||||
|  |       tree.IntersectPath(path, intersection) | ||||||
|  |     intersection.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def MergeMessage( | ||||||
|  |       self, source, destination, | ||||||
|  |       replace_message_field=False, replace_repeated_field=False): | ||||||
|  |     """Merges fields specified in FieldMask from source to destination. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       source: Source message. | ||||||
|  |       destination: The destination message to be merged into. | ||||||
|  |       replace_message_field: Replace message field if True. Merge message | ||||||
|  |           field if False. | ||||||
|  |       replace_repeated_field: Replace repeated field if True. Append | ||||||
|  |           elements of repeated field if False. | ||||||
|  |     """ | ||||||
|  |     tree = _FieldMaskTree(self) | ||||||
|  |     tree.MergeMessage( | ||||||
|  |         source, destination, replace_message_field, replace_repeated_field) | ||||||
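|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated field_mask_pb2 module): | ||||||
|  | #   mask = field_mask_pb2.FieldMask() | ||||||
|  | #   mask.FromJsonString('fooBar,barBaz.subField') | ||||||
|  | #   list(mask.paths)     # -> ['foo_bar', 'bar_baz.sub_field'] | ||||||
|  | #   mask.ToJsonString()  # -> 'fooBar,barBaz.subField' | ||||||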
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _IsValidPath(message_descriptor, path): | ||||||
|  |   """Checks whether the path is valid for Message Descriptor.""" | ||||||
|  |   parts = path.split('.') | ||||||
|  |   last = parts.pop() | ||||||
|  |   for name in parts: | ||||||
|  |     field = message_descriptor.fields_by_name.get(name) | ||||||
|  |     if (field is None or | ||||||
|  |         field.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |         field.type != FieldDescriptor.TYPE_MESSAGE): | ||||||
|  |       return False | ||||||
|  |     message_descriptor = field.message_type | ||||||
|  |   return last in message_descriptor.fields_by_name | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CheckFieldMaskMessage(message): | ||||||
|  |   """Raises ValueError if message is not a FieldMask.""" | ||||||
|  |   message_descriptor = message.DESCRIPTOR | ||||||
|  |   if (message_descriptor.name != 'FieldMask' or | ||||||
|  |       message_descriptor.file.name != 'google/protobuf/field_mask.proto'): | ||||||
|  |     raise ValueError('Message {0} is not a FieldMask.'.format( | ||||||
|  |         message_descriptor.full_name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SnakeCaseToCamelCase(path_name): | ||||||
|  |   """Converts a path name from snake_case to camelCase.""" | ||||||
|  |   result = [] | ||||||
|  |   after_underscore = False | ||||||
|  |   for c in path_name: | ||||||
|  |     if c.isupper(): | ||||||
|  |       raise ValueError( | ||||||
|  |           'Fail to print FieldMask to Json string: Path name ' | ||||||
|  |           '{0} must not contain uppercase letters.'.format(path_name)) | ||||||
|  |     if after_underscore: | ||||||
|  |       if c.islower(): | ||||||
|  |         result.append(c.upper()) | ||||||
|  |         after_underscore = False | ||||||
|  |       else: | ||||||
|  |         raise ValueError( | ||||||
|  |             'Fail to print FieldMask to Json string: The ' | ||||||
|  |             'character after a "_" must be a lowercase letter ' | ||||||
|  |             'in path name {0}.'.format(path_name)) | ||||||
|  |     elif c == '_': | ||||||
|  |       after_underscore = True | ||||||
|  |     else: | ||||||
|  |       result += c | ||||||
|  | 
 | ||||||
|  |   if after_underscore: | ||||||
|  |     raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' | ||||||
|  |                      'in path name {0}.'.format(path_name)) | ||||||
|  |   return ''.join(result) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CamelCaseToSnakeCase(path_name): | ||||||
|  |   """Converts a field name from camelCase to snake_case.""" | ||||||
|  |   result = [] | ||||||
|  |   for c in path_name: | ||||||
|  |     if c == '_': | ||||||
|  |       raise ValueError('Fail to parse FieldMask: Path name ' | ||||||
|  |                        '{0} must not contain "_"s.'.format(path_name)) | ||||||
|  |     if c.isupper(): | ||||||
|  |       result += '_' | ||||||
|  |       result += c.lower() | ||||||
|  |     else: | ||||||
|  |       result += c | ||||||
|  |   return ''.join(result) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _FieldMaskTree(object): | ||||||
|  |   """Represents a FieldMask in a tree structure. | ||||||
|  | 
 | ||||||
|  |   For example, given a FieldMask "foo.bar,foo.baz,bar.baz", | ||||||
|  |   the FieldMaskTree will be: | ||||||
|  |       [_root] -+- foo -+- bar | ||||||
|  |                |       | | ||||||
|  |                |       +- baz | ||||||
|  |                | | ||||||
|  |                +- bar --- baz | ||||||
|  |   In the tree, each leaf node represents a field path. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   __slots__ = ('_root',) | ||||||
|  | 
 | ||||||
|  |   def __init__(self, field_mask=None): | ||||||
|  |     """Initializes the tree by FieldMask.""" | ||||||
|  |     self._root = {} | ||||||
|  |     if field_mask: | ||||||
|  |       self.MergeFromFieldMask(field_mask) | ||||||
|  | 
 | ||||||
|  |   def MergeFromFieldMask(self, field_mask): | ||||||
|  |     """Merges a FieldMask to the tree.""" | ||||||
|  |     for path in field_mask.paths: | ||||||
|  |       self.AddPath(path) | ||||||
|  | 
 | ||||||
|  |   def AddPath(self, path): | ||||||
|  |     """Adds a field path into the tree. | ||||||
|  | 
 | ||||||
|  |     If the field path to add is a sub-path of an existing field path | ||||||
|  |     in the tree (i.e., a leaf node), it means the tree already matches | ||||||
|  |     the given path so nothing will be added to the tree. If the path | ||||||
|  |     matches an existing non-leaf node in the tree, that non-leaf node | ||||||
|  |     will be turned into a leaf node with all its children removed because | ||||||
|  |     the path matches all the node's children. Otherwise, a new path will | ||||||
|  |     be added. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       path: The field path to add. | ||||||
|  |     """ | ||||||
|  |     node = self._root | ||||||
|  |     for name in path.split('.'): | ||||||
|  |       if name not in node: | ||||||
|  |         node[name] = {} | ||||||
|  |       elif not node[name]: | ||||||
|  |         # Pre-existing empty node implies we already have this entire tree. | ||||||
|  |         return | ||||||
|  |       node = node[name] | ||||||
|  |     # Remove any sub-trees we might have had. | ||||||
|  |     node.clear() | ||||||
|  | 
 | ||||||
|  |   def ToFieldMask(self, field_mask): | ||||||
|  |     """Converts the tree to a FieldMask.""" | ||||||
|  |     field_mask.Clear() | ||||||
|  |     _AddFieldPaths(self._root, '', field_mask) | ||||||
|  | 
 | ||||||
|  |   def IntersectPath(self, path, intersection): | ||||||
|  |     """Calculates the intersection part of a field path with this tree. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       path: The field path to calculate the intersection for. | ||||||
|  |       intersection: The output tree in which the intersection part is recorded. | ||||||
|  |     """ | ||||||
|  |     node = self._root | ||||||
|  |     for name in path.split('.'): | ||||||
|  |       if name not in node: | ||||||
|  |         return | ||||||
|  |       elif not node[name]: | ||||||
|  |         intersection.AddPath(path) | ||||||
|  |         return | ||||||
|  |       node = node[name] | ||||||
|  |     intersection.AddLeafNodes(path, node) | ||||||
|  | 
 | ||||||
|  |   def AddLeafNodes(self, prefix, node): | ||||||
|  |     """Adds leaf nodes begin with prefix to this tree.""" | ||||||
|  |     if not node: | ||||||
|  |       self.AddPath(prefix) | ||||||
|  |     for name in node: | ||||||
|  |       child_path = prefix + '.' + name | ||||||
|  |       self.AddLeafNodes(child_path, node[name]) | ||||||
|  | 
 | ||||||
|  |   def MergeMessage( | ||||||
|  |       self, source, destination, | ||||||
|  |       replace_message, replace_repeated): | ||||||
|  |     """Merge all fields specified by this tree from source to destination.""" | ||||||
|  |     _MergeMessage( | ||||||
|  |         self._root, source, destination, replace_message, replace_repeated) | ||||||
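|  | 
 | ||||||
|  | # Worked example (illustrative): adding a path that covers an existing subtree | ||||||
|  | # collapses that subtree into a single leaf: | ||||||
|  | #   tree = _FieldMaskTree() | ||||||
|  | #   tree.AddPath('foo.bar') | ||||||
|  | #   tree.AddPath('foo') | ||||||
|  | #   tree._root  # -> {'foo': {}} | ||||||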
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _StrConvert(value): | ||||||
|  |   """Converts value to str if it is not.""" | ||||||
|  |   # This file is imported by the C extension, and some methods such as | ||||||
|  |   # ClearField require a native string for the field name.  Python 2 and | ||||||
|  |   # Python 3 have different text types, so the value may arrive as unicode. | ||||||
|  |   if not isinstance(value, str): | ||||||
|  |     return value.encode('utf-8') | ||||||
|  |   return value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _MergeMessage( | ||||||
|  |     node, source, destination, replace_message, replace_repeated): | ||||||
|  |   """Merge all fields specified by a sub-tree from source to destination.""" | ||||||
|  |   source_descriptor = source.DESCRIPTOR | ||||||
|  |   for name in node: | ||||||
|  |     child = node[name] | ||||||
|  |     field = source_descriptor.fields_by_name[name] | ||||||
|  |     if field is None: | ||||||
|  |       raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( | ||||||
|  |           name, source_descriptor.full_name)) | ||||||
|  |     if child: | ||||||
|  |       # Sub-paths are only allowed for singular message fields. | ||||||
|  |       if (field.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |           field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): | ||||||
|  |         raise ValueError('Error: Field {0} in message {1} is not a singular ' | ||||||
|  |                          'message field and cannot have sub-fields.'.format( | ||||||
|  |                              name, source_descriptor.full_name)) | ||||||
|  |       if source.HasField(name): | ||||||
|  |         _MergeMessage( | ||||||
|  |             child, getattr(source, name), getattr(destination, name), | ||||||
|  |             replace_message, replace_repeated) | ||||||
|  |       continue | ||||||
|  |     if field.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       if replace_repeated: | ||||||
|  |         destination.ClearField(_StrConvert(name)) | ||||||
|  |       repeated_source = getattr(source, name) | ||||||
|  |       repeated_destination = getattr(destination, name) | ||||||
|  |       repeated_destination.MergeFrom(repeated_source) | ||||||
|  |     else: | ||||||
|  |       if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |         if replace_message: | ||||||
|  |           destination.ClearField(_StrConvert(name)) | ||||||
|  |         if source.HasField(name): | ||||||
|  |           getattr(destination, name).MergeFrom(getattr(source, name)) | ||||||
|  |       else: | ||||||
|  |         setattr(destination, name, getattr(source, name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _AddFieldPaths(node, prefix, field_mask): | ||||||
|  |   """Adds the field paths descended from node to field_mask.""" | ||||||
|  |   if not node and prefix: | ||||||
|  |     field_mask.paths.append(prefix) | ||||||
|  |     return | ||||||
|  |   for name in sorted(node): | ||||||
|  |     if prefix: | ||||||
|  |       child_path = prefix + '.' + name | ||||||
|  |     else: | ||||||
|  |       child_path = name | ||||||
|  |     _AddFieldPaths(node[name], child_path, field_mask) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SetStructValue(struct_value, value): | ||||||
|  |   if value is None: | ||||||
|  |     struct_value.null_value = 0 | ||||||
|  |   elif isinstance(value, bool): | ||||||
|  |     # Note: this check must come before the number check because in Python | ||||||
|  |     # True and False are also considered numbers. | ||||||
|  |     struct_value.bool_value = value | ||||||
|  |   elif isinstance(value, str): | ||||||
|  |     struct_value.string_value = value | ||||||
|  |   elif isinstance(value, (int, float)): | ||||||
|  |     struct_value.number_value = value | ||||||
|  |   elif isinstance(value, (dict, Struct)): | ||||||
|  |     struct_value.struct_value.Clear() | ||||||
|  |     struct_value.struct_value.update(value) | ||||||
|  |   elif isinstance(value, (list, ListValue)): | ||||||
|  |     struct_value.list_value.Clear() | ||||||
|  |     struct_value.list_value.extend(value) | ||||||
|  |   else: | ||||||
|  |     raise ValueError('Unexpected type') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _GetStructValue(struct_value): | ||||||
|  |   which = struct_value.WhichOneof('kind') | ||||||
|  |   if which == 'struct_value': | ||||||
|  |     return struct_value.struct_value | ||||||
|  |   elif which == 'null_value': | ||||||
|  |     return None | ||||||
|  |   elif which == 'number_value': | ||||||
|  |     return struct_value.number_value | ||||||
|  |   elif which == 'string_value': | ||||||
|  |     return struct_value.string_value | ||||||
|  |   elif which == 'bool_value': | ||||||
|  |     return struct_value.bool_value | ||||||
|  |   elif which == 'list_value': | ||||||
|  |     return struct_value.list_value | ||||||
|  |   elif which is None: | ||||||
|  |     raise ValueError('Value not set') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Struct(object): | ||||||
|  |   """Class for Struct message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key): | ||||||
|  |     return _GetStructValue(self.fields[key]) | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item): | ||||||
|  |     return item in self.fields | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value): | ||||||
|  |     _SetStructValue(self.fields[key], value) | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key): | ||||||
|  |     del self.fields[key] | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     return len(self.fields) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     return iter(self.fields) | ||||||
|  | 
 | ||||||
|  |   def keys(self):  # pylint: disable=invalid-name | ||||||
|  |     return self.fields.keys() | ||||||
|  | 
 | ||||||
|  |   def values(self):  # pylint: disable=invalid-name | ||||||
|  |     return [self[key] for key in self] | ||||||
|  | 
 | ||||||
|  |   def items(self):  # pylint: disable=invalid-name | ||||||
|  |     return [(key, self[key]) for key in self] | ||||||
|  | 
 | ||||||
|  |   def get_or_create_list(self, key): | ||||||
|  |     """Returns a list for this key, creating if it didn't exist already.""" | ||||||
|  |     if not self.fields[key].HasField('list_value'): | ||||||
|  |       # Calling Clear marks list_value as modified, which creates the list if needed. | ||||||
|  |       self.fields[key].list_value.Clear() | ||||||
|  |     return self.fields[key].list_value | ||||||
|  | 
 | ||||||
|  |   def get_or_create_struct(self, key): | ||||||
|  |     """Returns a struct for this key, creating if it didn't exist already.""" | ||||||
|  |     if not self.fields[key].HasField('struct_value'): | ||||||
|  |       # Calling Clear marks struct_value as modified, which creates the struct if needed. | ||||||
|  |       self.fields[key].struct_value.Clear() | ||||||
|  |     return self.fields[key].struct_value | ||||||
|  | 
 | ||||||
|  |   def update(self, dictionary):  # pylint: disable=invalid-name | ||||||
|  |     for key, value in dictionary.items(): | ||||||
|  |       _SetStructValue(self.fields[key], value) | ||||||
|  | 
 | ||||||
|  | collections.abc.MutableMapping.register(Struct) | ||||||
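|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated struct_pb2 module): | ||||||
|  | #   s = struct_pb2.Struct() | ||||||
|  | #   s['name'] = 'Jim' | ||||||
|  | #   s.update({'score': 5, 'ok': True}) | ||||||
|  | #   s['score']       # -> 5.0 (numbers are stored as doubles) | ||||||
|  | #   dict(s.items())  # -> {'name': 'Jim', 'score': 5.0, 'ok': True} | ||||||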
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ListValue(object): | ||||||
|  |   """Class for ListValue message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     return len(self.values) | ||||||
|  | 
 | ||||||
|  |   def append(self, value): | ||||||
|  |     _SetStructValue(self.values.add(), value) | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq): | ||||||
|  |     for value in elem_seq: | ||||||
|  |       self.append(value) | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, index): | ||||||
|  |     """Retrieves item by the specified index.""" | ||||||
|  |     return _GetStructValue(self.values.__getitem__(index)) | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, index, value): | ||||||
|  |     _SetStructValue(self.values.__getitem__(index), value) | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key): | ||||||
|  |     del self.values[key] | ||||||
|  | 
 | ||||||
|  |   def items(self): | ||||||
|  |     for i in range(len(self)): | ||||||
|  |       yield self[i] | ||||||
|  | 
 | ||||||
|  |   def add_struct(self): | ||||||
|  |     """Appends and returns a struct value as the next value in the list.""" | ||||||
|  |     struct_value = self.values.add().struct_value | ||||||
|  |     # Calling Clear marks struct_value as modified, which creates the struct if needed. | ||||||
|  |     struct_value.Clear() | ||||||
|  |     return struct_value | ||||||
|  | 
 | ||||||
|  |   def add_list(self): | ||||||
|  |     """Appends and returns a list value as the next value in the list.""" | ||||||
|  |     list_value = self.values.add().list_value | ||||||
|  |     # Calling Clear marks list_value as modified, which creates the list if needed. | ||||||
|  |     list_value.Clear() | ||||||
|  |     return list_value | ||||||
|  | 
 | ||||||
|  | collections.abc.MutableSequence.register(ListValue) | ||||||
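|  | 
 | ||||||
|  | # Usage sketch (illustrative, assuming the generated struct_pb2 module): | ||||||
|  | #   lv = struct_pb2.ListValue() | ||||||
|  | #   lv.extend([1, 'a', None]) | ||||||
|  | #   [lv[i] for i in range(len(lv))]  # -> [1.0, 'a', None] | ||||||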
|  | 
 | ||||||
|  | 
 | ||||||
|  | WKTBASES = { | ||||||
|  |     'google.protobuf.Any': Any, | ||||||
|  |     'google.protobuf.Duration': Duration, | ||||||
|  |     'google.protobuf.FieldMask': FieldMask, | ||||||
|  |     'google.protobuf.ListValue': ListValue, | ||||||
|  |     'google.protobuf.Struct': Struct, | ||||||
|  |     'google.protobuf.Timestamp': Timestamp, | ||||||
|  | } | ||||||
268  lib/protobuf/internal/wire_format.py  Normal file
							|  | @ -0,0 +1,268 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Constants and static functions to support protocol buffer wire format.""" | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | import struct | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag. | ||||||
|  | TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7 | ||||||
|  | 
 | ||||||
|  | # These numbers identify the wire type of a protocol buffer value. | ||||||
|  | # We use the least-significant TAG_TYPE_BITS bits of the varint-encoded | ||||||
|  | # tag-and-type to store one of these WIRETYPE_* constants. | ||||||
|  | # These values must match WireType enum in google/protobuf/wire_format.h. | ||||||
|  | WIRETYPE_VARINT = 0 | ||||||
|  | WIRETYPE_FIXED64 = 1 | ||||||
|  | WIRETYPE_LENGTH_DELIMITED = 2 | ||||||
|  | WIRETYPE_START_GROUP = 3 | ||||||
|  | WIRETYPE_END_GROUP = 4 | ||||||
|  | WIRETYPE_FIXED32 = 5 | ||||||
|  | _WIRETYPE_MAX = 5 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Bounds for various integer types. | ||||||
|  | INT32_MAX = int((1 << 31) - 1) | ||||||
|  | INT32_MIN = int(-(1 << 31)) | ||||||
|  | UINT32_MAX = (1 << 32) - 1 | ||||||
|  | 
 | ||||||
|  | INT64_MAX = (1 << 63) - 1 | ||||||
|  | INT64_MIN = -(1 << 63) | ||||||
|  | UINT64_MAX = (1 << 64) - 1 | ||||||
|  | 
 | ||||||
|  | # "struct" format strings that will encode/decode the specified formats. | ||||||
|  | FORMAT_UINT32_LITTLE_ENDIAN = '<I' | ||||||
|  | FORMAT_UINT64_LITTLE_ENDIAN = '<Q' | ||||||
|  | FORMAT_FLOAT_LITTLE_ENDIAN = '<f' | ||||||
|  | FORMAT_DOUBLE_LITTLE_ENDIAN = '<d' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # We'll have to provide alternate implementations of AppendLittleEndian*() on | ||||||
|  | # any architectures where these checks fail. | ||||||
|  | if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4: | ||||||
|  |   raise AssertionError('Format "I" is not a 32-bit number.') | ||||||
|  | if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8: | ||||||
|  |   raise AssertionError('Format "Q" is not a 64-bit number.') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def PackTag(field_number, wire_type): | ||||||
|  |   """Returns an unsigned 32-bit integer that encodes the field number and | ||||||
|  |   wire type information in standard protocol message wire format. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field_number: Expected to be an integer in the range [1, 1 << 29) | ||||||
|  |     wire_type: One of the WIRETYPE_* constants. | ||||||
|  |   """ | ||||||
|  |   if not 0 <= wire_type <= _WIRETYPE_MAX: | ||||||
|  |     raise message.EncodeError('Unknown wire type: %d' % wire_type) | ||||||
|  |   return (field_number << TAG_TYPE_BITS) | wire_type | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UnpackTag(tag): | ||||||
|  |   """The inverse of PackTag().  Given an unsigned 32-bit number, | ||||||
|  |   returns a (field_number, wire_type) tuple. | ||||||
|  |   """ | ||||||
|  |   return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) | ||||||
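|  | # For example, PackTag(1, WIRETYPE_VARINT) == 8 and UnpackTag(8) == (1, 0): | ||||||
|  | # the low TAG_TYPE_BITS bits hold the wire type, the rest the field number. | ||||||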
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ZigZagEncode(value): | ||||||
|  |   """ZigZag Transform:  Encodes signed integers so that they can be | ||||||
|  |   effectively used with varint encoding.  See wire_format.h for | ||||||
|  |   more details. | ||||||
|  |   """ | ||||||
|  |   if value >= 0: | ||||||
|  |     return value << 1 | ||||||
|  |   return (value << 1) ^ (~0) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ZigZagDecode(value): | ||||||
|  |   """Inverse of ZigZagEncode().""" | ||||||
|  |   if not value & 0x1: | ||||||
|  |     return value >> 1 | ||||||
|  |   return (value >> 1) ^ (~0) | ||||||
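|  | # Example mapping: 0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4, so values of | ||||||
|  | # small magnitude (of either sign) still encode to short varints. | ||||||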
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The *ByteSize() functions below return the number of bytes required to | ||||||
|  | # serialize "field number + type" information and then serialize the value. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int32ByteSize(field_number, int32): | ||||||
|  |   return Int64ByteSize(field_number, int32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int32ByteSizeNoTag(int32): | ||||||
|  |   return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int64ByteSize(field_number, int64): | ||||||
|  |   # Have to convert to uint before calling UInt64ByteSize(). | ||||||
|  |   return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UInt32ByteSize(field_number, uint32): | ||||||
|  |   return UInt64ByteSize(field_number, uint32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UInt64ByteSize(field_number, uint64): | ||||||
|  |   return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SInt32ByteSize(field_number, int32): | ||||||
|  |   return UInt32ByteSize(field_number, ZigZagEncode(int32)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SInt64ByteSize(field_number, int64): | ||||||
|  |   return UInt64ByteSize(field_number, ZigZagEncode(int64)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Fixed32ByteSize(field_number, fixed32): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Fixed64ByteSize(field_number, fixed64): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SFixed32ByteSize(field_number, sfixed32): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SFixed64ByteSize(field_number, sfixed64): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def FloatByteSize(field_number, flt): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def DoubleByteSize(field_number, double): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BoolByteSize(field_number, b): | ||||||
|  |   return TagByteSize(field_number) + 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def EnumByteSize(field_number, enum): | ||||||
|  |   return UInt32ByteSize(field_number, enum) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def StringByteSize(field_number, string): | ||||||
|  |   return BytesByteSize(field_number, string.encode('utf-8')) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BytesByteSize(field_number, b): | ||||||
|  |   return (TagByteSize(field_number) | ||||||
|  |           + _VarUInt64ByteSizeNoTag(len(b)) | ||||||
|  |           + len(b)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupByteSize(field_number, message): | ||||||
|  |   return (2 * TagByteSize(field_number)  # START and END group. | ||||||
|  |           + message.ByteSize()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageByteSize(field_number, message): | ||||||
|  |   return (TagByteSize(field_number) | ||||||
|  |           + _VarUInt64ByteSizeNoTag(message.ByteSize()) | ||||||
|  |           + message.ByteSize()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemByteSize(field_number, msg): | ||||||
|  |   # First compute the sizes of the tags. | ||||||
|  |   # There are two tags for the beginning and end of the repeated group (that | ||||||
|  |   # is, field number 1), one tag with field number 2 (type_id), and one with | ||||||
|  |   # field number 3 (message). | ||||||
|  |   total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) | ||||||
|  | 
 | ||||||
|  |   # Add the number of bytes for type_id. | ||||||
|  |   total_size += _VarUInt64ByteSizeNoTag(field_number) | ||||||
|  | 
 | ||||||
|  |   message_size = msg.ByteSize() | ||||||
|  | 
 | ||||||
|  |   # The number of bytes for encoding the length of the message. | ||||||
|  |   total_size += _VarUInt64ByteSizeNoTag(message_size) | ||||||
|  | 
 | ||||||
|  |   # The size of the message. | ||||||
|  |   total_size += message_size | ||||||
|  |   return total_size | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TagByteSize(field_number): | ||||||
|  |   """Returns the bytes required to serialize a tag with this field number.""" | ||||||
|  |   # Just pass in type 0, since the type won't affect the tag+type size. | ||||||
|  |   return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Private helper function for the *ByteSize() functions above. | ||||||
|  | 
 | ||||||
|  | def _VarUInt64ByteSizeNoTag(uint64): | ||||||
|  |   """Returns the number of bytes required to serialize a single varint | ||||||
|  |   using boundary value comparisons. (unrolled loop optimization -WPierce) | ||||||
|  |   uint64 must be unsigned. | ||||||
|  |   """ | ||||||
|  |   if uint64 <= 0x7f: return 1 | ||||||
|  |   if uint64 <= 0x3fff: return 2 | ||||||
|  |   if uint64 <= 0x1fffff: return 3 | ||||||
|  |   if uint64 <= 0xfffffff: return 4 | ||||||
|  |   if uint64 <= 0x7ffffffff: return 5 | ||||||
|  |   if uint64 <= 0x3ffffffffff: return 6 | ||||||
|  |   if uint64 <= 0x1ffffffffffff: return 7 | ||||||
|  |   if uint64 <= 0xffffffffffffff: return 8 | ||||||
|  |   if uint64 <= 0x7fffffffffffffff: return 9 | ||||||
|  |   if uint64 > UINT64_MAX: | ||||||
|  |     raise message.EncodeError('Value out of range: %d' % uint64) | ||||||
|  |   return 10 | ||||||
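|  | # For example, 0..127 fits in 1 byte and 128..16383 in 2 bytes (7 payload | ||||||
|  | # bits per varint byte); the full unsigned 64-bit range needs at most 10. | ||||||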
|  | 
 | ||||||
|  | 
 | ||||||
|  | NON_PACKABLE_TYPES = ( | ||||||
|  |   descriptor.FieldDescriptor.TYPE_STRING, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_GROUP, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_MESSAGE, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_BYTES | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def IsTypePackable(field_type): | ||||||
|  |   """Return true iff packable = true is valid for fields of this type. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field_type: a FieldDescriptor::Type value. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     True iff fields of this type are packable. | ||||||
|  |   """ | ||||||
|  |   return field_type not in NON_PACKABLE_TYPES | ||||||
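Taken together, these helpers let callers tag and size fields without running the encoder itself. A minimal sketch of how they compose, assuming the vendored module is importable as google.protobuf.internal.wire_format:

    from google.protobuf.internal import wire_format

    # Field number 2 with length-delimited wire type packs into a one-byte tag.
    tag = wire_format.PackTag(2, wire_format.WIRETYPE_LENGTH_DELIMITED)   # == 18
    assert wire_format.UnpackTag(tag) == (2, wire_format.WIRETYPE_LENGTH_DELIMITED)

    # Tag (1 byte) + length varint (2 bytes) + payload for a 300-byte bytes field.
    assert wire_format.BytesByteSize(2, b'\x00' * 300) == 303

    # Signed values pass through ZigZag so small negatives stay small on the wire.
    assert wire_format.ZigZagDecode(wire_format.ZigZagEncode(-5)) == -5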
							
								
								
									
912  lib/protobuf/json_format.py  Normal file
							|  | @ -0,0 +1,912 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains routines for printing protocol messages in JSON format. | ||||||
|  | 
 | ||||||
|  | Simple usage example: | ||||||
|  | 
 | ||||||
|  |   # Create a proto object and serialize it to a json format string. | ||||||
|  |   message = my_proto_pb2.MyMessage(foo='bar') | ||||||
|  |   json_string = json_format.MessageToJson(message) | ||||||
|  | 
 | ||||||
|  |   # Parse a json format string to proto object. | ||||||
|  |   message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'jieluo@google.com (Jie Luo)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | import base64 | ||||||
|  | from collections import OrderedDict | ||||||
|  | import json | ||||||
|  | import math | ||||||
|  | from operator import methodcaller | ||||||
|  | import re | ||||||
|  | import sys | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import type_checkers | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import symbol_database | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' | ||||||
|  | _INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_UINT32, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_INT64, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_UINT64]) | ||||||
|  | _INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, | ||||||
|  |                           descriptor.FieldDescriptor.CPPTYPE_UINT64]) | ||||||
|  | _FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, | ||||||
|  |                           descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) | ||||||
|  | _INFINITY = 'Infinity' | ||||||
|  | _NEG_INFINITY = '-Infinity' | ||||||
|  | _NAN = 'NaN' | ||||||
|  | 
 | ||||||
|  | _UNPAIRED_SURROGATE_PATTERN = re.compile( | ||||||
|  |     u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]') | ||||||
|  | 
 | ||||||
|  | _VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Error(Exception): | ||||||
|  |   """Top-level module error for json_format.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SerializeToJsonError(Error): | ||||||
|  |   """Thrown if serialization to JSON fails.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ParseError(Error): | ||||||
|  |   """Thrown in case of parsing error.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageToJson( | ||||||
|  |     message, | ||||||
|  |     including_default_value_fields=False, | ||||||
|  |     preserving_proto_field_name=False, | ||||||
|  |     indent=2, | ||||||
|  |     sort_keys=False, | ||||||
|  |     use_integers_for_enums=False, | ||||||
|  |     descriptor_pool=None, | ||||||
|  |     float_precision=None, | ||||||
|  |     ensure_ascii=True): | ||||||
|  |   """Converts protobuf message to JSON format. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     message: The protocol buffers message instance to serialize. | ||||||
|  |     including_default_value_fields: If True, singular primitive fields, | ||||||
|  |         repeated fields, and map fields will always be serialized.  If | ||||||
|  |         False, only serialize non-empty fields.  Singular message fields | ||||||
|  |         and oneof fields are not affected by this option. | ||||||
|  |     preserving_proto_field_name: If True, use the original proto field | ||||||
|  |         names as defined in the .proto file. If False, convert the field | ||||||
|  |         names to lowerCamelCase. | ||||||
|  |     indent: The JSON object will be pretty-printed with this indent level. | ||||||
|  |         An indent level of 0 or negative will only insert newlines. | ||||||
|  |     sort_keys: If True, then the output will be sorted by field names. | ||||||
|  |     use_integers_for_enums: If true, print integers instead of enum names. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |         default. | ||||||
|  |     float_precision: If set, use this to specify float field valid digits. | ||||||
|  |     ensure_ascii: If True, strings with non-ASCII characters are escaped. | ||||||
|  |         If False, Unicode strings are returned unchanged. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A string containing the JSON formatted protocol buffer message. | ||||||
|  |   """ | ||||||
|  |   printer = _Printer( | ||||||
|  |       including_default_value_fields, | ||||||
|  |       preserving_proto_field_name, | ||||||
|  |       use_integers_for_enums, | ||||||
|  |       descriptor_pool, | ||||||
|  |       float_precision=float_precision) | ||||||
|  |   return printer.ToJsonString(message, indent, sort_keys, ensure_ascii) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageToDict( | ||||||
|  |     message, | ||||||
|  |     including_default_value_fields=False, | ||||||
|  |     preserving_proto_field_name=False, | ||||||
|  |     use_integers_for_enums=False, | ||||||
|  |     descriptor_pool=None, | ||||||
|  |     float_precision=None): | ||||||
|  |   """Converts protobuf message to a dictionary. | ||||||
|  | 
 | ||||||
|  |   When the dictionary is encoded to JSON, it conforms to proto3 JSON spec. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     message: The protocol buffers message instance to serialize. | ||||||
|  |     including_default_value_fields: If True, singular primitive fields, | ||||||
|  |         repeated fields, and map fields will always be serialized.  If | ||||||
|  |         False, only serialize non-empty fields.  Singular message fields | ||||||
|  |         and oneof fields are not affected by this option. | ||||||
|  |     preserving_proto_field_name: If True, use the original proto field | ||||||
|  |         names as defined in the .proto file. If False, convert the field | ||||||
|  |         names to lowerCamelCase. | ||||||
|  |     use_integers_for_enums: If true, print integers instead of enum names. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |         default. | ||||||
|  |     float_precision: If set, use this to specify float field valid digits. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A dict representation of the protocol buffer message. | ||||||
|  |   """ | ||||||
|  |   printer = _Printer( | ||||||
|  |       including_default_value_fields, | ||||||
|  |       preserving_proto_field_name, | ||||||
|  |       use_integers_for_enums, | ||||||
|  |       descriptor_pool, | ||||||
|  |       float_precision=float_precision) | ||||||
|  |   # pylint: disable=protected-access | ||||||
|  |   return printer._MessageToJsonObject(message) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _IsMapEntry(field): | ||||||
|  |   return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and | ||||||
|  |           field.message_type.has_options and | ||||||
|  |           field.message_type.GetOptions().map_entry) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _Printer(object): | ||||||
|  |   """JSON format printer for protocol message.""" | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       including_default_value_fields=False, | ||||||
|  |       preserving_proto_field_name=False, | ||||||
|  |       use_integers_for_enums=False, | ||||||
|  |       descriptor_pool=None, | ||||||
|  |       float_precision=None): | ||||||
|  |     self.including_default_value_fields = including_default_value_fields | ||||||
|  |     self.preserving_proto_field_name = preserving_proto_field_name | ||||||
|  |     self.use_integers_for_enums = use_integers_for_enums | ||||||
|  |     self.descriptor_pool = descriptor_pool | ||||||
|  |     if float_precision: | ||||||
|  |       self.float_format = '.{}g'.format(float_precision) | ||||||
|  |     else: | ||||||
|  |       self.float_format = None | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self, message, indent, sort_keys, ensure_ascii): | ||||||
|  |     js = self._MessageToJsonObject(message) | ||||||
|  |     return json.dumps( | ||||||
|  |         js, indent=indent, sort_keys=sort_keys, ensure_ascii=ensure_ascii) | ||||||
|  | 
 | ||||||
|  |   def _MessageToJsonObject(self, message): | ||||||
|  |     """Converts message to an object according to Proto3 JSON Specification.""" | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       return self._WrapperMessageToJsonObject(message) | ||||||
|  |     if full_name in _WKTJSONMETHODS: | ||||||
|  |       return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self) | ||||||
|  |     js = {} | ||||||
|  |     return self._RegularMessageToJsonObject(message, js) | ||||||
|  | 
 | ||||||
|  |   def _RegularMessageToJsonObject(self, message, js): | ||||||
|  |     """Converts normal message according to Proto3 JSON Specification.""" | ||||||
|  |     fields = message.ListFields() | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |       for field, value in fields: | ||||||
|  |         if self.preserving_proto_field_name: | ||||||
|  |           name = field.name | ||||||
|  |         else: | ||||||
|  |           name = field.json_name | ||||||
|  |         if _IsMapEntry(field): | ||||||
|  |           # Convert a map field. | ||||||
|  |           v_field = field.message_type.fields_by_name['value'] | ||||||
|  |           js_map = {} | ||||||
|  |           for key in value: | ||||||
|  |             if isinstance(key, bool): | ||||||
|  |               if key: | ||||||
|  |                 recorded_key = 'true' | ||||||
|  |               else: | ||||||
|  |                 recorded_key = 'false' | ||||||
|  |             else: | ||||||
|  |               recorded_key = str(key) | ||||||
|  |             js_map[recorded_key] = self._FieldToJsonObject( | ||||||
|  |                 v_field, value[key]) | ||||||
|  |           js[name] = js_map | ||||||
|  |         elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |           # Convert a repeated field. | ||||||
|  |           js[name] = [self._FieldToJsonObject(field, k) | ||||||
|  |                       for k in value] | ||||||
|  |         elif field.is_extension: | ||||||
|  |           name = '[%s]' % field.full_name | ||||||
|  |           js[name] = self._FieldToJsonObject(field, value) | ||||||
|  |         else: | ||||||
|  |           js[name] = self._FieldToJsonObject(field, value) | ||||||
|  | 
 | ||||||
|  |       # Serialize default value if including_default_value_fields is True. | ||||||
|  |       if self.including_default_value_fields: | ||||||
|  |         message_descriptor = message.DESCRIPTOR | ||||||
|  |         for field in message_descriptor.fields: | ||||||
|  |           # Singular message fields and oneof fields will not be affected. | ||||||
|  |           if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and | ||||||
|  |                field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or | ||||||
|  |               field.containing_oneof): | ||||||
|  |             continue | ||||||
|  |           if self.preserving_proto_field_name: | ||||||
|  |             name = field.name | ||||||
|  |           else: | ||||||
|  |             name = field.json_name | ||||||
|  |           if name in js: | ||||||
|  |             # Skip the field which has been serialized already. | ||||||
|  |             continue | ||||||
|  |           if _IsMapEntry(field): | ||||||
|  |             js[name] = {} | ||||||
|  |           elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |             js[name] = [] | ||||||
|  |           else: | ||||||
|  |             js[name] = self._FieldToJsonObject(field, field.default_value) | ||||||
|  | 
 | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise SerializeToJsonError( | ||||||
|  |           'Failed to serialize {0} field: {1}.'.format(field.name, e)) | ||||||
|  | 
 | ||||||
|  |     return js | ||||||
|  | 
 | ||||||
|  |   def _FieldToJsonObject(self, field, value): | ||||||
|  |     """Converts field value according to Proto3 JSON Specification.""" | ||||||
|  |     if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       return self._MessageToJsonObject(value) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |       if self.use_integers_for_enums: | ||||||
|  |         return value | ||||||
|  |       if field.enum_type.full_name == 'google.protobuf.NullValue': | ||||||
|  |         return None | ||||||
|  |       enum_value = field.enum_type.values_by_number.get(value, None) | ||||||
|  |       if enum_value is not None: | ||||||
|  |         return enum_value.name | ||||||
|  |       else: | ||||||
|  |         if field.file.syntax == 'proto3': | ||||||
|  |           return value | ||||||
|  |         raise SerializeToJsonError('Enum field contains an integer value ' | ||||||
|  |                                    'which cannot be mapped to an enum value.') | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: | ||||||
|  |       if field.type == descriptor.FieldDescriptor.TYPE_BYTES: | ||||||
|  |         # Use base64 Data encoding for bytes | ||||||
|  |         return base64.b64encode(value).decode('utf-8') | ||||||
|  |       else: | ||||||
|  |         return value | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: | ||||||
|  |       return bool(value) | ||||||
|  |     elif field.cpp_type in _INT64_TYPES: | ||||||
|  |       return str(value) | ||||||
|  |     elif field.cpp_type in _FLOAT_TYPES: | ||||||
|  |       if math.isinf(value): | ||||||
|  |         if value < 0.0: | ||||||
|  |           return _NEG_INFINITY | ||||||
|  |         else: | ||||||
|  |           return _INFINITY | ||||||
|  |       if math.isnan(value): | ||||||
|  |         return _NAN | ||||||
|  |       if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: | ||||||
|  |         if self.float_format: | ||||||
|  |           return float(format(value, self.float_format)) | ||||||
|  |         else: | ||||||
|  |           return type_checkers.ToShortestFloat(value) | ||||||
|  | 
 | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   def _AnyMessageToJsonObject(self, message): | ||||||
|  |     """Converts Any message according to Proto3 JSON Specification.""" | ||||||
|  |     if not message.ListFields(): | ||||||
|  |       return {} | ||||||
|  |     # Must print @type first, use OrderedDict instead of {} | ||||||
|  |     js = OrderedDict() | ||||||
|  |     type_url = message.type_url | ||||||
|  |     js['@type'] = type_url | ||||||
|  |     sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) | ||||||
|  |     sub_message.ParseFromString(message.value) | ||||||
|  |     message_descriptor = sub_message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       js['value'] = self._WrapperMessageToJsonObject(sub_message) | ||||||
|  |       return js | ||||||
|  |     if full_name in _WKTJSONMETHODS: | ||||||
|  |       js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0], | ||||||
|  |                                  sub_message)(self) | ||||||
|  |       return js | ||||||
|  |     return self._RegularMessageToJsonObject(sub_message, js) | ||||||
|  | 
 | ||||||
|  |   def _GenericMessageToJsonObject(self, message): | ||||||
|  |     """Converts message according to Proto3 JSON Specification.""" | ||||||
|  |     # Duration, Timestamp and FieldMask have ToJsonString method to do the | ||||||
|  |     # convert. Users can also call the method directly. | ||||||
|  |     return message.ToJsonString() | ||||||
|  | 
 | ||||||
|  |   def _ValueMessageToJsonObject(self, message): | ||||||
|  |     """Converts Value message according to Proto3 JSON Specification.""" | ||||||
|  |     which = message.WhichOneof('kind') | ||||||
|  |     # If the Value message is not set, treat it as null_value when serializing | ||||||
|  |     # to JSON. The parsed-back result will differ from the original message. | ||||||
|  |     if which is None or which == 'null_value': | ||||||
|  |       return None | ||||||
|  |     if which == 'list_value': | ||||||
|  |       return self._ListValueMessageToJsonObject(message.list_value) | ||||||
|  |     if which == 'struct_value': | ||||||
|  |       value = message.struct_value | ||||||
|  |     else: | ||||||
|  |       value = getattr(message, which) | ||||||
|  |     oneof_descriptor = message.DESCRIPTOR.fields_by_name[which] | ||||||
|  |     return self._FieldToJsonObject(oneof_descriptor, value) | ||||||
|  | 
 | ||||||
|  |   def _ListValueMessageToJsonObject(self, message): | ||||||
|  |     """Converts ListValue message according to Proto3 JSON Specification.""" | ||||||
|  |     return [self._ValueMessageToJsonObject(value) | ||||||
|  |             for value in message.values] | ||||||
|  | 
 | ||||||
|  |   def _StructMessageToJsonObject(self, message): | ||||||
|  |     """Converts Struct message according to Proto3 JSON Specification.""" | ||||||
|  |     fields = message.fields | ||||||
|  |     ret = {} | ||||||
|  |     for key in fields: | ||||||
|  |       ret[key] = self._ValueMessageToJsonObject(fields[key]) | ||||||
|  |     return ret | ||||||
|  | 
 | ||||||
|  |   def _WrapperMessageToJsonObject(self, message): | ||||||
|  |     return self._FieldToJsonObject( | ||||||
|  |         message.DESCRIPTOR.fields_by_name['value'], message.value) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _IsWrapperMessage(message_descriptor): | ||||||
|  |   return message_descriptor.file.name == 'google/protobuf/wrappers.proto' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _DuplicateChecker(js): | ||||||
|  |   result = {} | ||||||
|  |   for name, value in js: | ||||||
|  |     if name in result: | ||||||
|  |       raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name)) | ||||||
|  |     result[name] = value | ||||||
|  |   return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CreateMessageFromTypeUrl(type_url, descriptor_pool): | ||||||
|  |   """Creates a message from a type URL.""" | ||||||
|  |   db = symbol_database.Default() | ||||||
|  |   pool = db.pool if descriptor_pool is None else descriptor_pool | ||||||
|  |   type_name = type_url.split('/')[-1] | ||||||
|  |   try: | ||||||
|  |     message_descriptor = pool.FindMessageTypeByName(type_name) | ||||||
|  |   except KeyError: | ||||||
|  |     raise TypeError( | ||||||
|  |         'Can not find message descriptor by type_url: {0}'.format(type_url)) | ||||||
|  |   message_class = db.GetPrototype(message_descriptor) | ||||||
|  |   return message_class() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Parse(text, | ||||||
|  |           message, | ||||||
|  |           ignore_unknown_fields=False, | ||||||
|  |           descriptor_pool=None, | ||||||
|  |           max_recursion_depth=100): | ||||||
|  |   """Parses a JSON representation of a protocol message into a message. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: Message JSON representation. | ||||||
|  |     message: A protocol buffer message to merge into. | ||||||
|  |     ignore_unknown_fields: If True, do not raise errors for unknown fields. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |       default. | ||||||
|  |     max_recursion_depth: max recursion depth of JSON message to be | ||||||
|  |       deserialized. JSON messages over this depth will fail to be | ||||||
|  |       deserialized. Default value is 100. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The same message passed as argument. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: On JSON parsing problems. | ||||||
|  |   """ | ||||||
|  |   if not isinstance(text, str): | ||||||
|  |     text = text.decode('utf-8') | ||||||
|  |   try: | ||||||
|  |     js = json.loads(text, object_pairs_hook=_DuplicateChecker) | ||||||
|  |   except ValueError as e: | ||||||
|  |     raise ParseError('Failed to load JSON: {0}.'.format(str(e))) | ||||||
|  |   return ParseDict(js, message, ignore_unknown_fields, descriptor_pool, | ||||||
|  |                    max_recursion_depth) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ParseDict(js_dict, | ||||||
|  |               message, | ||||||
|  |               ignore_unknown_fields=False, | ||||||
|  |               descriptor_pool=None, | ||||||
|  |               max_recursion_depth=100): | ||||||
|  |   """Parses a JSON dictionary representation into a message. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     js_dict: Dict representation of a JSON message. | ||||||
|  |     message: A protocol buffer message to merge into. | ||||||
|  |     ignore_unknown_fields: If True, do not raise errors for unknown fields. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |       default. | ||||||
|  |     max_recursion_depth: max recursion depth of JSON message to be | ||||||
|  |       deserialized. JSON messages over this depth will fail to be | ||||||
|  |       deserialized. Default value is 100. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The same message passed as argument. | ||||||
|  |   """ | ||||||
|  |   parser = _Parser(ignore_unknown_fields, descriptor_pool, max_recursion_depth) | ||||||
|  |   parser.ConvertMessage(js_dict, message, '') | ||||||
|  |   return message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _INT_OR_FLOAT = (int, float) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _Parser(object): | ||||||
|  |   """JSON format parser for protocol message.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, ignore_unknown_fields, descriptor_pool, | ||||||
|  |                max_recursion_depth): | ||||||
|  |     self.ignore_unknown_fields = ignore_unknown_fields | ||||||
|  |     self.descriptor_pool = descriptor_pool | ||||||
|  |     self.max_recursion_depth = max_recursion_depth | ||||||
|  |     self.recursion_depth = 0 | ||||||
|  | 
 | ||||||
|  |   def ConvertMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON object into a message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A JSON object. | ||||||
|  |       message: A WKT or regular protocol message to record the data. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of convert problems. | ||||||
|  |     """ | ||||||
|  |     self.recursion_depth += 1 | ||||||
|  |     if self.recursion_depth > self.max_recursion_depth: | ||||||
|  |       raise ParseError('Message too deep. Max recursion depth is {0}'.format( | ||||||
|  |           self.max_recursion_depth)) | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if not path: | ||||||
|  |       path = message_descriptor.name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       self._ConvertWrapperMessage(value, message, path) | ||||||
|  |     elif full_name in _WKTJSONMETHODS: | ||||||
|  |       methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) | ||||||
|  |     else: | ||||||
|  |       self._ConvertFieldValuePair(value, message, path) | ||||||
|  |     self.recursion_depth -= 1 | ||||||
|  | 
 | ||||||
|  |   def _ConvertFieldValuePair(self, js, message, path): | ||||||
|  |     """Convert field value pairs into regular message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       js: A JSON object to convert the field value pairs. | ||||||
|  |       message: A regular protocol message to record the data. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of problems converting. | ||||||
|  |     """ | ||||||
|  |     names = [] | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     fields_by_json_name = dict((f.json_name, f) | ||||||
|  |                                for f in message_descriptor.fields) | ||||||
|  |     for name in js: | ||||||
|  |       try: | ||||||
|  |         field = fields_by_json_name.get(name, None) | ||||||
|  |         if not field: | ||||||
|  |           field = message_descriptor.fields_by_name.get(name, None) | ||||||
|  |         if not field and _VALID_EXTENSION_NAME.match(name): | ||||||
|  |           if not message_descriptor.is_extendable: | ||||||
|  |             raise ParseError( | ||||||
|  |                 'Message type {0} does not have extensions at {1}'.format( | ||||||
|  |                     message_descriptor.full_name, path)) | ||||||
|  |           identifier = name[1:-1]  # strip [] brackets | ||||||
|  |           # pylint: disable=protected-access | ||||||
|  |           field = message.Extensions._FindExtensionByName(identifier) | ||||||
|  |           # pylint: enable=protected-access | ||||||
|  |           if not field: | ||||||
|  |             # Try looking for extension by the message type name, dropping the | ||||||
|  |             # field name following the final . separator in full_name. | ||||||
|  |             identifier = '.'.join(identifier.split('.')[:-1]) | ||||||
|  |             # pylint: disable=protected-access | ||||||
|  |             field = message.Extensions._FindExtensionByName(identifier) | ||||||
|  |             # pylint: enable=protected-access | ||||||
|  |         if not field: | ||||||
|  |           if self.ignore_unknown_fields: | ||||||
|  |             continue | ||||||
|  |           raise ParseError( | ||||||
|  |               ('Message type "{0}" has no field named "{1}" at "{2}".\n' | ||||||
|  |                ' Available Fields(except extensions): "{3}"').format( | ||||||
|  |                    message_descriptor.full_name, name, path, | ||||||
|  |                    [f.json_name for f in message_descriptor.fields])) | ||||||
|  |         if name in names: | ||||||
|  |           raise ParseError('Message type "{0}" should not have multiple ' | ||||||
|  |                            '"{1}" fields at "{2}".'.format( | ||||||
|  |                                message.DESCRIPTOR.full_name, name, path)) | ||||||
|  |         names.append(name) | ||||||
|  |         value = js[name] | ||||||
|  |         # Check no other oneof field is parsed. | ||||||
|  |         if field.containing_oneof is not None and value is not None: | ||||||
|  |           oneof_name = field.containing_oneof.name | ||||||
|  |           if oneof_name in names: | ||||||
|  |             raise ParseError('Message type "{0}" should not have multiple ' | ||||||
|  |                              '"{1}" oneof fields at "{2}".'.format( | ||||||
|  |                                  message.DESCRIPTOR.full_name, oneof_name, | ||||||
|  |                                  path)) | ||||||
|  |           names.append(oneof_name) | ||||||
|  | 
 | ||||||
|  |         if value is None: | ||||||
|  |           if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE | ||||||
|  |               and field.message_type.full_name == 'google.protobuf.Value'): | ||||||
|  |             sub_message = getattr(message, field.name) | ||||||
|  |             sub_message.null_value = 0 | ||||||
|  |           elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM | ||||||
|  |                 and field.enum_type.full_name == 'google.protobuf.NullValue'): | ||||||
|  |             setattr(message, field.name, 0) | ||||||
|  |           else: | ||||||
|  |             message.ClearField(field.name) | ||||||
|  |           continue | ||||||
|  | 
 | ||||||
|  |         # Parse field value. | ||||||
|  |         if _IsMapEntry(field): | ||||||
|  |           message.ClearField(field.name) | ||||||
|  |           self._ConvertMapFieldValue(value, message, field, | ||||||
|  |                                      '{0}.{1}'.format(path, name)) | ||||||
|  |         elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |           message.ClearField(field.name) | ||||||
|  |           if not isinstance(value, list): | ||||||
|  |             raise ParseError('repeated field {0} must be in [] which is ' | ||||||
|  |                              '{1} at {2}'.format(name, value, path)) | ||||||
|  |           if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |             # Repeated message field. | ||||||
|  |             for index, item in enumerate(value): | ||||||
|  |               sub_message = getattr(message, field.name).add() | ||||||
|  |               # None is a null_value in Value. | ||||||
|  |               if (item is None and | ||||||
|  |                   sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): | ||||||
|  |                 raise ParseError('null is not allowed to be used as an element' | ||||||
|  |                                  ' in a repeated field at {0}.{1}[{2}]'.format( | ||||||
|  |                                      path, name, index)) | ||||||
|  |               self.ConvertMessage(item, sub_message, | ||||||
|  |                                   '{0}.{1}[{2}]'.format(path, name, index)) | ||||||
|  |           else: | ||||||
|  |             # Repeated scalar field. | ||||||
|  |             for index, item in enumerate(value): | ||||||
|  |               if item is None: | ||||||
|  |                 raise ParseError('null is not allowed to be used as an element' | ||||||
|  |                                  ' in a repeated field at {0}.{1}[{2}]'.format( | ||||||
|  |                                      path, name, index)) | ||||||
|  |               getattr(message, field.name).append( | ||||||
|  |                   _ConvertScalarFieldValue( | ||||||
|  |                       item, field, '{0}.{1}[{2}]'.format(path, name, index))) | ||||||
|  |         elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |           if field.is_extension: | ||||||
|  |             sub_message = message.Extensions[field] | ||||||
|  |           else: | ||||||
|  |             sub_message = getattr(message, field.name) | ||||||
|  |           sub_message.SetInParent() | ||||||
|  |           self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) | ||||||
|  |         else: | ||||||
|  |           if field.is_extension: | ||||||
|  |             message.Extensions[field] = _ConvertScalarFieldValue( | ||||||
|  |                 value, field, '{0}.{1}'.format(path, name)) | ||||||
|  |           else: | ||||||
|  |             setattr( | ||||||
|  |                 message, field.name, | ||||||
|  |                 _ConvertScalarFieldValue(value, field, | ||||||
|  |                                          '{0}.{1}'.format(path, name))) | ||||||
|  |       except ParseError as e: | ||||||
|  |         if field and field.containing_oneof is None: | ||||||
|  |           raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  |         else: | ||||||
|  |           raise ParseError(str(e)) | ||||||
|  |       except ValueError as e: | ||||||
|  |         raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  |       except TypeError as e: | ||||||
|  |         raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertAnyMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Any message.""" | ||||||
|  |     if isinstance(value, dict) and not value: | ||||||
|  |       return | ||||||
|  |     try: | ||||||
|  |       type_url = value['@type'] | ||||||
|  |     except KeyError: | ||||||
|  |       raise ParseError( | ||||||
|  |           '@type is missing when parsing any message at {0}'.format(path)) | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |       sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) | ||||||
|  |     except TypeError as e: | ||||||
|  |       raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  |     message_descriptor = sub_message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       self._ConvertWrapperMessage(value['value'], sub_message, | ||||||
|  |                                   '{0}.value'.format(path)) | ||||||
|  |     elif full_name in _WKTJSONMETHODS: | ||||||
|  |       methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, | ||||||
|  |                    '{0}.value'.format(path))( | ||||||
|  |                        self) | ||||||
|  |     else: | ||||||
|  |       del value['@type'] | ||||||
|  |       self._ConvertFieldValuePair(value, sub_message, path) | ||||||
|  |       value['@type'] = type_url | ||||||
|  |     # Set the type_url and packed value on the Any message. | ||||||
|  |     message.value = sub_message.SerializeToString() | ||||||
|  |     message.type_url = type_url | ||||||
|  | 
 | ||||||
|  |   def _ConvertGenericMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into message with FromJsonString.""" | ||||||
|  |     # Duration, Timestamp, FieldMask have a FromJsonString method to do the | ||||||
|  |     # conversion. Users can also call the method directly. | ||||||
|  |     try: | ||||||
|  |       message.FromJsonString(value) | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertValueMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Value message.""" | ||||||
|  |     if isinstance(value, dict): | ||||||
|  |       self._ConvertStructMessage(value, message.struct_value, path) | ||||||
|  |     elif isinstance(value, list): | ||||||
|  |       self._ConvertListValueMessage(value, message.list_value, path) | ||||||
|  |     elif value is None: | ||||||
|  |       message.null_value = 0 | ||||||
|  |     elif isinstance(value, bool): | ||||||
|  |       message.bool_value = value | ||||||
|  |     elif isinstance(value, str): | ||||||
|  |       message.string_value = value | ||||||
|  |     elif isinstance(value, _INT_OR_FLOAT): | ||||||
|  |       message.number_value = value | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Value {0} has unexpected type {1} at {2}'.format( | ||||||
|  |           value, type(value), path)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertListValueMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into ListValue message.""" | ||||||
|  |     if not isinstance(value, list): | ||||||
|  |       raise ParseError('ListValue must be in [] which is {0} at {1}'.format( | ||||||
|  |           value, path)) | ||||||
|  |     message.ClearField('values') | ||||||
|  |     for index, item in enumerate(value): | ||||||
|  |       self._ConvertValueMessage(item, message.values.add(), | ||||||
|  |                                 '{0}[{1}]'.format(path, index)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertStructMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Struct message.""" | ||||||
|  |     if not isinstance(value, dict): | ||||||
|  |       raise ParseError('Struct must be in a dict which is {0} at {1}'.format( | ||||||
|  |           value, path)) | ||||||
|  |     # Clear will mark the struct as modified so it will be created even if | ||||||
|  |     # there are no values. | ||||||
|  |     message.Clear() | ||||||
|  |     for key in value: | ||||||
|  |       self._ConvertValueMessage(value[key], message.fields[key], | ||||||
|  |                                 '{0}.{1}'.format(path, key)) | ||||||
|  |     return | ||||||
|  | 
 | ||||||
|  |   def _ConvertWrapperMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Wrapper message.""" | ||||||
|  |     field = message.DESCRIPTOR.fields_by_name['value'] | ||||||
|  |     setattr( | ||||||
|  |         message, 'value', | ||||||
|  |         _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) | ||||||
|  | 
 | ||||||
|  |   def _ConvertMapFieldValue(self, value, message, field, path): | ||||||
|  |     """Convert map field value for a message map field. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A JSON object to convert the map field value. | ||||||
|  |       message: A protocol message to record the converted data. | ||||||
|  |       field: The descriptor of the map field to be converted. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of convert problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, dict): | ||||||
|  |       raise ParseError( | ||||||
|  |           'Map field {0} must be in a dict which is {1} at {2}'.format( | ||||||
|  |               field.name, value, path)) | ||||||
|  |     key_field = field.message_type.fields_by_name['key'] | ||||||
|  |     value_field = field.message_type.fields_by_name['value'] | ||||||
|  |     for key in value: | ||||||
|  |       key_value = _ConvertScalarFieldValue(key, key_field, | ||||||
|  |                                            '{0}.key'.format(path), True) | ||||||
|  |       if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |         self.ConvertMessage(value[key], | ||||||
|  |                             getattr(message, field.name)[key_value], | ||||||
|  |                             '{0}[{1}]'.format(path, key_value)) | ||||||
|  |       else: | ||||||
|  |         getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( | ||||||
|  |             value[key], value_field, path='{0}[{1}]'.format(path, key_value)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertScalarFieldValue(value, field, path, require_str=False): | ||||||
|  |   """Convert a single scalar field value. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar value to convert the scalar field value. | ||||||
|  |     field: The descriptor of the field to convert. | ||||||
|  |     path: parent path to log parse error info. | ||||||
|  |     require_str: If True, the field value must be a str. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The converted scalar field value | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: In case of convert problems. | ||||||
|  |   """ | ||||||
|  |   try: | ||||||
|  |     if field.cpp_type in _INT_TYPES: | ||||||
|  |       return _ConvertInteger(value) | ||||||
|  |     elif field.cpp_type in _FLOAT_TYPES: | ||||||
|  |       return _ConvertFloat(value, field) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: | ||||||
|  |       return _ConvertBool(value, require_str) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: | ||||||
|  |       if field.type == descriptor.FieldDescriptor.TYPE_BYTES: | ||||||
|  |         if isinstance(value, str): | ||||||
|  |           encoded = value.encode('utf-8') | ||||||
|  |         else: | ||||||
|  |           encoded = value | ||||||
|  |         # Add extra padding '=' | ||||||
|  |         padded_value = encoded + b'=' * (4 - len(encoded) % 4) | ||||||
|  |         return base64.urlsafe_b64decode(padded_value) | ||||||
|  |       else: | ||||||
|  |         # Checking for unpaired surrogates appears to be unreliable, | ||||||
|  |         # depending on the specific Python version, so we check manually. | ||||||
|  |         if _UNPAIRED_SURROGATE_PATTERN.search(value): | ||||||
|  |           raise ParseError('Unpaired surrogate') | ||||||
|  |         return value | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |       # Convert an enum value. | ||||||
|  |       enum_value = field.enum_type.values_by_name.get(value, None) | ||||||
|  |       if enum_value is None: | ||||||
|  |         try: | ||||||
|  |           number = int(value) | ||||||
|  |           enum_value = field.enum_type.values_by_number.get(number, None) | ||||||
|  |         except ValueError: | ||||||
|  |           raise ParseError('Invalid enum value {0} for enum type {1}'.format( | ||||||
|  |               value, field.enum_type.full_name)) | ||||||
|  |         if enum_value is None: | ||||||
|  |           if field.file.syntax == 'proto3': | ||||||
|  |             # Proto3 accepts unknown enums. | ||||||
|  |             return number | ||||||
|  |           raise ParseError('Invalid enum value {0} for enum type {1}'.format( | ||||||
|  |               value, field.enum_type.full_name)) | ||||||
|  |       return enum_value.number | ||||||
|  |   except ParseError as e: | ||||||
|  |     raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertInteger(value): | ||||||
|  |   """Convert an integer. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar value to convert. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The integer value. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: If an integer couldn't be consumed. | ||||||
|  |   """ | ||||||
|  |   if isinstance(value, float) and not value.is_integer(): | ||||||
|  |     raise ParseError('Couldn\'t parse integer: {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if isinstance(value, str) and value.find(' ') != -1: | ||||||
|  |     raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if isinstance(value, bool): | ||||||
|  |     raise ParseError('Bool value {0} is not acceptable for ' | ||||||
|  |                      'integer field'.format(value)) | ||||||
|  | 
 | ||||||
|  |   return int(value) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertFloat(value, field): | ||||||
|  |   """Convert a floating point number.""" | ||||||
|  |   if isinstance(value, float): | ||||||
|  |     if math.isnan(value): | ||||||
|  |       raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') | ||||||
|  |     if math.isinf(value): | ||||||
|  |       if value > 0: | ||||||
|  |         raise ParseError('Couldn\'t parse Infinity or value too large, ' | ||||||
|  |                          'use quoted "Infinity" instead') | ||||||
|  |       else: | ||||||
|  |         raise ParseError('Couldn\'t parse -Infinity or value too small, ' | ||||||
|  |                          'use quoted "-Infinity" instead') | ||||||
|  |     if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if value > type_checkers._FLOAT_MAX: | ||||||
|  |         raise ParseError('Float value too large') | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if value < type_checkers._FLOAT_MIN: | ||||||
|  |         raise ParseError('Float value too small') | ||||||
|  |   if value == 'nan': | ||||||
|  |     raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') | ||||||
|  |   try: | ||||||
|  |     # Assume Python compatible syntax. | ||||||
|  |     return float(value) | ||||||
|  |   except ValueError: | ||||||
|  |     # Check alternative spellings. | ||||||
|  |     if value == _NEG_INFINITY: | ||||||
|  |       return float('-inf') | ||||||
|  |     elif value == _INFINITY: | ||||||
|  |       return float('inf') | ||||||
|  |     elif value == _NAN: | ||||||
|  |       return float('nan') | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Couldn\'t parse float: {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertBool(value, require_str): | ||||||
|  |   """Convert a boolean value. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar value to convert. | ||||||
|  |     require_str: If True, value must be a str. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The bool parsed. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: If a boolean value couldn't be consumed. | ||||||
|  |   """ | ||||||
|  |   if require_str: | ||||||
|  |     if value == 'true': | ||||||
|  |       return True | ||||||
|  |     elif value == 'false': | ||||||
|  |       return False | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Expected "true" or "false", not {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if not isinstance(value, bool): | ||||||
|  |     raise ParseError('Expected true or false without quotes') | ||||||
|  |   return value | ||||||
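# Illustrative sketch (not from the upstream protobuf source; the helper name
# below is invented for this example): how the scalar converters above treat a
# few representative JSON values. _ConvertFloat is omitted because it also
# needs a FieldDescriptor.
def _example_scalar_conversions():
  assert _ConvertInteger(3.0) == 3              # integral floats are accepted
  assert _ConvertBool('true', require_str=True) is True
  assert _ConvertBool(False, require_str=False) is False
  for bad in (2.5, '1 0', True):                # fraction, embedded space, bool
    try:
      _ConvertInteger(bad)
    except ParseError:
      pass                                      # each of these is rejected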
|  | 
 | ||||||
|  | _WKTJSONMETHODS = { | ||||||
|  |     'google.protobuf.Any': ['_AnyMessageToJsonObject', | ||||||
|  |                             '_ConvertAnyMessage'], | ||||||
|  |     'google.protobuf.Duration': ['_GenericMessageToJsonObject', | ||||||
|  |                                  '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', | ||||||
|  |                                   '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', | ||||||
|  |                                   '_ConvertListValueMessage'], | ||||||
|  |     'google.protobuf.Struct': ['_StructMessageToJsonObject', | ||||||
|  |                                '_ConvertStructMessage'], | ||||||
|  |     'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', | ||||||
|  |                                   '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.Value': ['_ValueMessageToJsonObject', | ||||||
|  |                               '_ConvertValueMessage'] | ||||||
|  | } | ||||||
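# Illustrative sketch (not from the upstream protobuf source; the helper name
# is invented): the dispatch table above is why well-known types get special
# JSON forms. A Timestamp, for example, round-trips as an RFC 3339 string
# rather than as an object with "seconds"/"nanos" keys. MessageToJson and
# Parse are the public entry points defined earlier in this module.
def _example_well_known_type_json():
  from google.protobuf import timestamp_pb2
  ts = timestamp_pb2.Timestamp(seconds=0)
  assert MessageToJson(ts) == '"1970-01-01T00:00:00Z"'
  parsed = Parse('"1970-01-01T00:00:00Z"', timestamp_pb2.Timestamp())
  assert parsed.seconds == 0 and parsed.nanos == 0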
							
								
								
									
lib/protobuf/message.py (new file, 424 lines)
							|  | @ -0,0 +1,424 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | # TODO(robinson): We should just make these methods all "pure-virtual" and move | ||||||
|  | # all implementation out, into reflection.py for now. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | """Contains an abstract base class for protocol messages.""" | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | class Error(Exception): | ||||||
|  |   """Base error type for this module.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DecodeError(Error): | ||||||
|  |   """Exception raised when deserializing messages.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EncodeError(Error): | ||||||
|  |   """Exception raised when serializing messages.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Message(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract base class for protocol messages. | ||||||
|  | 
 | ||||||
|  |   Protocol message classes are almost always generated by the protocol | ||||||
|  |   compiler.  These generated types subclass Message and implement the methods | ||||||
|  |   shown below. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Link to an HTML document here. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document that instances of this class will also | ||||||
|  |   # have an Extensions attribute with __getitem__ and __setitem__. | ||||||
|  |   # Again, not sure how to best convey this. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document that the class must also have a static | ||||||
|  |   #   RegisterExtension(extension_field) method. | ||||||
|  |   #   Not sure how to best express at this point. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document these fields and methods. | ||||||
|  | 
 | ||||||
|  |   __slots__ = [] | ||||||
|  | 
 | ||||||
|  |   #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. | ||||||
|  |   DESCRIPTOR = None | ||||||
|  | 
 | ||||||
|  |   def __deepcopy__(self, memo=None): | ||||||
|  |     clone = type(self)() | ||||||
|  |     clone.MergeFrom(self) | ||||||
|  |     return clone | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other_msg): | ||||||
|  |     """Recursively compares two messages by value and structure.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other_msg): | ||||||
|  |     # Can't just say self != other_msg, since that would infinitely recurse. :) | ||||||
|  |     return not self == other_msg | ||||||
|  | 
 | ||||||
|  |   def __hash__(self): | ||||||
|  |     raise TypeError('unhashable object') | ||||||
|  | 
 | ||||||
|  |   def __str__(self): | ||||||
|  |     """Outputs a human-readable representation of the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __unicode__(self): | ||||||
|  |     """Outputs a human-readable representation of the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other_msg): | ||||||
|  |     """Merges the contents of the specified message into current message. | ||||||
|  | 
 | ||||||
|  |     This method merges the contents of the specified message into the current | ||||||
|  |     message. Singular fields that are set in the specified message overwrite | ||||||
|  |     the corresponding fields in the current message. Repeated fields are | ||||||
|  |     appended. Singular sub-messages and groups are recursively merged. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       other_msg (Message): A message to merge into the current message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def CopyFrom(self, other_msg): | ||||||
|  |     """Copies the content of the specified message into the current message. | ||||||
|  | 
 | ||||||
|  |     The method clears the current message and then merges the specified | ||||||
|  |     message using MergeFrom. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       other_msg (Message): A message to copy into the current one. | ||||||
|  |     """ | ||||||
|  |     if self is other_msg: | ||||||
|  |       return | ||||||
|  |     self.Clear() | ||||||
|  |     self.MergeFrom(other_msg) | ||||||
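  # Illustrative sketch (not from the upstream protobuf source). With a
  # hypothetical generated type `addressbook_pb2.Person` that has a singular
  # `name` field and a repeated `phones` field:
  #
  #   a = addressbook_pb2.Person(name='alice'); a.phones.add(number='1')
  #   b = addressbook_pb2.Person(name='bob');   b.phones.add(number='2')
  #   a.MergeFrom(b)   # a.name == 'bob', len(a.phones) == 2 (appended)
  #   a.CopyFrom(b)    # a == b exactly,  len(a.phones) == 1 (cleared first)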
|  | 
 | ||||||
|  |   def Clear(self): | ||||||
|  |     """Clears all data that was set in the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def SetInParent(self): | ||||||
|  |     """Mark this as present in the parent. | ||||||
|  | 
 | ||||||
|  |     This normally happens automatically when you assign a field of a | ||||||
|  |     sub-message, but sometimes you want to make the sub-message | ||||||
|  |     present while keeping it empty.  If you find yourself using this, | ||||||
|  |     you may want to reconsider your design. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def IsInitialized(self): | ||||||
|  |     """Checks if the message is initialized. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: The method returns True if the message is initialized (i.e. all of | ||||||
|  |       its required fields are set). | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): MergeFromString() should probably return None and be | ||||||
|  |   # implemented in terms of a helper that returns the # of bytes read.  Our | ||||||
|  |   # deserialization routines would use the helper when recursively | ||||||
|  |   # deserializing, but the end user would almost always just want the no-return | ||||||
|  |   # MergeFromString(). | ||||||
|  | 
 | ||||||
|  |   def MergeFromString(self, serialized): | ||||||
|  |     """Merges serialized protocol buffer data into this message. | ||||||
|  | 
 | ||||||
|  |     When we find a field in `serialized` that is already present | ||||||
|  |     in this message: | ||||||
|  | 
 | ||||||
|  |     -   If it's a "repeated" field, we append to the end of our list. | ||||||
|  |     -   Else, if it's a scalar, we overwrite our field. | ||||||
|  |     -   Else, (it's a nonrepeated composite), we recursively merge | ||||||
|  |         into the existing composite. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       serialized (bytes): Any object that allows us to call | ||||||
|  |         ``memoryview(serialized)`` to access a string of bytes using the | ||||||
|  |         buffer interface. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       int: The number of bytes read from `serialized`. | ||||||
|  |       For non-group messages, this will always be `len(serialized)`, | ||||||
|  |       but for messages which are actually groups, this will | ||||||
|  |       generally be less than `len(serialized)`, since we must | ||||||
|  |       stop when we reach an ``END_GROUP`` tag.  Note that if | ||||||
|  |       we *do* stop because of an ``END_GROUP`` tag, the number | ||||||
|  |       of bytes returned does not include the bytes | ||||||
|  |       for the ``END_GROUP`` tag information. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       DecodeError: if the input cannot be parsed. | ||||||
|  |     """ | ||||||
|  |     # TODO(robinson): Document handling of unknown fields. | ||||||
|  |     # TODO(robinson): When we switch to a helper, this will return None. | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ParseFromString(self, serialized): | ||||||
|  |     """Parse serialized protocol buffer data into this message. | ||||||
|  | 
 | ||||||
|  |     Like :func:`MergeFromString()`, except we clear the object first. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       message.DecodeError: if the input cannot be parsed. | ||||||
|  |     """ | ||||||
|  |     self.Clear() | ||||||
|  |     return self.MergeFromString(serialized) | ||||||
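  # Illustrative sketch (not from the upstream protobuf source): the usual
  # wire-format round trip, with the hypothetical `addressbook_pb2.Person`
  # standing in for any generated message type:
  #
  #   data = addressbook_pb2.Person(name='alice').SerializeToString()
  #   person = addressbook_pb2.Person()
  #   person.ParseFromString(data)   # clears, then merges; returns len(data)
  #   person.MergeFromString(data)   # merges on top of the existing content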
|  | 
 | ||||||
|  |   def SerializeToString(self, **kwargs): | ||||||
|  |     """Serializes the protocol message to a binary string. | ||||||
|  | 
 | ||||||
|  |     Keyword Args: | ||||||
|  |       deterministic (bool): If true, requests deterministic serialization | ||||||
|  |         of the protobuf, with predictable ordering of map keys. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A binary string representation of the message if all of the required | ||||||
|  |       fields in the message are set (i.e. the message is initialized). | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       EncodeError: if the message isn't initialized (see :func:`IsInitialized`). | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def SerializePartialToString(self, **kwargs): | ||||||
|  |     """Serializes the protocol message to a binary string. | ||||||
|  | 
 | ||||||
|  |     This method is similar to SerializeToString but doesn't check if the | ||||||
|  |     message is initialized. | ||||||
|  | 
 | ||||||
|  |     Keyword Args: | ||||||
|  |       deterministic (bool): If true, requests deterministic serialization | ||||||
|  |         of the protobuf, with predictable ordering of map keys. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bytes: A serialized representation of the partial message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Decide whether we like these better | ||||||
|  |   # than auto-generated has_foo() and clear_foo() methods | ||||||
|  |   # on the instances themselves.  This way is less consistent | ||||||
|  |   # with C++, but it makes reflection-type access easier and | ||||||
|  |   # reduces the number of magically autogenerated things. | ||||||
|  |   # | ||||||
|  |   # TODO(robinson): Be sure to document (and test) exactly | ||||||
|  |   # which field names are accepted here.  Are we case-sensitive? | ||||||
|  |   # What do we do with fields that share names with Python keywords | ||||||
|  |   # like 'lambda' and 'yield'? | ||||||
|  |   # | ||||||
|  |   # nnorwitz says: | ||||||
|  |   # """ | ||||||
|  |   # Typically (in python), an underscore is appended to names that are | ||||||
|  |   # keywords. So they would become lambda_ or yield_. | ||||||
|  |   # """ | ||||||
|  |   def ListFields(self): | ||||||
|  |     """Returns a list of (FieldDescriptor, value) tuples for present fields. | ||||||
|  | 
 | ||||||
|  |     A message field is non-empty if HasField() would return true. A singular | ||||||
|  |     primitive field is non-empty if HasField() would return true in proto2 or it | ||||||
|  |     is non zero in proto3. A repeated field is non-empty if it contains at least | ||||||
|  |     one element. The fields are ordered by field number. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       list[tuple(FieldDescriptor, value)]: field descriptors and values | ||||||
|  |       for all fields in the message which are not empty. The values vary by | ||||||
|  |       field type. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def HasField(self, field_name): | ||||||
|  |     """Checks if a certain field is set for the message. | ||||||
|  | 
 | ||||||
|  |     For a oneof group, checks if any field inside is set. Note that if the | ||||||
|  |     field_name is not defined in the message descriptor, :exc:`ValueError` will | ||||||
|  |     be raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       field_name (str): The name of the field to check for presence. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: Whether a value has been set for the named field. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: if the `field_name` is not a member of this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ClearField(self, field_name): | ||||||
|  |     """Clears the contents of a given field. | ||||||
|  | 
 | ||||||
|  |     Inside a oneof group, clears the field set. If the name refers to neither a | ||||||
|  |     defined field nor a oneof group, :exc:`ValueError` is raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       field_name (str): The name of the field to check for presence. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: if the `field_name` is not a member of this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def WhichOneof(self, oneof_group): | ||||||
|  |     """Returns the name of the field that is set inside a oneof group. | ||||||
|  | 
 | ||||||
|  |     If no field is set, returns None. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       oneof_group (str): the name of the oneof group to check. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       str or None: The name of the group that is set, or None. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: no group with the given name exists | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
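  # Illustrative sketch (not from the upstream protobuf source). Assuming a
  # generated message with a proto2 optional field `name` and a oneof group
  # `contact` containing `email` and `phone`:
  #
  #   msg.HasField('name')          # False until a value is assigned
  #   msg.email = 'a@example.com'
  #   msg.WhichOneof('contact')     # 'email'
  #   msg.ClearField('email')
  #   msg.WhichOneof('contact')     # None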
|  | 
 | ||||||
|  |   def HasExtension(self, extension_handle): | ||||||
|  |     """Checks if a certain extension is present for this message. | ||||||
|  | 
 | ||||||
|  |     Extensions are retrieved using the :attr:`Extensions` mapping (if present). | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       extension_handle: The handle for the extension to check. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: Whether the extension is present for this message. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if the extension is repeated. Similar to repeated fields, | ||||||
|  |         there is no separate notion of presence: a "not present" repeated | ||||||
|  |         extension is an empty list. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ClearExtension(self, extension_handle): | ||||||
|  |     """Clears the contents of a given extension. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       extension_handle: The handle for the extension to clear. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def UnknownFields(self): | ||||||
|  |     """Returns the UnknownFieldSet. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       UnknownFieldSet: The unknown fields stored in this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def DiscardUnknownFields(self): | ||||||
|  |     """Clears all fields in the :class:`UnknownFieldSet`. | ||||||
|  | 
 | ||||||
|  |     This operation is recursive for nested messages. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ByteSize(self): | ||||||
|  |     """Returns the serialized size of this message. | ||||||
|  | 
 | ||||||
|  |     Recursively calls ByteSize() on all contained messages. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       int: The number of bytes required to serialize this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   @classmethod | ||||||
|  |   def FromString(cls, s): | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   @staticmethod | ||||||
|  |   def RegisterExtension(extension_handle): | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def _SetListener(self, message_listener): | ||||||
|  |     """Internal method used by the protocol message implementation. | ||||||
|  |     Clients should not call this directly. | ||||||
|  | 
 | ||||||
|  |     Sets a listener that this message will call on certain state transitions. | ||||||
|  | 
 | ||||||
|  |     The purpose of this method is to register back-edges from children to | ||||||
|  |     parents at runtime, for the purpose of setting "has" bits and | ||||||
|  |     byte-size-dirty bits in the parent and ancestor objects whenever a child or | ||||||
|  |     descendant object is modified. | ||||||
|  | 
 | ||||||
|  |     If the client wants to disconnect this Message from the object tree, she | ||||||
|  |     explicitly sets callback to None. | ||||||
|  | 
 | ||||||
|  |     If message_listener is None, unregisters any existing listener.  Otherwise, | ||||||
|  |     message_listener must implement the MessageListener interface in | ||||||
|  |     internal/message_listener.py, and we discard any listener registered | ||||||
|  |     via a previous _SetListener() call. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __getstate__(self): | ||||||
|  |     """Support the pickle protocol.""" | ||||||
|  |     return dict(serialized=self.SerializePartialToString()) | ||||||
|  | 
 | ||||||
|  |   def __setstate__(self, state): | ||||||
|  |     """Support the pickle protocol.""" | ||||||
|  |     self.__init__() | ||||||
|  |     serialized = state['serialized'] | ||||||
|  |     # On Python 3, using encoding='latin1' is required for unpickling | ||||||
|  |     # protos pickled by Python 2. | ||||||
|  |     if not isinstance(serialized, bytes): | ||||||
|  |       serialized = serialized.encode('latin1') | ||||||
|  |     self.ParseFromString(serialized) | ||||||
|  | 
 | ||||||
|  |   def __reduce__(self): | ||||||
|  |     message_descriptor = self.DESCRIPTOR | ||||||
|  |     if message_descriptor.containing_type is None: | ||||||
|  |       return type(self), (), self.__getstate__() | ||||||
|  |     # the message type must be nested. | ||||||
|  |     # Python does not pickle nested classes; use the symbol_database on the | ||||||
|  |     # receiving end. | ||||||
|  |     container = message_descriptor | ||||||
|  |     return (_InternalConstructMessage, (container.full_name,), | ||||||
|  |             self.__getstate__()) | ||||||
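  # Illustrative sketch (not from the upstream protobuf source):
  # __getstate__/__setstate__/__reduce__ above make messages picklable by
  # serializing to the wire format, so for a hypothetical generated type:
  #
  #   import pickle
  #   restored = pickle.loads(pickle.dumps(addressbook_pb2.Person(name='alice')))
  #   restored.name   # 'alice'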
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _InternalConstructMessage(full_name): | ||||||
|  |   """Constructs a nested message.""" | ||||||
|  |   from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top | ||||||
|  | 
 | ||||||
|  |   return symbol_database.Default().GetSymbol(full_name)() | ||||||
							
								
								
									
lib/protobuf/message_factory.py (new file, 185 lines)
							|  | @ -0,0 +1,185 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Provides a factory class for generating dynamic messages. | ||||||
|  | 
 | ||||||
|  | The easiest way to use this class: if you have access to the FileDescriptor | ||||||
|  | protos containing the messages you want to create, you can just do the following: | ||||||
|  | 
 | ||||||
|  | message_classes = message_factory.GetMessages(iterable_of_file_descriptors) | ||||||
|  | my_proto_instance = message_classes['some.proto.package.MessageName']() | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'matthewtoia@google.com (Matt Toia)' | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import api_implementation | ||||||
|  | from google.protobuf import descriptor_pool | ||||||
|  | from google.protobuf import message | ||||||
|  | 
 | ||||||
|  | if api_implementation.Type() == 'cpp': | ||||||
|  |   from google.protobuf.pyext import cpp_message as message_impl | ||||||
|  | else: | ||||||
|  |   from google.protobuf.internal import python_message as message_impl | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The type of all Message classes. | ||||||
|  | _GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MessageFactory(object): | ||||||
|  |   """Factory for creating Proto2 messages from descriptors in a pool.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, pool=None): | ||||||
|  |     """Initializes a new factory.""" | ||||||
|  |     self.pool = pool or descriptor_pool.DescriptorPool() | ||||||
|  | 
 | ||||||
|  |     # local cache of all classes built from protobuf descriptors | ||||||
|  |     self._classes = {} | ||||||
|  | 
 | ||||||
|  |   def GetPrototype(self, descriptor): | ||||||
|  |     """Obtains a proto2 message class based on the passed in descriptor. | ||||||
|  | 
 | ||||||
|  |     Passing a descriptor with a fully qualified name matching a previous | ||||||
|  |     invocation will cause the same class to be returned. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       descriptor: The descriptor to build from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class describing the passed in descriptor. | ||||||
|  |     """ | ||||||
|  |     if descriptor not in self._classes: | ||||||
|  |       result_class = self.CreatePrototype(descriptor) | ||||||
|  |       # The assignment to _classes is redundant for the base implementation, but | ||||||
|  |       # might avoid confusion in cases where CreatePrototype gets overridden and | ||||||
|  |       # does not call the base implementation. | ||||||
|  |       self._classes[descriptor] = result_class | ||||||
|  |       return result_class | ||||||
|  |     return self._classes[descriptor] | ||||||
|  | 
 | ||||||
|  |   def CreatePrototype(self, descriptor): | ||||||
|  |     """Builds a proto2 message class based on the passed in descriptor. | ||||||
|  | 
 | ||||||
|  |     Don't call this function directly, it always creates a new class. Call | ||||||
|  |     GetPrototype() instead. This method is meant to be overridden in subclasses | ||||||
|  |     to perform additional operations on the newly constructed class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       descriptor: The descriptor to build from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class describing the passed in descriptor. | ||||||
|  |     """ | ||||||
|  |     descriptor_name = descriptor.name | ||||||
|  |     result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( | ||||||
|  |         descriptor_name, | ||||||
|  |         (message.Message,), | ||||||
|  |         { | ||||||
|  |             'DESCRIPTOR': descriptor, | ||||||
|  |             # If module not set, it wrongly points to message_factory module. | ||||||
|  |             '__module__': None, | ||||||
|  |         }) | ||||||
|  |     result_class._FACTORY = self  # pylint: disable=protected-access | ||||||
|  |     # Assign in _classes before doing recursive calls to avoid infinite | ||||||
|  |     # recursion. | ||||||
|  |     self._classes[descriptor] = result_class | ||||||
|  |     for field in descriptor.fields: | ||||||
|  |       if field.message_type: | ||||||
|  |         self.GetPrototype(field.message_type) | ||||||
|  |     for extension in result_class.DESCRIPTOR.extensions: | ||||||
|  |       if extension.containing_type not in self._classes: | ||||||
|  |         self.GetPrototype(extension.containing_type) | ||||||
|  |       extended_class = self._classes[extension.containing_type] | ||||||
|  |       extended_class.RegisterExtension(extension) | ||||||
|  |     return result_class | ||||||
|  | 
 | ||||||
|  |   def GetMessages(self, files): | ||||||
|  |     """Gets all the messages from a specified file. | ||||||
|  | 
 | ||||||
|  |     This will find and resolve dependencies, failing if the descriptor | ||||||
|  |     pool cannot satisfy them. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       files: The file names to extract messages from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A dictionary mapping proto names to the message classes. This will include | ||||||
|  |       any dependent messages as well as any messages defined in the same file as | ||||||
|  |       a specified message. | ||||||
|  |     """ | ||||||
|  |     result = {} | ||||||
|  |     for file_name in files: | ||||||
|  |       file_desc = self.pool.FindFileByName(file_name) | ||||||
|  |       for desc in file_desc.message_types_by_name.values(): | ||||||
|  |         result[desc.full_name] = self.GetPrototype(desc) | ||||||
|  | 
 | ||||||
|  |       # While the extension FieldDescriptors are created by the descriptor pool, | ||||||
|  |       # the python classes created in the factory need them to be registered | ||||||
|  |       # explicitly, which is done below. | ||||||
|  |       # | ||||||
|  |       # The call to RegisterExtension will specifically check if the | ||||||
|  |       # extension was already registered on the object and either | ||||||
|  |       # ignore the registration if the original was the same, or raise | ||||||
|  |       # an error if they were different. | ||||||
|  | 
 | ||||||
|  |       for extension in file_desc.extensions_by_name.values(): | ||||||
|  |         if extension.containing_type not in self._classes: | ||||||
|  |           self.GetPrototype(extension.containing_type) | ||||||
|  |         extended_class = self._classes[extension.containing_type] | ||||||
|  |         extended_class.RegisterExtension(extension) | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _FACTORY = MessageFactory() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GetMessages(file_protos): | ||||||
|  |   """Builds a dictionary of all the messages available in a set of files. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     file_protos: Iterable of FileDescriptorProto to build messages out of. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A dictionary mapping proto names to the message classes. This will include | ||||||
|  |     any dependent messages as well as any messages defined in the same file as | ||||||
|  |     a specified message. | ||||||
|  |   """ | ||||||
|  |   # The cpp implementation of the protocol buffer library requires adding the | ||||||
|  |   # messages in topological order of the dependency graph. | ||||||
|  |   file_by_name = {file_proto.name: file_proto for file_proto in file_protos} | ||||||
|  |   def _AddFile(file_proto): | ||||||
|  |     for dependency in file_proto.dependency: | ||||||
|  |       if dependency in file_by_name: | ||||||
|  |         # Remove from elements to be visited, in order to cut cycles. | ||||||
|  |         _AddFile(file_by_name.pop(dependency)) | ||||||
|  |     _FACTORY.pool.Add(file_proto) | ||||||
|  |   while file_by_name: | ||||||
|  |     _AddFile(file_by_name.popitem()[1]) | ||||||
|  |   return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) | ||||||
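# Illustrative sketch (not from the upstream protobuf source): building a
# message class at runtime from a hand-written FileDescriptorProto via the
# module-level GetMessages() above. The file, package and message names, and
# the helper itself, are invented for the example.
def _example_dynamic_message():
  from google.protobuf import descriptor_pb2
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = 'example/dynamic.proto'
  file_proto.package = 'example'
  msg_proto = file_proto.message_type.add()
  msg_proto.name = 'Ping'
  field = msg_proto.field.add()
  field.name = 'payload'
  field.number = 1
  field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
  field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
  classes = GetMessages([file_proto])
  ping = classes['example.Ping'](payload='hello')
  return ping.SerializeToString()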
							
								
								
									
lib/protobuf/proto_builder.py (new file, 134 lines)
							|  | @ -0,0 +1,134 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Dynamic Protobuf class creator.""" | ||||||
|  | 
 | ||||||
|  | from collections import OrderedDict | ||||||
|  | import hashlib | ||||||
|  | import os | ||||||
|  | 
 | ||||||
|  | from google.protobuf import descriptor_pb2 | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _GetMessageFromFactory(factory, full_name): | ||||||
|  |   """Get a proto class from the MessageFactory by name. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     factory: a MessageFactory instance. | ||||||
|  |     full_name: str, the fully qualified name of the proto type. | ||||||
|  |   Returns: | ||||||
|  |     A class, for the type identified by full_name. | ||||||
|  |   Raises: | ||||||
|  |     KeyError, if the proto is not found in the factory's descriptor pool. | ||||||
|  |   """ | ||||||
|  |   proto_descriptor = factory.pool.FindMessageTypeByName(full_name) | ||||||
|  |   proto_cls = factory.GetPrototype(proto_descriptor) | ||||||
|  |   return proto_cls | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MakeSimpleProtoClass(fields, full_name=None, pool=None): | ||||||
|  |   """Create a Protobuf class whose fields are basic types. | ||||||
|  | 
 | ||||||
|  |   Note: this doesn't validate field names! | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     fields: dict of {name: field_type} mappings for each field in the proto. If | ||||||
|  |         this is an OrderedDict the order will be maintained, otherwise the | ||||||
|  |         fields will be sorted by name. | ||||||
|  |     full_name: optional str, the fully-qualified name of the proto type. | ||||||
|  |     pool: optional DescriptorPool instance. | ||||||
|  |   Returns: | ||||||
|  |     a class, the new protobuf class with a FileDescriptor. | ||||||
|  |   """ | ||||||
|  |   factory = message_factory.MessageFactory(pool=pool) | ||||||
|  | 
 | ||||||
|  |   if full_name is not None: | ||||||
|  |     try: | ||||||
|  |       proto_cls = _GetMessageFromFactory(factory, full_name) | ||||||
|  |       return proto_cls | ||||||
|  |     except KeyError: | ||||||
|  |       # The factory's DescriptorPool doesn't know about this class yet. | ||||||
|  |       pass | ||||||
|  | 
 | ||||||
|  |   # Get a list of (name, field_type) tuples from the fields dict. If fields was | ||||||
|  |   # an OrderedDict we keep the order, but otherwise we sort the fields to ensure | ||||||
|  |   # consistent ordering. | ||||||
|  |   field_items = fields.items() | ||||||
|  |   if not isinstance(fields, OrderedDict): | ||||||
|  |     field_items = sorted(field_items) | ||||||
|  | 
 | ||||||
|  |   # Use a consistent file name that is unlikely to conflict with any imported | ||||||
|  |   # proto files. | ||||||
|  |   fields_hash = hashlib.sha1() | ||||||
|  |   for f_name, f_type in field_items: | ||||||
|  |     fields_hash.update(f_name.encode('utf-8')) | ||||||
|  |     fields_hash.update(str(f_type).encode('utf-8')) | ||||||
|  |   proto_file_name = fields_hash.hexdigest() + '.proto' | ||||||
|  | 
 | ||||||
|  |   # If the proto is anonymous, use the same hash to name it. | ||||||
|  |   if full_name is None: | ||||||
|  |     full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + | ||||||
|  |                  fields_hash.hexdigest()) | ||||||
|  |     try: | ||||||
|  |       proto_cls = _GetMessageFromFactory(factory, full_name) | ||||||
|  |       return proto_cls | ||||||
|  |     except KeyError: | ||||||
|  |       # The factory's DescriptorPool doesn't know about this class yet. | ||||||
|  |       pass | ||||||
|  | 
 | ||||||
|  |   # This is the first time we see this proto: add a new descriptor to the pool. | ||||||
|  |   factory.pool.Add( | ||||||
|  |       _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) | ||||||
|  |   return _GetMessageFromFactory(factory, full_name) | ||||||
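# Illustrative sketch (not from the upstream protobuf source):
# MakeSimpleProtoClass() in action. The field and message names are invented
# for the example, and the helper itself is hypothetical.
def _example_simple_proto_class():
  fields = OrderedDict([
      ('foo', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
      ('bar', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
  ])
  pair_cls = MakeSimpleProtoClass(fields, full_name='example.SimplePair')
  msg = pair_cls(foo=42, bar='baz')
  return msg.SerializeToString()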
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): | ||||||
|  |   """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" | ||||||
|  |   package, name = full_name.rsplit('.', 1) | ||||||
|  |   file_proto = descriptor_pb2.FileDescriptorProto() | ||||||
|  |   file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) | ||||||
|  |   file_proto.package = package | ||||||
|  |   desc_proto = file_proto.message_type.add() | ||||||
|  |   desc_proto.name = name | ||||||
|  |   for f_number, (f_name, f_type) in enumerate(field_items, 1): | ||||||
|  |     field_proto = desc_proto.field.add() | ||||||
|  |     field_proto.name = f_name | ||||||
|  |     # If the number falls in the reserved range, reassign it to the correct | ||||||
|  |     # number after the range. | ||||||
|  |     if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: | ||||||
|  |       f_number += ( | ||||||
|  |           descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - | ||||||
|  |           descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) | ||||||
|  |     field_proto.number = f_number | ||||||
|  |     field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL | ||||||
|  |     field_proto.type = f_type | ||||||
|  |   return file_proto | ||||||
							
								
								
									
lib/protobuf/pyext/__init__.py (new file, 0 lines)
lib/protobuf/pyext/_message.cpython-310-x86_64-linux-gnu.so (new executable file, binary not shown)
lib/protobuf/pyext/cpp_message.py (new file, 65 lines)
							|  | @ -0,0 +1,65 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Protocol message implementation hooks for C++ implementation. | ||||||
|  | 
 | ||||||
|  | Contains helper functions used to create protocol message classes from | ||||||
|  | Descriptor objects at runtime backed by the protocol buffer C++ API. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'tibell@google.com (Johan Tibell)' | ||||||
|  | 
 | ||||||
|  | from google.protobuf.pyext import _message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedProtocolMessageType(_message.MessageMeta): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for protocol message classes created at runtime from Descriptors. | ||||||
|  | 
 | ||||||
|  |   The protocol compiler currently uses this metaclass to create protocol | ||||||
|  |   message classes at runtime.  Clients can also manually create their own | ||||||
|  |   classes at runtime, as in this example: | ||||||
|  | 
 | ||||||
|  |   mydescriptor = Descriptor(.....) | ||||||
|  |   factory = symbol_database.Default() | ||||||
|  |   factory.pool.AddDescriptor(mydescriptor) | ||||||
|  |   MyProtoClass = factory.GetPrototype(mydescriptor) | ||||||
|  |   myproto_instance = MyProtoClass() | ||||||
|  |   myproto_instance.foo_field = 23 | ||||||
|  |   ... | ||||||
|  | 
 | ||||||
|  |   The above example will not work for nested types. If you wish to include them, | ||||||
|  |   use reflection.MakeClass() instead of manually instantiating the class in | ||||||
|  |   order to create the appropriate class structure. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # Must be consistent with the protocol-compiler code in | ||||||
|  |   # proto2/compiler/internal/generator.*. | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
							
								
								
									
lib/protobuf/reflection.py (new file, 95 lines)
							|  | @ -0,0 +1,95 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | # This code is meant to work on Python 2.4 and above only. | ||||||
|  | 
 | ||||||
|  | """Contains a metaclass and helper functions used to create | ||||||
|  | protocol message classes from Descriptor objects at runtime. | ||||||
|  | 
 | ||||||
|  | Recall that a metaclass is the "type" of a class. | ||||||
|  | (A class is to a metaclass what an instance is to a class.) | ||||||
|  | 
 | ||||||
|  | In this case, we use the GeneratedProtocolMessageType metaclass | ||||||
|  | to inject all the useful functionality into the classes | ||||||
|  | output by the protocol compiler at compile-time. | ||||||
|  | 
 | ||||||
|  | The upshot of all this is that the real implementation | ||||||
|  | details for ALL pure-Python protocol buffers are *here in | ||||||
|  | this file*. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | from google.protobuf import symbol_database | ||||||
|  | 
 | ||||||
|  | # The type of all Message classes. | ||||||
|  | # Part of the public interface, but normally only used by message factories. | ||||||
|  | GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE | ||||||
|  | 
 | ||||||
|  | MESSAGE_CLASS_CACHE = {} | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Deprecated. Please NEVER use reflection.ParseMessage(). | ||||||
|  | def ParseMessage(descriptor, byte_str): | ||||||
|  |   """Generate a new Message instance from this Descriptor and a byte string. | ||||||
|  | 
 | ||||||
|  |   DEPRECATED: ParseMessage is deprecated because it is using MakeClass(). | ||||||
|  |   Please use MessageFactory.GetPrototype() instead. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     descriptor: Protobuf Descriptor object | ||||||
|  |     byte_str: Serialized protocol buffer byte string | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     Newly created protobuf Message object. | ||||||
|  |   """ | ||||||
|  |   result_class = MakeClass(descriptor) | ||||||
|  |   new_msg = result_class() | ||||||
|  |   new_msg.ParseFromString(byte_str) | ||||||
|  |   return new_msg | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Deprecated. Please NEVER use reflection.MakeClass(). | ||||||
|  | def MakeClass(descriptor): | ||||||
|  |   """Construct a class object for a protobuf described by descriptor. | ||||||
|  | 
 | ||||||
|  |   DEPRECATED: use MessageFactory.GetPrototype() instead. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     descriptor: A descriptor.Descriptor object describing the protobuf. | ||||||
|  |   Returns: | ||||||
|  |     The Message class object described by the descriptor. | ||||||
|  |   """ | ||||||
|  |   # Original implementation leads to duplicate message classes, which won't play | ||||||
|  |   # well with extensions. Message factory info is also missing. | ||||||
|  |   # Redirect to message_factory. | ||||||
|  |   return symbol_database.Default().GetPrototype(descriptor) | ||||||
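# Illustrative sketch (not from the upstream protobuf source): the
# non-deprecated way to obtain a message class for a known descriptor, which
# is what MakeClass() above now delegates to. any_pb2.Any stands in for any
# message type, and the helper name is invented.
def _example_prototype_lookup(byte_str):
  from google.protobuf import any_pb2
  cls = symbol_database.Default().GetPrototype(any_pb2.Any.DESCRIPTOR)
  msg = cls()
  msg.ParseFromString(byte_str)
  return msg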
							
								
								
									
lib/protobuf/service.py (new file, 228 lines)
							|  | @ -0,0 +1,228 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """DEPRECATED:  Declares the RPC service interfaces. | ||||||
|  | 
 | ||||||
|  | This module declares the abstract interfaces underlying proto2 RPC | ||||||
|  | services.  These are intended to be independent of any particular RPC | ||||||
|  | implementation, so that proto2 services can be used on top of a variety | ||||||
|  | of implementations.  Starting with version 2.3.0, RPC implementations should | ||||||
|  | not try to build on these, but should instead provide code generator plugins | ||||||
|  | which generate code specific to the particular RPC implementation.  This way | ||||||
|  | the generated code can be more appropriate for the implementation in use | ||||||
|  | and can avoid unnecessary layers of indirection. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'petar@google.com (Petar Petrov)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcException(Exception): | ||||||
|  |   """Exception raised on failed blocking RPC method call.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Service(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract base interface for protocol-buffer-based RPC services. | ||||||
|  | 
 | ||||||
|  |   Services themselves are abstract classes (implemented either by servers or as | ||||||
|  |   stubs), but they subclass this base interface. The methods of this | ||||||
|  |   interface can be used to call the methods of the service without knowing | ||||||
|  |   its exact type at compile time (analogous to the Message interface). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def GetDescriptor(): | ||||||
|  |     """Retrieves this service's descriptor.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                  request, done): | ||||||
|  |     """Calls a method of the service specified by method_descriptor. | ||||||
|  | 
 | ||||||
|  |     If "done" is None then the call is blocking and the response | ||||||
|  |     message will be returned directly.  Otherwise the call is asynchronous | ||||||
|  |     and "done" will later be called with the response value. | ||||||
|  | 
 | ||||||
|  |     In the blocking case, RpcException will be raised on error. | ||||||
|  | 
 | ||||||
|  |     Preconditions: | ||||||
|  | 
 | ||||||
|  |     * method_descriptor.service == GetDescriptor | ||||||
|  |     * request is of the exact same class as returned by | ||||||
|  |       GetRequestClass(method). | ||||||
|  |     * After the call has started, the request must not be modified. | ||||||
|  |     * "rpc_controller" is of the correct type for the RPC implementation being | ||||||
|  |       used by this Service.  For stubs, the "correct type" depends on the | ||||||
|  |       RpcChannel which the stub is using. | ||||||
|  | 
 | ||||||
|  |     Postconditions: | ||||||
|  | 
 | ||||||
|  |     * "done" will be called when the method is complete.  This may be | ||||||
|  |       before CallMethod() returns or it may be at some point in the future. | ||||||
|  |     * If the RPC failed, the response value passed to "done" will be None. | ||||||
|  |       Further details about the failure can be found by querying the | ||||||
|  |       RpcController. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def GetRequestClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the request message for the specified method. | ||||||
|  | 
 | ||||||
|  |     CallMethod() requires that the request is of a particular subclass of | ||||||
|  |     Message. GetRequestClass() gets the default instance of this required | ||||||
|  |     type. | ||||||
|  | 
 | ||||||
|  |     Example: | ||||||
|  |       method = service.GetDescriptor().FindMethodByName("Foo") | ||||||
|  |       request = service.GetRequestClass(method)() | ||||||
|  |       request.ParseFromString(input) | ||||||
|  |       service.CallMethod(method, rpc_controller, request, callback) | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def GetResponseClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the response message for the specified method. | ||||||
|  | 
 | ||||||
|  |     This method isn't really needed, as the RpcChannel's CallMethod constructs | ||||||
|  |     the response protocol message. It's provided anyway in case it is useful | ||||||
|  |     for the caller to know the response type in advance. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcController(object): | ||||||
|  | 
 | ||||||
|  |   """An RpcController mediates a single method call. | ||||||
|  | 
 | ||||||
|  |   The primary purpose of the controller is to provide a way to manipulate | ||||||
|  |   settings specific to the RPC implementation and to find out about RPC-level | ||||||
|  |   errors. The methods provided by the RpcController interface are intended | ||||||
|  |   to be a "least common denominator" set of features which we expect all | ||||||
|  |   implementations to support.  Specific implementations may provide more | ||||||
|  |   advanced features (e.g. deadline propagation). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # Client-side methods below | ||||||
|  | 
 | ||||||
|  |   def Reset(self): | ||||||
|  |     """Resets the RpcController to its initial state. | ||||||
|  | 
 | ||||||
|  |     After the RpcController has been reset, it may be reused in | ||||||
|  |     a new call. Must not be called while an RPC is in progress. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def Failed(self): | ||||||
|  |     """Returns true if the call failed. | ||||||
|  | 
 | ||||||
|  |     After a call has finished, returns true if the call failed.  The possible | ||||||
|  |     reasons for failure depend on the RPC implementation.  Failed() must not | ||||||
|  |     be called before a call has finished.  If Failed() returns true, the | ||||||
|  |     contents of the response message are undefined. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ErrorText(self): | ||||||
|  |     """If Failed is true, returns a human-readable description of the error.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def StartCancel(self): | ||||||
|  |     """Initiate cancellation. | ||||||
|  | 
 | ||||||
|  |     Advises the RPC system that the caller desires that the RPC call be | ||||||
|  |     canceled.  The RPC system may cancel it immediately, may wait awhile and | ||||||
|  |     then cancel it, or may not even cancel the call at all.  If the call is | ||||||
|  |     canceled, the "done" callback will still be called and the RpcController | ||||||
|  |     will indicate that the call failed at that time. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # Server-side methods below | ||||||
|  | 
 | ||||||
|  |   def SetFailed(self, reason): | ||||||
|  |     """Sets a failure reason. | ||||||
|  | 
 | ||||||
|  |     Causes Failed() to return true on the client side.  "reason" will be | ||||||
|  |     incorporated into the message returned by ErrorText().  If you find | ||||||
|  |     you need to return machine-readable information about failures, you | ||||||
|  |     should incorporate it into your response protocol buffer and should | ||||||
|  |     NOT call SetFailed(). | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def IsCanceled(self): | ||||||
|  |     """Checks if the client cancelled the RPC. | ||||||
|  | 
 | ||||||
|  |     If true, indicates that the client canceled the RPC, so the server may | ||||||
|  |     as well give up on replying to it.  The server should still call the | ||||||
|  |     final "done" callback. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def NotifyOnCancel(self, callback): | ||||||
|  |     """Sets a callback to invoke on cancel. | ||||||
|  | 
 | ||||||
|  |     Asks that the given callback be called when the RPC is canceled.  The | ||||||
|  |     callback will always be called exactly once.  If the RPC completes without | ||||||
|  |     being canceled, the callback will be called after completion.  If the RPC | ||||||
|  |     has already been canceled when NotifyOnCancel() is called, the callback | ||||||
|  |     will be called immediately. | ||||||
|  | 
 | ||||||
|  |     NotifyOnCancel() must be called no more than once per request. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcChannel(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract interface for an RPC channel. | ||||||
|  | 
 | ||||||
|  |   An RpcChannel represents a communication line to a service which can be used | ||||||
|  |   to call that service's methods.  The service may be running on another | ||||||
|  |   machine. Normally, you should not use an RpcChannel directly, but instead | ||||||
|  |   construct a stub Service wrapping it. | ||||||
|  | 
 | ||||||
|  |   Example: | ||||||
|  |     channel = rpcImpl.Channel("remotehost.example.com:1234") | ||||||
|  |     controller = rpcImpl.Controller() | ||||||
|  |     service = MyService_Stub(channel) | ||||||
|  |     service.MyMethod(controller, request, callback) | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                  request, response_class, done): | ||||||
|  |     """Calls the method identified by the descriptor. | ||||||
|  | 
 | ||||||
|  |     Call the given method of the remote service.  The signature of this | ||||||
|  |     procedure looks the same as Service.CallMethod(), but the requirements | ||||||
|  |     are less strict in one important way:  the request object doesn't have to | ||||||
|  |     be of any specific class as long as its descriptor is method.input_type. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
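|  |  | ||||||
|  |  | ||||||
|  | # The class below is an illustrative, hypothetical sketch added for | ||||||
|  | # documentation purposes only; it is not part of the original proto2 service | ||||||
|  | # interfaces.  It shows one way the RpcController contract described above | ||||||
|  | # (Reset/Failed/ErrorText/SetFailed) could be satisfied by a trivial | ||||||
|  | # in-memory controller.  Real RPC implementations supply their own | ||||||
|  | # controllers with transport-specific behavior. | ||||||
|  | class _ExampleRpcController(RpcController): | ||||||
|  |   """Toy single-call controller; tracks failure state and a cancel callback.""" | ||||||
|  |  | ||||||
|  |   def __init__(self): | ||||||
|  |     self.Reset() | ||||||
|  |  | ||||||
|  |   def Reset(self): | ||||||
|  |     self._failed = False | ||||||
|  |     self._error_text = None | ||||||
|  |     self._cancel_callback = None | ||||||
|  |  | ||||||
|  |   def Failed(self): | ||||||
|  |     return self._failed | ||||||
|  |  | ||||||
|  |   def ErrorText(self): | ||||||
|  |     return self._error_text | ||||||
|  |  | ||||||
|  |   def StartCancel(self): | ||||||
|  |     # This toy controller does not implement real cancellation. | ||||||
|  |     pass | ||||||
|  |  | ||||||
|  |   def SetFailed(self, reason): | ||||||
|  |     self._failed = True | ||||||
|  |     self._error_text = reason | ||||||
|  |  | ||||||
|  |   def IsCanceled(self): | ||||||
|  |     return False | ||||||
|  |  | ||||||
|  |   def NotifyOnCancel(self, callback): | ||||||
|  |     # Stored but never invoked, since this controller never cancels. | ||||||
|  |     self._cancel_callback = callback | ||||||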
							
								
								
									
295  lib/protobuf/service_reflection.py  Normal file
							|  | @ -0,0 +1,295 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains metaclasses used to create protocol service and service stub | ||||||
|  | classes from ServiceDescriptor objects at runtime. | ||||||
|  | 
 | ||||||
|  | The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to | ||||||
|  | inject all useful functionality into the classes output by the protocol | ||||||
|  | compiler at compile-time. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'petar@google.com (Petar Petrov)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedServiceType(type): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for service classes created at runtime from ServiceDescriptors. | ||||||
|  | 
 | ||||||
|  |   Implementations for all methods described in the Service class are added here | ||||||
|  |   by this class. We also create properties to allow getting/setting all fields | ||||||
|  |   in the protocol message. | ||||||
|  | 
 | ||||||
|  |   The protocol compiler currently uses this metaclass to create protocol service | ||||||
|  |   classes at runtime. Clients can also manually create their own classes at | ||||||
|  |   runtime, as in this example:: | ||||||
|  | 
 | ||||||
|  |     mydescriptor = ServiceDescriptor(.....) | ||||||
|  |     class MyProtoService(service.Service, metaclass=GeneratedServiceType): | ||||||
|  |       DESCRIPTOR = mydescriptor | ||||||
|  |     myservice_instance = MyProtoService() | ||||||
|  |     # ... | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
|  | 
 | ||||||
|  |   def __init__(cls, name, bases, dictionary): | ||||||
|  |     """Creates a message service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Name of the class (ignored, but required by the metaclass | ||||||
|  |         protocol). | ||||||
|  |       bases: Base classes of the class being constructed. | ||||||
|  |       dictionary: The class dictionary of the class being constructed. | ||||||
|  |         dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object | ||||||
|  |         describing this protocol service type. | ||||||
|  |     """ | ||||||
|  |     # Don't do anything if this class doesn't have a descriptor. This happens | ||||||
|  |     # when a service class is subclassed. | ||||||
|  |     if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: | ||||||
|  |       return | ||||||
|  | 
 | ||||||
|  |     descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] | ||||||
|  |     service_builder = _ServiceBuilder(descriptor) | ||||||
|  |     service_builder.BuildService(cls) | ||||||
|  |     cls.DESCRIPTOR = descriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedServiceStubType(GeneratedServiceType): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for service stubs created at runtime from ServiceDescriptors. | ||||||
|  | 
 | ||||||
|  |   This class has similar responsibilities as GeneratedServiceType, except that | ||||||
|  |   it creates the service stub classes. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
|  | 
 | ||||||
|  |   def __init__(cls, name, bases, dictionary): | ||||||
|  |     """Creates a message service stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Name of the class (ignored here). | ||||||
|  |       bases: Base classes of the class being constructed. | ||||||
|  |       dictionary: The class dictionary of the class being constructed. | ||||||
|  |         dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object | ||||||
|  |         describing this protocol service type. | ||||||
|  |     """ | ||||||
|  |     super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) | ||||||
|  |     # Don't do anything if this class doesn't have a descriptor. This happens | ||||||
|  |     # when a service stub is subclassed. | ||||||
|  |     if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: | ||||||
|  |       return | ||||||
|  | 
 | ||||||
|  |     descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] | ||||||
|  |     service_stub_builder = _ServiceStubBuilder(descriptor) | ||||||
|  |     service_stub_builder.BuildServiceStub(cls) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _ServiceBuilder(object): | ||||||
|  | 
 | ||||||
|  |   """This class constructs a protocol service class using a service descriptor. | ||||||
|  | 
 | ||||||
|  |   Given a service descriptor, this class constructs a class that represents | ||||||
|  |   the specified service descriptor. One service builder instance constructs | ||||||
|  |   exactly one service class. That means all instances of that class share the | ||||||
|  |   same builder. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, service_descriptor): | ||||||
|  |     """Initializes an instance of the service class builder. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor: ServiceDescriptor to use when constructing the | ||||||
|  |         service class. | ||||||
|  |     """ | ||||||
|  |     self.descriptor = service_descriptor | ||||||
|  | 
 | ||||||
|  |   def BuildService(builder, cls): | ||||||
|  |     """Constructs the service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       cls: The class that will be constructed. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     # CallMethod needs to operate with an instance of the Service class. This | ||||||
|  |     # internal wrapper function exists only to be able to pass the service | ||||||
|  |     # instance to the method that does the real CallMethod work. | ||||||
|  |     # Make sure to use the exact argument names from the abstract interface in | ||||||
|  |     # service.py so the wrappers match the expected signatures. | ||||||
|  |     def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): | ||||||
|  |       return builder._CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                                  request, done) | ||||||
|  | 
 | ||||||
|  |     def _WrapGetRequestClass(self, method_descriptor): | ||||||
|  |       return builder._GetRequestClass(method_descriptor) | ||||||
|  | 
 | ||||||
|  |     def _WrapGetResponseClass(self, method_descriptor): | ||||||
|  |       return builder._GetResponseClass(method_descriptor) | ||||||
|  | 
 | ||||||
|  |     builder.cls = cls | ||||||
|  |     cls.CallMethod = _WrapCallMethod | ||||||
|  |     cls.GetDescriptor = staticmethod(lambda: builder.descriptor) | ||||||
|  |     cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' | ||||||
|  |     cls.GetRequestClass = _WrapGetRequestClass | ||||||
|  |     cls.GetResponseClass = _WrapGetResponseClass | ||||||
|  |     for method in builder.descriptor.methods: | ||||||
|  |       setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) | ||||||
|  | 
 | ||||||
|  |   def _CallMethod(self, srvc, method_descriptor, | ||||||
|  |                   rpc_controller, request, callback): | ||||||
|  |     """Calls the method described by a given method descriptor. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       srvc: Instance of the service for which this method is called. | ||||||
|  |       method_descriptor: Descriptor that represent the method to call. | ||||||
|  |       rpc_controller: RPC controller to use for this method's execution. | ||||||
|  |       request: Request protocol message. | ||||||
|  |       callback: A callback to invoke after the method has completed. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'CallMethod() given method descriptor for wrong service type.') | ||||||
|  |     method = getattr(srvc, method_descriptor.name) | ||||||
|  |     return method(rpc_controller, request, callback) | ||||||
|  | 
 | ||||||
|  |   def _GetRequestClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the request protocol message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_descriptor: Descriptor of the method for which to return the | ||||||
|  |         request protocol message class. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class that represents the input protocol message of the specified | ||||||
|  |       method. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'GetRequestClass() given method descriptor for wrong service type.') | ||||||
|  |     return method_descriptor.input_type._concrete_class | ||||||
|  | 
 | ||||||
|  |   def _GetResponseClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the response protocol message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_descriptor: Descriptor of the method for which to return the | ||||||
|  |         response protocol message class. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class that represents the output protocol message of the specified | ||||||
|  |       method. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'GetResponseClass() given method descriptor for wrong service type.') | ||||||
|  |     return method_descriptor.output_type._concrete_class | ||||||
|  | 
 | ||||||
|  |   def _GenerateNonImplementedMethod(self, method): | ||||||
|  |     """Generates and returns a method that can be set for a service methods. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method: Descriptor of the service method for which a method is to be | ||||||
|  |         generated. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A method that can be added to the service class. | ||||||
|  |     """ | ||||||
|  |     return lambda inst, rpc_controller, request, callback: ( | ||||||
|  |         self._NonImplementedMethod(method.name, rpc_controller, callback)) | ||||||
|  | 
 | ||||||
|  |   def _NonImplementedMethod(self, method_name, rpc_controller, callback): | ||||||
|  |     """The body of all methods in the generated service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_name: Name of the method being executed. | ||||||
|  |       rpc_controller: RPC controller used to execute this method. | ||||||
|  |       callback: A callback which will be invoked when the method finishes. | ||||||
|  |     """ | ||||||
|  |     rpc_controller.SetFailed('Method %s not implemented.' % method_name) | ||||||
|  |     callback(None) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _ServiceStubBuilder(object): | ||||||
|  | 
 | ||||||
|  |   """Constructs a protocol service stub class using a service descriptor. | ||||||
|  | 
 | ||||||
|  |   Given a service descriptor, this class constructs a suitable stub class. | ||||||
|  |   A stub is just a type-safe wrapper around an RpcChannel which emulates a | ||||||
|  |   local implementation of the service. | ||||||
|  | 
 | ||||||
|  |   One service stub builder instance constructs exactly one class. That means all | ||||||
|  |   instances of that class share the same service stub builder. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, service_descriptor): | ||||||
|  |     """Initializes an instance of the service stub class builder. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor: ServiceDescriptor to use when constructing the | ||||||
|  |         stub class. | ||||||
|  |     """ | ||||||
|  |     self.descriptor = service_descriptor | ||||||
|  | 
 | ||||||
|  |   def BuildServiceStub(self, cls): | ||||||
|  |     """Constructs the stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       cls: The class that will be constructed. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def _ServiceStubInit(stub, rpc_channel): | ||||||
|  |       stub.rpc_channel = rpc_channel | ||||||
|  |     self.cls = cls | ||||||
|  |     cls.__init__ = _ServiceStubInit | ||||||
|  |     for method in self.descriptor.methods: | ||||||
|  |       setattr(cls, method.name, self._GenerateStubMethod(method)) | ||||||
|  | 
 | ||||||
|  |   def _GenerateStubMethod(self, method): | ||||||
|  |     return (lambda inst, rpc_controller, request, callback=None: | ||||||
|  |         self._StubMethod(inst, method, rpc_controller, request, callback)) | ||||||
|  | 
 | ||||||
|  |   def _StubMethod(self, stub, method_descriptor, | ||||||
|  |                   rpc_controller, request, callback): | ||||||
|  |     """The body of all service methods in the generated stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       stub: Stub instance. | ||||||
|  |       method_descriptor: Descriptor of the invoked method. | ||||||
|  |       rpc_controller: Rpc controller to execute the method. | ||||||
|  |       request: Request protocol message. | ||||||
|  |       callback: A callback to execute when the method finishes. | ||||||
|  |     Returns: | ||||||
|  |       Response message (in case of blocking call). | ||||||
|  |     """ | ||||||
|  |     return stub.rpc_channel.CallMethod( | ||||||
|  |         method_descriptor, rpc_controller, request, | ||||||
|  |         method_descriptor.output_type._concrete_class, callback) | ||||||
							
								
								
									
26  lib/protobuf/source_context_pb2.py  Normal file
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/source_context.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SOURCECONTEXT._serialized_start=57 | ||||||
|  |   _SOURCECONTEXT._serialized_end=91 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
36  lib/protobuf/struct_pb2.py  Normal file
							|  | @ -0,0 +1,36 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/struct.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _STRUCT_FIELDSENTRY._options = None | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_options = b'8\001' | ||||||
|  |   _NULLVALUE._serialized_start=474 | ||||||
|  |   _NULLVALUE._serialized_end=501 | ||||||
|  |   _STRUCT._serialized_start=50 | ||||||
|  |   _STRUCT._serialized_end=182 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_start=113 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_end=182 | ||||||
|  |   _VALUE._serialized_start=185 | ||||||
|  |   _VALUE._serialized_end=419 | ||||||
|  |   _LISTVALUE._serialized_start=421 | ||||||
|  |   _LISTVALUE._serialized_end=472 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
194  lib/protobuf/symbol_database.py  Normal file
							|  | @ -0,0 +1,194 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """A database of Python protocol buffer generated symbols. | ||||||
|  | 
 | ||||||
|  | SymbolDatabase is the MessageFactory for messages generated at compile time, | ||||||
|  | and makes it easy to create new instances of a registered type, given only the | ||||||
|  | type's protocol buffer symbol name. | ||||||
|  | 
 | ||||||
|  | Example usage:: | ||||||
|  | 
 | ||||||
|  |   db = symbol_database.SymbolDatabase() | ||||||
|  | 
 | ||||||
|  |   # Register symbols of interest, from one or multiple files. | ||||||
|  |   db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) | ||||||
|  |   db.RegisterMessage(my_proto_pb2.MyMessage) | ||||||
|  |   db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) | ||||||
|  | 
 | ||||||
|  |   # The database can be used as a MessageFactory, to generate types based on | ||||||
|  |   # their name: | ||||||
|  |   types = db.GetMessages(['my_proto.proto']) | ||||||
|  |   my_message_instance = types['MyMessage']() | ||||||
|  | 
 | ||||||
|  |   # The database's underlying descriptor pool can be queried, so it's not | ||||||
|  |   # necessary to know a type's filename to be able to generate it: | ||||||
|  |   filename = db.pool.FindFileContainingSymbol('MyMessage') | ||||||
|  |   my_message_instance = db.GetMessages([filename])['MyMessage']() | ||||||
|  | 
 | ||||||
|  |   # This functionality is also provided directly via a convenience method: | ||||||
|  |   my_message_instance = db.GetSymbol('MyMessage')() | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import api_implementation | ||||||
|  | from google.protobuf import descriptor_pool | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SymbolDatabase(message_factory.MessageFactory): | ||||||
|  |   """A database of Python generated symbols.""" | ||||||
|  | 
 | ||||||
|  |   def RegisterMessage(self, message): | ||||||
|  |     """Registers the given message type in the local database. | ||||||
|  | 
 | ||||||
|  |     Calls to GetSymbol() and GetMessages() will return messages registered here. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message: A :class:`google.protobuf.message.Message` subclass (or | ||||||
|  |         instance); its descriptor will be registered. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       The provided message. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     desc = message.DESCRIPTOR | ||||||
|  |     self._classes[desc] = message | ||||||
|  |     self.RegisterMessageDescriptor(desc) | ||||||
|  |     return message | ||||||
|  | 
 | ||||||
|  |   def RegisterMessageDescriptor(self, message_descriptor): | ||||||
|  |     """Registers the given message descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message_descriptor (Descriptor): the message descriptor to add. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddDescriptor(message_descriptor) | ||||||
|  | 
 | ||||||
|  |   def RegisterEnumDescriptor(self, enum_descriptor): | ||||||
|  |     """Registers the given enum descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       enum_descriptor (EnumDescriptor): The enum descriptor to register. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       EnumDescriptor: The provided descriptor. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddEnumDescriptor(enum_descriptor) | ||||||
|  |     return enum_descriptor | ||||||
|  | 
 | ||||||
|  |   def RegisterServiceDescriptor(self, service_descriptor): | ||||||
|  |     """Registers the given service descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor (ServiceDescriptor): the service descriptor to | ||||||
|  |         register. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddServiceDescriptor(service_descriptor) | ||||||
|  | 
 | ||||||
|  |   def RegisterFileDescriptor(self, file_descriptor): | ||||||
|  |     """Registers the given file descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       file_descriptor (FileDescriptor): The file descriptor to register. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._InternalAddFileDescriptor(file_descriptor) | ||||||
|  | 
 | ||||||
|  |   def GetSymbol(self, symbol): | ||||||
|  |     """Tries to find a symbol in the local database. | ||||||
|  | 
 | ||||||
|  |     Currently, this method only returns message.Message classes; however, it | ||||||
|  |     may be extended in the future to support other symbol types. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       symbol (str): a protocol buffer symbol. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A Python class corresponding to the symbol. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if the symbol could not be found. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return self._classes[self.pool.FindMessageTypeByName(symbol)] | ||||||
|  | 
 | ||||||
|  |   def GetMessages(self, files): | ||||||
|  |     # TODO(amauryfa): Fix the differences with MessageFactory. | ||||||
|  |     """Gets all registered messages from a specified file. | ||||||
|  | 
 | ||||||
|  |     Only messages already created and registered will be returned (this is the | ||||||
|  |     case for imported _pb2 modules). | ||||||
|  |     Unlike MessageFactory, this version also returns nested messages that are | ||||||
|  |     already defined, but it does not register any message extensions. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       files (list[str]): The file names to extract messages from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A dictionary mapping proto names to the message classes. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if a file could not be found. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def _GetAllMessages(desc): | ||||||
|  |       """Walk a message Descriptor and recursively yields all message names.""" | ||||||
|  |       yield desc | ||||||
|  |       for msg_desc in desc.nested_types: | ||||||
|  |         for nested_desc in _GetAllMessages(msg_desc): | ||||||
|  |           yield nested_desc | ||||||
|  | 
 | ||||||
|  |     result = {} | ||||||
|  |     for file_name in files: | ||||||
|  |       file_desc = self.pool.FindFileByName(file_name) | ||||||
|  |       for msg_desc in file_desc.message_types_by_name.values(): | ||||||
|  |         for desc in _GetAllMessages(msg_desc): | ||||||
|  |           try: | ||||||
|  |             result[desc.full_name] = self._classes[desc] | ||||||
|  |           except KeyError: | ||||||
|  |             # This descriptor has no registered class, skip it. | ||||||
|  |             pass | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Default(): | ||||||
|  |   """Returns the default SymbolDatabase.""" | ||||||
|  |   return _DEFAULT | ||||||
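|  |  | ||||||
|  |  | ||||||
|  | def _ExampleLookup(): | ||||||
|  |   """Illustrative sketch only; this hypothetical helper is not public API. | ||||||
|  |  | ||||||
|  |   Shows the lookup flow described in the module docstring, using the | ||||||
|  |   well-known Timestamp type as a stand-in for a user proto.  It assumes the | ||||||
|  |   generated module registers its classes with the default database on import | ||||||
|  |   (which the builder-based generated code shipped alongside this file does). | ||||||
|  |   """ | ||||||
|  |   # Imported lazily so this sketch has no effect on normal module import. | ||||||
|  |   from google.protobuf import timestamp_pb2  # registers google.protobuf.Timestamp | ||||||
|  |  | ||||||
|  |   db = Default() | ||||||
|  |   # Look the class up by its full protocol buffer symbol name... | ||||||
|  |   ts_class = db.GetSymbol('google.protobuf.Timestamp') | ||||||
|  |   # ...or fetch every registered message declared in the .proto file. | ||||||
|  |   messages = db.GetMessages(['google/protobuf/timestamp.proto']) | ||||||
|  |   assert messages['google.protobuf.Timestamp'] is ts_class | ||||||
|  |   assert ts_class is timestamp_pb2.Timestamp | ||||||
|  |   return ts_class(seconds=1) | ||||||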
							
								
								
									
110  lib/protobuf/text_encoding.py  Normal file
							|  | @ -0,0 +1,110 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Encoding related utilities.""" | ||||||
|  | import re | ||||||
|  | 
 | ||||||
|  | _cescape_chr_to_symbol_map = {} | ||||||
|  | _cescape_chr_to_symbol_map[9] = r'\t'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[10] = r'\n'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[13] = r'\r'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[34] = r'\"'  # necessary escape | ||||||
|  | _cescape_chr_to_symbol_map[39] = r"\'"  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[92] = r'\\'  # necessary escape | ||||||
|  | 
 | ||||||
|  | # Lookup table for unicode | ||||||
|  | _cescape_unicode_to_str = [chr(i) for i in range(0, 256)] | ||||||
|  | for byte, string in _cescape_chr_to_symbol_map.items(): | ||||||
|  |   _cescape_unicode_to_str[byte] = string | ||||||
|  | 
 | ||||||
|  | # Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) | ||||||
|  | _cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + | ||||||
|  |                         [chr(i) for i in range(32, 127)] + | ||||||
|  |                         [r'\%03o' % i for i in range(127, 256)]) | ||||||
|  | for byte, string in _cescape_chr_to_symbol_map.items(): | ||||||
|  |   _cescape_byte_to_str[byte] = string | ||||||
|  | del byte, string | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def CEscape(text, as_utf8): | ||||||
|  |   # type: (...) -> str | ||||||
|  |   """Escape a bytes string for use in an text protocol buffer. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: A byte string to be escaped. | ||||||
|  |     as_utf8: Specifies if result may contain non-ASCII characters. | ||||||
|  |         In Python 3 this allows unescaped non-ASCII Unicode characters. | ||||||
|  |         In Python 2 the return value will be valid UTF-8 rather than only ASCII. | ||||||
|  |   Returns: | ||||||
|  |     Escaped string (str). | ||||||
|  |   """ | ||||||
|  |   # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not | ||||||
|  |   # satisfy our needs; they encode unprintable characters using two-digit hex | ||||||
|  |   # escapes whereas our C++ unescaping function allows hex escapes to be any | ||||||
|  |   # length.  So, "\0011".encode('string_escape') ends up being "\\x011", which | ||||||
|  |   # will be decoded in C++ as a single-character string with char code 0x11. | ||||||
|  |   text_is_unicode = isinstance(text, str) | ||||||
|  |   if as_utf8 and text_is_unicode: | ||||||
|  |     # We're already unicode, no processing beyond control char escapes. | ||||||
|  |     return text.translate(_cescape_chr_to_symbol_map) | ||||||
|  |   ord_ = ord if text_is_unicode else lambda x: x  # bytes iterate as ints. | ||||||
|  |   if as_utf8: | ||||||
|  |     return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) | ||||||
|  |   return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def CUnescape(text): | ||||||
|  |   # type: (str) -> bytes | ||||||
|  |   """Unescape a text string with C-style escape sequences to UTF-8 bytes. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: The data to parse in a str. | ||||||
|  |   Returns: | ||||||
|  |     A byte string. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def ReplaceHex(m): | ||||||
|  |     # Only replace the match if the number of leading backslashes is odd, i.e. | ||||||
|  |     # the backslash immediately before the 'x' is not itself escaped. | ||||||
|  |     if len(m.group(1)) & 1: | ||||||
|  |       return m.group(1) + 'x0' + m.group(2) | ||||||
|  |     return m.group(0) | ||||||
|  | 
 | ||||||
|  |   # This is required because the 'string_escape' encoding doesn't | ||||||
|  |   # allow single-digit hex escapes (like '\xf'). | ||||||
|  |   result = _CUNESCAPE_HEX.sub(ReplaceHex, text) | ||||||
|  | 
 | ||||||
|  |   return (result.encode('utf-8')  # Make it bytes to allow decode. | ||||||
|  |           .decode('unicode_escape') | ||||||
|  |           # Make it bytes again to return the proper type. | ||||||
|  |           .encode('raw_unicode_escape')) | ||||||
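|  |  | ||||||
|  |  | ||||||
|  | def _ExampleRoundTrip(): | ||||||
|  |   """Illustrative sketch only; this hypothetical helper is not public API. | ||||||
|  |  | ||||||
|  |   Demonstrates that CEscape() and CUnescape() round-trip a byte string that | ||||||
|  |   mixes control bytes and printable ASCII: control characters come back as | ||||||
|  |   octal escapes, printable ASCII is left untouched, and unescaping recovers | ||||||
|  |   the original bytes. | ||||||
|  |   """ | ||||||
|  |   raw = b'\x00\x01"abc\n' | ||||||
|  |   escaped = CEscape(raw, as_utf8=False)  # escaped == r'\000\001\"abc\n' | ||||||
|  |   assert CUnescape(escaped) == raw | ||||||
|  |   return escaped | ||||||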
							
								
								
									
1795  lib/protobuf/text_format.py  Normal file  (file diff suppressed because it is too large)
26  lib/protobuf/timestamp_pb2.py  Normal file
							|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/timestamp.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _TIMESTAMP._serialized_start=52 | ||||||
|  |   _TIMESTAMP._serialized_end=95 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
42  lib/protobuf/type_pb2.py  Normal file
							|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/type.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 | ||||||
|  | from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SYNTAX._serialized_start=1413 | ||||||
|  |   _SYNTAX._serialized_end=1459 | ||||||
|  |   _TYPE._serialized_start=113 | ||||||
|  |   _TYPE._serialized_end=328 | ||||||
|  |   _FIELD._serialized_start=331 | ||||||
|  |   _FIELD._serialized_end=1056 | ||||||
|  |   _FIELD_KIND._serialized_start=610 | ||||||
|  |   _FIELD_KIND._serialized_end=938 | ||||||
|  |   _FIELD_CARDINALITY._serialized_start=940 | ||||||
|  |   _FIELD_CARDINALITY._serialized_end=1056 | ||||||
|  |   _ENUM._serialized_start=1059 | ||||||
|  |   _ENUM._serialized_end=1265 | ||||||
|  |   _ENUMVALUE._serialized_start=1267 | ||||||
|  |   _ENUMVALUE._serialized_end=1350 | ||||||
|  |   _OPTION._serialized_start=1352 | ||||||
|  |   _OPTION._serialized_end=1411 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
0  lib/protobuf/util/__init__.py  Normal file
72  lib/protobuf/util/json_format_pb2.py  Normal file
							|  | @ -0,0 +1,72 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/util/json_format.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  |   TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._options = None | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._options = None | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _ENUMVALUE._serialized_start=1607 | ||||||
|  |   _ENUMVALUE._serialized_end=1657 | ||||||
|  |   _TESTFLAGSANDSTRINGS._serialized_start=62 | ||||||
|  |   _TESTFLAGSANDSTRINGS._serialized_end=199 | ||||||
|  |   _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 | ||||||
|  |   _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 | ||||||
|  |   _TESTBASE64BYTEARRAYS._serialized_start=201 | ||||||
|  |   _TESTBASE64BYTEARRAYS._serialized_end=234 | ||||||
|  |   _TESTJAVASCRIPTJSON._serialized_start=236 | ||||||
|  |   _TESTJAVASCRIPTJSON._serialized_end=307 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON1._serialized_start=309 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON1._serialized_end=390 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON2._serialized_start=393 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON2._serialized_end=530 | ||||||
|  |   _TESTLARGEINT._serialized_start=532 | ||||||
|  |   _TESTLARGEINT._serialized_end=568 | ||||||
|  |   _TESTNUMBERS._serialized_start=571 | ||||||
|  |   _TESTNUMBERS._serialized_end=731 | ||||||
|  |   _TESTNUMBERS_MYTYPE._serialized_start=691 | ||||||
|  |   _TESTNUMBERS_MYTYPE._serialized_end=731 | ||||||
|  |   _TESTCAMELCASE._serialized_start=733 | ||||||
|  |   _TESTCAMELCASE._serialized_end=817 | ||||||
|  |   _TESTBOOLMAP._serialized_start=819 | ||||||
|  |   _TESTBOOLMAP._serialized_end=943 | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 | ||||||
|  |   _TESTRECURSION._serialized_start=945 | ||||||
|  |   _TESTRECURSION._serialized_end=1024 | ||||||
|  |   _TESTSTRINGMAP._serialized_start=1027 | ||||||
|  |   _TESTSTRINGMAP._serialized_end=1161 | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 | ||||||
|  |   _TESTSTRINGSERIALIZER._serialized_start=1164 | ||||||
|  |   _TESTSTRINGSERIALIZER._serialized_end=1360 | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 | ||||||
|  |   _TESTMESSAGEWITHEXTENSION._serialized_start=1362 | ||||||
|  |   _TESTMESSAGEWITHEXTENSION._serialized_end=1398 | ||||||
|  |   _TESTEXTENSION._serialized_start=1400 | ||||||
|  |   _TESTEXTENSION._serialized_end=1522 | ||||||
|  |   _TESTDEFAULTENUMVALUE._serialized_start=1524 | ||||||
|  |   _TESTDEFAULTENUMVALUE._serialized_end=1605 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
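A hedged usage sketch, not part of this diff: how a generated test module like the one above is typically consumed together with google.protobuf.json_format. The import path google.protobuf.util.json_format_pb2 is an assumption taken from the name passed to BuildTopDescriptorsAndMessages; adjust it to wherever the file actually lands on sys.path.

# Sketch only; module path is assumed, field names come from the descriptor above.
from google.protobuf import json_format
from google.protobuf.util import json_format_pb2  # assumed import location

msg = json_format_pb2.TestNumbers(b=7, c=1.5, d=True)            # optional int32 / float / bool fields
as_json = json_format.MessageToJson(msg)                          # JSON text for the populated fields
roundtrip = json_format.Parse(as_json, json_format_pb2.TestNumbers())
assert roundtrip.b == 7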
							
								
								
									
129 lib/protobuf/util/json_format_proto3_pb2.py Normal file
	File diff suppressed because one or more lines are too long
							
								
								
									
42 lib/protobuf/wrappers_pb2.py Normal file
							|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/wrappers.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _DOUBLEVALUE._serialized_start=51 | ||||||
|  |   _DOUBLEVALUE._serialized_end=79 | ||||||
|  |   _FLOATVALUE._serialized_start=81 | ||||||
|  |   _FLOATVALUE._serialized_end=108 | ||||||
|  |   _INT64VALUE._serialized_start=110 | ||||||
|  |   _INT64VALUE._serialized_end=137 | ||||||
|  |   _UINT64VALUE._serialized_start=139 | ||||||
|  |   _UINT64VALUE._serialized_end=167 | ||||||
|  |   _INT32VALUE._serialized_start=169 | ||||||
|  |   _INT32VALUE._serialized_end=196 | ||||||
|  |   _UINT32VALUE._serialized_start=198 | ||||||
|  |   _UINT32VALUE._serialized_end=226 | ||||||
|  |   _BOOLVALUE._serialized_start=228 | ||||||
|  |   _BOOLVALUE._serialized_end=254 | ||||||
|  |   _STRINGVALUE._serialized_start=256 | ||||||
|  |   _STRINGVALUE._serialized_end=284 | ||||||
|  |   _BYTESVALUE._serialized_start=286 | ||||||
|  |   _BYTESVALUE._serialized_end=313 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
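A minimal sketch (not in the diff) of the well-known wrapper types defined above: each message boxes a single scalar field named value, so a round trip through the wire format is enough to exercise it.

# Sketch only; uses the standard generated API for these messages.
from google.protobuf import wrappers_pb2

boxed = wrappers_pb2.Int32Value(value=42)
payload = boxed.SerializeToString()                      # wire encoding of the single 'value' field
assert wrappers_pb2.Int32Value.FromString(payload).value == 42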
|  | @ -2,11 +2,11 @@ | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # source: authentication.proto | # source: authentication.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| from protobuf.internal import enum_type_wrapper | from google.protobuf.internal import enum_type_wrapper | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import message as _message | from google.protobuf import message as _message | ||||||
| from protobuf import reflection as _reflection | from google.protobuf import reflection as _reflection | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
| _sym_db = _symbol_database.Default() | _sym_db = _symbol_database.Default() | ||||||
|  |  | ||||||
|  | @ -2,11 +2,11 @@ | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # source: canvaz-meta.proto | # source: canvaz-meta.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import message as _message | from google.protobuf import message as _message | ||||||
| from protobuf import reflection as _reflection | from google.protobuf import reflection as _reflection | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| from protobuf.internal import enum_type_wrapper | from google.protobuf.internal import enum_type_wrapper | ||||||
| 
 | 
 | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -3,10 +3,10 @@ | ||||||
| # source: canvaz.proto | # source: canvaz.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| import CanvazMeta_pb2 as canvaz__meta__pb2 | import CanvazMeta_pb2 as canvaz__meta__pb2 | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import message as _message | from google.protobuf import message as _message | ||||||
| from protobuf import reflection as _reflection | from google.protobuf import reflection as _reflection | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| 
 | 
 | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -2,10 +2,10 @@ | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # source: client_token.proto | # source: client_token.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| from protobuf.internal import builder as _builder | from google.protobuf.internal import builder as _builder | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import descriptor_pool as _descriptor_pool | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
| _sym_db = _symbol_database.Default() | _sym_db = _symbol_database.Default() | ||||||
|  |  | ||||||
|  | @ -2,11 +2,11 @@ | ||||||
| # Generated by the protocol buffer compiler.  DO NOT EDIT! | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
| # source: connect.proto | # source: connect.proto | ||||||
| """Generated protocol buffer code.""" | """Generated protocol buffer code.""" | ||||||
| from protobuf.internal import enum_type_wrapper | from google.protobuf.internal import enum_type_wrapper | ||||||
| from protobuf import descriptor as _descriptor | from google.protobuf import descriptor as _descriptor | ||||||
| from protobuf import message as _message | from google.protobuf import message as _message | ||||||
| from protobuf import reflection as _reflection | from google.protobuf import reflection as _reflection | ||||||
| from protobuf import symbol_database as _symbol_database | from google.protobuf import symbol_database as _symbol_database | ||||||
| # @@protoc_insertion_point(imports) | # @@protoc_insertion_point(imports) | ||||||
| 
 | 
 | ||||||
| _sym_db = _symbol_database.Default() | _sym_db = _symbol_database.Default() | ||||||
|  |  | ||||||
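A hedged aside on the hunks above: the protobuf runtime ships under the canonical google.protobuf package name, so the right-hand form of these imports resolves in a standard environment, while the bare protobuf name generally does not unless something aliases it. The fallback sketch below is illustrative only and assumes no such alias exists.

# Sketch only; demonstrates why the imports are rewritten in these hunks.
try:
    from protobuf import descriptor              # pre-change import style (bare package name)
except ModuleNotFoundError:
    from google.protobuf import descriptor       # canonical import used after this change

print(descriptor.Descriptor.__name__)            # 'Descriptor'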
Some files were not shown because too many files have changed in this diff