code time
							
								
								
									
.gitignore (4 additions, vendored)

@@ -5,3 +5,7 @@
 .DS_Store
 credentials.json
 venv/
+.vscode/
+__pycache__/
+*.pyo
+*.pyc
							
								
								
									
lib/protobuf/__init__.py (new file, 33 lines)

@@ -0,0 +1,33 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+# Copyright 2007 Google Inc. All Rights Reserved.
+
+__version__ = '3.20.1'
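
The vendored package pins protobuf 3.20.1. A quick sanity check, as a sketch: this assumes the vendored copy under lib/ ends up importable as google.protobuf (for example via a pip install -t lib style layout plus whatever sys.path setup the project does elsewhere).

# Sketch: confirm the vendored protobuf version is the one this commit pins.
# Assumes the vendored package resolves as google.protobuf at runtime.
import google.protobuf

assert google.protobuf.__version__ == '3.20.1'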
							
								
								
									
lib/protobuf/any_pb2.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/any.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42v\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z,google.golang.org/protobuf/types/known/anypb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.any_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010AnyProtoP\001Z,google.golang.org/protobuf/types/known/anypb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+  _ANY._serialized_start=46
+  _ANY._serialized_end=84
+# @@protoc_insertion_point(module_scope)
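
any_pb2 provides the Any well-known type, which wraps an arbitrary message together with its type URL. A short sketch of the pack/unpack round trip (Duration is used here purely as a convenient payload):

# Pack an arbitrary message into an Any and unpack it again.
from google.protobuf import any_pb2
from google.protobuf import duration_pb2

payload = duration_pb2.Duration(seconds=90)
wrapper = any_pb2.Any()
wrapper.Pack(payload)                              # stores type_url plus serialized bytes

restored = duration_pb2.Duration()
if wrapper.Is(duration_pb2.Duration.DESCRIPTOR):   # check the payload type first
    wrapper.Unpack(restored)                       # parse the bytes back out
assert restored.seconds == 90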
							
								
								
									
lib/protobuf/api_pb2.py (new file, 32 lines)

@@ -0,0 +1,32 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/api.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
+from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBv\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z,google.golang.org/protobuf/types/known/apipb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.api_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\010ApiProtoP\001Z,google.golang.org/protobuf/types/known/apipb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+  _API._serialized_start=113
+  _API._serialized_end=370
+  _METHOD._serialized_start=373
+  _METHOD._serialized_end=586
+  _MIXIN._serialized_start=588
+  _MIXIN._serialized_end=623
+# @@protoc_insertion_point(module_scope)
							
								
								
									
lib/protobuf/compiler/__init__.py (new file, empty)
						
							
								
								
									
lib/protobuf/compiler/plugin_pb2.py (new file, 35 lines)

@@ -0,0 +1,35 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/compiler/plugin.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\xc1\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a\x7f\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\x12?\n\x13generated_code_info\x18\x10 \x01(\x0b\x32\".google.protobuf.GeneratedCodeInfo\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42W\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ)google.golang.org/protobuf/types/pluginpb')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.compiler.plugin_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\034com.google.protobuf.compilerB\014PluginProtosZ)google.golang.org/protobuf/types/pluginpb'
+  _VERSION._serialized_start=101
+  _VERSION._serialized_end=171
+  _CODEGENERATORREQUEST._serialized_start=174
+  _CODEGENERATORREQUEST._serialized_end=360
+  _CODEGENERATORRESPONSE._serialized_start=363
+  _CODEGENERATORRESPONSE._serialized_end=684
+  _CODEGENERATORRESPONSE_FILE._serialized_start=499
+  _CODEGENERATORRESPONSE_FILE._serialized_end=626
+  _CODEGENERATORRESPONSE_FEATURE._serialized_start=628
+  _CODEGENERATORRESPONSE_FEATURE._serialized_end=684
+# @@protoc_insertion_point(module_scope)
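
plugin_pb2 defines the protoc plugin contract: protoc writes a serialized CodeGeneratorRequest to the plugin's stdin and reads a serialized CodeGeneratorResponse from its stdout. A minimal plugin sketch (the output file name and content below are hypothetical):

#!/usr/bin/env python3
# Minimal protoc plugin: emit one text file per requested .proto.
import sys

from google.protobuf.compiler import plugin_pb2


def main():
    request = plugin_pb2.CodeGeneratorRequest.FromString(sys.stdin.buffer.read())

    response = plugin_pb2.CodeGeneratorResponse()
    # Advertise proto3 optional support via the Feature enum defined above.
    response.supported_features = (
        plugin_pb2.CodeGeneratorResponse.FEATURE_PROTO3_OPTIONAL)

    for name in request.file_to_generate:
        out = response.file.add()
        out.name = name + '.listing.txt'   # hypothetical output path
        out.content = 'generated for %s\n' % name

    sys.stdout.buffer.write(response.SerializeToString())


if __name__ == '__main__':
    main()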
							
								
								
									
lib/protobuf/descriptor.py (new file, 1224 lines; diff not rendered)
						
							
								
								
									
lib/protobuf/descriptor_database.py (new file, 177 lines)

@@ -0,0 +1,177 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Provides a container for DescriptorProtos."""
+
+__author__ = 'matthewtoia@google.com (Matt Toia)'
+
+import warnings
+
+
+class Error(Exception):
+  pass
+
+
+class DescriptorDatabaseConflictingDefinitionError(Error):
+  """Raised when a proto is added with the same name & different descriptor."""
+
+
+class DescriptorDatabase(object):
+  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""
+
+  def __init__(self):
+    self._file_desc_protos_by_file = {}
+    self._file_desc_protos_by_symbol = {}
+
+  def Add(self, file_desc_proto):
+    """Adds the FileDescriptorProto and its types to this database.
+
+    Args:
+      file_desc_proto: The FileDescriptorProto to add.
+    Raises:
+      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
+        add a proto with the same name but different definition than an
+        existing proto in the database.
+    """
+    proto_name = file_desc_proto.name
+    if proto_name not in self._file_desc_protos_by_file:
+      self._file_desc_protos_by_file[proto_name] = file_desc_proto
+    elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
+      raise DescriptorDatabaseConflictingDefinitionError(
+          '%s already added, but with different descriptor.' % proto_name)
+    else:
+      return
+
+    # Add all the top-level descriptors to the index.
+    package = file_desc_proto.package
+    for message in file_desc_proto.message_type:
+      for name in _ExtractSymbols(message, package):
+        self._AddSymbol(name, file_desc_proto)
+    for enum in file_desc_proto.enum_type:
+      self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
+      for enum_value in enum.value:
+        self._file_desc_protos_by_symbol[
+            '.'.join((package, enum_value.name))] = file_desc_proto
+    for extension in file_desc_proto.extension:
+      self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
+    for service in file_desc_proto.service:
+      self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
+
+  def FindFileByName(self, name):
+    """Finds the file descriptor proto by file name.
+
+    Typically the file name is a relative path ending to a .proto file. The
+    proto with the given name will have to have been added to this database
+    using the Add method or else an error will be raised.
+
+    Args:
+      name: The file name to find.
+
+    Returns:
+      The file descriptor proto matching the name.
+
+    Raises:
+      KeyError if no file by the given name was added.
+    """
+
+    return self._file_desc_protos_by_file[name]
+
+  def FindFileContainingSymbol(self, symbol):
+    """Finds the file descriptor proto containing the specified symbol.
+
+    The symbol should be a fully qualified name including the file descriptor's
+    package and any containing messages. Some examples:
+
+    'some.package.name.Message'
+    'some.package.name.Message.NestedEnum'
+    'some.package.name.Message.some_field'
+
+    The file descriptor proto containing the specified symbol must be added to
+    this database using the Add method or else an error will be raised.
+
+    Args:
+      symbol: The fully qualified symbol name.
+
+    Returns:
+      The file descriptor proto containing the symbol.
+
+    Raises:
+      KeyError if no file contains the specified symbol.
+    """
+    try:
+      return self._file_desc_protos_by_symbol[symbol]
+    except KeyError:
+      # Fields, enum values, and nested extensions are not in
+      # _file_desc_protos_by_symbol. Try to find the top level
+      # descriptor. Non-existent nested symbol under a valid top level
+      # descriptor can also be found. The behavior is the same with
+      # protobuf C++.
+      top_level, _, _ = symbol.rpartition('.')
+      try:
+        return self._file_desc_protos_by_symbol[top_level]
+      except KeyError:
+        # Raise the original symbol as a KeyError for better diagnostics.
+        raise KeyError(symbol)
+
+  def FindFileContainingExtension(self, extendee_name, extension_number):
+    # TODO(jieluo): implement this API.
+    return None
+
+  def FindAllExtensionNumbers(self, extendee_name):
+    # TODO(jieluo): implement this API.
+    return []
+
+  def _AddSymbol(self, name, file_desc_proto):
+    if name in self._file_desc_protos_by_symbol:
+      warn_msg = ('Conflict register for file "' + file_desc_proto.name +
+                  '": ' + name +
+                  ' is already defined in file "' +
+                  self._file_desc_protos_by_symbol[name].name + '"')
+      warnings.warn(warn_msg, RuntimeWarning)
+    self._file_desc_protos_by_symbol[name] = file_desc_proto
+
+
+def _ExtractSymbols(desc_proto, package):
+  """Pulls out all the symbols from a descriptor proto.
+
+  Args:
+    desc_proto: The proto to extract symbols from.
+    package: The package containing the descriptor type.
+
+  Yields:
+    The fully qualified name found in the descriptor.
+  """
+  message_name = package + '.' + desc_proto.name if package else desc_proto.name
+  yield message_name
+  for nested_type in desc_proto.nested_type:
+    for symbol in _ExtractSymbols(nested_type, message_name):
+      yield symbol
+  for enum_type in desc_proto.enum_type:
+    yield '.'.join((message_name, enum_type.name))
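
Taken together: Add indexes a file under its name and under every top-level symbol it defines, and the Find* methods read those indexes back. A small usage sketch (the file and message names are made up):

from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_database

db = descriptor_database.DescriptorDatabase()

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/greeting.proto'      # hypothetical .proto path
file_proto.package = 'example'
file_proto.message_type.add().name = 'Greeting'

db.Add(file_proto)
assert db.FindFileByName('example/greeting.proto') is file_proto
assert db.FindFileContainingSymbol('example.Greeting') is file_proto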
							
								
								
									
lib/protobuf/descriptor_pb2.py (new file, 1925 lines; diff not rendered)
						
							
								
								
									
lib/protobuf/descriptor_pool.py (new file, 1295 lines; diff not rendered)
						
							
								
								
									
lib/protobuf/duration_pb2.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/duration.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x83\x01\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z1google.golang.org/protobuf/types/known/durationpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.duration_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rDurationProtoP\001Z1google.golang.org/protobuf/types/known/durationpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+  _DURATION._serialized_start=51
+  _DURATION._serialized_end=93
+# @@protoc_insertion_point(module_scope)
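
Duration is just seconds plus nanos, but the Python runtime mixes helper methods into the well-known types, so converting to and from datetime.timedelta is a one-liner each way. A sketch:

from datetime import timedelta

from google.protobuf import duration_pb2

d = duration_pb2.Duration()
d.FromTimedelta(timedelta(minutes=1, milliseconds=500))
assert (d.seconds, d.nanos) == (60, 500000000)
assert d.ToTimedelta() == timedelta(minutes=1, milliseconds=500)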
							
								
								
									
lib/protobuf/empty_pb2.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/empty.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyB}\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z.google.golang.org/protobuf/types/known/emptypb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.empty_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\nEmptyProtoP\001Z.google.golang.org/protobuf/types/known/emptypb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+  _EMPTY._serialized_start=48
+  _EMPTY._serialized_end=55
+# @@protoc_insertion_point(module_scope)
							
								
								
									
lib/protobuf/field_mask_pb2.py (new file, 26 lines)

@@ -0,0 +1,26 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: google/protobuf/field_mask.proto
+"""Generated protocol buffer code."""
+from google.protobuf.internal import builder as _builder
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import descriptor_pool as _descriptor_pool
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x85\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z2google.golang.org/protobuf/types/known/fieldmaskpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3')
+
+_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals())
+_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.field_mask_pb2', globals())
+if _descriptor._USE_C_DESCRIPTORS == False:
+
+  DESCRIPTOR._options = None
+  DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z2google.golang.org/protobuf/types/known/fieldmaskpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes'
+  _FIELDMASK._serialized_start=53
+  _FIELDMASK._serialized_end=79
+# @@protoc_insertion_point(module_scope)
							
								
								
									
lib/protobuf/internal/__init__.py (new file, empty)
						
							
								
								
									
										
lib/protobuf/internal/_api_implementation.cpython-310-x86_64-linux-gnu.so (new executable file, binary)
						
							
								
								
									
lib/protobuf/internal/api_implementation.py (new file, 112 lines)

@@ -0,0 +1,112 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Determine which implementation of the protobuf API is used in this process.
+"""
+
+import os
+import sys
+import warnings
+
+try:
+  # pylint: disable=g-import-not-at-top
+  from google.protobuf.internal import _api_implementation
+  # The compile-time constants in the _api_implementation module can be used to
+  # switch to a certain implementation of the Python API at build time.
+  _api_version = _api_implementation.api_version
+except ImportError:
+  _api_version = -1  # Unspecified by compiler flags.
+
+if _api_version == 1:
+  raise ValueError('api_version=1 is no longer supported.')
+
+
+_default_implementation_type = ('cpp' if _api_version > 0 else 'python')
+
+
+# This environment variable can be used to switch to a certain implementation
+# of the Python API, overriding the compile-time constants in the
+# _api_implementation module. Right now only 'python' and 'cpp' are valid
+# values. Any other value will be ignored.
+_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
+                                 _default_implementation_type)
+
+if _implementation_type != 'python':
+  _implementation_type = 'cpp'
+
+if 'PyPy' in sys.version and _implementation_type == 'cpp':
+  warnings.warn('PyPy does not work yet with cpp protocol buffers. '
+                'Falling back to the python implementation.')
+  _implementation_type = 'python'
+
+
+# Detect if serialization should be deterministic by default
+try:
+  # The presence of this module in a build allows the proto implementation to
+  # be upgraded merely via build deps.
+  #
+  # NOTE: Merely importing this automatically enables deterministic proto
+  # serialization for C++ code, but we still need to export it as a boolean so
+  # that we can do the same for `_implementation_type == 'python'`.
+  #
+  # NOTE2: It is possible for C++ code to enable deterministic serialization by
+  # default _without_ affecting Python code, if the C++ implementation is not in
+  # use by this module.  That is intended behavior, so we don't actually expose
+  # this boolean outside of this module.
+  #
+  # pylint: disable=g-import-not-at-top,unused-import
+  from google.protobuf import enable_deterministic_proto_serialization
+  _python_deterministic_proto_serialization = True
+except ImportError:
+  _python_deterministic_proto_serialization = False
+
+
+# Usage of this function is discouraged. Clients shouldn't care which
+# implementation of the API is in use. Note that there is no guarantee
+# that differences between APIs will be maintained.
+# Please don't use this function if possible.
+def Type():
+  return _implementation_type
+
+
+def _SetType(implementation_type):
+  """Never use! Only for protobuf benchmark."""
+  global _implementation_type
+  _implementation_type = implementation_type
+
+
+# See comment on 'Type' above.
+def Version():
+  return 2
+
+
+# For internal use only
+def IsPythonDefaultSerializationDeterministic():
+  return _python_deterministic_proto_serialization
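
Because PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION is read at import time, an override has to happen before anything imports protobuf. A sketch:

# Force the pure-Python implementation; must run before any protobuf import.
import os
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'

from google.protobuf.internal import api_implementation

assert api_implementation.Type() == 'python'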
							
								
								
									
lib/protobuf/internal/builder.py (new file, 130 lines)

@@ -0,0 +1,130 @@
+# Protocol Buffers - Google's data interchange format
+# Copyright 2008 Google Inc.  All rights reserved.
+# https://developers.google.com/protocol-buffers/
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+#     * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+#     * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+#     * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+"""Builds descriptors, message classes and services for generated _pb2.py.
+
+This file is only called in python generated _pb2.py files. It builds
+descriptors, message classes and services that users can directly use
+in generated code.
+"""
+
+__author__ = 'jieluo@google.com (Jie Luo)'
+
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+
+_sym_db = _symbol_database.Default()
+
+
+def BuildMessageAndEnumDescriptors(file_des, module):
+  """Builds message and enum descriptors.
+
+  Args:
+    file_des: FileDescriptor of the .proto file
+    module: Generated _pb2 module
+  """
+
+  def BuildNestedDescriptors(msg_des, prefix):
+    for (name, nested_msg) in msg_des.nested_types_by_name.items():
+      module_name = prefix + name.upper()
+      module[module_name] = nested_msg
+      BuildNestedDescriptors(nested_msg, module_name + '_')
+    for enum_des in msg_des.enum_types:
+      module[prefix + enum_des.name.upper()] = enum_des
+
+  for (name, msg_des) in file_des.message_types_by_name.items():
+    module_name = '_' + name.upper()
+    module[module_name] = msg_des
+    BuildNestedDescriptors(msg_des, module_name + '_')
+
+
+def BuildTopDescriptorsAndMessages(file_des, module_name, module):
+  """Builds top level descriptors and message classes.
+
+  Args:
+    file_des: FileDescriptor of the .proto file
+    module_name: str, the name of generated _pb2 module
+    module: Generated _pb2 module
+  """
+
+  def BuildMessage(msg_des):
+    create_dict = {}
+    for (name, nested_msg) in msg_des.nested_types_by_name.items():
+      create_dict[name] = BuildMessage(nested_msg)
+    create_dict['DESCRIPTOR'] = msg_des
+    create_dict['__module__'] = module_name
+    message_class = _reflection.GeneratedProtocolMessageType(
+        msg_des.name, (_message.Message,), create_dict)
+    _sym_db.RegisterMessage(message_class)
+    return message_class
+
+  # top level enums
+  for (name, enum_des) in file_des.enum_types_by_name.items():
+    module['_' + name.upper()] = enum_des
+    module[name] = enum_type_wrapper.EnumTypeWrapper(enum_des)
+    for enum_value in enum_des.values:
+      module[enum_value.name] = enum_value.number
+
+  # top level extensions
+  for (name, extension_des) in file_des.extensions_by_name.items():
+    module[name.upper() + '_FIELD_NUMBER'] = extension_des.number
+    module[name] = extension_des
+
+  # services
+  for (name, service) in file_des.services_by_name.items():
+    module['_' + name.upper()] = service
+
+  # Build messages.
+  for (name, msg_des) in file_des.message_types_by_name.items():
+    module[name] = BuildMessage(msg_des)
+
+
+def BuildServices(file_des, module_name, module):
+  """Builds services classes and services stub class.
+
+  Args:
+    file_des: FileDescriptor of the .proto file
+    module_name: str, the name of generated _pb2 module
+    module: Generated _pb2 module
+  """
+  # pylint: disable=g-import-not-at-top
+  from google.protobuf import service as _service
+  from google.protobuf import service_reflection
+  # pylint: enable=g-import-not-at-top
+  for (name, service) in file_des.services_by_name.items():
+    module[name] = service_reflection.GeneratedServiceType(
+        name, (_service.Service,),
+        dict(DESCRIPTOR=service, __module__=module_name))
+    stub_name = name + '_Stub'
+    module[stub_name] = service_reflection.GeneratedServiceStubType(
+        stub_name, (module[name],),
+        dict(DESCRIPTOR=service, __module__=module_name))
							
								
								
									
										710
									
								
								lib/protobuf/internal/containers.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,710 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains container classes to represent different protocol buffer types. | ||||||
|  | 
 | ||||||
|  | This file defines container classes which represent categories of protocol | ||||||
|  | buffer field types which need extra maintenance. Currently these categories | ||||||
|  | are: | ||||||
|  | 
 | ||||||
|  | -   Repeated scalar fields - These are all repeated fields which aren't | ||||||
|  |     composite (e.g. they are of simple types like int32, string, etc). | ||||||
|  | -   Repeated composite fields - Repeated fields which are composite. This | ||||||
|  |     includes groups and nested messages. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | import collections.abc | ||||||
|  | import copy | ||||||
|  | import pickle | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Iterable, | ||||||
|  |     Iterator, | ||||||
|  |     List, | ||||||
|  |     MutableMapping, | ||||||
|  |     MutableSequence, | ||||||
|  |     NoReturn, | ||||||
|  |     Optional, | ||||||
|  |     Sequence, | ||||||
|  |     TypeVar, | ||||||
|  |     Union, | ||||||
|  |     overload, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _T = TypeVar('_T') | ||||||
|  | _K = TypeVar('_K') | ||||||
|  | _V = TypeVar('_V') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BaseContainer(Sequence[_T]): | ||||||
|  |   """Base container class.""" | ||||||
|  | 
 | ||||||
|  |   # Minimizes memory usage and disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_message_listener', '_values'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, message_listener: Any) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The RepeatedScalarFieldContainer will call this object's | ||||||
|  |         Modified() method when it is modified. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._values = [] | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __getitem__(self, key: int) -> _T: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __getitem__(self, key: slice) -> List[_T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key): | ||||||
|  |     """Retrieves item by the specified key.""" | ||||||
|  |     return self._values[key] | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     """Returns the number of elements in the container.""" | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other: Any) -> bool: | ||||||
|  |     """Checks if another instance isn't equal to this one.""" | ||||||
|  |     # The concrete classes should define __eq__. | ||||||
|  |     return not self == other | ||||||
|  | 
 | ||||||
|  |   __hash__ = None | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def sort(self, *args, **kwargs) -> None: | ||||||
|  |     # Continue to support the old sort_function keyword argument. | ||||||
|  |     # This is expected to be a rare occurrence, so use LBYL to avoid | ||||||
|  |     # the overhead of actually catching KeyError. | ||||||
|  |     if 'sort_function' in kwargs: | ||||||
|  |       kwargs['cmp'] = kwargs.pop('sort_function') | ||||||
|  |     self._values.sort(*args, **kwargs) | ||||||
|  | 
 | ||||||
|  |   def reverse(self) -> None: | ||||||
|  |     self._values.reverse() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO(slebedev): Remove this. BaseContainer does *not* conform to | ||||||
|  | # MutableSequence, only its subclasses do. | ||||||
|  | collections.abc.MutableSequence.register(BaseContainer) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RepeatedScalarFieldContainer(BaseContainer[_T], MutableSequence[_T]): | ||||||
|  |   """Simple, type-checked, list-like container for holding repeated scalars.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_type_checker'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       type_checker: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """Args: | ||||||
|  | 
 | ||||||
|  |       message_listener: A MessageListener implementation. The | ||||||
|  |       RepeatedScalarFieldContainer will call this object's Modified() method | ||||||
|  |       when it is modified. | ||||||
|  |       type_checker: A type_checkers.ValueChecker instance to run on elements | ||||||
|  |       inserted into this container. | ||||||
|  |     """ | ||||||
|  |     super().__init__(message_listener) | ||||||
|  |     self._type_checker = type_checker | ||||||
|  | 
 | ||||||
|  |   def append(self, value: _T) -> None: | ||||||
|  |     """Appends an item to the list. Similar to list.append().""" | ||||||
|  |     self._values.append(self._type_checker.CheckValue(value)) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def insert(self, key: int, value: _T) -> None: | ||||||
|  |     """Inserts the item at the specified position. Similar to list.insert().""" | ||||||
|  |     self._values.insert(key, self._type_checker.CheckValue(value)) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq: Iterable[_T]) -> None: | ||||||
|  |     """Extends by appending the given iterable. Similar to list.extend().""" | ||||||
|  |     if elem_seq is None: | ||||||
|  |       return | ||||||
|  |     try: | ||||||
|  |       elem_seq_iter = iter(elem_seq) | ||||||
|  |     except TypeError: | ||||||
|  |       if not elem_seq: | ||||||
|  |         # silently ignore falsy inputs :-/. | ||||||
|  |         # TODO(ptucker): Deprecate this behavior. b/18413862 | ||||||
|  |         return | ||||||
|  |       raise | ||||||
|  | 
 | ||||||
|  |     new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter] | ||||||
|  |     if new_values: | ||||||
|  |       self._values.extend(new_values) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def MergeFrom( | ||||||
|  |       self, | ||||||
|  |       other: Union['RepeatedScalarFieldContainer[_T]', Iterable[_T]], | ||||||
|  |   ) -> None: | ||||||
|  |     """Appends the contents of another repeated field of the same type to this | ||||||
|  |     one. We do not check the types of the individual fields. | ||||||
|  |     """ | ||||||
|  |     self._values.extend(other) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def remove(self, elem: _T): | ||||||
|  |     """Removes an item from the list. Similar to list.remove().""" | ||||||
|  |     self._values.remove(elem) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def pop(self, key: Optional[int] = -1) -> _T: | ||||||
|  |     """Removes and returns an item at a given index. Similar to list.pop().""" | ||||||
|  |     value = self._values[key] | ||||||
|  |     self.__delitem__(key) | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: int, value: _T) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: slice, value: Iterable[_T]) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value) -> None: | ||||||
|  |     """Sets the item on the specified position.""" | ||||||
|  |     if isinstance(key, slice): | ||||||
|  |       if key.step is not None: | ||||||
|  |         raise ValueError('Extended slices not supported') | ||||||
|  |       self._values[key] = map(self._type_checker.CheckValue, value) | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |     else: | ||||||
|  |       self._values[key] = self._type_checker.CheckValue(value) | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: Union[int, slice]) -> None: | ||||||
|  |     """Deletes the item at the specified position.""" | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other: Any) -> bool: | ||||||
|  |     """Compares the current instance with another one.""" | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # Special case for the same type which should be common and fast. | ||||||
|  |     if isinstance(other, self.__class__): | ||||||
|  |       return other._values == self._values | ||||||
|  |     # We are presumably comparing against some other sequence type. | ||||||
|  |     return other == self._values | ||||||
|  | 
 | ||||||
|  |   def __deepcopy__( | ||||||
|  |       self, | ||||||
|  |       unused_memo: Any = None, | ||||||
|  |   ) -> 'RepeatedScalarFieldContainer[_T]': | ||||||
|  |     clone = RepeatedScalarFieldContainer( | ||||||
|  |         copy.deepcopy(self._message_listener), self._type_checker) | ||||||
|  |     clone.MergeFrom(self) | ||||||
|  |     return clone | ||||||
|  | 
 | ||||||
|  |   def __reduce__(self, **kwargs) -> NoReturn: | ||||||
|  |     raise pickle.PickleError( | ||||||
|  |         "Can't pickle repeated scalar fields, convert to list first") | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO(slebedev): Constrain T to be a subtype of Message. | ||||||
|  | class RepeatedCompositeFieldContainer(BaseContainer[_T], MutableSequence[_T]): | ||||||
|  |   """Simple, list-like container for holding repeated composite fields.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_message_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, message_listener: Any, message_descriptor: Any) -> None: | ||||||
|  |     """ | ||||||
|  |     Note that we pass in a descriptor instead of the generated directly, | ||||||
|  |     since at the time we construct a _RepeatedCompositeFieldContainer we | ||||||
|  |     haven't yet necessarily initialized the type that will be contained in the | ||||||
|  |     container. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The RepeatedCompositeFieldContainer will call this object's | ||||||
|  |         Modified() method when it is modified. | ||||||
|  |       message_descriptor: A Descriptor instance describing the protocol type | ||||||
|  |         that should be present in this container.  We'll use the | ||||||
|  |         _concrete_class field of this descriptor when the client calls add(). | ||||||
|  |     """ | ||||||
|  |     super().__init__(message_listener) | ||||||
|  |     self._message_descriptor = message_descriptor | ||||||
|  | 
 | ||||||
|  |   def add(self, **kwargs: Any) -> _T: | ||||||
|  |     """Adds a new element at the end of the list and returns it. Keyword | ||||||
|  |     arguments may be used to initialize the element. | ||||||
|  |     """ | ||||||
|  |     new_element = self._message_descriptor._concrete_class(**kwargs) | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     self._values.append(new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |     return new_element | ||||||
|  | 
 | ||||||
|  |   def append(self, value: _T) -> None: | ||||||
|  |     """Appends one element by copying the message.""" | ||||||
|  |     new_element = self._message_descriptor._concrete_class() | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     new_element.CopyFrom(value) | ||||||
|  |     self._values.append(new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def insert(self, key: int, value: _T) -> None: | ||||||
|  |     """Inserts the item at the specified position by copying.""" | ||||||
|  |     new_element = self._message_descriptor._concrete_class() | ||||||
|  |     new_element._SetListener(self._message_listener) | ||||||
|  |     new_element.CopyFrom(value) | ||||||
|  |     self._values.insert(key, new_element) | ||||||
|  |     if not self._message_listener.dirty: | ||||||
|  |       self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq: Iterable[_T]) -> None: | ||||||
|  |     """Extends by appending the given sequence of elements of the same type | ||||||
|  | 
 | ||||||
|  |     as this one, copying each individual message. | ||||||
|  |     """ | ||||||
|  |     message_class = self._message_descriptor._concrete_class | ||||||
|  |     listener = self._message_listener | ||||||
|  |     values = self._values | ||||||
|  |     for message in elem_seq: | ||||||
|  |       new_element = message_class() | ||||||
|  |       new_element._SetListener(listener) | ||||||
|  |       new_element.MergeFrom(message) | ||||||
|  |       values.append(new_element) | ||||||
|  |     listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def MergeFrom( | ||||||
|  |       self, | ||||||
|  |       other: Union['RepeatedCompositeFieldContainer[_T]', Iterable[_T]], | ||||||
|  |   ) -> None: | ||||||
|  |     """Appends the contents of another repeated field of the same type to this | ||||||
|  |     one, copying each individual message. | ||||||
|  |     """ | ||||||
|  |     self.extend(other) | ||||||
|  | 
 | ||||||
|  |   def remove(self, elem: _T) -> None: | ||||||
|  |     """Removes an item from the list. Similar to list.remove().""" | ||||||
|  |     self._values.remove(elem) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def pop(self, key: int = -1) -> _T: | ||||||
|  |     """Removes and returns an item at a given index. Similar to list.pop().""" | ||||||
|  |     value = self._values[key] | ||||||
|  |     self.__delitem__(key) | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: int, value: _T) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def __setitem__(self, key: slice, value: Iterable[_T]) -> None: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value): | ||||||
|  |     # This method is implemented to make RepeatedCompositeFieldContainer | ||||||
|  |     # structurally compatible with typing.MutableSequence. It is | ||||||
|  |     # otherwise unsupported and will always raise an error. | ||||||
|  |     raise TypeError( | ||||||
|  |         f'{self.__class__.__name__} object does not support item assignment') | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: Union[int, slice]) -> None: | ||||||
|  |     """Deletes the item at the specified position.""" | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other: Any) -> bool: | ||||||
|  |     """Compares the current instance with another one.""" | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     if not isinstance(other, self.__class__): | ||||||
|  |       raise TypeError('Can only compare repeated composite fields against ' | ||||||
|  |                       'other repeated composite fields.') | ||||||
|  |     return self._values == other._values | ||||||
|  | 
 | ||||||
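|  | # A usage sketch of the container above (illustrative only; `MyMessage` and | ||||||
|  | # its `repeated SubMsg items = 1` field are hypothetical): | ||||||
|  | # | ||||||
|  | #   msg = MyMessage() | ||||||
|  | #   item = msg.items.add(name='first')  # constructed in place, returned | ||||||
|  | #   msg.items.append(existing_sub_msg)  # appends a *copy* of the message | ||||||
|  | #   msg.items[0].name = 'renamed'       # elements remain mutable references | ||||||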
|  | 
 | ||||||
|  | class ScalarMap(MutableMapping[_K, _V]): | ||||||
|  |   """Simple, type-checked, dict-like container for holding repeated scalars.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener', | ||||||
|  |                '_entry_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       key_checker: Any, | ||||||
|  |       value_checker: Any, | ||||||
|  |       entry_descriptor: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The ScalarMap will call this object's Modified() method when it | ||||||
|  |         is modified. | ||||||
|  |       key_checker: A type_checkers.ValueChecker instance to run on keys | ||||||
|  |         inserted into this container. | ||||||
|  |       value_checker: A type_checkers.ValueChecker instance to run on values | ||||||
|  |         inserted into this container. | ||||||
|  |       entry_descriptor: The MessageDescriptor of a map entry: key and value. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._key_checker = key_checker | ||||||
|  |     self._value_checker = value_checker | ||||||
|  |     self._entry_descriptor = entry_descriptor | ||||||
|  |     self._values = {} | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key: _K) -> _V: | ||||||
|  |     try: | ||||||
|  |       return self._values[key] | ||||||
|  |     except KeyError: | ||||||
|  |       key = self._key_checker.CheckValue(key) | ||||||
|  |       val = self._value_checker.DefaultValue() | ||||||
|  |       self._values[key] = val | ||||||
|  |       return val | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item: _K) -> bool: | ||||||
|  |     # We check the key's type to match the strong-typing flavor of the API. | ||||||
|  |     # Also this makes it easier to match the behavior of the C++ implementation. | ||||||
|  |     self._key_checker.CheckValue(item) | ||||||
|  |     return item in self._values | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K) -> Optional[_V]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K, default: _T) -> Union[_V, _T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   # We need to override this explicitly, because our defaultdict-like behavior | ||||||
|  |   # will make the default implementation (from our base class) always insert | ||||||
|  |   # the key. | ||||||
|  |   def get(self, key, default=None): | ||||||
|  |     if key in self: | ||||||
|  |       return self[key] | ||||||
|  |     else: | ||||||
|  |       return default | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key: _K, value: _V) -> None: | ||||||
|  |     checked_key = self._key_checker.CheckValue(key) | ||||||
|  |     checked_value = self._value_checker.CheckValue(value) | ||||||
|  |     self._values[checked_key] = checked_value | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: _K) -> None: | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self) -> Iterator[_K]: | ||||||
|  |     return iter(self._values) | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other: 'ScalarMap[_K, _V]') -> None: | ||||||
|  |     self._values.update(other._values) | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def InvalidateIterators(self) -> None: | ||||||
|  |     # It appears that the only way to reliably invalidate iterators to | ||||||
|  |     # self._values is to ensure that its size changes. | ||||||
|  |     original = self._values | ||||||
|  |     self._values = original.copy() | ||||||
|  |     original[None] = None | ||||||
|  | 
 | ||||||
|  |   # This is defined in the abstract base, but we can do it much more cheaply. | ||||||
|  |   def clear(self) -> None: | ||||||
|  |     self._values.clear() | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def GetEntryClass(self) -> Any: | ||||||
|  |     return self._entry_descriptor._concrete_class | ||||||
|  | 
 | ||||||
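|  | # Behavior sketch for ScalarMap (hypothetical `map<string, int32> counts` | ||||||
|  | # field): | ||||||
|  | # | ||||||
|  | #   msg.counts['hits']      # missing key is inserted with the default value 0 | ||||||
|  | #   msg.counts.get('miss')  # returns None and does NOT insert the key | ||||||
|  | #   msg.counts['hits'] = 3  # keys and values are run through the checkers | ||||||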
|  | 
 | ||||||
|  | class MessageMap(MutableMapping[_K, _V]): | ||||||
|  |   """Simple, type-checked, dict-like container for with submessage values.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_key_checker', '_values', '_message_listener', | ||||||
|  |                '_message_descriptor', '_entry_descriptor'] | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       message_listener: Any, | ||||||
|  |       message_descriptor: Any, | ||||||
|  |       key_checker: Any, | ||||||
|  |       entry_descriptor: Any, | ||||||
|  |   ) -> None: | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       message_listener: A MessageListener implementation. | ||||||
|  |         The ScalarMap will call this object's Modified() method when it | ||||||
|  |         is modified. | ||||||
|  |       key_checker: A type_checkers.ValueChecker instance to run on keys | ||||||
|  |         inserted into this container. | ||||||
|  |       value_checker: A type_checkers.ValueChecker instance to run on values | ||||||
|  |         inserted into this container. | ||||||
|  |       entry_descriptor: The MessageDescriptor of a map entry: key and value. | ||||||
|  |     """ | ||||||
|  |     self._message_listener = message_listener | ||||||
|  |     self._message_descriptor = message_descriptor | ||||||
|  |     self._key_checker = key_checker | ||||||
|  |     self._entry_descriptor = entry_descriptor | ||||||
|  |     self._values = {} | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key: _K) -> _V: | ||||||
|  |     key = self._key_checker.CheckValue(key) | ||||||
|  |     try: | ||||||
|  |       return self._values[key] | ||||||
|  |     except KeyError: | ||||||
|  |       new_element = self._message_descriptor._concrete_class() | ||||||
|  |       new_element._SetListener(self._message_listener) | ||||||
|  |       self._values[key] = new_element | ||||||
|  |       self._message_listener.Modified() | ||||||
|  |       return new_element | ||||||
|  | 
 | ||||||
|  |   def get_or_create(self, key: _K) -> _V: | ||||||
|  |     """get_or_create() is an alias for getitem (ie. map[key]). | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       key: The key to get or create in the map. | ||||||
|  | 
 | ||||||
|  |     This is useful in cases where you want to be explicit that the call is | ||||||
|  |     mutating the map.  This can avoid lint errors for statements like this | ||||||
|  |     that otherwise would appear to be pointless statements: | ||||||
|  | 
 | ||||||
|  |       msg.my_map[key] | ||||||
|  |     """ | ||||||
|  |     return self[key] | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K) -> Optional[_V]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   @overload | ||||||
|  |   def get(self, key: _K, default: _T) -> Union[_V, _T]: | ||||||
|  |     ... | ||||||
|  | 
 | ||||||
|  |   # We need to override this explicitly, because our defaultdict-like behavior | ||||||
|  |   # will make the default implementation (from our base class) always insert | ||||||
|  |   # the key. | ||||||
|  |   def get(self, key, default=None): | ||||||
|  |     if key in self: | ||||||
|  |       return self[key] | ||||||
|  |     else: | ||||||
|  |       return default | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item: _K) -> bool: | ||||||
|  |     item = self._key_checker.CheckValue(item) | ||||||
|  |     return item in self._values | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key: _K, value: _V) -> NoReturn: | ||||||
|  |     raise ValueError('May not set values directly, call my_map[key].foo = 5') | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key: _K) -> None: | ||||||
|  |     key = self._key_checker.CheckValue(key) | ||||||
|  |     del self._values[key] | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def __len__(self) -> int: | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self) -> Iterator[_K]: | ||||||
|  |     return iter(self._values) | ||||||
|  | 
 | ||||||
|  |   def __repr__(self) -> str: | ||||||
|  |     return repr(self._values) | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other: 'MessageMap[_K, _V]') -> None: | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     for key in other._values: | ||||||
|  |       # According to documentation: "When parsing from the wire or when merging, | ||||||
|  |       # if there are duplicate map keys the last key seen is used". | ||||||
|  |       if key in self: | ||||||
|  |         del self[key] | ||||||
|  |       self[key].CopyFrom(other[key]) | ||||||
|  |     # self._message_listener.Modified() not required here, because | ||||||
|  |     # mutations to submessages already propagate. | ||||||
|  | 
 | ||||||
|  |   def InvalidateIterators(self) -> None: | ||||||
|  |     # It appears that the only way to reliably invalidate iterators to | ||||||
|  |     # self._values is to ensure that its size changes. | ||||||
|  |     original = self._values | ||||||
|  |     self._values = original.copy() | ||||||
|  |     original[None] = None | ||||||
|  | 
 | ||||||
|  |   # This is defined in the abstract base, but we can do it much more cheaply. | ||||||
|  |   def clear(self) -> None: | ||||||
|  |     self._values.clear() | ||||||
|  |     self._message_listener.Modified() | ||||||
|  | 
 | ||||||
|  |   def GetEntryClass(self) -> Any: | ||||||
|  |     return self._entry_descriptor._concrete_class | ||||||
|  | 
 | ||||||
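|  | # Sketch of the message-map API above (hypothetical `map<string, SubMsg> | ||||||
|  | # entries` field): | ||||||
|  | # | ||||||
|  | #   msg.entries['a'].field = 5      # reading a missing key creates the entry | ||||||
|  | #   msg.entries.get_or_create('b')  # explicit alias for msg.entries['b'] | ||||||
|  | #   msg.entries['c'] = SubMsg()     # raises ValueError; mutate via fields | ||||||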
|  | 
 | ||||||
|  | class _UnknownField: | ||||||
|  |   """A parsed unknown field.""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_field_number', '_wire_type', '_data'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self, field_number, wire_type, data): | ||||||
|  |     self._field_number = field_number | ||||||
|  |     self._wire_type = wire_type | ||||||
|  |     self._data = data | ||||||
|  | 
 | ||||||
|  |   def __lt__(self, other): | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._field_number < other._field_number | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return (self._field_number == other._field_number and | ||||||
|  |             self._wire_type == other._wire_type and | ||||||
|  |             self._data == other._data) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnknownFieldRef: | ||||||
|  |   """A lightweight, index-based view of one entry in an UnknownFieldSet.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, parent, index): | ||||||
|  |     self._parent = parent | ||||||
|  |     self._index = index | ||||||
|  | 
 | ||||||
|  |   def _check_valid(self): | ||||||
|  |     if not self._parent: | ||||||
|  |       raise ValueError('UnknownField does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     if self._index >= len(self._parent): | ||||||
|  |       raise ValueError('UnknownField does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def field_number(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._field_number | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def wire_type(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._wire_type | ||||||
|  | 
 | ||||||
|  |   @property | ||||||
|  |   def data(self): | ||||||
|  |     self._check_valid() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     return self._parent._internal_get(self._index)._data | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnknownFieldSet: | ||||||
|  |   """UnknownField container""" | ||||||
|  | 
 | ||||||
|  |   # Disallows assignment to other attributes. | ||||||
|  |   __slots__ = ['_values'] | ||||||
|  | 
 | ||||||
|  |   def __init__(self): | ||||||
|  |     self._values = [] | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, index): | ||||||
|  |     if self._values is None: | ||||||
|  |       raise ValueError('UnknownFields does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     size = len(self._values) | ||||||
|  |     if index < 0: | ||||||
|  |       index += size | ||||||
|  |     if index < 0 or index >= size: | ||||||
|  |       raise IndexError('index %d out of range' % index) | ||||||
|  | 
 | ||||||
|  |     return UnknownFieldRef(self, index) | ||||||
|  | 
 | ||||||
|  |   def _internal_get(self, index): | ||||||
|  |     return self._values[index] | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     if self._values is None: | ||||||
|  |       raise ValueError('UnknownFields does not exist. ' | ||||||
|  |                        'The parent message might be cleared.') | ||||||
|  |     return len(self._values) | ||||||
|  | 
 | ||||||
|  |   def _add(self, field_number, wire_type, data): | ||||||
|  |     unknown_field = _UnknownField(field_number, wire_type, data) | ||||||
|  |     self._values.append(unknown_field) | ||||||
|  |     return unknown_field | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     for i in range(len(self)): | ||||||
|  |       yield UnknownFieldRef(self, i) | ||||||
|  | 
 | ||||||
|  |   def _extend(self, other): | ||||||
|  |     if other is None: | ||||||
|  |       return | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     self._values.extend(other._values) | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if self is other: | ||||||
|  |       return True | ||||||
|  |     # Sort unknown fields because their order shouldn't | ||||||
|  |     # affect equality test. | ||||||
|  |     values = list(self._values) | ||||||
|  |     if other is None: | ||||||
|  |       return not values | ||||||
|  |     values.sort() | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     other_values = sorted(other._values) | ||||||
|  |     return values == other_values | ||||||
|  | 
 | ||||||
|  |   def _clear(self): | ||||||
|  |     for value in self._values: | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if isinstance(value._data, UnknownFieldSet): | ||||||
|  |         value._data._clear()  # pylint: disable=protected-access | ||||||
|  |     self._values = None | ||||||
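|  |  | ||||||
|  | # Unknown fields are exposed to callers through UnknownFieldRef views, e.g. | ||||||
|  | # (sketch; `msg` is any parsed message): | ||||||
|  | # | ||||||
|  | #   for field in msg.UnknownFields(): | ||||||
|  | #     print(field.field_number, field.wire_type, field.data) | ||||||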
1029  lib/protobuf/internal/decoder.py  Normal file
829  lib/protobuf/internal/encoder.py  Normal file
						|  | @ -0,0 +1,829 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Code for encoding protocol message primitives. | ||||||
|  | 
 | ||||||
|  | Contains the logic for encoding every logical protocol field type | ||||||
|  | into one of the 5 physical wire types. | ||||||
|  | 
 | ||||||
|  | This code is designed to push the Python interpreter's performance to the | ||||||
|  | limits. | ||||||
|  | 
 | ||||||
|  | The basic idea is that at startup time, for every field (i.e. every | ||||||
|  | FieldDescriptor) we construct two functions:  a "sizer" and an "encoder".  The | ||||||
|  | sizer takes a value of this field's type and computes its byte size.  The | ||||||
|  | encoder takes a writer function and a value.  It encodes the value into byte | ||||||
|  | strings and invokes the writer function to write those strings.  Typically the | ||||||
|  | writer function is the write() method of a BytesIO. | ||||||
|  | 
 | ||||||
|  | We try to do as much work as possible when constructing the writer and the | ||||||
|  | sizer rather than when calling them.  In particular: | ||||||
|  | * We copy any needed global functions to local variables, so that we do not need | ||||||
|  |   to do costly global table lookups at runtime. | ||||||
|  | * Similarly, we try to do any attribute lookups at startup time if possible. | ||||||
|  | * Every field's tag is encoded to bytes at startup, since it can't change at | ||||||
|  |   runtime. | ||||||
|  | * Whatever component of the field size we can compute at startup, we do. | ||||||
|  | * We *avoid* sharing code if doing so would make the code slower and not sharing | ||||||
|  |   does not burden us too much.  For example, encoders for repeated fields do | ||||||
|  |   not just call the encoders for singular fields in a loop because this would | ||||||
|  |   add an extra function call overhead for every loop iteration; instead, we | ||||||
|  |   manually inline the single-value encoder into the loop. | ||||||
|  | * If a Python function lacks a return statement, Python actually generates | ||||||
|  |   instructions to pop the result of the last statement off the stack, push | ||||||
|  |   None onto the stack, and then return that.  If we really don't care what | ||||||
|  |   value is returned, then we can save two instructions by returning the | ||||||
|  |   result of the last statement.  It looks funny but it helps. | ||||||
|  | * We assume that type and bounds checking has happened at a higher level. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
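|  | # A minimal sketch of the two-phase pattern described above (names here are | ||||||
|  | # illustrative, not part of this module): the outer function runs once per | ||||||
|  | # field at startup; the returned closure runs on the hot path with its | ||||||
|  | # dependencies already bound to locals. | ||||||
|  | # | ||||||
|  | #   def MakeSizer(tag_size): | ||||||
|  | #     local_VarintSize = _VarintSize      # bind the global to a local once | ||||||
|  | #     def Size(value):                    # per-call work only | ||||||
|  | #       return tag_size + local_VarintSize(value) | ||||||
|  | #     return Size | ||||||
|  |  | ||||||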
|  | __author__ = 'kenton@google.com (Kenton Varda)' | ||||||
|  | 
 | ||||||
|  | import struct | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import wire_format | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # This will overflow and thus become IEEE-754 "infinity".  We would use | ||||||
|  | # "float('inf')" but it doesn't work on Windows pre-Python-2.6. | ||||||
|  | _POS_INF = 1e10000 | ||||||
|  | _NEG_INF = -_POS_INF | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VarintSize(value): | ||||||
|  |   """Compute the size of a varint value.""" | ||||||
|  |   if value <= 0x7f: return 1 | ||||||
|  |   if value <= 0x3fff: return 2 | ||||||
|  |   if value <= 0x1fffff: return 3 | ||||||
|  |   if value <= 0xfffffff: return 4 | ||||||
|  |   if value <= 0x7ffffffff: return 5 | ||||||
|  |   if value <= 0x3ffffffffff: return 6 | ||||||
|  |   if value <= 0x1ffffffffffff: return 7 | ||||||
|  |   if value <= 0xffffffffffffff: return 8 | ||||||
|  |   if value <= 0x7fffffffffffffff: return 9 | ||||||
|  |   return 10 | ||||||
|  | 
 | ||||||
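|  | # For example: _VarintSize(1) == 1; _VarintSize(300) == 2, since 300 needs | ||||||
|  | # nine bits, i.e. two 7-bit groups; and _VarintSize(2**63) == 10. | ||||||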
|  | 
 | ||||||
|  | def _SignedVarintSize(value): | ||||||
|  |   """Compute the size of a signed varint value.""" | ||||||
|  |   if value < 0: return 10 | ||||||
|  |   if value <= 0x7f: return 1 | ||||||
|  |   if value <= 0x3fff: return 2 | ||||||
|  |   if value <= 0x1fffff: return 3 | ||||||
|  |   if value <= 0xfffffff: return 4 | ||||||
|  |   if value <= 0x7ffffffff: return 5 | ||||||
|  |   if value <= 0x3ffffffffff: return 6 | ||||||
|  |   if value <= 0x1ffffffffffff: return 7 | ||||||
|  |   if value <= 0xffffffffffffff: return 8 | ||||||
|  |   if value <= 0x7fffffffffffffff: return 9 | ||||||
|  |   return 10 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _TagSize(field_number): | ||||||
|  |   """Returns the number of bytes required to serialize a tag with this field | ||||||
|  |   number.""" | ||||||
|  |   # Just pass in type 0, since the type won't affect the tag+type size. | ||||||
|  |   return _VarintSize(wire_format.PackTag(field_number, 0)) | ||||||
|  | 
 | ||||||
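|  | # Since a tag is the varint (field_number << 3) | wire_type, field numbers | ||||||
|  | # 1..15 fit in a single tag byte (_TagSize(1) == 1), while field number 16 | ||||||
|  | # already needs two bytes (_TagSize(16) == 2). | ||||||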
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # In this section we define some generic sizers.  Each of these functions | ||||||
|  | # takes parameters specific to a particular field type, e.g. int32 or fixed64. | ||||||
|  | # It returns another function which in turn takes parameters specific to a | ||||||
|  | # particular field, e.g. the field number and whether it is repeated or packed. | ||||||
|  | # Look at the next section to see how these are used. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SimpleSizer(compute_value_size): | ||||||
|  |   """A sizer which uses the function compute_value_size to compute the size of | ||||||
|  |   each value.  Typically compute_value_size is _VarintSize.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = 0 | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(element) | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         result = tag_size * len(value) | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(element) | ||||||
|  |         return result | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return tag_size + compute_value_size(value) | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ModifiedSizer(compute_value_size, modify_value): | ||||||
|  |   """Like SimpleSizer, but modify_value is invoked on each value before it is | ||||||
|  |   passed to compute_value_size.  modify_value is typically ZigZagEncode.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = 0 | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(modify_value(element)) | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         result = tag_size * len(value) | ||||||
|  |         for element in value: | ||||||
|  |           result += compute_value_size(modify_value(element)) | ||||||
|  |         return result | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return tag_size + compute_value_size(modify_value(value)) | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _FixedSizer(value_size): | ||||||
|  |   """Like _SimpleSizer except for a fixed-size field.  The input is the size | ||||||
|  |   of one value.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificSizer(field_number, is_repeated, is_packed): | ||||||
|  |     tag_size = _TagSize(field_number) | ||||||
|  |     if is_packed: | ||||||
|  |       local_VarintSize = _VarintSize | ||||||
|  |       def PackedFieldSize(value): | ||||||
|  |         result = len(value) * value_size | ||||||
|  |         return result + local_VarintSize(result) + tag_size | ||||||
|  |       return PackedFieldSize | ||||||
|  |     elif is_repeated: | ||||||
|  |       element_size = value_size + tag_size | ||||||
|  |       def RepeatedFieldSize(value): | ||||||
|  |         return len(value) * element_size | ||||||
|  |       return RepeatedFieldSize | ||||||
|  |     else: | ||||||
|  |       field_size = value_size + tag_size | ||||||
|  |       def FieldSize(value): | ||||||
|  |         return field_size | ||||||
|  |       return FieldSize | ||||||
|  | 
 | ||||||
|  |   return SpecificSizer | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Here we declare a sizer constructor for each field type.  Each "sizer | ||||||
|  | # constructor" is a function that takes (field_number, is_repeated, is_packed) | ||||||
|  | # as parameters and returns a sizer, which in turn takes a field value as | ||||||
|  | # a parameter and returns its encoded size. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize) | ||||||
|  | 
 | ||||||
|  | UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize) | ||||||
|  | 
 | ||||||
|  | SInt32Sizer = SInt64Sizer = _ModifiedSizer( | ||||||
|  |     _SignedVarintSize, wire_format.ZigZagEncode) | ||||||
|  | 
 | ||||||
|  | Fixed32Sizer = SFixed32Sizer = FloatSizer  = _FixedSizer(4) | ||||||
|  | Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8) | ||||||
|  | 
 | ||||||
|  | BoolSizer = _FixedSizer(1) | ||||||
|  | 
 | ||||||
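|  | # Example use of a sizer constructor: a singular int32 field numbered 1 gets | ||||||
|  | # | ||||||
|  | #   sizer = Int32Sizer(1, is_repeated=False, is_packed=False) | ||||||
|  | #   sizer(150)  # == 3: one tag byte plus two bytes for varint 150 | ||||||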
|  | 
 | ||||||
|  | def StringSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a string field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = local_len(element.encode('utf-8')) | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = local_len(value.encode('utf-8')) | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
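|  | # Note the string is UTF-8 encoded just to measure it: len('héllo') == 5, | ||||||
|  | # but the wire length is 6 because 'é' takes two UTF-8 bytes. | ||||||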
|  | 
 | ||||||
|  | def BytesSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a bytes field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = local_len(element) | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = local_len(value) | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a group field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) * 2 | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         result += element.ByteSize() | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       return tag_size + value.ByteSize() | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSizer(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns a sizer for a message field.""" | ||||||
|  | 
 | ||||||
|  |   tag_size = _TagSize(field_number) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def RepeatedFieldSize(value): | ||||||
|  |       result = tag_size * len(value) | ||||||
|  |       for element in value: | ||||||
|  |         l = element.ByteSize() | ||||||
|  |         result += local_VarintSize(l) + l | ||||||
|  |       return result | ||||||
|  |     return RepeatedFieldSize | ||||||
|  |   else: | ||||||
|  |     def FieldSize(value): | ||||||
|  |       l = value.ByteSize() | ||||||
|  |       return tag_size + local_VarintSize(l) + l | ||||||
|  |     return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # MessageSet is special: it needs custom logic to compute its size properly. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemSizer(field_number): | ||||||
|  |   """Returns a sizer for extensions of MessageSet. | ||||||
|  | 
 | ||||||
|  |   The message set message looks like this: | ||||||
|  |     message MessageSet { | ||||||
|  |       repeated group Item = 1 { | ||||||
|  |         required int32 type_id = 2; | ||||||
|  |         required string message = 3; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   """ | ||||||
|  |   static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) + | ||||||
|  |                  _TagSize(3)) | ||||||
|  |   local_VarintSize = _VarintSize | ||||||
|  | 
 | ||||||
|  |   def FieldSize(value): | ||||||
|  |     l = value.ByteSize() | ||||||
|  |     return static_size + local_VarintSize(l) + l | ||||||
|  | 
 | ||||||
|  |   return FieldSize | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # Map is special: it needs custom logic to compute its size properly. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MapSizer(field_descriptor, is_message_map): | ||||||
|  |   """Returns a sizer for a map field.""" | ||||||
|  | 
 | ||||||
|  |   # Can't look at field_descriptor.message_type._concrete_class because it may | ||||||
|  |   # not have been initialized yet. | ||||||
|  |   message_type = field_descriptor.message_type | ||||||
|  |   message_sizer = MessageSizer(field_descriptor.number, False, False) | ||||||
|  | 
 | ||||||
|  |   def FieldSize(map_value): | ||||||
|  |     total = 0 | ||||||
|  |     for key in map_value: | ||||||
|  |       value = map_value[key] | ||||||
|  |       # It's wasteful to create the messages and throw them away a moment | ||||||
|  |       # later since we'll do the same for the actual encode.  But there's not | ||||||
|  |       # an obvious way to avoid this within the current design without tons | ||||||
|  |       # of code duplication. For message maps, value.ByteSize() should be | ||||||
|  |       # called to refresh its cached size. | ||||||
|  |       entry_msg = message_type._concrete_class(key=key, value=value) | ||||||
|  |       total += message_sizer(entry_msg) | ||||||
|  |       if is_message_map: | ||||||
|  |         value.ByteSize() | ||||||
|  |     return total | ||||||
|  | 
 | ||||||
|  |   return FieldSize | ||||||
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Encoders! | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VarintEncoder(): | ||||||
|  |   """Return an encoder for a basic varint value (does not include tag).""" | ||||||
|  | 
 | ||||||
|  |   local_int2byte = struct.Struct('>B').pack | ||||||
|  | 
 | ||||||
|  |   def EncodeVarint(write, value, unused_deterministic=None): | ||||||
|  |     bits = value & 0x7f | ||||||
|  |     value >>= 7 | ||||||
|  |     while value: | ||||||
|  |       write(local_int2byte(0x80|bits)) | ||||||
|  |       bits = value & 0x7f | ||||||
|  |       value >>= 7 | ||||||
|  |     return write(local_int2byte(bits)) | ||||||
|  | 
 | ||||||
|  |   return EncodeVarint | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SignedVarintEncoder(): | ||||||
|  |   """Return an encoder for a basic signed varint value (does not include | ||||||
|  |   tag).""" | ||||||
|  | 
 | ||||||
|  |   local_int2byte = struct.Struct('>B').pack | ||||||
|  | 
 | ||||||
|  |   def EncodeSignedVarint(write, value, unused_deterministic=None): | ||||||
|  |     if value < 0: | ||||||
|  |       value += (1 << 64) | ||||||
|  |     bits = value & 0x7f | ||||||
|  |     value >>= 7 | ||||||
|  |     while value: | ||||||
|  |       write(local_int2byte(0x80|bits)) | ||||||
|  |       bits = value & 0x7f | ||||||
|  |       value >>= 7 | ||||||
|  |     return write(local_int2byte(bits)) | ||||||
|  | 
 | ||||||
|  |   return EncodeSignedVarint | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _EncodeVarint = _VarintEncoder() | ||||||
|  | _EncodeSignedVarint = _SignedVarintEncoder() | ||||||
|  | 
 | ||||||
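|  | # Wire-format sanity check (a sketch, not executed here): 300 encodes as | ||||||
|  | # b'\xac\x02' -- low 7 bits first, continuation bit 0x80 on all but the | ||||||
|  | # last byte: | ||||||
|  | # | ||||||
|  | #   pieces = [] | ||||||
|  | #   _EncodeVarint(pieces.append, 300, True) | ||||||
|  | #   assert b''.join(pieces) == b'\xac\x02' | ||||||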
|  | 
 | ||||||
|  | def _VarintBytes(value): | ||||||
|  |   """Encode the given integer as a varint and return the bytes.  This is only | ||||||
|  |   called at startup time so it doesn't need to be fast.""" | ||||||
|  | 
 | ||||||
|  |   pieces = [] | ||||||
|  |   _EncodeVarint(pieces.append, value, True) | ||||||
|  |   return b"".join(pieces) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TagBytes(field_number, wire_type): | ||||||
|  |   """Encode the given tag and return the bytes.  Only called at startup.""" | ||||||
|  | 
 | ||||||
|  |   return bytes(_VarintBytes(wire_format.PackTag(field_number, wire_type))) | ||||||
|  | 
 | ||||||
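|  | # e.g. TagBytes(1, wire_format.WIRETYPE_LENGTH_DELIMITED) == b'\x0a', since | ||||||
|  | # (1 << 3) | 2 == 0x0a -- the familiar first byte of many serialized messages. | ||||||
|  |  | ||||||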
|  | # -------------------------------------------------------------------- | ||||||
|  | # As with sizers (see above), we have a number of common encoder | ||||||
|  | # implementations. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SimpleEncoder(wire_type, encode_value, compute_value_size): | ||||||
|  |   """Return a constructor for an encoder for fields of a particular type. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       encode_value:  A function which encodes an individual value, e.g. | ||||||
|  |         _EncodeVarint(). | ||||||
|  |       compute_value_size:  A function which computes the size of an individual | ||||||
|  |         value, e.g. _VarintSize(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         size = 0 | ||||||
|  |         for element in value: | ||||||
|  |           size += compute_value_size(element) | ||||||
|  |         local_EncodeVarint(write, size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           encode_value(write, element, deterministic) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           encode_value(write, element, deterministic) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return encode_value(write, value, deterministic) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
|  | 
 | ||||||
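|  | # Note the packed case above is two-pass: it first sums the payload size so | ||||||
|  | # the length prefix can be written, then encodes each element. For example a | ||||||
|  | # packed repeated int32 field numbered 4 holding [3, 270] comes out as | ||||||
|  | # b'\x22\x03\x03\x8e\x02' (tag 0x22, length 3, then varints 3 and 270). | ||||||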
|  | 
 | ||||||
|  | def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value): | ||||||
|  |   """Like SimpleEncoder but additionally invokes modify_value on every value | ||||||
|  |   before passing it to encode_value.  Usually modify_value is ZigZagEncode.""" | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         size = 0 | ||||||
|  |         for element in value: | ||||||
|  |           size += compute_value_size(modify_value(element)) | ||||||
|  |         local_EncodeVarint(write, size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           encode_value(write, modify_value(element), deterministic) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           encode_value(write, modify_value(element), deterministic) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return encode_value(write, modify_value(value), deterministic) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _StructPackEncoder(wire_type, format): | ||||||
|  |   """Return a constructor for an encoder for a fixed-width field. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       format:  The format string to pass to struct.pack(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   value_size = struct.calcsize(format) | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     local_struct_pack = struct.pack | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         local_EncodeVarint(write, len(value) * value_size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           write(local_struct_pack(format, element)) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           write(local_struct_pack(format, element)) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         return write(local_struct_pack(format, value)) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _FloatingPointEncoder(wire_type, format): | ||||||
|  |   """Return a constructor for an encoder for float fields. | ||||||
|  | 
 | ||||||
|  |   This is like _StructPackEncoder, but catches errors that may be due to | ||||||
|  |   passing non-finite floating-point values to struct.pack, and makes a | ||||||
|  |   second attempt to encode those values. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |       wire_type:  The field's wire type, for encoding tags. | ||||||
|  |       format:  The format string to pass to struct.pack(). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   value_size = struct.calcsize(format) | ||||||
|  |   if value_size == 4: | ||||||
|  |     def EncodeNonFiniteOrRaise(write, value): | ||||||
|  |       # Remember that the serialized form uses little-endian byte order. | ||||||
|  |       if value == _POS_INF: | ||||||
|  |         write(b'\x00\x00\x80\x7F') | ||||||
|  |       elif value == _NEG_INF: | ||||||
|  |         write(b'\x00\x00\x80\xFF') | ||||||
|  |       elif value != value:           # NaN | ||||||
|  |         write(b'\x00\x00\xC0\x7F') | ||||||
|  |       else: | ||||||
|  |         raise | ||||||
|  |   elif value_size == 8: | ||||||
|  |     def EncodeNonFiniteOrRaise(write, value): | ||||||
|  |       if value == _POS_INF: | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F') | ||||||
|  |       elif value == _NEG_INF: | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF') | ||||||
|  |       elif value != value:                         # NaN | ||||||
|  |         write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F') | ||||||
|  |       else: | ||||||
|  |         raise | ||||||
|  |   else: | ||||||
|  |     raise ValueError('Can\'t encode floating-point values that are ' | ||||||
|  |                      '%d bytes long (only 4 or 8)' % value_size) | ||||||
|  | 
 | ||||||
|  |   def SpecificEncoder(field_number, is_repeated, is_packed): | ||||||
|  |     local_struct_pack = struct.pack | ||||||
|  |     if is_packed: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |       local_EncodeVarint = _EncodeVarint | ||||||
|  |       def EncodePackedField(write, value, deterministic): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         local_EncodeVarint(write, len(value) * value_size, deterministic) | ||||||
|  |         for element in value: | ||||||
|  |           # This try/except block is going to be faster than any code that | ||||||
|  |           # we could write to check whether element is finite. | ||||||
|  |           try: | ||||||
|  |             write(local_struct_pack(format, element)) | ||||||
|  |           except SystemError: | ||||||
|  |             EncodeNonFiniteOrRaise(write, element) | ||||||
|  |       return EncodePackedField | ||||||
|  |     elif is_repeated: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |         for element in value: | ||||||
|  |           write(tag_bytes) | ||||||
|  |           try: | ||||||
|  |             write(local_struct_pack(format, element)) | ||||||
|  |           except SystemError: | ||||||
|  |             EncodeNonFiniteOrRaise(write, element) | ||||||
|  |       return EncodeRepeatedField | ||||||
|  |     else: | ||||||
|  |       tag_bytes = TagBytes(field_number, wire_type) | ||||||
|  |       def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |         write(tag_bytes) | ||||||
|  |         try: | ||||||
|  |           write(local_struct_pack(format, value)) | ||||||
|  |         except SystemError: | ||||||
|  |           EncodeNonFiniteOrRaise(write, value) | ||||||
|  |       return EncodeField | ||||||
|  | 
 | ||||||
|  |   return SpecificEncoder | ||||||
|  | 
 | ||||||
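|  | # The byte strings in EncodeNonFiniteOrRaise are just the little-endian | ||||||
|  | # IEEE-754 encodings: 0x7F800000 / 0xFF800000 are float32 +/-infinity and | ||||||
|  | # 0x7FC00000 is a quiet NaN; the 8-byte variants are the float64 equivalents. | ||||||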
|  | 
 | ||||||
|  | # ==================================================================== | ||||||
|  | # Here we declare an encoder constructor for each field type.  These work | ||||||
|  | # very similarly to sizer constructors, described earlier. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize) | ||||||
|  | 
 | ||||||
|  | UInt32Encoder = UInt64Encoder = _SimpleEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize) | ||||||
|  | 
 | ||||||
|  | SInt32Encoder = SInt64Encoder = _ModifiedEncoder( | ||||||
|  |     wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize, | ||||||
|  |     wire_format.ZigZagEncode) | ||||||
|  | 
 | ||||||
|  | # Note that Python conveniently guarantees that when using the '<' prefix on | ||||||
|  | # formats, they will also have the same size across all platforms (as opposed | ||||||
|  | # to without the prefix, where their sizes depend on the C compiler's basic | ||||||
|  | # type sizes). | ||||||
|  | Fixed32Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I') | ||||||
|  | Fixed64Encoder  = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q') | ||||||
|  | SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i') | ||||||
|  | SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q') | ||||||
|  | FloatEncoder    = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f') | ||||||
|  | DoubleEncoder   = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d') | ||||||
|  | 
 | ||||||
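|  | # e.g. struct.pack('<I', 1) == b'\x01\x00\x00\x00': four bytes, fixed width, | ||||||
|  | # on every platform -- exactly the fixed32 wire representation. | ||||||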
|  | 
 | ||||||
|  | def BoolEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a boolean field.""" | ||||||
|  | 
 | ||||||
|  |   false_byte = b'\x00' | ||||||
|  |   true_byte = b'\x01' | ||||||
|  |   if is_packed: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |     local_EncodeVarint = _EncodeVarint | ||||||
|  |     def EncodePackedField(write, value, deterministic): | ||||||
|  |       write(tag_bytes) | ||||||
|  |       local_EncodeVarint(write, len(value), deterministic) | ||||||
|  |       for element in value: | ||||||
|  |         if element: | ||||||
|  |           write(true_byte) | ||||||
|  |         else: | ||||||
|  |           write(false_byte) | ||||||
|  |     return EncodePackedField | ||||||
|  |   elif is_repeated: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) | ||||||
|  |     def EncodeRepeatedField(write, value, unused_deterministic=None): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag_bytes) | ||||||
|  |         if element: | ||||||
|  |           write(true_byte) | ||||||
|  |         else: | ||||||
|  |           write(false_byte) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT) | ||||||
|  |     def EncodeField(write, value, unused_deterministic=None): | ||||||
|  |       write(tag_bytes) | ||||||
|  |       if value: | ||||||
|  |         return write(true_byte) | ||||||
|  |       return write(false_byte) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def StringEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a string field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         encoded = element.encode('utf-8') | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, local_len(encoded), deterministic) | ||||||
|  |         write(encoded) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       encoded = value.encode('utf-8') | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, local_len(encoded), deterministic) | ||||||
|  |       return write(encoded) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BytesEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a bytes field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   local_len = len | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, local_len(element), deterministic) | ||||||
|  |         write(element) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, local_len(value), deterministic) | ||||||
|  |       return write(value) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a group field.""" | ||||||
|  | 
 | ||||||
|  |   start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP) | ||||||
|  |   end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP) | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(start_tag) | ||||||
|  |         element._InternalSerialize(write, deterministic) | ||||||
|  |         write(end_tag) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(start_tag) | ||||||
|  |       value._InternalSerialize(write, deterministic) | ||||||
|  |       return write(end_tag) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageEncoder(field_number, is_repeated, is_packed): | ||||||
|  |   """Returns an encoder for a message field.""" | ||||||
|  | 
 | ||||||
|  |   tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  |   assert not is_packed | ||||||
|  |   if is_repeated: | ||||||
|  |     def EncodeRepeatedField(write, value, deterministic): | ||||||
|  |       for element in value: | ||||||
|  |         write(tag) | ||||||
|  |         local_EncodeVarint(write, element.ByteSize(), deterministic) | ||||||
|  |         element._InternalSerialize(write, deterministic) | ||||||
|  |     return EncodeRepeatedField | ||||||
|  |   else: | ||||||
|  |     def EncodeField(write, value, deterministic): | ||||||
|  |       write(tag) | ||||||
|  |       local_EncodeVarint(write, value.ByteSize(), deterministic) | ||||||
|  |       return value._InternalSerialize(write, deterministic) | ||||||
|  |     return EncodeField | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # As before, MessageSet is special. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemEncoder(field_number): | ||||||
|  |   """Encoder for extensions of MessageSet. | ||||||
|  | 
 | ||||||
|  |   The message set message looks like this: | ||||||
|  |     message MessageSet { | ||||||
|  |       repeated group Item = 1 { | ||||||
|  |         required int32 type_id = 2; | ||||||
|  |         required string message = 3; | ||||||
|  |       } | ||||||
|  |     } | ||||||
|  |   """ | ||||||
|  |   start_bytes = b"".join([ | ||||||
|  |       TagBytes(1, wire_format.WIRETYPE_START_GROUP), | ||||||
|  |       TagBytes(2, wire_format.WIRETYPE_VARINT), | ||||||
|  |       _VarintBytes(field_number), | ||||||
|  |       TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)]) | ||||||
|  |   end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP) | ||||||
|  |   local_EncodeVarint = _EncodeVarint | ||||||
|  | 
 | ||||||
|  |   def EncodeField(write, value, deterministic): | ||||||
|  |     write(start_bytes) | ||||||
|  |     local_EncodeVarint(write, value.ByteSize(), deterministic) | ||||||
|  |     value._InternalSerialize(write, deterministic) | ||||||
|  |     return write(end_bytes) | ||||||
|  | 
 | ||||||
|  |   return EncodeField | ||||||
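To make the byte layout concrete, here is a minimal usage sketch; `item_msg` and the type_id 12345 are hypothetical, and any message object exposing ByteSize() and _InternalSerialize() would do:

    # Hedged sketch: 'write' may be any callable accepting bytes, e.g. list.append.
    chunks = []
    encode = MessageSetItemEncoder(12345)   # the extension's type_id (assumed)
    encode(chunks.append, item_msg, deterministic=True)
    payload = b''.join(chunks)   # group start, type_id, length-prefixed message, group end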
|  | 
 | ||||||
|  | 
 | ||||||
|  | # -------------------------------------------------------------------- | ||||||
|  | # As before, Map is special. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MapEncoder(field_descriptor): | ||||||
|  |   """Encoder for extensions of MessageSet. | ||||||
|  | 
 | ||||||
|  |   Maps always have a wire format like this: | ||||||
|  |     message MapEntry { | ||||||
|  |       key_type key = 1; | ||||||
|  |       value_type value = 2; | ||||||
|  |     } | ||||||
|  |     repeated MapEntry map = N; | ||||||
|  |   """ | ||||||
|  |   # Can't look at field_descriptor.message_type._concrete_class because it may | ||||||
|  |   # not have been initialized yet. | ||||||
|  |   message_type = field_descriptor.message_type | ||||||
|  |   encode_message = MessageEncoder(field_descriptor.number, False, False) | ||||||
|  | 
 | ||||||
|  |   def EncodeField(write, value, deterministic): | ||||||
|  |     value_keys = sorted(value.keys()) if deterministic else value | ||||||
|  |     for key in value_keys: | ||||||
|  |       entry_msg = message_type._concrete_class(key=key, value=value[key]) | ||||||
|  |       encode_message(write, entry_msg, deterministic) | ||||||
|  | 
 | ||||||
|  |   return EncodeField | ||||||
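A quick illustration of the deterministic branch (the map FieldDescriptor here is a placeholder): with deterministic=True, entries are written in sorted key order, so equal maps always produce identical bytes.

    # Hedged sketch, assuming 'map_field' is the FieldDescriptor of a map field.
    out = []
    encode_map = MapEncoder(map_field)
    encode_map(out.append, {'b': 2, 'a': 1}, deterministic=True)  # 'a' entry written first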
							
								
								
									
124  lib/protobuf/internal/enum_type_wrapper.py  (new file)
						|  | @ -0,0 +1,124 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """A simple wrapper around enum types to expose utility functions. | ||||||
|  | 
 | ||||||
|  | Instances are created as properties with the same name as the enum they wrap | ||||||
|  | on proto classes.  For usage, see: | ||||||
|  |   reflection_test.py | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'rabsatt@google.com (Kevin Rabsatt)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EnumTypeWrapper(object): | ||||||
|  |   """A utility for finding the names of enum values.""" | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR = None | ||||||
|  | 
 | ||||||
|  |   # This is a type alias, which mypy typing stubs can type as | ||||||
|  |   # a genericized parameter constrained to an int, allowing subclasses | ||||||
|  |   # to be typed with a more constrained type in .pyi stubs. | ||||||
|  |   # E.g.: | ||||||
|  |   # class MyGeneratedEnum(Message): | ||||||
|  |   #   ValueType = NewType('ValueType', int) | ||||||
|  |   #   def Name(self, number: MyGeneratedEnum.ValueType) -> str | ||||||
|  |   ValueType = int | ||||||
|  | 
 | ||||||
|  |   def __init__(self, enum_type): | ||||||
|  |     """Inits EnumTypeWrapper with an EnumDescriptor.""" | ||||||
|  |     self._enum_type = enum_type | ||||||
|  |     self.DESCRIPTOR = enum_type  # pylint: disable=invalid-name | ||||||
|  | 
 | ||||||
|  |   def Name(self, number):  # pylint: disable=invalid-name | ||||||
|  |     """Returns a string containing the name of an enum value.""" | ||||||
|  |     try: | ||||||
|  |       return self._enum_type.values_by_number[number].name | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  | 
 | ||||||
|  |     if not isinstance(number, int): | ||||||
|  |       raise TypeError( | ||||||
|  |           'Enum value for {} must be an int, but got {} {!r}.'.format( | ||||||
|  |               self._enum_type.name, type(number), number)) | ||||||
|  |     else: | ||||||
|  |       # repr here to handle the odd case when you pass in a boolean. | ||||||
|  |       raise ValueError('Enum {} has no name defined for value {!r}'.format( | ||||||
|  |           self._enum_type.name, number)) | ||||||
|  | 
 | ||||||
|  |   def Value(self, name):  # pylint: disable=invalid-name | ||||||
|  |     """Returns the value corresponding to the given enum name.""" | ||||||
|  |     try: | ||||||
|  |       return self._enum_type.values_by_name[name].number | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  |     raise ValueError('Enum {} has no value defined for name {!r}'.format( | ||||||
|  |         self._enum_type.name, name)) | ||||||
|  | 
 | ||||||
|  |   def keys(self): | ||||||
|  |     """Return a list of the string names in the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of strs, in the order they were defined in the .proto file. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return [value_descriptor.name | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def values(self): | ||||||
|  |     """Return a list of the integer values in the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of ints, in the order they were defined in the .proto file. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return [value_descriptor.number | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def items(self): | ||||||
|  |     """Return a list of the (name, value) pairs of the enum. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A list of (str, int) pairs, in the order they were defined | ||||||
|  |       in the .proto file. | ||||||
|  |     """ | ||||||
|  |     return [(value_descriptor.name, value_descriptor.number) | ||||||
|  |             for value_descriptor in self._enum_type.values] | ||||||
|  | 
 | ||||||
|  |   def __getattr__(self, name): | ||||||
|  |     """Returns the value corresponding to the given enum name.""" | ||||||
|  |     try: | ||||||
|  |       return super( | ||||||
|  |           EnumTypeWrapper, | ||||||
|  |           self).__getattribute__('_enum_type').values_by_name[name].number | ||||||
|  |     except KeyError: | ||||||
|  |       pass  # fall out to break exception chaining | ||||||
|  |     raise AttributeError('Enum {} has no value defined for name {!r}'.format( | ||||||
|  |         self._enum_type.name, name)) | ||||||
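Taken together, a wrapped enum behaves roughly as below; Color, RED, and color_descriptor are hypothetical names for a generated enum and its EnumDescriptor.

    Color = EnumTypeWrapper(color_descriptor)
    Color.Name(1)        # -> 'RED'
    Color.Value('RED')   # -> 1
    Color.RED            # -> 1, resolved via __getattr__
    list(Color.items())  # -> [('RED', 1), ...] in .proto definition order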
							
								
								
									
213  lib/protobuf/internal/extension_dict.py  (new file)
						|  | @ -0,0 +1,213 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains _ExtensionDict class to represent extensions. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import type_checkers | ||||||
|  | from google.protobuf.descriptor import FieldDescriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _VerifyExtensionHandle(message, extension_handle): | ||||||
|  |   """Verify that the given extension handle is valid.""" | ||||||
|  | 
 | ||||||
|  |   if not isinstance(extension_handle, FieldDescriptor): | ||||||
|  |     raise KeyError('HasExtension() expects an extension handle, got: %s' % | ||||||
|  |                    extension_handle) | ||||||
|  | 
 | ||||||
|  |   if not extension_handle.is_extension: | ||||||
|  |     raise KeyError('"%s" is not an extension.' % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |   if not extension_handle.containing_type: | ||||||
|  |     raise KeyError('"%s" is missing a containing_type.' | ||||||
|  |                    % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |   if extension_handle.containing_type is not message.DESCRIPTOR: | ||||||
|  |     raise KeyError('Extension "%s" extends message type "%s", but this ' | ||||||
|  |                    'message is of type "%s".' % | ||||||
|  |                    (extension_handle.full_name, | ||||||
|  |                     extension_handle.containing_type.full_name, | ||||||
|  |                     message.DESCRIPTOR.full_name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO(robinson): Unify error handling of "unknown extension" crap. | ||||||
|  | # TODO(robinson): Support iteritems()-style iteration over all | ||||||
|  | # extensions with the "has" bits turned on? | ||||||
|  | class _ExtensionDict(object): | ||||||
|  | 
 | ||||||
|  |   """Dict-like container for Extension fields on proto instances. | ||||||
|  | 
 | ||||||
|  |   Note that in all cases we expect extension handles to be | ||||||
|  |   FieldDescriptors. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, extended_message): | ||||||
|  |     """ | ||||||
|  |     Args: | ||||||
|  |       extended_message: Message instance for which we are the Extensions dict. | ||||||
|  |     """ | ||||||
|  |     self._extended_message = extended_message | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, extension_handle): | ||||||
|  |     """Returns the current value of the given extension handle.""" | ||||||
|  | 
 | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     result = self._extended_message._fields.get(extension_handle) | ||||||
|  |     if result is not None: | ||||||
|  |       return result | ||||||
|  | 
 | ||||||
|  |     if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       result = extension_handle._default_constructor(self._extended_message) | ||||||
|  |     elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       message_type = extension_handle.message_type | ||||||
|  |       if not hasattr(message_type, '_concrete_class'): | ||||||
|  |         # pylint: disable=protected-access | ||||||
|  |         self._extended_message._FACTORY.GetPrototype(message_type) | ||||||
|  |       assert getattr(extension_handle.message_type, '_concrete_class', None), ( | ||||||
|  |           'Uninitialized concrete class found for field %r (message type %r)' | ||||||
|  |           % (extension_handle.full_name, | ||||||
|  |              extension_handle.message_type.full_name)) | ||||||
|  |       result = extension_handle.message_type._concrete_class() | ||||||
|  |       try: | ||||||
|  |         result._SetListener(self._extended_message._listener_for_children) | ||||||
|  |       except ReferenceError: | ||||||
|  |         pass | ||||||
|  |     else: | ||||||
|  |       # Singular scalar -- just return the default without inserting into the | ||||||
|  |       # dict. | ||||||
|  |       return extension_handle.default_value | ||||||
|  | 
 | ||||||
|  |     # Atomically check if another thread has preempted us and, if not, swap | ||||||
|  |     # in the new object we just created.  If someone has preempted us, we | ||||||
|  |     # take that object and discard ours. | ||||||
|  |     # WARNING:  We are relying on setdefault() being atomic.  This is true | ||||||
|  |     #   in CPython but we haven't investigated others.  This warning appears | ||||||
|  |     #   in several other locations in this file. | ||||||
|  |     result = self._extended_message._fields.setdefault( | ||||||
|  |         extension_handle, result) | ||||||
|  | 
 | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other): | ||||||
|  |     if not isinstance(other, self.__class__): | ||||||
|  |       return False | ||||||
|  | 
 | ||||||
|  |     my_fields = self._extended_message.ListFields() | ||||||
|  |     other_fields = other._extended_message.ListFields() | ||||||
|  | 
 | ||||||
|  |     # Get rid of non-extension fields. | ||||||
|  |     my_fields = [field for field in my_fields if field.is_extension] | ||||||
|  |     other_fields = [field for field in other_fields if field.is_extension] | ||||||
|  | 
 | ||||||
|  |     return my_fields == other_fields | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other): | ||||||
|  |     return not self == other | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     fields = self._extended_message.ListFields() | ||||||
|  |     # Get rid of non-extension fields. | ||||||
|  |     extension_fields = [field for field in fields if field[0].is_extension] | ||||||
|  |     return len(extension_fields) | ||||||
|  | 
 | ||||||
|  |   def __hash__(self): | ||||||
|  |     raise TypeError('unhashable object') | ||||||
|  | 
 | ||||||
|  |   # Note that this is only meaningful for non-repeated, scalar extension | ||||||
|  |   # fields.  Note also that we may have to call _Modified() when we do | ||||||
|  |   # successfully set a field this way, to set any necessary "has" bits in the | ||||||
|  |   # ancestors of the extended message. | ||||||
|  |   def __setitem__(self, extension_handle, value): | ||||||
|  |     """If extension_handle specifies a non-repeated, scalar extension | ||||||
|  |     field, sets the value of that field. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |         extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE): | ||||||
|  |       raise TypeError( | ||||||
|  |           'Cannot assign to extension "%s" because it is a repeated or ' | ||||||
|  |           'composite type.' % extension_handle.full_name) | ||||||
|  | 
 | ||||||
|  |     # It's slightly wasteful to look up the type checker each time, | ||||||
|  |     # but we expect this to be a vanishingly uncommon case anyway. | ||||||
|  |     type_checker = type_checkers.GetTypeChecker(extension_handle) | ||||||
|  |     # pylint: disable=protected-access | ||||||
|  |     self._extended_message._fields[extension_handle] = ( | ||||||
|  |         type_checker.CheckValue(value)) | ||||||
|  |     self._extended_message._Modified() | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, extension_handle): | ||||||
|  |     self._extended_message.ClearExtension(extension_handle) | ||||||
|  | 
 | ||||||
|  |   def _FindExtensionByName(self, name): | ||||||
|  |     """Tries to find a known extension with the specified name. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Extension full name. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       Extension field descriptor. | ||||||
|  |     """ | ||||||
|  |     return self._extended_message._extensions_by_name.get(name, None) | ||||||
|  | 
 | ||||||
|  |   def _FindExtensionByNumber(self, number): | ||||||
|  |     """Tries to find a known extension with the field number. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       number: Extension field number. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       Extension field descriptor. | ||||||
|  |     """ | ||||||
|  |     return self._extended_message._extensions_by_number.get(number, None) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     # Return a generator over the populated extension fields | ||||||
|  |     return (f[0] for f in self._extended_message.ListFields() | ||||||
|  |             if f[0].is_extension) | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, extension_handle): | ||||||
|  |     _VerifyExtensionHandle(self._extended_message, extension_handle) | ||||||
|  | 
 | ||||||
|  |     if extension_handle not in self._extended_message._fields: | ||||||
|  |       return False | ||||||
|  | 
 | ||||||
|  |     if extension_handle.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       return bool(self._extended_message._fields.get(extension_handle)) | ||||||
|  | 
 | ||||||
|  |     if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       value = self._extended_message._fields.get(extension_handle) | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       return value is not None and value._is_present_in_parent | ||||||
|  | 
 | ||||||
|  |     return True | ||||||
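In normal use this container is reached through Message.Extensions; a hedged sketch with hypothetical generated names:

    msg = my_pb2.Container()                 # my_pb2 is an illustrative generated module
    msg.Extensions[my_pb2.my_ext] = 42       # scalar extension: __setitem__ type-checks 42
    assert my_pb2.my_ext in msg.Extensions   # __contains__ checks presence
    del msg.Extensions[my_pb2.my_ext]        # __delitem__ delegates to ClearExtension()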
							
								
								
									
78  lib/protobuf/internal/message_listener.py  (new file)
						|  | @ -0,0 +1,78 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Defines a listener interface for observing certain | ||||||
|  | state transitions on Message objects. | ||||||
|  | 
 | ||||||
|  | Also defines a null implementation of this interface. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MessageListener(object): | ||||||
|  | 
 | ||||||
|  |   """Listens for modifications made to a message.  Meant to be registered via | ||||||
|  |   Message._SetListener(). | ||||||
|  | 
 | ||||||
|  |   Attributes: | ||||||
|  |     dirty:  If True, then calling Modified() would be a no-op.  This can be | ||||||
|  |             used to avoid these calls entirely in the common case. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def Modified(self): | ||||||
|  |     """Called every time the message is modified in such a way that the parent | ||||||
|  |     message may need to be updated.  This currently means either: | ||||||
|  |     (a) The message was modified for the first time, so the parent message | ||||||
|  |         should henceforth mark the message as present. | ||||||
|  |     (b) The message's cached byte size became dirty -- i.e. the message was | ||||||
|  |         modified for the first time after a previous call to ByteSize(). | ||||||
|  |         Therefore the parent should also mark its byte size as dirty. | ||||||
|  |     Note that (a) implies (b), since new objects start out with a clean cached | ||||||
|  |     size (zero).  However, we document (a) explicitly because it is important. | ||||||
|  | 
 | ||||||
|  |     Modified() will *only* be called in response to one of these two events -- | ||||||
|  |     not every time the sub-message is modified. | ||||||
|  | 
 | ||||||
|  |     Note that if the listener's |dirty| attribute is true, then calling | ||||||
|  |     Modified at the moment would be a no-op, so it can be skipped.  Performance- | ||||||
|  |     sensitive callers should check this attribute directly before calling since | ||||||
|  |     it will be true most of the time. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class NullMessageListener(object): | ||||||
|  | 
 | ||||||
|  |   """No-op MessageListener implementation.""" | ||||||
|  | 
 | ||||||
|  |   def Modified(self): | ||||||
|  |     pass | ||||||
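A concrete listener only needs to honor the |dirty| contract described above; a minimal sketch:

    class _DirtyFlagListener(MessageListener):
      # Records that Modified() fired; callers may skip calls while dirty is True.
      def __init__(self):
        self.dirty = False
      def Modified(self):
        self.dirty = True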
							
								
								
									
1539  lib/protobuf/internal/python_message.py  (new file; contents not shown)
435  lib/protobuf/internal/type_checkers.py  (new file)
						|  | @ -0,0 +1,435 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Provides type checking routines. | ||||||
|  | 
 | ||||||
|  | This module defines type checking utilities in the forms of dictionaries: | ||||||
|  | 
 | ||||||
|  | VALUE_CHECKERS: A dictionary of field types and a value validation object. | ||||||
|  | TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing | ||||||
|  |   function. | ||||||
|  | TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization | ||||||
|  |   function. | ||||||
|  | FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their | ||||||
|  |   corresponding wire types. | ||||||
|  | TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization | ||||||
|  |   function. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | import ctypes | ||||||
|  | import numbers | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import decoder | ||||||
|  | from google.protobuf.internal import encoder | ||||||
|  | from google.protobuf.internal import wire_format | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | 
 | ||||||
|  | _FieldDescriptor = descriptor.FieldDescriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TruncateToFourByteFloat(original): | ||||||
|  |   return ctypes.c_float(original).value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ToShortestFloat(original): | ||||||
|  |   """Returns the shortest float that has same value in wire.""" | ||||||
|  |   # All 4-byte floats have between 6 and 9 significant digits, so we | ||||||
|  |   # start with 6 as the lower bound. | ||||||
|  |   # It has to be iterative because using '.9g' directly cannot get rid | ||||||
|  |   # of the noise for most values. For example, for float_field=0.9, | ||||||
|  |   # '.9g' would print 0.899999976. | ||||||
|  |   precision = 6 | ||||||
|  |   rounded = float('{0:.{1}g}'.format(original, precision)) | ||||||
|  |   while TruncateToFourByteFloat(rounded) != original: | ||||||
|  |     precision += 1 | ||||||
|  |     rounded = float('{0:.{1}g}'.format(original, precision)) | ||||||
|  |   return rounded | ||||||
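For example, 0.9 survives the round trip: the stored 4-byte value is 0.8999999761581421, and the iterative search recovers the short form.

    >>> ToShortestFloat(TruncateToFourByteFloat(0.9))
    0.9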
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SupportsOpenEnums(field_descriptor): | ||||||
|  |   return field_descriptor.containing_type.syntax == 'proto3' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GetTypeChecker(field): | ||||||
|  |   """Returns a type checker for a message field of the specified types. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field: FieldDescriptor object for this field. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     An instance of TypeChecker which can be used to verify the types | ||||||
|  |     of values assigned to a field of the specified type. | ||||||
|  |   """ | ||||||
|  |   if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and | ||||||
|  |       field.type == _FieldDescriptor.TYPE_STRING): | ||||||
|  |     return UnicodeValueChecker() | ||||||
|  |   if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |     if SupportsOpenEnums(field): | ||||||
|  |       # When open enums are supported, any int32 can be assigned. | ||||||
|  |       return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32] | ||||||
|  |     else: | ||||||
|  |       return EnumValueChecker(field.enum_type) | ||||||
|  |   return _VALUE_CHECKERS[field.cpp_type] | ||||||
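One consequence worth spelling out (a hedged sketch; enum_field is a placeholder FieldDescriptor): proto3 enums are open, so any int32 passes, while proto2 enums reject unknown values.

    checker = GetTypeChecker(enum_field)
    checker.CheckValue(999)   # OK for a proto3 field; ValueError for proto2 if 999 is unknown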
|  | 
 | ||||||
|  | 
 | ||||||
|  | # None of the typecheckers below make any attempt to guard against people | ||||||
|  | # subclassing builtin types and doing weird things.  We're not trying to | ||||||
|  | # protect against malicious clients here, just people accidentally shooting | ||||||
|  | # themselves in the foot in obvious ways. | ||||||
|  | class TypeChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Type checker used to catch type errors as early as possible | ||||||
|  |   when the client is setting scalar fields in protocol messages. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, *acceptable_types): | ||||||
|  |     self._acceptable_types = acceptable_types | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Type check the provided value and return it. | ||||||
|  | 
 | ||||||
|  |     The returned value might have been normalized to another type. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(proposed_value, self._acceptable_types): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), self._acceptable_types)) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class TypeCheckerWithDefault(TypeChecker): | ||||||
|  | 
 | ||||||
|  |   def __init__(self, default_value, *acceptable_types): | ||||||
|  |     TypeChecker.__init__(self, *acceptable_types) | ||||||
|  |     self._default_value = default_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return self._default_value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class BoolValueChecker(object): | ||||||
|  |   """Type checker used for bool fields.""" | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not hasattr(proposed_value, '__index__') or ( | ||||||
|  |         type(proposed_value).__module__ == 'numpy' and | ||||||
|  |         type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (bool, int))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return bool(proposed_value) | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return False | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # IntValueChecker and its subclasses perform integer type-checks | ||||||
|  | # and bounds-checks. | ||||||
|  | class IntValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for integer fields.  Performs type-check and range check.""" | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not hasattr(proposed_value, '__index__') or ( | ||||||
|  |         type(proposed_value).__module__ == 'numpy' and | ||||||
|  |         type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (int,))) | ||||||
|  |       raise TypeError(message) | ||||||
|  | 
 | ||||||
|  |     if not self._MIN <= int(proposed_value) <= self._MAX: | ||||||
|  |       raise ValueError('Value out of range: %d' % proposed_value) | ||||||
|  |     # We force all values to int to make alternate implementations where the | ||||||
|  |     # distinction is more significant (e.g. the C++ implementation) simpler. | ||||||
|  |     proposed_value = int(proposed_value) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return 0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EnumValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for enum fields.  Performs type-check and range check.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, enum_type): | ||||||
|  |     self._enum_type = enum_type | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not isinstance(proposed_value, numbers.Integral): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (int,))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     if int(proposed_value) not in self._enum_type.values_by_number: | ||||||
|  |       raise ValueError('Unknown enum value: %d' % proposed_value) | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return self._enum_type.values[0].number | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class UnicodeValueChecker(object): | ||||||
|  | 
 | ||||||
|  |   """Checker used for string fields. | ||||||
|  | 
 | ||||||
|  |   Always returns a unicode (str) value, even if the input is of type bytes. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     if not isinstance(proposed_value, (bytes, str)): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: %s' % | ||||||
|  |                  (proposed_value, type(proposed_value), (bytes, str))) | ||||||
|  |       raise TypeError(message) | ||||||
|  | 
 | ||||||
|  |     # If the value is of type 'bytes' make sure that it is valid UTF-8 data. | ||||||
|  |     if isinstance(proposed_value, bytes): | ||||||
|  |       try: | ||||||
|  |         proposed_value = proposed_value.decode('utf-8') | ||||||
|  |       except UnicodeDecodeError: | ||||||
|  |         raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 ' | ||||||
|  |                          'encoding. Non-UTF-8 strings must be converted to ' | ||||||
|  |                          'unicode objects before being added.' % | ||||||
|  |                          (proposed_value)) | ||||||
|  |     else: | ||||||
|  |       try: | ||||||
|  |         proposed_value.encode('utf8') | ||||||
|  |       except UnicodeEncodeError: | ||||||
|  |         raise ValueError('%.1024r isn\'t a valid unicode string and ' | ||||||
|  |                          'can\'t be encoded in UTF-8.'% | ||||||
|  |                          (proposed_value)) | ||||||
|  | 
 | ||||||
|  |     return proposed_value | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return u"" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Int32ValueChecker(IntValueChecker): | ||||||
|  |   # We're sure to use ints instead of longs here since comparison may be more | ||||||
|  |   # efficient. | ||||||
|  |   _MIN = -2147483648 | ||||||
|  |   _MAX = 2147483647 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Uint32ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = 0 | ||||||
|  |   _MAX = (1 << 32) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Int64ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = -(1 << 63) | ||||||
|  |   _MAX = (1 << 63) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Uint64ValueChecker(IntValueChecker): | ||||||
|  |   _MIN = 0 | ||||||
|  |   _MAX = (1 << 64) - 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The max 4-byte float is about 3.4028234663852886e+38 | ||||||
|  | _FLOAT_MAX = float.fromhex('0x1.fffffep+127') | ||||||
|  | _FLOAT_MIN = -_FLOAT_MAX | ||||||
|  | _INF = float('inf') | ||||||
|  | _NEG_INF = float('-inf') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DoubleValueChecker(object): | ||||||
|  |   """Checker used for double fields. | ||||||
|  | 
 | ||||||
|  |   Performs type-check and range check. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Check and convert proposed_value to float.""" | ||||||
|  |     if (not hasattr(proposed_value, '__float__') and | ||||||
|  |         not hasattr(proposed_value, '__index__')) or ( | ||||||
|  |             type(proposed_value).__module__ == 'numpy' and | ||||||
|  |             type(proposed_value).__name__ == 'ndarray'): | ||||||
|  |       message = ('%.1024r has type %s, but expected one of: int, float' % | ||||||
|  |                  (proposed_value, type(proposed_value))) | ||||||
|  |       raise TypeError(message) | ||||||
|  |     return float(proposed_value) | ||||||
|  | 
 | ||||||
|  |   def DefaultValue(self): | ||||||
|  |     return 0.0 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class FloatValueChecker(DoubleValueChecker): | ||||||
|  |   """Checker used for float fields. | ||||||
|  | 
 | ||||||
|  |   Performs type-check and range check. | ||||||
|  | 
 | ||||||
|  |   Values exceeding a 32-bit float will be converted to inf/-inf. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CheckValue(self, proposed_value): | ||||||
|  |     """Check and convert proposed_value to float.""" | ||||||
|  |     converted_value = super().CheckValue(proposed_value) | ||||||
|  |     # This inf rounding matches the C++ proto SafeDoubleToFloat logic. | ||||||
|  |     if converted_value > _FLOAT_MAX: | ||||||
|  |       return _INF | ||||||
|  |     if converted_value < _FLOAT_MIN: | ||||||
|  |       return _NEG_INF | ||||||
|  | 
 | ||||||
|  |     return TruncateToFourByteFloat(converted_value) | ||||||
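Illustrating the boundary behaviour:

    checker = FloatValueChecker()
    checker.CheckValue(3.5e38)    # -> inf (exceeds _FLOAT_MAX)
    checker.CheckValue(-3.5e38)   # -> -inf
    checker.CheckValue(0.1)       # -> 0.10000000149011612, the nearest 4-byte float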
|  | 
 | ||||||
|  | # Type-checkers for all scalar CPPTYPEs. | ||||||
|  | _VALUE_CHECKERS = { | ||||||
|  |     _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_DOUBLE: DoubleValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_BOOL: BoolValueChecker(), | ||||||
|  |     _FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes), | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Map from field type to a function F, such that F(field_num, value) | ||||||
|  | # gives the total byte size for a value of the given type.  This | ||||||
|  | # byte size includes tag information and any other additional space | ||||||
|  | # associated with serializing "value". | ||||||
|  | TYPE_TO_BYTE_SIZE_FN = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize | ||||||
|  |     } | ||||||
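So a field's wire size is one lookup plus one call; e.g. for an int32 field numbered 1 holding the value 150:

    size_fn = TYPE_TO_BYTE_SIZE_FN[_FieldDescriptor.TYPE_INT32]
    size_fn(1, 150)   # -> 3: one tag byte plus a two-byte varint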
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field types to encoder constructors. | ||||||
|  | TYPE_TO_ENCODER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: encoder.StringEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field types to sizer constructors. | ||||||
|  | TYPE_TO_SIZER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: encoder.StringSizer, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Maps from field type to a decoder constructor. | ||||||
|  | TYPE_TO_DECODER = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: decoder.StringDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  | # Maps from field type to expected wiretype. | ||||||
|  | FIELD_TYPE_TO_WIRE_TYPE = { | ||||||
|  |     _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_STRING: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP, | ||||||
|  |     _FieldDescriptor.TYPE_MESSAGE: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_BYTES: | ||||||
|  |       wire_format.WIRETYPE_LENGTH_DELIMITED, | ||||||
|  |     _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32, | ||||||
|  |     _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64, | ||||||
|  |     _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT, | ||||||
|  |     _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT, | ||||||
|  |     } | ||||||
							
								
								
									
878  lib/protobuf/internal/well_known_types.py  (new file)
						|  | @ -0,0 +1,878 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains well known classes. | ||||||
|  | 
 | ||||||
|  | This file defines well-known classes which need extra maintenance, including: | ||||||
|  |   - Any | ||||||
|  |   - Duration | ||||||
|  |   - FieldMask | ||||||
|  |   - Struct | ||||||
|  |   - Timestamp | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'jieluo@google.com (Jie Luo)' | ||||||
|  | 
 | ||||||
|  | import calendar | ||||||
|  | import collections.abc | ||||||
|  | import datetime | ||||||
|  | 
 | ||||||
|  | from google.protobuf.descriptor import FieldDescriptor | ||||||
|  | 
 | ||||||
|  | _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' | ||||||
|  | _NANOS_PER_SECOND = 1000000000 | ||||||
|  | _NANOS_PER_MILLISECOND = 1000000 | ||||||
|  | _NANOS_PER_MICROSECOND = 1000 | ||||||
|  | _MILLIS_PER_SECOND = 1000 | ||||||
|  | _MICROS_PER_SECOND = 1000000 | ||||||
|  | _SECONDS_PER_DAY = 24 * 3600 | ||||||
|  | _DURATION_SECONDS_MAX = 315576000000 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Any(object): | ||||||
|  |   """Class for Any Message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def Pack(self, msg, type_url_prefix='type.googleapis.com/', | ||||||
|  |            deterministic=None): | ||||||
|  |     """Packs the specified message into current Any message.""" | ||||||
|  |     if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/': | ||||||
|  |       self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) | ||||||
|  |     else: | ||||||
|  |       self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name) | ||||||
|  |     self.value = msg.SerializeToString(deterministic=deterministic) | ||||||
|  | 
 | ||||||
|  |   def Unpack(self, msg): | ||||||
|  |     """Unpacks the current Any message into specified message.""" | ||||||
|  |     descriptor = msg.DESCRIPTOR | ||||||
|  |     if not self.Is(descriptor): | ||||||
|  |       return False | ||||||
|  |     msg.ParseFromString(self.value) | ||||||
|  |     return True | ||||||
|  | 
 | ||||||
|  |   def TypeName(self): | ||||||
|  |     """Returns the protobuf type name of the inner message.""" | ||||||
|  |     # Only last part is to be used: b/25630112 | ||||||
|  |     return self.type_url.split('/')[-1] | ||||||
|  | 
 | ||||||
|  |   def Is(self, descriptor): | ||||||
|  |     """Checks if this Any represents the given protobuf type.""" | ||||||
|  |     return '/' in self.type_url and self.TypeName() == descriptor.full_name | ||||||
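A typical round trip through the generated class (msg is any hypothetical message instance):

    from google.protobuf import any_pb2
    any_msg = any_pb2.Any()
    any_msg.Pack(msg)             # type_url becomes 'type.googleapis.com/<full_name>'
    target = type(msg)()
    if any_msg.Is(target.DESCRIPTOR):
      any_msg.Unpack(target)      # parses any_msg.value back into target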
|  | 
 | ||||||
|  | 
 | ||||||
|  | _EPOCH_DATETIME_NAIVE = datetime.datetime.utcfromtimestamp(0) | ||||||
|  | _EPOCH_DATETIME_AWARE = datetime.datetime.fromtimestamp( | ||||||
|  |     0, tz=datetime.timezone.utc) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Timestamp(object): | ||||||
|  |   """Class for Timestamp message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts Timestamp to RFC 3339 date string format. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A string converted from timestamp. The string is always Z-normalized | ||||||
|  |       and uses 3, 6 or 9 fractional digits as required to represent the | ||||||
|  |       exact time. Example of the return format: '1972-01-01T10:00:20.021Z' | ||||||
|  |     """ | ||||||
|  |     nanos = self.nanos % _NANOS_PER_SECOND | ||||||
|  |     total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND | ||||||
|  |     seconds = total_sec % _SECONDS_PER_DAY | ||||||
|  |     days = (total_sec - seconds) // _SECONDS_PER_DAY | ||||||
|  |     dt = datetime.datetime(1970, 1, 1) + datetime.timedelta(days, seconds) | ||||||
|  | 
 | ||||||
|  |     result = dt.isoformat() | ||||||
|  |     if (nanos % 1e9) == 0: | ||||||
|  |       # If there are 0 fractional digits, the fractional | ||||||
|  |       # point '.' should be omitted when serializing. | ||||||
|  |       return result + 'Z' | ||||||
|  |     if (nanos % 1e6) == 0: | ||||||
|  |       # Serialize 3 fractional digits. | ||||||
|  |       return result + '.%03dZ' % (nanos / 1e6) | ||||||
|  |     if (nanos % 1e3) == 0: | ||||||
|  |       # Serialize 6 fractional digits. | ||||||
|  |       return result + '.%06dZ' % (nanos / 1e3) | ||||||
|  |     # Serialize 9 fractional digits. | ||||||
|  |     return result + '.%09dZ' % nanos | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Parse a RFC 3339 date string format to Timestamp. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A date string. Any fractional digits (or none) and any offset are | ||||||
|  |           accepted as long as they fit into nanosecond precision. | ||||||
|  |           Example of accepted format: '1972-01-01T10:00:20.021-05:00' | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: On parsing problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('Timestamp JSON value not a string: {!r}'.format(value)) | ||||||
|  |     timezone_offset = value.find('Z') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       timezone_offset = value.find('+') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       timezone_offset = value.rfind('-') | ||||||
|  |     if timezone_offset == -1: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Failed to parse timestamp: missing valid timezone offset.') | ||||||
|  |     time_value = value[0:timezone_offset] | ||||||
|  |     # Parse datetime and nanos. | ||||||
|  |     point_position = time_value.find('.') | ||||||
|  |     if point_position == -1: | ||||||
|  |       second_value = time_value | ||||||
|  |       nano_value = '' | ||||||
|  |     else: | ||||||
|  |       second_value = time_value[:point_position] | ||||||
|  |       nano_value = time_value[point_position + 1:] | ||||||
|  |     if 't' in second_value: | ||||||
|  |       raise ValueError( | ||||||
|  |           'time data \'{0}\' does not match format \'%Y-%m-%dT%H:%M:%S\', ' | ||||||
|  |           'lowercase \'t\' is not accepted'.format(second_value)) | ||||||
|  |     date_object = datetime.datetime.strptime(second_value, _TIMESTAMPFOMAT) | ||||||
|  |     td = date_object - datetime.datetime(1970, 1, 1) | ||||||
|  |     seconds = td.seconds + td.days * _SECONDS_PER_DAY | ||||||
|  |     if len(nano_value) > 9: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Failed to parse Timestamp: nanos {0} more than ' | ||||||
|  |           '9 fractional digits.'.format(nano_value)) | ||||||
|  |     if nano_value: | ||||||
|  |       nanos = round(float('0.' + nano_value) * 1e9) | ||||||
|  |     else: | ||||||
|  |       nanos = 0 | ||||||
|  |     # Parse timezone offsets. | ||||||
|  |     if value[timezone_offset] == 'Z': | ||||||
|  |       if len(value) != timezone_offset + 1: | ||||||
|  |         raise ValueError('Failed to parse timestamp: invalid trailing' | ||||||
|  |                          ' data {0}.'.format(value)) | ||||||
|  |     else: | ||||||
|  |       timezone = value[timezone_offset:] | ||||||
|  |       pos = timezone.find(':') | ||||||
|  |       if pos == -1: | ||||||
|  |         raise ValueError( | ||||||
|  |             'Invalid timezone offset value: {0}.'.format(timezone)) | ||||||
|  |       if timezone[0] == '+': | ||||||
|  |         seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 | ||||||
|  |       else: | ||||||
|  |         seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60 | ||||||
|  |     # Set seconds and nanos | ||||||
|  |     self.seconds = int(seconds) | ||||||
|  |     self.nanos = int(nanos) | ||||||
|  | 
 | ||||||
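|  |   # Illustrative sketch (not upstream): offsets are folded into seconds, | ||||||
|  |   # so a -05:00 local time parses to the same instant as its Z form: | ||||||
|  |   #   ts.FromJsonString('1972-01-01T10:00:20.021-05:00') | ||||||
|  |   #   ts.ToJsonString()   ->  '1972-01-01T15:00:20.021Z' | ||||||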
|  |   def GetCurrentTime(self): | ||||||
|  |     """Get the current UTC into Timestamp.""" | ||||||
|  |     self.FromDatetime(datetime.datetime.utcnow()) | ||||||
|  | 
 | ||||||
|  |   def ToNanoseconds(self): | ||||||
|  |     """Converts Timestamp to nanoseconds since epoch.""" | ||||||
|  |     return self.seconds * _NANOS_PER_SECOND + self.nanos | ||||||
|  | 
 | ||||||
|  |   def ToMicroseconds(self): | ||||||
|  |     """Converts Timestamp to microseconds since epoch.""" | ||||||
|  |     return (self.seconds * _MICROS_PER_SECOND + | ||||||
|  |             self.nanos // _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def ToMilliseconds(self): | ||||||
|  |     """Converts Timestamp to milliseconds since epoch.""" | ||||||
|  |     return (self.seconds * _MILLIS_PER_SECOND + | ||||||
|  |             self.nanos // _NANOS_PER_MILLISECOND) | ||||||
|  | 
 | ||||||
|  |   def ToSeconds(self): | ||||||
|  |     """Converts Timestamp to seconds since epoch.""" | ||||||
|  |     return self.seconds | ||||||
|  | 
 | ||||||
|  |   def FromNanoseconds(self, nanos): | ||||||
|  |     """Converts nanoseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = nanos // _NANOS_PER_SECOND | ||||||
|  |     self.nanos = nanos % _NANOS_PER_SECOND | ||||||
|  | 
 | ||||||
|  |   def FromMicroseconds(self, micros): | ||||||
|  |     """Converts microseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = micros // _MICROS_PER_SECOND | ||||||
|  |     self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND | ||||||
|  | 
 | ||||||
|  |   def FromMilliseconds(self, millis): | ||||||
|  |     """Converts milliseconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = millis // _MILLIS_PER_SECOND | ||||||
|  |     self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND | ||||||
|  | 
 | ||||||
|  |   def FromSeconds(self, seconds): | ||||||
|  |     """Converts seconds since epoch to Timestamp.""" | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = 0 | ||||||
|  | 
 | ||||||
|  |   def ToDatetime(self, tzinfo=None): | ||||||
|  |     """Converts Timestamp to a datetime. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       tzinfo: A datetime.tzinfo subclass; defaults to None. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       If tzinfo is None, returns a timezone-naive UTC datetime (with no timezone | ||||||
|  |       information, i.e. not aware that it's UTC). | ||||||
|  | 
 | ||||||
|  |       Otherwise, returns a timezone-aware datetime in the input timezone. | ||||||
|  |     """ | ||||||
|  |     delta = datetime.timedelta( | ||||||
|  |         seconds=self.seconds, | ||||||
|  |         microseconds=_RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)) | ||||||
|  |     if tzinfo is None: | ||||||
|  |       return _EPOCH_DATETIME_NAIVE + delta | ||||||
|  |     else: | ||||||
|  |       return _EPOCH_DATETIME_AWARE.astimezone(tzinfo) + delta | ||||||
|  | 
 | ||||||
|  |   def FromDatetime(self, dt): | ||||||
|  |     """Converts datetime to Timestamp. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       dt: A datetime. If it's timezone-naive, it's assumed to be in UTC. | ||||||
|  |     """ | ||||||
|  |     # Using this guide: http://wiki.python.org/moin/WorkingWithTime | ||||||
|  |     # And this conversion guide: http://docs.python.org/library/time.html | ||||||
|  | 
 | ||||||
|  |     # Turn the date parameter into a tuple (struct_time) that can then be | ||||||
|  |     # manipulated into a long value of seconds.  During the conversion from | ||||||
|  |     # struct_time to long, the source date is in UTC, and so it follows that | ||||||
|  |     # the correct transformation is calendar.timegm(). | ||||||
|  |     self.seconds = calendar.timegm(dt.utctimetuple()) | ||||||
|  |     self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND | ||||||
|  | 
 | ||||||
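|  | # Illustrative sketch (not upstream): naive datetimes are treated as UTC in | ||||||
|  | # both directions, so a round trip preserves the instant: | ||||||
|  | #   ts.FromDatetime(datetime.datetime(2000, 1, 1, 12, 30))   # naive => UTC | ||||||
|  | #   ts.ToDatetime()  ->  datetime.datetime(2000, 1, 1, 12, 30), still naive | ||||||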
|  | 
 | ||||||
|  | class Duration(object): | ||||||
|  |   """Class for Duration message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts Duration to string format. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A string converted from self. The string will contain 0, 3, 6, or | ||||||
|  |       9 fractional digits, depending on the precision required to | ||||||
|  |       represent the exact Duration value. For example: "1s", "1.010s", | ||||||
|  |       "1.000000100s", "-3.100s". | ||||||
|  |     """ | ||||||
|  |     _CheckDurationValid(self.seconds, self.nanos) | ||||||
|  |     if self.seconds < 0 or self.nanos < 0: | ||||||
|  |       result = '-' | ||||||
|  |       seconds = - self.seconds + int((0 - self.nanos) // 1e9) | ||||||
|  |       nanos = (0 - self.nanos) % 1e9 | ||||||
|  |     else: | ||||||
|  |       result = '' | ||||||
|  |       seconds = self.seconds + int(self.nanos // 1e9) | ||||||
|  |       nanos = self.nanos % 1e9 | ||||||
|  |     result += '%d' % seconds | ||||||
|  |     if (nanos % 1e9) == 0: | ||||||
|  |       # If there are 0 fractional digits, the fractional | ||||||
|  |       # point '.' should be omitted when serializing. | ||||||
|  |       return result + 's' | ||||||
|  |     if (nanos % 1e6) == 0: | ||||||
|  |       # Serialize 3 fractional digits. | ||||||
|  |       return result + '.%03ds' % (nanos / 1e6) | ||||||
|  |     if (nanos % 1e3) == 0: | ||||||
|  |       # Serialize 6 fractional digits. | ||||||
|  |       return result + '.%06ds' % (nanos / 1e3) | ||||||
|  |     # Serialize 9 fractional digits. | ||||||
|  |     return result + '.%09ds' % nanos | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Converts a string to Duration. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A string to be converted. The string must end with 's'. Any | ||||||
|  |           fractional digits (or none) are accepted as long as they fit into | ||||||
|  |           nanosecond precision. For example: "1s", "1.01s", "1.0000001s", | ||||||
|  |           "-3.100s". | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: On parsing problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('Duration JSON value not a string: {!r}'.format(value)) | ||||||
|  |     if len(value) < 1 or value[-1] != 's': | ||||||
|  |       raise ValueError( | ||||||
|  |           'Duration must end with letter "s": {0}.'.format(value)) | ||||||
|  |     try: | ||||||
|  |       pos = value.find('.') | ||||||
|  |       if pos == -1: | ||||||
|  |         seconds = int(value[:-1]) | ||||||
|  |         nanos = 0 | ||||||
|  |       else: | ||||||
|  |         seconds = int(value[:pos]) | ||||||
|  |         if value[0] == '-': | ||||||
|  |           nanos = int(round(float('-0{0}'.format(value[pos:-1])) * 1e9)) | ||||||
|  |         else: | ||||||
|  |           nanos = int(round(float('0{0}'.format(value[pos:-1])) * 1e9)) | ||||||
|  |       _CheckDurationValid(seconds, nanos) | ||||||
|  |       self.seconds = seconds | ||||||
|  |       self.nanos = nanos | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise ValueError( | ||||||
|  |           'Couldn\'t parse duration: {0} : {1}.'.format(value, e)) | ||||||
|  | 
 | ||||||
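|  |   # Illustrative sketch (not upstream): JSON round trips keep seconds and | ||||||
|  |   # nanos sign-consistent and renormalize the fractional width: | ||||||
|  |   #   d.FromJsonString('-3.100s')  # seconds == -3, nanos == -100000000 | ||||||
|  |   #   d.ToJsonString()             # '-3.100s' (3 digits suffice) | ||||||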
|  |   def ToNanoseconds(self): | ||||||
|  |     """Converts a Duration to nanoseconds.""" | ||||||
|  |     return self.seconds * _NANOS_PER_SECOND + self.nanos | ||||||
|  | 
 | ||||||
|  |   def ToMicroseconds(self): | ||||||
|  |     """Converts a Duration to microseconds.""" | ||||||
|  |     micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND) | ||||||
|  |     return self.seconds * _MICROS_PER_SECOND + micros | ||||||
|  | 
 | ||||||
|  |   def ToMilliseconds(self): | ||||||
|  |     """Converts a Duration to milliseconds.""" | ||||||
|  |     millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND) | ||||||
|  |     return self.seconds * _MILLIS_PER_SECOND + millis | ||||||
|  | 
 | ||||||
|  |   def ToSeconds(self): | ||||||
|  |     """Converts a Duration to seconds.""" | ||||||
|  |     return self.seconds | ||||||
|  | 
 | ||||||
|  |   def FromNanoseconds(self, nanos): | ||||||
|  |     """Converts nanoseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration(nanos // _NANOS_PER_SECOND, | ||||||
|  |                             nanos % _NANOS_PER_SECOND) | ||||||
|  | 
 | ||||||
|  |   def FromMicroseconds(self, micros): | ||||||
|  |     """Converts microseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration( | ||||||
|  |         micros // _MICROS_PER_SECOND, | ||||||
|  |         (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def FromMilliseconds(self, millis): | ||||||
|  |     """Converts milliseconds to Duration.""" | ||||||
|  |     self._NormalizeDuration( | ||||||
|  |         millis // _MILLIS_PER_SECOND, | ||||||
|  |         (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND) | ||||||
|  | 
 | ||||||
|  |   def FromSeconds(self, seconds): | ||||||
|  |     """Converts seconds to Duration.""" | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = 0 | ||||||
|  | 
 | ||||||
|  |   def ToTimedelta(self): | ||||||
|  |     """Converts Duration to timedelta.""" | ||||||
|  |     return datetime.timedelta( | ||||||
|  |         seconds=self.seconds, microseconds=_RoundTowardZero( | ||||||
|  |             self.nanos, _NANOS_PER_MICROSECOND)) | ||||||
|  | 
 | ||||||
|  |   def FromTimedelta(self, td): | ||||||
|  |     """Converts timedelta to Duration.""" | ||||||
|  |     self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY, | ||||||
|  |                             td.microseconds * _NANOS_PER_MICROSECOND) | ||||||
|  | 
 | ||||||
|  |   def _NormalizeDuration(self, seconds, nanos): | ||||||
|  |     """Set Duration by seconds and nanos.""" | ||||||
|  |     # Force nanos to be negative if the duration is negative. | ||||||
|  |     if seconds < 0 and nanos > 0: | ||||||
|  |       seconds += 1 | ||||||
|  |       nanos -= _NANOS_PER_SECOND | ||||||
|  |     self.seconds = seconds | ||||||
|  |     self.nanos = nanos | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CheckDurationValid(seconds, nanos): | ||||||
|  |   if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX: | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Seconds {0} must be in range ' | ||||||
|  |         '[-315576000000, 315576000000].'.format(seconds)) | ||||||
|  |   if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND: | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Nanos {0} must be in range ' | ||||||
|  |         '[-999999999, 999999999].'.format(nanos)) | ||||||
|  |   if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0): | ||||||
|  |     raise ValueError( | ||||||
|  |         'Duration is not valid: Sign mismatch.') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _RoundTowardZero(value, divider): | ||||||
|  |   """Truncates the remainder part after division.""" | ||||||
|  |   # For some languages, the sign of the remainder is implementation | ||||||
|  |   # dependent if any of the operands is negative. Here we enforce | ||||||
|  |   # "rounded toward zero" semantics. For example, for (-5) / 2 an | ||||||
|  |   # implementation may give -3 as the result with the remainder being | ||||||
|  |   # 1. This function ensures we always return -2 (closer to zero). | ||||||
|  |   result = value // divider | ||||||
|  |   remainder = value % divider | ||||||
|  |   if result < 0 and remainder > 0: | ||||||
|  |     return result + 1 | ||||||
|  |   else: | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
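|  | # Illustrative note (not upstream): Python's // rounds toward negative | ||||||
|  | # infinity, so -5 // 2 == -3 with remainder 1, while | ||||||
|  | # _RoundTowardZero(-5, 2) corrects this back to -2. | ||||||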
|  | 
 | ||||||
|  | class FieldMask(object): | ||||||
|  |   """Class for FieldMask message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self): | ||||||
|  |     """Converts FieldMask to string according to proto3 JSON spec.""" | ||||||
|  |     camelcase_paths = [] | ||||||
|  |     for path in self.paths: | ||||||
|  |       camelcase_paths.append(_SnakeCaseToCamelCase(path)) | ||||||
|  |     return ','.join(camelcase_paths) | ||||||
|  | 
 | ||||||
|  |   def FromJsonString(self, value): | ||||||
|  |     """Converts string to FieldMask according to proto3 JSON spec.""" | ||||||
|  |     if not isinstance(value, str): | ||||||
|  |       raise ValueError('FieldMask JSON value not a string: {!r}'.format(value)) | ||||||
|  |     self.Clear() | ||||||
|  |     if value: | ||||||
|  |       for path in value.split(','): | ||||||
|  |         self.paths.append(_CamelCaseToSnakeCase(path)) | ||||||
|  | 
 | ||||||
|  |   def IsValidForDescriptor(self, message_descriptor): | ||||||
|  |     """Checks whether the FieldMask is valid for Message Descriptor.""" | ||||||
|  |     for path in self.paths: | ||||||
|  |       if not _IsValidPath(message_descriptor, path): | ||||||
|  |         return False | ||||||
|  |     return True | ||||||
|  | 
 | ||||||
|  |   def AllFieldsFromDescriptor(self, message_descriptor): | ||||||
|  |     """Gets all direct fields of Message Descriptor to FieldMask.""" | ||||||
|  |     self.Clear() | ||||||
|  |     for field in message_descriptor.fields: | ||||||
|  |       self.paths.append(field.name) | ||||||
|  | 
 | ||||||
|  |   def CanonicalFormFromMask(self, mask): | ||||||
|  |     """Converts a FieldMask to the canonical form. | ||||||
|  | 
 | ||||||
|  |     Removes paths that are covered by another path. For example, | ||||||
|  |     "foo.bar" is covered by "foo" and will be removed if "foo" | ||||||
|  |     is also in the FieldMask. Then sorts all paths in alphabetical order. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       mask: The original FieldMask to be converted. | ||||||
|  |     """ | ||||||
|  |     tree = _FieldMaskTree(mask) | ||||||
|  |     tree.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def Union(self, mask1, mask2): | ||||||
|  |     """Merges mask1 and mask2 into this FieldMask.""" | ||||||
|  |     _CheckFieldMaskMessage(mask1) | ||||||
|  |     _CheckFieldMaskMessage(mask2) | ||||||
|  |     tree = _FieldMaskTree(mask1) | ||||||
|  |     tree.MergeFromFieldMask(mask2) | ||||||
|  |     tree.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def Intersect(self, mask1, mask2): | ||||||
|  |     """Intersects mask1 and mask2 into this FieldMask.""" | ||||||
|  |     _CheckFieldMaskMessage(mask1) | ||||||
|  |     _CheckFieldMaskMessage(mask2) | ||||||
|  |     tree = _FieldMaskTree(mask1) | ||||||
|  |     intersection = _FieldMaskTree() | ||||||
|  |     for path in mask2.paths: | ||||||
|  |       tree.IntersectPath(path, intersection) | ||||||
|  |     intersection.ToFieldMask(self) | ||||||
|  | 
 | ||||||
|  |   def MergeMessage( | ||||||
|  |       self, source, destination, | ||||||
|  |       replace_message_field=False, replace_repeated_field=False): | ||||||
|  |     """Merges fields specified in FieldMask from source to destination. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       source: Source message. | ||||||
|  |       destination: The destination message to be merged into. | ||||||
|  |       replace_message_field: Replace message field if True. Merge message | ||||||
|  |           field if False. | ||||||
|  |       replace_repeated_field: Replace repeated field if True. Append | ||||||
|  |           elements of repeated field if False. | ||||||
|  |     """ | ||||||
|  |     tree = _FieldMaskTree(self) | ||||||
|  |     tree.MergeMessage( | ||||||
|  |         source, destination, replace_message_field, replace_repeated_field) | ||||||
|  | 
 | ||||||
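|  | # Illustrative sketch (not upstream): assuming mask, a and b are FieldMask | ||||||
|  | # messages, the helpers above compose like set operations: | ||||||
|  | #   a.FromJsonString('foo.bar,foo')    # 'foo' covers 'foo.bar' | ||||||
|  | #   mask.CanonicalFormFromMask(a)      # mask.paths == ['foo'] | ||||||
|  | #   mask.Union(a, b)                   # covered duplicates collapse too | ||||||
|  | #   mask.Intersect(a, b)               # only commonly covered paths remain | ||||||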
|  | 
 | ||||||
|  | def _IsValidPath(message_descriptor, path): | ||||||
|  |   """Checks whether the path is valid for Message Descriptor.""" | ||||||
|  |   parts = path.split('.') | ||||||
|  |   last = parts.pop() | ||||||
|  |   for name in parts: | ||||||
|  |     field = message_descriptor.fields_by_name.get(name) | ||||||
|  |     if (field is None or | ||||||
|  |         field.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |         field.type != FieldDescriptor.TYPE_MESSAGE): | ||||||
|  |       return False | ||||||
|  |     message_descriptor = field.message_type | ||||||
|  |   return last in message_descriptor.fields_by_name | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CheckFieldMaskMessage(message): | ||||||
|  |   """Raises ValueError if message is not a FieldMask.""" | ||||||
|  |   message_descriptor = message.DESCRIPTOR | ||||||
|  |   if (message_descriptor.name != 'FieldMask' or | ||||||
|  |       message_descriptor.file.name != 'google/protobuf/field_mask.proto'): | ||||||
|  |     raise ValueError('Message {0} is not a FieldMask.'.format( | ||||||
|  |         message_descriptor.full_name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SnakeCaseToCamelCase(path_name): | ||||||
|  |   """Converts a path name from snake_case to camelCase.""" | ||||||
|  |   result = [] | ||||||
|  |   after_underscore = False | ||||||
|  |   for c in path_name: | ||||||
|  |     if c.isupper(): | ||||||
|  |       raise ValueError( | ||||||
|  |           'Fail to print FieldMask to Json string: Path name ' | ||||||
|  |           '{0} must not contain uppercase letters.'.format(path_name)) | ||||||
|  |     if after_underscore: | ||||||
|  |       if c.islower(): | ||||||
|  |         result.append(c.upper()) | ||||||
|  |         after_underscore = False | ||||||
|  |       else: | ||||||
|  |         raise ValueError( | ||||||
|  |             'Fail to print FieldMask to Json string: The ' | ||||||
|  |             'character after a "_" must be a lowercase letter ' | ||||||
|  |             'in path name {0}.'.format(path_name)) | ||||||
|  |     elif c == '_': | ||||||
|  |       after_underscore = True | ||||||
|  |     else: | ||||||
|  |       result += c | ||||||
|  | 
 | ||||||
|  |   if after_underscore: | ||||||
|  |     raise ValueError('Fail to print FieldMask to Json string: Trailing "_" ' | ||||||
|  |                      'in path name {0}.'.format(path_name)) | ||||||
|  |   return ''.join(result) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CamelCaseToSnakeCase(path_name): | ||||||
|  |   """Converts a field name from camelCase to snake_case.""" | ||||||
|  |   result = [] | ||||||
|  |   for c in path_name: | ||||||
|  |     if c == '_': | ||||||
|  |       raise ValueError('Fail to parse FieldMask: Path name ' | ||||||
|  |                        '{0} must not contain "_"s.'.format(path_name)) | ||||||
|  |     if c.isupper(): | ||||||
|  |       result += '_' | ||||||
|  |       result += c.lower() | ||||||
|  |     else: | ||||||
|  |       result += c | ||||||
|  |   return ''.join(result) | ||||||
|  | 
 | ||||||
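|  | # Illustrative sketch (not upstream): the two converters invert each other | ||||||
|  | # on well-formed names: | ||||||
|  | #   _SnakeCaseToCamelCase('foo_bar_baz')   ->  'fooBarBaz' | ||||||
|  | #   _CamelCaseToSnakeCase('fooBarBaz')     ->  'foo_bar_baz' | ||||||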
|  | 
 | ||||||
|  | class _FieldMaskTree(object): | ||||||
|  |   """Represents a FieldMask in a tree structure. | ||||||
|  | 
 | ||||||
|  |   For example, given a FieldMask "foo.bar,foo.baz,bar.baz", | ||||||
|  |   the FieldMaskTree will be: | ||||||
|  |       [_root] -+- foo -+- bar | ||||||
|  |                |       | | ||||||
|  |                |       +- baz | ||||||
|  |                | | ||||||
|  |                +- bar --- baz | ||||||
|  |   In the tree, each leaf node represents a field path. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   __slots__ = ('_root',) | ||||||
|  | 
 | ||||||
|  |   def __init__(self, field_mask=None): | ||||||
|  |     """Initializes the tree by FieldMask.""" | ||||||
|  |     self._root = {} | ||||||
|  |     if field_mask: | ||||||
|  |       self.MergeFromFieldMask(field_mask) | ||||||
|  | 
 | ||||||
|  |   def MergeFromFieldMask(self, field_mask): | ||||||
|  |     """Merges a FieldMask to the tree.""" | ||||||
|  |     for path in field_mask.paths: | ||||||
|  |       self.AddPath(path) | ||||||
|  | 
 | ||||||
|  |   def AddPath(self, path): | ||||||
|  |     """Adds a field path into the tree. | ||||||
|  | 
 | ||||||
|  |     If the field path to add is a sub-path of an existing field path | ||||||
|  |     in the tree (i.e., a leaf node), it means the tree already matches | ||||||
|  |     the given path so nothing will be added to the tree. If the path | ||||||
|  |     matches an existing non-leaf node in the tree, that non-leaf node | ||||||
|  |     will be turned into a leaf node with all its children removed because | ||||||
|  |     the path matches all the node's children. Otherwise, a new path will | ||||||
|  |     be added. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       path: The field path to add. | ||||||
|  |     """ | ||||||
|  |     node = self._root | ||||||
|  |     for name in path.split('.'): | ||||||
|  |       if name not in node: | ||||||
|  |         node[name] = {} | ||||||
|  |       elif not node[name]: | ||||||
|  |         # Pre-existing empty node implies we already have this entire tree. | ||||||
|  |         return | ||||||
|  |       node = node[name] | ||||||
|  |     # Remove any sub-trees we might have had. | ||||||
|  |     node.clear() | ||||||
|  | 
 | ||||||
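|  |   # Illustrative sketch (not upstream): starting from an empty tree, | ||||||
|  |   #   AddPath('foo.bar')  ->  {'foo': {'bar': {}}} | ||||||
|  |   #   AddPath('foo')      ->  {'foo': {}}   (children collapsed) | ||||||
|  |   #   AddPath('foo.baz')  ->  {'foo': {}}   (already covered; no-op) | ||||||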
|  |   def ToFieldMask(self, field_mask): | ||||||
|  |     """Converts the tree to a FieldMask.""" | ||||||
|  |     field_mask.Clear() | ||||||
|  |     _AddFieldPaths(self._root, '', field_mask) | ||||||
|  | 
 | ||||||
|  |   def IntersectPath(self, path, intersection): | ||||||
|  |     """Calculates the intersection part of a field path with this tree. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       path: The field path to calculate the intersection with. | ||||||
|  |       intersection: The out tree to record the intersection part. | ||||||
|  |     """ | ||||||
|  |     node = self._root | ||||||
|  |     for name in path.split('.'): | ||||||
|  |       if name not in node: | ||||||
|  |         return | ||||||
|  |       elif not node[name]: | ||||||
|  |         intersection.AddPath(path) | ||||||
|  |         return | ||||||
|  |       node = node[name] | ||||||
|  |     intersection.AddLeafNodes(path, node) | ||||||
|  | 
 | ||||||
|  |   def AddLeafNodes(self, prefix, node): | ||||||
|  |     """Adds leaf nodes begin with prefix to this tree.""" | ||||||
|  |     if not node: | ||||||
|  |       self.AddPath(prefix) | ||||||
|  |     for name in node: | ||||||
|  |       child_path = prefix + '.' + name | ||||||
|  |       self.AddLeafNodes(child_path, node[name]) | ||||||
|  | 
 | ||||||
|  |   def MergeMessage( | ||||||
|  |       self, source, destination, | ||||||
|  |       replace_message, replace_repeated): | ||||||
|  |     """Merge all fields specified by this tree from source to destination.""" | ||||||
|  |     _MergeMessage( | ||||||
|  |         self._root, source, destination, replace_message, replace_repeated) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _StrConvert(value): | ||||||
|  |   """Converts value to str if it is not.""" | ||||||
|  |   # This file is imported by c extension and some methods like ClearField | ||||||
|  |   # requires string for the field name. py2/py3 has different text | ||||||
|  |   # type and may use unicode. | ||||||
|  |   if not isinstance(value, str): | ||||||
|  |     return value.encode('utf-8') | ||||||
|  |   return value | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _MergeMessage( | ||||||
|  |     node, source, destination, replace_message, replace_repeated): | ||||||
|  |   """Merge all fields specified by a sub-tree from source to destination.""" | ||||||
|  |   source_descriptor = source.DESCRIPTOR | ||||||
|  |   for name in node: | ||||||
|  |     child = node[name] | ||||||
|  |     field = source_descriptor.fields_by_name[name] | ||||||
|  |     if field is None: | ||||||
|  |       raise ValueError('Error: Can\'t find field {0} in message {1}.'.format( | ||||||
|  |           name, source_descriptor.full_name)) | ||||||
|  |     if child: | ||||||
|  |       # Sub-paths are only allowed for singular message fields. | ||||||
|  |       if (field.label == FieldDescriptor.LABEL_REPEATED or | ||||||
|  |           field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE): | ||||||
|  |         raise ValueError('Error: Field {0} in message {1} is not a singular ' | ||||||
|  |                          'message field and cannot have sub-fields.'.format( | ||||||
|  |                              name, source_descriptor.full_name)) | ||||||
|  |       if source.HasField(name): | ||||||
|  |         _MergeMessage( | ||||||
|  |             child, getattr(source, name), getattr(destination, name), | ||||||
|  |             replace_message, replace_repeated) | ||||||
|  |       continue | ||||||
|  |     if field.label == FieldDescriptor.LABEL_REPEATED: | ||||||
|  |       if replace_repeated: | ||||||
|  |         destination.ClearField(_StrConvert(name)) | ||||||
|  |       repeated_source = getattr(source, name) | ||||||
|  |       repeated_destination = getattr(destination, name) | ||||||
|  |       repeated_destination.MergeFrom(repeated_source) | ||||||
|  |     else: | ||||||
|  |       if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |         if replace_message: | ||||||
|  |           destination.ClearField(_StrConvert(name)) | ||||||
|  |         if source.HasField(name): | ||||||
|  |           getattr(destination, name).MergeFrom(getattr(source, name)) | ||||||
|  |       else: | ||||||
|  |         setattr(destination, name, getattr(source, name)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _AddFieldPaths(node, prefix, field_mask): | ||||||
|  |   """Adds the field paths descended from node to field_mask.""" | ||||||
|  |   if not node and prefix: | ||||||
|  |     field_mask.paths.append(prefix) | ||||||
|  |     return | ||||||
|  |   for name in sorted(node): | ||||||
|  |     if prefix: | ||||||
|  |       child_path = prefix + '.' + name | ||||||
|  |     else: | ||||||
|  |       child_path = name | ||||||
|  |     _AddFieldPaths(node[name], child_path, field_mask) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _SetStructValue(struct_value, value): | ||||||
|  |   if value is None: | ||||||
|  |     struct_value.null_value = 0 | ||||||
|  |   elif isinstance(value, bool): | ||||||
|  |     # Note: this check must come before the number check because in Python | ||||||
|  |     # True and False are also considered numbers. | ||||||
|  |     struct_value.bool_value = value | ||||||
|  |   elif isinstance(value, str): | ||||||
|  |     struct_value.string_value = value | ||||||
|  |   elif isinstance(value, (int, float)): | ||||||
|  |     struct_value.number_value = value | ||||||
|  |   elif isinstance(value, (dict, Struct)): | ||||||
|  |     struct_value.struct_value.Clear() | ||||||
|  |     struct_value.struct_value.update(value) | ||||||
|  |   elif isinstance(value, (list, ListValue)): | ||||||
|  |     struct_value.list_value.Clear() | ||||||
|  |     struct_value.list_value.extend(value) | ||||||
|  |   else: | ||||||
|  |     raise ValueError('Unexpected type') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _GetStructValue(struct_value): | ||||||
|  |   which = struct_value.WhichOneof('kind') | ||||||
|  |   if which == 'struct_value': | ||||||
|  |     return struct_value.struct_value | ||||||
|  |   elif which == 'null_value': | ||||||
|  |     return None | ||||||
|  |   elif which == 'number_value': | ||||||
|  |     return struct_value.number_value | ||||||
|  |   elif which == 'string_value': | ||||||
|  |     return struct_value.string_value | ||||||
|  |   elif which == 'bool_value': | ||||||
|  |     return struct_value.bool_value | ||||||
|  |   elif which == 'list_value': | ||||||
|  |     return struct_value.list_value | ||||||
|  |   elif which is None: | ||||||
|  |     raise ValueError('Value not set') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Struct(object): | ||||||
|  |   """Class for Struct message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, key): | ||||||
|  |     return _GetStructValue(self.fields[key]) | ||||||
|  | 
 | ||||||
|  |   def __contains__(self, item): | ||||||
|  |     return item in self.fields | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, key, value): | ||||||
|  |     _SetStructValue(self.fields[key], value) | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key): | ||||||
|  |     del self.fields[key] | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     return len(self.fields) | ||||||
|  | 
 | ||||||
|  |   def __iter__(self): | ||||||
|  |     return iter(self.fields) | ||||||
|  | 
 | ||||||
|  |   def keys(self):  # pylint: disable=invalid-name | ||||||
|  |     return self.fields.keys() | ||||||
|  | 
 | ||||||
|  |   def values(self):  # pylint: disable=invalid-name | ||||||
|  |     return [self[key] for key in self] | ||||||
|  | 
 | ||||||
|  |   def items(self):  # pylint: disable=invalid-name | ||||||
|  |     return [(key, self[key]) for key in self] | ||||||
|  | 
 | ||||||
|  |   def get_or_create_list(self, key): | ||||||
|  |     """Returns a list for this key, creating if it didn't exist already.""" | ||||||
|  |     if not self.fields[key].HasField('list_value'): | ||||||
|  |       # Clear will mark list_value modified which will indeed create a list. | ||||||
|  |       self.fields[key].list_value.Clear() | ||||||
|  |     return self.fields[key].list_value | ||||||
|  | 
 | ||||||
|  |   def get_or_create_struct(self, key): | ||||||
|  |     """Returns a struct for this key, creating if it didn't exist already.""" | ||||||
|  |     if not self.fields[key].HasField('struct_value'): | ||||||
|  |       # Clear will mark struct_value modified which will indeed create a struct. | ||||||
|  |       self.fields[key].struct_value.Clear() | ||||||
|  |     return self.fields[key].struct_value | ||||||
|  | 
 | ||||||
|  |   def update(self, dictionary):  # pylint: disable=invalid-name | ||||||
|  |     for key, value in dictionary.items(): | ||||||
|  |       _SetStructValue(self.fields[key], value) | ||||||
|  | 
 | ||||||
|  | collections.abc.MutableMapping.register(Struct) | ||||||
|  | 
 | ||||||
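|  | # Illustrative sketch (not upstream): with the registration above, a | ||||||
|  | # generated Struct instance s behaves like a mutable mapping: | ||||||
|  | #   s['name'] = 'Ada'; s['tags'] = ['a', 'b']     # values auto-wrapped | ||||||
|  | #   s['name']   ->  'Ada' | ||||||
|  | #   list(s)     ->  ['name', 'tags'] | ||||||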
|  | 
 | ||||||
|  | class ListValue(object): | ||||||
|  |   """Class for ListValue message type.""" | ||||||
|  | 
 | ||||||
|  |   __slots__ = () | ||||||
|  | 
 | ||||||
|  |   def __len__(self): | ||||||
|  |     return len(self.values) | ||||||
|  | 
 | ||||||
|  |   def append(self, value): | ||||||
|  |     _SetStructValue(self.values.add(), value) | ||||||
|  | 
 | ||||||
|  |   def extend(self, elem_seq): | ||||||
|  |     for value in elem_seq: | ||||||
|  |       self.append(value) | ||||||
|  | 
 | ||||||
|  |   def __getitem__(self, index): | ||||||
|  |     """Retrieves item by the specified index.""" | ||||||
|  |     return _GetStructValue(self.values.__getitem__(index)) | ||||||
|  | 
 | ||||||
|  |   def __setitem__(self, index, value): | ||||||
|  |     _SetStructValue(self.values.__getitem__(index), value) | ||||||
|  | 
 | ||||||
|  |   def __delitem__(self, key): | ||||||
|  |     del self.values[key] | ||||||
|  | 
 | ||||||
|  |   def items(self): | ||||||
|  |     for i in range(len(self)): | ||||||
|  |       yield self[i] | ||||||
|  | 
 | ||||||
|  |   def add_struct(self): | ||||||
|  |     """Appends and returns a struct value as the next value in the list.""" | ||||||
|  |     struct_value = self.values.add().struct_value | ||||||
|  |     # Clear will mark struct_value modified which will indeed create a struct. | ||||||
|  |     struct_value.Clear() | ||||||
|  |     return struct_value | ||||||
|  | 
 | ||||||
|  |   def add_list(self): | ||||||
|  |     """Appends and returns a list value as the next value in the list.""" | ||||||
|  |     list_value = self.values.add().list_value | ||||||
|  |     # Clear will mark list_value modified which will indeed create a list. | ||||||
|  |     list_value.Clear() | ||||||
|  |     return list_value | ||||||
|  | 
 | ||||||
|  | collections.abc.MutableSequence.register(ListValue) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | WKTBASES = { | ||||||
|  |     'google.protobuf.Any': Any, | ||||||
|  |     'google.protobuf.Duration': Duration, | ||||||
|  |     'google.protobuf.FieldMask': FieldMask, | ||||||
|  |     'google.protobuf.ListValue': ListValue, | ||||||
|  |     'google.protobuf.Struct': Struct, | ||||||
|  |     'google.protobuf.Timestamp': Timestamp, | ||||||
|  | } | ||||||
							
								
								
									
268  lib/protobuf/internal/wire_format.py  (Normal file)
						|  | @ -0,0 +1,268 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Constants and static functions to support protocol buffer wire format.""" | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | import struct | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag. | ||||||
|  | TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7 | ||||||
|  | 
 | ||||||
|  | # These numbers identify the wire type of a protocol buffer value. | ||||||
|  | # We use the least-significant TAG_TYPE_BITS bits of the varint-encoded | ||||||
|  | # tag-and-type to store one of these WIRETYPE_* constants. | ||||||
|  | # These values must match WireType enum in google/protobuf/wire_format.h. | ||||||
|  | WIRETYPE_VARINT = 0 | ||||||
|  | WIRETYPE_FIXED64 = 1 | ||||||
|  | WIRETYPE_LENGTH_DELIMITED = 2 | ||||||
|  | WIRETYPE_START_GROUP = 3 | ||||||
|  | WIRETYPE_END_GROUP = 4 | ||||||
|  | WIRETYPE_FIXED32 = 5 | ||||||
|  | _WIRETYPE_MAX = 5 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Bounds for various integer types. | ||||||
|  | INT32_MAX = int((1 << 31) - 1) | ||||||
|  | INT32_MIN = int(-(1 << 31)) | ||||||
|  | UINT32_MAX = (1 << 32) - 1 | ||||||
|  | 
 | ||||||
|  | INT64_MAX = (1 << 63) - 1 | ||||||
|  | INT64_MIN = -(1 << 63) | ||||||
|  | UINT64_MAX = (1 << 64) - 1 | ||||||
|  | 
 | ||||||
|  | # "struct" format strings that will encode/decode the specified formats. | ||||||
|  | FORMAT_UINT32_LITTLE_ENDIAN = '<I' | ||||||
|  | FORMAT_UINT64_LITTLE_ENDIAN = '<Q' | ||||||
|  | FORMAT_FLOAT_LITTLE_ENDIAN = '<f' | ||||||
|  | FORMAT_DOUBLE_LITTLE_ENDIAN = '<d' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # We'll have to provide alternate implementations of AppendLittleEndian*() on | ||||||
|  | # any architectures where these checks fail. | ||||||
|  | if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4: | ||||||
|  |   raise AssertionError('Format "I" is not a 32-bit number.') | ||||||
|  | if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8: | ||||||
|  |   raise AssertionError('Format "Q" is not a 64-bit number.') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def PackTag(field_number, wire_type): | ||||||
|  |   """Returns an unsigned 32-bit integer that encodes the field number and | ||||||
|  |   wire type information in standard protocol message wire format. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field_number: Expected to be an integer in the range [1, 1 << 29) | ||||||
|  |     wire_type: One of the WIRETYPE_* constants. | ||||||
|  |   """ | ||||||
|  |   if not 0 <= wire_type <= _WIRETYPE_MAX: | ||||||
|  |     raise message.EncodeError('Unknown wire type: %d' % wire_type) | ||||||
|  |   return (field_number << TAG_TYPE_BITS) | wire_type | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UnpackTag(tag): | ||||||
|  |   """The inverse of PackTag().  Given an unsigned 32-bit number, | ||||||
|  |   returns a (field_number, wire_type) tuple. | ||||||
|  |   """ | ||||||
|  |   return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK) | ||||||
|  | 
 | ||||||
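|  | # Illustrative sketch (not upstream): field number 1 with WIRETYPE_VARINT | ||||||
|  | # packs to (1 << 3) | 0, the familiar leading 0x08 tag byte: | ||||||
|  | #   PackTag(1, WIRETYPE_VARINT)   ->  8 | ||||||
|  | #   UnpackTag(8)                  ->  (1, 0) | ||||||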
|  | 
 | ||||||
|  | def ZigZagEncode(value): | ||||||
|  |   """ZigZag Transform:  Encodes signed integers so that they can be | ||||||
|  |   effectively used with varint encoding.  See wire_format.h for | ||||||
|  |   more details. | ||||||
|  |   """ | ||||||
|  |   if value >= 0: | ||||||
|  |     return value << 1 | ||||||
|  |   return (value << 1) ^ (~0) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ZigZagDecode(value): | ||||||
|  |   """Inverse of ZigZagEncode().""" | ||||||
|  |   if not value & 0x1: | ||||||
|  |     return value >> 1 | ||||||
|  |   return (value >> 1) ^ (~0) | ||||||
|  | 
 | ||||||
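|  | # Illustrative note (not upstream): ZigZag interleaves signs so small | ||||||
|  | # magnitudes stay small as varints: | ||||||
|  | #   0 -> 0, -1 -> 1, 1 -> 2, -2 -> 3, 2 -> 4  (ZigZagDecode inverts each) | ||||||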
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The *ByteSize() functions below return the number of bytes required to | ||||||
|  | # serialize "field number + type" information and then serialize the value. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int32ByteSize(field_number, int32): | ||||||
|  |   return Int64ByteSize(field_number, int32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int32ByteSizeNoTag(int32): | ||||||
|  |   return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Int64ByteSize(field_number, int64): | ||||||
|  |   # Have to convert to uint before calling UInt64ByteSize(). | ||||||
|  |   return UInt64ByteSize(field_number, 0xffffffffffffffff & int64) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UInt32ByteSize(field_number, uint32): | ||||||
|  |   return UInt64ByteSize(field_number, uint32) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def UInt64ByteSize(field_number, uint64): | ||||||
|  |   return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SInt32ByteSize(field_number, int32): | ||||||
|  |   return UInt32ByteSize(field_number, ZigZagEncode(int32)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SInt64ByteSize(field_number, int64): | ||||||
|  |   return UInt64ByteSize(field_number, ZigZagEncode(int64)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Fixed32ByteSize(field_number, fixed32): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Fixed64ByteSize(field_number, fixed64): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SFixed32ByteSize(field_number, sfixed32): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def SFixed64ByteSize(field_number, sfixed64): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def FloatByteSize(field_number, flt): | ||||||
|  |   return TagByteSize(field_number) + 4 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def DoubleByteSize(field_number, double): | ||||||
|  |   return TagByteSize(field_number) + 8 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BoolByteSize(field_number, b): | ||||||
|  |   return TagByteSize(field_number) + 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def EnumByteSize(field_number, enum): | ||||||
|  |   return UInt32ByteSize(field_number, enum) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def StringByteSize(field_number, string): | ||||||
|  |   return BytesByteSize(field_number, string.encode('utf-8')) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def BytesByteSize(field_number, b): | ||||||
|  |   return (TagByteSize(field_number) | ||||||
|  |           + _VarUInt64ByteSizeNoTag(len(b)) | ||||||
|  |           + len(b)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GroupByteSize(field_number, message): | ||||||
|  |   return (2 * TagByteSize(field_number)  # START and END group. | ||||||
|  |           + message.ByteSize()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageByteSize(field_number, message): | ||||||
|  |   return (TagByteSize(field_number) | ||||||
|  |           + _VarUInt64ByteSizeNoTag(message.ByteSize()) | ||||||
|  |           + message.ByteSize()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageSetItemByteSize(field_number, msg): | ||||||
|  |   # First compute the sizes of the tags. | ||||||
|  |   # There are two tags for the beginning and end of the repeated group (that | ||||||
|  |   # is, field number 1), one tag with field number 2 (type_id) and one with | ||||||
|  |   # field number 3 (message). | ||||||
|  |   total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3)) | ||||||
|  | 
 | ||||||
|  |   # Add the number of bytes for type_id. | ||||||
|  |   total_size += _VarUInt64ByteSizeNoTag(field_number) | ||||||
|  | 
 | ||||||
|  |   message_size = msg.ByteSize() | ||||||
|  | 
 | ||||||
|  |   # The number of bytes for encoding the length of the message. | ||||||
|  |   total_size += _VarUInt64ByteSizeNoTag(message_size) | ||||||
|  | 
 | ||||||
|  |   # The size of the message. | ||||||
|  |   total_size += message_size | ||||||
|  |   return total_size | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def TagByteSize(field_number): | ||||||
|  |   """Returns the bytes required to serialize a tag with this field number.""" | ||||||
|  |   # Just pass in type 0, since the type won't affect the tag+type size. | ||||||
|  |   return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Private helper function for the *ByteSize() functions above. | ||||||
|  | 
 | ||||||
|  | def _VarUInt64ByteSizeNoTag(uint64): | ||||||
|  |   """Returns the number of bytes required to serialize a single varint | ||||||
|  |   using boundary value comparisons. (unrolled loop optimization -WPierce) | ||||||
|  |   uint64 must be unsigned. | ||||||
|  |   """ | ||||||
|  |   if uint64 <= 0x7f: return 1 | ||||||
|  |   if uint64 <= 0x3fff: return 2 | ||||||
|  |   if uint64 <= 0x1fffff: return 3 | ||||||
|  |   if uint64 <= 0xfffffff: return 4 | ||||||
|  |   if uint64 <= 0x7ffffffff: return 5 | ||||||
|  |   if uint64 <= 0x3ffffffffff: return 6 | ||||||
|  |   if uint64 <= 0x1ffffffffffff: return 7 | ||||||
|  |   if uint64 <= 0xffffffffffffff: return 8 | ||||||
|  |   if uint64 <= 0x7fffffffffffffff: return 9 | ||||||
|  |   if uint64 > UINT64_MAX: | ||||||
|  |     raise message.EncodeError('Value out of range: %d' % uint64) | ||||||
|  |   return 10 | ||||||
|  | 
 | ||||||
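|  | # Illustrative note (not upstream): each varint byte carries 7 payload | ||||||
|  | # bits, so the boundaries above are 2**7 - 1, 2**14 - 1, and so on: | ||||||
|  | #   _VarUInt64ByteSizeNoTag(127)  ->  1 | ||||||
|  | #   _VarUInt64ByteSizeNoTag(128)  ->  2 | ||||||
|  | #   _VarUInt64ByteSizeNoTag(300)  ->  2 | ||||||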
|  | 
 | ||||||
|  | NON_PACKABLE_TYPES = ( | ||||||
|  |   descriptor.FieldDescriptor.TYPE_STRING, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_GROUP, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_MESSAGE, | ||||||
|  |   descriptor.FieldDescriptor.TYPE_BYTES | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def IsTypePackable(field_type): | ||||||
|  |   """Return true iff packable = true is valid for fields of this type. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     field_type: a FieldDescriptor::Type value. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     True iff fields of this type are packable. | ||||||
|  |   """ | ||||||
|  |   return field_type not in NON_PACKABLE_TYPES | ||||||
							
								
								
									
912  lib/protobuf/json_format.py  (Normal file)
						|  | @ -0,0 +1,912 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains routines for printing protocol messages in JSON format. | ||||||
|  | 
 | ||||||
|  | Simple usage example: | ||||||
|  | 
 | ||||||
|  |   # Create a proto object and serialize it to a json format string. | ||||||
|  |   message = my_proto_pb2.MyMessage(foo='bar') | ||||||
|  |   json_string = json_format.MessageToJson(message) | ||||||
|  | 
 | ||||||
|  |   # Parse a json format string to proto object. | ||||||
|  |   message = json_format.Parse(json_string, my_proto_pb2.MyMessage()) | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'jieluo@google.com (Jie Luo)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | import base64 | ||||||
|  | from collections import OrderedDict | ||||||
|  | import json | ||||||
|  | import math | ||||||
|  | from operator import methodcaller | ||||||
|  | import re | ||||||
|  | import sys | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import type_checkers | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import symbol_database | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S' | ||||||
|  | _INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_UINT32, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_INT64, | ||||||
|  |                         descriptor.FieldDescriptor.CPPTYPE_UINT64]) | ||||||
|  | _INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64, | ||||||
|  |                           descriptor.FieldDescriptor.CPPTYPE_UINT64]) | ||||||
|  | _FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT, | ||||||
|  |                           descriptor.FieldDescriptor.CPPTYPE_DOUBLE]) | ||||||
|  | _INFINITY = 'Infinity' | ||||||
|  | _NEG_INFINITY = '-Infinity' | ||||||
|  | _NAN = 'NaN' | ||||||
|  | 
 | ||||||
|  | _UNPAIRED_SURROGATE_PATTERN = re.compile( | ||||||
|  |     u'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]') | ||||||
|  | 
 | ||||||
|  | _VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Error(Exception): | ||||||
|  |   """Top-level module error for json_format.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SerializeToJsonError(Error): | ||||||
|  |   """Thrown if serialization to JSON fails.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ParseError(Error): | ||||||
|  |   """Thrown in case of parsing error.""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageToJson( | ||||||
|  |     message, | ||||||
|  |     including_default_value_fields=False, | ||||||
|  |     preserving_proto_field_name=False, | ||||||
|  |     indent=2, | ||||||
|  |     sort_keys=False, | ||||||
|  |     use_integers_for_enums=False, | ||||||
|  |     descriptor_pool=None, | ||||||
|  |     float_precision=None, | ||||||
|  |     ensure_ascii=True): | ||||||
|  |   """Converts protobuf message to JSON format. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     message: The protocol buffers message instance to serialize. | ||||||
|  |     including_default_value_fields: If True, singular primitive fields, | ||||||
|  |         repeated fields, and map fields will always be serialized.  If | ||||||
|  |         False, only serialize non-empty fields.  Singular message fields | ||||||
|  |         and oneof fields are not affected by this option. | ||||||
|  |     preserving_proto_field_name: If True, use the original proto field | ||||||
|  |         names as defined in the .proto file. If False, convert the field | ||||||
|  |         names to lowerCamelCase. | ||||||
|  |     indent: The JSON object will be pretty-printed with this indent level. | ||||||
|  |         An indent level of 0 or negative will only insert newlines. | ||||||
|  |     sort_keys: If True, then the output will be sorted by field names. | ||||||
|  |     use_integers_for_enums: If true, print integers instead of enum names. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |         default. | ||||||
|  |     float_precision: If set, use this to specify float field valid digits. | ||||||
|  |     ensure_ascii: If True, strings with non-ASCII characters are escaped. | ||||||
|  |         If False, Unicode strings are returned unchanged. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A string containing the JSON formatted protocol buffer message. | ||||||
|  |   """ | ||||||
|  |   printer = _Printer( | ||||||
|  |       including_default_value_fields, | ||||||
|  |       preserving_proto_field_name, | ||||||
|  |       use_integers_for_enums, | ||||||
|  |       descriptor_pool, | ||||||
|  |       float_precision=float_precision) | ||||||
|  |   return printer.ToJsonString(message, indent, sort_keys, ensure_ascii) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MessageToDict( | ||||||
|  |     message, | ||||||
|  |     including_default_value_fields=False, | ||||||
|  |     preserving_proto_field_name=False, | ||||||
|  |     use_integers_for_enums=False, | ||||||
|  |     descriptor_pool=None, | ||||||
|  |     float_precision=None): | ||||||
|  |   """Converts protobuf message to a dictionary. | ||||||
|  | 
 | ||||||
|  |   When the dictionary is encoded to JSON, it conforms to proto3 JSON spec. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     message: The protocol buffers message instance to serialize. | ||||||
|  |     including_default_value_fields: If True, singular primitive fields, | ||||||
|  |         repeated fields, and map fields will always be serialized.  If | ||||||
|  |         False, only serialize non-empty fields.  Singular message fields | ||||||
|  |         and oneof fields are not affected by this option. | ||||||
|  |     preserving_proto_field_name: If True, use the original proto field | ||||||
|  |         names as defined in the .proto file. If False, convert the field | ||||||
|  |         names to lowerCamelCase. | ||||||
|  |     use_integers_for_enums: If true, print integers instead of enum names. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |         default. | ||||||
|  |     float_precision: If set, use this to specify float field valid digits. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A dict representation of the protocol buffer message. | ||||||
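|  |  | ||||||
|  |   Example: | ||||||
|  |     A minimal sketch with the same hypothetical Person class: | ||||||
|  |  | ||||||
|  |       MessageToDict(Person(name='Ada')) | ||||||
|  |       # {'name': 'Ada'} | ||||||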
|  |   """ | ||||||
|  |   printer = _Printer( | ||||||
|  |       including_default_value_fields, | ||||||
|  |       preserving_proto_field_name, | ||||||
|  |       use_integers_for_enums, | ||||||
|  |       descriptor_pool, | ||||||
|  |       float_precision=float_precision) | ||||||
|  |   # pylint: disable=protected-access | ||||||
|  |   return printer._MessageToJsonObject(message) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _IsMapEntry(field): | ||||||
|  |   return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and | ||||||
|  |           field.message_type.has_options and | ||||||
|  |           field.message_type.GetOptions().map_entry) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _Printer(object): | ||||||
|  |   """JSON format printer for protocol message.""" | ||||||
|  | 
 | ||||||
|  |   def __init__( | ||||||
|  |       self, | ||||||
|  |       including_default_value_fields=False, | ||||||
|  |       preserving_proto_field_name=False, | ||||||
|  |       use_integers_for_enums=False, | ||||||
|  |       descriptor_pool=None, | ||||||
|  |       float_precision=None): | ||||||
|  |     self.including_default_value_fields = including_default_value_fields | ||||||
|  |     self.preserving_proto_field_name = preserving_proto_field_name | ||||||
|  |     self.use_integers_for_enums = use_integers_for_enums | ||||||
|  |     self.descriptor_pool = descriptor_pool | ||||||
|  |     if float_precision: | ||||||
|  |       self.float_format = '.{}g'.format(float_precision) | ||||||
|  |     else: | ||||||
|  |       self.float_format = None | ||||||
|  | 
 | ||||||
|  |   def ToJsonString(self, message, indent, sort_keys, ensure_ascii): | ||||||
|  |     js = self._MessageToJsonObject(message) | ||||||
|  |     return json.dumps( | ||||||
|  |         js, indent=indent, sort_keys=sort_keys, ensure_ascii=ensure_ascii) | ||||||
|  | 
 | ||||||
|  |   def _MessageToJsonObject(self, message): | ||||||
|  |     """Converts message to an object according to Proto3 JSON Specification.""" | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       return self._WrapperMessageToJsonObject(message) | ||||||
|  |     if full_name in _WKTJSONMETHODS: | ||||||
|  |       return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self) | ||||||
|  |     js = {} | ||||||
|  |     return self._RegularMessageToJsonObject(message, js) | ||||||
|  | 
 | ||||||
|  |   def _RegularMessageToJsonObject(self, message, js): | ||||||
|  |     """Converts normal message according to Proto3 JSON Specification.""" | ||||||
|  |     fields = message.ListFields() | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |       for field, value in fields: | ||||||
|  |         if self.preserving_proto_field_name: | ||||||
|  |           name = field.name | ||||||
|  |         else: | ||||||
|  |           name = field.json_name | ||||||
|  |         if _IsMapEntry(field): | ||||||
|  |           # Convert a map field. | ||||||
|  |           v_field = field.message_type.fields_by_name['value'] | ||||||
|  |           js_map = {} | ||||||
|  |           for key in value: | ||||||
|  |             if isinstance(key, bool): | ||||||
|  |               if key: | ||||||
|  |                 recorded_key = 'true' | ||||||
|  |               else: | ||||||
|  |                 recorded_key = 'false' | ||||||
|  |             else: | ||||||
|  |               recorded_key = str(key) | ||||||
|  |             js_map[recorded_key] = self._FieldToJsonObject( | ||||||
|  |                 v_field, value[key]) | ||||||
|  |           js[name] = js_map | ||||||
|  |         elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |           # Convert a repeated field. | ||||||
|  |           js[name] = [self._FieldToJsonObject(field, k) | ||||||
|  |                       for k in value] | ||||||
|  |         elif field.is_extension: | ||||||
|  |           name = '[%s]' % field.full_name | ||||||
|  |           js[name] = self._FieldToJsonObject(field, value) | ||||||
|  |         else: | ||||||
|  |           js[name] = self._FieldToJsonObject(field, value) | ||||||
|  | 
 | ||||||
|  |       # Serialize default value if including_default_value_fields is True. | ||||||
|  |       if self.including_default_value_fields: | ||||||
|  |         message_descriptor = message.DESCRIPTOR | ||||||
|  |         for field in message_descriptor.fields: | ||||||
|  |           # Singular message fields and oneof fields will not be affected. | ||||||
|  |           if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and | ||||||
|  |                field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or | ||||||
|  |               field.containing_oneof): | ||||||
|  |             continue | ||||||
|  |           if self.preserving_proto_field_name: | ||||||
|  |             name = field.name | ||||||
|  |           else: | ||||||
|  |             name = field.json_name | ||||||
|  |           if name in js: | ||||||
|  |             # Skip the field which has been serialized already. | ||||||
|  |             continue | ||||||
|  |           if _IsMapEntry(field): | ||||||
|  |             js[name] = {} | ||||||
|  |           elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |             js[name] = [] | ||||||
|  |           else: | ||||||
|  |             js[name] = self._FieldToJsonObject(field, field.default_value) | ||||||
|  | 
 | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise SerializeToJsonError( | ||||||
|  |           'Failed to serialize {0} field: {1}.'.format(field.name, e)) | ||||||
|  | 
 | ||||||
|  |     return js | ||||||
|  | 
 | ||||||
|  |   def _FieldToJsonObject(self, field, value): | ||||||
|  |     """Converts field value according to Proto3 JSON Specification.""" | ||||||
|  |     if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |       return self._MessageToJsonObject(value) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |       if self.use_integers_for_enums: | ||||||
|  |         return value | ||||||
|  |       if field.enum_type.full_name == 'google.protobuf.NullValue': | ||||||
|  |         return None | ||||||
|  |       enum_value = field.enum_type.values_by_number.get(value, None) | ||||||
|  |       if enum_value is not None: | ||||||
|  |         return enum_value.name | ||||||
|  |       else: | ||||||
|  |         if field.file.syntax == 'proto3': | ||||||
|  |           return value | ||||||
|  |         raise SerializeToJsonError('Enum field contains an integer value ' | ||||||
|  |                                    'which cannot be mapped to an enum value.') | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: | ||||||
|  |       if field.type == descriptor.FieldDescriptor.TYPE_BYTES: | ||||||
|  |         # Use base64 data encoding for bytes. | ||||||
|  |         return base64.b64encode(value).decode('utf-8') | ||||||
|  |       else: | ||||||
|  |         return value | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: | ||||||
|  |       return bool(value) | ||||||
|  |     elif field.cpp_type in _INT64_TYPES: | ||||||
|  |       return str(value) | ||||||
|  |     elif field.cpp_type in _FLOAT_TYPES: | ||||||
|  |       if math.isinf(value): | ||||||
|  |         if value < 0.0: | ||||||
|  |           return _NEG_INFINITY | ||||||
|  |         else: | ||||||
|  |           return _INFINITY | ||||||
|  |       if math.isnan(value): | ||||||
|  |         return _NAN | ||||||
|  |       if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: | ||||||
|  |         if self.float_format: | ||||||
|  |           return float(format(value, self.float_format)) | ||||||
|  |         else: | ||||||
|  |           return type_checkers.ToShortestFloat(value) | ||||||
|  | 
 | ||||||
|  |     return value | ||||||
|  | 
 | ||||||
|  |   def _AnyMessageToJsonObject(self, message): | ||||||
|  |     """Converts Any message according to Proto3 JSON Specification.""" | ||||||
|  |     if not message.ListFields(): | ||||||
|  |       return {} | ||||||
|  |     # Must print @type first, use OrderedDict instead of {} | ||||||
|  |     js = OrderedDict() | ||||||
|  |     type_url = message.type_url | ||||||
|  |     js['@type'] = type_url | ||||||
|  |     sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) | ||||||
|  |     sub_message.ParseFromString(message.value) | ||||||
|  |     message_descriptor = sub_message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       js['value'] = self._WrapperMessageToJsonObject(sub_message) | ||||||
|  |       return js | ||||||
|  |     if full_name in _WKTJSONMETHODS: | ||||||
|  |       js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0], | ||||||
|  |                                  sub_message)(self) | ||||||
|  |       return js | ||||||
|  |     return self._RegularMessageToJsonObject(sub_message, js) | ||||||
|  | 
 | ||||||
|  |   def _GenericMessageToJsonObject(self, message): | ||||||
|  |     """Converts message according to Proto3 JSON Specification.""" | ||||||
|  |     # Duration, Timestamp and FieldMask have a ToJsonString method to do | ||||||
|  |     # the conversion. Users can also call the method directly. | ||||||
|  |     return message.ToJsonString() | ||||||
|  | 
 | ||||||
|  |   def _ValueMessageToJsonObject(self, message): | ||||||
|  |     """Converts Value message according to Proto3 JSON Specification.""" | ||||||
|  |     which = message.WhichOneof('kind') | ||||||
|  |     # If the Value message is not set, treat it as null_value when | ||||||
|  |     # serializing to JSON. Parsing the result back will therefore not | ||||||
|  |     # reproduce the original message. | ||||||
|  |     if which is None or which == 'null_value': | ||||||
|  |       return None | ||||||
|  |     if which == 'list_value': | ||||||
|  |       return self._ListValueMessageToJsonObject(message.list_value) | ||||||
|  |     if which == 'struct_value': | ||||||
|  |       value = message.struct_value | ||||||
|  |     else: | ||||||
|  |       value = getattr(message, which) | ||||||
|  |     oneof_descriptor = message.DESCRIPTOR.fields_by_name[which] | ||||||
|  |     return self._FieldToJsonObject(oneof_descriptor, value) | ||||||
|  | 
 | ||||||
|  |   def _ListValueMessageToJsonObject(self, message): | ||||||
|  |     """Converts ListValue message according to Proto3 JSON Specification.""" | ||||||
|  |     return [self._ValueMessageToJsonObject(value) | ||||||
|  |             for value in message.values] | ||||||
|  | 
 | ||||||
|  |   def _StructMessageToJsonObject(self, message): | ||||||
|  |     """Converts Struct message according to Proto3 JSON Specification.""" | ||||||
|  |     fields = message.fields | ||||||
|  |     ret = {} | ||||||
|  |     for key in fields: | ||||||
|  |       ret[key] = self._ValueMessageToJsonObject(fields[key]) | ||||||
|  |     return ret | ||||||
|  | 
 | ||||||
|  |   def _WrapperMessageToJsonObject(self, message): | ||||||
|  |     return self._FieldToJsonObject( | ||||||
|  |         message.DESCRIPTOR.fields_by_name['value'], message.value) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _IsWrapperMessage(message_descriptor): | ||||||
|  |   return message_descriptor.file.name == 'google/protobuf/wrappers.proto' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
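|  | # Passed as object_pairs_hook to json.loads() in Parse() below, so a | ||||||
|  | # payload such as '{"a": 1, "a": 2}' raises ParseError instead of | ||||||
|  | # silently keeping the last value. | ||||||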
|  | def _DuplicateChecker(js): | ||||||
|  |   result = {} | ||||||
|  |   for name, value in js: | ||||||
|  |     if name in result: | ||||||
|  |       raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name)) | ||||||
|  |     result[name] = value | ||||||
|  |   return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _CreateMessageFromTypeUrl(type_url, descriptor_pool): | ||||||
|  |   """Creates a message from a type URL.""" | ||||||
|  |   db = symbol_database.Default() | ||||||
|  |   pool = db.pool if descriptor_pool is None else descriptor_pool | ||||||
|  |   type_name = type_url.split('/')[-1] | ||||||
|  |   try: | ||||||
|  |     message_descriptor = pool.FindMessageTypeByName(type_name) | ||||||
|  |   except KeyError: | ||||||
|  |     raise TypeError( | ||||||
|  |         'Can not find message descriptor by type_url: {0}'.format(type_url)) | ||||||
|  |   message_class = db.GetPrototype(message_descriptor) | ||||||
|  |   return message_class() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Parse(text, | ||||||
|  |           message, | ||||||
|  |           ignore_unknown_fields=False, | ||||||
|  |           descriptor_pool=None, | ||||||
|  |           max_recursion_depth=100): | ||||||
|  |   """Parses a JSON representation of a protocol message into a message. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: Message JSON representation. | ||||||
|  |     message: A protocol buffer message to merge into. | ||||||
|  |     ignore_unknown_fields: If True, do not raise errors for unknown fields. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |       default. | ||||||
|  |     max_recursion_depth: The maximum recursion depth of the JSON message | ||||||
|  |       to be deserialized. JSON messages nested deeper than this fail to | ||||||
|  |       parse. Defaults to 100. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The same message passed as argument. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: On JSON parsing problems. | ||||||
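|  |  | ||||||
|  |   Example: | ||||||
|  |     A minimal sketch, assuming a hypothetical generated class Person | ||||||
|  |     with a string field "name": | ||||||
|  |  | ||||||
|  |       person = Parse('{"name": "Ada"}', Person()) | ||||||
|  |       person.name                        # 'Ada' | ||||||
|  |       Parse('{"x": 1}', Person())        # ParseError: unknown field | ||||||
|  |       Parse('{"x": 1}', Person(), ignore_unknown_fields=True)  # ok | ||||||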
|  |   """ | ||||||
|  |   if not isinstance(text, str): | ||||||
|  |     text = text.decode('utf-8') | ||||||
|  |   try: | ||||||
|  |     js = json.loads(text, object_pairs_hook=_DuplicateChecker) | ||||||
|  |   except ValueError as e: | ||||||
|  |     raise ParseError('Failed to load JSON: {0}.'.format(str(e))) | ||||||
|  |   return ParseDict(js, message, ignore_unknown_fields, descriptor_pool, | ||||||
|  |                    max_recursion_depth) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def ParseDict(js_dict, | ||||||
|  |               message, | ||||||
|  |               ignore_unknown_fields=False, | ||||||
|  |               descriptor_pool=None, | ||||||
|  |               max_recursion_depth=100): | ||||||
|  |   """Parses a JSON dictionary representation into a message. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     js_dict: Dict representation of a JSON message. | ||||||
|  |     message: A protocol buffer message to merge into. | ||||||
|  |     ignore_unknown_fields: If True, do not raise errors for unknown fields. | ||||||
|  |     descriptor_pool: A Descriptor Pool for resolving types. If None use the | ||||||
|  |       default. | ||||||
|  |     max_recursion_depth: The maximum recursion depth of the JSON message | ||||||
|  |       to be deserialized. JSON messages nested deeper than this fail to | ||||||
|  |       parse. Defaults to 100. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The same message passed as argument. | ||||||
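|  |  | ||||||
|  |   Example: | ||||||
|  |     A minimal sketch with the same hypothetical Person class: | ||||||
|  |  | ||||||
|  |       person = ParseDict({'name': 'Ada'}, Person()) | ||||||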
|  |   """ | ||||||
|  |   parser = _Parser(ignore_unknown_fields, descriptor_pool, max_recursion_depth) | ||||||
|  |   parser.ConvertMessage(js_dict, message, '') | ||||||
|  |   return message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _INT_OR_FLOAT = (int, float) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _Parser(object): | ||||||
|  |   """JSON format parser for protocol message.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, ignore_unknown_fields, descriptor_pool, | ||||||
|  |                max_recursion_depth): | ||||||
|  |     self.ignore_unknown_fields = ignore_unknown_fields | ||||||
|  |     self.descriptor_pool = descriptor_pool | ||||||
|  |     self.max_recursion_depth = max_recursion_depth | ||||||
|  |     self.recursion_depth = 0 | ||||||
|  | 
 | ||||||
|  |   def ConvertMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON object into a message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A JSON object. | ||||||
|  |       message: A WKT or regular protocol message to record the data. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of conversion problems. | ||||||
|  |     """ | ||||||
|  |     self.recursion_depth += 1 | ||||||
|  |     if self.recursion_depth > self.max_recursion_depth: | ||||||
|  |       raise ParseError('Message too deep. Max recursion depth is {0}'.format( | ||||||
|  |           self.max_recursion_depth)) | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if not path: | ||||||
|  |       path = message_descriptor.name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       self._ConvertWrapperMessage(value, message, path) | ||||||
|  |     elif full_name in _WKTJSONMETHODS: | ||||||
|  |       methodcaller(_WKTJSONMETHODS[full_name][1], value, message, path)(self) | ||||||
|  |     else: | ||||||
|  |       self._ConvertFieldValuePair(value, message, path) | ||||||
|  |     self.recursion_depth -= 1 | ||||||
|  | 
 | ||||||
|  |   def _ConvertFieldValuePair(self, js, message, path): | ||||||
|  |     """Convert field value pairs into regular message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       js: A JSON object containing the field/value pairs to convert. | ||||||
|  |       message: A regular protocol message to record the data. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of problems converting. | ||||||
|  |     """ | ||||||
|  |     names = [] | ||||||
|  |     message_descriptor = message.DESCRIPTOR | ||||||
|  |     fields_by_json_name = dict((f.json_name, f) | ||||||
|  |                                for f in message_descriptor.fields) | ||||||
|  |     for name in js: | ||||||
|  |       try: | ||||||
|  |         field = fields_by_json_name.get(name, None) | ||||||
|  |         if not field: | ||||||
|  |           field = message_descriptor.fields_by_name.get(name, None) | ||||||
|  |         if not field and _VALID_EXTENSION_NAME.match(name): | ||||||
|  |           if not message_descriptor.is_extendable: | ||||||
|  |             raise ParseError( | ||||||
|  |                 'Message type {0} does not have extensions at {1}'.format( | ||||||
|  |                     message_descriptor.full_name, path)) | ||||||
|  |           identifier = name[1:-1]  # strip [] brackets | ||||||
|  |           # pylint: disable=protected-access | ||||||
|  |           field = message.Extensions._FindExtensionByName(identifier) | ||||||
|  |           # pylint: enable=protected-access | ||||||
|  |           if not field: | ||||||
|  |             # Try looking for extension by the message type name, dropping the | ||||||
|  |             # field name following the final . separator in full_name. | ||||||
|  |             identifier = '.'.join(identifier.split('.')[:-1]) | ||||||
|  |             # pylint: disable=protected-access | ||||||
|  |             field = message.Extensions._FindExtensionByName(identifier) | ||||||
|  |             # pylint: enable=protected-access | ||||||
|  |         if not field: | ||||||
|  |           if self.ignore_unknown_fields: | ||||||
|  |             continue | ||||||
|  |           raise ParseError( | ||||||
|  |               ('Message type "{0}" has no field named "{1}" at "{2}".\n' | ||||||
|  |                ' Available Fields(except extensions): "{3}"').format( | ||||||
|  |                    message_descriptor.full_name, name, path, | ||||||
|  |                    [f.json_name for f in message_descriptor.fields])) | ||||||
|  |         if name in names: | ||||||
|  |           raise ParseError('Message type "{0}" should not have multiple ' | ||||||
|  |                            '"{1}" fields at "{2}".'.format( | ||||||
|  |                                message.DESCRIPTOR.full_name, name, path)) | ||||||
|  |         names.append(name) | ||||||
|  |         value = js[name] | ||||||
|  |         # Check no other oneof field is parsed. | ||||||
|  |         if field.containing_oneof is not None and value is not None: | ||||||
|  |           oneof_name = field.containing_oneof.name | ||||||
|  |           if oneof_name in names: | ||||||
|  |             raise ParseError('Message type "{0}" should not have multiple ' | ||||||
|  |                              '"{1}" oneof fields at "{2}".'.format( | ||||||
|  |                                  message.DESCRIPTOR.full_name, oneof_name, | ||||||
|  |                                  path)) | ||||||
|  |           names.append(oneof_name) | ||||||
|  | 
 | ||||||
|  |         if value is None: | ||||||
|  |           if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE | ||||||
|  |               and field.message_type.full_name == 'google.protobuf.Value'): | ||||||
|  |             sub_message = getattr(message, field.name) | ||||||
|  |             sub_message.null_value = 0 | ||||||
|  |           elif (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM | ||||||
|  |                 and field.enum_type.full_name == 'google.protobuf.NullValue'): | ||||||
|  |             setattr(message, field.name, 0) | ||||||
|  |           else: | ||||||
|  |             message.ClearField(field.name) | ||||||
|  |           continue | ||||||
|  | 
 | ||||||
|  |         # Parse field value. | ||||||
|  |         if _IsMapEntry(field): | ||||||
|  |           message.ClearField(field.name) | ||||||
|  |           self._ConvertMapFieldValue(value, message, field, | ||||||
|  |                                      '{0}.{1}'.format(path, name)) | ||||||
|  |         elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED: | ||||||
|  |           message.ClearField(field.name) | ||||||
|  |           if not isinstance(value, list): | ||||||
|  |             raise ParseError('repeated field {0} must be in [] which is ' | ||||||
|  |                              '{1} at {2}'.format(name, value, path)) | ||||||
|  |           if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |             # Repeated message field. | ||||||
|  |             for index, item in enumerate(value): | ||||||
|  |               sub_message = getattr(message, field.name).add() | ||||||
|  |               # None is a null_value in Value. | ||||||
|  |               if (item is None and | ||||||
|  |                   sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'): | ||||||
|  |                 raise ParseError('null is not allowed to be used as an element' | ||||||
|  |                                  ' in a repeated field at {0}.{1}[{2}]'.format( | ||||||
|  |                                      path, name, index)) | ||||||
|  |               self.ConvertMessage(item, sub_message, | ||||||
|  |                                   '{0}.{1}[{2}]'.format(path, name, index)) | ||||||
|  |           else: | ||||||
|  |             # Repeated scalar field. | ||||||
|  |             for index, item in enumerate(value): | ||||||
|  |               if item is None: | ||||||
|  |                 raise ParseError('null is not allowed to be used as an element' | ||||||
|  |                                  ' in a repeated field at {0}.{1}[{2}]'.format( | ||||||
|  |                                      path, name, index)) | ||||||
|  |               getattr(message, field.name).append( | ||||||
|  |                   _ConvertScalarFieldValue( | ||||||
|  |                       item, field, '{0}.{1}[{2}]'.format(path, name, index))) | ||||||
|  |         elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |           if field.is_extension: | ||||||
|  |             sub_message = message.Extensions[field] | ||||||
|  |           else: | ||||||
|  |             sub_message = getattr(message, field.name) | ||||||
|  |           sub_message.SetInParent() | ||||||
|  |           self.ConvertMessage(value, sub_message, '{0}.{1}'.format(path, name)) | ||||||
|  |         else: | ||||||
|  |           if field.is_extension: | ||||||
|  |             message.Extensions[field] = _ConvertScalarFieldValue( | ||||||
|  |                 value, field, '{0}.{1}'.format(path, name)) | ||||||
|  |           else: | ||||||
|  |             setattr( | ||||||
|  |                 message, field.name, | ||||||
|  |                 _ConvertScalarFieldValue(value, field, | ||||||
|  |                                          '{0}.{1}'.format(path, name))) | ||||||
|  |       except ParseError as e: | ||||||
|  |         if field and field.containing_oneof is None: | ||||||
|  |           raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  |         else: | ||||||
|  |           raise ParseError(str(e)) | ||||||
|  |       except ValueError as e: | ||||||
|  |         raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  |       except TypeError as e: | ||||||
|  |         raise ParseError('Failed to parse {0} field: {1}.'.format(name, e)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertAnyMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Any message.""" | ||||||
|  |     if isinstance(value, dict) and not value: | ||||||
|  |       return | ||||||
|  |     try: | ||||||
|  |       type_url = value['@type'] | ||||||
|  |     except KeyError: | ||||||
|  |       raise ParseError( | ||||||
|  |           '@type is missing when parsing any message at {0}'.format(path)) | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |       sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool) | ||||||
|  |     except TypeError as e: | ||||||
|  |       raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  |     message_descriptor = sub_message.DESCRIPTOR | ||||||
|  |     full_name = message_descriptor.full_name | ||||||
|  |     if _IsWrapperMessage(message_descriptor): | ||||||
|  |       self._ConvertWrapperMessage(value['value'], sub_message, | ||||||
|  |                                   '{0}.value'.format(path)) | ||||||
|  |     elif full_name in _WKTJSONMETHODS: | ||||||
|  |       methodcaller(_WKTJSONMETHODS[full_name][1], value['value'], sub_message, | ||||||
|  |                    '{0}.value'.format(path))( | ||||||
|  |                        self) | ||||||
|  |     else: | ||||||
|  |       del value['@type'] | ||||||
|  |       self._ConvertFieldValuePair(value, sub_message, path) | ||||||
|  |       value['@type'] = type_url | ||||||
|  |     # Sets Any message | ||||||
|  |     message.value = sub_message.SerializeToString() | ||||||
|  |     message.type_url = type_url | ||||||
|  | 
 | ||||||
|  |   def _ConvertGenericMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into message with FromJsonString.""" | ||||||
|  |     # Duration, Timestamp, FieldMask have a FromJsonString method to do the | ||||||
|  |     # conversion. Users can also call the method directly. | ||||||
|  |     try: | ||||||
|  |       message.FromJsonString(value) | ||||||
|  |     except ValueError as e: | ||||||
|  |       raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertValueMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Value message.""" | ||||||
|  |     if isinstance(value, dict): | ||||||
|  |       self._ConvertStructMessage(value, message.struct_value, path) | ||||||
|  |     elif isinstance(value, list): | ||||||
|  |       self._ConvertListValueMessage(value, message.list_value, path) | ||||||
|  |     elif value is None: | ||||||
|  |       message.null_value = 0 | ||||||
|  |     elif isinstance(value, bool): | ||||||
|  |       message.bool_value = value | ||||||
|  |     elif isinstance(value, str): | ||||||
|  |       message.string_value = value | ||||||
|  |     elif isinstance(value, _INT_OR_FLOAT): | ||||||
|  |       message.number_value = value | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Value {0} has unexpected type {1} at {2}'.format( | ||||||
|  |           value, type(value), path)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertListValueMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into ListValue message.""" | ||||||
|  |     if not isinstance(value, list): | ||||||
|  |       raise ParseError('ListValue must be in [] which is {0} at {1}'.format( | ||||||
|  |           value, path)) | ||||||
|  |     message.ClearField('values') | ||||||
|  |     for index, item in enumerate(value): | ||||||
|  |       self._ConvertValueMessage(item, message.values.add(), | ||||||
|  |                                 '{0}[{1}]'.format(path, index)) | ||||||
|  | 
 | ||||||
|  |   def _ConvertStructMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Struct message.""" | ||||||
|  |     if not isinstance(value, dict): | ||||||
|  |       raise ParseError('Struct must be in a dict which is {0} at {1}'.format( | ||||||
|  |           value, path)) | ||||||
|  |     # Clear will mark the struct as modified so it will be created even if | ||||||
|  |     # there are no values. | ||||||
|  |     message.Clear() | ||||||
|  |     for key in value: | ||||||
|  |       self._ConvertValueMessage(value[key], message.fields[key], | ||||||
|  |                                 '{0}.{1}'.format(path, key)) | ||||||
|  |     return | ||||||
|  | 
 | ||||||
|  |   def _ConvertWrapperMessage(self, value, message, path): | ||||||
|  |     """Convert a JSON representation into Wrapper message.""" | ||||||
|  |     field = message.DESCRIPTOR.fields_by_name['value'] | ||||||
|  |     setattr( | ||||||
|  |         message, 'value', | ||||||
|  |         _ConvertScalarFieldValue(value, field, path='{0}.value'.format(path))) | ||||||
|  | 
 | ||||||
|  |   def _ConvertMapFieldValue(self, value, message, field, path): | ||||||
|  |     """Convert map field value for a message map field. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       value: A JSON object containing the map field value to convert. | ||||||
|  |       message: A protocol message to record the converted data. | ||||||
|  |       field: The descriptor of the map field to be converted. | ||||||
|  |       path: parent path to log parse error info. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ParseError: In case of conversion problems. | ||||||
|  |     """ | ||||||
|  |     if not isinstance(value, dict): | ||||||
|  |       raise ParseError( | ||||||
|  |           'Map field {0} must be in a dict which is {1} at {2}'.format( | ||||||
|  |               field.name, value, path)) | ||||||
|  |     key_field = field.message_type.fields_by_name['key'] | ||||||
|  |     value_field = field.message_type.fields_by_name['value'] | ||||||
|  |     for key in value: | ||||||
|  |       key_value = _ConvertScalarFieldValue(key, key_field, | ||||||
|  |                                            '{0}.key'.format(path), True) | ||||||
|  |       if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE: | ||||||
|  |         self.ConvertMessage(value[key], | ||||||
|  |                             getattr(message, field.name)[key_value], | ||||||
|  |                             '{0}[{1}]'.format(path, key_value)) | ||||||
|  |       else: | ||||||
|  |         getattr(message, field.name)[key_value] = _ConvertScalarFieldValue( | ||||||
|  |             value[key], value_field, path='{0}[{1}]'.format(path, key_value)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertScalarFieldValue(value, field, path, require_str=False): | ||||||
|  |   """Convert a single scalar field value. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar JSON value to convert to the field's type. | ||||||
|  |     field: The descriptor of the field to convert. | ||||||
|  |     path: parent path to log parse error info. | ||||||
|  |     require_str: If True, the field value must be a str. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The converted scalar field value | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: In case of conversion problems. | ||||||
|  |   """ | ||||||
|  |   try: | ||||||
|  |     if field.cpp_type in _INT_TYPES: | ||||||
|  |       return _ConvertInteger(value) | ||||||
|  |     elif field.cpp_type in _FLOAT_TYPES: | ||||||
|  |       return _ConvertFloat(value, field) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL: | ||||||
|  |       return _ConvertBool(value, require_str) | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING: | ||||||
|  |       if field.type == descriptor.FieldDescriptor.TYPE_BYTES: | ||||||
|  |         if isinstance(value, str): | ||||||
|  |           encoded = value.encode('utf-8') | ||||||
|  |         else: | ||||||
|  |           encoded = value | ||||||
|  |         # Pad with '=' to a multiple of 4; surplus padding is | ||||||
|  |         # ignored by the base64 decoder. | ||||||
|  |         padded_value = encoded + b'=' * (4 - len(encoded) % 4) | ||||||
|  |         return base64.urlsafe_b64decode(padded_value) | ||||||
|  |       else: | ||||||
|  |         # Checking for unpaired surrogates appears to be unreliable, | ||||||
|  |         # depending on the specific Python version, so we check manually. | ||||||
|  |         if _UNPAIRED_SURROGATE_PATTERN.search(value): | ||||||
|  |           raise ParseError('Unpaired surrogate') | ||||||
|  |         return value | ||||||
|  |     elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM: | ||||||
|  |       # Convert an enum value. | ||||||
|  |       enum_value = field.enum_type.values_by_name.get(value, None) | ||||||
|  |       if enum_value is None: | ||||||
|  |         try: | ||||||
|  |           number = int(value) | ||||||
|  |           enum_value = field.enum_type.values_by_number.get(number, None) | ||||||
|  |         except ValueError: | ||||||
|  |           raise ParseError('Invalid enum value {0} for enum type {1}'.format( | ||||||
|  |               value, field.enum_type.full_name)) | ||||||
|  |         if enum_value is None: | ||||||
|  |           if field.file.syntax == 'proto3': | ||||||
|  |             # Proto3 accepts unknown enums. | ||||||
|  |             return number | ||||||
|  |           raise ParseError('Invalid enum value {0} for enum type {1}'.format( | ||||||
|  |               value, field.enum_type.full_name)) | ||||||
|  |       return enum_value.number | ||||||
|  |   except ParseError as e: | ||||||
|  |     raise ParseError('{0} at {1}'.format(e, path)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertInteger(value): | ||||||
|  |   """Convert an integer. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar value to convert. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The integer value. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: If an integer couldn't be consumed. | ||||||
|  |   """ | ||||||
|  |   if isinstance(value, float) and not value.is_integer(): | ||||||
|  |     raise ParseError('Couldn\'t parse integer: {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if isinstance(value, str) and value.find(' ') != -1: | ||||||
|  |     raise ParseError('Couldn\'t parse integer: "{0}"'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if isinstance(value, bool): | ||||||
|  |     raise ParseError('Bool value {0} is not acceptable for ' | ||||||
|  |                      'integer field'.format(value)) | ||||||
|  | 
 | ||||||
|  |   return int(value) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertFloat(value, field): | ||||||
|  |   """Convert an floating point number.""" | ||||||
|  |   if isinstance(value, float): | ||||||
|  |     if math.isnan(value): | ||||||
|  |       raise ParseError('Couldn\'t parse NaN, use quoted "NaN" instead') | ||||||
|  |     if math.isinf(value): | ||||||
|  |       if value > 0: | ||||||
|  |         raise ParseError('Couldn\'t parse Infinity or value too large, ' | ||||||
|  |                          'use quoted "Infinity" instead') | ||||||
|  |       else: | ||||||
|  |         raise ParseError('Couldn\'t parse -Infinity or value too small, ' | ||||||
|  |                          'use quoted "-Infinity" instead') | ||||||
|  |     if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT: | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if value > type_checkers._FLOAT_MAX: | ||||||
|  |         raise ParseError('Float value too large') | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       if value < type_checkers._FLOAT_MIN: | ||||||
|  |         raise ParseError('Float value too small') | ||||||
|  |   if value == 'nan': | ||||||
|  |     raise ParseError('Couldn\'t parse float "nan", use "NaN" instead') | ||||||
|  |   try: | ||||||
|  |     # Assume Python compatible syntax. | ||||||
|  |     return float(value) | ||||||
|  |   except ValueError: | ||||||
|  |     # Check alternative spellings. | ||||||
|  |     if value == _NEG_INFINITY: | ||||||
|  |       return float('-inf') | ||||||
|  |     elif value == _INFINITY: | ||||||
|  |       return float('inf') | ||||||
|  |     elif value == _NAN: | ||||||
|  |       return float('nan') | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Couldn\'t parse float: {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _ConvertBool(value, require_str): | ||||||
|  |   """Convert a boolean value. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     value: A scalar value to convert. | ||||||
|  |     require_str: If True, value must be a str. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     The bool parsed. | ||||||
|  | 
 | ||||||
|  |   Raises: | ||||||
|  |     ParseError: If a boolean value couldn't be consumed. | ||||||
|  |   """ | ||||||
|  |   if require_str: | ||||||
|  |     if value == 'true': | ||||||
|  |       return True | ||||||
|  |     elif value == 'false': | ||||||
|  |       return False | ||||||
|  |     else: | ||||||
|  |       raise ParseError('Expected "true" or "false", not {0}'.format(value)) | ||||||
|  | 
 | ||||||
|  |   if not isinstance(value, bool): | ||||||
|  |     raise ParseError('Expected true or false without quotes') | ||||||
|  |   return value | ||||||
|  | 
 | ||||||
|  | _WKTJSONMETHODS = { | ||||||
|  |     'google.protobuf.Any': ['_AnyMessageToJsonObject', | ||||||
|  |                             '_ConvertAnyMessage'], | ||||||
|  |     'google.protobuf.Duration': ['_GenericMessageToJsonObject', | ||||||
|  |                                  '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.FieldMask': ['_GenericMessageToJsonObject', | ||||||
|  |                                   '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.ListValue': ['_ListValueMessageToJsonObject', | ||||||
|  |                                   '_ConvertListValueMessage'], | ||||||
|  |     'google.protobuf.Struct': ['_StructMessageToJsonObject', | ||||||
|  |                                '_ConvertStructMessage'], | ||||||
|  |     'google.protobuf.Timestamp': ['_GenericMessageToJsonObject', | ||||||
|  |                                   '_ConvertGenericMessage'], | ||||||
|  |     'google.protobuf.Value': ['_ValueMessageToJsonObject', | ||||||
|  |                               '_ConvertValueMessage'] | ||||||
|  | } | ||||||
424  lib/protobuf/message.py  Normal file
						|  | @ -0,0 +1,424 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | # TODO(robinson): We should just make these methods all "pure-virtual" and move | ||||||
|  | # all implementation out, into reflection.py for now. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | """Contains an abstract base class for protocol messages.""" | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | class Error(Exception): | ||||||
|  |   """Base error type for this module.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DecodeError(Error): | ||||||
|  |   """Exception raised when deserializing messages.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EncodeError(Error): | ||||||
|  |   """Exception raised when serializing messages.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Message(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract base class for protocol messages. | ||||||
|  | 
 | ||||||
|  |   Protocol message classes are almost always generated by the protocol | ||||||
|  |   compiler.  These generated types subclass Message and implement the methods | ||||||
|  |   shown below. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Link to an HTML document here. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document that instances of this class will also | ||||||
|  |   # have an Extensions attribute with __getitem__ and __setitem__. | ||||||
|  |   # Again, not sure how to best convey this. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document that the class must also have a static | ||||||
|  |   #   RegisterExtension(extension_field) method. | ||||||
|  |   #   Not sure how to best express at this point. | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Document these fields and methods. | ||||||
|  | 
 | ||||||
|  |   __slots__ = [] | ||||||
|  | 
 | ||||||
|  |   #: The :class:`google.protobuf.descriptor.Descriptor` for this message type. | ||||||
|  |   DESCRIPTOR = None | ||||||
|  | 
 | ||||||
|  |   def __deepcopy__(self, memo=None): | ||||||
|  |     clone = type(self)() | ||||||
|  |     clone.MergeFrom(self) | ||||||
|  |     return clone | ||||||
|  | 
 | ||||||
|  |   def __eq__(self, other_msg): | ||||||
|  |     """Recursively compares two messages by value and structure.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __ne__(self, other_msg): | ||||||
|  |     # Can't just say self != other_msg, since that would infinitely recurse. :) | ||||||
|  |     return not self == other_msg | ||||||
|  | 
 | ||||||
|  |   def __hash__(self): | ||||||
|  |     raise TypeError('unhashable object') | ||||||
|  | 
 | ||||||
|  |   def __str__(self): | ||||||
|  |     """Outputs a human-readable representation of the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __unicode__(self): | ||||||
|  |     """Outputs a human-readable representation of the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def MergeFrom(self, other_msg): | ||||||
|  |     """Merges the contents of the specified message into current message. | ||||||
|  | 
 | ||||||
|  |     This method merges the contents of the specified message into the current | ||||||
|  |     message. Singular fields that are set in the specified message overwrite | ||||||
|  |     the corresponding fields in the current message. Repeated fields are | ||||||
|  |     appended. Singular sub-messages and groups are recursively merged. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       other_msg (Message): A message to merge into the current message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def CopyFrom(self, other_msg): | ||||||
|  |     """Copies the content of the specified message into the current message. | ||||||
|  | 
 | ||||||
|  |     The method clears the current message and then merges the specified | ||||||
|  |     message using MergeFrom. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       other_msg (Message): A message to copy into the current one. | ||||||
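|  |  | ||||||
|  |     Example: | ||||||
|  |       A minimal sketch, assuming a hypothetical generated class Person: | ||||||
|  |  | ||||||
|  |         a = Person(name='Ada') | ||||||
|  |         b = Person(name='Bob') | ||||||
|  |         b.CopyFrom(a)   # b is cleared, then a is merged in; b == a | ||||||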
|  |     """ | ||||||
|  |     if self is other_msg: | ||||||
|  |       return | ||||||
|  |     self.Clear() | ||||||
|  |     self.MergeFrom(other_msg) | ||||||
|  | 
 | ||||||
|  |   def Clear(self): | ||||||
|  |     """Clears all data that was set in the message.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def SetInParent(self): | ||||||
|  |     """Mark this as present in the parent. | ||||||
|  | 
 | ||||||
|  |     This normally happens automatically when you assign a field of a | ||||||
|  |     sub-message, but sometimes you want to make the sub-message | ||||||
|  |     present while keeping it empty.  If you find yourself using this, | ||||||
|  |     you may want to reconsider your design. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def IsInitialized(self): | ||||||
|  |     """Checks if the message is initialized. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: The method returns True if the message is initialized (i.e. all of | ||||||
|  |       its required fields are set). | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): MergeFromString() should probably return None and be | ||||||
|  |   # implemented in terms of a helper that returns the # of bytes read.  Our | ||||||
|  |   # deserialization routines would use the helper when recursively | ||||||
|  |   # deserializing, but the end user would almost always just want the no-return | ||||||
|  |   # MergeFromString(). | ||||||
|  | 
 | ||||||
|  |   def MergeFromString(self, serialized): | ||||||
|  |     """Merges serialized protocol buffer data into this message. | ||||||
|  | 
 | ||||||
|  |     When we find a field in `serialized` that is already present | ||||||
|  |     in this message: | ||||||
|  | 
 | ||||||
|  |     -   If it's a "repeated" field, we append to the end of our list. | ||||||
|  |     -   Else, if it's a scalar, we overwrite our field. | ||||||
|  |     -   Else (it's a nonrepeated composite), we recursively merge | ||||||
|  |         into the existing composite. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       serialized (bytes): Any object that allows us to call | ||||||
|  |         ``memoryview(serialized)`` to access a string of bytes using the | ||||||
|  |         buffer interface. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       int: The number of bytes read from `serialized`. | ||||||
|  |       For non-group messages, this will always be `len(serialized)`, | ||||||
|  |       but for messages which are actually groups, this will | ||||||
|  |       generally be less than `len(serialized)`, since we must | ||||||
|  |       stop when we reach an ``END_GROUP`` tag.  Note that if | ||||||
|  |       we *do* stop because of an ``END_GROUP`` tag, the number | ||||||
|  |       of bytes returned does not include the bytes | ||||||
|  |       for the ``END_GROUP`` tag information. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       DecodeError: if the input cannot be parsed. | ||||||
|  |     """ | ||||||
|  |     # TODO(robinson): Document handling of unknown fields. | ||||||
|  |     # TODO(robinson): When we switch to a helper, this will return None. | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ParseFromString(self, serialized): | ||||||
|  |     """Parse serialized protocol buffer data into this message. | ||||||
|  | 
 | ||||||
|  |     Like :func:`MergeFromString()`, except we clear the object first. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       DecodeError: if the input cannot be parsed. | ||||||
|  |     """ | ||||||
|  |     self.Clear() | ||||||
|  |     return self.MergeFromString(serialized) | ||||||
|  | 
 | ||||||
|  |   def SerializeToString(self, **kwargs): | ||||||
|  |     """Serializes the protocol message to a binary string. | ||||||
|  | 
 | ||||||
|  |     Keyword Args: | ||||||
|  |       deterministic (bool): If true, requests deterministic serialization | ||||||
|  |         of the protobuf, with predictable ordering of map keys. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A binary string representation of the message if all of the required | ||||||
|  |       fields in the message are set (i.e. the message is initialized). | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       EncodeError: if the message isn't initialized (see :func:`IsInitialized`). | ||||||
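|  |  | ||||||
|  |     Example: | ||||||
|  |       A minimal round-trip sketch with a hypothetical generated class | ||||||
|  |       Person: | ||||||
|  |  | ||||||
|  |         data = Person(name='Ada').SerializeToString() | ||||||
|  |         clone = Person() | ||||||
|  |         clone.ParseFromString(data)   # clone now equals the original | ||||||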
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def SerializePartialToString(self, **kwargs): | ||||||
|  |     """Serializes the protocol message to a binary string. | ||||||
|  | 
 | ||||||
|  |     This method is similar to SerializeToString but doesn't check if the | ||||||
|  |     message is initialized. | ||||||
|  | 
 | ||||||
|  |     Keyword Args: | ||||||
|  |       deterministic (bool): If true, requests deterministic serialization | ||||||
|  |         of the protobuf, with predictable ordering of map keys. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bytes: A serialized representation of the partial message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # TODO(robinson): Decide whether we like these better | ||||||
|  |   # than auto-generated has_foo() and clear_foo() methods | ||||||
|  |   # on the instances themselves.  This way is less consistent | ||||||
|  |   # with C++, but it makes reflection-type access easier and | ||||||
|  |   # reduces the number of magically autogenerated things. | ||||||
|  |   # | ||||||
|  |   # TODO(robinson): Be sure to document (and test) exactly | ||||||
|  |   # which field names are accepted here.  Are we case-sensitive? | ||||||
|  |   # What do we do with fields that share names with Python keywords | ||||||
|  |   # like 'lambda' and 'yield'? | ||||||
|  |   # | ||||||
|  |   # nnorwitz says: | ||||||
|  |   # """ | ||||||
|  |   # Typically (in python), an underscore is appended to names that are | ||||||
|  |   # keywords. So they would become lambda_ or yield_. | ||||||
|  |   # """ | ||||||
|  |   def ListFields(self): | ||||||
|  |     """Returns a list of (FieldDescriptor, value) tuples for present fields. | ||||||
|  | 
 | ||||||
|  |     A message field is non-empty if HasField() would return true. A singular | ||||||
|  |     primitive field is non-empty if HasField() would return true in proto2 or it | ||||||
|  |     is nonzero in proto3. A repeated field is non-empty if it contains at least | ||||||
|  |     one element. The fields are ordered by field number. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       list[tuple(FieldDescriptor, value)]: field descriptors and values | ||||||
|  |       for all fields in the message which are not empty. The values vary by | ||||||
|  |       field type. | ||||||
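|  |  | ||||||
|  |     Example: | ||||||
|  |       A minimal sketch with a hypothetical generated class Person: | ||||||
|  |  | ||||||
|  |         for field_descriptor, value in Person(name='Ada').ListFields(): | ||||||
|  |           print(field_descriptor.name, value)   # prints: name Ada | ||||||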
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def HasField(self, field_name): | ||||||
|  |     """Checks if a certain field is set for the message. | ||||||
|  | 
 | ||||||
|  |     For a oneof group, checks if any field inside is set. Note that if the | ||||||
|  |     field_name is not defined in the message descriptor, :exc:`ValueError` will | ||||||
|  |     be raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       field_name (str): The name of the field to check for presence. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: Whether a value has been set for the named field. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: if the `field_name` is not a member of this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ClearField(self, field_name): | ||||||
|  |     """Clears the contents of a given field. | ||||||
|  | 
 | ||||||
|  |     Inside a oneof group, clears the field that is set. If the name refers | ||||||
|  |     to neither a defined field nor a oneof group, :exc:`ValueError` is raised. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       field_name (str): The name of the field to clear. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: if the `field_name` is not a member of this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def WhichOneof(self, oneof_group): | ||||||
|  |     """Returns the name of the field that is set inside a oneof group. | ||||||
|  | 
 | ||||||
|  |     If no field is set, returns None. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       oneof_group (str): the name of the oneof group to check. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       str or None: The name of the field that is set, or None. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       ValueError: no group with the given name exists | ||||||
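|  |  | ||||||
|  |     Example: | ||||||
|  |       A minimal sketch, assuming a hypothetical message msg with a oneof | ||||||
|  |       group "contact" containing fields "email" and "phone": | ||||||
|  |  | ||||||
|  |         msg.email = 'ada@example.com' | ||||||
|  |         msg.WhichOneof('contact')   # 'email' | ||||||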
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def HasExtension(self, extension_handle): | ||||||
|  |     """Checks if a certain extension is present for this message. | ||||||
|  | 
 | ||||||
|  |     Extensions are retrieved using the :attr:`Extensions` mapping (if present). | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       extension_handle: The handle for the extension to check. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       bool: Whether the extension is present for this message. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if the extension is repeated. Similar to repeated fields, | ||||||
|  |         there is no separate notion of presence: a "not present" repeated | ||||||
|  |         extension is an empty list. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ClearExtension(self, extension_handle): | ||||||
|  |     """Clears the contents of a given extension. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       extension_handle: The handle for the extension to clear. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def UnknownFields(self): | ||||||
|  |     """Returns the UnknownFieldSet. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       UnknownFieldSet: The unknown fields stored in this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def DiscardUnknownFields(self): | ||||||
|  |     """Clears all fields in the :class:`UnknownFieldSet`. | ||||||
|  | 
 | ||||||
|  |     This operation is recursive for nested messages. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ByteSize(self): | ||||||
|  |     """Returns the serialized size of this message. | ||||||
|  | 
 | ||||||
|  |     Recursively calls ByteSize() on all contained messages. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       int: The number of bytes required to serialize this message. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   @classmethod | ||||||
|  |   def FromString(cls, s): | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   @staticmethod | ||||||
|  |   def RegisterExtension(extension_handle): | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def _SetListener(self, message_listener): | ||||||
|  |     """Internal method used by the protocol message implementation. | ||||||
|  |     Clients should not call this directly. | ||||||
|  | 
 | ||||||
|  |     Sets a listener that this message will call on certain state transitions. | ||||||
|  | 
 | ||||||
|  |     The purpose of this method is to register back-edges from children to | ||||||
|  |     parents at runtime, for the purpose of setting "has" bits and | ||||||
|  |     byte-size-dirty bits in the parent and ancestor objects whenever a child or | ||||||
|  |     descendant object is modified. | ||||||
|  | 
 | ||||||
|  |     If the client wants to disconnect this Message from the object tree, it | ||||||
|  |     explicitly sets message_listener to None. | ||||||
|  | 
 | ||||||
|  |     If message_listener is None, unregisters any existing listener.  Otherwise, | ||||||
|  |     message_listener must implement the MessageListener interface in | ||||||
|  |     internal/message_listener.py, and we discard any listener registered | ||||||
|  |     via a previous _SetListener() call. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def __getstate__(self): | ||||||
|  |     """Support the pickle protocol.""" | ||||||
|  |     return dict(serialized=self.SerializePartialToString()) | ||||||
|  | 
 | ||||||
|  |   def __setstate__(self, state): | ||||||
|  |     """Support the pickle protocol.""" | ||||||
|  |     self.__init__() | ||||||
|  |     serialized = state['serialized'] | ||||||
|  |     # On Python 3, using encoding='latin1' is required for unpickling | ||||||
|  |     # protos pickled by Python 2. | ||||||
|  |     if not isinstance(serialized, bytes): | ||||||
|  |       serialized = serialized.encode('latin1') | ||||||
|  |     self.ParseFromString(serialized) | ||||||
|  | 
 | ||||||
|  |   def __reduce__(self): | ||||||
|  |     message_descriptor = self.DESCRIPTOR | ||||||
|  |     if message_descriptor.containing_type is None: | ||||||
|  |       return type(self), (), self.__getstate__() | ||||||
|  |     # the message type must be nested. | ||||||
|  |     # Python does not pickle nested classes; use the symbol_database on the | ||||||
|  |     # receiving end. | ||||||
|  |     container = message_descriptor | ||||||
|  |     return (_InternalConstructMessage, (container.full_name,), | ||||||
|  |             self.__getstate__()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _InternalConstructMessage(full_name): | ||||||
|  |   """Constructs a nested message.""" | ||||||
|  |   from google.protobuf import symbol_database  # pylint:disable=g-import-not-at-top | ||||||
|  | 
 | ||||||
|  |   return symbol_database.Default().GetSymbol(full_name)() | ||||||
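A minimal sketch of the pickle round-trip implemented by __getstate__, __setstate__ and __reduce__ above; addressbook_pb2.Person is a hypothetical generated message class used only for illustration:

import pickle

from addressbook_pb2 import Person  # hypothetical generated module

msg = Person()
msg.name = 'example'

# pickle.dumps() goes through __reduce__/__getstate__, which store only the
# message's serialized bytes.
data = pickle.dumps(msg)

# pickle.loads() rebuilds an empty instance and replays the bytes through
# __setstate__ -> ParseFromString().
restored = pickle.loads(data)
assert restored == msg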
							
								
								
									
185  lib/protobuf/message_factory.py  Normal file
						|  | @ -0,0 +1,185 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Provides a factory class for generating dynamic messages. | ||||||
|  | 
 | ||||||
|  | The easiest way to use this class: if you have access to the FileDescriptor | ||||||
|  | protos containing the messages you want to create, you can just do the following: | ||||||
|  | 
 | ||||||
|  | message_classes = message_factory.GetMessages(iterable_of_file_descriptors) | ||||||
|  | my_proto_instance = message_classes['some.proto.package.MessageName']() | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'matthewtoia@google.com (Matt Toia)' | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import api_implementation | ||||||
|  | from google.protobuf import descriptor_pool | ||||||
|  | from google.protobuf import message | ||||||
|  | 
 | ||||||
|  | if api_implementation.Type() == 'cpp': | ||||||
|  |   from google.protobuf.pyext import cpp_message as message_impl | ||||||
|  | else: | ||||||
|  |   from google.protobuf.internal import python_message as message_impl | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # The type of all Message classes. | ||||||
|  | _GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MessageFactory(object): | ||||||
|  |   """Factory for creating Proto2 messages from descriptors in a pool.""" | ||||||
|  | 
 | ||||||
|  |   def __init__(self, pool=None): | ||||||
|  |     """Initializes a new factory.""" | ||||||
|  |     self.pool = pool or descriptor_pool.DescriptorPool() | ||||||
|  | 
 | ||||||
|  |     # local cache of all classes built from protobuf descriptors | ||||||
|  |     self._classes = {} | ||||||
|  | 
 | ||||||
|  |   def GetPrototype(self, descriptor): | ||||||
|  |     """Obtains a proto2 message class based on the passed in descriptor. | ||||||
|  | 
 | ||||||
|  |     Passing a descriptor with a fully qualified name matching a previous | ||||||
|  |     invocation will cause the same class to be returned. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       descriptor: The descriptor to build from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class describing the passed in descriptor. | ||||||
|  |     """ | ||||||
|  |     if descriptor not in self._classes: | ||||||
|  |       result_class = self.CreatePrototype(descriptor) | ||||||
|  |       # The assignment to _classes is redundant for the base implementation, but | ||||||
|  |       # might avoid confusion in cases where CreatePrototype gets overridden and | ||||||
|  |       # does not call the base implementation. | ||||||
|  |       self._classes[descriptor] = result_class | ||||||
|  |       return result_class | ||||||
|  |     return self._classes[descriptor] | ||||||
|  | 
 | ||||||
|  |   def CreatePrototype(self, descriptor): | ||||||
|  |     """Builds a proto2 message class based on the passed in descriptor. | ||||||
|  | 
 | ||||||
|  |     Don't call this function directly, it always creates a new class. Call | ||||||
|  |     GetPrototype() instead. This method is meant to be overridden in subclasses | ||||||
|  |     to perform additional operations on the newly constructed class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       descriptor: The descriptor to build from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class describing the passed in descriptor. | ||||||
|  |     """ | ||||||
|  |     descriptor_name = descriptor.name | ||||||
|  |     result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE( | ||||||
|  |         descriptor_name, | ||||||
|  |         (message.Message,), | ||||||
|  |         { | ||||||
|  |             'DESCRIPTOR': descriptor, | ||||||
|  |             # If module not set, it wrongly points to message_factory module. | ||||||
|  |             '__module__': None, | ||||||
|  |         }) | ||||||
|  |     result_class._FACTORY = self  # pylint: disable=protected-access | ||||||
|  |     # Assign in _classes before doing recursive calls to avoid infinite | ||||||
|  |     # recursion. | ||||||
|  |     self._classes[descriptor] = result_class | ||||||
|  |     for field in descriptor.fields: | ||||||
|  |       if field.message_type: | ||||||
|  |         self.GetPrototype(field.message_type) | ||||||
|  |     for extension in result_class.DESCRIPTOR.extensions: | ||||||
|  |       if extension.containing_type not in self._classes: | ||||||
|  |         self.GetPrototype(extension.containing_type) | ||||||
|  |       extended_class = self._classes[extension.containing_type] | ||||||
|  |       extended_class.RegisterExtension(extension) | ||||||
|  |     return result_class | ||||||
|  | 
 | ||||||
|  |   def GetMessages(self, files): | ||||||
|  |     """Gets all the messages from a specified file. | ||||||
|  | 
 | ||||||
|  |     This will find and resolve dependencies, failing if the descriptor | ||||||
|  |     pool cannot satisfy them. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       files: The file names to extract messages from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A dictionary mapping proto names to the message classes. This will include | ||||||
|  |       any dependent messages as well as any messages defined in the same file as | ||||||
|  |       a specified message. | ||||||
|  |     """ | ||||||
|  |     result = {} | ||||||
|  |     for file_name in files: | ||||||
|  |       file_desc = self.pool.FindFileByName(file_name) | ||||||
|  |       for desc in file_desc.message_types_by_name.values(): | ||||||
|  |         result[desc.full_name] = self.GetPrototype(desc) | ||||||
|  | 
 | ||||||
|  |       # While the extension FieldDescriptors are created by the descriptor pool, | ||||||
|  |       # the python classes created in the factory need them to be registered | ||||||
|  |       # explicitly, which is done below. | ||||||
|  |       # | ||||||
|  |       # The call to RegisterExtension will specifically check if the | ||||||
|  |       # extension was already registered on the object and either | ||||||
|  |       # ignore the registration if the original was the same, or raise | ||||||
|  |       # an error if they were different. | ||||||
|  | 
 | ||||||
|  |       for extension in file_desc.extensions_by_name.values(): | ||||||
|  |         if extension.containing_type not in self._classes: | ||||||
|  |           self.GetPrototype(extension.containing_type) | ||||||
|  |         extended_class = self._classes[extension.containing_type] | ||||||
|  |         extended_class.RegisterExtension(extension) | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _FACTORY = MessageFactory() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def GetMessages(file_protos): | ||||||
|  |   """Builds a dictionary of all the messages available in a set of files. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     file_protos: Iterable of FileDescriptorProto to build messages out of. | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     A dictionary mapping proto names to the message classes. This will include | ||||||
|  |     any dependent messages as well as any messages defined in the same file as | ||||||
|  |     a specified message. | ||||||
|  |   """ | ||||||
|  |   # The C++ implementation of the protocol buffer library requires files to be | ||||||
|  |   # added in topological order of the dependency graph. | ||||||
|  |   file_by_name = {file_proto.name: file_proto for file_proto in file_protos} | ||||||
|  |   def _AddFile(file_proto): | ||||||
|  |     for dependency in file_proto.dependency: | ||||||
|  |       if dependency in file_by_name: | ||||||
|  |         # Remove from elements to be visited, in order to cut cycles. | ||||||
|  |         _AddFile(file_by_name.pop(dependency)) | ||||||
|  |     _FACTORY.pool.Add(file_proto) | ||||||
|  |   while file_by_name: | ||||||
|  |     _AddFile(file_by_name.popitem()[1]) | ||||||
|  |   return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos]) | ||||||
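For illustration, a minimal sketch that feeds a hand-built FileDescriptorProto through the module-level GetMessages(); the file, message and field names ('example/simple.proto', 'Simple', 'value') are made up:

from google.protobuf import descriptor_pb2
from google.protobuf import message_factory

# Describe a file containing one message with a single optional string field.
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = 'example/simple.proto'
file_proto.package = 'example'
msg_proto = file_proto.message_type.add()
msg_proto.name = 'Simple'
field = msg_proto.field.add()
field.name = 'value'
field.number = 1
field.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
field.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING

# GetMessages() adds the files to the pool in dependency order and returns a
# dict mapping fully qualified message names to generated classes.
classes = message_factory.GetMessages([file_proto])
Simple = classes['example.Simple']
instance = Simple(value='hello')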
							
								
								
									
134  lib/protobuf/proto_builder.py  Normal file
						|  | @ -0,0 +1,134 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Dynamic Protobuf class creator.""" | ||||||
|  | 
 | ||||||
|  | from collections import OrderedDict | ||||||
|  | import hashlib | ||||||
|  | import os | ||||||
|  | 
 | ||||||
|  | from google.protobuf import descriptor_pb2 | ||||||
|  | from google.protobuf import descriptor | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _GetMessageFromFactory(factory, full_name): | ||||||
|  |   """Get a proto class from the MessageFactory by name. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     factory: a MessageFactory instance. | ||||||
|  |     full_name: str, the fully qualified name of the proto type. | ||||||
|  |   Returns: | ||||||
|  |     A class, for the type identified by full_name. | ||||||
|  |   Raises: | ||||||
|  |     KeyError: if the proto is not found in the factory's descriptor pool. | ||||||
|  |   """ | ||||||
|  |   proto_descriptor = factory.pool.FindMessageTypeByName(full_name) | ||||||
|  |   proto_cls = factory.GetPrototype(proto_descriptor) | ||||||
|  |   return proto_cls | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def MakeSimpleProtoClass(fields, full_name=None, pool=None): | ||||||
|  |   """Create a Protobuf class whose fields are basic types. | ||||||
|  | 
 | ||||||
|  |   Note: this doesn't validate field names! | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     fields: dict of {name: field_type} mappings for each field in the proto. If | ||||||
|  |         this is an OrderedDict the order will be maintained, otherwise the | ||||||
|  |         fields will be sorted by name. | ||||||
|  |     full_name: optional str, the fully-qualified name of the proto type. | ||||||
|  |     pool: optional DescriptorPool instance. | ||||||
|  |   Returns: | ||||||
|  |     a class, the new protobuf class with a FileDescriptor. | ||||||
|  |   """ | ||||||
|  |   factory = message_factory.MessageFactory(pool=pool) | ||||||
|  | 
 | ||||||
|  |   if full_name is not None: | ||||||
|  |     try: | ||||||
|  |       proto_cls = _GetMessageFromFactory(factory, full_name) | ||||||
|  |       return proto_cls | ||||||
|  |     except KeyError: | ||||||
|  |       # The factory's DescriptorPool doesn't know about this class yet. | ||||||
|  |       pass | ||||||
|  | 
 | ||||||
|  |   # Get a list of (name, field_type) tuples from the fields dict. If fields was | ||||||
|  |   # an OrderedDict we keep the order, but otherwise we sort the fields to ensure | ||||||
|  |   # consistent ordering. | ||||||
|  |   field_items = fields.items() | ||||||
|  |   if not isinstance(fields, OrderedDict): | ||||||
|  |     field_items = sorted(field_items) | ||||||
|  | 
 | ||||||
|  |   # Use a consistent file name that is unlikely to conflict with any imported | ||||||
|  |   # proto files. | ||||||
|  |   fields_hash = hashlib.sha1() | ||||||
|  |   for f_name, f_type in field_items: | ||||||
|  |     fields_hash.update(f_name.encode('utf-8')) | ||||||
|  |     fields_hash.update(str(f_type).encode('utf-8')) | ||||||
|  |   proto_file_name = fields_hash.hexdigest() + '.proto' | ||||||
|  | 
 | ||||||
|  |   # If the proto is anonymous, use the same hash to name it. | ||||||
|  |   if full_name is None: | ||||||
|  |     full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' + | ||||||
|  |                  fields_hash.hexdigest()) | ||||||
|  |     try: | ||||||
|  |       proto_cls = _GetMessageFromFactory(factory, full_name) | ||||||
|  |       return proto_cls | ||||||
|  |     except KeyError: | ||||||
|  |       # The factory's DescriptorPool doesn't know about this class yet. | ||||||
|  |       pass | ||||||
|  | 
 | ||||||
|  |   # This is the first time we see this proto: add a new descriptor to the pool. | ||||||
|  |   factory.pool.Add( | ||||||
|  |       _MakeFileDescriptorProto(proto_file_name, full_name, field_items)) | ||||||
|  |   return _GetMessageFromFactory(factory, full_name) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _MakeFileDescriptorProto(proto_file_name, full_name, field_items): | ||||||
|  |   """Populate FileDescriptorProto for MessageFactory's DescriptorPool.""" | ||||||
|  |   package, name = full_name.rsplit('.', 1) | ||||||
|  |   file_proto = descriptor_pb2.FileDescriptorProto() | ||||||
|  |   file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name) | ||||||
|  |   file_proto.package = package | ||||||
|  |   desc_proto = file_proto.message_type.add() | ||||||
|  |   desc_proto.name = name | ||||||
|  |   for f_number, (f_name, f_type) in enumerate(field_items, 1): | ||||||
|  |     field_proto = desc_proto.field.add() | ||||||
|  |     field_proto.name = f_name | ||||||
|  |     # If the number falls in the reserved range, reassign it to the correct | ||||||
|  |     # number after the range. | ||||||
|  |     if f_number >= descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER: | ||||||
|  |       f_number += ( | ||||||
|  |           descriptor.FieldDescriptor.LAST_RESERVED_FIELD_NUMBER - | ||||||
|  |           descriptor.FieldDescriptor.FIRST_RESERVED_FIELD_NUMBER + 1) | ||||||
|  |     field_proto.number = f_number | ||||||
|  |     field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL | ||||||
|  |     field_proto.type = f_type | ||||||
|  |   return file_proto | ||||||
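A small usage sketch for MakeSimpleProtoClass(); the field names and the 'example.Endpoint' full name are illustrative:

from collections import OrderedDict

from google.protobuf import descriptor_pb2
from google.protobuf import proto_builder

# An OrderedDict preserves field order; a plain dict would be sorted by name.
fields = OrderedDict([
    ('host', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
    ('port', descriptor_pb2.FieldDescriptorProto.TYPE_INT32),
])
Endpoint = proto_builder.MakeSimpleProtoClass(
    fields, full_name='example.Endpoint')

endpoint = Endpoint(host='localhost', port=8080)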
							
								
								
									
0  lib/protobuf/pyext/__init__.py  Normal file
						
							
								
								
									
										
BIN  lib/protobuf/pyext/_message.cpython-310-x86_64-linux-gnu.so  Executable file
							
								
								
									
65  lib/protobuf/pyext/cpp_message.py  Normal file
						|  | @ -0,0 +1,65 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Protocol message implementation hooks for C++ implementation. | ||||||
|  | 
 | ||||||
|  | Contains helper functions used to create protocol message classes from | ||||||
|  | Descriptor objects at runtime backed by the protocol buffer C++ API. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'tibell@google.com (Johan Tibell)' | ||||||
|  | 
 | ||||||
|  | from google.protobuf.pyext import _message | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedProtocolMessageType(_message.MessageMeta): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for protocol message classes created at runtime from Descriptors. | ||||||
|  | 
 | ||||||
|  |   The protocol compiler currently uses this metaclass to create protocol | ||||||
|  |   message classes at runtime.  Clients can also manually create their own | ||||||
|  |   classes at runtime, as in this example: | ||||||
|  | 
 | ||||||
|  |   mydescriptor = Descriptor(.....) | ||||||
|  |   factory = symbol_database.Default() | ||||||
|  |   factory.pool.AddDescriptor(mydescriptor) | ||||||
|  |   MyProtoClass = factory.GetPrototype(mydescriptor) | ||||||
|  |   myproto_instance = MyProtoClass() | ||||||
|  |   myproto_instance.foo_field = 23 | ||||||
|  |   ... | ||||||
|  | 
 | ||||||
|  |   The above example will not work for nested types. If you wish to include them, | ||||||
|  |   use reflection.MakeClass() instead of manually instantiating the class in | ||||||
|  |   order to create the appropriate class structure. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # Must be consistent with the protocol-compiler code in | ||||||
|  |   # proto2/compiler/internal/generator.*. | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
							
								
								
									
95  lib/protobuf/reflection.py  Normal file
						|  | @ -0,0 +1,95 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | # This code is meant to work on Python 2.4 and above only. | ||||||
|  | 
 | ||||||
|  | """Contains a metaclass and helper functions used to create | ||||||
|  | protocol message classes from Descriptor objects at runtime. | ||||||
|  | 
 | ||||||
|  | Recall that a metaclass is the "type" of a class. | ||||||
|  | (A class is to a metaclass what an instance is to a class.) | ||||||
|  | 
 | ||||||
|  | In this case, we use the GeneratedProtocolMessageType metaclass | ||||||
|  | to inject all the useful functionality into the classes | ||||||
|  | output by the protocol compiler at compile-time. | ||||||
|  | 
 | ||||||
|  | The upshot of all this is that the real implementation | ||||||
|  | details for ALL pure-Python protocol buffers are *here in | ||||||
|  | this file*. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'robinson@google.com (Will Robinson)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | from google.protobuf import symbol_database | ||||||
|  | 
 | ||||||
|  | # The type of all Message classes. | ||||||
|  | # Part of the public interface, but normally only used by message factories. | ||||||
|  | GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE | ||||||
|  | 
 | ||||||
|  | MESSAGE_CLASS_CACHE = {} | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Deprecated. Please NEVER use reflection.ParseMessage(). | ||||||
|  | def ParseMessage(descriptor, byte_str): | ||||||
|  |   """Generate a new Message instance from this Descriptor and a byte string. | ||||||
|  | 
 | ||||||
|  |   DEPRECATED: ParseMessage is deprecated because it uses MakeClass(). | ||||||
|  |   Please use MessageFactory.GetPrototype() instead. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     descriptor: Protobuf Descriptor object | ||||||
|  |     byte_str: Serialized protocol buffer byte string | ||||||
|  | 
 | ||||||
|  |   Returns: | ||||||
|  |     Newly created protobuf Message object. | ||||||
|  |   """ | ||||||
|  |   result_class = MakeClass(descriptor) | ||||||
|  |   new_msg = result_class() | ||||||
|  |   new_msg.ParseFromString(byte_str) | ||||||
|  |   return new_msg | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Deprecated. Please NEVER use reflection.MakeClass(). | ||||||
|  | def MakeClass(descriptor): | ||||||
|  |   """Construct a class object for a protobuf described by descriptor. | ||||||
|  | 
 | ||||||
|  |   DEPRECATED: use MessageFactory.GetPrototype() instead. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     descriptor: A descriptor.Descriptor object describing the protobuf. | ||||||
|  |   Returns: | ||||||
|  |     The Message class object described by the descriptor. | ||||||
|  |   """ | ||||||
|  |   # The original implementation led to duplicate message classes, which won't | ||||||
|  |   # play well with extensions. Message factory info is also missing. | ||||||
|  |   # Redirect to message_factory. | ||||||
|  |   return symbol_database.Default().GetPrototype(descriptor) | ||||||
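As a sketch of the non-deprecated path the docstrings above point to, a parse helper built on MessageFactory.GetPrototype(); descriptor and byte_str are assumed to be as described in ParseMessage():

from google.protobuf import message_factory

def parse_message(descriptor, byte_str):
  # Build (or fetch the cached) class for this descriptor, then parse into it.
  factory = message_factory.MessageFactory()
  msg_class = factory.GetPrototype(descriptor)
  new_msg = msg_class()
  new_msg.ParseFromString(byte_str)
  return new_msg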
							
								
								
									
228  lib/protobuf/service.py  Normal file
						|  | @ -0,0 +1,228 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """DEPRECATED:  Declares the RPC service interfaces. | ||||||
|  | 
 | ||||||
|  | This module declares the abstract interfaces underlying proto2 RPC | ||||||
|  | services.  These are intended to be independent of any particular RPC | ||||||
|  | implementation, so that proto2 services can be used on top of a variety | ||||||
|  | of implementations.  Starting with version 2.3.0, RPC implementations should | ||||||
|  | not try to build on these, but should instead provide code generator plugins | ||||||
|  | which generate code specific to the particular RPC implementation.  This way | ||||||
|  | the generated code can be more appropriate for the implementation in use | ||||||
|  | and can avoid unnecessary layers of indirection. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'petar@google.com (Petar Petrov)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcException(Exception): | ||||||
|  |   """Exception raised on failed blocking RPC method call.""" | ||||||
|  |   pass | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Service(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract base interface for protocol-buffer-based RPC services. | ||||||
|  | 
 | ||||||
|  |   Services themselves are abstract classes (implemented either by servers or as | ||||||
|  |   stubs), but they subclass this base interface. The methods of this | ||||||
|  |   interface can be used to call the methods of the service without knowing | ||||||
|  |   its exact type at compile time (analogous to the Message interface). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def GetDescriptor(): | ||||||
|  |     """Retrieves this service's descriptor.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                  request, done): | ||||||
|  |     """Calls a method of the service specified by method_descriptor. | ||||||
|  | 
 | ||||||
|  |     If "done" is None then the call is blocking and the response | ||||||
|  |     message will be returned directly.  Otherwise the call is asynchronous | ||||||
|  |     and "done" will later be called with the response value. | ||||||
|  | 
 | ||||||
|  |     In the blocking case, RpcException will be raised on error. | ||||||
|  | 
 | ||||||
|  |     Preconditions: | ||||||
|  | 
 | ||||||
|  |     * method_descriptor.service == GetDescriptor() | ||||||
|  |     * request is of the exact same class as returned by | ||||||
|  |       GetRequestClass(method). | ||||||
|  |     * After the call has started, the request must not be modified. | ||||||
|  |     * "rpc_controller" is of the correct type for the RPC implementation being | ||||||
|  |       used by this Service.  For stubs, the "correct type" depends on the | ||||||
|  |       RpcChannel which the stub is using. | ||||||
|  | 
 | ||||||
|  |     Postconditions: | ||||||
|  | 
 | ||||||
|  |     * "done" will be called when the method is complete.  This may be | ||||||
|  |       before CallMethod() returns or it may be at some point in the future. | ||||||
|  |     * If the RPC failed, the response value passed to "done" will be None. | ||||||
|  |       Further details about the failure can be found by querying the | ||||||
|  |       RpcController. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def GetRequestClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the request message for the specified method. | ||||||
|  | 
 | ||||||
|  |     CallMethod() requires that the request is of a particular subclass of | ||||||
|  |     Message. GetRequestClass() returns the class object of this required | ||||||
|  |     type. | ||||||
|  | 
 | ||||||
|  |     Example: | ||||||
|  |       method = service.GetDescriptor().FindMethodByName("Foo") | ||||||
|  |       request = stub.GetRequestClass(method)() | ||||||
|  |       request.ParseFromString(input) | ||||||
|  |       service.CallMethod(method, rpc_controller, request, callback) | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def GetResponseClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the response message for the specified method. | ||||||
|  | 
 | ||||||
|  |     This method isn't really needed, as the RpcChannel's CallMethod constructs | ||||||
|  |     the response protocol message. It's provided anyway in case it is useful | ||||||
|  |     for the caller to know the response type in advance. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcController(object): | ||||||
|  | 
 | ||||||
|  |   """An RpcController mediates a single method call. | ||||||
|  | 
 | ||||||
|  |   The primary purpose of the controller is to provide a way to manipulate | ||||||
|  |   settings specific to the RPC implementation and to find out about RPC-level | ||||||
|  |   errors. The methods provided by the RpcController interface are intended | ||||||
|  |   to be a "least common denominator" set of features which we expect all | ||||||
|  |   implementations to support.  Specific implementations may provide more | ||||||
|  |   advanced features (e.g. deadline propagation). | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   # Client-side methods below | ||||||
|  | 
 | ||||||
|  |   def Reset(self): | ||||||
|  |     """Resets the RpcController to its initial state. | ||||||
|  | 
 | ||||||
|  |     After the RpcController has been reset, it may be reused in | ||||||
|  |     a new call. Must not be called while an RPC is in progress. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def Failed(self): | ||||||
|  |     """Returns true if the call failed. | ||||||
|  | 
 | ||||||
|  |     After a call has finished, returns true if the call failed.  The possible | ||||||
|  |     reasons for failure depend on the RPC implementation.  Failed() must not | ||||||
|  |     be called before a call has finished.  If Failed() returns true, the | ||||||
|  |     contents of the response message are undefined. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def ErrorText(self): | ||||||
|  |     """If Failed is true, returns a human-readable description of the error.""" | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def StartCancel(self): | ||||||
|  |     """Initiate cancellation. | ||||||
|  | 
 | ||||||
|  |     Advises the RPC system that the caller desires that the RPC call be | ||||||
|  |     canceled.  The RPC system may cancel it immediately, may wait awhile and | ||||||
|  |     then cancel it, or may not even cancel the call at all.  If the call is | ||||||
|  |     canceled, the "done" callback will still be called and the RpcController | ||||||
|  |     will indicate that the call failed at that time. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   # Server-side methods below | ||||||
|  | 
 | ||||||
|  |   def SetFailed(self, reason): | ||||||
|  |     """Sets a failure reason. | ||||||
|  | 
 | ||||||
|  |     Causes Failed() to return true on the client side.  "reason" will be | ||||||
|  |     incorporated into the message returned by ErrorText().  If you find | ||||||
|  |     you need to return machine-readable information about failures, you | ||||||
|  |     should incorporate it into your response protocol buffer and should | ||||||
|  |     NOT call SetFailed(). | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def IsCanceled(self): | ||||||
|  |     """Checks if the client cancelled the RPC. | ||||||
|  | 
 | ||||||
|  |     If true, indicates that the client canceled the RPC, so the server may | ||||||
|  |     as well give up on replying to it.  The server should still call the | ||||||
|  |     final "done" callback. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |   def NotifyOnCancel(self, callback): | ||||||
|  |     """Sets a callback to invoke on cancel. | ||||||
|  | 
 | ||||||
|  |     Asks that the given callback be called when the RPC is canceled.  The | ||||||
|  |     callback will always be called exactly once.  If the RPC completes without | ||||||
|  |     being canceled, the callback will be called after completion.  If the RPC | ||||||
|  |     has already been canceled when NotifyOnCancel() is called, the callback | ||||||
|  |     will be called immediately. | ||||||
|  | 
 | ||||||
|  |     NotifyOnCancel() must be called no more than once per request. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RpcChannel(object): | ||||||
|  | 
 | ||||||
|  |   """Abstract interface for an RPC channel. | ||||||
|  | 
 | ||||||
|  |   An RpcChannel represents a communication line to a service which can be used | ||||||
|  |   to call that service's methods.  The service may be running on another | ||||||
|  |   machine. Normally, you should not use an RpcChannel directly, but instead | ||||||
|  |   construct a stub Service wrapping it. | ||||||
|  | 
 | ||||||
|  |   Example: | ||||||
|  |     channel = rpcImpl.Channel("remotehost.example.com:1234") | ||||||
|  |     controller = rpcImpl.Controller() | ||||||
|  |     service = MyService_Stub(channel) | ||||||
|  |     service.MyMethod(controller, request, callback) | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                  request, response_class, done): | ||||||
|  |     """Calls the method identified by the descriptor. | ||||||
|  | 
 | ||||||
|  |     Call the given method of the remote service.  The signature of this | ||||||
|  |     procedure looks the same as Service.CallMethod(), but the requirements | ||||||
|  |     are less strict in one important way:  the request object doesn't have to | ||||||
|  |     be of any specific class as long as its descriptor is method.input_type. | ||||||
|  |     """ | ||||||
|  |     raise NotImplementedError | ||||||
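As a rough sketch of the contract above, an in-process RpcChannel that dispatches straight to a local Service instance; LocalChannel is a hypothetical helper, not part of the library:

from google.protobuf import service

class LocalChannel(service.RpcChannel):
  """Dispatches CallMethod() directly to a local Service implementation."""

  def __init__(self, local_service):
    self._service = local_service

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    # The request only has to match method_descriptor.input_type; a local
    # Service implementation accepts it directly.
    return self._service.CallMethod(
        method_descriptor, rpc_controller, request, done)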
							
								
								
									
295  lib/protobuf/service_reflection.py  Normal file
						|  | @ -0,0 +1,295 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Contains metaclasses used to create protocol service and service stub | ||||||
|  | classes from ServiceDescriptor objects at runtime. | ||||||
|  | 
 | ||||||
|  | The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to | ||||||
|  | inject all useful functionality into the classes output by the protocol | ||||||
|  | compiler at compile-time. | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | __author__ = 'petar@google.com (Petar Petrov)' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedServiceType(type): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for service classes created at runtime from ServiceDescriptors. | ||||||
|  | 
 | ||||||
|  |   Implementations for all methods described in the Service class are added here | ||||||
|  |   by this class. We also create properties to allow getting/setting all fields | ||||||
|  |   in the protocol message. | ||||||
|  | 
 | ||||||
|  |   The protocol compiler currently uses this metaclass to create protocol service | ||||||
|  |   classes at runtime. Clients can also manually create their own classes at | ||||||
|  |   runtime, as in this example:: | ||||||
|  | 
 | ||||||
|  |     mydescriptor = ServiceDescriptor(.....) | ||||||
|  |     class MyProtoService(service.Service): | ||||||
|  |       __metaclass__ = GeneratedServiceType | ||||||
|  |       DESCRIPTOR = mydescriptor | ||||||
|  |     myservice_instance = MyProtoService() | ||||||
|  |     # ... | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
|  | 
 | ||||||
|  |   def __init__(cls, name, bases, dictionary): | ||||||
|  |     """Creates a message service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Name of the class (ignored, but required by the metaclass | ||||||
|  |         protocol). | ||||||
|  |       bases: Base classes of the class being constructed. | ||||||
|  |       dictionary: The class dictionary of the class being constructed. | ||||||
|  |         dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object | ||||||
|  |         describing this protocol service type. | ||||||
|  |     """ | ||||||
|  |     # Don't do anything if this class doesn't have a descriptor. This happens | ||||||
|  |     # when a service class is subclassed. | ||||||
|  |     if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary: | ||||||
|  |       return | ||||||
|  | 
 | ||||||
|  |     descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY] | ||||||
|  |     service_builder = _ServiceBuilder(descriptor) | ||||||
|  |     service_builder.BuildService(cls) | ||||||
|  |     cls.DESCRIPTOR = descriptor | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GeneratedServiceStubType(GeneratedServiceType): | ||||||
|  | 
 | ||||||
|  |   """Metaclass for service stubs created at runtime from ServiceDescriptors. | ||||||
|  | 
 | ||||||
|  |   This class has responsibilities similar to GeneratedServiceType, except that | ||||||
|  |   it creates the service stub classes. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   _DESCRIPTOR_KEY = 'DESCRIPTOR' | ||||||
|  | 
 | ||||||
|  |   def __init__(cls, name, bases, dictionary): | ||||||
|  |     """Creates a message service stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       name: Name of the class (ignored here). | ||||||
|  |       bases: Base classes of the class being constructed. | ||||||
|  |       dictionary: The class dictionary of the class being constructed. | ||||||
|  |         dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object | ||||||
|  |         describing this protocol service type. | ||||||
|  |     """ | ||||||
|  |     super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary) | ||||||
|  |     # Don't do anything if this class doesn't have a descriptor. This happens | ||||||
|  |     # when a service stub is subclassed. | ||||||
|  |     if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary: | ||||||
|  |       return | ||||||
|  | 
 | ||||||
|  |     descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] | ||||||
|  |     service_stub_builder = _ServiceStubBuilder(descriptor) | ||||||
|  |     service_stub_builder.BuildServiceStub(cls) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _ServiceBuilder(object): | ||||||
|  | 
 | ||||||
|  |   """This class constructs a protocol service class using a service descriptor. | ||||||
|  | 
 | ||||||
|  |   Given a service descriptor, this class constructs a class that represents | ||||||
|  |   the specified service descriptor. One service builder instance constructs | ||||||
|  |   exactly one service class. That means all instances of that class share the | ||||||
|  |   same builder. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, service_descriptor): | ||||||
|  |     """Initializes an instance of the service class builder. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor: ServiceDescriptor to use when constructing the | ||||||
|  |         service class. | ||||||
|  |     """ | ||||||
|  |     self.descriptor = service_descriptor | ||||||
|  | 
 | ||||||
|  |   def BuildService(builder, cls): | ||||||
|  |     """Constructs the service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       cls: The class that will be constructed. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     # CallMethod needs to operate with an instance of the Service class. This | ||||||
|  |     # internal wrapper function exists only to be able to pass the service | ||||||
|  |     # instance to the method that does the real CallMethod work. | ||||||
|  |     # Making sure to use exact argument names from the abstract interface in | ||||||
|  |     # service.py to match the type signature | ||||||
|  |     def _WrapCallMethod(self, method_descriptor, rpc_controller, request, done): | ||||||
|  |       return builder._CallMethod(self, method_descriptor, rpc_controller, | ||||||
|  |                                  request, done) | ||||||
|  | 
 | ||||||
|  |     def _WrapGetRequestClass(self, method_descriptor): | ||||||
|  |       return builder._GetRequestClass(method_descriptor) | ||||||
|  | 
 | ||||||
|  |     def _WrapGetResponseClass(self, method_descriptor): | ||||||
|  |       return builder._GetResponseClass(method_descriptor) | ||||||
|  | 
 | ||||||
|  |     builder.cls = cls | ||||||
|  |     cls.CallMethod = _WrapCallMethod | ||||||
|  |     cls.GetDescriptor = staticmethod(lambda: builder.descriptor) | ||||||
|  |     cls.GetDescriptor.__doc__ = 'Returns the service descriptor.' | ||||||
|  |     cls.GetRequestClass = _WrapGetRequestClass | ||||||
|  |     cls.GetResponseClass = _WrapGetResponseClass | ||||||
|  |     for method in builder.descriptor.methods: | ||||||
|  |       setattr(cls, method.name, builder._GenerateNonImplementedMethod(method)) | ||||||
|  | 
 | ||||||
|  |   def _CallMethod(self, srvc, method_descriptor, | ||||||
|  |                   rpc_controller, request, callback): | ||||||
|  |     """Calls the method described by a given method descriptor. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       srvc: Instance of the service for which this method is called. | ||||||
|  |       method_descriptor: Descriptor that represent the method to call. | ||||||
|  |       rpc_controller: RPC controller to use for this method's execution. | ||||||
|  |       request: Request protocol message. | ||||||
|  |       callback: A callback to invoke after the method has completed. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'CallMethod() given method descriptor for wrong service type.') | ||||||
|  |     method = getattr(srvc, method_descriptor.name) | ||||||
|  |     return method(rpc_controller, request, callback) | ||||||
|  | 
 | ||||||
|  |   def _GetRequestClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the request protocol message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_descriptor: Descriptor of the method for which to return the | ||||||
|  |         request protocol message class. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class that represents the input protocol message of the specified | ||||||
|  |       method. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'GetRequestClass() given method descriptor for wrong service type.') | ||||||
|  |     return method_descriptor.input_type._concrete_class | ||||||
|  | 
 | ||||||
|  |   def _GetResponseClass(self, method_descriptor): | ||||||
|  |     """Returns the class of the response protocol message. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_descriptor: Descriptor of the method for which to return the | ||||||
|  |         response protocol message class. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A class that represents the output protocol message of the specified | ||||||
|  |       method. | ||||||
|  |     """ | ||||||
|  |     if method_descriptor.containing_service != self.descriptor: | ||||||
|  |       raise RuntimeError( | ||||||
|  |           'GetResponseClass() given method descriptor for wrong service type.') | ||||||
|  |     return method_descriptor.output_type._concrete_class | ||||||
|  | 
 | ||||||
|  |   def _GenerateNonImplementedMethod(self, method): | ||||||
|  |     """Generates and returns a method that can be set for a service methods. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method: Descriptor of the service method for which a method is to be | ||||||
|  |         generated. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A method that can be added to the service class. | ||||||
|  |     """ | ||||||
|  |     return lambda inst, rpc_controller, request, callback: ( | ||||||
|  |         self._NonImplementedMethod(method.name, rpc_controller, callback)) | ||||||
|  | 
 | ||||||
|  |   def _NonImplementedMethod(self, method_name, rpc_controller, callback): | ||||||
|  |     """The body of all methods in the generated service class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       method_name: Name of the method being executed. | ||||||
|  |       rpc_controller: RPC controller used to execute this method. | ||||||
|  |       callback: A callback which will be invoked when the method finishes. | ||||||
|  |     """ | ||||||
|  |     rpc_controller.SetFailed('Method %s not implemented.' % method_name) | ||||||
|  |     callback(None) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class _ServiceStubBuilder(object): | ||||||
|  | 
 | ||||||
|  |   """Constructs a protocol service stub class using a service descriptor. | ||||||
|  | 
 | ||||||
|  |   Given a service descriptor, this class constructs a suitable stub class. | ||||||
|  |   A stub is just a type-safe wrapper around an RpcChannel which emulates a | ||||||
|  |   local implementation of the service. | ||||||
|  | 
 | ||||||
|  |   Each service stub builder instance constructs exactly one class, so all | ||||||
|  |   instances of that class share the same service stub builder. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def __init__(self, service_descriptor): | ||||||
|  |     """Initializes an instance of the service stub class builder. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor: ServiceDescriptor to use when constructing the | ||||||
|  |         stub class. | ||||||
|  |     """ | ||||||
|  |     self.descriptor = service_descriptor | ||||||
|  | 
 | ||||||
|  |   def BuildServiceStub(self, cls): | ||||||
|  |     """Constructs the stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       cls: The class that will be constructed. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def _ServiceStubInit(stub, rpc_channel): | ||||||
|  |       stub.rpc_channel = rpc_channel | ||||||
|  |     self.cls = cls | ||||||
|  |     cls.__init__ = _ServiceStubInit | ||||||
|  |     for method in self.descriptor.methods: | ||||||
|  |       setattr(cls, method.name, self._GenerateStubMethod(method)) | ||||||
|  | 
 | ||||||
|  |   def _GenerateStubMethod(self, method): | ||||||
|  |     return (lambda inst, rpc_controller, request, callback=None: | ||||||
|  |         self._StubMethod(inst, method, rpc_controller, request, callback)) | ||||||
|  | 
 | ||||||
|  |   def _StubMethod(self, stub, method_descriptor, | ||||||
|  |                   rpc_controller, request, callback): | ||||||
|  |     """The body of all service methods in the generated stub class. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       stub: Stub instance. | ||||||
|  |       method_descriptor: Descriptor of the invoked method. | ||||||
|  |       rpc_controller: RPC controller used to execute the method. | ||||||
|  |       request: Request protocol message. | ||||||
|  |       callback: A callback to execute when the method finishes. | ||||||
|  |     Returns: | ||||||
|  |       Response message (in the case of a blocking call). | ||||||
|  |     """ | ||||||
|  |     return stub.rpc_channel.CallMethod( | ||||||
|  |         method_descriptor, rpc_controller, request, | ||||||
|  |         method_descriptor.output_type._concrete_class, callback) | ||||||
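To make the two builders above concrete, here is a minimal usage sketch. Generic service classes are only generated when a .proto sets option py_generic_services = true; the search_pb2 module, SearchService, and SearchRequest names below are hypothetical, and LoggingChannel is a stand-in for a real RpcChannel implementation.

# Sketch only: assumes a hypothetical search_pb2 module generated from a
# .proto declaring `service SearchService` with py_generic_services enabled.
from google.protobuf import service

class LoggingChannel(service.RpcChannel):
  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    # A real channel would serialize `request`, send it over the wire, and
    # parse the reply; this one just returns an empty response message.
    print('Called %s' % method_descriptor.full_name)
    response = response_class()
    if done is not None:
      done(response)
    return response

stub = search_pb2.SearchService_Stub(LoggingChannel())
# Each stub method is generated by _GenerateStubMethod and forwards to
# rpc_channel.CallMethod, exactly as _StubMethod does above.
response = stub.Search(None, search_pb2.SearchRequest())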
							
								
								
									
26 lib/protobuf/source_context_pb2.py Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/source_context.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x8a\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01Z6google.golang.org/protobuf/types/known/sourcecontextpb\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.source_context_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\022SourceContextProtoP\001Z6google.golang.org/protobuf/types/known/sourcecontextpb\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SOURCECONTEXT._serialized_start=57 | ||||||
|  |   _SOURCECONTEXT._serialized_end=91 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
36 lib/protobuf/struct_pb2.py Normal file
|  | @ -0,0 +1,36 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/struct.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x7f\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z/google.golang.org/protobuf/types/known/structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.struct_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\013StructProtoP\001Z/google.golang.org/protobuf/types/known/structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _STRUCT_FIELDSENTRY._options = None | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_options = b'8\001' | ||||||
|  |   _NULLVALUE._serialized_start=474 | ||||||
|  |   _NULLVALUE._serialized_end=501 | ||||||
|  |   _STRUCT._serialized_start=50 | ||||||
|  |   _STRUCT._serialized_end=182 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_start=113 | ||||||
|  |   _STRUCT_FIELDSENTRY._serialized_end=182 | ||||||
|  |   _VALUE._serialized_start=185 | ||||||
|  |   _VALUE._serialized_end=419 | ||||||
|  |   _LISTVALUE._serialized_start=421 | ||||||
|  |   _LISTVALUE._serialized_end=472 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
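The Struct, Value, and ListValue messages registered by this file gain dict-like helpers from the protobuf well-known-types mixin, so values can be assigned without touching the `kind` oneof directly. A minimal sketch, assuming the google.protobuf runtime shipped in this patch:

from google.protobuf import json_format, struct_pb2

s = struct_pb2.Struct()
s['name'] = 'plugin.audio.spotify'   # stored as Value.string_value
s['tracks'] = 42                     # stored as Value.number_value
s['flags'] = {'cached': True}        # nested Struct via Value.struct_value
print(json_format.MessageToJson(s))  # renders as a plain JSON object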
							
								
								
									
194 lib/protobuf/symbol_database.py Normal file
|  | @ -0,0 +1,194 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """A database of Python protocol buffer generated symbols. | ||||||
|  | 
 | ||||||
|  | SymbolDatabase is the MessageFactory for messages generated at compile time, | ||||||
|  | and makes it easy to create new instances of a registered type, given only the | ||||||
|  | type's protocol buffer symbol name. | ||||||
|  | 
 | ||||||
|  | Example usage:: | ||||||
|  | 
 | ||||||
|  |   db = symbol_database.SymbolDatabase() | ||||||
|  | 
 | ||||||
|  |   # Register symbols of interest, from one or multiple files. | ||||||
|  |   db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR) | ||||||
|  |   db.RegisterMessage(my_proto_pb2.MyMessage) | ||||||
|  |   db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR) | ||||||
|  | 
 | ||||||
|  |   # The database can be used as a MessageFactory, to generate types based on | ||||||
|  |   # their name: | ||||||
|  |   types = db.GetMessages(['my_proto.proto']) | ||||||
|  |   my_message_instance = types['MyMessage']() | ||||||
|  | 
 | ||||||
|  |   # The database's underlying descriptor pool can be queried, so it's not | ||||||
|  |   # necessary to know a type's filename to be able to generate it: | ||||||
|  |   filename = db.pool.FindFileContainingSymbol('MyMessage') | ||||||
|  |   my_message_instance = db.GetMessages([filename])['MyMessage']() | ||||||
|  | 
 | ||||||
|  |   # This functionality is also provided directly via a convenience method: | ||||||
|  |   my_message_instance = db.GetSymbol('MyMessage')() | ||||||
|  | """ | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf.internal import api_implementation | ||||||
|  | from google.protobuf import descriptor_pool | ||||||
|  | from google.protobuf import message_factory | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SymbolDatabase(message_factory.MessageFactory): | ||||||
|  |   """A database of Python generated symbols.""" | ||||||
|  | 
 | ||||||
|  |   def RegisterMessage(self, message): | ||||||
|  |     """Registers the given message type in the local database. | ||||||
|  | 
 | ||||||
|  |     Calls to GetSymbol() and GetMessages() will return messages registered here. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message: A :class:`google.protobuf.message.Message` subclass (or | ||||||
|  |         instance); its descriptor will be registered. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       The provided message. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     desc = message.DESCRIPTOR | ||||||
|  |     self._classes[desc] = message | ||||||
|  |     self.RegisterMessageDescriptor(desc) | ||||||
|  |     return message | ||||||
|  | 
 | ||||||
|  |   def RegisterMessageDescriptor(self, message_descriptor): | ||||||
|  |     """Registers the given message descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       message_descriptor (Descriptor): the message descriptor to add. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddDescriptor(message_descriptor) | ||||||
|  | 
 | ||||||
|  |   def RegisterEnumDescriptor(self, enum_descriptor): | ||||||
|  |     """Registers the given enum descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       enum_descriptor (EnumDescriptor): The enum descriptor to register. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       EnumDescriptor: The provided descriptor. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddEnumDescriptor(enum_descriptor) | ||||||
|  |     return enum_descriptor | ||||||
|  | 
 | ||||||
|  |   def RegisterServiceDescriptor(self, service_descriptor): | ||||||
|  |     """Registers the given service descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       service_descriptor (ServiceDescriptor): the service descriptor to | ||||||
|  |         register. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._AddServiceDescriptor(service_descriptor) | ||||||
|  | 
 | ||||||
|  |   def RegisterFileDescriptor(self, file_descriptor): | ||||||
|  |     """Registers the given file descriptor in the local database. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       file_descriptor (FileDescriptor): The file descriptor to register. | ||||||
|  |     """ | ||||||
|  |     if api_implementation.Type() == 'python': | ||||||
|  |       # pylint: disable=protected-access | ||||||
|  |       self.pool._InternalAddFileDescriptor(file_descriptor) | ||||||
|  | 
 | ||||||
|  |   def GetSymbol(self, symbol): | ||||||
|  |     """Tries to find a symbol in the local database. | ||||||
|  | 
 | ||||||
|  |     Currently, this method only returns message.Message subclasses; however, it | ||||||
|  |     may be extended in the future to support other symbol types. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       symbol (str): a protocol buffer symbol. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A Python class corresponding to the symbol. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if the symbol could not be found. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     return self._classes[self.pool.FindMessageTypeByName(symbol)] | ||||||
|  | 
 | ||||||
|  |   def GetMessages(self, files): | ||||||
|  |     # TODO(amauryfa): Fix the differences with MessageFactory. | ||||||
|  |     """Gets all registered messages from a specified file. | ||||||
|  | 
 | ||||||
|  |     Only messages already created and registered will be returned (this is the | ||||||
|  |     case for imported _pb2 modules). Unlike MessageFactory, this version also | ||||||
|  |     returns nested messages that are already defined, but it does not register | ||||||
|  |     any message extensions. | ||||||
|  | 
 | ||||||
|  |     Args: | ||||||
|  |       files (list[str]): The file names to extract messages from. | ||||||
|  | 
 | ||||||
|  |     Returns: | ||||||
|  |       A dictionary mapping proto names to the message classes. | ||||||
|  | 
 | ||||||
|  |     Raises: | ||||||
|  |       KeyError: if a file could not be found. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def _GetAllMessages(desc): | ||||||
|  |       """Walk a message Descriptor and recursively yields all message names.""" | ||||||
|  |       yield desc | ||||||
|  |       for msg_desc in desc.nested_types: | ||||||
|  |         for nested_desc in _GetAllMessages(msg_desc): | ||||||
|  |           yield nested_desc | ||||||
|  | 
 | ||||||
|  |     result = {} | ||||||
|  |     for file_name in files: | ||||||
|  |       file_desc = self.pool.FindFileByName(file_name) | ||||||
|  |       for msg_desc in file_desc.message_types_by_name.values(): | ||||||
|  |         for desc in _GetAllMessages(msg_desc): | ||||||
|  |           try: | ||||||
|  |             result[desc.full_name] = self._classes[desc] | ||||||
|  |           except KeyError: | ||||||
|  |             # This descriptor has no registered class, skip it. | ||||||
|  |             pass | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _DEFAULT = SymbolDatabase(pool=descriptor_pool.Default()) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def Default(): | ||||||
|  |   """Returns the default SymbolDatabase.""" | ||||||
|  |   return _DEFAULT | ||||||
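A short sketch of the default database in practice: importing a generated _pb2 module registers its message classes (the builder module calls RegisterMessage for each one), after which they can be looked up by full name or by file.

from google.protobuf import symbol_database
from google.protobuf import timestamp_pb2  # importing registers Timestamp

sym_db = symbol_database.Default()
ts_cls = sym_db.GetSymbol('google.protobuf.Timestamp')
assert ts_cls is timestamp_pb2.Timestamp

by_file = sym_db.GetMessages(['google/protobuf/timestamp.proto'])
assert by_file['google.protobuf.Timestamp'] is timestamp_pb2.Timestamp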
							
								
								
									
110 lib/protobuf/text_encoding.py Normal file
|  | @ -0,0 +1,110 @@ | ||||||
|  | # Protocol Buffers - Google's data interchange format | ||||||
|  | # Copyright 2008 Google Inc.  All rights reserved. | ||||||
|  | # https://developers.google.com/protocol-buffers/ | ||||||
|  | # | ||||||
|  | # Redistribution and use in source and binary forms, with or without | ||||||
|  | # modification, are permitted provided that the following conditions are | ||||||
|  | # met: | ||||||
|  | # | ||||||
|  | #     * Redistributions of source code must retain the above copyright | ||||||
|  | # notice, this list of conditions and the following disclaimer. | ||||||
|  | #     * Redistributions in binary form must reproduce the above | ||||||
|  | # copyright notice, this list of conditions and the following disclaimer | ||||||
|  | # in the documentation and/or other materials provided with the | ||||||
|  | # distribution. | ||||||
|  | #     * Neither the name of Google Inc. nor the names of its | ||||||
|  | # contributors may be used to endorse or promote products derived from | ||||||
|  | # this software without specific prior written permission. | ||||||
|  | # | ||||||
|  | # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS | ||||||
|  | # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR | ||||||
|  | # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT | ||||||
|  | # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, | ||||||
|  | # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT | ||||||
|  | # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, | ||||||
|  | # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY | ||||||
|  | # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT | ||||||
|  | # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE | ||||||
|  | # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. | ||||||
|  | 
 | ||||||
|  | """Encoding related utilities.""" | ||||||
|  | import re | ||||||
|  | 
 | ||||||
|  | _cescape_chr_to_symbol_map = {} | ||||||
|  | _cescape_chr_to_symbol_map[9] = r'\t'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[10] = r'\n'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[13] = r'\r'  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[34] = r'\"'  # necessary escape | ||||||
|  | _cescape_chr_to_symbol_map[39] = r"\'"  # optional escape | ||||||
|  | _cescape_chr_to_symbol_map[92] = r'\\'  # necessary escape | ||||||
|  | 
 | ||||||
|  | # Lookup table for unicode | ||||||
|  | _cescape_unicode_to_str = [chr(i) for i in range(0, 256)] | ||||||
|  | for byte, string in _cescape_chr_to_symbol_map.items(): | ||||||
|  |   _cescape_unicode_to_str[byte] = string | ||||||
|  | 
 | ||||||
|  | # Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32) | ||||||
|  | _cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] + | ||||||
|  |                         [chr(i) for i in range(32, 127)] + | ||||||
|  |                         [r'\%03o' % i for i in range(127, 256)]) | ||||||
|  | for byte, string in _cescape_chr_to_symbol_map.items(): | ||||||
|  |   _cescape_byte_to_str[byte] = string | ||||||
|  | del byte, string | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def CEscape(text, as_utf8): | ||||||
|  |   # type: (...) -> str | ||||||
|  |   """Escape a bytes string for use in an text protocol buffer. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: A byte string to be escaped. | ||||||
|  |     as_utf8: Specifies if result may contain non-ASCII characters. | ||||||
|  |         In Python 3 this allows unescaped non-ASCII Unicode characters. | ||||||
|  |         In Python 2 the return value will be valid UTF-8 rather than only ASCII. | ||||||
|  |   Returns: | ||||||
|  |     Escaped string (str). | ||||||
|  |   """ | ||||||
|  |   # Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not | ||||||
|  |   # satisfy our needs; they encode unprintable characters using two-digit hex | ||||||
|  |   # escapes whereas our C++ unescaping function allows hex escapes to be any | ||||||
|  |   # length.  So, "\0011".encode('string_escape') ends up being "\\x011", which | ||||||
|  |   # will be decoded in C++ as a single-character string with char code 0x11. | ||||||
|  |   text_is_unicode = isinstance(text, str) | ||||||
|  |   if as_utf8 and text_is_unicode: | ||||||
|  |     # We're already unicode, no processing beyond control char escapes. | ||||||
|  |     return text.translate(_cescape_chr_to_symbol_map) | ||||||
|  |   ord_ = ord if text_is_unicode else lambda x: x  # bytes iterate as ints. | ||||||
|  |   if as_utf8: | ||||||
|  |     return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text) | ||||||
|  |   return ''.join(_cescape_byte_to_str[ord_(c)] for c in text) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def CUnescape(text): | ||||||
|  |   # type: (str) -> bytes | ||||||
|  |   """Unescape a text string with C-style escape sequences to UTF-8 bytes. | ||||||
|  | 
 | ||||||
|  |   Args: | ||||||
|  |     text: The data to parse in a str. | ||||||
|  |   Returns: | ||||||
|  |     A byte string. | ||||||
|  |   """ | ||||||
|  | 
 | ||||||
|  |   def ReplaceHex(m): | ||||||
|  |     # Only replace the match if the number of leading backslashes is odd, i.e. | ||||||
|  |     # the slash itself is not escaped. | ||||||
|  |     if len(m.group(1)) & 1: | ||||||
|  |       return m.group(1) + 'x0' + m.group(2) | ||||||
|  |     return m.group(0) | ||||||
|  | 
 | ||||||
|  |   # This is required because the 'string_escape' encoding doesn't | ||||||
|  |   # allow single-digit hex escapes (like '\xf'). | ||||||
|  |   result = _CUNESCAPE_HEX.sub(ReplaceHex, text) | ||||||
|  | 
 | ||||||
|  |   return (result.encode('utf-8')  # Make it bytes to allow decode. | ||||||
|  |           .decode('unicode_escape') | ||||||
|  |           # Make it bytes again to return the proper type. | ||||||
|  |           .encode('raw_unicode_escape')) | ||||||
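To make the escaping rules concrete, a small round trip: with as_utf8=False every byte outside the printable ASCII range comes back as a three-digit octal escape, and CUnescape restores the original bytes.

from google.protobuf.text_encoding import CEscape, CUnescape

raw = b'caf\xc3\xa9\n\x00'           # UTF-8 'cafe' with accent, newline, NUL
escaped = CEscape(raw, as_utf8=False)
print(escaped)                       # caf\303\251\n\000
assert CUnescape(escaped) == raw     # lossless round trip

# With as_utf8=True, str input keeps its non-ASCII characters; only control
# characters, quotes, and backslashes are escaped.
print(CEscape('café', as_utf8=True))  # café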
							
								
								
									
1795 lib/protobuf/text_format.py Normal file
26 lib/protobuf/timestamp_pb2.py Normal file
|  | @ -0,0 +1,26 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/timestamp.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1fgoogle/protobuf/timestamp.proto\x12\x0fgoogle.protobuf\"+\n\tTimestamp\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42\x85\x01\n\x13\x63om.google.protobufB\x0eTimestampProtoP\x01Z2google.golang.org/protobuf/types/known/timestamppb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.timestamp_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\016TimestampProtoP\001Z2google.golang.org/protobuf/types/known/timestamppb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _TIMESTAMP._serialized_start=52 | ||||||
|  |   _TIMESTAMP._serialized_end=95 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
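Timestamp is another well-known type whose generated class picks up helper methods from the protobuf runtime, which is what makes the raw seconds/nanos pair comfortable to work with; a minimal sketch:

import datetime
from google.protobuf import timestamp_pb2

ts = timestamp_pb2.Timestamp()
ts.GetCurrentTime()           # fills seconds + nanos from the system clock
print(ts.ToJsonString())      # RFC 3339, e.g. 2022-05-01T12:00:00Z

ts.FromDatetime(datetime.datetime(2022, 5, 1))
assert ts.ToDatetime() == datetime.datetime(2022, 5, 1)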
							
								
								
									
42 lib/protobuf/type_pb2.py Normal file
|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/type.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2 | ||||||
|  | from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1agoogle/protobuf/type.proto\x12\x0fgoogle.protobuf\x1a\x19google/protobuf/any.proto\x1a$google/protobuf/source_context.proto\"\xd7\x01\n\x04Type\x12\x0c\n\x04name\x18\x01 \x01(\t\x12&\n\x06\x66ields\x18\x02 \x03(\x0b\x32\x16.google.protobuf.Field\x12\x0e\n\x06oneofs\x18\x03 \x03(\t\x12(\n\x07options\x18\x04 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x06 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x05\n\x05\x46ield\x12)\n\x04kind\x18\x01 \x01(\x0e\x32\x1b.google.protobuf.Field.Kind\x12\x37\n\x0b\x63\x61rdinality\x18\x02 \x01(\x0e\x32\".google.protobuf.Field.Cardinality\x12\x0e\n\x06number\x18\x03 \x01(\x05\x12\x0c\n\x04name\x18\x04 \x01(\t\x12\x10\n\x08type_url\x18\x06 \x01(\t\x12\x13\n\x0boneof_index\x18\x07 \x01(\x05\x12\x0e\n\x06packed\x18\x08 \x01(\x08\x12(\n\x07options\x18\t \x03(\x0b\x32\x17.google.protobuf.Option\x12\x11\n\tjson_name\x18\n \x01(\t\x12\x15\n\rdefault_value\x18\x0b \x01(\t\"\xc8\x02\n\x04Kind\x12\x10\n\x0cTYPE_UNKNOWN\x10\x00\x12\x0f\n\x0bTYPE_DOUBLE\x10\x01\x12\x0e\n\nTYPE_FLOAT\x10\x02\x12\x0e\n\nTYPE_INT64\x10\x03\x12\x0f\n\x0bTYPE_UINT64\x10\x04\x12\x0e\n\nTYPE_INT32\x10\x05\x12\x10\n\x0cTYPE_FIXED64\x10\x06\x12\x10\n\x0cTYPE_FIXED32\x10\x07\x12\r\n\tTYPE_BOOL\x10\x08\x12\x0f\n\x0bTYPE_STRING\x10\t\x12\x0e\n\nTYPE_GROUP\x10\n\x12\x10\n\x0cTYPE_MESSAGE\x10\x0b\x12\x0e\n\nTYPE_BYTES\x10\x0c\x12\x0f\n\x0bTYPE_UINT32\x10\r\x12\r\n\tTYPE_ENUM\x10\x0e\x12\x11\n\rTYPE_SFIXED32\x10\x0f\x12\x11\n\rTYPE_SFIXED64\x10\x10\x12\x0f\n\x0bTYPE_SINT32\x10\x11\x12\x0f\n\x0bTYPE_SINT64\x10\x12\"t\n\x0b\x43\x61rdinality\x12\x17\n\x13\x43\x41RDINALITY_UNKNOWN\x10\x00\x12\x18\n\x14\x43\x41RDINALITY_OPTIONAL\x10\x01\x12\x18\n\x14\x43\x41RDINALITY_REQUIRED\x10\x02\x12\x18\n\x14\x43\x41RDINALITY_REPEATED\x10\x03\"\xce\x01\n\x04\x45num\x12\x0c\n\x04name\x18\x01 \x01(\t\x12-\n\tenumvalue\x18\x02 \x03(\x0b\x32\x1a.google.protobuf.EnumValue\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x36\n\x0esource_context\x18\x04 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12\'\n\x06syntax\x18\x05 \x01(\x0e\x32\x17.google.protobuf.Syntax\"S\n\tEnumValue\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0e\n\x06number\x18\x02 \x01(\x05\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\";\n\x06Option\x12\x0c\n\x04name\x18\x01 \x01(\t\x12#\n\x05value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any*.\n\x06Syntax\x12\x11\n\rSYNTAX_PROTO2\x10\x00\x12\x11\n\rSYNTAX_PROTO3\x10\x01\x42{\n\x13\x63om.google.protobufB\tTypeProtoP\x01Z-google.golang.org/protobuf/types/known/typepb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.type_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\tTypeProtoP\001Z-google.golang.org/protobuf/types/known/typepb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _SYNTAX._serialized_start=1413 | ||||||
|  |   _SYNTAX._serialized_end=1459 | ||||||
|  |   _TYPE._serialized_start=113 | ||||||
|  |   _TYPE._serialized_end=328 | ||||||
|  |   _FIELD._serialized_start=331 | ||||||
|  |   _FIELD._serialized_end=1056 | ||||||
|  |   _FIELD_KIND._serialized_start=610 | ||||||
|  |   _FIELD_KIND._serialized_end=938 | ||||||
|  |   _FIELD_CARDINALITY._serialized_start=940 | ||||||
|  |   _FIELD_CARDINALITY._serialized_end=1056 | ||||||
|  |   _ENUM._serialized_start=1059 | ||||||
|  |   _ENUM._serialized_end=1265 | ||||||
|  |   _ENUMVALUE._serialized_start=1267 | ||||||
|  |   _ENUMVALUE._serialized_end=1350 | ||||||
|  |   _OPTION._serialized_start=1352 | ||||||
|  |   _OPTION._serialized_end=1411 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
0 lib/protobuf/util/__init__.py Normal file
72 lib/protobuf/util/json_format_pb2.py Normal file
|  | @ -0,0 +1,72 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/util/json_format.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&google/protobuf/util/json_format.proto\x12\x11protobuf_unittest\"\x89\x01\n\x13TestFlagsAndStrings\x12\t\n\x01\x41\x18\x01 \x02(\x05\x12K\n\rrepeatedgroup\x18\x02 \x03(\n24.protobuf_unittest.TestFlagsAndStrings.RepeatedGroup\x1a\x1a\n\rRepeatedGroup\x12\t\n\x01\x66\x18\x03 \x02(\t\"!\n\x14TestBase64ByteArrays\x12\t\n\x01\x61\x18\x01 \x02(\x0c\"G\n\x12TestJavaScriptJSON\x12\t\n\x01\x61\x18\x01 \x01(\x05\x12\r\n\x05\x66inal\x18\x02 \x01(\x02\x12\n\n\x02in\x18\x03 \x01(\t\x12\x0b\n\x03Var\x18\x04 \x01(\t\"Q\n\x18TestJavaScriptOrderJSON1\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\"\x89\x01\n\x18TestJavaScriptOrderJSON2\x12\t\n\x01\x64\x18\x01 \x01(\x05\x12\t\n\x01\x63\x18\x02 \x01(\x05\x12\t\n\x01x\x18\x03 \x01(\x08\x12\t\n\x01\x62\x18\x04 \x01(\x05\x12\t\n\x01\x61\x18\x05 \x01(\x05\x12\x36\n\x01z\x18\x06 \x03(\x0b\x32+.protobuf_unittest.TestJavaScriptOrderJSON1\"$\n\x0cTestLargeInt\x12\t\n\x01\x61\x18\x01 \x02(\x03\x12\t\n\x01\x62\x18\x02 \x02(\x04\"\xa0\x01\n\x0bTestNumbers\x12\x30\n\x01\x61\x18\x01 \x01(\x0e\x32%.protobuf_unittest.TestNumbers.MyType\x12\t\n\x01\x62\x18\x02 \x01(\x05\x12\t\n\x01\x63\x18\x03 \x01(\x02\x12\t\n\x01\x64\x18\x04 \x01(\x08\x12\t\n\x01\x65\x18\x05 \x01(\x01\x12\t\n\x01\x66\x18\x06 \x01(\r\"(\n\x06MyType\x12\x06\n\x02OK\x10\x00\x12\x0b\n\x07WARNING\x10\x01\x12\t\n\x05\x45RROR\x10\x02\"T\n\rTestCamelCase\x12\x14\n\x0cnormal_field\x18\x01 \x01(\t\x12\x15\n\rCAPITAL_FIELD\x18\x02 \x01(\x05\x12\x16\n\x0e\x43\x61melCaseField\x18\x03 \x01(\x05\"|\n\x0bTestBoolMap\x12=\n\x08\x62ool_map\x18\x01 \x03(\x0b\x32+.protobuf_unittest.TestBoolMap.BoolMapEntry\x1a.\n\x0c\x42oolMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01\"O\n\rTestRecursion\x12\r\n\x05value\x18\x01 \x01(\x05\x12/\n\x05\x63hild\x18\x02 \x01(\x0b\x32 .protobuf_unittest.TestRecursion\"\x86\x01\n\rTestStringMap\x12\x43\n\nstring_map\x18\x01 \x03(\x0b\x32/.protobuf_unittest.TestStringMap.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"\xc4\x01\n\x14TestStringSerializer\x12\x15\n\rscalar_string\x18\x01 \x01(\t\x12\x17\n\x0frepeated_string\x18\x02 \x03(\t\x12J\n\nstring_map\x18\x03 \x03(\x0b\x32\x36.protobuf_unittest.TestStringSerializer.StringMapEntry\x1a\x30\n\x0eStringMapEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"$\n\x18TestMessageWithExtension*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"z\n\rTestExtension\x12\r\n\x05value\x18\x01 \x01(\t2Z\n\x03\x65xt\x12+.protobuf_unittest.TestMessageWithExtension\x18\x64 \x01(\x0b\x32 .protobuf_unittest.TestExtension\"Q\n\x14TestDefaultEnumValue\x12\x39\n\nenum_value\x18\x01 \x01(\x0e\x32\x1c.protobuf_unittest.EnumValue:\x07\x44\x45\x46\x41ULT*2\n\tEnumValue\x12\x0c\n\x08PROTOCOL\x10\x00\x12\n\n\x06\x42UFFER\x10\x01\x12\x0b\n\x07\x44\x45\x46\x41ULT\x10\x02') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.util.json_format_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  |   TestMessageWithExtension.RegisterExtension(_TESTEXTENSION.extensions_by_name['ext']) | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._options = None | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._options = None | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._options = None | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_options = b'8\001' | ||||||
|  |   _ENUMVALUE._serialized_start=1607 | ||||||
|  |   _ENUMVALUE._serialized_end=1657 | ||||||
|  |   _TESTFLAGSANDSTRINGS._serialized_start=62 | ||||||
|  |   _TESTFLAGSANDSTRINGS._serialized_end=199 | ||||||
|  |   _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_start=173 | ||||||
|  |   _TESTFLAGSANDSTRINGS_REPEATEDGROUP._serialized_end=199 | ||||||
|  |   _TESTBASE64BYTEARRAYS._serialized_start=201 | ||||||
|  |   _TESTBASE64BYTEARRAYS._serialized_end=234 | ||||||
|  |   _TESTJAVASCRIPTJSON._serialized_start=236 | ||||||
|  |   _TESTJAVASCRIPTJSON._serialized_end=307 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON1._serialized_start=309 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON1._serialized_end=390 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON2._serialized_start=393 | ||||||
|  |   _TESTJAVASCRIPTORDERJSON2._serialized_end=530 | ||||||
|  |   _TESTLARGEINT._serialized_start=532 | ||||||
|  |   _TESTLARGEINT._serialized_end=568 | ||||||
|  |   _TESTNUMBERS._serialized_start=571 | ||||||
|  |   _TESTNUMBERS._serialized_end=731 | ||||||
|  |   _TESTNUMBERS_MYTYPE._serialized_start=691 | ||||||
|  |   _TESTNUMBERS_MYTYPE._serialized_end=731 | ||||||
|  |   _TESTCAMELCASE._serialized_start=733 | ||||||
|  |   _TESTCAMELCASE._serialized_end=817 | ||||||
|  |   _TESTBOOLMAP._serialized_start=819 | ||||||
|  |   _TESTBOOLMAP._serialized_end=943 | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_start=897 | ||||||
|  |   _TESTBOOLMAP_BOOLMAPENTRY._serialized_end=943 | ||||||
|  |   _TESTRECURSION._serialized_start=945 | ||||||
|  |   _TESTRECURSION._serialized_end=1024 | ||||||
|  |   _TESTSTRINGMAP._serialized_start=1027 | ||||||
|  |   _TESTSTRINGMAP._serialized_end=1161 | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_start=1113 | ||||||
|  |   _TESTSTRINGMAP_STRINGMAPENTRY._serialized_end=1161 | ||||||
|  |   _TESTSTRINGSERIALIZER._serialized_start=1164 | ||||||
|  |   _TESTSTRINGSERIALIZER._serialized_end=1360 | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_start=1113 | ||||||
|  |   _TESTSTRINGSERIALIZER_STRINGMAPENTRY._serialized_end=1161 | ||||||
|  |   _TESTMESSAGEWITHEXTENSION._serialized_start=1362 | ||||||
|  |   _TESTMESSAGEWITHEXTENSION._serialized_end=1398 | ||||||
|  |   _TESTEXTENSION._serialized_start=1400 | ||||||
|  |   _TESTEXTENSION._serialized_end=1522 | ||||||
|  |   _TESTDEFAULTENUMVALUE._serialized_start=1524 | ||||||
|  |   _TESTDEFAULTENUMVALUE._serialized_end=1605 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
129 lib/protobuf/util/json_format_proto3_pb2.py Normal file
42 lib/protobuf/wrappers_pb2.py Normal file
|  | @ -0,0 +1,42 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | # Generated by the protocol buffer compiler.  DO NOT EDIT! | ||||||
|  | # source: google/protobuf/wrappers.proto | ||||||
|  | """Generated protocol buffer code.""" | ||||||
|  | from google.protobuf.internal import builder as _builder | ||||||
|  | from google.protobuf import descriptor as _descriptor | ||||||
|  | from google.protobuf import descriptor_pool as _descriptor_pool | ||||||
|  | from google.protobuf import symbol_database as _symbol_database | ||||||
|  | # @@protoc_insertion_point(imports) | ||||||
|  | 
 | ||||||
|  | _sym_db = _symbol_database.Default() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x1egoogle/protobuf/wrappers.proto\x12\x0fgoogle.protobuf\"\x1c\n\x0b\x44oubleValue\x12\r\n\x05value\x18\x01 \x01(\x01\"\x1b\n\nFloatValue\x12\r\n\x05value\x18\x01 \x01(\x02\"\x1b\n\nInt64Value\x12\r\n\x05value\x18\x01 \x01(\x03\"\x1c\n\x0bUInt64Value\x12\r\n\x05value\x18\x01 \x01(\x04\"\x1b\n\nInt32Value\x12\r\n\x05value\x18\x01 \x01(\x05\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1b\n\nBytesValue\x12\r\n\x05value\x18\x01 \x01(\x0c\x42\x83\x01\n\x13\x63om.google.protobufB\rWrappersProtoP\x01Z1google.golang.org/protobuf/types/known/wrapperspb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3') | ||||||
|  | 
 | ||||||
|  | _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) | ||||||
|  | _builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'google.protobuf.wrappers_pb2', globals()) | ||||||
|  | if _descriptor._USE_C_DESCRIPTORS == False: | ||||||
|  | 
 | ||||||
|  |   DESCRIPTOR._options = None | ||||||
|  |   DESCRIPTOR._serialized_options = b'\n\023com.google.protobufB\rWrappersProtoP\001Z1google.golang.org/protobuf/types/known/wrapperspb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes' | ||||||
|  |   _DOUBLEVALUE._serialized_start=51 | ||||||
|  |   _DOUBLEVALUE._serialized_end=79 | ||||||
|  |   _FLOATVALUE._serialized_start=81 | ||||||
|  |   _FLOATVALUE._serialized_end=108 | ||||||
|  |   _INT64VALUE._serialized_start=110 | ||||||
|  |   _INT64VALUE._serialized_end=137 | ||||||
|  |   _UINT64VALUE._serialized_start=139 | ||||||
|  |   _UINT64VALUE._serialized_end=167 | ||||||
|  |   _INT32VALUE._serialized_start=169 | ||||||
|  |   _INT32VALUE._serialized_end=196 | ||||||
|  |   _UINT32VALUE._serialized_start=198 | ||||||
|  |   _UINT32VALUE._serialized_end=226 | ||||||
|  |   _BOOLVALUE._serialized_start=228 | ||||||
|  |   _BOOLVALUE._serialized_end=254 | ||||||
|  |   _STRINGVALUE._serialized_start=256 | ||||||
|  |   _STRINGVALUE._serialized_end=284 | ||||||
|  |   _BYTESVALUE._serialized_start=286 | ||||||
|  |   _BYTESVALUE._serialized_end=313 | ||||||
|  | # @@protoc_insertion_point(module_scope) | ||||||
							
								
								
									
										
BIN resources/fanart.jpg Normal file | After Width: | Height: | Size: 153 KiB |
BIN resources/icon.png Normal file | After Width: | Height: | Size: 17 KiB |
BIN resources/icon_clear_cache.png Normal file | After Width: | Height: | Size: 2.4 KiB |
BIN resources/icon_music_albums.png Normal file | After Width: | Height: | Size: 2.2 KiB |
BIN resources/icon_music_artists.png Normal file | After Width: | Height: | Size: 2.9 KiB |
BIN resources/icon_music_explore.png Normal file | After Width: | Height: | Size: 4.7 KiB |
BIN resources/icon_music_library.png Normal file | After Width: | Height: | Size: 3.1 KiB |
BIN resources/icon_music_playlists.png Normal file | After Width: | Height: | Size: 964 B |
BIN resources/icon_music_search.png Normal file | After Width: | Height: | Size: 1.9 KiB |
BIN resources/icon_music_songs.png Normal file | After Width: | Height: | Size: 1.5 KiB |
BIN resources/icon_music_top_artists.png Normal file | After Width: | Height: | Size: 1.4 KiB |
BIN resources/icon_music_top_tracks.png Normal file | After Width: | Height: | Size: 301 B |
179 resources/language/resource.language.de_de/strings.po Normal file
|  | @ -0,0 +1,179 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: tobhor\n" | ||||||
|  | "Language-Team: German\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: de\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Nutzername" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Passwort" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Neuerscheinungen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Zu meiner Musik hinzufügen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Aus meiner Musik entfernen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Folgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Nicht mehr folgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Beliebteste Lieder des Künstlers" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Ähnliche Künstler" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Meine Musik" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Entdecken" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Empfohlene Wiedergabelisten" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Neuerscheinungen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Aus Playlist entfernen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Alle Alben des Künstlers" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Meistgespielte Künstler" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Meistgespielte Lieder" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Künstler folgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Künstler nicht mehr folgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Eintrag aktualisieren" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "Anmeldedetails" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "Künstlername an Songtitel anhängen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "Standardansicht für Wiedergabeliste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "Standardansicht für Künstlerliste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "Standardansicht für Albenliste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "Standardansicht für Songliste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "Aktueller Benutzer" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "Keine Anmeldedaten vorhanden oder Anmeldung fehlgeschlagen. \n Bitte gebe deine Anmeldedaten im folgenden Einstellungsdialog ein." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "Audio" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "Ansichten" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "My recently played playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Gap between tracks when playing a playlist (secs)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Clear the plugin cache" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Followed artists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "" | ||||||
							
								
								
									
175 resources/language/resource.language.en_gb/strings.po Normal file
|  | @ -0,0 +1,175 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: logi85\n" | ||||||
|  | "Language-Team: English\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: en\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "" | ||||||
175 resources/language/resource.language.es_ar/strings.po Normal file
						|  | @ -0,0 +1,175 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: trihy\n" | ||||||
|  | "Language-Team: English\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: es_ar\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Nombre de Usuario" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Contraseña" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Guardar en Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Remover de Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Dejar de Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Canciones Top del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Artistas Relacionados" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Explorar" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Playlists Destacadas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Remover del Playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Todos los Albumes del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "Vista por Defecto para Categorías" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Artistas más Reproducidos " | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Canciones más Reproducidas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Dejar de Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Refrescar Listado" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "Detalles de Login" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "Adjuntar Nombre del Artista a Título de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "Vista por Defecto para Playlists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Album" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "Usuario Actual" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "Audio" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "Vistas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "Mi Playlist reproducida Recientemente" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Espacio entre canciones cuando se reproduce una playlist (Segundos)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Limpiar cache del plugin" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Artistas Seguidos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "Cache de base de datos del plugin exitosamente limpiada" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "Usar Normalizacion de Spotify cuando reproduzca canciones" | ||||||
175 resources/language/resource.language.es_es/strings.po Normal file
						|  | @ -0,0 +1,175 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: trihy\n" | ||||||
|  | "Language-Team: English\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: es_es\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Nombre de Usuario" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Contraseña" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Guardar en Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Remover de Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Dejar de Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Canciones Top del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Artistas Relacionados" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Explorar" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Playlists Destacadas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Remover del Playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Todos los Albumes del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "Vista por Defecto para Categorías" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Artistas más Reproducidos " | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Canciones más Reproducidas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Dejar de Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Refrescar Listado" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "Detalles de Login" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "Adjuntar Nombre del Artista a Título de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "Vista por Defecto para Playlists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Album" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "Usuario Actual" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "Audio" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "Vistas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "Mi Playlist reproducida Recientemente" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Espacio entre canciones cuando se reproduce una playlist (Segundos)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Limpiar cache del plugin" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Artistas Seguidos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "Cache de base de datos del plugin exitosamente limpiada" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "Usar Normalizacion de Spotify cuando reproduzca canciones" | ||||||
175 resources/language/resource.language.es_mx/strings.po Normal file
						|  | @ -0,0 +1,175 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: trihy\n" | ||||||
|  | "Language-Team: English\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: es_mx\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Nombre de Usuario" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Contraseña" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Guardar en Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Remover de Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Dejar de Seguir" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Canciones Top del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Artistas Relacionados" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Mi Música" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Explorar" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Playlists Destacadas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nuevos Lanzamientos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Remover del Playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Todos los Albumes del Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "Vista por Defecto para Categorías" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Artistas más Reproducidos " | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Canciones más Reproducidas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Dejar de Seguir Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Refrescar Listado" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "Detalles de Login" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "Adjuntar Nombre del Artista a Título de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "Vista por Defecto para Playlists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Artista" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Album" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "Vista por Defecto para Lista de Canción" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "Usuario Actual" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "Audio" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "Vistas" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "Mi Playlist reproducida Recientemente" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Espacio entre canciones cuando se reproduce una playlist (Segundos)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Limpiar cache del plugin" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Artistas Seguidos" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "Cache de base de datos del plugin exitosamente limpiada" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "Usar Normalizacion de Spotify cuando reproduzca canciones" | ||||||
179 resources/language/resource.language.fr_fr/strings.po Normal file
						|  | @ -0,0 +1,179 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "Last-Translator: xsellier\n" | ||||||
|  | "Language-Team: French\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: en\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1)\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Nom d'utilisateur" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Mot de passe" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nouveautés" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Ajouter a Ma Musique" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Enlever de Ma Musique" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Suivre" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Arrêter de suivre" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Les meilleures chansons de l'artiste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Artistes similaires" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Ma Musique" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Explorer" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Listes de lectures mises en avant" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nouveautés" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Enlever de la liste de lecture" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Tous les albums de cet artiste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Les artistes les plus écoutés" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Les musiques les plus jouées" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Suivre l'artiste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Arrêter de suivre l'artiste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Rafraîchir la liste" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "Détails de l'authentification" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "Mettre l nom de l'artiste à la suite du titre de la chanson" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "Liste de lecture comme vue par défaut" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "Liste d'artistes comme vue par défaut" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "Liste d'albums comme vue par défaut" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "Liste de chansons comme vue par défaut" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "My recently played playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Gap between tracks when playing a playlist (secs)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Clear the plugin cache" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Followed artists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "" | ||||||
182 resources/language/resource.language.he_he/strings.po Normal file
						|  | @ -0,0 +1,182 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2017-09-28 14:10+0300\n" | ||||||
|  | "Last-Translator: A. Dambledore\n" | ||||||
|  | "Language-Team: Eng2Heb\n" | ||||||
|  | "Language: he_IL\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1);\n" | ||||||
|  | "X-Generator: Poedit 2.0.4\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "שם משתמש" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "ססמה" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "שיחרורים חדשים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "לשמור אל המוזיקה שלי" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "הסר מהמוזיקה שלי" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "עקוב" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "ביטול מעקב" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "השירים הכי טובים של האמן" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "אמנים קשורים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "המוזיקה שלי" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "חקור" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "רשימות ניגון מומלצות" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "שיחרורים חדשים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "הסר מרשימת ניגון" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "כל האלבומים עבור אמן" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "האמנים הכי מושמעים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "השירים הכי מושמעים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "עקוב אחר האמן" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "בטל עקיבה אחר האמן" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "רענן את הרישום" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11028" | ||||||
|  | msgid "Login details" | ||||||
|  | msgstr "פרטי התחברות" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11030" | ||||||
|  | msgid "Append artist name to song title" | ||||||
|  | msgstr "הוסף שם אמן לכותרת השיר" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "תצוגת ברירת המחדל עבור רשימות ניגון" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "תצוגת ברירת המחדל עבור רשימת האמנים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "תצוגת ברירת המחדל עבור רשימת אלבומים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "תצוגת ברירת המחדל עבור רשימת השירים" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "משתמש נוכחי" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | "לא הוגדרו אישורי כניסה או שהחיבור נכשל.\n" | ||||||
|  | "הזן את הפרטים שלך בתיבת הדו-שיח הגדרות." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "אודיו" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "צפיות" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "My recently played playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Gap between tracks when playing a playlist (secs)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Clear the plugin cache" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Followed artists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "" | ||||||
BIN resources/language/resource.language.nl_nl/strings.mo Normal file
179 resources/language/resource.language.nl_nl/strings.po Normal file
						|  | @ -0,0 +1,179 @@ | ||||||
|  | # Kodi Media Center language file | ||||||
|  | # Addon Name: Spotify | ||||||
|  | # Addon id: plugin.audio.spotify | ||||||
|  | # Addon Provider: marcelveldt | ||||||
|  | msgid "" | ||||||
|  | msgstr "" | ||||||
|  | "Project-Id-Version: Kodi-Addons\n" | ||||||
|  | "Report-Msgid-Bugs-To: alanwww1@xbmc.org\n" | ||||||
|  | "POT-Creation-Date: 2015-11-04 18:30+0100\n" | ||||||
|  | "PO-Revision-Date: 2019-07-18 15:34+0200\n" | ||||||
|  | "Last-Translator: logi85\n" | ||||||
|  | "Language-Team: Dutch\n" | ||||||
|  | "MIME-Version: 1.0\n" | ||||||
|  | "Content-Type: text/plain; charset=UTF-8\n" | ||||||
|  | "Content-Transfer-Encoding: 8bit\n" | ||||||
|  | "Language: nl\n" | ||||||
|  | "Plural-Forms: nplurals=2; plural=(n != 1);\n" | ||||||
|  | "X-Generator: Poedit 2.2.3\n" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11001" | ||||||
|  | msgid "Username" | ||||||
|  | msgstr "Gebruikersnaam" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11002" | ||||||
|  | msgid "Password" | ||||||
|  | msgstr "Wachtwoord" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11005" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nieuwe releases" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11007" | ||||||
|  | msgid "Save to My Music" | ||||||
|  | msgstr "Opslaan naar Mijn Muziek" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11008" | ||||||
|  | msgid "Remove from My Music" | ||||||
|  | msgstr "Verwijder uit Mijn Muziek" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11009" | ||||||
|  | msgid "Follow" | ||||||
|  | msgstr "Volgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11010" | ||||||
|  | msgid "Unfollow" | ||||||
|  | msgstr "Niet meer volgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11011" | ||||||
|  | msgid "Artist top tracks" | ||||||
|  | msgstr "Artiest top-nummers" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11012" | ||||||
|  | msgid "Related artists" | ||||||
|  | msgstr "Gerelateerde artiesten" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11013" | ||||||
|  | msgid "My Music" | ||||||
|  | msgstr "Mijn Muziek" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11014" | ||||||
|  | msgid "Explore" | ||||||
|  | msgstr "Verkennen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11015" | ||||||
|  | msgid "Featured playlists" | ||||||
|  | msgstr "Aanbevolen afspeellijsten" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11016" | ||||||
|  | msgid "New releases" | ||||||
|  | msgstr "Nieuwe releases" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11017" | ||||||
|  | msgid "Remove from playlist" | ||||||
|  | msgstr "Verwijder uit afspeellijst" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11018" | ||||||
|  | msgid "All albums for artist" | ||||||
|  | msgstr "Alle albums van artiest" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11020" | ||||||
|  | msgid "Default view for categories" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11023" | ||||||
|  | msgid "Most played artists" | ||||||
|  | msgstr "Meest afgespeelde artiesten" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11024" | ||||||
|  | msgid "Most played tracks" | ||||||
|  | msgstr "Meest afgespeelde liedjes" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11025" | ||||||
|  | msgid "Follow artist" | ||||||
|  | msgstr "Volg artiest" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11026" | ||||||
|  | msgid "Unfollow artist" | ||||||
|  | msgstr "Artiest niet volgen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11027" | ||||||
|  | msgid "Refresh listing" | ||||||
|  | msgstr "Lijst vernieuwen" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11036" | ||||||
|  | msgid "Enable this device as Spotify Connect target (experimental)" | ||||||
|  | msgstr "Schakel dit apparaat in als Spotify Connect doel (experimenteel)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11031" | ||||||
|  | msgid "Default view for playlists" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11032" | ||||||
|  | msgid "Default view for artist list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11033" | ||||||
|  | msgid "Default view for album list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11034" | ||||||
|  | msgid "Default view for song list" | ||||||
|  | msgstr "" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11047" | ||||||
|  | msgid "Current user" | ||||||
|  | msgstr "Huidige gebruiker" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11050" | ||||||
|  | msgid "" | ||||||
|  | "Spotify login failed. Either no credentials have been set or the Spotify username or password is incorrect.\n" | ||||||
|  | "Please check your username and password in the addon settings." | ||||||
|  | msgstr "" | ||||||
|  | "Er zijn geen inloggegevens gevonden of het inloggen faalt.\n" | ||||||
|  | "Voer alsjeblieft je inloggegevens in in de plugin instellingen." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11054" | ||||||
|  | msgid "Audio" | ||||||
|  | msgstr "Audio" | ||||||
|  | 
 | ||||||
|  | #, fuzzy | ||||||
|  | msgctxt "#11055" | ||||||
|  | msgid "Views" | ||||||
|  | msgstr "Weergave" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11069" | ||||||
|  | msgid "My recently played playlist" | ||||||
|  | msgstr "My recently played playlist" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11070" | ||||||
|  | msgid "Gap between tracks when playing a playlist (secs)" | ||||||
|  | msgstr "Gap between tracks when playing a playlist (secs)" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11071" | ||||||
|  | msgid "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "To give better audio streaming from Spotify, a video http rule was just added to 'userdata/playercorefactory.xml'.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11072" | ||||||
|  | msgid "Clear the plugin cache" | ||||||
|  | msgstr "Clear the plugin cache" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11073" | ||||||
|  | msgid "Followed artists" | ||||||
|  | msgstr "Followed artists" | ||||||
|  | 
 | ||||||
|  | msgctxt "#11074" | ||||||
|  | msgid "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | msgstr "" | ||||||
|  | "Successfully cleared the plugin cache database.\n" | ||||||
|  | "Please restart Kodi for this to take affect." | ||||||
|  | 
 | ||||||
|  | msgctxt "#11075" | ||||||
|  | msgid "Use Spotify normalization when playing tracks" | ||||||
|  | msgstr "" | ||||||
50 resources/lib/__init__.py Normal file
						|  | @ -0,0 +1,50 @@ | ||||||
|  | import os | ||||||
|  | import sys | ||||||
|  | 
 | ||||||
|  | sys.path.insert(1, os.path.dirname(__file__)) | ||||||
|  | 
 | ||||||
|  | # IMPORTANT: The 'cherrypy' module cannot be imported as a submodule from 'httpproxy.py'; | ||||||
|  | #   i.e., 'from deps import cherrypy' will not work (the reason is unclear). So we use the | ||||||
|  | #   following path hack to put 'cherrypy' on the module search path: | ||||||
|  | sys.path.insert(1, os.path.join(os.path.dirname(__file__), "deps")) | ||||||
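|  | 
|  | # With 'deps' on the search path, a bundled package can then be imported by | ||||||
|  | # its top-level name. A minimal sketch (assuming cherrypy really is bundled | ||||||
|  | # under resources/lib/deps/cherrypy): | ||||||
|  | # | ||||||
|  | #   import cherrypy  # resolved from resources/lib/deps/cherrypy | ||||||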
|  | 
 | ||||||
|  | 
 | ||||||
|  | # import pkgutil | ||||||
|  | 
 | ||||||
|  | # def list_submodules(list_name, package_name): | ||||||
|  | #     for loader, module_name, is_pkg in pkgutil.walk_packages( | ||||||
|  | #         package_name.__path__, package_name.__name__ + "." | ||||||
|  | #     ): | ||||||
|  | #         list_name.append(module_name) | ||||||
|  | #         print(f"module_name 1 = {module_name}, is_pkg = {is_pkg}.") | ||||||
|  | #         try: | ||||||
|  | #             module_name = __import__(module_name, fromlist="dummylist") | ||||||
|  | #         except Exception as ex: | ||||||
|  | #             print(ex) | ||||||
|  | #             module_name = None | ||||||
|  | #         print(f"module_name 2 = {module_name}.") | ||||||
|  | #         if is_pkg: | ||||||
|  | #             list_submodules(list_name, module_name) | ||||||
|  | # | ||||||
|  | # | ||||||
|  | # if len(sys.argv) != 2: | ||||||
|  | #     print("Usage: {} [PACKAGE-NAME]".format(os.path.basename(__file__))) | ||||||
|  | #     sys.exit(1) | ||||||
|  | # else: | ||||||
|  | #     package_name = sys.argv[1] | ||||||
|  | # | ||||||
|  | # print(f"package_name = '{package_name}'.") | ||||||
|  | # try: | ||||||
|  | #     package = __import__(package_name) | ||||||
|  | # except ImportError: | ||||||
|  | #     print("Package {} not found...".format(package_name)) | ||||||
|  | #     sys.exit(1) | ||||||
|  | # | ||||||
|  | # print(f"package.__path__ = '{package.__path__}'.") | ||||||
|  | # print(f"package.__name__ = '{package.__name__}'.") | ||||||
|  | # | ||||||
|  | # all_modules = [] | ||||||
|  | # list_submodules(all_modules, package) | ||||||
|  | # | ||||||
|  | # print(f"all_modules = {all_modules}.") | ||||||
|  | # sys.exit(1) | ||||||
61 resources/lib/bottle_manager.py Normal file
						|  | @ -0,0 +1,61 @@ | ||||||
|  | import re | ||||||
|  | from wsgiref.simple_server import make_server | ||||||
|  | 
|  | import xbmc | ||||||
|  | from bottle import Bottle, HTTPResponse, app, request | ||||||
|  | 
|  | from librespot.audio.decoders import AudioQuality, VorbisOnlyAudioQuality | ||||||
|  | from librespot.core import Session | ||||||
|  | from librespot.metadata import TrackId | ||||||
|  | 
|  | from utils import log_msg | ||||||
|  | 
 | ||||||
|  | class LibrespotServer(Bottle): | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         super(LibrespotServer, self).__init__() | ||||||
|  |         self.session: Session = session | ||||||
|  |         self.route('/track/<track_id>', callback=self.stream) | ||||||
|  | 
 | ||||||
|  |     # TODO: Add support for the HTTP Range request header (a parsing sketch follows this class). | ||||||
|  | 
 | ||||||
|  |     def stream(self, track_id): | ||||||
|  |         try: | ||||||
|  |             playabletrack_id = TrackId.from_uri(f"spotify:track:{track_id}") | ||||||
|  |             stream = self.session.content_feeder().load( | ||||||
|  |                 playabletrack_id, VorbisOnlyAudioQuality(AudioQuality.NORMAL), False, | ||||||
|  |                 None) | ||||||
|  |             start = 0 | ||||||
|  |             end = stream.input_stream.size | ||||||
|  |             payload = stream.input_stream.stream() | ||||||
|  |             log_msg(f"Track {track_id}: stream size is {stream.input_stream.size} bytes.") | ||||||
|  |             # reqrange = request.get_header("range") | ||||||
|  |             # if reqrange is not None: | ||||||
|  |             #     range_search = re.search( | ||||||
|  |             #         "^bytes=(?P<start>[0-9]+?)-(?P<end>[0-9]+?)$", | ||||||
|  |             #         reqrange) | ||||||
|  |             #     if range_search is not None: | ||||||
|  |             #         start = int(range_search.group("start")) | ||||||
|  |             #         end = (int(range_search.group("end")) | ||||||
|  |             #                 if int(range_search.group("end")) <= | ||||||
|  |             #                 stream.input_stream.size else | ||||||
|  |             #                 stream.input_stream.size) | ||||||
|  |             #         payload.skip(start) | ||||||
|  |             #     else: | ||||||
|  |             #         payload = stream | ||||||
|  |             response = HTTPResponse(body=payload) | ||||||
|  |             response.add_header('Content-Type', 'audio/ogg') | ||||||
|  |             # response.add_header('Accept-Ranges', 'bytes') | ||||||
|  |             # response.add_header("Content-Length", str(stream.input_stream.size).encode() if | ||||||
|  |             #                 stream.input_stream.size == end else "{}-{}/{}" | ||||||
|  |             #                 .format(start, end, | ||||||
|  |             #                         stream.input_stream.size).encode()) | ||||||
|  |             return response | ||||||
|  |         except Exception as e: | ||||||
|  |             log_msg(f"Error while streaming track {track_id}: {e}") | ||||||
|  | 
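|  | # A sketch of the Range-header parsing the TODO above asks for. It is not | ||||||
|  | # wired into stream() yet: honoring a range would also need payload.skip(start), | ||||||
|  | # a 206 status, and Accept-Ranges/Content-Range headers, and it assumes the | ||||||
|  | # librespot payload exposes skip(n) as the commented-out draft implies. | ||||||
|  | def _parse_range_header(header, size): | ||||||
|  |     """Return (start, end) byte offsets for a 'bytes=start-end' header, | ||||||
|  |     or None if the header is absent or malformed.""" | ||||||
|  |     match = re.search(r"^bytes=(?P<start>\d+)-(?P<end>\d*)$", header or "") | ||||||
|  |     if match is None: | ||||||
|  |         return None | ||||||
|  |     start = int(match.group("start")) | ||||||
|  |     # An empty end ("bytes=100-") means "to the end of the stream". | ||||||
|  |     end = int(match.group("end")) if match.group("end") else size - 1 | ||||||
|  |     return start, min(end, size - 1) | ||||||
|  | 
|  | # Example use inside stream(): | ||||||
|  | #   rng = _parse_range_header(request.get_header("Range"), stream.input_stream.size) | ||||||
|  | 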
 | ||||||
|  | def start_thread(web_port: int) -> None: | ||||||
|  |     # bottle's 'app' is an application stack rather than a WSGI callable, so | ||||||
|  |     # call it to get the current default application. This assumes the caller | ||||||
|  |     # installs the LibrespotServer instance as the default app. | ||||||
|  |     httpd = make_server('127.0.0.1', web_port, app()) | ||||||
|  |     monitor = xbmc.Monitor() | ||||||
|  |     # Handle one request at a time until Kodi signals shutdown. | ||||||
|  |     while not monitor.abortRequested(): | ||||||
|  |         httpd.handle_request() | ||||||
154 resources/lib/defusedxml/ElementTree.py Normal file
						|  | @ -0,0 +1,154 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.etree.ElementTree facade | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | import sys | ||||||
|  | import warnings | ||||||
|  | from xml.etree.ElementTree import ParseError | ||||||
|  | from xml.etree.ElementTree import TreeBuilder as _TreeBuilder | ||||||
|  | from xml.etree.ElementTree import parse as _parse | ||||||
|  | from xml.etree.ElementTree import tostring | ||||||
|  | 
 | ||||||
|  | from .common import PY3 | ||||||
|  | 
 | ||||||
|  | if PY3: | ||||||
|  |     import importlib | ||||||
|  | else: | ||||||
|  |     from xml.etree.ElementTree import XMLParser as _XMLParser | ||||||
|  |     from xml.etree.ElementTree import iterparse as _iterparse | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | from .common import ( | ||||||
|  |     DTDForbidden, | ||||||
|  |     EntitiesForbidden, | ||||||
|  |     ExternalReferenceForbidden, | ||||||
|  |     _generate_etree_functions, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.etree.ElementTree" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _get_py3_cls(): | ||||||
|  |     """Python 3.3 hides the pure Python code but defusedxml requires it. | ||||||
|  | 
 | ||||||
|  |     The code is based on test.support.import_fresh_module(). | ||||||
|  |     """ | ||||||
|  |     pymodname = "xml.etree.ElementTree" | ||||||
|  |     cmodname = "_elementtree" | ||||||
|  | 
 | ||||||
|  |     pymod = sys.modules.pop(pymodname, None) | ||||||
|  |     cmod = sys.modules.pop(cmodname, None) | ||||||
|  | 
 | ||||||
|  |     sys.modules[cmodname] = None | ||||||
|  |     try: | ||||||
|  |         pure_pymod = importlib.import_module(pymodname) | ||||||
|  |     finally: | ||||||
|  |         # restore module | ||||||
|  |         sys.modules[pymodname] = pymod | ||||||
|  |         if cmod is not None: | ||||||
|  |             sys.modules[cmodname] = cmod | ||||||
|  |         else: | ||||||
|  |             sys.modules.pop(cmodname, None) | ||||||
|  |         # restore attribute on original package | ||||||
|  |         etree_pkg = sys.modules["xml.etree"] | ||||||
|  |         if pymod is not None: | ||||||
|  |             etree_pkg.ElementTree = pymod | ||||||
|  |         elif hasattr(etree_pkg, "ElementTree"): | ||||||
|  |             del etree_pkg.ElementTree | ||||||
|  | 
 | ||||||
|  |     _XMLParser = pure_pymod.XMLParser | ||||||
|  |     _iterparse = pure_pymod.iterparse | ||||||
|  |     # patch pure module to use ParseError from C extension | ||||||
|  |     pure_pymod.ParseError = ParseError | ||||||
|  | 
 | ||||||
|  |     return _XMLParser, _iterparse | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | if PY3: | ||||||
|  |     _XMLParser, _iterparse = _get_py3_cls() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _sentinel = object() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedXMLParser(_XMLParser): | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         html=_sentinel, | ||||||
|  |         target=None, | ||||||
|  |         encoding=None, | ||||||
|  |         forbid_dtd=False, | ||||||
|  |         forbid_entities=True, | ||||||
|  |         forbid_external=True, | ||||||
|  |     ): | ||||||
|  |         # Python 2.x old style class | ||||||
|  |         _XMLParser.__init__(self, target=target, encoding=encoding) | ||||||
|  |         if html is not _sentinel: | ||||||
|  |             # the 'html' argument has been deprecated and ignored in all | ||||||
|  |             # supported versions of Python. Python 3.8 finally removed it. | ||||||
|  |             if html: | ||||||
|  |                 raise TypeError("'html=True' is no longer supported.") | ||||||
|  |             else: | ||||||
|  |                 warnings.warn( | ||||||
|  |                     "'html' keyword argument is no longer supported. Pass " | ||||||
|  |                     "in arguments as keyword arguments.", | ||||||
|  |                     category=DeprecationWarning, | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |         self.forbid_dtd = forbid_dtd | ||||||
|  |         self.forbid_entities = forbid_entities | ||||||
|  |         self.forbid_external = forbid_external | ||||||
|  |         if PY3: | ||||||
|  |             parser = self.parser | ||||||
|  |         else: | ||||||
|  |             parser = self._parser | ||||||
|  |         if self.forbid_dtd: | ||||||
|  |             parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl | ||||||
|  |         if self.forbid_entities: | ||||||
|  |             parser.EntityDeclHandler = self.defused_entity_decl | ||||||
|  |             parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl | ||||||
|  |         if self.forbid_external: | ||||||
|  |             parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler | ||||||
|  | 
 | ||||||
|  |     def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): | ||||||
|  |         raise DTDForbidden(name, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def defused_entity_decl( | ||||||
|  |         self, name, is_parameter_entity, value, base, sysid, pubid, notation_name | ||||||
|  |     ): | ||||||
|  |         raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) | ||||||
|  | 
 | ||||||
|  |     def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): | ||||||
|  |         # expat 1.2 | ||||||
|  |         raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)  # pragma: no cover | ||||||
|  | 
 | ||||||
|  |     def defused_external_entity_ref_handler(self, context, base, sysid, pubid): | ||||||
|  |         raise ExternalReferenceForbidden(context, base, sysid, pubid) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # aliases | ||||||
|  | # XMLParse is a typo, keep it for backwards compatibility | ||||||
|  | XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser | ||||||
|  | 
 | ||||||
|  | parse, iterparse, fromstring = _generate_etree_functions( | ||||||
|  |     DefusedXMLParser, _TreeBuilder, _parse, _iterparse | ||||||
|  | ) | ||||||
|  | XML = fromstring | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | __all__ = [ | ||||||
|  |     "ParseError", | ||||||
|  |     "XML", | ||||||
|  |     "XMLParse", | ||||||
|  |     "XMLParser", | ||||||
|  |     "XMLTreeBuilder", | ||||||
|  |     "fromstring", | ||||||
|  |     "iterparse", | ||||||
|  |     "parse", | ||||||
|  |     "tostring", | ||||||
|  | ] | ||||||
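As a quick reference for the module above: once the add-on has put `resources/lib` on `sys.path` so the vendored package imports as `defusedxml`, the drop-in functions behave like their `xml.etree.ElementTree` counterparts until a forbidden construct appears. A minimal sketch (the payloads are illustrative):

```python
from defusedxml import EntitiesForbidden
from defusedxml.ElementTree import fromstring

# a plain document parses exactly like xml.etree.ElementTree
root = fromstring("<root><item>ok</item></root>")
print(root[0].text)  # -> ok

# an inline entity declaration (the "billion laughs" building block)
# trips defused_entity_decl and aborts the parse
try:
    fromstring('<!DOCTYPE x [<!ENTITY a "boom">]><x>&a;</x>')
except EntitiesForbidden as exc:
    print(exc)  # EntitiesForbidden(name='a', system_id=None, public_id=None)
```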
67  resources/lib/defusedxml/__init__.py  Normal file
						|  | @ -0,0 +1,67 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defuse XML bomb denial of service vulnerabilities | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | from .common import ( | ||||||
|  |     DefusedXmlException, | ||||||
|  |     DTDForbidden, | ||||||
|  |     EntitiesForbidden, | ||||||
|  |     ExternalReferenceForbidden, | ||||||
|  |     NotSupportedError, | ||||||
|  |     _apply_defusing, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def defuse_stdlib(): | ||||||
|  |     """Monkey patch and defuse all stdlib packages | ||||||
|  | 
 | ||||||
|  |     :warning: The monkey patch is an EXPERIMENTAL feature. | ||||||
|  |     """ | ||||||
|  |     defused = {} | ||||||
|  | 
 | ||||||
|  |     with warnings.catch_warnings(): | ||||||
|  |         from . import cElementTree | ||||||
|  |     from . import ElementTree | ||||||
|  |     from . import minidom | ||||||
|  |     from . import pulldom | ||||||
|  |     from . import sax | ||||||
|  |     from . import expatbuilder | ||||||
|  |     from . import expatreader | ||||||
|  |     from . import xmlrpc | ||||||
|  | 
 | ||||||
|  |     xmlrpc.monkey_patch() | ||||||
|  |     defused[xmlrpc] = None | ||||||
|  | 
 | ||||||
|  |     defused_mods = [ | ||||||
|  |         cElementTree, | ||||||
|  |         ElementTree, | ||||||
|  |         minidom, | ||||||
|  |         pulldom, | ||||||
|  |         sax, | ||||||
|  |         expatbuilder, | ||||||
|  |         expatreader, | ||||||
|  |     ] | ||||||
|  | 
 | ||||||
|  |     for defused_mod in defused_mods: | ||||||
|  |         stdlib_mod = _apply_defusing(defused_mod) | ||||||
|  |         defused[defused_mod] = stdlib_mod | ||||||
|  | 
 | ||||||
|  |     return defused | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | __version__ = "0.7.1" | ||||||
|  | 
 | ||||||
|  | __all__ = [ | ||||||
|  |     "DefusedXmlException", | ||||||
|  |     "DTDForbidden", | ||||||
|  |     "EntitiesForbidden", | ||||||
|  |     "ExternalReferenceForbidden", | ||||||
|  |     "NotSupportedError", | ||||||
|  | ] | ||||||
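A sketch of how `defuse_stdlib()` is meant to be used. Note that this vendored copy imports the `cElementTree` shim unconditionally, so the call only succeeds on interpreters where `xml.etree.cElementTree` still exists (Python < 3.9):

```python
import defusedxml
from xml.etree import ElementTree

defusedxml.defuse_stdlib()  # monkey patch the stdlib XML modules in place

# the stdlib entry point now applies the defused defaults
try:
    ElementTree.fromstring('<!DOCTYPE x [<!ENTITY a "boom">]><x>&a;</x>')
except defusedxml.EntitiesForbidden:
    print("stdlib ElementTree is defused")
```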
62  resources/lib/defusedxml/cElementTree.py  Normal file
						|  | @ -0,0 +1,62 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.etree.cElementTree | ||||||
|  | """ | ||||||
|  | from __future__ import absolute_import | ||||||
|  | 
 | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | from .common import _generate_etree_functions | ||||||
|  | 
 | ||||||
|  | from xml.etree.cElementTree import TreeBuilder as _TreeBuilder | ||||||
|  | from xml.etree.cElementTree import parse as _parse | ||||||
|  | from xml.etree.cElementTree import tostring | ||||||
|  | 
 | ||||||
|  | # iterparse from ElementTree! | ||||||
|  | from xml.etree.ElementTree import iterparse as _iterparse | ||||||
|  | 
 | ||||||
|  | # This module is an alias for ElementTree just like xml.etree.cElementTree | ||||||
|  | from .ElementTree import ( | ||||||
|  |     XML, | ||||||
|  |     XMLParse, | ||||||
|  |     XMLParser, | ||||||
|  |     XMLTreeBuilder, | ||||||
|  |     fromstring, | ||||||
|  |     iterparse, | ||||||
|  |     parse, | ||||||
|  |     tostring, | ||||||
|  |     DefusedXMLParser, | ||||||
|  |     ParseError, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.etree.cElementTree" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | warnings.warn( | ||||||
|  |     "defusedxml.cElementTree is deprecated, import from defusedxml.ElementTree instead.", | ||||||
|  |     category=DeprecationWarning, | ||||||
|  |     stacklevel=2, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | # XMLParse is a typo, keep it for backwards compatibility | ||||||
|  | XMLTreeBuilder = XMLParse = XMLParser = DefusedXMLParser | ||||||
|  | 
 | ||||||
|  | parse, iterparse, fromstring = _generate_etree_functions( | ||||||
|  |     DefusedXMLParser, _TreeBuilder, _parse, _iterparse | ||||||
|  | ) | ||||||
|  | XML = fromstring | ||||||
|  | 
 | ||||||
|  | __all__ = [ | ||||||
|  |     "ParseError", | ||||||
|  |     "XML", | ||||||
|  |     "XMLParse", | ||||||
|  |     "XMLParser", | ||||||
|  |     "XMLTreeBuilder", | ||||||
|  |     "fromstring", | ||||||
|  |     "iterparse", | ||||||
|  |     "parse", | ||||||
|  |     "tostring", | ||||||
|  | ] | ||||||
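Importing the alias module is expected to emit the `DeprecationWarning` declared above; a quick check (again only meaningful on Python < 3.9, where `xml.etree.cElementTree` still exists):

```python
import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    from defusedxml import cElementTree  # noqa: F401

# the deprecation warning declared in the module body was recorded
assert any(issubclass(w.category, DeprecationWarning) for w in caught)
```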
129  resources/lib/defusedxml/common.py  Normal file
						|  | @ -0,0 +1,129 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Common constants, exceptions and helpe functions | ||||||
|  | """ | ||||||
|  | import sys | ||||||
|  | import xml.parsers.expat | ||||||
|  | 
 | ||||||
|  | PY3 = sys.version_info[0] == 3 | ||||||
|  | 
 | ||||||
|  | # Fail early when pyexpat is not installed correctly | ||||||
|  | if not hasattr(xml.parsers.expat, "ParserCreate"): | ||||||
|  |     raise ImportError("pyexpat")  # pragma: no cover | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedXmlException(ValueError): | ||||||
|  |     """Base exception""" | ||||||
|  | 
 | ||||||
|  |     def __repr__(self): | ||||||
|  |         return str(self) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DTDForbidden(DefusedXmlException): | ||||||
|  |     """Document type definition is forbidden""" | ||||||
|  | 
 | ||||||
|  |     def __init__(self, name, sysid, pubid): | ||||||
|  |         super(DTDForbidden, self).__init__() | ||||||
|  |         self.name = name | ||||||
|  |         self.sysid = sysid | ||||||
|  |         self.pubid = pubid | ||||||
|  | 
 | ||||||
|  |     def __str__(self): | ||||||
|  |         tpl = "DTDForbidden(name='{}', system_id={!r}, public_id={!r})" | ||||||
|  |         return tpl.format(self.name, self.sysid, self.pubid) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class EntitiesForbidden(DefusedXmlException): | ||||||
|  |     """Entity definition is forbidden""" | ||||||
|  | 
 | ||||||
|  |     def __init__(self, name, value, base, sysid, pubid, notation_name): | ||||||
|  |         super(EntitiesForbidden, self).__init__() | ||||||
|  |         self.name = name | ||||||
|  |         self.value = value | ||||||
|  |         self.base = base | ||||||
|  |         self.sysid = sysid | ||||||
|  |         self.pubid = pubid | ||||||
|  |         self.notation_name = notation_name | ||||||
|  | 
 | ||||||
|  |     def __str__(self): | ||||||
|  |         tpl = "EntitiesForbidden(name='{}', system_id={!r}, public_id={!r})" | ||||||
|  |         return tpl.format(self.name, self.sysid, self.pubid) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ExternalReferenceForbidden(DefusedXmlException): | ||||||
|  |     """Resolving an external reference is forbidden""" | ||||||
|  | 
 | ||||||
|  |     def __init__(self, context, base, sysid, pubid): | ||||||
|  |         super(ExternalReferenceForbidden, self).__init__() | ||||||
|  |         self.context = context | ||||||
|  |         self.base = base | ||||||
|  |         self.sysid = sysid | ||||||
|  |         self.pubid = pubid | ||||||
|  | 
 | ||||||
|  |     def __str__(self): | ||||||
|  |         tpl = "ExternalReferenceForbidden(system_id='{}', public_id={})" | ||||||
|  |         return tpl.format(self.sysid, self.pubid) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class NotSupportedError(DefusedXmlException): | ||||||
|  |     """The operation is not supported""" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _apply_defusing(defused_mod): | ||||||
|  |     assert defused_mod is sys.modules[defused_mod.__name__] | ||||||
|  |     stdlib_name = defused_mod.__origin__ | ||||||
|  |     __import__(stdlib_name, {}, {}, ["*"]) | ||||||
|  |     stdlib_mod = sys.modules[stdlib_name] | ||||||
|  |     stdlib_names = set(dir(stdlib_mod)) | ||||||
|  |     for name, obj in vars(defused_mod).items(): | ||||||
|  |         if name.startswith("_") or name not in stdlib_names: | ||||||
|  |             continue | ||||||
|  |         setattr(stdlib_mod, name, obj) | ||||||
|  |     return stdlib_mod | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def _generate_etree_functions(DefusedXMLParser, _TreeBuilder, _parse, _iterparse): | ||||||
|  |     """Factory for functions needed by etree, dependent on whether | ||||||
|  |     cElementTree or ElementTree is used.""" | ||||||
|  | 
 | ||||||
|  |     def parse(source, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True): | ||||||
|  |         if parser is None: | ||||||
|  |             parser = DefusedXMLParser( | ||||||
|  |                 target=_TreeBuilder(), | ||||||
|  |                 forbid_dtd=forbid_dtd, | ||||||
|  |                 forbid_entities=forbid_entities, | ||||||
|  |                 forbid_external=forbid_external, | ||||||
|  |             ) | ||||||
|  |         return _parse(source, parser) | ||||||
|  | 
 | ||||||
|  |     def iterparse( | ||||||
|  |         source, | ||||||
|  |         events=None, | ||||||
|  |         parser=None, | ||||||
|  |         forbid_dtd=False, | ||||||
|  |         forbid_entities=True, | ||||||
|  |         forbid_external=True, | ||||||
|  |     ): | ||||||
|  |         if parser is None: | ||||||
|  |             parser = DefusedXMLParser( | ||||||
|  |                 target=_TreeBuilder(), | ||||||
|  |                 forbid_dtd=forbid_dtd, | ||||||
|  |                 forbid_entities=forbid_entities, | ||||||
|  |                 forbid_external=forbid_external, | ||||||
|  |             ) | ||||||
|  |         return _iterparse(source, events, parser) | ||||||
|  | 
 | ||||||
|  |     def fromstring(text, forbid_dtd=False, forbid_entities=True, forbid_external=True): | ||||||
|  |         parser = DefusedXMLParser( | ||||||
|  |             target=_TreeBuilder(), | ||||||
|  |             forbid_dtd=forbid_dtd, | ||||||
|  |             forbid_entities=forbid_entities, | ||||||
|  |             forbid_external=forbid_external, | ||||||
|  |         ) | ||||||
|  |         parser.feed(text) | ||||||
|  |         return parser.close() | ||||||
|  | 
 | ||||||
|  |     return parse, iterparse, fromstring | ||||||
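The factory closes over the parser class and the stdlib entry points, so all three returned functions expose the same `forbid_*` keywords. A sketch of the resulting per-call control, via the `ElementTree` wrapper built from this factory:

```python
from defusedxml import DTDForbidden
from defusedxml.ElementTree import fromstring

doc = '<!DOCTYPE root SYSTEM "root.dtd"><root/>'
fromstring(doc)  # accepted: forbid_dtd defaults to False

try:
    fromstring(doc, forbid_dtd=True)
except DTDForbidden as exc:
    print(exc)  # DTDForbidden(name='root', system_id='root.dtd', public_id=None)
```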
107  resources/lib/defusedxml/expatbuilder.py  Normal file
						|  | @ -0,0 +1,107 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.dom.expatbuilder | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | from xml.dom.expatbuilder import ExpatBuilder as _ExpatBuilder | ||||||
|  | from xml.dom.expatbuilder import Namespaces as _Namespaces | ||||||
|  | 
 | ||||||
|  | from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.dom.expatbuilder" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedExpatBuilder(_ExpatBuilder): | ||||||
|  |     """Defused document builder""" | ||||||
|  | 
 | ||||||
|  |     def __init__( | ||||||
|  |         self, options=None, forbid_dtd=False, forbid_entities=True, forbid_external=True | ||||||
|  |     ): | ||||||
|  |         _ExpatBuilder.__init__(self, options) | ||||||
|  |         self.forbid_dtd = forbid_dtd | ||||||
|  |         self.forbid_entities = forbid_entities | ||||||
|  |         self.forbid_external = forbid_external | ||||||
|  | 
 | ||||||
|  |     def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): | ||||||
|  |         raise DTDForbidden(name, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def defused_entity_decl( | ||||||
|  |         self, name, is_parameter_entity, value, base, sysid, pubid, notation_name | ||||||
|  |     ): | ||||||
|  |         raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) | ||||||
|  | 
 | ||||||
|  |     def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): | ||||||
|  |         # expat 1.2 | ||||||
|  |         raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)  # pragma: no cover | ||||||
|  | 
 | ||||||
|  |     def defused_external_entity_ref_handler(self, context, base, sysid, pubid): | ||||||
|  |         raise ExternalReferenceForbidden(context, base, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def install(self, parser): | ||||||
|  |         _ExpatBuilder.install(self, parser) | ||||||
|  | 
 | ||||||
|  |         if self.forbid_dtd: | ||||||
|  |             parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl | ||||||
|  |         if self.forbid_entities: | ||||||
|  |             # if self._options.entities: | ||||||
|  |             parser.EntityDeclHandler = self.defused_entity_decl | ||||||
|  |             parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl | ||||||
|  |         if self.forbid_external: | ||||||
|  |             parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedExpatBuilderNS(_Namespaces, DefusedExpatBuilder): | ||||||
|  |     """Defused document builder that supports namespaces.""" | ||||||
|  | 
 | ||||||
|  |     def install(self, parser): | ||||||
|  |         DefusedExpatBuilder.install(self, parser) | ||||||
|  |         if self._options.namespace_declarations: | ||||||
|  |             parser.StartNamespaceDeclHandler = self.start_namespace_decl_handler | ||||||
|  | 
 | ||||||
|  |     def reset(self): | ||||||
|  |         DefusedExpatBuilder.reset(self) | ||||||
|  |         self._initNamespaces() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parse(file, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True): | ||||||
|  |     """Parse a document, returning the resulting Document node. | ||||||
|  | 
 | ||||||
|  |     'file' may be either a file name or an open file object. | ||||||
|  |     """ | ||||||
|  |     if namespaces: | ||||||
|  |         build_builder = DefusedExpatBuilderNS | ||||||
|  |     else: | ||||||
|  |         build_builder = DefusedExpatBuilder | ||||||
|  |     builder = build_builder( | ||||||
|  |         forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     if isinstance(file, str): | ||||||
|  |         fp = open(file, "rb") | ||||||
|  |         try: | ||||||
|  |             result = builder.parseFile(fp) | ||||||
|  |         finally: | ||||||
|  |             fp.close() | ||||||
|  |     else: | ||||||
|  |         result = builder.parseFile(file) | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parseString( | ||||||
|  |     string, namespaces=True, forbid_dtd=False, forbid_entities=True, forbid_external=True | ||||||
|  | ): | ||||||
|  |     """Parse a document from a string, returning the resulting | ||||||
|  |     Document node. | ||||||
|  |     """ | ||||||
|  |     if namespaces: | ||||||
|  |         build_builder = DefusedExpatBuilderNS | ||||||
|  |     else: | ||||||
|  |         build_builder = DefusedExpatBuilder | ||||||
|  |     builder = build_builder( | ||||||
|  |         forbid_dtd=forbid_dtd, forbid_entities=forbid_entities, forbid_external=forbid_external | ||||||
|  |     ) | ||||||
|  |     return builder.parseString(string) | ||||||
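A minimal sketch of the builder in use (the payload is illustrative):

```python
from defusedxml import EntitiesForbidden, expatbuilder

# well-formed input builds an ordinary xml.dom Document
doc = expatbuilder.parseString("<root><child/></root>")
print(doc.documentElement.tagName)  # -> root

# an entity definition trips the installed defused handler
try:
    expatbuilder.parseString('<!DOCTYPE x [<!ENTITY e "boom">]><x>&e;</x>')
except EntitiesForbidden as exc:
    print(exc)
```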
61  resources/lib/defusedxml/expatreader.py  Normal file
						|  | @ -0,0 +1,61 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.sax.expatreader | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | from xml.sax.expatreader import ExpatParser as _ExpatParser | ||||||
|  | 
 | ||||||
|  | from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.sax.expatreader" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedExpatParser(_ExpatParser): | ||||||
|  |     """Defused SAX driver for the pyexpat C module.""" | ||||||
|  | 
 | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         namespaceHandling=0, | ||||||
|  |         bufsize=2 ** 16 - 20, | ||||||
|  |         forbid_dtd=False, | ||||||
|  |         forbid_entities=True, | ||||||
|  |         forbid_external=True, | ||||||
|  |     ): | ||||||
|  |         _ExpatParser.__init__(self, namespaceHandling, bufsize) | ||||||
|  |         self.forbid_dtd = forbid_dtd | ||||||
|  |         self.forbid_entities = forbid_entities | ||||||
|  |         self.forbid_external = forbid_external | ||||||
|  | 
 | ||||||
|  |     def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): | ||||||
|  |         raise DTDForbidden(name, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def defused_entity_decl( | ||||||
|  |         self, name, is_parameter_entity, value, base, sysid, pubid, notation_name | ||||||
|  |     ): | ||||||
|  |         raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) | ||||||
|  | 
 | ||||||
|  |     def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): | ||||||
|  |         # expat 1.2 | ||||||
|  |         raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)  # pragma: no cover | ||||||
|  | 
 | ||||||
|  |     def defused_external_entity_ref_handler(self, context, base, sysid, pubid): | ||||||
|  |         raise ExternalReferenceForbidden(context, base, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def reset(self): | ||||||
|  |         _ExpatParser.reset(self) | ||||||
|  |         parser = self._parser | ||||||
|  |         if self.forbid_dtd: | ||||||
|  |             parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl | ||||||
|  |         if self.forbid_entities: | ||||||
|  |             parser.EntityDeclHandler = self.defused_entity_decl | ||||||
|  |             parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl | ||||||
|  |         if self.forbid_external: | ||||||
|  |             parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def create_parser(*args, **kwargs): | ||||||
|  |     return DefusedExpatParser(*args, **kwargs) | ||||||
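Since `DefusedExpatParser` is a regular SAX `XMLReader`, it plugs into the usual handler machinery; the `ElementCounter` handler below is a made-up example:

```python
import io
from xml.sax.handler import ContentHandler
from xml.sax.xmlreader import InputSource

from defusedxml import expatreader

class ElementCounter(ContentHandler):
    def __init__(self):
        super().__init__()
        self.count = 0

    def startElement(self, name, attrs):
        self.count += 1

parser = expatreader.create_parser(forbid_dtd=True)
handler = ElementCounter()
parser.setContentHandler(handler)

source = InputSource()
source.setByteStream(io.BytesIO(b"<a><b/><b/></a>"))
parser.parse(source)
print(handler.count)  # -> 3
```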
153  resources/lib/defusedxml/lxml.py  Normal file
						|  | @ -0,0 +1,153 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """DEPRECATED Example code for lxml.etree protection | ||||||
|  | 
 | ||||||
|  | The code has NO protection against decompression bombs. | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | import threading | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | from lxml import etree as _etree | ||||||
|  | 
 | ||||||
|  | from .common import DTDForbidden, EntitiesForbidden, NotSupportedError | ||||||
|  | 
 | ||||||
|  | LXML3 = _etree.LXML_VERSION[0] >= 3 | ||||||
|  | 
 | ||||||
|  | __origin__ = "lxml.etree" | ||||||
|  | 
 | ||||||
|  | tostring = _etree.tostring | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | warnings.warn( | ||||||
|  |     "defusedxml.lxml is no longer supported and will be removed in a future release.", | ||||||
|  |     category=DeprecationWarning, | ||||||
|  |     stacklevel=2, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class RestrictedElement(_etree.ElementBase): | ||||||
|  |     """A restricted Element class that filters out instances of some classes""" | ||||||
|  | 
 | ||||||
|  |     __slots__ = () | ||||||
|  |     # blacklist = (etree._Entity, etree._ProcessingInstruction, etree._Comment) | ||||||
|  |     blacklist = _etree._Entity | ||||||
|  | 
 | ||||||
|  |     def _filter(self, iterator): | ||||||
|  |         blacklist = self.blacklist | ||||||
|  |         for child in iterator: | ||||||
|  |             if isinstance(child, blacklist): | ||||||
|  |                 continue | ||||||
|  |             yield child | ||||||
|  | 
 | ||||||
|  |     def __iter__(self): | ||||||
|  |         iterator = super(RestrictedElement, self).__iter__() | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  |     def iterchildren(self, tag=None, reversed=False): | ||||||
|  |         iterator = super(RestrictedElement, self).iterchildren(tag=tag, reversed=reversed) | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  |     def iter(self, tag=None, *tags): | ||||||
|  |         iterator = super(RestrictedElement, self).iter(tag=tag, *tags) | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  |     def iterdescendants(self, tag=None, *tags): | ||||||
|  |         iterator = super(RestrictedElement, self).iterdescendants(tag=tag, *tags) | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  |     def itersiblings(self, tag=None, preceding=False): | ||||||
|  |         iterator = super(RestrictedElement, self).itersiblings(tag=tag, preceding=preceding) | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  |     def getchildren(self): | ||||||
|  |         iterator = super(RestrictedElement, self).__iter__() | ||||||
|  |         return list(self._filter(iterator)) | ||||||
|  | 
 | ||||||
|  |     def getiterator(self, tag=None): | ||||||
|  |         iterator = super(RestrictedElement, self).getiterator(tag) | ||||||
|  |         return self._filter(iterator) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class GlobalParserTLS(threading.local): | ||||||
|  |     """Thread local context for custom parser instances""" | ||||||
|  | 
 | ||||||
|  |     parser_config = { | ||||||
|  |         "resolve_entities": False, | ||||||
|  |         # 'remove_comments': True, | ||||||
|  |         # 'remove_pis': True, | ||||||
|  |     } | ||||||
|  | 
 | ||||||
|  |     element_class = RestrictedElement | ||||||
|  | 
 | ||||||
|  |     def createDefaultParser(self): | ||||||
|  |         parser = _etree.XMLParser(**self.parser_config) | ||||||
|  |         element_class = self.element_class | ||||||
|  |         if self.element_class is not None: | ||||||
|  |             lookup = _etree.ElementDefaultClassLookup(element=element_class) | ||||||
|  |             parser.set_element_class_lookup(lookup) | ||||||
|  |         return parser | ||||||
|  | 
 | ||||||
|  |     def setDefaultParser(self, parser): | ||||||
|  |         self._default_parser = parser | ||||||
|  | 
 | ||||||
|  |     def getDefaultParser(self): | ||||||
|  |         parser = getattr(self, "_default_parser", None) | ||||||
|  |         if parser is None: | ||||||
|  |             parser = self.createDefaultParser() | ||||||
|  |             self.setDefaultParser(parser) | ||||||
|  |         return parser | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | _parser_tls = GlobalParserTLS() | ||||||
|  | getDefaultParser = _parser_tls.getDefaultParser | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def check_docinfo(elementtree, forbid_dtd=False, forbid_entities=True): | ||||||
|  |     """Check docinfo of an element tree for DTD and entity declarations | ||||||
|  | 
 | ||||||
|  |     The check for entity declarations needs lxml 3 or newer. lxml 2.x does | ||||||
|  |     not support dtd.iterentities(). | ||||||
|  |     """ | ||||||
|  |     docinfo = elementtree.docinfo | ||||||
|  |     if docinfo.doctype: | ||||||
|  |         if forbid_dtd: | ||||||
|  |             raise DTDForbidden(docinfo.doctype, docinfo.system_url, docinfo.public_id) | ||||||
|  |         if forbid_entities and not LXML3: | ||||||
|  |             # lxml < 3 has no iterentities() | ||||||
|  |             raise NotSupportedError("Unable to check for entity declarations in lxml 2.x") | ||||||
|  | 
 | ||||||
|  |     if forbid_entities: | ||||||
|  |         for dtd in docinfo.internalDTD, docinfo.externalDTD: | ||||||
|  |             if dtd is None: | ||||||
|  |                 continue | ||||||
|  |             for entity in dtd.iterentities(): | ||||||
|  |                 raise EntitiesForbidden(entity.name, entity.content, None, None, None, None) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parse(source, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True): | ||||||
|  |     if parser is None: | ||||||
|  |         parser = getDefaultParser() | ||||||
|  |     elementtree = _etree.parse(source, parser, base_url=base_url) | ||||||
|  |     check_docinfo(elementtree, forbid_dtd, forbid_entities) | ||||||
|  |     return elementtree | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def fromstring(text, parser=None, base_url=None, forbid_dtd=False, forbid_entities=True): | ||||||
|  |     if parser is None: | ||||||
|  |         parser = getDefaultParser() | ||||||
|  |     rootelement = _etree.fromstring(text, parser, base_url=base_url) | ||||||
|  |     elementtree = rootelement.getroottree() | ||||||
|  |     check_docinfo(elementtree, forbid_dtd, forbid_entities) | ||||||
|  |     return rootelement | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | XML = fromstring | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def iterparse(*args, **kwargs): | ||||||
|  |     raise NotSupportedError("defused lxml.etree.iterparse not available") | ||||||
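A sketch of the deprecated shim's behaviour, assuming lxml 3+ is installed: entity declarations are caught after parsing via `check_docinfo()`, and `iterparse` is simply unavailable:

```python
from defusedxml import EntitiesForbidden, NotSupportedError
from defusedxml import lxml as defused_lxml  # emits the DeprecationWarning above

try:
    defused_lxml.fromstring('<!DOCTYPE x [<!ENTITY e "boom">]><x>&e;</x>')
except EntitiesForbidden as exc:
    print(exc)

try:
    defused_lxml.iterparse("doc.xml")  # hypothetical filename
except NotSupportedError as exc:
    print(exc)  # defused lxml.etree.iterparse not available
```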
63  resources/lib/defusedxml/minidom.py  Normal file
						|  | @ -0,0 +1,63 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.dom.minidom | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | from xml.dom.minidom import _do_pulldom_parse | ||||||
|  | from . import expatbuilder as _expatbuilder | ||||||
|  | from . import pulldom as _pulldom | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.dom.minidom" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parse( | ||||||
|  |     file, parser=None, bufsize=None, forbid_dtd=False, forbid_entities=True, forbid_external=True | ||||||
|  | ): | ||||||
|  |     """Parse a file into a DOM by filename or file object.""" | ||||||
|  |     if parser is None and not bufsize: | ||||||
|  |         return _expatbuilder.parse( | ||||||
|  |             file, | ||||||
|  |             forbid_dtd=forbid_dtd, | ||||||
|  |             forbid_entities=forbid_entities, | ||||||
|  |             forbid_external=forbid_external, | ||||||
|  |         ) | ||||||
|  |     else: | ||||||
|  |         return _do_pulldom_parse( | ||||||
|  |             _pulldom.parse, | ||||||
|  |             (file,), | ||||||
|  |             { | ||||||
|  |                 "parser": parser, | ||||||
|  |                 "bufsize": bufsize, | ||||||
|  |                 "forbid_dtd": forbid_dtd, | ||||||
|  |                 "forbid_entities": forbid_entities, | ||||||
|  |                 "forbid_external": forbid_external, | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parseString( | ||||||
|  |     string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True | ||||||
|  | ): | ||||||
|  |     """Parse a file into a DOM from a string.""" | ||||||
|  |     if parser is None: | ||||||
|  |         return _expatbuilder.parseString( | ||||||
|  |             string, | ||||||
|  |             forbid_dtd=forbid_dtd, | ||||||
|  |             forbid_entities=forbid_entities, | ||||||
|  |             forbid_external=forbid_external, | ||||||
|  |         ) | ||||||
|  |     else: | ||||||
|  |         return _do_pulldom_parse( | ||||||
|  |             _pulldom.parseString, | ||||||
|  |             (string,), | ||||||
|  |             { | ||||||
|  |                 "parser": parser, | ||||||
|  |                 "forbid_dtd": forbid_dtd, | ||||||
|  |                 "forbid_entities": forbid_entities, | ||||||
|  |                 "forbid_external": forbid_external, | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
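A minimal sketch of the minidom wrapper (illustrative payloads):

```python
from defusedxml import EntitiesForbidden, minidom

dom = minidom.parseString("<root><item>ok</item></root>")
print(dom.getElementsByTagName("item")[0].firstChild.data)  # -> ok

try:
    minidom.parseString('<!DOCTYPE x [<!ENTITY e "boom">]><x>&e;</x>')
except EntitiesForbidden as exc:
    print(exc)
```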
41  resources/lib/defusedxml/pulldom.py  Normal file
						|  | @ -0,0 +1,41 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.dom.pulldom | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | from xml.dom.pulldom import parse as _parse | ||||||
|  | from xml.dom.pulldom import parseString as _parseString | ||||||
|  | from .sax import make_parser | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.dom.pulldom" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parse( | ||||||
|  |     stream_or_string, | ||||||
|  |     parser=None, | ||||||
|  |     bufsize=None, | ||||||
|  |     forbid_dtd=False, | ||||||
|  |     forbid_entities=True, | ||||||
|  |     forbid_external=True, | ||||||
|  | ): | ||||||
|  |     if parser is None: | ||||||
|  |         parser = make_parser() | ||||||
|  |         parser.forbid_dtd = forbid_dtd | ||||||
|  |         parser.forbid_entities = forbid_entities | ||||||
|  |         parser.forbid_external = forbid_external | ||||||
|  |     return _parse(stream_or_string, parser, bufsize) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parseString( | ||||||
|  |     string, parser=None, forbid_dtd=False, forbid_entities=True, forbid_external=True | ||||||
|  | ): | ||||||
|  |     if parser is None: | ||||||
|  |         parser = make_parser() | ||||||
|  |         parser.forbid_dtd = forbid_dtd | ||||||
|  |         parser.forbid_entities = forbid_entities | ||||||
|  |         parser.forbid_external = forbid_external | ||||||
|  |     return _parseString(string, parser) | ||||||
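The defused stream iterates like stock `xml.dom.pulldom`; a short sketch:

```python
from xml.dom.pulldom import START_ELEMENT

from defusedxml import pulldom

stream = pulldom.parseString("<root><item>a</item><item>b</item></root>")
for event, node in stream:
    if event == START_ELEMENT and node.tagName == "item":
        stream.expandNode(node)      # pull the subtree into the node
        print(node.firstChild.data)  # -> a, then b
```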
60  resources/lib/defusedxml/sax.py  Normal file
						|  | @ -0,0 +1,60 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xml.sax | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | from xml.sax import InputSource as _InputSource | ||||||
|  | from xml.sax import ErrorHandler as _ErrorHandler | ||||||
|  | 
 | ||||||
|  | from . import expatreader | ||||||
|  | 
 | ||||||
|  | __origin__ = "xml.sax" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parse( | ||||||
|  |     source, | ||||||
|  |     handler, | ||||||
|  |     errorHandler=_ErrorHandler(), | ||||||
|  |     forbid_dtd=False, | ||||||
|  |     forbid_entities=True, | ||||||
|  |     forbid_external=True, | ||||||
|  | ): | ||||||
|  |     parser = make_parser() | ||||||
|  |     parser.setContentHandler(handler) | ||||||
|  |     parser.setErrorHandler(errorHandler) | ||||||
|  |     parser.forbid_dtd = forbid_dtd | ||||||
|  |     parser.forbid_entities = forbid_entities | ||||||
|  |     parser.forbid_external = forbid_external | ||||||
|  |     parser.parse(source) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def parseString( | ||||||
|  |     string, | ||||||
|  |     handler, | ||||||
|  |     errorHandler=_ErrorHandler(), | ||||||
|  |     forbid_dtd=False, | ||||||
|  |     forbid_entities=True, | ||||||
|  |     forbid_external=True, | ||||||
|  | ): | ||||||
|  |     from io import BytesIO | ||||||
|  | 
 | ||||||
|  |     if errorHandler is None: | ||||||
|  |         errorHandler = _ErrorHandler() | ||||||
|  |     parser = make_parser() | ||||||
|  |     parser.setContentHandler(handler) | ||||||
|  |     parser.setErrorHandler(errorHandler) | ||||||
|  |     parser.forbid_dtd = forbid_dtd | ||||||
|  |     parser.forbid_entities = forbid_entities | ||||||
|  |     parser.forbid_external = forbid_external | ||||||
|  | 
 | ||||||
|  |     inpsrc = _InputSource() | ||||||
|  |     inpsrc.setByteStream(BytesIO(string)) | ||||||
|  |     parser.parse(inpsrc) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def make_parser(parser_list=[]): | ||||||
|  |     return expatreader.create_parser() | ||||||
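Both entry points accept a regular SAX `ContentHandler`; note that `parseString` wraps its argument in `BytesIO`, so it expects bytes. The `TextCollector` handler is a made-up example:

```python
from xml.sax.handler import ContentHandler

from defusedxml import EntitiesForbidden, sax

class TextCollector(ContentHandler):
    def __init__(self):
        super().__init__()
        self.chunks = []

    def characters(self, content):
        self.chunks.append(content)

handler = TextCollector()
sax.parseString(b"<root>hello</root>", handler)
print("".join(handler.chunks))  # -> hello

try:
    sax.parseString(b'<!DOCTYPE x [<!ENTITY e "boom">]><x>&e;</x>', TextCollector())
except EntitiesForbidden as exc:
    print(exc)
```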
153  resources/lib/defusedxml/xmlrpc.py  Normal file
						|  | @ -0,0 +1,153 @@ | ||||||
|  | # defusedxml | ||||||
|  | # | ||||||
|  | # Copyright (c) 2013 by Christian Heimes <christian@python.org> | ||||||
|  | # Licensed to PSF under a Contributor Agreement. | ||||||
|  | # See https://www.python.org/psf/license for licensing details. | ||||||
|  | """Defused xmlrpclib | ||||||
|  | 
 | ||||||
|  | Also defuses gzip bomb | ||||||
|  | """ | ||||||
|  | from __future__ import print_function, absolute_import | ||||||
|  | 
 | ||||||
|  | import io | ||||||
|  | 
 | ||||||
|  | from .common import DTDForbidden, EntitiesForbidden, ExternalReferenceForbidden, PY3 | ||||||
|  | 
 | ||||||
|  | if PY3: | ||||||
|  |     __origin__ = "xmlrpc.client" | ||||||
|  |     from xmlrpc.client import ExpatParser | ||||||
|  |     from xmlrpc import client as xmlrpc_client | ||||||
|  |     from xmlrpc import server as xmlrpc_server | ||||||
|  |     from xmlrpc.client import gzip_decode as _orig_gzip_decode | ||||||
|  |     from xmlrpc.client import GzipDecodedResponse as _OrigGzipDecodedResponse | ||||||
|  | else: | ||||||
|  |     __origin__ = "xmlrpclib" | ||||||
|  |     from xmlrpclib import ExpatParser | ||||||
|  |     import xmlrpclib as xmlrpc_client | ||||||
|  | 
 | ||||||
|  |     xmlrpc_server = None | ||||||
|  |     from xmlrpclib import gzip_decode as _orig_gzip_decode | ||||||
|  |     from xmlrpclib import GzipDecodedResponse as _OrigGzipDecodedResponse | ||||||
|  | 
 | ||||||
|  | try: | ||||||
|  |     import gzip | ||||||
|  | except ImportError:  # pragma: no cover | ||||||
|  |     gzip = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # Limit maximum request size to prevent resource exhaustion DoS | ||||||
|  | # Also used to limit maximum amount of gzip decoded data in order to prevent | ||||||
|  | # decompression bombs | ||||||
|  | # A value of -1 or smaller disables the limit | ||||||
|  | MAX_DATA = 30 * 1024 * 1024  # 30 MB | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def defused_gzip_decode(data, limit=None): | ||||||
|  |     """gzip encoded data -> unencoded data | ||||||
|  | 
 | ||||||
|  |     Decode data using the gzip content encoding as described in RFC 1952 | ||||||
|  |     """ | ||||||
|  |     if not gzip:  # pragma: no cover | ||||||
|  |         raise NotImplementedError | ||||||
|  |     if limit is None: | ||||||
|  |         limit = MAX_DATA | ||||||
|  |     f = io.BytesIO(data) | ||||||
|  |     gzf = gzip.GzipFile(mode="rb", fileobj=f) | ||||||
|  |     try: | ||||||
|  |         if limit < 0:  # no limit | ||||||
|  |             decoded = gzf.read() | ||||||
|  |         else: | ||||||
|  |             decoded = gzf.read(limit + 1) | ||||||
|  |     except IOError:  # pragma: no cover | ||||||
|  |         raise ValueError("invalid data") | ||||||
|  |     f.close() | ||||||
|  |     gzf.close() | ||||||
|  |     if limit >= 0 and len(decoded) > limit: | ||||||
|  |         raise ValueError("max gzipped payload length exceeded") | ||||||
|  |     return decoded | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedGzipDecodedResponse(gzip.GzipFile if gzip else object): | ||||||
|  |     """a file-like object to decode a response encoded with the gzip | ||||||
|  |     method, as described in RFC 1952. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, response, limit=None): | ||||||
|  |         # response doesn't support tell() and read(), required by | ||||||
|  |         # GzipFile | ||||||
|  |         if not gzip:  # pragma: no cover | ||||||
|  |             raise NotImplementedError | ||||||
|  |         self.limit = limit = limit if limit is not None else MAX_DATA | ||||||
|  |         if limit < 0:  # no limit | ||||||
|  |             data = response.read() | ||||||
|  |             self.readlength = None | ||||||
|  |         else: | ||||||
|  |             data = response.read(limit + 1) | ||||||
|  |             self.readlength = 0 | ||||||
|  |         if limit >= 0 and len(data) > limit: | ||||||
|  |             raise ValueError("max payload length exceeded") | ||||||
|  |         self.stringio = io.BytesIO(data) | ||||||
|  |         gzip.GzipFile.__init__(self, mode="rb", fileobj=self.stringio) | ||||||
|  | 
 | ||||||
|  |     def read(self, n): | ||||||
|  |         if self.limit >= 0: | ||||||
|  |             left = self.limit - self.readlength | ||||||
|  |             n = min(n, left + 1) | ||||||
|  |             data = gzip.GzipFile.read(self, n) | ||||||
|  |             self.readlength += len(data) | ||||||
|  |             if self.readlength > self.limit: | ||||||
|  |                 raise ValueError("max payload length exceeded") | ||||||
|  |             return data | ||||||
|  |         else: | ||||||
|  |             return gzip.GzipFile.read(self, n) | ||||||
|  | 
 | ||||||
|  |     def close(self): | ||||||
|  |         gzip.GzipFile.close(self) | ||||||
|  |         self.stringio.close() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DefusedExpatParser(ExpatParser): | ||||||
|  |     def __init__(self, target, forbid_dtd=False, forbid_entities=True, forbid_external=True): | ||||||
|  |         ExpatParser.__init__(self, target) | ||||||
|  |         self.forbid_dtd = forbid_dtd | ||||||
|  |         self.forbid_entities = forbid_entities | ||||||
|  |         self.forbid_external = forbid_external | ||||||
|  |         parser = self._parser | ||||||
|  |         if self.forbid_dtd: | ||||||
|  |             parser.StartDoctypeDeclHandler = self.defused_start_doctype_decl | ||||||
|  |         if self.forbid_entities: | ||||||
|  |             parser.EntityDeclHandler = self.defused_entity_decl | ||||||
|  |             parser.UnparsedEntityDeclHandler = self.defused_unparsed_entity_decl | ||||||
|  |         if self.forbid_external: | ||||||
|  |             parser.ExternalEntityRefHandler = self.defused_external_entity_ref_handler | ||||||
|  | 
 | ||||||
|  |     def defused_start_doctype_decl(self, name, sysid, pubid, has_internal_subset): | ||||||
|  |         raise DTDForbidden(name, sysid, pubid) | ||||||
|  | 
 | ||||||
|  |     def defused_entity_decl( | ||||||
|  |         self, name, is_parameter_entity, value, base, sysid, pubid, notation_name | ||||||
|  |     ): | ||||||
|  |         raise EntitiesForbidden(name, value, base, sysid, pubid, notation_name) | ||||||
|  | 
 | ||||||
|  |     def defused_unparsed_entity_decl(self, name, base, sysid, pubid, notation_name): | ||||||
|  |         # expat 1.2 | ||||||
|  |         raise EntitiesForbidden(name, None, base, sysid, pubid, notation_name)  # pragma: no cover | ||||||
|  | 
 | ||||||
|  |     def defused_external_entity_ref_handler(self, context, base, sysid, pubid): | ||||||
|  |         raise ExternalReferenceForbidden(context, base, sysid, pubid) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def monkey_patch(): | ||||||
|  |     xmlrpc_client.FastParser = DefusedExpatParser | ||||||
|  |     xmlrpc_client.GzipDecodedResponse = DefusedGzipDecodedResponse | ||||||
|  |     xmlrpc_client.gzip_decode = defused_gzip_decode | ||||||
|  |     if xmlrpc_server: | ||||||
|  |         xmlrpc_server.gzip_decode = defused_gzip_decode | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def unmonkey_patch(): | ||||||
|  |     xmlrpc_client.FastParser = None | ||||||
|  |     xmlrpc_client.GzipDecodedResponse = _OrigGzipDecodedResponse | ||||||
|  |     xmlrpc_client.gzip_decode = _orig_gzip_decode | ||||||
|  |     if xmlrpc_server: | ||||||
|  |         xmlrpc_server.gzip_decode = _orig_gzip_decode | ||||||
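A sketch of the intended usage on Python 3 (the endpoint URL is hypothetical; constructing a `ServerProxy` makes no network calls):

```python
import defusedxml.xmlrpc

# route xmlrpc.client parsing through DefusedExpatParser and cap
# gzip-decoded payloads at MAX_DATA bytes
defusedxml.xmlrpc.monkey_patch()

import xmlrpc.client

proxy = xmlrpc.client.ServerProxy("http://localhost:8000/")  # hypothetical endpoint
# ... calls made through `proxy` now use the defused parser ...

# the cap is a module-level constant; a negative value disables it
defusedxml.xmlrpc.MAX_DATA = 10 * 1024 * 1024  # 10 MB

defusedxml.xmlrpc.unmonkey_patch()  # restore the stock implementations
```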
4  resources/lib/deps/__init__.py  Normal file
						|  | @ -0,0 +1,4 @@ | ||||||
|  | import os | ||||||
|  | import sys | ||||||
|  | 
 | ||||||
|  | sys.path.insert(1, os.path.join(os.path.dirname(__file__))) | ||||||
4417  resources/lib/deps/bottle.py  Executable file
307  resources/lib/deps/simplecache.py  Normal file
						|  | @ -0,0 +1,307 @@ | ||||||
|  | #!/usr/bin/python | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | 
 | ||||||
|  | '''provides a simple stateless caching system for Kodi addons and plugins''' | ||||||
|  | 
 | ||||||
|  | import sys | ||||||
|  | import xbmcvfs | ||||||
|  | import xbmcgui | ||||||
|  | import xbmc | ||||||
|  | import xbmcaddon | ||||||
|  | import datetime | ||||||
|  | import time | ||||||
|  | import sqlite3 | ||||||
|  | import json | ||||||
|  | from functools import reduce | ||||||
|  | 
 | ||||||
|  | class SimpleCache(object): | ||||||
|  |     '''simple stateless caching system for Kodi''' | ||||||
|  |     enable_mem_cache = True | ||||||
|  |     data_is_json = False | ||||||
|  |     global_checksum = None | ||||||
|  |     _exit = False | ||||||
|  |     _auto_clean_interval = datetime.timedelta(hours=4) | ||||||
|  |     _win = None | ||||||
|  |     _busy_tasks = [] | ||||||
|  |     _database = None | ||||||
|  | 
 | ||||||
|  |     def __init__(self, addon_id): | ||||||
|  |         '''Initialize our caching class''' | ||||||
|  |         self.addon_id = addon_id | ||||||
|  |         self._win = xbmcgui.Window(10000) | ||||||
|  |         self._monitor = xbmc.Monitor() | ||||||
|  |         self.check_cleanup() | ||||||
|  |         self._log_msg("Initialized") | ||||||
|  | 
 | ||||||
|  |     def close(self): | ||||||
|  |         '''tell any tasks to stop immediately (as we can be called multithreaded) and clean up objects''' | ||||||
|  |         self._exit = True | ||||||
|  |         # wait for all tasks to complete | ||||||
|  |         while self._busy_tasks and not self._monitor.abortRequested(): | ||||||
|  |             xbmc.sleep(25) | ||||||
|  |         del self._win | ||||||
|  |         del self._monitor | ||||||
|  |         self._log_msg("Closed") | ||||||
|  | 
 | ||||||
|  |     def __del__(self): | ||||||
|  |         '''make sure close is called''' | ||||||
|  |         if not self._exit: | ||||||
|  |             self.close() | ||||||
|  | 
 | ||||||
|  |     def get(self, endpoint, checksum="", json_data=False): | ||||||
|  |         ''' | ||||||
|  |             get object from cache and return the results | ||||||
|  |             endpoint: the (unique) name of the cache object as reference | ||||||
|  |             checksum: optional argument to check if the checksum in the cache object matches the checksum provided | ||||||
|  |         ''' | ||||||
|  |         checksum = self._get_checksum(checksum) | ||||||
|  |         cur_time = self._get_timestamp(datetime.datetime.now()) | ||||||
|  |         result = None | ||||||
|  |         # 1: try memory cache first | ||||||
|  |         if self.enable_mem_cache: | ||||||
|  |             result = self._get_mem_cache(endpoint, checksum, cur_time, json_data) | ||||||
|  | 
 | ||||||
|  |         # 2: fall back to database cache | ||||||
|  |         if result is None: | ||||||
|  |             result = self._get_db_cache(endpoint, checksum, cur_time, json_data) | ||||||
|  | 
 | ||||||
|  |         return result | ||||||
|  | 
 | ||||||
|  |     def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=30), json_data=False): | ||||||
|  |         ''' | ||||||
|  |             set data in cache | ||||||
|  |         ''' | ||||||
|  |         task_name = "set.%s" % endpoint | ||||||
|  |         self._busy_tasks.append(task_name) | ||||||
|  |         checksum = self._get_checksum(checksum) | ||||||
|  |         expires = self._get_timestamp(datetime.datetime.now() + expiration) | ||||||
|  | 
 | ||||||
|  |         # memory cache: write to window property | ||||||
|  |         if self.enable_mem_cache and not self._exit: | ||||||
|  |             self._set_mem_cache(endpoint, checksum, expires, data, json_data) | ||||||
|  | 
 | ||||||
|  |         # db cache | ||||||
|  |         if not self._exit: | ||||||
|  |             self._set_db_cache(endpoint, checksum, expires, data, json_data) | ||||||
|  | 
 | ||||||
|  |         # remove this task from list | ||||||
|  |         self._busy_tasks.remove(task_name) | ||||||
|  | 
 | ||||||
|  |     def check_cleanup(self): | ||||||
|  |         '''check if cleanup is needed - public method, may be called by the calling addon''' | ||||||
|  |         cur_time = datetime.datetime.now() | ||||||
|  |         lastexecuted = self._win.getProperty("simplecache.clean.lastexecuted") | ||||||
|  |         if not lastexecuted: | ||||||
|  |             self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time)) | ||||||
|  |         elif (eval(lastexecuted) + self._auto_clean_interval) < cur_time: | ||||||
|  |             # cleanup needed... | ||||||
|  |             self._do_cleanup() | ||||||
|  | 
 | ||||||
|  |     def _get_mem_cache(self, endpoint, checksum, cur_time, json_data): | ||||||
|  |         ''' | ||||||
|  |             get cache data from memory cache | ||||||
|  |             we use window properties because we need to be stateless | ||||||
|  |         ''' | ||||||
|  |         result = None | ||||||
|  |         cachedata = self._win.getProperty(endpoint) | ||||||
|  | 
 | ||||||
|  |         if cachedata: | ||||||
|  |             if json_data or self.data_is_json: | ||||||
|  |                 cachedata = json.loads(cachedata) | ||||||
|  |             else: | ||||||
|  |                 cachedata = eval(cachedata) | ||||||
|  |             if cachedata[0] > cur_time: | ||||||
|  |                 if not checksum or checksum == cachedata[2]: | ||||||
|  |                     result = cachedata[1] | ||||||
|  |         return result | ||||||
|  | 
 | ||||||
|  |     def _set_mem_cache(self, endpoint, checksum, expires, data, json_data): | ||||||
|  |         ''' | ||||||
|  |             window property cache as alternative for memory cache | ||||||
|  |             useful for (stateless) plugins | ||||||
|  |         ''' | ||||||
|  |         cachedata = (expires, data, checksum) | ||||||
|  |         if json_data or self.data_is_json: | ||||||
|  |             cachedata_str = json.dumps(cachedata) | ||||||
|  |         else: | ||||||
|  |             cachedata_str = repr(cachedata) | ||||||
|  |         self._win.setProperty(endpoint, cachedata_str) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |     def _get_db_cache(self, endpoint, checksum, cur_time, json_data): | ||||||
|  |         '''get cache data from the sqlite database''' | ||||||
|  |         result = None | ||||||
|  |         query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?" | ||||||
|  |         cache_data = self._execute_sql(query, (endpoint,)) | ||||||
|  |         if cache_data: | ||||||
|  |             cache_data = cache_data.fetchone() | ||||||
|  |             if cache_data and cache_data[0] > cur_time: | ||||||
|  |                 if not checksum or cache_data[2] == checksum: | ||||||
|  |                     if json_data or self.data_is_json: | ||||||
|  |                         result = json.loads(cache_data[1]) | ||||||
|  |                     else: | ||||||
|  |                         result = eval(cache_data[1]) | ||||||
|  |                     # also set result in memory cache for further access | ||||||
|  |                     if self.enable_mem_cache: | ||||||
|  |                         self._set_mem_cache(endpoint, checksum, cache_data[0], result, json_data) | ||||||
|  |         return result | ||||||
|  | 
 | ||||||
|  |     def _set_db_cache(self, endpoint, checksum, expires, data, json_data): | ||||||
|  |         ''' store cache data in _database ''' | ||||||
|  |         query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)" | ||||||
|  |         if json_data or self.data_is_json: | ||||||
|  |             data = json.dumps(data) | ||||||
|  |         else: | ||||||
|  |             data = repr(data) | ||||||
|  |         self._execute_sql(query, (endpoint, expires, data, checksum)) | ||||||
|  | 
 | ||||||
|  |     def _do_cleanup(self): | ||||||
|  |         '''perform cleanup task''' | ||||||
|  |         if self._exit or self._monitor.abortRequested(): | ||||||
|  |             return | ||||||
|  |         self._busy_tasks.append(__name__) | ||||||
|  |         cur_time = datetime.datetime.now() | ||||||
|  |         cur_timestamp = self._get_timestamp(cur_time) | ||||||
|  |         self._log_msg("Running cleanup...") | ||||||
|  |         if self._win.getProperty("simplecachecleanbusy"): | ||||||
|  |             return | ||||||
|  |         self._win.setProperty("simplecachecleanbusy", "busy") | ||||||
|  | 
 | ||||||
|  |         query = "SELECT id, expires FROM simplecache" | ||||||
|  |         for cache_data in self._execute_sql(query).fetchall(): | ||||||
|  |             cache_id = cache_data[0] | ||||||
|  |             cache_expires = cache_data[1] | ||||||
|  | 
 | ||||||
|  |             if self._exit or self._monitor.abortRequested(): | ||||||
|  |                 return | ||||||
|  | 
 | ||||||
|  |             # always cleanup all memory objects on each interval | ||||||
|  |             self._win.clearProperty(cache_id) | ||||||
|  | 
 | ||||||
|  |             # clean up db cache object only if expired | ||||||
|  |             if cache_expires < cur_timestamp: | ||||||
|  |                 query = 'DELETE FROM simplecache WHERE id = ?' | ||||||
|  |                 self._execute_sql(query, (cache_id,)) | ||||||
|  |                 self._log_msg("delete from db %s" % cache_id) | ||||||
|  | 
 | ||||||
|  |         # compact db | ||||||
|  |         self._execute_sql("VACUUM") | ||||||
|  | 
 | ||||||
|  |         # remove task from list | ||||||
|  |         self._busy_tasks.remove(__name__) | ||||||
|  |         self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time)) | ||||||
|  |         self._win.clearProperty("simplecachecleanbusy") | ||||||
|  |         self._log_msg("Auto cleanup done") | ||||||
|  | 
 | ||||||
|  |     def _get_database(self): | ||||||
|  |         '''get reference to our sqlite database - performs basic integrity check''' | ||||||
|  |         addon = xbmcaddon.Addon(self.addon_id) | ||||||
|  |         dbpath = addon.getAddonInfo('profile') | ||||||
|  |         dbfile = xbmcvfs.translatePath("%s/simplecache.db" % dbpath) | ||||||
|  | 
 | ||||||
|  |         if not xbmcvfs.exists(dbpath): | ||||||
|  |             xbmcvfs.mkdirs(dbpath) | ||||||
|  |         del addon | ||||||
|  |         try: | ||||||
|  |             connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) | ||||||
|  |             connection.execute('SELECT * FROM simplecache LIMIT 1') | ||||||
|  |             return connection | ||||||
|  |         except Exception as error: | ||||||
|  |             # our database is corrupt or doesn't exist yet, so we simply try to recreate it | ||||||
|  |             if xbmcvfs.exists(dbfile): | ||||||
|  |                 xbmcvfs.delete(dbfile) | ||||||
|  |             try: | ||||||
|  |                 connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None) | ||||||
|  |                 connection.execute( | ||||||
|  |                     """CREATE TABLE IF NOT EXISTS simplecache( | ||||||
|  |                     id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""") | ||||||
|  |                 return connection | ||||||
|  |             except Exception as error: | ||||||
|  |                 self._log_msg("Exception while initializing _database: %s" % str(error), xbmc.LOGWARNING) | ||||||
|  |                 self.close() | ||||||
|  |                 return None | ||||||
|  | 
 | ||||||
|  |     def _execute_sql(self, query, data=None): | ||||||
|  |         '''little wrapper around execute and executemany to just retry a db command if db is locked''' | ||||||
|  |         retries = 0 | ||||||
|  |         result = None | ||||||
|  |         error = None | ||||||
|  |         # always use new db object because we need to be sure that data is available for other simplecache instances | ||||||
|  |         with self._get_database() as _database: | ||||||
|  |             while not retries == 10 and not self._monitor.abortRequested(): | ||||||
|  |                 if self._exit: | ||||||
|  |                     return None | ||||||
|  |                 try: | ||||||
|  |                     if isinstance(data, list): | ||||||
|  |                         result = _database.executemany(query, data) | ||||||
|  |                     elif data: | ||||||
|  |                         result = _database.execute(query, data) | ||||||
|  |                     else: | ||||||
|  |                         result = _database.execute(query) | ||||||
|  |                     return result | ||||||
|  |                 except sqlite3.OperationalError as error: | ||||||
|  |                     if "_database is locked" in error: | ||||||
|  |                         self._log_msg("retrying DB commit...") | ||||||
|  |                         retries += 1 | ||||||
|  |                         self._monitor.waitForAbort(0.5) | ||||||
|  |                     else: | ||||||
|  |                         break | ||||||
|  |                 except Exception as error: | ||||||
|  |                     break | ||||||
|  |             self._log_msg("_database ERROR ! -- %s" % str(error), xbmc.LOGWARNING) | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def _log_msg(msg, loglevel=xbmc.LOGDEBUG): | ||||||
|  |         '''helper to send a message to the kodi log''' | ||||||
|  |         xbmc.log("Skin Helper Simplecache --> %s" % msg, level=loglevel) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def _get_timestamp(date_time): | ||||||
|  |         '''Converts a datetime object to unix timestamp''' | ||||||
|  |         return int(time.mktime(date_time.timetuple())) | ||||||
|  | 
 | ||||||
|  |     def _get_checksum(self, stringinput): | ||||||
|  |         '''get int checksum from string''' | ||||||
|  |         if not stringinput and not self.global_checksum: | ||||||
|  |             return 0 | ||||||
|  |         if self.global_checksum: | ||||||
|  |             stringinput = "%s-%s" %(self.global_checksum, stringinput) | ||||||
|  |         else: | ||||||
|  |             stringinput = str(stringinput) | ||||||
|  |         return reduce(lambda x, y: x + y, map(ord, stringinput)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def use_cache(cache_days=14): | ||||||
|  |     ''' | ||||||
|  |         wrapper around our simple cache to use as a decorator | ||||||
|  |         Usage: define an instance of SimpleCache named "cache" (self.cache) in your class | ||||||
|  |         Any method that needs caching can then simply add @use_cache as a decorator | ||||||
|  |         NOTE: use unnamed arguments when calling the method and named arguments for optional settings | ||||||
|  |     ''' | ||||||
|  |     def decorator(func): | ||||||
|  |         '''our decorator''' | ||||||
|  |         def decorated(*args, **kwargs): | ||||||
|  |             '''process the original method and apply caching of the results''' | ||||||
|  |             method_class = args[0] | ||||||
|  |             method_class_name = method_class.__class__.__name__ | ||||||
|  |             cache_str = "%s.%s" % (method_class_name, func.__name__) | ||||||
|  |             # cache identifier is based on positional args only | ||||||
|  |             # named args are considered optional and ignored | ||||||
|  |             for item in args[1:]: | ||||||
|  |                 cache_str += u".%s" % item | ||||||
|  |             cache_str = cache_str.lower() | ||||||
|  |             cachedata = method_class.cache.get(cache_str) | ||||||
|  |             global_cache_ignore = False | ||||||
|  |             try: | ||||||
|  |                 global_cache_ignore = method_class.ignore_cache | ||||||
|  |             except Exception: | ||||||
|  |                 pass | ||||||
|  |             if cachedata is not None and not kwargs.get("ignore_cache", False) and not global_cache_ignore: | ||||||
|  |                 return cachedata | ||||||
|  |             else: | ||||||
|  |                 result = func(*args, **kwargs) | ||||||
|  |                 method_class.cache.set(cache_str, result, expiration=datetime.timedelta(days=cache_days)) | ||||||
|  |                 return result | ||||||
|  |         return decorated | ||||||
|  |     return decorator | ||||||
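A minimal usage sketch of the decorator above (the MusicInfo class, the get_artist method, and the `simplecache` import path are illustrative, not part of the addon):

from simplecache import SimpleCache, use_cache

class MusicInfo:
    def __init__(self):
        # the decorator looks up `self.cache`, so the attribute name matters
        self.cache = SimpleCache()

    @use_cache(cache_days=7)
    def get_artist(self, artist_name, ignore_cache=False):
        # the positional arg becomes part of the cache identifier,
        # e.g. "musicinfo.get_artist.nina simone"
        return {"name": artist_name, "bio": "..."}

info = MusicInfo()
info.get_artist("Nina Simone")                     # computed, then cached for 7 days
info.get_artist("Nina Simone")                     # served from the cache
info.get_artist("Nina Simone", ignore_cache=True)  # named arg bypasses the cache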
							
								
								
									
resources/lib/deps/spotipy/__init__.py (new file, 5 lines)
|  | @ -0,0 +1,5 @@ | ||||||
|  | from .cache_handler import *  # noqa | ||||||
|  | from .client import *  # noqa | ||||||
|  | from .exceptions import *  # noqa | ||||||
|  | from .oauth2 import *  # noqa | ||||||
|  | from .util import *  # noqa | ||||||
							
								
								
									
resources/lib/deps/spotipy/cache_handler.py (new file, 173 lines)
|  | @ -0,0 +1,173 @@ | ||||||
|  | __all__ = [ | ||||||
|  |     'CacheHandler', | ||||||
|  |     'CacheFileHandler', | ||||||
|  |     'DjangoSessionCacheHandler', | ||||||
|  |     'FlaskSessionCacheHandler', | ||||||
|  |     'MemoryCacheHandler'] | ||||||
|  | 
 | ||||||
|  | import errno | ||||||
|  | import json | ||||||
|  | import logging | ||||||
|  | import os | ||||||
|  | from spotipy.util import CLIENT_CREDS_ENV_VARS | ||||||
|  | 
 | ||||||
|  | logger = logging.getLogger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class CacheHandler(): | ||||||
|  |     """ | ||||||
|  |     An abstraction layer for handling the caching and retrieval of | ||||||
|  |     authorization tokens. | ||||||
|  | 
 | ||||||
|  |     Custom extensions of this class must implement get_cached_token | ||||||
|  |     and save_token_to_cache methods with the same input and output | ||||||
|  |     structure as the CacheHandler class. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def get_cached_token(self): | ||||||
|  |         """ | ||||||
|  |         Get and return a token_info dictionary object. | ||||||
|  |         """ | ||||||
|  |         # return token_info | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def save_token_to_cache(self, token_info): | ||||||
|  |         """ | ||||||
|  |         Save a token_info dictionary object to the cache and return None. | ||||||
|  |         """ | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
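As a sketch of that contract, a hypothetical subclass that keeps the token in an environment variable (class and variable names are illustrative; it relies on the CacheHandler base class defined just above):

import json
import os

class EnvVarCacheHandler(CacheHandler):
    """Hypothetical handler that keeps the token in an environment variable."""

    def __init__(self, var_name="SPOTIPY_TOKEN_INFO"):
        self.var_name = var_name

    def get_cached_token(self):
        raw = os.environ.get(self.var_name)
        return json.loads(raw) if raw else None

    def save_token_to_cache(self, token_info):
        os.environ[self.var_name] = json.dumps(token_info)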
|  | class CacheFileHandler(CacheHandler): | ||||||
|  |     """ | ||||||
|  |     Handles reading and writing cached Spotify authorization tokens | ||||||
|  |     as json files on disk. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, | ||||||
|  |                  cache_path=None, | ||||||
|  |                  username=None, | ||||||
|  |                  encoder_cls=None): | ||||||
|  |         """ | ||||||
|  |         Parameters: | ||||||
|  |              * cache_path: May be supplied, will otherwise be generated | ||||||
|  |                            (takes precedence over `username`) | ||||||
|  |              * username: May be supplied or set as environment variable | ||||||
|  |                          (will set `cache_path` to `.cache-{username}`) | ||||||
|  |              * encoder_cls: May be supplied as a means of overriding the | ||||||
|  |                         default serializer used for writing tokens to disk | ||||||
|  |         """ | ||||||
|  |         self.encoder_cls = encoder_cls | ||||||
|  |         if cache_path: | ||||||
|  |             self.cache_path = cache_path | ||||||
|  |         else: | ||||||
|  |             cache_path = ".cache" | ||||||
|  |             username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"])) | ||||||
|  |             if username: | ||||||
|  |                 cache_path += "-" + str(username) | ||||||
|  |             self.cache_path = cache_path | ||||||
|  | 
 | ||||||
|  |     def get_cached_token(self): | ||||||
|  |         token_info = None | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             f = open(self.cache_path) | ||||||
|  |             token_info_string = f.read() | ||||||
|  |             f.close() | ||||||
|  |             token_info = json.loads(token_info_string) | ||||||
|  | 
 | ||||||
|  |         except IOError as error: | ||||||
|  |             if error.errno == errno.ENOENT: | ||||||
|  |                 logger.debug("cache does not exist at: %s", self.cache_path) | ||||||
|  |             else: | ||||||
|  |                 logger.warning("Couldn't read cache at: %s", self.cache_path) | ||||||
|  | 
 | ||||||
|  |         return token_info | ||||||
|  | 
 | ||||||
|  |     def save_token_to_cache(self, token_info): | ||||||
|  |         try: | ||||||
|  |             f = open(self.cache_path, "w") | ||||||
|  |             f.write(json.dumps(token_info, cls=self.encoder_cls)) | ||||||
|  |             f.close() | ||||||
|  |         except IOError: | ||||||
|  |             logger.warning('Couldn\'t write token to cache at: %s', | ||||||
|  |                            self.cache_path) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class MemoryCacheHandler(CacheHandler): | ||||||
|  |     """ | ||||||
|  |     A cache handler that simply stores the token info in memory as an | ||||||
|  |     instance attribute of this class. The token info will be lost when this | ||||||
|  |     instance is freed. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, token_info=None): | ||||||
|  |         """ | ||||||
|  |         Parameters: | ||||||
|  |             * token_info: The token info to store in memory. Can be None. | ||||||
|  |         """ | ||||||
|  |         self.token_info = token_info | ||||||
|  | 
 | ||||||
|  |     def get_cached_token(self): | ||||||
|  |         return self.token_info | ||||||
|  | 
 | ||||||
|  |     def save_token_to_cache(self, token_info): | ||||||
|  |         self.token_info = token_info | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class DjangoSessionCacheHandler(CacheHandler): | ||||||
|  |     """ | ||||||
|  |     A cache handler that stores the token info in the session framework | ||||||
|  |     provided by Django. | ||||||
|  | 
 | ||||||
|  |     Read more at https://docs.djangoproject.com/en/3.2/topics/http/sessions/ | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, request): | ||||||
|  |         """ | ||||||
|  |         Parameters: | ||||||
|  |             * request: HttpRequest object provided by Django for every | ||||||
|  |             incoming request | ||||||
|  |         """ | ||||||
|  |         self.request = request | ||||||
|  | 
 | ||||||
|  |     def get_cached_token(self): | ||||||
|  |         token_info = None | ||||||
|  |         try: | ||||||
|  |             token_info = self.request.session['token_info'] | ||||||
|  |         except KeyError: | ||||||
|  |             logger.debug("Token not found in the session") | ||||||
|  | 
 | ||||||
|  |         return token_info | ||||||
|  | 
 | ||||||
|  |     def save_token_to_cache(self, token_info): | ||||||
|  |         try: | ||||||
|  |             self.request.session['token_info'] = token_info | ||||||
|  |         except Exception as e: | ||||||
|  |             logger.warning("Error saving token to cache: " + str(e)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class FlaskSessionCacheHandler(CacheHandler): | ||||||
|  |     """ | ||||||
|  |     A cache handler that stores the token info in the session framework | ||||||
|  |     provided by flask. | ||||||
|  |     """ | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session): | ||||||
|  |         self.session = session | ||||||
|  | 
 | ||||||
|  |     def get_cached_token(self): | ||||||
|  |         token_info = None | ||||||
|  |         try: | ||||||
|  |             token_info = self.session["token_info"] | ||||||
|  |         except KeyError: | ||||||
|  |             logger.debug("Token not found in the session") | ||||||
|  | 
 | ||||||
|  |         return token_info | ||||||
|  | 
 | ||||||
|  |     def save_token_to_cache(self, token_info): | ||||||
|  |         try: | ||||||
|  |             self.session["token_info"] = token_info | ||||||
|  |         except Exception as e: | ||||||
|  |             logger.warning("Error saving token to cache: " + str(e)) | ||||||
							
								
								
									
resources/lib/deps/spotipy/client.py (new file, 2035 lines; diff content not shown)
							
								
								
									
resources/lib/deps/spotipy/exceptions.py (new file, 16 lines)
|  | @ -0,0 +1,16 @@ | ||||||
|  | class SpotifyException(Exception): | ||||||
|  | 
 | ||||||
|  |     def __init__(self, http_status, code, msg, reason=None, headers=None): | ||||||
|  |         self.http_status = http_status | ||||||
|  |         self.code = code | ||||||
|  |         self.msg = msg | ||||||
|  |         self.reason = reason | ||||||
|  |         # `headers` is used to support `Retry-After` in the event of a | ||||||
|  |         # 429 status code. | ||||||
|  |         if headers is None: | ||||||
|  |             headers = {} | ||||||
|  |         self.headers = headers | ||||||
|  | 
 | ||||||
|  |     def __str__(self): | ||||||
|  |         return 'http status: {0}, code:{1} - {2}, reason: {3}'.format( | ||||||
|  |             self.http_status, self.code, self.msg, self.reason) | ||||||
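The `headers` attribute exists so callers can honor Retry-After on a 429; a hedged sketch of such a retry wrapper (`call_with_backoff` and `fetch` are made-up names, not part of spotipy):

import time

from spotipy.exceptions import SpotifyException

def call_with_backoff(fetch, max_attempts=3):
    """Retry `fetch` (a zero-arg callable) when the API answers 429."""
    for attempt in range(max_attempts):
        try:
            return fetch()
        except SpotifyException as exc:
            if exc.http_status != 429 or attempt == max_attempts - 1:
                raise
            # Retry-After is in seconds; default to 1 if the header is absent
            time.sleep(int(exc.headers.get("Retry-After", 1)))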
							
								
								
									
resources/lib/deps/spotipy/oauth2.py (new file, 1308 lines; diff content not shown)
							
								
								
									
resources/lib/deps/spotipy/util.py (new file, 135 lines)
|  | @ -0,0 +1,135 @@ | ||||||
|  | # -*- coding: utf-8 -*- | ||||||
|  | 
 | ||||||
|  | """ Shows a user's playlists (need to be authenticated via oauth) """ | ||||||
|  | 
 | ||||||
|  | __all__ = ["CLIENT_CREDS_ENV_VARS", "prompt_for_user_token"] | ||||||
|  | 
 | ||||||
|  | import logging | ||||||
|  | import os | ||||||
|  | import warnings | ||||||
|  | 
 | ||||||
|  | import spotipy | ||||||
|  | 
 | ||||||
|  | LOGGER = logging.getLogger(__name__) | ||||||
|  | 
 | ||||||
|  | CLIENT_CREDS_ENV_VARS = { | ||||||
|  |     "client_id": "SPOTIPY_CLIENT_ID", | ||||||
|  |     "client_secret": "SPOTIPY_CLIENT_SECRET", | ||||||
|  |     "client_username": "SPOTIPY_CLIENT_USERNAME", | ||||||
|  |     "redirect_uri": "SPOTIPY_REDIRECT_URI", | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def prompt_for_user_token( | ||||||
|  |     username=None, | ||||||
|  |     scope=None, | ||||||
|  |     client_id=None, | ||||||
|  |     client_secret=None, | ||||||
|  |     redirect_uri=None, | ||||||
|  |     cache_path=None, | ||||||
|  |     oauth_manager=None, | ||||||
|  |     show_dialog=False | ||||||
|  | ): | ||||||
|  |     """ Prompts the user to log in if necessary and returns | ||||||
|  |         the user token suitable for use with the spotipy.Spotify | ||||||
|  |         constructor | ||||||
|  | 
 | ||||||
|  |         Parameters: | ||||||
|  | 
 | ||||||
|  |          - username - the Spotify username (optional) | ||||||
|  |          - scope - the desired scope of the request (optional) | ||||||
|  |          - client_id - the client id of your app (required) | ||||||
|  |          - client_secret - the client secret of your app (required) | ||||||
|  |          - redirect_uri - the redirect URI of your app (required) | ||||||
|  |          - cache_path - path to location to save tokens (optional) | ||||||
|  |          - oauth_manager - OAuth manager object (optional) | ||||||
|  |          - show_dialog - If true, a login prompt always shows (optional, defaults to False) | ||||||
|  |     """ | ||||||
|  |     warnings.warn( | ||||||
|  |         "'prompt_for_user_token' is deprecated. " | ||||||
|  |         "Use the following instead: " | ||||||
|  |         "auth_manager=SpotifyOAuth(scope=scope); " | ||||||
|  |         "spotipy.Spotify(auth_manager=auth_manager)", | ||||||
|  |         DeprecationWarning | ||||||
|  |     ) | ||||||
|  |     if not oauth_manager: | ||||||
|  |         if not client_id: | ||||||
|  |             client_id = os.getenv("SPOTIPY_CLIENT_ID") | ||||||
|  | 
 | ||||||
|  |         if not client_secret: | ||||||
|  |             client_secret = os.getenv("SPOTIPY_CLIENT_SECRET") | ||||||
|  | 
 | ||||||
|  |         if not redirect_uri: | ||||||
|  |             redirect_uri = os.getenv("SPOTIPY_REDIRECT_URI") | ||||||
|  | 
 | ||||||
|  |         if not client_id: | ||||||
|  |             LOGGER.warning( | ||||||
|  |                 """ | ||||||
|  |                 You need to set your Spotify API credentials. | ||||||
|  |                 You can do this by setting environment variables like so: | ||||||
|  | 
 | ||||||
|  |                 export SPOTIPY_CLIENT_ID='your-spotify-client-id' | ||||||
|  |                 export SPOTIPY_CLIENT_SECRET='your-spotify-client-secret' | ||||||
|  |                 export SPOTIPY_REDIRECT_URI='your-app-redirect-url' | ||||||
|  | 
 | ||||||
|  |                 Get your credentials at | ||||||
|  |                     https://developer.spotify.com/my-applications | ||||||
|  |             """ | ||||||
|  |             ) | ||||||
|  |             raise spotipy.SpotifyException(550, -1, "no credentials set") | ||||||
|  | 
 | ||||||
|  |     sp_oauth = oauth_manager or spotipy.SpotifyOAuth( | ||||||
|  |         client_id, | ||||||
|  |         client_secret, | ||||||
|  |         redirect_uri, | ||||||
|  |         scope=scope, | ||||||
|  |         cache_path=cache_path, | ||||||
|  |         username=username, | ||||||
|  |         show_dialog=show_dialog | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  |     # try to get a valid token for this user, from the cache, | ||||||
|  |     # if not in the cache, then create a new one (this will send | ||||||
|  |     # the user to a web page where they can authorize this app) | ||||||
|  | 
 | ||||||
|  |     token_info = sp_oauth.validate_token(sp_oauth.cache_handler.get_cached_token()) | ||||||
|  | 
 | ||||||
|  |     if not token_info: | ||||||
|  |         code = sp_oauth.get_auth_response() | ||||||
|  |         token = sp_oauth.get_access_token(code, as_dict=False) | ||||||
|  |     else: | ||||||
|  |         return token_info["access_token"] | ||||||
|  | 
 | ||||||
|  |     # Auth'ed API request | ||||||
|  |     if token: | ||||||
|  |         return token | ||||||
|  |     else: | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_host_port(netloc): | ||||||
|  |     if ":" in netloc: | ||||||
|  |         host, port = netloc.split(":", 1) | ||||||
|  |         port = int(port) | ||||||
|  |     else: | ||||||
|  |         host = netloc | ||||||
|  |         port = None | ||||||
|  | 
 | ||||||
|  |     return host, port | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def normalize_scope(scope): | ||||||
|  |     if scope: | ||||||
|  |         if isinstance(scope, str): | ||||||
|  |             scopes = scope.split(',') | ||||||
|  |         elif isinstance(scope, list) or isinstance(scope, tuple): | ||||||
|  |             scopes = scope | ||||||
|  |         else: | ||||||
|  |             raise Exception( | ||||||
|  |                 "Unsupported scope value, please either provide a list of scopes, " | ||||||
|  |                 "or a string of scopes separated by commas" | ||||||
|  |             ) | ||||||
|  |         return " ".join(sorted(scopes)) | ||||||
|  |     else: | ||||||
|  |         return None | ||||||
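Quick checks of the two helpers above (expected values derived from the code itself):

assert get_host_port("127.0.0.1:8080") == ("127.0.0.1", 8080)
assert get_host_port("localhost") == ("localhost", None)

# a comma-separated string, list, or tuple all normalize to a sorted,
# space-separated scope string
assert normalize_scope("user-read-email,user-library-read") == \
    "user-library-read user-read-email"
assert normalize_scope(None) is None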
							
								
								
									
resources/lib/http_video_player_setter.py (new file, 84 lines)
|  | @ -0,0 +1,84 @@ | ||||||
|  | import os | ||||||
|  | from xml.etree import ElementTree | ||||||
|  | 
 | ||||||
|  | import xbmcvfs | ||||||
|  | from xbmc import LOGDEBUG | ||||||
|  | 
 | ||||||
|  | from utils import ADDON_ID, log_msg | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class HttpVideoPlayerSetter: | ||||||
|  |     def __init__(self): | ||||||
|  |         self.__plugin_name = ADDON_ID | ||||||
|  |         self.__player_rules_filename = xbmcvfs.translatePath( | ||||||
|  |             f"special://masterprofile/playercorefactory.xml" | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def set_http_rule(self) -> bool: | ||||||
|  |         if not os.path.exists(self.__player_rules_filename): | ||||||
|  |             self.__create_new_player_rules() | ||||||
|  |             log_msg(f"Created a new file '{self.__player_rules_filename}' with a video http rule.") | ||||||
|  |             return True | ||||||
|  |         else: | ||||||
|  |             if self.__add_http_rule(): | ||||||
|  |                 log_msg(f"Added a video http rule to '{self.__player_rules_filename}'") | ||||||
|  |                 return True | ||||||
|  | 
 | ||||||
|  |             log_msg( | ||||||
|  |                 f"There is already a video http rule in '{self.__player_rules_filename}'." | ||||||
|  |                 " Nothing to do.", | ||||||
|  |                 LOGDEBUG, | ||||||
|  |             ) | ||||||
|  |             return False | ||||||
|  | 
 | ||||||
|  |     def __create_new_player_rules(self) -> None: | ||||||
|  |         xml_str = f"""<?xml version='1.0' encoding='utf-8'?> | ||||||
|  | <playercorefactory> | ||||||
|  |   <!-- This file created by the '{self.__plugin_name}' addon. -->      | ||||||
|  |   <rules name="system rules"> | ||||||
|  |     <rule name="http" protocols="http" player="VideoPlayer" /> | ||||||
|  |   </rules> | ||||||
|  | </playercorefactory> | ||||||
|  | """ | ||||||
|  |         with open(self.__player_rules_filename, "w") as f: | ||||||
|  |             f.write(xml_str) | ||||||
|  | 
 | ||||||
|  |     def __add_http_rule(self) -> bool: | ||||||
|  |         class CommentedTreeBuilder(ElementTree.TreeBuilder): | ||||||
|  |             # keep XML comments when parsing; the default TreeBuilder drops them | ||||||
|  |             def comment(self, data): | ||||||
|  |                 self.start(ElementTree.Comment, {}) | ||||||
|  |                 self.data(data) | ||||||
|  |                 self.end(ElementTree.Comment) | ||||||
|  | 
 | ||||||
|  |         parser = ElementTree.XMLParser(target=CommentedTreeBuilder()) | ||||||
|  |         tree = ElementTree.parse(self.__player_rules_filename, parser=parser) | ||||||
|  |         root = tree.getroot() | ||||||
|  | 
 | ||||||
|  |         http_rule = root.findall("./rules/rule/[@protocols='http']") | ||||||
|  |         if http_rule: | ||||||
|  |             return False | ||||||
|  | 
 | ||||||
|  |         rules = root.find("./rules") | ||||||
|  | 
 | ||||||
|  |         attributes = { | ||||||
|  |             "name": "http", | ||||||
|  |             "protocols": "http", | ||||||
|  |             "player": "VideoPlayer", | ||||||
|  |         } | ||||||
|  |         new_rule = ElementTree.Element("rule", attributes) | ||||||
|  |         new_rule.tail = "\n\n" + "    " | ||||||
|  |         rules.insert(0, new_rule) | ||||||
|  | 
 | ||||||
|  |         comment = ElementTree.Comment( | ||||||
|  |             f" This http rule added by the '{self.__plugin_name}' addon. " | ||||||
|  |         ) | ||||||
|  |         comment.tail = "\n" + "    " | ||||||
|  |         rules.insert(0, comment) | ||||||
|  | 
 | ||||||
|  |         xml_str = ElementTree.tostring(root, encoding="unicode", xml_declaration=True) | ||||||
|  | 
 | ||||||
|  |         with open(self.__player_rules_filename, "w") as f: | ||||||
|  |             f.write(xml_str) | ||||||
|  |             f.write("\n") | ||||||
|  | 
 | ||||||
|  |         return True | ||||||
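A usage sketch; this only does something meaningful inside Kodi, where `xbmcvfs` and the special:// path resolve, and the module name in the import is assumed from the file path:

from http_video_player_setter import HttpVideoPlayerSetter

setter = HttpVideoPlayerSetter()
if setter.set_http_rule():
    # a rule was written; Kodi reads playercorefactory.xml at startup,
    # so a restart may be needed before it takes effect
    pass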
							
								
								
									
resources/lib/librespot/__init__.py (new file, 34 lines)
|  | @ -0,0 +1,34 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | from librespot.crypto import DiffieHellman | ||||||
|  | from librespot.proto.Keyexchange_pb2 import BuildInfo, Platform, Product, ProductFlags | ||||||
|  | from librespot.structure import Closeable, Runnable | ||||||
|  | import platform | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Version: | ||||||
|  |     version_name = "0.0.9" | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def platform() -> Platform: | ||||||
|  |         if platform.system() == "Windows": | ||||||
|  |             return Platform.PLATFORM_WIN32_X86 | ||||||
|  |         if platform.system() == "Darwin": | ||||||
|  |             return Platform.PLATFORM_OSX_X86 | ||||||
|  |         return Platform.PLATFORM_LINUX_X86 | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def version_string(): | ||||||
|  |         return "librespot-python " + Version.version_name | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def system_info_string(): | ||||||
|  |         return Version.version_string() + \ | ||||||
|  |                "; Python " + platform.python_version() + \ | ||||||
|  |                "; " + platform.system() | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def standard_build_info() -> BuildInfo: | ||||||
|  |         return BuildInfo(product=Product.PRODUCT_CLIENT, | ||||||
|  |                          product_flags=[ProductFlags.PRODUCT_FLAG_NONE], | ||||||
|  |                          platform=Version.platform(), | ||||||
|  |                          version=117300517) | ||||||
							
								
								
									
resources/lib/librespot/audio/__init__.py (new file, 912 lines)
|  | @ -0,0 +1,912 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | from librespot import util | ||||||
|  | from librespot.audio.decrypt import AesAudioDecrypt | ||||||
|  | from librespot.audio.format import SuperAudioFormat | ||||||
|  | from librespot.audio.storage import ChannelManager | ||||||
|  | from librespot.cache import CacheManager | ||||||
|  | from librespot.crypto import Packet | ||||||
|  | from librespot.metadata import EpisodeId, PlayableId, TrackId | ||||||
|  | from librespot.proto import Metadata_pb2 as Metadata, StorageResolve_pb2 as StorageResolve | ||||||
|  | from librespot.structure import AudioDecrypt, AudioQualityPicker, Closeable, FeederException, GeneralAudioStream, GeneralWritableStream, HaltListener, NoopAudioDecrypt, PacketsReceiver | ||||||
|  | import concurrent.futures | ||||||
|  | import io | ||||||
|  | import logging | ||||||
|  | import math | ||||||
|  | import queue | ||||||
|  | import random | ||||||
|  | import struct | ||||||
|  | import threading | ||||||
|  | import time | ||||||
|  | import typing | ||||||
|  | import urllib.parse | ||||||
|  | 
 | ||||||
|  | if typing.TYPE_CHECKING: | ||||||
|  |     from librespot.core import Session | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class AbsChunkedInputStream(io.BytesIO, HaltListener): | ||||||
|  |     chunk_exception = None | ||||||
|  |     closed = False | ||||||
|  |     max_chunk_tries = 128 | ||||||
|  |     preload_ahead = 3 | ||||||
|  |     preload_chunk_retries = 2 | ||||||
|  |     retries: typing.List[int] | ||||||
|  |     retry_on_chunk_error: bool | ||||||
|  |     wait_lock: threading.Condition = threading.Condition() | ||||||
|  |     wait_for_chunk = -1 | ||||||
|  |     __decoded_length = 0 | ||||||
|  |     __mark = 0 | ||||||
|  |     __pos = 0 | ||||||
|  | 
 | ||||||
|  |     def __init__(self, retry_on_chunk_error: bool): | ||||||
|  |         super().__init__() | ||||||
|  |         self.retries = [0] * self.chunks() | ||||||
|  |         self.retry_on_chunk_error = retry_on_chunk_error | ||||||
|  | 
 | ||||||
|  |     def is_closed(self) -> bool: | ||||||
|  |         return self.closed | ||||||
|  | 
 | ||||||
|  |     def buffer(self) -> typing.List[bytes]: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def size(self) -> int: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def close(self) -> None: | ||||||
|  |         self.closed = True | ||||||
|  |         with self.wait_lock: | ||||||
|  |             self.wait_lock.notify_all() | ||||||
|  | 
 | ||||||
|  |     def available(self): | ||||||
|  |         return self.size() - self.__pos | ||||||
|  | 
 | ||||||
|  |     def mark_supported(self) -> bool: | ||||||
|  |         return True | ||||||
|  | 
 | ||||||
|  |     def mark(self, read_ahead_limit: int) -> None: | ||||||
|  |         self.__mark = self.__pos | ||||||
|  | 
 | ||||||
|  |     def reset(self) -> None: | ||||||
|  |         self.__pos = self.__mark | ||||||
|  | 
 | ||||||
|  |     def pos(self) -> int: | ||||||
|  |         return self.__pos | ||||||
|  | 
 | ||||||
|  |     def seek(self, where: int, **kwargs) -> None: | ||||||
|  |         if where < 0: | ||||||
|  |             raise TypeError() | ||||||
|  |         if self.closed: | ||||||
|  |             raise IOError("Stream is closed!") | ||||||
|  |         self.__pos = where | ||||||
|  |         self.check_availability(int(self.__pos / (128 * 1024)), False, False) | ||||||
|  | 
 | ||||||
|  |     def skip(self, n: int) -> int: | ||||||
|  |         if n < 0: | ||||||
|  |             raise TypeError() | ||||||
|  |         if self.closed: | ||||||
|  |             raise IOError("Stream is closed!") | ||||||
|  |         k = self.size() - self.__pos | ||||||
|  |         if n < k: | ||||||
|  |             k = n | ||||||
|  |         self.__pos += k | ||||||
|  |         chunk = int(self.__pos / (128 * 1024)) | ||||||
|  |         self.check_availability(chunk, False, False) | ||||||
|  |         return k | ||||||
|  | 
 | ||||||
|  |     def requested_chunks(self) -> typing.List[bool]: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def available_chunks(self) -> typing.List[bool]: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def chunks(self) -> int: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def request_chunk_from_stream(self, index: int) -> None: | ||||||
|  |         raise NotImplementedError() | ||||||
|  | 
 | ||||||
|  |     def should_retry(self, chunk: int) -> bool: | ||||||
|  |         if self.retries[chunk] < 1: | ||||||
|  |             return True | ||||||
|  |         if self.retries[chunk] > self.max_chunk_tries: | ||||||
|  |             return False | ||||||
|  |         return self.retry_on_chunk_error | ||||||
|  | 
 | ||||||
|  |     def check_availability(self, chunk: int, wait: bool, halted: bool) -> None: | ||||||
|  |         if halted and not wait: | ||||||
|  |             raise TypeError() | ||||||
|  |         if not self.requested_chunks()[chunk]: | ||||||
|  |             self.request_chunk_from_stream(chunk) | ||||||
|  |             self.requested_chunks()[chunk] = True | ||||||
|  |         for i in range(chunk + 1, | ||||||
|  |                        min(self.chunks() - 1, chunk + self.preload_ahead) + 1): | ||||||
|  |             if (not self.requested_chunks()[i] | ||||||
|  |                     and self.retries[i] < self.preload_chunk_retries): | ||||||
|  |                 self.request_chunk_from_stream(i) | ||||||
|  |                 self.requested_chunks()[i] = True | ||||||
|  |         if wait: | ||||||
|  |             if self.available_chunks()[chunk]: | ||||||
|  |                 return | ||||||
|  |             retry = False | ||||||
|  |             with self.wait_lock: | ||||||
|  |                 if not halted: | ||||||
|  |                     self.stream_read_halted(chunk, int(time.time() * 1000)) | ||||||
|  |                 self.chunk_exception = None | ||||||
|  |                 self.wait_for_chunk = chunk | ||||||
|  |                 self.wait_lock.wait_for(lambda: self.available_chunks()[chunk]) | ||||||
|  |                 if self.closed: | ||||||
|  |                     return | ||||||
|  |                 if self.chunk_exception is not None: | ||||||
|  |                     if self.should_retry(chunk): | ||||||
|  |                         retry = True | ||||||
|  |                     else: | ||||||
|  |                         raise AbsChunkedInputStream.ChunkException | ||||||
|  |                 if not retry: | ||||||
|  |                     self.stream_read_halted(chunk, int(time.time() * 1000)) | ||||||
|  |             if retry: | ||||||
|  |                 time.sleep(math.log10(self.retries[chunk])) | ||||||
|  |                 self.check_availability(chunk, True, True) | ||||||
|  | 
 | ||||||
|  |     def read(self, __size: int = 0) -> bytes: | ||||||
|  |         if self.closed: | ||||||
|  |             raise IOError("Stream is closed!") | ||||||
|  |         if __size <= 0: | ||||||
|  |             if self.__pos == self.size(): | ||||||
|  |                 return b"" | ||||||
|  |             buffer = io.BytesIO() | ||||||
|  |             total_size = self.size() | ||||||
|  |             chunk = int(self.__pos / (128 * 1024)) | ||||||
|  |             chunk_off = int(self.__pos % (128 * 1024)) | ||||||
|  |             chunk_total = int(math.ceil(total_size / (128 * 1024))) | ||||||
|  |             self.check_availability(chunk, True, False) | ||||||
|  |             buffer.write(self.buffer()[chunk][chunk_off:]) | ||||||
|  |             chunk += 1 | ||||||
|  |             if chunk != chunk_total: | ||||||
|  |                 while chunk <= chunk_total - 1: | ||||||
|  |                     self.check_availability(chunk, True, False) | ||||||
|  |                     buffer.write(self.buffer()[chunk]) | ||||||
|  |                     chunk += 1 | ||||||
|  |             buffer.seek(0) | ||||||
|  |             self.__pos += buffer.getbuffer().nbytes | ||||||
|  |             return buffer.read() | ||||||
|  |         buffer = io.BytesIO() | ||||||
|  |         chunk = int(self.__pos / (128 * 1024)) | ||||||
|  |         chunk_off = int(self.__pos % (128 * 1024)) | ||||||
|  |         chunk_end = int(__size / (128 * 1024)) | ||||||
|  |         chunk_end_off = int(__size % (128 * 1024)) | ||||||
|  |         if chunk_end > self.size(): | ||||||
|  |             chunk_end = int(self.size() / (128 * 1024)) | ||||||
|  |             chunk_end_off = int(self.size() % (128 * 1024)) | ||||||
|  |         self.check_availability(chunk, True, False) | ||||||
|  |         if chunk_off + __size > len(self.buffer()[chunk]): | ||||||
|  |             buffer.write(self.buffer()[chunk][chunk_off:]) | ||||||
|  |             chunk += 1 | ||||||
|  |             while chunk <= chunk_end: | ||||||
|  |                 self.check_availability(chunk, True, False) | ||||||
|  |                 if chunk == chunk_end: | ||||||
|  |                     buffer.write(self.buffer()[chunk][:chunk_end_off]) | ||||||
|  |                 else: | ||||||
|  |                     buffer.write(self.buffer()[chunk]) | ||||||
|  |                 chunk += 1 | ||||||
|  |         else: | ||||||
|  |             buffer.write(self.buffer()[chunk][chunk_off:chunk_off + __size]) | ||||||
|  |         buffer.seek(0) | ||||||
|  |         self.__pos += buffer.getbuffer().nbytes | ||||||
|  |         return buffer.read() | ||||||
|  | 
 | ||||||
|  |     def notify_chunk_available(self, index: int) -> None: | ||||||
|  |         self.available_chunks()[index] = True | ||||||
|  |         self.__decoded_length += len(self.buffer()[index]) | ||||||
|  |         with self.wait_lock: | ||||||
|  |             if index == self.wait_for_chunk and not self.closed: | ||||||
|  |                 self.wait_for_chunk = -1 | ||||||
|  |                 self.wait_lock.notify_all() | ||||||
|  | 
 | ||||||
|  |     def notify_chunk_error(self, index: int, ex): | ||||||
|  |         self.available_chunks()[index] = False | ||||||
|  |         self.requested_chunks()[index] = False | ||||||
|  |         self.retries[index] += 1 | ||||||
|  |         with self.wait_lock: | ||||||
|  |             if index == self.wait_for_chunk and not self.closed: | ||||||
|  |                 self.chunk_exception = ex | ||||||
|  |                 self.wait_for_chunk = -1 | ||||||
|  |                 self.wait_lock.notify_all() | ||||||
|  | 
 | ||||||
|  |     def decoded_length(self): | ||||||
|  |         return self.__decoded_length | ||||||
|  | 
 | ||||||
|  |     class ChunkException(IOError): | ||||||
|  | 
 | ||||||
|  |         @staticmethod | ||||||
|  |         def from_stream_error(stream_error: int): | ||||||
|  |             return AbsChunkedInputStream \ | ||||||
|  |                 .ChunkException("Failed due to stream error, code: {}".format(stream_error)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
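Positions are mapped onto fixed 128 KiB chunks throughout the class above; a standalone sketch of the index/offset arithmetic (`chunk_coords` is a made-up helper name):

CHUNK_SIZE = 128 * 1024  # matches the hard-coded 128 * 1024 above

def chunk_coords(pos: int):
    """Return (chunk index, offset within that chunk) for a byte position."""
    return pos // CHUNK_SIZE, pos % CHUNK_SIZE

print(chunk_coords(0))       # (0, 0)
print(chunk_coords(131072))  # (1, 0): first byte of the second chunk
print(chunk_coords(200000))  # (1, 68928)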
|  | class AudioKeyManager(PacketsReceiver, Closeable): | ||||||
|  |     audio_key_request_timeout = 20 | ||||||
|  |     logger = logging.getLogger("Librespot:AudioKeyManager") | ||||||
|  |     __callbacks: typing.Dict[int, Callback] = {} | ||||||
|  |     __seq_holder = 0 | ||||||
|  |     __seq_holder_lock = threading.Condition() | ||||||
|  |     __session: Session | ||||||
|  |     __zero_short = b"\x00\x00" | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         self.__session = session | ||||||
|  | 
 | ||||||
|  |     def dispatch(self, packet: Packet) -> None: | ||||||
|  |         payload = io.BytesIO(packet.payload) | ||||||
|  |         seq = struct.unpack(">i", payload.read(4))[0] | ||||||
|  |         callback = self.__callbacks.get(seq) | ||||||
|  |         if callback is None: | ||||||
|  |             self.logger.warning( | ||||||
|  |                 "Couldn't find callback for seq: {}".format(seq)) | ||||||
|  |             return | ||||||
|  |         if packet.is_cmd(Packet.Type.aes_key): | ||||||
|  |             key = payload.read(16) | ||||||
|  |             callback.key(key) | ||||||
|  |         elif packet.is_cmd(Packet.Type.aes_key_error): | ||||||
|  |             code = struct.unpack(">H", payload.read(2))[0] | ||||||
|  |             callback.error(code) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning( | ||||||
|  |                 "Couldn't handle packet, cmd: {}, length: {}".format( | ||||||
|  |                     packet.cmd, len(packet.payload))) | ||||||
|  | 
 | ||||||
|  |     def get_audio_key(self, | ||||||
|  |                       gid: bytes, | ||||||
|  |                       file_id: bytes, | ||||||
|  |                       retry: bool = True) -> bytes: | ||||||
|  |         seq: int | ||||||
|  |         with self.__seq_holder_lock: | ||||||
|  |             seq = self.__seq_holder | ||||||
|  |             self.__seq_holder += 1 | ||||||
|  |         out = io.BytesIO() | ||||||
|  |         out.write(file_id) | ||||||
|  |         out.write(gid) | ||||||
|  |         out.write(struct.pack(">i", seq)) | ||||||
|  |         out.write(self.__zero_short) | ||||||
|  |         out.seek(0) | ||||||
|  |         self.__session.send(Packet.Type.request_key, out.read()) | ||||||
|  |         callback = AudioKeyManager.SyncCallback(self) | ||||||
|  |         self.__callbacks[seq] = callback | ||||||
|  |         key = callback.wait_response() | ||||||
|  |         if key is None: | ||||||
|  |             if retry: | ||||||
|  |                 return self.get_audio_key(gid, file_id, False) | ||||||
|  |             raise RuntimeError( | ||||||
|  |                 "Failed fetching audio key! gid: {}, fileId: {}".format( | ||||||
|  |                     util.bytes_to_hex(gid), util.bytes_to_hex(file_id))) | ||||||
|  |         return key | ||||||
|  | 
 | ||||||
|  |     class Callback: | ||||||
|  | 
 | ||||||
|  |         def key(self, key: bytes) -> None: | ||||||
|  |             raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |         def error(self, code: int) -> None: | ||||||
|  |             raise NotImplementedError | ||||||
|  | 
 | ||||||
|  |     class SyncCallback(Callback): | ||||||
|  |         __audio_key_manager: AudioKeyManager | ||||||
|  |         __reference = queue.Queue() | ||||||
|  |         __reference_lock = threading.Condition() | ||||||
|  | 
 | ||||||
|  |         def __init__(self, audio_key_manager: AudioKeyManager): | ||||||
|  |             self.__audio_key_manager = audio_key_manager | ||||||
|  | 
 | ||||||
|  |         def key(self, key: bytes) -> None: | ||||||
|  |             with self.__reference_lock: | ||||||
|  |                 self.__reference.put(key) | ||||||
|  |                 self.__reference_lock.notify_all() | ||||||
|  | 
 | ||||||
|  |         def error(self, code: int) -> None: | ||||||
|  |             self.__audio_key_manager.logger.fatal( | ||||||
|  |                 "Audio key error, code: {}".format(code)) | ||||||
|  |             with self.__reference_lock: | ||||||
|  |                 self.__reference.put(None) | ||||||
|  |                 self.__reference_lock.notify_all() | ||||||
|  | 
 | ||||||
|  |         def wait_response(self) -> bytes: | ||||||
|  |             with self.__reference_lock: | ||||||
|  |                 self.__reference_lock.wait( | ||||||
|  |                     AudioKeyManager.audio_key_request_timeout) | ||||||
|  |                 return self.__reference.get(block=False) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
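For reference, the request payload assembled in get_audio_key above is fileId + gid + a big-endian sequence number + two zero bytes; a standalone sketch of that layout (the 20-byte fileId and 16-byte gid are illustrative sizes):

import struct

def build_key_request(gid: bytes, file_id: bytes, seq: int) -> bytes:
    # same layout as AudioKeyManager.get_audio_key: fileId, gid, >i seq, 0x0000
    return file_id + gid + struct.pack(">i", seq) + b"\x00\x00"

payload = build_key_request(gid=b"\x01" * 16, file_id=b"\x02" * 20, seq=7)
seq = struct.unpack(">i", payload[36:40])[0]  # 20-byte fileId + 16-byte gid
assert seq == 7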
|  | class CdnFeedHelper: | ||||||
|  |     _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def get_url(resp: StorageResolve.StorageResolveResponse) -> str: | ||||||
|  |         selected_url = random.choice(resp.cdnurl) | ||||||
|  |         while "audio4-gm-fb" in selected_url or "audio-gm-fb" in selected_url: | ||||||
|  |             selected_url = random.choice(resp.cdnurl) | ||||||
|  |         return selected_url | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def load_track( | ||||||
|  |             session: Session, track: Metadata.Track, file: Metadata.AudioFile, | ||||||
|  |             resp_or_url: typing.Union[StorageResolve.StorageResolveResponse, | ||||||
|  |                                       str], preload: bool, | ||||||
|  |             halt_listener: HaltListener) -> PlayableContentFeeder.LoadedStream: | ||||||
|  |         if type(resp_or_url) is str: | ||||||
|  |             url = resp_or_url | ||||||
|  |         else: | ||||||
|  |             url = CdnFeedHelper.get_url(resp_or_url) | ||||||
|  |         start = int(time.time() * 1000) | ||||||
|  |         key = session.audio_key().get_audio_key(track.gid, file.file_id) | ||||||
|  |         audio_key_time = int(time.time() * 1000) - start | ||||||
|  | 
 | ||||||
|  |         streamer = session.cdn().stream_file(file, key, url, halt_listener) | ||||||
|  |         input_stream = streamer.stream() | ||||||
|  |         normalization_data = NormalizationData.read(input_stream) | ||||||
|  |         if input_stream.skip(0xA7) != 0xA7: | ||||||
|  |             raise IOError("Couldn't skip 0xa7 bytes!") | ||||||
|  |         return PlayableContentFeeder.LoadedStream( | ||||||
|  |             track, | ||||||
|  |             streamer, | ||||||
|  |             normalization_data, | ||||||
|  |             PlayableContentFeeder.Metrics(file.file_id, preload, | ||||||
|  |                                           -1 if preload else audio_key_time), | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def load_episode_external( | ||||||
|  |             session: Session, episode: Metadata.Episode, | ||||||
|  |             halt_listener: HaltListener) -> PlayableContentFeeder.LoadedStream: | ||||||
|  |         resp = session.client().head(episode.external_url) | ||||||
|  | 
 | ||||||
|  |         if resp.status_code != 200: | ||||||
|  |             CdnFeedHelper._LOGGER.warning("Couldn't resolve redirect!") | ||||||
|  | 
 | ||||||
|  |         url = resp.url | ||||||
|  |         CdnFeedHelper._LOGGER.debug("Fetched external url for {}: {}".format( | ||||||
|  |             util.bytes_to_hex(episode.gid), url)) | ||||||
|  | 
 | ||||||
|  |         streamer = session.cdn().stream_external_episode( | ||||||
|  |             episode, url, halt_listener) | ||||||
|  |         return PlayableContentFeeder.LoadedStream( | ||||||
|  |             episode, | ||||||
|  |             streamer, | ||||||
|  |             None, | ||||||
|  |             PlayableContentFeeder.Metrics(None, False, -1), | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def load_episode( | ||||||
|  |         session: Session, | ||||||
|  |         episode: Metadata.Episode, | ||||||
|  |         file: Metadata.AudioFile, | ||||||
|  |         resp_or_url: typing.Union[StorageResolve.StorageResolveResponse, str], | ||||||
|  |         preload: bool, | ||||||
|  |         halt_listener: HaltListener, | ||||||
|  |     ) -> PlayableContentFeeder.LoadedStream: | ||||||
|  |         if type(resp_or_url) is str: | ||||||
|  |             url = resp_or_url | ||||||
|  |         else: | ||||||
|  |             url = CdnFeedHelper.get_url(resp_or_url) | ||||||
|  |         start = int(time.time() * 1000) | ||||||
|  |         key = session.audio_key().get_audio_key(episode.gid, file.file_id) | ||||||
|  |         audio_key_time = int(time.time() * 1000) - start | ||||||
|  | 
 | ||||||
|  |         streamer = session.cdn().stream_file(file, key, url, halt_listener) | ||||||
|  |         input_stream = streamer.stream() | ||||||
|  |         normalization_data = NormalizationData.read(input_stream) | ||||||
|  |         if input_stream.skip(0xA7) != 0xA7: | ||||||
|  |             raise IOError("Couldn't skip 0xa7 bytes!") | ||||||
|  |         return PlayableContentFeeder.LoadedStream( | ||||||
|  |             episode, | ||||||
|  |             streamer, | ||||||
|  |             normalization_data, | ||||||
|  |             PlayableContentFeeder.Metrics(file.file_id, preload, | ||||||
|  |                                           -1 if preload else audio_key_time), | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class CdnManager: | ||||||
|  |     logger: logging.Logger = logging.getLogger("Librespot:CdnManager") | ||||||
|  |     __session: Session | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         self.__session = session | ||||||
|  | 
 | ||||||
|  |     def get_head(self, file_id: bytes): | ||||||
|  |         response = self.__session.client() \ | ||||||
|  |             .get(self.__session.get_user_attribute("head-files-url", "https://heads-fa.spotify.com/head/{file_id}") | ||||||
|  |                  .replace("{file_id}", util.bytes_to_hex(file_id))) | ||||||
|  |         if response.status_code != 200: | ||||||
|  |             raise IOError("{}".format(response.status_code)) | ||||||
|  |         body = response.content | ||||||
|  |         if body is None: | ||||||
|  |             raise IOError("Response body is empty!") | ||||||
|  |         return body | ||||||
|  | 
 | ||||||
|  |     def stream_external_episode(self, episode: Metadata.Episode, | ||||||
|  |                                 external_url: str, | ||||||
|  |                                 halt_listener: HaltListener): | ||||||
|  |         return CdnManager.Streamer( | ||||||
|  |             self.__session, | ||||||
|  |             StreamId(episode=episode), | ||||||
|  |             SuperAudioFormat.MP3, | ||||||
|  |             CdnManager.CdnUrl(self, None, external_url), | ||||||
|  |             self.__session.cache(), | ||||||
|  |             NoopAudioDecrypt(), | ||||||
|  |             halt_listener, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def stream_file(self, file: Metadata.AudioFile, key: bytes, url: str, | ||||||
|  |                     halt_listener: HaltListener): | ||||||
|  |         return CdnManager.Streamer( | ||||||
|  |             self.__session, | ||||||
|  |             StreamId(file=file), | ||||||
|  |             SuperAudioFormat.get(file.format), | ||||||
|  |             CdnManager.CdnUrl(self, file.file_id, url), | ||||||
|  |             self.__session.cache(), | ||||||
|  |             AesAudioDecrypt(key), | ||||||
|  |             halt_listener, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def get_audio_url(self, file_id: bytes): | ||||||
|  |         response = self.__session.api()\ | ||||||
|  |             .send("GET", "/storage-resolve/files/audio/interactive/{}".format(util.bytes_to_hex(file_id)), None, None) | ||||||
|  |         if response.status_code != 200: | ||||||
|  |             raise IOError(response.status_code) | ||||||
|  |         body = response.content | ||||||
|  |         if body is None: | ||||||
|  |             raise IOError("Response body is empty!") | ||||||
|  |         proto = StorageResolve.StorageResolveResponse() | ||||||
|  |         proto.ParseFromString(body) | ||||||
|  |         if proto.result == StorageResolve.StorageResolveResponse.Result.CDN: | ||||||
|  |             url = random.choice(proto.cdnurl) | ||||||
|  |             self.logger.debug("Fetched CDN url for {}: {}".format( | ||||||
|  |                 util.bytes_to_hex(file_id), url)) | ||||||
|  |             return url | ||||||
|  |         raise CdnManager.CdnException( | ||||||
|  |             "Could not retrieve CDN url! result: {}".format(proto.result)) | ||||||
|  | 
 | ||||||
|  |     class CdnException(Exception): | ||||||
|  |         pass | ||||||
|  | 
 | ||||||
|  |     class InternalResponse: | ||||||
|  |         buffer: bytes | ||||||
|  |         headers: typing.Dict[str, str] | ||||||
|  | 
 | ||||||
|  |         def __init__(self, buffer: bytes, headers: typing.Dict[str, str]): | ||||||
|  |             self.buffer = buffer | ||||||
|  |             self.headers = headers | ||||||
|  | 
 | ||||||
|  |     class CdnUrl: | ||||||
|  |         __cdn_manager = None | ||||||
|  |         __file_id: bytes | ||||||
|  |         __expiration: int | ||||||
|  |         url: str | ||||||
|  | 
 | ||||||
|  |         def __init__(self, cdn_manager, file_id: typing.Union[bytes, None], | ||||||
|  |                      url: str): | ||||||
|  |             self.__cdn_manager: CdnManager = cdn_manager | ||||||
|  |             self.__file_id = file_id | ||||||
|  |             self.set_url(url) | ||||||
|  | 
 | ||||||
|  |         # NOTE: set_url() assigns an instance attribute named "url", which | ||||||
|  |         # shadows this method; later reads of cdn_url.url hit the attribute. | ||||||
|  |         def url(self): | ||||||
|  |             if self.__expiration == -1: | ||||||
|  |                 return self.url | ||||||
|  |             if self.__expiration <= int(time.time() * 1000) + 5 * 60 * 1000: | ||||||
|  |                 self.url = self.__cdn_manager.get_audio_url(self.__file_id) | ||||||
|  |             return self.url | ||||||
|  | 
 | ||||||
|  |         def set_url(self, url: str): | ||||||
|  |             self.url = url | ||||||
|  |             if self.__file_id is not None: | ||||||
|  |                 token_url = urllib.parse.urlparse(url) | ||||||
|  |                 token_query = urllib.parse.parse_qs(token_url.query) | ||||||
|  |                 token_list = token_query.get("__token__") | ||||||
|  |                 try: | ||||||
|  |                     token_str = str(token_list[0]) | ||||||
|  |                 except TypeError: | ||||||
|  |                     token_str = "" | ||||||
|  |                 expires_list = token_query.get("Expires") | ||||||
|  |                 try: | ||||||
|  |                     expires_str = str(expires_list[0]) | ||||||
|  |                 except TypeError: | ||||||
|  |                     expires_str = "" | ||||||
|  |                 if token_str != "None" and len(token_str) != 0: | ||||||
|  |                     expire_at = None | ||||||
|  |                     split = token_str.split("~") | ||||||
|  |                     for s in split: | ||||||
|  |                         try: | ||||||
|  |                             i = s.index("=") | ||||||
|  |                         except ValueError: | ||||||
|  |                             continue | ||||||
|  |                         if s[:i] == "exp": | ||||||
|  |                             expire_at = int(s[i + 1:]) | ||||||
|  |                             break | ||||||
|  |                     if expire_at is None: | ||||||
|  |                         self.__expiration = -1 | ||||||
|  |                         self.__cdn_manager.logger.warning( | ||||||
|  |                             "Invalid __token__ in CDN url: {}".format(url)) | ||||||
|  |                         return | ||||||
|  |                     self.__expiration = expire_at * 1000 | ||||||
|  |                 elif expires_str != "None" and len(expires_str) != 0: | ||||||
|  |                     expires_str = expires_str.split("~")[0] | ||||||
|  |                     try: | ||||||
|  |                         expires_at = int(expires_str) | ||||||
|  |                     except ValueError: | ||||||
|  |                         self.__expiration = -1 | ||||||
|  |                         self.__cdn_manager.logger.warning("Invalid Expires param in CDN url: {}".format(url)) | ||||||
|  |                         return | ||||||
|  |                     self.__expiration = expires_at * 1000 | ||||||
|  |                 else: | ||||||
|  |                     try: | ||||||
|  |                         i = token_url.query.index("_") | ||||||
|  |                     except ValueError: | ||||||
|  |                         self.__expiration = -1 | ||||||
|  |                         self.__cdn_manager.logger \ | ||||||
|  |                             .warning("Couldn't extract expiration, invalid parameter in CDN url: {}".format(url)) | ||||||
|  |                         return | ||||||
|  |                     self.__expiration = int(token_url.query[:i]) * 1000 | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 self.__expiration = -1 | ||||||
|  | 
 | ||||||
|  |     class Streamer(GeneralAudioStream, GeneralWritableStream): | ||||||
|  |         available: typing.List[bool] | ||||||
|  |         buffer: typing.List[bytes] | ||||||
|  |         chunks: int | ||||||
|  |         executor_service = concurrent.futures.ThreadPoolExecutor() | ||||||
|  |         halt_listener: HaltListener | ||||||
|  |         requested: typing.List[bool] | ||||||
|  |         size: int | ||||||
|  |         __audio_format: SuperAudioFormat | ||||||
|  |         __audio_decrypt: AudioDecrypt | ||||||
|  |         __cdn_url: CdnManager.CdnUrl | ||||||
|  |         __internal_stream: InternalStream | ||||||
|  |         __session: Session | ||||||
|  |         __stream_id: StreamId | ||||||
|  | 
 | ||||||
|  |         def __init__(self, session: Session, stream_id: StreamId, | ||||||
|  |                      audio_format: SuperAudioFormat, | ||||||
|  |                      cdn_url: CdnManager.CdnUrl, cache: CacheManager, | ||||||
|  |                      audio_decrypt: AudioDecrypt, halt_listener: HaltListener): | ||||||
|  |             self.__session = session | ||||||
|  |             self.__stream_id = stream_id | ||||||
|  |             self.__audio_format = audio_format | ||||||
|  |             self.__audio_decrypt = audio_decrypt | ||||||
|  |             self.__cdn_url = cdn_url | ||||||
|  |             self.halt_listener = halt_listener | ||||||
|  |             response = self.request(range_start=0, | ||||||
|  |                                     range_end=ChannelManager.chunk_size - 1) | ||||||
|  |             content_range = response.headers.get("Content-Range") | ||||||
|  |             if content_range is None: | ||||||
|  |                 raise IOError("Missing Content-Range header!") | ||||||
|  |             split = content_range.split("/") | ||||||
|  |             self.size = int(split[1]) | ||||||
|  |             self.chunks = int(math.ceil(self.size / ChannelManager.chunk_size)) | ||||||
|  |             first_chunk = response.buffer | ||||||
|  |             self.available = [False for _ in range(self.chunks)] | ||||||
|  |             self.requested = [False for _ in range(self.chunks)] | ||||||
|  |             self.buffer = [b"" for _ in range(self.chunks)] | ||||||
|  |             self.__internal_stream = CdnManager.Streamer.InternalStream( | ||||||
|  |                 self, False) | ||||||
|  |             self.requested[0] = True | ||||||
|  |             self.write_chunk(first_chunk, 0, False) | ||||||
|  | 
 | ||||||
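|  |         # Sizing sketch: with the 128 KiB chunk size from ChannelManager, a | ||||||
|  |         # 5 MiB file gives ceil(5242880 / 131072) = 40 chunks, so `available`, | ||||||
|  |         # `requested` and `buffer` above are each 40 entries long. | ||||||
|  |  | ||||||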
|  |         def write_chunk(self, chunk: bytes, chunk_index: int, | ||||||
|  |                         cached: bool) -> None: | ||||||
|  |             if self.__internal_stream.is_closed(): | ||||||
|  |                 return | ||||||
|  |             self.__session.logger.debug( | ||||||
|  |                 "Chunk {}/{} completed, cached: {}, stream: {}".format( | ||||||
|  |                     chunk_index + 1, self.chunks, cached, self.describe())) | ||||||
|  |             self.buffer[chunk_index] = self.__audio_decrypt.decrypt_chunk( | ||||||
|  |                 chunk_index, chunk) | ||||||
|  |             self.__internal_stream.notify_chunk_available(chunk_index) | ||||||
|  | 
 | ||||||
|  |         def stream(self) -> AbsChunkedInputStream: | ||||||
|  |             return self.__internal_stream | ||||||
|  | 
 | ||||||
|  |         def codec(self) -> SuperAudioFormat: | ||||||
|  |             return self.__audio_format | ||||||
|  | 
 | ||||||
|  |         def describe(self) -> str: | ||||||
|  |             if self.__stream_id.is_episode(): | ||||||
|  |                 return "episode_gid: {}".format( | ||||||
|  |                     self.__stream_id.get_episode_gid()) | ||||||
|  |             return "file_id: {}".format(self.__stream_id.get_file_id()) | ||||||
|  | 
 | ||||||
|  |         def decrypt_time_ms(self) -> int: | ||||||
|  |             return self.__audio_decrypt.decrypt_time_ms() | ||||||
|  | 
 | ||||||
|  |         def request_chunk(self, index: int) -> None: | ||||||
|  |             response = self.request(index) | ||||||
|  |             self.write_chunk(response.buffer, index, False) | ||||||
|  | 
 | ||||||
|  |         def request(self, chunk: int = None, range_start: int = None, range_end: int = None)\ | ||||||
|  |                 -> CdnManager.InternalResponse: | ||||||
|  |             if chunk is None and range_start is None and range_end is None: | ||||||
|  |                 raise TypeError("Either chunk or a range_start/range_end pair is required") | ||||||
|  |             if chunk is not None: | ||||||
|  |                 range_start = ChannelManager.chunk_size * chunk | ||||||
|  |                 range_end = (chunk + 1) * ChannelManager.chunk_size - 1 | ||||||
|  |             response = self.__session.client().get( | ||||||
|  |                 self.__cdn_url.url, | ||||||
|  |                 headers={ | ||||||
|  |                     "Range": "bytes={}-{}".format(range_start, range_end) | ||||||
|  |                 }, | ||||||
|  |             ) | ||||||
|  |             if response.status_code != 206: | ||||||
|  |                 raise IOError(response.status_code) | ||||||
|  |             body = response.content | ||||||
|  |             if body is None: | ||||||
|  |                 raise IOError("Response body is empty!") | ||||||
|  |             return CdnManager.InternalResponse(body, dict(response.headers)) | ||||||
|  | 
 | ||||||
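|  |         # Range header math used by request() above (illustrative): chunk 2 | ||||||
|  |         # with chunk_size = 131072 requests | ||||||
|  |         #   range_start = 131072 * 2      # 262144 | ||||||
|  |         #   range_end = 3 * 131072 - 1    # 393215 | ||||||
|  |         # i.e. "Range: bytes=262144-393215", answered with HTTP 206. | ||||||
|  |  | ||||||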
|  |         class InternalStream(AbsChunkedInputStream): | ||||||
|  |             streamer: CdnManager.Streamer | ||||||
|  | 
 | ||||||
|  |             def __init__(self, streamer, retry_on_chunk_error: bool): | ||||||
|  |                 self.streamer: CdnManager.Streamer = streamer | ||||||
|  |                 super().__init__(retry_on_chunk_error) | ||||||
|  | 
 | ||||||
|  |             def buffer(self) -> typing.List[bytes]: | ||||||
|  |                 return self.streamer.buffer | ||||||
|  | 
 | ||||||
|  |             def size(self) -> int: | ||||||
|  |                 return self.streamer.size | ||||||
|  | 
 | ||||||
|  |             def close(self) -> None: | ||||||
|  |                 super().close() | ||||||
|  |                 del self.streamer.buffer | ||||||
|  | 
 | ||||||
|  |             def requested_chunks(self) -> typing.List[bool]: | ||||||
|  |                 return self.streamer.requested | ||||||
|  | 
 | ||||||
|  |             def available_chunks(self) -> typing.List[bool]: | ||||||
|  |                 return self.streamer.available | ||||||
|  | 
 | ||||||
|  |             def chunks(self) -> int: | ||||||
|  |                 return self.streamer.chunks | ||||||
|  | 
 | ||||||
|  |             def request_chunk_from_stream(self, index: int) -> None: | ||||||
|  |                 self.streamer.executor_service \ | ||||||
|  |                     .submit(lambda: self.streamer.request_chunk(index)) | ||||||
|  | 
 | ||||||
|  |             def stream_read_halted(self, chunk: int, _time: int) -> None: | ||||||
|  |                 if self.streamer.halt_listener is not None: | ||||||
|  |                     self.streamer.executor_service\ | ||||||
|  |                         .submit(lambda: self.streamer.halt_listener.stream_read_halted(chunk, _time)) | ||||||
|  | 
 | ||||||
|  |             def stream_read_resumed(self, chunk: int, _time: int) -> None: | ||||||
|  |                 if self.streamer.halt_listener is not None: | ||||||
|  |                     self.streamer.executor_service \ | ||||||
|  |                         .submit(lambda: self.streamer.halt_listener.stream_read_resumed(chunk, _time)) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class NormalizationData: | ||||||
|  |     _LOGGER: logging.Logger = logging.getLogger(__name__) | ||||||
|  |     track_gain_db: float | ||||||
|  |     track_peak: float | ||||||
|  |     album_gain_db: float | ||||||
|  |     album_peak: float | ||||||
|  | 
 | ||||||
|  |     def __init__(self, track_gain_db: float, track_peak: float, | ||||||
|  |                  album_gain_db: float, album_peak: float): | ||||||
|  |         self.track_gain_db = track_gain_db | ||||||
|  |         self.track_peak = track_peak | ||||||
|  |         self.album_gain_db = album_gain_db | ||||||
|  |         self.album_peak = album_peak | ||||||
|  | 
 | ||||||
|  |         self._LOGGER.debug( | ||||||
|  |             "Loaded normalization data, track_gain: {}, track_peak: {}, album_gain: {}, album_peak: {}" | ||||||
|  |             .format(track_gain_db, track_peak, album_gain_db, album_peak)) | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def read(input_stream: AbsChunkedInputStream) -> NormalizationData: | ||||||
|  |         # The four normalization floats live at byte offset 144 of the | ||||||
|  |         # (Spotify-specific) stream header. | ||||||
|  |         input_stream.seek(144) | ||||||
|  |         data = input_stream.read(4 * 4) | ||||||
|  |         input_stream.seek(0) | ||||||
|  |         buffer = io.BytesIO(data) | ||||||
|  |         return NormalizationData( | ||||||
|  |             struct.unpack("<f", buffer.read(4))[0], | ||||||
|  |             struct.unpack("<f", buffer.read(4))[0], | ||||||
|  |             struct.unpack("<f", buffer.read(4))[0], | ||||||
|  |             struct.unpack("<f", buffer.read(4))[0]) | ||||||
|  | 
 | ||||||
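|  |     # Layout read above: four little-endian float32 values in order | ||||||
|  |     #   track_gain_db, track_peak, album_gain_db, album_peak; | ||||||
|  |     # e.g. struct.pack("<4f", -8.5, 0.95, -9.1, 0.99) round-trips through | ||||||
|  |     # read() (illustrative values). | ||||||
|  |  | ||||||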
|  |     def get_factor(self, normalisation_pregain: float) -> float: | ||||||
|  |         normalisation_factor = float( | ||||||
|  |             math.pow(10, (self.track_gain_db + normalisation_pregain) / 20)) | ||||||
|  |         if normalisation_factor * self.track_peak > 1: | ||||||
|  |             self._LOGGER \ | ||||||
|  |                 .warning("Reducing normalisation factor to prevent clipping. Please add negative pregain to avoid.") | ||||||
|  |             normalisation_factor = 1 / self.track_peak | ||||||
|  |         return normalisation_factor | ||||||
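|  |  | ||||||
|  |     # Worked example (illustrative): track_gain_db = -8.5, pregain = 3.0 gives | ||||||
|  |     #   factor = 10 ** ((-8.5 + 3.0) / 20) ~= 0.531, | ||||||
|  |     # and with track_peak = 0.95, 0.531 * 0.95 < 1, so no clipping cap applies. | ||||||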
|  | 
 | ||||||
|  | 
 | ||||||
|  | class PlayableContentFeeder: | ||||||
|  |     logger = logging.getLogger("Librespot:PlayableContentFeeder") | ||||||
|  |     storage_resolve_interactive = "/storage-resolve/files/audio/interactive/{}" | ||||||
|  |     storage_resolve_interactive_prefetch = "/storage-resolve/files/audio/interactive_prefetch/{}" | ||||||
|  |     __session: Session | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         self.__session = session | ||||||
|  | 
 | ||||||
|  |     def load(self, playable_id: PlayableId, | ||||||
|  |              audio_quality_picker: AudioQualityPicker, preload: bool, | ||||||
|  |              halt_listener: typing.Union[HaltListener, None]): | ||||||
|  |         if type(playable_id) is TrackId: | ||||||
|  |             return self.load_track(playable_id, audio_quality_picker, preload, | ||||||
|  |                                    halt_listener) | ||||||
|  |         if type(playable_id) is EpisodeId: | ||||||
|  |             return self.load_episode(playable_id, audio_quality_picker, | ||||||
|  |                                      preload, halt_listener) | ||||||
|  |         raise TypeError("Unknown content: {}".format(playable_id)) | ||||||
|  | 
 | ||||||
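|  |     # Typical call (sketch; `session` and `track_id` assumed to exist, picker | ||||||
|  |     # classes come from decoders.py below): | ||||||
|  |     #   feeder = PlayableContentFeeder(session) | ||||||
|  |     #   loaded = feeder.load(track_id, | ||||||
|  |     #                        VorbisOnlyAudioQuality(AudioQuality.HIGH), | ||||||
|  |     #                        False, None) | ||||||
|  |     #   audio = loaded.input_stream.stream() | ||||||
|  |  | ||||||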
|  |     def load_stream(self, file: Metadata.AudioFile, track: Metadata.Track, | ||||||
|  |                     episode: Metadata.Episode, preload: bool, | ||||||
|  |                     halt_listener: HaltListener): | ||||||
|  |         if track is None and episode is None: | ||||||
|  |             raise RuntimeError("Either track or episode must be given!") | ||||||
|  |         response = self.resolve_storage_interactive(file.file_id, preload) | ||||||
|  |         if response.result == StorageResolve.StorageResolveResponse.Result.CDN: | ||||||
|  |             if track is not None: | ||||||
|  |                 return CdnFeedHelper.load_track(self.__session, track, file, | ||||||
|  |                                                 response, preload, halt_listener) | ||||||
|  |             return CdnFeedHelper.load_episode(self.__session, episode, file, | ||||||
|  |                                               response, preload, halt_listener) | ||||||
|  |         if response.result == StorageResolve.StorageResolveResponse.Result.STORAGE: | ||||||
|  |             if track is None: | ||||||
|  |                 # Non-CDN (storage) delivery is not implemented; falls through. | ||||||
|  |                 pass | ||||||
|  |         elif response.result == StorageResolve.StorageResolveResponse.Result.RESTRICTED: | ||||||
|  |             raise RuntimeError("Content is restricted!") | ||||||
|  |         elif response.result == StorageResolve.StorageResolveResponse.Result.UNRECOGNIZED: | ||||||
|  |             raise RuntimeError("Content is unrecognized!") | ||||||
|  |         else: | ||||||
|  |             raise RuntimeError("Unknown result: {}".format(response.result)) | ||||||
|  | 
 | ||||||
|  |     def load_episode(self, episode_id: EpisodeId, | ||||||
|  |                      audio_quality_picker: AudioQualityPicker, preload: bool, | ||||||
|  |                      halt_listener: HaltListener) -> LoadedStream: | ||||||
|  |         episode = self.__session.api().get_metadata_4_episode(episode_id) | ||||||
|  |         if episode.external_url: | ||||||
|  |             return CdnFeedHelper.load_episode_external(self.__session, episode, | ||||||
|  |                                                        halt_listener) | ||||||
|  |         file = audio_quality_picker.get_file(episode.audio) | ||||||
|  |         if file is None: | ||||||
|  |             self.logger.fatal( | ||||||
|  |                 "Couldn't find any suitable audio file, available: {}".format( | ||||||
|  |                     episode.audio)) | ||||||
|  |             raise FeederException() | ||||||
|  |         return self.load_stream(file, None, episode, preload, halt_listener) | ||||||
|  | 
 | ||||||
|  |     def load_track(self, track_id_or_track: typing.Union[TrackId, | ||||||
|  |                                                          Metadata.Track], | ||||||
|  |                    audio_quality_picker: AudioQualityPicker, preload: bool, | ||||||
|  |                    halt_listener: HaltListener): | ||||||
|  |         if type(track_id_or_track) is TrackId: | ||||||
|  |             original = self.__session.api().get_metadata_4_track( | ||||||
|  |                 track_id_or_track) | ||||||
|  |             track = self.pick_alternative_if_necessary(original) | ||||||
|  |             if track is None: | ||||||
|  |                 raise RuntimeError("Cannot get alternative track") | ||||||
|  |         else: | ||||||
|  |             track = track_id_or_track | ||||||
|  |         file = audio_quality_picker.get_file(track.file) | ||||||
|  |         if file is None: | ||||||
|  |             self.logger.fatal( | ||||||
|  |                 "Couldn't find any suitable audio file, available: {}".format( | ||||||
|  |                     track.file)) | ||||||
|  |             raise FeederException() | ||||||
|  |         return self.load_stream(file, track, None, preload, halt_listener) | ||||||
|  | 
 | ||||||
|  |     def pick_alternative_if_necessary( | ||||||
|  |             self, track: Metadata.Track) -> typing.Union[Metadata.Track, None]: | ||||||
|  |         if len(track.file) > 0: | ||||||
|  |             return track | ||||||
|  |         for alt in track.alternative: | ||||||
|  |             if len(alt.file) > 0: | ||||||
|  |                 return Metadata.Track( | ||||||
|  |                     gid=track.gid, | ||||||
|  |                     name=track.name, | ||||||
|  |                     album=track.album, | ||||||
|  |                     artist=track.artist, | ||||||
|  |                     number=track.number, | ||||||
|  |                     disc_number=track.disc_number, | ||||||
|  |                     duration=track.duration, | ||||||
|  |                     popularity=track.popularity, | ||||||
|  |                     explicit=track.explicit, | ||||||
|  |                     external_id=track.external_id, | ||||||
|  |                     restriction=track.restriction, | ||||||
|  |                     file=alt.file, | ||||||
|  |                     sale_period=track.sale_period, | ||||||
|  |                     preview=track.preview, | ||||||
|  |                     tags=track.tags, | ||||||
|  |                     earliest_live_timestamp=track.earliest_live_timestamp, | ||||||
|  |                     has_lyrics=track.has_lyrics, | ||||||
|  |                     availability=track.availability, | ||||||
|  |                     licensor=track.licensor) | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  |     def resolve_storage_interactive( | ||||||
|  |             self, file_id: bytes, | ||||||
|  |             preload: bool) -> StorageResolve.StorageResolveResponse: | ||||||
|  |         resp = self.__session.api().send( | ||||||
|  |             "GET", | ||||||
|  |             (self.storage_resolve_interactive_prefetch | ||||||
|  |              if preload else self.storage_resolve_interactive).format( | ||||||
|  |                  util.bytes_to_hex(file_id)), | ||||||
|  |             None, | ||||||
|  |             None, | ||||||
|  |         ) | ||||||
|  |         if resp.status_code != 200: | ||||||
|  |             raise RuntimeError(resp.status_code) | ||||||
|  |         body = resp.content | ||||||
|  |         if body is None: | ||||||
|  |             raise RuntimeError("Response body is empty!") | ||||||
|  |         storage_resolve_response = StorageResolve.StorageResolveResponse() | ||||||
|  |         storage_resolve_response.ParseFromString(body) | ||||||
|  |         return storage_resolve_response | ||||||
|  | 
 | ||||||
|  |     class LoadedStream: | ||||||
|  |         episode: Metadata.Episode | ||||||
|  |         track: Metadata.Track | ||||||
|  |         input_stream: GeneralAudioStream | ||||||
|  |         normalization_data: NormalizationData | ||||||
|  |         metrics: PlayableContentFeeder.Metrics | ||||||
|  | 
 | ||||||
|  |         def __init__(self, track_or_episode: typing.Union[Metadata.Track, | ||||||
|  |                                                           Metadata.Episode], | ||||||
|  |                      input_stream: GeneralAudioStream, | ||||||
|  |                      normalization_data: typing.Union[NormalizationData, None], | ||||||
|  |                      metrics: PlayableContentFeeder.Metrics): | ||||||
|  |             if type(track_or_episode) is Metadata.Track: | ||||||
|  |                 self.track = track_or_episode | ||||||
|  |                 self.episode = None | ||||||
|  |             elif type(track_or_episode) is Metadata.Episode: | ||||||
|  |                 self.track = None | ||||||
|  |                 self.episode = track_or_episode | ||||||
|  |             else: | ||||||
|  |                 raise TypeError() | ||||||
|  |             self.input_stream = input_stream | ||||||
|  |             self.normalization_data = normalization_data | ||||||
|  |             self.metrics = metrics | ||||||
|  | 
 | ||||||
|  |     class Metrics: | ||||||
|  |         file_id: str | ||||||
|  |         preloaded_audio_key: bool | ||||||
|  |         audio_key_time: int | ||||||
|  | 
 | ||||||
|  |         def __init__(self, file_id: typing.Union[bytes, None], | ||||||
|  |                      preloaded_audio_key: bool, audio_key_time: int): | ||||||
|  |             self.file_id = None if file_id is None else util.bytes_to_hex( | ||||||
|  |                 file_id) | ||||||
|  |             self.preloaded_audio_key = preloaded_audio_key | ||||||
|  |             self.audio_key_time = audio_key_time | ||||||
|  |             if preloaded_audio_key and audio_key_time != -1: | ||||||
|  |                 raise RuntimeError() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class StreamId: | ||||||
|  |     file_id: bytes | ||||||
|  |     episode_gid: bytes | ||||||
|  | 
 | ||||||
|  |     def __init__(self, | ||||||
|  |                  file: Metadata.AudioFile = None, | ||||||
|  |                  episode: Metadata.Episode = None): | ||||||
|  |         # Initialize both fields even when neither argument is given, so the | ||||||
|  |         # accessors below fail with their own RuntimeError instead of an | ||||||
|  |         # AttributeError. | ||||||
|  |         self.file_id = None if file is None else file.file_id | ||||||
|  |         self.episode_gid = None if episode is None else episode.gid | ||||||
|  | 
 | ||||||
|  |     def get_file_id(self): | ||||||
|  |         if self.file_id is None: | ||||||
|  |             raise RuntimeError("Not a file!") | ||||||
|  |         return util.bytes_to_hex(self.file_id) | ||||||
|  | 
 | ||||||
|  |     def is_episode(self): | ||||||
|  |         return self.episode_gid is not None | ||||||
|  | 
 | ||||||
|  |     def get_episode_gid(self): | ||||||
|  |         if self.episode_gid is None: | ||||||
|  |             raise RuntimeError("Not an episode!") | ||||||
|  |         return util.bytes_to_hex(self.episode_gid) | ||||||
							
								
								
									
										81
									
								
								resources/lib/librespot/audio/decoders.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,81 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | from librespot.audio import SuperAudioFormat | ||||||
|  | from librespot.proto import Metadata_pb2 as Metadata | ||||||
|  | from librespot.proto.Metadata_pb2 import AudioFile | ||||||
|  | from librespot.structure import AudioQualityPicker | ||||||
|  | import enum | ||||||
|  | import logging | ||||||
|  | import typing | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class AudioQuality(enum.Enum): | ||||||
|  |     NORMAL = 0x00 | ||||||
|  |     HIGH = 0x01 | ||||||
|  |     VERY_HIGH = 0x02 | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def get_quality(audio_format: AudioFile.Format) -> AudioQuality: | ||||||
|  |         if audio_format in [ | ||||||
|  |                 AudioFile.MP3_96, | ||||||
|  |                 AudioFile.OGG_VORBIS_96, | ||||||
|  |                 AudioFile.AAC_24_NORM, | ||||||
|  |         ]: | ||||||
|  |             return AudioQuality.NORMAL | ||||||
|  |         if audio_format in [ | ||||||
|  |                 AudioFile.MP3_160, | ||||||
|  |                 AudioFile.MP3_160_ENC, | ||||||
|  |                 AudioFile.OGG_VORBIS_160, | ||||||
|  |                 AudioFile.AAC_24, | ||||||
|  |         ]: | ||||||
|  |             return AudioQuality.HIGH | ||||||
|  |         if audio_format in [ | ||||||
|  |                 AudioFile.MP3_320, | ||||||
|  |                 AudioFile.MP3_256, | ||||||
|  |                 AudioFile.OGG_VORBIS_320, | ||||||
|  |                 AudioFile.AAC_48, | ||||||
|  |         ]: | ||||||
|  |             return AudioQuality.VERY_HIGH | ||||||
|  |         raise RuntimeError("Unknown format: {}".format(format)) | ||||||
|  | 
 | ||||||
|  |     def get_matches(self, | ||||||
|  |                     files: typing.List[AudioFile]) -> typing.List[AudioFile]: | ||||||
|  |         file_list = [] | ||||||
|  |         for file in files: | ||||||
|  |             if hasattr(file, "format") and AudioQuality.get_quality( | ||||||
|  |                     file.format) == self: | ||||||
|  |                 file_list.append(file) | ||||||
|  |         return file_list | ||||||
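|  |  | ||||||
|  |     # e.g. AudioQuality.HIGH.get_matches(files) keeps only the 160 kbps-class | ||||||
|  |     # entries (MP3_160, MP3_160_ENC, OGG_VORBIS_160, AAC_24) from a track's | ||||||
|  |     # AudioFile list. | ||||||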
|  | 
 | ||||||
|  | 
 | ||||||
|  | class VorbisOnlyAudioQuality(AudioQualityPicker): | ||||||
|  |     logger = logging.getLogger("Librespot:Player:VorbisOnlyAudioQuality") | ||||||
|  |     preferred: AudioQuality | ||||||
|  | 
 | ||||||
|  |     def __init__(self, preferred: AudioQuality): | ||||||
|  |         self.preferred = preferred | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def get_vorbis_file(files: typing.List[Metadata.AudioFile]): | ||||||
|  |         for file in files: | ||||||
|  |             if file.HasField("format") and SuperAudioFormat.get( | ||||||
|  |                     file.format) == SuperAudioFormat.VORBIS: | ||||||
|  |                 return file | ||||||
|  |         return None | ||||||
|  | 
 | ||||||
|  |     def get_file(self, files: typing.List[Metadata.AudioFile]): | ||||||
|  |         matches: typing.List[Metadata.AudioFile] = self.preferred.get_matches( | ||||||
|  |             files) | ||||||
|  |         vorbis: Metadata.AudioFile = VorbisOnlyAudioQuality.get_vorbis_file( | ||||||
|  |             matches) | ||||||
|  |         if vorbis is None: | ||||||
|  |             vorbis: Metadata.AudioFile = VorbisOnlyAudioQuality.get_vorbis_file( | ||||||
|  |                 files) | ||||||
|  |             if vorbis is not None: | ||||||
|  |                 self.logger.warning( | ||||||
|  |                     "Using {} because preferred {} couldn't be found.".format( | ||||||
|  |                         Metadata.AudioFile.Format.Name(vorbis.format), | ||||||
|  |                         self.preferred)) | ||||||
|  |             else: | ||||||
|  |                 self.logger.fatal( | ||||||
|  |                     "Couldn't find any Vorbis file, available: {}".format( | ||||||
|  |                         files)) | ||||||
|  |         return vorbis | ||||||
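|  |  | ||||||
|  |     # e.g. VorbisOnlyAudioQuality(AudioQuality.VERY_HIGH).get_file(track.file) | ||||||
|  |     # returns the OGG_VORBIS_320 entry when present, otherwise any Vorbis file | ||||||
|  |     # at another quality (with a warning), else None. | ||||||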
							
								
								
									
										45
									
								
								resources/lib/librespot/audio/decrypt.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,45 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | from Cryptodome.Cipher import AES | ||||||
|  | from Cryptodome.Util import Counter | ||||||
|  | from librespot.audio.storage import ChannelManager | ||||||
|  | from librespot.structure import AudioDecrypt | ||||||
|  | import io | ||||||
|  | import time | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class AesAudioDecrypt(AudioDecrypt): | ||||||
|  |     audio_aes_iv = b'r\xe0g\xfb\xdd\xcb\xcfw\xeb\xe8\xbcd?c\r\x93' | ||||||
|  |     cipher = None | ||||||
|  |     decrypt_count = 0 | ||||||
|  |     decrypt_total_time = 0 | ||||||
|  |     iv_int = int.from_bytes(audio_aes_iv, "big") | ||||||
|  |     iv_diff = 0x100 | ||||||
|  |     key: bytes | ||||||
|  | 
 | ||||||
|  |     def __init__(self, key: bytes): | ||||||
|  |         self.key = key | ||||||
|  | 
 | ||||||
|  |     def decrypt_chunk(self, chunk_index: int, buffer: bytes): | ||||||
|  |         new_buffer = io.BytesIO() | ||||||
|  |         iv = self.iv_int + int(ChannelManager.chunk_size * chunk_index / 16) | ||||||
|  |         start = time.time_ns() | ||||||
|  |         for i in range(0, len(buffer), 4096): | ||||||
|  |             cipher = AES.new(key=self.key, | ||||||
|  |                              mode=AES.MODE_CTR, | ||||||
|  |                              counter=Counter.new(128, initial_value=iv)) | ||||||
|  |             count = min(4096, len(buffer) - i) | ||||||
|  |             decrypted_buffer = cipher.decrypt(buffer[i:i + count]) | ||||||
|  |             new_buffer.write(decrypted_buffer) | ||||||
|  |             if count != len(decrypted_buffer): | ||||||
|  |                 raise RuntimeError( | ||||||
|  |                     "Couldn't process all data, actual: {}, expected: {}". | ||||||
|  |                     format(len(decrypted_buffer), count)) | ||||||
|  |             iv += self.iv_diff | ||||||
|  |         self.decrypt_total_time += time.time_ns() - start | ||||||
|  |         self.decrypt_count += 1 | ||||||
|  |         new_buffer.seek(0) | ||||||
|  |         return new_buffer.read() | ||||||
|  | 
 | ||||||
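|  |     # Counter math (sketch): AES-CTR blocks are 16 bytes, so chunk i starts at | ||||||
|  |     # counter iv_int + chunk_size * i / 16, and each 4096-byte window advances | ||||||
|  |     # the counter by 4096 / 16 = 0x100 blocks -- exactly iv_diff. | ||||||
|  |  | ||||||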
|  |     def decrypt_time_ms(self): | ||||||
|  |         return 0 if self.decrypt_count == 0 else int( | ||||||
|  |             (self.decrypt_total_time / self.decrypt_count) / 1000000) | ||||||
							
								
								
									
										32
									
								
								resources/lib/librespot/audio/format.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,32 @@ | ||||||
|  | from librespot.proto import Metadata_pb2 as Metadata | ||||||
|  | import enum | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class SuperAudioFormat(enum.Enum): | ||||||
|  |     MP3 = 0x00 | ||||||
|  |     VORBIS = 0x01 | ||||||
|  |     AAC = 0x02 | ||||||
|  | 
 | ||||||
|  |     @staticmethod | ||||||
|  |     def get(audio_format: Metadata.AudioFile.Format): | ||||||
|  |         if audio_format in [ | ||||||
|  |                 Metadata.AudioFile.Format.OGG_VORBIS_96, | ||||||
|  |                 Metadata.AudioFile.Format.OGG_VORBIS_160, | ||||||
|  |                 Metadata.AudioFile.Format.OGG_VORBIS_320, | ||||||
|  |         ]: | ||||||
|  |             return SuperAudioFormat.VORBIS | ||||||
|  |         if audio_format in [ | ||||||
|  |                 Metadata.AudioFile.Format.MP3_256, | ||||||
|  |                 Metadata.AudioFile.Format.MP3_320, | ||||||
|  |                 Metadata.AudioFile.Format.MP3_160, | ||||||
|  |                 Metadata.AudioFile.Format.MP3_96, | ||||||
|  |                 Metadata.AudioFile.Format.MP3_160_ENC, | ||||||
|  |         ]: | ||||||
|  |             return SuperAudioFormat.MP3 | ||||||
|  |         if audio_format in [ | ||||||
|  |                 Metadata.AudioFile.Format.AAC_24, | ||||||
|  |                 Metadata.AudioFile.Format.AAC_48, | ||||||
|  |                 Metadata.AudioFile.Format.AAC_24_NORM, | ||||||
|  |         ]: | ||||||
|  |             return SuperAudioFormat.AAC | ||||||
|  |         raise RuntimeError("Unknown audio format: {}".format(audio_format)) | ||||||
							
								
								
									
										139
									
								
								resources/lib/librespot/audio/storage.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,139 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | from librespot import util | ||||||
|  | from librespot.crypto import Packet | ||||||
|  | from librespot.proto.Metadata_pb2 import AudioFile | ||||||
|  | from librespot.structure import Closeable, PacketsReceiver | ||||||
|  | import concurrent.futures | ||||||
|  | import io | ||||||
|  | import logging | ||||||
|  | import queue | ||||||
|  | import struct | ||||||
|  | import threading | ||||||
|  | import typing | ||||||
|  | 
 | ||||||
|  | if typing.TYPE_CHECKING: | ||||||
|  |     from librespot.core import Session | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class ChannelManager(Closeable, PacketsReceiver): | ||||||
|  |     channels: typing.Dict[int, Channel] = {} | ||||||
|  |     chunk_size = 128 * 1024 | ||||||
|  |     executor_service = concurrent.futures.ThreadPoolExecutor() | ||||||
|  |     logger = logging.getLogger("Librespot:ChannelManager") | ||||||
|  |     seq_holder = 0 | ||||||
|  |     seq_holder_lock = threading.Condition() | ||||||
|  |     __session: Session = None | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         self.__session = session | ||||||
|  | 
 | ||||||
|  |     def request_chunk(self, file_id: bytes, index: int, file: AudioFile): | ||||||
|  |         start = int(index * self.chunk_size / 4) | ||||||
|  |         end = int((index + 1) * self.chunk_size / 4) | ||||||
|  |         channel = ChannelManager.Channel(self, file, index) | ||||||
|  |         self.channels[channel.chunk_id] = channel | ||||||
|  |         out = io.BytesIO() | ||||||
|  |         out.write(struct.pack(">H", channel.chunk_id)) | ||||||
|  |         out.write(struct.pack(">i", 0x00000000)) | ||||||
|  |         out.write(struct.pack(">i", 0x00000000)) | ||||||
|  |         out.write(struct.pack(">i", 0x00004E20)) | ||||||
|  |         out.write(struct.pack(">i", 0x00030D40)) | ||||||
|  |         out.write(file_id) | ||||||
|  |         out.write(struct.pack(">i", start)) | ||||||
|  |         out.write(struct.pack(">i", end)) | ||||||
|  |         out.seek(0) | ||||||
|  |         self.__session.send(Packet.Type.stream_chunk, out.read()) | ||||||
|  | 
 | ||||||
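|  |     # stream_chunk request layout written above (all big-endian): | ||||||
|  |     #   [chunk_id:u16][0:i32][0:i32][0x00004E20:i32][0x00030D40:i32] | ||||||
|  |     #   [file_id][start:i32][end:i32], with start/end counted in 4-byte words. | ||||||
|  |  | ||||||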
|  |     def dispatch(self, packet: Packet) -> None: | ||||||
|  |         payload = io.BytesIO(packet.payload) | ||||||
|  |         if packet.is_cmd(Packet.Type.stream_chunk_res): | ||||||
|  |             chunk_id = struct.unpack(">H", payload.read(2))[0] | ||||||
|  |             channel = self.channels.get(chunk_id) | ||||||
|  |             if channel is None: | ||||||
|  |                 self.logger.warning( | ||||||
|  |                     "Couldn't find channel, id: {}, received: {}".format( | ||||||
|  |                         chunk_id, len(packet.payload))) | ||||||
|  |                 return | ||||||
|  |             channel.add_to_queue(payload) | ||||||
|  |         elif packet.is_cmd(Packet.Type.channel_error): | ||||||
|  |             chunk_id = struct.unpack(">H", payload.read(2))[0] | ||||||
|  |             channel = self.channels.get(chunk_id) | ||||||
|  |             if channel is None: | ||||||
|  |                 self.logger.warning( | ||||||
|  |                     "Dropping channel error, id: {}, code: {}".format( | ||||||
|  |                         chunk_id, | ||||||
|  |                         struct.unpack(">H", payload.read(2))[0])) | ||||||
|  |                 return | ||||||
|  |             channel.stream_error(struct.unpack(">H", payload.read(2))[0]) | ||||||
|  |         else: | ||||||
|  |             self.logger.warning( | ||||||
|  |                 "Couldn't handle packet, cmd: {}, payload: {}".format( | ||||||
|  |                     packet.cmd, util.bytes_to_hex(packet.payload))) | ||||||
|  | 
 | ||||||
|  |     def close(self) -> None: | ||||||
|  |         self.executor_service.shutdown() | ||||||
|  | 
 | ||||||
|  |     class Channel: | ||||||
|  |         channel_manager: ChannelManager | ||||||
|  |         chunk_id: int | ||||||
|  |         q: queue.Queue | ||||||
|  |         __buffer: io.BytesIO | ||||||
|  |         __chunk_index: int | ||||||
|  |         __file: AudioFile | ||||||
|  |         __header: bool = True | ||||||
|  | 
 | ||||||
|  |         def __init__(self, channel_manager: ChannelManager, file: AudioFile, | ||||||
|  |                      chunk_index: int): | ||||||
|  |             self.__buffer = io.BytesIO() | ||||||
|  |             # One queue per channel; a class-level queue would be shared | ||||||
|  |             # between every Channel instance. | ||||||
|  |             self.q = queue.Queue() | ||||||
|  |             self.channel_manager = channel_manager | ||||||
|  |             self.__file = file | ||||||
|  |             self.__chunk_index = chunk_index | ||||||
|  |             with self.channel_manager.seq_holder_lock: | ||||||
|  |                 self.chunk_id = self.channel_manager.seq_holder | ||||||
|  |                 self.channel_manager.seq_holder += 1 | ||||||
|  |             # Submit the handler's run method; the original lambda only | ||||||
|  |             # constructed the Handler without ever running it. | ||||||
|  |             self.channel_manager.executor_service.submit( | ||||||
|  |                 ChannelManager.Channel.Handler(self).run) | ||||||
|  | 
 | ||||||
|  |         def _handle(self, payload: io.BytesIO) -> bool: | ||||||
|  |             # dispatch() enqueues io.BytesIO payloads positioned after the | ||||||
|  |             # chunk id, so read from the stream rather than treating it as bytes. | ||||||
|  |             remaining = len(payload.getvalue()) - payload.tell() | ||||||
|  |             if remaining == 0: | ||||||
|  |                 if not self.__header: | ||||||
|  |                     # An empty payload marks end of chunk: flush the buffer. | ||||||
|  |                     self.__file.write_chunk(self.__buffer.getvalue(), | ||||||
|  |                                             self.__chunk_index, False) | ||||||
|  |                     return True | ||||||
|  |                 self.channel_manager.logger.debug( | ||||||
|  |                     "Received empty chunk, skipping.") | ||||||
|  |                 return False | ||||||
|  |             if self.__header: | ||||||
|  |                 # Header payloads are a sequence of [length:u16][id:u8][data]. | ||||||
|  |                 while len(payload.getvalue()) - payload.tell() > 0: | ||||||
|  |                     length = struct.unpack(">H", payload.read(2))[0] | ||||||
|  |                     if not length > 0: | ||||||
|  |                         break | ||||||
|  |                     header_id = struct.unpack(">B", payload.read(1))[0] | ||||||
|  |                     header_data = payload.read(length - 1) | ||||||
|  |                     self.__file.write_header(header_id, | ||||||
|  |                                              bytearray(header_data), False) | ||||||
|  |                 self.__header = False | ||||||
|  |             else: | ||||||
|  |                 self.__buffer.write(payload.read()) | ||||||
|  |             return False | ||||||
|  | 
 | ||||||
|  |         def add_to_queue(self, payload): | ||||||
|  |             self.q.put(payload) | ||||||
|  | 
 | ||||||
|  |         def stream_error(self, code: int) -> None: | ||||||
|  |             self.__file.stream_error(self.__chunk_index, code) | ||||||
|  | 
 | ||||||
|  |         class Handler: | ||||||
|  |             __channel: ChannelManager.Channel = None | ||||||
|  | 
 | ||||||
|  |             def __init__(self, channel: ChannelManager.Channel): | ||||||
|  |                 self.__channel = channel | ||||||
|  | 
 | ||||||
|  |             def run(self) -> None: | ||||||
|  |                 self.__channel.channel_manager.logger.debug( | ||||||
|  |                     "ChannelManager.Handler is starting") | ||||||
|  |                 # Drain queued payloads until _handle reports the chunk done; | ||||||
|  |                 # the original body waited on all_tasks_done without consuming | ||||||
|  |                 # anything. | ||||||
|  |                 while True: | ||||||
|  |                     payload = self.__channel.q.get() | ||||||
|  |                     done = self.__channel._handle(payload) | ||||||
|  |                     self.__channel.q.task_done() | ||||||
|  |                     if done: | ||||||
|  |                         break | ||||||
|  |                 self.__channel.channel_manager.channels.pop( | ||||||
|  |                     self.__channel.chunk_id) | ||||||
|  |                 self.__channel.channel_manager.logger.debug( | ||||||
|  |                     "ChannelManager.Handler is shutting down") | ||||||
							
								
								
									
										18
									
								
								resources/lib/librespot/cache.py
									
										
									
									
									
										Normal file
									
								
							
							
						
						|  | @ -0,0 +1,18 @@ | ||||||
|  | from __future__ import annotations | ||||||
|  | import typing | ||||||
|  | 
 | ||||||
|  | if typing.TYPE_CHECKING: | ||||||
|  |     from librespot.core import Session | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class CacheManager: | ||||||
|  |     clean_up_threshold = 604800000 | ||||||
|  |     header_hash = 253 | ||||||
|  |     header_timestamp = 254 | ||||||
|  |     parent: str | ||||||
|  | 
 | ||||||
|  |     def __init__(self, session: Session): | ||||||
|  |         """ | ||||||
|  |         @Todo Implement function | ||||||
|  |         :param session: | ||||||
|  |         """ | ||||||