#!/usr/bin/env python

from __future__ import unicode_literals

'''Generate header file for nanopb from a ProtoBuf FileDescriptorSet.'''
nanopb_version = "nanopb-0.3.9.8"

import sys
import re
import codecs
import copy
from functools import reduce

try:
    # Add some dummy imports to keep packaging tools happy.
    import google, distutils.util # bbfreeze seems to need these
    import pkg_resources # pyinstaller / protobuf 2.5 seem to need these
    import proto.nanopb_pb2 as nanopb_pb2 # pyinstaller seems to need this
    import proto.plugin_pb2 as plugin_pb2
except:
    # Don't care, we will error out later if it is actually important.
    pass

try:
    import google.protobuf.text_format as text_format
    import google.protobuf.descriptor_pb2 as descriptor
except:
    sys.stderr.write('''
         *************************************************************
         *** Could not import the Google protobuf Python libraries ***
         *** Try installing package 'python-protobuf' or similar.  ***
         *************************************************************
    ''' + '\n')
    raise

try:
    # Preferred: relative import when running as an installed package.
    from .proto import nanopb_pb2, plugin_pb2
except TypeError:
    sys.stderr.write('''
         ****************************************************************************
         *** Got TypeError when importing the protocol definitions for generator. ***
         *** This usually means that the protoc in your path doesn't match the    ***
         *** Python protobuf library version.                                     ***
         ***                                                                      ***
         *** Please check the output of the following commands:                   ***
         *** which protoc                                                         ***
         *** protoc --version                                                     ***
         *** python -c 'import google.protobuf; print(google.protobuf.__file__)'  ***
         *** If you are not able to find the python protobuf version using the    ***
         *** above command, use this command.                                     ***
         *** pip freeze | grep -i protobuf                                        ***
         ****************************************************************************
    ''' + '\n')
    raise
except (ValueError, SystemError, ImportError):
    # Probably invoked directly instead of via installed scripts.
    import proto.nanopb_pb2 as nanopb_pb2
    import proto.plugin_pb2 as plugin_pb2
except:
    sys.stderr.write('''
         ********************************************************************
         *** Failed to import the protocol definitions for generator.     ***
         *** You have to run 'make' in the nanopb/generator/proto folder. ***
         ********************************************************************
    ''' + '\n')
    raise

# ---------------------------------------------------------------------------
#                     Generation of single fields
# ---------------------------------------------------------------------------

import time
import os.path

# Values are tuple (c type, pb type, encoded size, int_size_allowed)
FieldD = descriptor.FieldDescriptorProto
datatypes = {
    FieldD.TYPE_BOOL:       ('bool',     'BOOL',        1,  False),
    FieldD.TYPE_DOUBLE:     ('double',   'DOUBLE',      8,  False),
    FieldD.TYPE_FIXED32:    ('uint32_t', 'FIXED32',     4,  False),
    FieldD.TYPE_FIXED64:    ('uint64_t', 'FIXED64',     8,  False),
    FieldD.TYPE_FLOAT:      ('float',    'FLOAT',       4,  False),
    FieldD.TYPE_INT32:      ('int32_t',  'INT32',      10,  True),
    FieldD.TYPE_INT64:      ('int64_t',  'INT64',      10,  True),
    FieldD.TYPE_SFIXED32:   ('int32_t',  'SFIXED32',    4,  False),
    FieldD.TYPE_SFIXED64:   ('int64_t',  'SFIXED64',    8,  False),
    FieldD.TYPE_SINT32:     ('int32_t',  'SINT32',      5,  True),
    FieldD.TYPE_SINT64:     ('int64_t',  'SINT64',     10,  True),
    FieldD.TYPE_UINT32:     ('uint32_t', 'UINT32',      5,  True),
    FieldD.TYPE_UINT64:     ('uint64_t', 'UINT64',     10,  True)
}

# Integer size overrides (from .proto settings)
intsizes = {
    nanopb_pb2.IS_8:  'int8_t',
    nanopb_pb2.IS_16: 'int16_t',
    nanopb_pb2.IS_32: 'int32_t',
    nanopb_pb2.IS_64: 'int64_t',
}

# String types (for python 2 / python 3 compatibility)
try:
    strtypes = (unicode, str)
except NameError:
    strtypes = (str, )


class Names:
    '''Keeps a set of nested names and formats them to C identifier.'''
    def __init__(self, parts = ()):
        if isinstance(parts, Names):
            parts = parts.parts
        elif isinstance(parts, strtypes):
            parts = (parts,)
        self.parts = tuple(parts)

    def __str__(self):
        return '_'.join(self.parts)

    def __add__(self, other):
        if isinstance(other, strtypes):
            return Names(self.parts + (other,))
        elif isinstance(other, Names):
            return Names(self.parts + other.parts)
        elif isinstance(other, tuple):
            return Names(self.parts + other)
        else:
            raise ValueError("Name parts should be of type str")

    def __eq__(self, other):
        return isinstance(other, Names) and self.parts == other.parts

    def __hash__(self):
        # FIX: defining __eq__ without __hash__ makes instances unhashable
        # on Python 3 (hash is implicitly set to None). Hash on the same
        # tuple that __eq__ compares so equal Names hash equally.
        return hash(self.parts)

def names_from_type_name(type_name):
    '''Parse Names() from FieldDescriptorProto type_name'''
    if type_name[0] != '.':
        raise NotImplementedError("Lookup of non-absolute type names is not supported")
    return Names(type_name[1:].split('.'))

def varint_max_size(max_value):
    '''Returns the maximum number of bytes a varint can take when encoded.'''
    if max_value < 0:
        # Negative values always use the full 64-bit (10 byte) encoding.
        max_value = 2**64 - max_value
    for i in range(1, 11):
        if (max_value >> (i * 7)) == 0:
            return i
    raise ValueError("Value too large for varint: " + str(max_value))

assert varint_max_size(-1) == 10
assert varint_max_size(0) == 1
assert varint_max_size(127) == 1
assert varint_max_size(128) == 2

class EncodedSize:
    '''Class used to represent the encoded size of a field or a message.
    Consists of a combination of symbolic sizes and integer sizes.'''
    def __init__(self, value = 0, symbols = None):
        # FIX: 'symbols' previously defaulted to a shared mutable list
        # ([]), a classic Python pitfall; default to None instead.
        if symbols is None:
            symbols = []
        if isinstance(value, EncodedSize):
            self.value = value.value
            self.symbols = value.symbols
        elif isinstance(value, strtypes + (Names,)):
            self.symbols = [str(value)]
            self.value = 0
        else:
            self.value = value
            self.symbols = symbols

    def __add__(self, other):
        if isinstance(other, int):
            return EncodedSize(self.value + other, self.symbols)
        elif isinstance(other, strtypes + (Names,)):
            return EncodedSize(self.value, self.symbols + [str(other)])
        elif isinstance(other, EncodedSize):
            return EncodedSize(self.value + other.value, self.symbols + other.symbols)
        else:
            raise ValueError("Cannot add size: " + repr(other))

    def __mul__(self, other):
        if isinstance(other, int):
            return EncodedSize(self.value * other, [str(other) + '*' + s for s in self.symbols])
        else:
            raise ValueError("Cannot multiply size: " + repr(other))

    def __str__(self):
        if not self.symbols:
            return str(self.value)
        else:
            return '(' + str(self.value) + ' + ' + ' + '.join(self.symbols) + ')'

    def upperlimit(self):
        # Worst-case numeric value; unknown symbolic parts force the
        # conservative 32-bit maximum.
        if not self.symbols:
            return self.value
        else:
            return 2**32 - 1

class Enum:
    def __init__(self, names, desc, enum_options):
        '''desc is EnumDescriptorProto'''

        self.options = enum_options
        self.names = names

        # by definition, `names` include this enum's name
        base_name = Names(names.parts[:-1])

        if enum_options.long_names:
            self.values = [(names + x.name, x.number) for x in desc.value]
        else:
            self.values = [(base_name + x.name, x.number) for x in desc.value]

        self.value_longnames = [self.names + x.name for x in desc.value]
        self.packed = enum_options.packed_enum

    def has_negative(self):
        for n, v in self.values:
            if v < 0:
                return True
        return False

    def encoded_size(self):
        return max([varint_max_size(v) for n,v in self.values])

    def __str__(self):
        result = 'typedef enum _%s {\n' % self.names
        result += ',\n'.join(["    %s = %d" % x for x in self.values])
        result += '\n}'

        if self.packed:
            result += ' pb_packed'

        result += ' %s;' % self.names

        # sort the enum by value
        sorted_values = sorted(self.values, key = lambda x: (x[1], x[0]))

        result += '\n#define _%s_MIN %s' % (self.names, sorted_values[0][0])
        result += '\n#define _%s_MAX %s' % (self.names, sorted_values[-1][0])
        result += '\n#define _%s_ARRAYSIZE ((%s)(%s+1))' % (self.names, self.names, sorted_values[-1][0])

        if not self.options.long_names:
            # Define the long names always so that enum value references
            # from other files work properly.
            for i, x in enumerate(self.values):
                result += '\n#define %s %s' % (self.value_longnames[i], x[0])

        if self.options.enum_to_string:
            result += '\nconst char *%s_name(%s v);\n' % (self.names, self.names)

        return result

    def enum_to_string_definition(self):
        if not self.options.enum_to_string:
            return ""

        result = 'const char *%s_name(%s v) {\n' % (self.names, self.names)
        result += '    switch (v) {\n'

        for ((enumname, _), strname) in zip(self.values, self.value_longnames):
            # Strip off the leading type name from the string value.
            strval = str(strname)[len(str(self.names)) + 1:]
            result += '        case %s: return "%s";\n' % (enumname, strval)

        result += '    }\n'
        result += '    return "unknown";\n'
        result += '}\n'

        return result
class FieldMaxSize:
    '''Tracks the worst-case size seen over a set of fields, plus any
    compile-time size checks that must be emitted into the generated code.'''
    def __init__(self, worst = 0, checks = None, field_name = 'undefined'):
        # FIX: 'checks' previously defaulted to a shared mutable list ([]);
        # default to None and create a fresh list per instance.
        if checks is None:
            checks = []
        if isinstance(worst, list):
            self.worst = max(i for i in worst if i is not None)
        else:
            self.worst = worst

        self.worst_field = field_name
        self.checks = list(checks)

    def extend(self, extend, field_name = None):
        '''Merge another FieldMaxSize into this one, keeping the larger
        worst-case value and accumulating all checks.'''
        self.worst = max(self.worst, extend.worst)

        if self.worst == extend.worst:
            self.worst_field = extend.worst_field

        self.checks.extend(extend.checks)

class Field:
    def __init__(self, struct_name, desc, field_options):
        '''desc is FieldDescriptorProto'''
        self.tag = desc.number
        self.struct_name = struct_name
        self.union_name = None
        self.name = desc.name
        self.default = None
        self.max_size = None
        self.max_count = None
        self.array_decl = ""
        self.enc_size = None
        self.ctype = None
        self.fixed_count = False

        if field_options.type == nanopb_pb2.FT_INLINE:
            # Before nanopb-0.3.8, fixed length bytes arrays were specified
            # by setting type to FT_INLINE. But to handle pointer typed fields,
            # it makes sense to have it as a separate option.
            field_options.type = nanopb_pb2.FT_STATIC
            field_options.fixed_length = True

        # Parse field options
        if field_options.HasField("max_size"):
            self.max_size = field_options.max_size

        if desc.type == FieldD.TYPE_STRING and field_options.HasField("max_length"):
            # max_length overrides max_size for strings
            self.max_size = field_options.max_length + 1

        if field_options.HasField("max_count"):
            self.max_count = field_options.max_count

        if desc.HasField('default_value'):
            self.default = desc.default_value

        # Check field rules, i.e. required/optional/repeated.
        can_be_static = True
        if desc.label == FieldD.LABEL_REPEATED:
            self.rules = 'REPEATED'
            if self.max_count is None:
                can_be_static = False
            else:
                self.array_decl = '[%d]' % self.max_count
                self.fixed_count = field_options.fixed_count

        elif field_options.proto3:
            self.rules = 'SINGULAR'
        elif desc.label == FieldD.LABEL_REQUIRED:
            self.rules = 'REQUIRED'
        elif desc.label == FieldD.LABEL_OPTIONAL:
            self.rules = 'OPTIONAL'
        else:
            raise NotImplementedError(desc.label)

        # Check if the field can be implemented with static allocation
        # i.e. whether the data size is known.
        if desc.type == FieldD.TYPE_STRING and self.max_size is None:
            can_be_static = False

        if desc.type == FieldD.TYPE_BYTES and self.max_size is None:
            can_be_static = False

        # Decide how the field data will be allocated
        if field_options.type == nanopb_pb2.FT_DEFAULT:
            if can_be_static:
                field_options.type = nanopb_pb2.FT_STATIC
            else:
                field_options.type = nanopb_pb2.FT_CALLBACK

        if field_options.type == nanopb_pb2.FT_STATIC and not can_be_static:
            raise Exception("Field '%s' is defined as static, but max_size or "
                            "max_count is not given." % self.name)

        if field_options.fixed_count and self.max_count is None:
            raise Exception("Field '%s' is defined as fixed count, "
                            "but max_count is not given." % self.name)

        if field_options.type == nanopb_pb2.FT_STATIC:
            self.allocation = 'STATIC'
        elif field_options.type == nanopb_pb2.FT_POINTER:
            self.allocation = 'POINTER'
        elif field_options.type == nanopb_pb2.FT_CALLBACK:
            self.allocation = 'CALLBACK'
        else:
            raise NotImplementedError(field_options.type)

        # Decide the C data type to use in the struct.
        if desc.type in datatypes:
            self.ctype, self.pbtype, self.enc_size, isa = datatypes[desc.type]

            # Override the field size if user wants to use smaller integers
            if isa and field_options.int_size != nanopb_pb2.IS_DEFAULT:
                self.ctype = intsizes[field_options.int_size]
                if desc.type == FieldD.TYPE_UINT32 or desc.type == FieldD.TYPE_UINT64:
                    self.ctype = 'u' + self.ctype
        elif desc.type == FieldD.TYPE_ENUM:
            self.pbtype = 'ENUM'
            self.ctype = names_from_type_name(desc.type_name)
            if self.default is not None:
                self.default = self.ctype + self.default
            self.enc_size = None # Needs to be filled in when enum values are known
        elif desc.type == FieldD.TYPE_STRING:
            self.pbtype = 'STRING'
            self.ctype = 'char'
            if self.allocation == 'STATIC':
                self.ctype = 'char'
                self.array_decl += '[%d]' % self.max_size
                self.enc_size = varint_max_size(self.max_size) + self.max_size
        elif desc.type == FieldD.TYPE_BYTES:
            if field_options.fixed_length:
                self.pbtype = 'FIXED_LENGTH_BYTES'

                if self.max_size is None:
                    raise Exception("Field '%s' is defined as fixed length, "
                                    "but max_size is not given." % self.name)

                self.enc_size = varint_max_size(self.max_size) + self.max_size
                self.ctype = 'pb_byte_t'
                self.array_decl += '[%d]' % self.max_size
            else:
                self.pbtype = 'BYTES'
                self.ctype = 'pb_bytes_array_t'
                if self.allocation == 'STATIC':
                    self.ctype = self.struct_name + self.name + 't'
                    self.enc_size = varint_max_size(self.max_size) + self.max_size
        elif desc.type == FieldD.TYPE_MESSAGE:
            self.pbtype = 'MESSAGE'
            self.ctype = self.submsgname = names_from_type_name(desc.type_name)
            self.enc_size = None # Needs to be filled in after the message type is available
        else:
            raise NotImplementedError(desc.type)

    def __lt__(self, other):
        return self.tag < other.tag

    def __str__(self):
        '''Render this field as a C struct member declaration.'''
        result = ''
        if self.allocation == 'POINTER':
            if self.rules == 'REPEATED':
                result += '    pb_size_t ' + self.name + '_count;\n'

            if self.pbtype == 'MESSAGE':
                # Use struct definition, so recursive submessages are possible
                result += '    struct _%s *%s;' % (self.ctype, self.name)
            elif self.pbtype == 'FIXED_LENGTH_BYTES':
                # Pointer to fixed size array
                result += '    %s (*%s)%s;' % (self.ctype, self.name, self.array_decl)
            elif self.rules == 'REPEATED' and self.pbtype in ['STRING', 'BYTES']:
                # String/bytes arrays need to be defined as pointers to pointers
                result += '    %s **%s;' % (self.ctype, self.name)
            else:
                result += '    %s *%s;' % (self.ctype, self.name)
        elif self.allocation == 'CALLBACK':
            result += '    pb_callback_t %s;' % self.name
        else:
            if self.rules == 'OPTIONAL' and self.allocation == 'STATIC':
                result += '    bool has_' + self.name + ';\n'
            elif (self.rules == 'REPEATED' and
                  self.allocation == 'STATIC' and
                  not self.fixed_count):
                result += '    pb_size_t ' + self.name + '_count;\n'
            result += '    %s %s%s;' % (self.ctype, self.name, self.array_decl)
        return result

    def types(self):
        '''Return definitions for any special types this field might need.'''
        if self.pbtype == 'BYTES' and self.allocation == 'STATIC':
            result = 'typedef PB_BYTES_ARRAY_T(%d) %s;\n' % (self.max_size, self.ctype)
        else:
            result = ''
        return result

    def get_dependencies(self):
        '''Get list of type names used by this field.'''
        if self.allocation == 'STATIC':
            return [str(self.ctype)]
        else:
            return []

    def get_initializer(self, null_init, inner_init_only = False):
        '''Return literal expression for this field's default value.
        null_init: If True, initialize to a 0 value instead of default from .proto
        inner_init_only: If True, exclude initialization for any count/has fields
        '''

        inner_init = None
        if self.pbtype == 'MESSAGE':
            if null_init:
                inner_init = '%s_init_zero' % self.ctype
            else:
                inner_init = '%s_init_default' % self.ctype
        elif self.default is None or null_init:
            if self.pbtype == 'STRING':
                inner_init = '""'
            elif self.pbtype == 'BYTES':
                inner_init = '{0, {0}}'
            elif self.pbtype == 'FIXED_LENGTH_BYTES':
                inner_init = '{0}'
            elif self.pbtype in ('ENUM', 'UENUM'):
                inner_init = '_%s_MIN' % self.ctype
            else:
                inner_init = '0'
        else:
            if self.pbtype == 'STRING':
                data = codecs.escape_encode(self.default.encode('utf-8'))[0]
                inner_init = '"' + data.decode('ascii') + '"'
            elif self.pbtype == 'BYTES':
                data = codecs.escape_decode(self.default)[0]
                data = ["0x%02x" % c for c in bytearray(data)]
                if len(data) == 0:
                    inner_init = '{0, {0}}'
                else:
                    inner_init = '{%d, {%s}}' % (len(data), ','.join(data))
            elif self.pbtype == 'FIXED_LENGTH_BYTES':
                data = codecs.escape_decode(self.default)[0]
                data = ["0x%02x" % c for c in bytearray(data)]
                if len(data) == 0:
                    inner_init = '{0}'
                else:
                    inner_init = '{%s}' % ','.join(data)
            elif self.pbtype in ['FIXED32', 'UINT32']:
                inner_init = str(self.default) + 'u'
            elif self.pbtype in ['FIXED64', 'UINT64']:
                inner_init = str(self.default) + 'ull'
            elif self.pbtype in ['SFIXED64', 'INT64']:
                inner_init = str(self.default) + 'll'
            else:
                inner_init = str(self.default)

        if inner_init_only:
            return inner_init

        outer_init = None
        if self.allocation == 'STATIC':
            if self.rules == 'REPEATED':
                outer_init = ''
                if not self.fixed_count:
                    outer_init += '0, '
                outer_init += '{'
                outer_init += ', '.join([inner_init] * self.max_count)
                outer_init += '}'
            elif self.rules == 'OPTIONAL':
                outer_init = 'false, ' + inner_init
            else:
                outer_init = inner_init
        elif self.allocation == 'POINTER':
            if self.rules == 'REPEATED':
                outer_init = '0, NULL'
            else:
                outer_init = 'NULL'
        elif self.allocation == 'CALLBACK':
            if self.pbtype == 'EXTENSION':
                outer_init = 'NULL'
            else:
                outer_init = '{{NULL}, NULL}'

        return outer_init

    def default_decl(self, declaration_only = False):
        '''Return definition for this field's default value.'''
        if self.default is None:
            return None

        ctype = self.ctype
        default = self.get_initializer(False, True)
        array_decl = ''

        if self.pbtype == 'STRING':
            if self.allocation != 'STATIC':
                return None # Not implemented
            array_decl = '[%d]' % self.max_size
        elif self.pbtype == 'BYTES':
            if self.allocation != 'STATIC':
                return None # Not implemented
        elif self.pbtype == 'FIXED_LENGTH_BYTES':
            if self.allocation != 'STATIC':
                return None # Not implemented
            array_decl = '[%d]' % self.max_size

        if declaration_only:
            return 'extern const %s %s_default%s;' % (ctype, self.struct_name + self.name, array_decl)
        else:
            return 'const %s %s_default%s = %s;' % (ctype, self.struct_name + self.name, array_decl, default)

    def tags(self):
        '''Return the #define for the tag number of this field.'''
        identifier = '%s_%s_tag' % (self.struct_name, self.name)
        return '#define %-40s %d\n' % (identifier, self.tag)

    def pb_field_t(self, prev_field_name, union_index = None):
        '''Return the pb_field_t initializer to use in the constant array.
        prev_field_name is the name of the previous field or None. For OneOf
        unions, union_index is the index of this field inside the OneOf.
        '''

        if self.rules == 'ONEOF':
            if self.anonymous:
                result = '    PB_ANONYMOUS_ONEOF_FIELD(%s, ' % self.union_name
            else:
                result = '    PB_ONEOF_FIELD(%s, ' % self.union_name
        elif self.fixed_count:
            result = '    PB_REPEATED_FIXED_COUNT('
        else:
            result = '    PB_FIELD('

        result += '%3d, ' % self.tag
        result += '%-8s, ' % self.pbtype
        if not self.fixed_count:
            result += '%s, ' % self.rules
            result += '%-8s, ' % self.allocation

        if union_index is not None and union_index > 0:
            result += 'UNION, '
        elif prev_field_name is None:
            result += 'FIRST, '
        else:
            result += 'OTHER, '

        result += '%s, ' % self.struct_name
        result += '%s, ' % self.name
        result += '%s, ' % (prev_field_name or self.name)

        if self.pbtype == 'MESSAGE':
            result += '&%s_fields)' % self.submsgname
        elif self.default is None:
            result += '0)'
        elif self.pbtype in ['BYTES', 'STRING', 'FIXED_LENGTH_BYTES'] and self.allocation != 'STATIC':
            result += '0)' # Arbitrary size default values not implemented
        elif self.rules == 'OPTEXT':
            result += '0)' # Default value for extensions is not implemented
        else:
            result += '&%s_default)' % (self.struct_name + self.name)

        return result

    def get_last_field_name(self):
        return self.name

    def largest_field_value(self):
        '''Determine if this field needs 16bit or 32bit pb_field_t structure to compile properly.
        Returns numeric value or a C-expression for assert.'''
        check = []

        need_check = False

        if self.pbtype == 'BYTES' and self.allocation == 'STATIC' and self.max_size > 251:
            need_check = True
        elif self.pbtype == 'MESSAGE' and self.allocation == 'STATIC':
            need_check = True

        if need_check:
            if self.rules == 'REPEATED':
                check.append('pb_membersize(%s, %s[0])' % (self.struct_name, self.name))
            elif self.rules == 'ONEOF':
                if self.anonymous:
                    check.append('pb_membersize(%s, %s)' % (self.struct_name, self.name))
                else:
                    check.append('pb_membersize(%s, %s.%s)' % (self.struct_name, self.union_name, self.name))
            else:
                check.append('pb_membersize(%s, %s)' % (self.struct_name, self.name))

        return FieldMaxSize([self.tag, self.max_size, self.max_count],
                            check,
                            ('%s.%s' % (self.struct_name, self.name)))

    def encoded_size(self, dependencies):
        '''Return the maximum size that this field can take when encoded,
        including the field tag. If the size cannot be determined, returns
        None.'''

        if self.allocation != 'STATIC':
            return None

        if self.pbtype == 'MESSAGE':
            encsize = None
            if str(self.submsgname) in dependencies:
                submsg = dependencies[str(self.submsgname)]
                encsize = submsg.encoded_size(dependencies)
                if encsize is not None:
                    # Include submessage length prefix
                    encsize += varint_max_size(encsize.upperlimit())
                else:
                    my_msg = dependencies.get(str(self.struct_name))
                    if my_msg and submsg.protofile == my_msg.protofile:
                        # The dependency is from the same file and size cannot be
                        # determined for it, thus we know it will not be possible
                        # in runtime either.
                        return None

            if encsize is None:
                # Submessage or its size cannot be found.
                # This can occur if submessage is defined in different
                # file, and it or its .options could not be found.
                # Instead of direct numeric value, reference the size that
                # has been #defined in the other file.
                encsize = EncodedSize(self.submsgname + 'size')

                # We will have to make a conservative assumption on the length
                # prefix size, though.
                encsize += 5

        elif self.pbtype in ['ENUM', 'UENUM']:
            if str(self.ctype) in dependencies:
                enumtype = dependencies[str(self.ctype)]
                encsize = enumtype.encoded_size()
            else:
                # Conservative assumption
                encsize = 10

        elif self.enc_size is None:
            raise RuntimeError("Could not determine encoded size for %s.%s"
                               % (self.struct_name, self.name))
        else:
            encsize = EncodedSize(self.enc_size)

        encsize += varint_max_size(self.tag << 3) # Tag + wire type

        if self.rules == 'REPEATED':
            # Decoders must be always able to handle unpacked arrays.
            # Therefore we have to reserve space for it, even though
            # we emit packed arrays ourselves. For length of 1, packed
            # arrays are larger however so we need to add allowance
            # for the length byte.
            encsize *= self.max_count

            if self.max_count == 1:
                encsize += 1

        return encsize


class ExtensionRange(Field):
    def __init__(self, struct_name, range_start, field_options):
        '''Implements a special pb_extension_t* field in an extensible message
        structure. The range_start signifies the index at which the extensions
        start. Not necessarily all tags above this are extensions, it is merely
        a speed optimization.
        '''
        self.tag = range_start
        self.struct_name = struct_name
        self.name = 'extensions'
        self.pbtype = 'EXTENSION'
        self.rules = 'OPTIONAL'
        self.allocation = 'CALLBACK'
        self.ctype = 'pb_extension_t'
        self.array_decl = ''
        self.default = None
        self.max_size = 0
        self.max_count = 0
        self.fixed_count = False

    def __str__(self):
        return '    pb_extension_t *extensions;'

    def types(self):
        return ''

    def tags(self):
        return ''

    def encoded_size(self, dependencies):
        # We exclude extensions from the count, because they cannot be known
        # until runtime. Other option would be to return None here, but this
        # way the value remains useful if extensions are not used.
        return EncodedSize(0)
class ExtensionField(Field):
    '''A proto2 extension field. Only "optional" extensions are supported;
    other rules are flagged with self.skip and generate a comment only.'''
    def __init__(self, fullname, desc, field_options):
        self.fullname = fullname
        self.extendee_name = names_from_type_name(desc.extendee)
        # The extension lives in a synthetic one-field struct named
        # '<fullname>struct' in the generated code.
        Field.__init__(self, self.fullname + 'struct', desc, field_options)

        if self.rules != 'OPTIONAL':
            self.skip = True
        else:
            self.skip = False
            # OPTEXT marks this as an optional extension in pb_field_t.
            self.rules = 'OPTEXT'

    def tags(self):
        '''Return the #define for the tag number of this field.'''
        identifier = '%s_tag' % self.fullname
        return '#define %-40s %d\n' % (identifier, self.tag)

    def extension_decl(self):
        '''Declaration of the extension type in the .pb.h file'''
        if self.skip:
            msg = '/* Extension field %s was skipped because only "optional"\n' % self.fullname
            msg +='   type of extension fields is currently supported. */\n'
            return msg

        return ('extern const pb_extension_type_t %s; /* field type: %s */\n' %
            (self.fullname, str(self).strip()))

    def extension_def(self):
        '''Definition of the extension type in the .pb.c file'''

        if self.skip:
            return ''

        result = 'typedef struct {\n'
        result += str(self)
        result += '\n} %s;\n\n' % self.struct_name
        result += ('static const pb_field_t %s_field = \n  %s;\n\n' %
                    (self.fullname, self.pb_field_t(None)))
        result += 'const pb_extension_type_t %s = {\n' % self.fullname
        result += '    NULL,\n'
        result += '    NULL,\n'
        result += '    &%s_field\n' % self.fullname
        result += '};\n'
        return result


# ---------------------------------------------------------------------------
#                   Generation of oneofs (unions)
# ---------------------------------------------------------------------------

class OneOf(Field):
    '''A oneof group, rendered as a C union plus a which_<name> tag field.
    Presents the Field interface so it can live in Message.fields.'''
    def __init__(self, struct_name, oneof_desc):
        self.struct_name = struct_name
        self.name = oneof_desc.name
        self.ctype = 'union'
        self.pbtype = 'oneof'
        self.fields = []
        self.allocation = 'ONEOF'
        self.default = None
        self.rules = 'ONEOF'
        self.anonymous = False

    def add_field(self, field):
        '''Add a member field to this union; kept sorted by tag number.'''
        if field.allocation == 'CALLBACK':
            raise Exception("Callback fields inside of oneof are not supported"
                            + " (field %s)" % field.name)

        field.union_name = self.name
        field.rules = 'ONEOF'
        field.anonymous = self.anonymous
        self.fields.append(field)
        self.fields.sort(key = lambda f: f.tag)

        # Sort by the lowest tag number inside union
        self.tag = min([f.tag for f in self.fields])

    def __str__(self):
        '''Render the which_ tag and the union as C struct members.'''
        result = ''
        if self.fields:
            result += '    pb_size_t which_' + self.name + ";\n"
            result += '    union {\n'
            for f in self.fields:
                result += '    ' + str(f).replace('\n', '\n    ') + '\n'
            if self.anonymous:
                result += '    };'
            else:
                result += '    } ' + self.name + ';'
        return result

    def types(self):
        return ''.join([f.types() for f in self.fields])

    def get_dependencies(self):
        deps = []
        for f in self.fields:
            deps += f.get_dependencies()
        return deps

    def get_initializer(self, null_init):
        # which_ field starts at 0 (= nothing selected); initialize the
        # union with its first member's initializer.
        return '0, {' + self.fields[0].get_initializer(null_init) + '}'

    def default_decl(self, declaration_only = False):
        return None

    def tags(self):
        return ''.join([f.tags() for f in self.fields])

    def pb_field_t(self, prev_field_name):
        parts = []
        for union_index, field in enumerate(self.fields):
            parts.append(field.pb_field_t(prev_field_name, union_index))
        return ',\n'.join(parts)

    def get_last_field_name(self):
        if self.anonymous:
            return self.fields[-1].name
        else:
            return self.name + '.' + self.fields[-1].name

    def largest_field_value(self):
        largest = FieldMaxSize()
        for f in self.fields:
            largest.extend(f.largest_field_value())
        return largest

    def encoded_size(self, dependencies):
        '''Returns the size of the largest oneof field.'''
        largest = 0
        symbols = []
        for f in self.fields:
            size = EncodedSize(f.encoded_size(dependencies))
            if size is None or size.value is None:
                return None
            elif size.symbols:
                symbols.append((f.tag, size.symbols[0]))
            elif size.value > largest:
                largest = size.value

        if not symbols:
            # Simple case, all sizes were known at generator time
            return largest

        if largest > 0:
            # Some sizes were known, some were not
            symbols.insert(0, (0, largest))

        if len(symbols) == 1:
            # Only one symbol was needed
            return EncodedSize(5, [symbols[0][1]])
        else:
            # Use sizeof(union{}) construct to find the maximum size of
            # submessages.
            union_def = ' '.join('char f%d[%s];' % s for s in symbols)
            return EncodedSize(5, ['sizeof(union{%s})' % union_def])
        union_def = ' '.join('char f%d[%s];' % s for s in symbols)
        return EncodedSize(5, ['sizeof(union{%s})' % union_def])

# ---------------------------------------------------------------------------
# Generation of messages (structures)
# ---------------------------------------------------------------------------


class Message:
    '''A whole protobuf message: collects its fields, oneofs and extension
    ranges, and generates the corresponding C struct and pb_field_t list.'''

    def __init__(self, names, desc, message_options):
        '''names: Names instance for this message.
        desc: DescriptorProto from the .proto file.
        message_options: merged NanoPBOptions for this message.
        '''
        self.name = names
        self.fields = []
        self.oneofs = {}
        # Indices of oneofs whose fields are generated as plain struct
        # members instead of a C union.
        no_unions = []

        if message_options.msgid:
            self.msgid = message_options.msgid

        # oneof_decl is only present in protobuf libraries new enough to
        # know about oneofs.
        if hasattr(desc, 'oneof_decl'):
            for i, f in enumerate(desc.oneof_decl):
                oneof_options = get_nanopb_suboptions(desc, message_options, self.name + f.name)
                if oneof_options.no_unions:
                    no_unions.append(i) # No union, but add fields normally
                elif oneof_options.type == nanopb_pb2.FT_IGNORE:
                    pass # No union and skip fields also
                else:
                    oneof = OneOf(self.name, f)
                    if oneof_options.anonymous_oneof:
                        oneof.anonymous = True
                    self.oneofs[i] = oneof
                    self.fields.append(oneof)
        else:
            sys.stderr.write('Note: This Python protobuf library has no OneOf support\n')

        for f in desc.field:
            field_options = get_nanopb_suboptions(f, message_options, self.name + f.name)
            if field_options.type == nanopb_pb2.FT_IGNORE:
                continue

            field = Field(self.name, f, field_options)
            if (hasattr(f, 'oneof_index') and
                f.HasField('oneof_index') and
                f.oneof_index not in no_unions):
                # Field belongs to a union-style oneof.  If the oneof itself
                # was ignored (not in self.oneofs), the field is dropped too.
                if f.oneof_index in self.oneofs:
                    self.oneofs[f.oneof_index].add_field(field)
            else:
                self.fields.append(field)

        if len(desc.extension_range) > 0:
            field_options = get_nanopb_suboptions(desc, message_options, self.name + 'extensions')
            range_start = min([r.start for r in desc.extension_range])
            if field_options.type != nanopb_pb2.FT_IGNORE:
                self.fields.append(ExtensionRange(self.name, range_start, field_options))

        self.packed = message_options.packed_struct
        # ordered_fields is sorted by tag number (Field defines ordering);
        # self.fields keeps declaration order.
        self.ordered_fields = self.fields[:]
        self.ordered_fields.sort()

    def get_dependencies(self):
        '''Get list of type names that this structure refers to.'''
        deps = []
        for f in self.fields:
            deps += f.get_dependencies()
        return deps

    def __str__(self):
        '''Generate the C typedef struct definition for this message.'''
        result = 'typedef struct _%s {\n' % self.name

        if not self.ordered_fields:
            # Empty structs are not allowed in C standard.
            # Therefore add a dummy field if an empty message occurs.
            result += '    char dummy_field;'

        result += '\n'.join([str(f) for f in self.ordered_fields])
        result += '\n/* @@protoc_insertion_point(struct:%s) */' % self.name
        result += '\n}'

        if self.packed:
            result += ' pb_packed'

        result += ' %s;' % self.name

        if self.packed:
            result = 'PB_PACKED_STRUCT_START\n' + result
            result += '\nPB_PACKED_STRUCT_END'

        return result

    def types(self):
        '''Return auxiliary type definitions (e.g. callbacks) of all fields.'''
        return ''.join([f.types() for f in self.fields])

    def get_initializer(self, null_init):
        '''Return a C initializer expression for this struct.
        null_init: if True, use zero values instead of .proto defaults.
        '''
        if not self.ordered_fields:
            return '{0}'

        parts = []
        for field in self.ordered_fields:
            parts.append(field.get_initializer(null_init))
        return '{' + ', '.join(parts) + '}'

    def default_decl(self, declaration_only = False):
        '''Return declarations (or definitions) of field default value
        constants, one per line.'''
        result = ""
        for field in self.fields:
            default = field.default_decl(declaration_only)
            if default is not None:
                result += default + '\n'
        return result

    def all_fields(self):
        '''Iterate over all fields in this message, including nested OneOfs.'''
        for f in self.fields:
            if isinstance(f, OneOf):
                for f2 in f.fields:
                    yield f2
            else:
                yield f

    def count_required_fields(self):
        '''Returns number of required fields inside this message'''
        count = 0
        for f in self.fields:
            if not isinstance(f, OneOf):
                if f.rules == 'REQUIRED':
                    count += 1
        return count

    def count_all_fields(self):
        '''Return total field count, expanding oneofs to their members.'''
        count = 0
        for f in self.fields:
            if isinstance(f, OneOf):
                count += len(f.fields)
            else:
                count += 1
        return count

    def fields_declaration(self):
        '''Return extern declaration of the pb_field_t array (+1 for
        the PB_LAST_FIELD terminator).'''
        result = 'extern const pb_field_t %s_fields[%d];' % (self.name, self.count_all_fields() + 1)
        return result

    def fields_definition(self):
        '''Return the pb_field_t array definition.  Each entry is encoded
        relative to the previous field's name (prev).'''
        result = 'const pb_field_t %s_fields[%d] = {\n' % (self.name, self.count_all_fields() + 1)

        prev = None
        for field in self.ordered_fields:
            result += field.pb_field_t(prev)
            result += ',\n'
            prev = field.get_last_field_name()

        result += '    PB_LAST_FIELD\n};'
        return result

    def encoded_size(self, dependencies):
        '''Return the maximum size that this message can take when encoded.
        If the size cannot be determined, returns None.
        '''
        size = EncodedSize(0)
        for field in self.fields:
            fsize = field.encoded_size(dependencies)
            if fsize is None:
                return None
            size += fsize

        return size


# ---------------------------------------------------------------------------
# Processing of entire .proto files
# ---------------------------------------------------------------------------

def iterate_messages(desc, flatten = False, names = Names()):
    '''Recursively find all messages. For each, yield name, DescriptorProto.'''
    # FileDescriptorProto has message_type, DescriptorProto has nested_type.
    if hasattr(desc, 'message_type'):
        submsgs = desc.message_type
    else:
        submsgs = desc.nested_type

    for submsg in submsgs:
        sub_names = names + submsg.name
        if flatten:
            yield Names(submsg.name), submsg
        else:
            yield sub_names, submsg

        for x in iterate_messages(submsg, flatten, sub_names):
            yield x

def iterate_extensions(desc, flatten = False, names = Names()):
    '''Recursively find all extensions.
    For each, yield name, FieldDescriptorProto.
1094 ''' 1095 for extension in desc.extension: 1096 yield names, extension 1097 1098 for subname, subdesc in iterate_messages(desc, flatten, names): 1099 for extension in subdesc.extension: 1100 yield subname, extension 1101 1102def toposort2(data): 1103 '''Topological sort. 1104 From http://code.activestate.com/recipes/577413-topological-sort/ 1105 This function is under the MIT license. 1106 ''' 1107 for k, v in list(data.items()): 1108 v.discard(k) # Ignore self dependencies 1109 extra_items_in_deps = reduce(set.union, list(data.values()), set()) - set(data.keys()) 1110 data.update(dict([(item, set()) for item in extra_items_in_deps])) 1111 while True: 1112 ordered = set(item for item,dep in list(data.items()) if not dep) 1113 if not ordered: 1114 break 1115 for item in sorted(ordered): 1116 yield item 1117 data = dict([(item, (dep - ordered)) for item,dep in list(data.items()) 1118 if item not in ordered]) 1119 assert not data, "A cyclic dependency exists amongst %r" % data 1120 1121def sort_dependencies(messages): 1122 '''Sort a list of Messages based on dependencies.''' 1123 dependencies = {} 1124 message_by_name = {} 1125 for message in messages: 1126 dependencies[str(message.name)] = set(message.get_dependencies()) 1127 message_by_name[str(message.name)] = message 1128 1129 for msgname in toposort2(dependencies): 1130 if msgname in message_by_name: 1131 yield message_by_name[msgname] 1132 1133def make_identifier(headername): 1134 '''Make #ifndef identifier that contains uppercase A-Z and digits 0-9''' 1135 result = "" 1136 for c in headername.upper(): 1137 if c.isalnum(): 1138 result += c 1139 else: 1140 result += '_' 1141 return result 1142 1143class ProtoFile: 1144 def __init__(self, fdesc, file_options): 1145 '''Takes a FileDescriptorProto and parses it.''' 1146 self.fdesc = fdesc 1147 self.file_options = file_options 1148 self.dependencies = {} 1149 self.parse() 1150 1151 # Some of types used in this file probably come from the file itself. 
1152 # Thus it has implicit dependency on itself. 1153 self.add_dependency(self) 1154 1155 def parse(self): 1156 self.enums = [] 1157 self.messages = [] 1158 self.extensions = [] 1159 1160 mangle_names = self.file_options.mangle_names 1161 flatten = mangle_names == nanopb_pb2.M_FLATTEN 1162 strip_prefix = None 1163 if mangle_names == nanopb_pb2.M_STRIP_PACKAGE: 1164 strip_prefix = "." + self.fdesc.package 1165 1166 def create_name(names): 1167 if mangle_names == nanopb_pb2.M_NONE: 1168 return base_name + names 1169 elif mangle_names == nanopb_pb2.M_STRIP_PACKAGE: 1170 return Names(names) 1171 else: 1172 single_name = names 1173 if isinstance(names, Names): 1174 single_name = names.parts[-1] 1175 return Names(single_name) 1176 1177 def mangle_field_typename(typename): 1178 if mangle_names == nanopb_pb2.M_FLATTEN: 1179 return "." + typename.split(".")[-1] 1180 elif strip_prefix is not None and typename.startswith(strip_prefix): 1181 return typename[len(strip_prefix):] 1182 else: 1183 return typename 1184 1185 if self.fdesc.package: 1186 base_name = Names(self.fdesc.package.split('.')) 1187 else: 1188 base_name = Names() 1189 1190 for enum in self.fdesc.enum_type: 1191 name = create_name(enum.name) 1192 enum_options = get_nanopb_suboptions(enum, self.file_options, name) 1193 self.enums.append(Enum(name, enum, enum_options)) 1194 1195 for names, message in iterate_messages(self.fdesc, flatten): 1196 name = create_name(names) 1197 message_options = get_nanopb_suboptions(message, self.file_options, name) 1198 1199 if message_options.skip_message: 1200 continue 1201 1202 message = copy.deepcopy(message) 1203 for field in message.field: 1204 if field.type in (FieldD.TYPE_MESSAGE, FieldD.TYPE_ENUM): 1205 field.type_name = mangle_field_typename(field.type_name) 1206 1207 self.messages.append(Message(name, message, message_options)) 1208 for enum in message.enum_type: 1209 name = create_name(names + enum.name) 1210 enum_options = get_nanopb_suboptions(enum, message_options, 
name) 1211 self.enums.append(Enum(name, enum, enum_options)) 1212 1213 for names, extension in iterate_extensions(self.fdesc, flatten): 1214 name = create_name(names + extension.name) 1215 field_options = get_nanopb_suboptions(extension, self.file_options, name) 1216 if field_options.type != nanopb_pb2.FT_IGNORE: 1217 self.extensions.append(ExtensionField(name, extension, field_options)) 1218 1219 def add_dependency(self, other): 1220 for enum in other.enums: 1221 self.dependencies[str(enum.names)] = enum 1222 enum.protofile = other 1223 1224 for msg in other.messages: 1225 self.dependencies[str(msg.name)] = msg 1226 msg.protofile = other 1227 1228 # Fix field default values where enum short names are used. 1229 for enum in other.enums: 1230 if not enum.options.long_names: 1231 for message in self.messages: 1232 for field in message.all_fields(): 1233 if field.default in enum.value_longnames: 1234 idx = enum.value_longnames.index(field.default) 1235 field.default = enum.values[idx][0] 1236 1237 # Fix field data types where enums have negative values. 1238 for enum in other.enums: 1239 if not enum.has_negative(): 1240 for message in self.messages: 1241 for field in message.all_fields(): 1242 if field.pbtype == 'ENUM' and field.ctype == enum.names: 1243 field.pbtype = 'UENUM' 1244 1245 def generate_header(self, includes, headername, options): 1246 '''Generate content for a header file. 1247 Generates strings, which should be concatenated and stored to file. 1248 ''' 1249 1250 yield '/* Automatically generated nanopb header */\n' 1251 if options.notimestamp: 1252 yield '/* Generated by %s */\n\n' % (nanopb_version) 1253 else: 1254 yield '/* Generated by %s at %s. 
*/\n\n' % (nanopb_version, time.asctime()) 1255 1256 if self.fdesc.package: 1257 symbol = make_identifier(self.fdesc.package + '_' + headername) 1258 else: 1259 symbol = make_identifier(headername) 1260 yield '#ifndef PB_%s_INCLUDED\n' % symbol 1261 yield '#define PB_%s_INCLUDED\n' % symbol 1262 try: 1263 yield options.libformat % ('pb.h') 1264 except TypeError: 1265 # no %s specified - use whatever was passed in as options.libformat 1266 yield options.libformat 1267 yield '\n' 1268 1269 for incfile in includes: 1270 noext = os.path.splitext(incfile)[0] 1271 yield options.genformat % (noext + options.extension + options.header_extension) 1272 yield '\n' 1273 1274 yield '/* @@protoc_insertion_point(includes) */\n' 1275 1276 yield '#if PB_PROTO_HEADER_VERSION != 30\n' 1277 yield '#error Regenerate this file with the current version of nanopb generator.\n' 1278 yield '#endif\n' 1279 yield '\n' 1280 1281 yield '#ifdef __cplusplus\n' 1282 yield 'extern "C" {\n' 1283 yield '#endif\n\n' 1284 1285 if self.enums: 1286 yield '/* Enum definitions */\n' 1287 for enum in self.enums: 1288 yield str(enum) + '\n\n' 1289 1290 if self.messages: 1291 yield '/* Struct definitions */\n' 1292 for msg in sort_dependencies(self.messages): 1293 yield msg.types() 1294 yield str(msg) + '\n\n' 1295 1296 if self.extensions: 1297 yield '/* Extensions */\n' 1298 for extension in self.extensions: 1299 yield extension.extension_decl() 1300 yield '\n' 1301 1302 if self.messages: 1303 yield '/* Default values for struct fields */\n' 1304 for msg in self.messages: 1305 yield msg.default_decl(True) 1306 yield '\n' 1307 1308 yield '/* Initializer values for message structs */\n' 1309 for msg in self.messages: 1310 identifier = '%s_init_default' % msg.name 1311 yield '#define %-40s %s\n' % (identifier, msg.get_initializer(False)) 1312 for msg in self.messages: 1313 identifier = '%s_init_zero' % msg.name 1314 yield '#define %-40s %s\n' % (identifier, msg.get_initializer(True)) 1315 yield '\n' 1316 1317 
yield '/* Field tags (for use in manual encoding/decoding) */\n' 1318 for msg in sort_dependencies(self.messages): 1319 for field in msg.fields: 1320 yield field.tags() 1321 for extension in self.extensions: 1322 yield extension.tags() 1323 yield '\n' 1324 1325 yield '/* Struct field encoding specification for nanopb */\n' 1326 for msg in self.messages: 1327 yield msg.fields_declaration() + '\n' 1328 yield '\n' 1329 1330 yield '/* Maximum encoded size of messages (where known) */\n' 1331 for msg in self.messages: 1332 msize = msg.encoded_size(self.dependencies) 1333 identifier = '%s_size' % msg.name 1334 if msize is not None: 1335 yield '#define %-40s %s\n' % (identifier, msize) 1336 else: 1337 yield '/* %s depends on runtime parameters */\n' % identifier 1338 yield '\n' 1339 1340 yield '/* Message IDs (where set with "msgid" option) */\n' 1341 1342 yield '#ifdef PB_MSGID\n' 1343 for msg in self.messages: 1344 if hasattr(msg,'msgid'): 1345 yield '#define PB_MSG_%d %s\n' % (msg.msgid, msg.name) 1346 yield '\n' 1347 1348 symbol = make_identifier(headername.split('.')[0]) 1349 yield '#define %s_MESSAGES \\\n' % symbol 1350 1351 for msg in self.messages: 1352 m = "-1" 1353 msize = msg.encoded_size(self.dependencies) 1354 if msize is not None: 1355 m = msize 1356 if hasattr(msg,'msgid'): 1357 yield '\tPB_MSG(%d,%s,%s) \\\n' % (msg.msgid, m, msg.name) 1358 yield '\n' 1359 1360 for msg in self.messages: 1361 if hasattr(msg,'msgid'): 1362 yield '#define %s_msgid %d\n' % (msg.name, msg.msgid) 1363 yield '\n' 1364 1365 yield '#endif\n\n' 1366 1367 yield '#ifdef __cplusplus\n' 1368 yield '} /* extern "C" */\n' 1369 yield '#endif\n' 1370 1371 # End of header 1372 yield '/* @@protoc_insertion_point(eof) */\n' 1373 yield '\n#endif\n' 1374 1375 def generate_source(self, headername, options): 1376 '''Generate content for a source file.''' 1377 1378 yield '/* Automatically generated nanopb constant definitions */\n' 1379 if options.notimestamp: 1380 yield '/* Generated by %s 
*/\n\n' % (nanopb_version) 1381 else: 1382 yield '/* Generated by %s at %s. */\n\n' % (nanopb_version, time.asctime()) 1383 yield options.genformat % (headername) 1384 yield '\n' 1385 yield '/* @@protoc_insertion_point(includes) */\n' 1386 1387 yield '#if PB_PROTO_HEADER_VERSION != 30\n' 1388 yield '#error Regenerate this file with the current version of nanopb generator.\n' 1389 yield '#endif\n' 1390 yield '\n' 1391 1392 for msg in self.messages: 1393 yield msg.default_decl(False) 1394 1395 yield '\n\n' 1396 1397 for msg in self.messages: 1398 yield msg.fields_definition() + '\n\n' 1399 1400 for ext in self.extensions: 1401 yield ext.extension_def() + '\n' 1402 1403 for enum in self.enums: 1404 yield enum.enum_to_string_definition() + '\n' 1405 1406 # Add checks for numeric limits 1407 if self.messages: 1408 largest_msg = max(self.messages, key = lambda m: m.count_required_fields()) 1409 largest_count = largest_msg.count_required_fields() 1410 if largest_count > 64: 1411 yield '\n/* Check that missing required fields will be properly detected */\n' 1412 yield '#if PB_MAX_REQUIRED_FIELDS < %d\n' % largest_count 1413 yield '#error Properly detecting missing required fields in %s requires \\\n' % largest_msg.name 1414 yield ' setting PB_MAX_REQUIRED_FIELDS to %d or more.\n' % largest_count 1415 yield '#endif\n' 1416 1417 max_field = FieldMaxSize() 1418 checks_msgnames = [] 1419 for msg in self.messages: 1420 checks_msgnames.append(msg.name) 1421 for field in msg.fields: 1422 max_field.extend(field.largest_field_value()) 1423 for field in self.extensions: 1424 max_field.extend(field.largest_field_value()) 1425 1426 worst = max_field.worst 1427 worst_field = max_field.worst_field 1428 checks = max_field.checks 1429 1430 if worst > 255 or checks: 1431 yield '\n/* Check that field information fits in pb_field_t */\n' 1432 1433 if worst > 65535 or checks: 1434 yield '#if !defined(PB_FIELD_32BIT)\n' 1435 if worst > 65535: 1436 yield '#error Field descriptor for %s is too 
large. Define PB_FIELD_32BIT to fix this.\n' % worst_field 1437 else: 1438 assertion = ' && '.join(str(c) + ' < 65536' for c in checks) 1439 msgs = '_'.join(str(n) for n in checks_msgnames) 1440 yield '/* If you get an error here, it means that you need to define PB_FIELD_32BIT\n' 1441 yield ' * compile-time option. You can do that in pb.h or on compiler command line.\n' 1442 yield ' *\n' 1443 yield ' * The reason you need to do this is that some of your messages contain tag\n' 1444 yield ' * numbers or field sizes that are larger than what can fit in 8 or 16 bit\n' 1445 yield ' * field descriptors.\n' 1446 yield ' */\n' 1447 yield 'PB_STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_32BIT_FOR_MESSAGES_%s)\n'%(assertion,msgs) 1448 yield '#endif\n\n' 1449 1450 if worst < 65536: 1451 yield '#if !defined(PB_FIELD_16BIT) && !defined(PB_FIELD_32BIT)\n' 1452 if worst > 255: 1453 yield '#error Field descriptor for %s is too large. Define PB_FIELD_16BIT to fix this.\n' % worst_field 1454 else: 1455 assertion = ' && '.join(str(c) + ' < 256' for c in checks) 1456 msgs = '_'.join(str(n) for n in checks_msgnames) 1457 yield '/* If you get an error here, it means that you need to define PB_FIELD_16BIT\n' 1458 yield ' * compile-time option. 
You can do that in pb.h or on compiler command line.\n' 1459 yield ' *\n' 1460 yield ' * The reason you need to do this is that some of your messages contain tag\n' 1461 yield ' * numbers or field sizes that are larger than what can fit in the default\n' 1462 yield ' * 8 bit descriptors.\n' 1463 yield ' */\n' 1464 yield 'PB_STATIC_ASSERT((%s), YOU_MUST_DEFINE_PB_FIELD_16BIT_FOR_MESSAGES_%s)\n'%(assertion,msgs) 1465 yield '#endif\n\n' 1466 1467 # Add check for sizeof(double) 1468 has_double = False 1469 for msg in self.messages: 1470 for field in msg.all_fields(): 1471 if field.ctype == 'double': 1472 has_double = True 1473 1474 if has_double: 1475 yield '\n' 1476 yield '/* On some platforms (such as AVR), double is really float.\n' 1477 yield ' * These are not directly supported by nanopb, but see example_avr_double.\n' 1478 yield ' * To get rid of this error, remove any double fields from your .proto.\n' 1479 yield ' */\n' 1480 yield 'PB_STATIC_ASSERT(sizeof(double) == 8, DOUBLE_MUST_BE_8_BYTES)\n' 1481 1482 yield '\n' 1483 yield '/* @@protoc_insertion_point(eof) */\n' 1484 1485# --------------------------------------------------------------------------- 1486# Options parsing for the .proto files 1487# --------------------------------------------------------------------------- 1488 1489from fnmatch import fnmatch 1490 1491def read_options_file(infile): 1492 '''Parse a separate options file to list: 1493 [(namemask, options), ...] 1494 ''' 1495 results = [] 1496 data = infile.read() 1497 data = re.sub('/\*.*?\*/', '', data, flags = re.MULTILINE) 1498 data = re.sub('//.*?$', '', data, flags = re.MULTILINE) 1499 data = re.sub('#.*?$', '', data, flags = re.MULTILINE) 1500 for i, line in enumerate(data.split('\n')): 1501 line = line.strip() 1502 if not line: 1503 continue 1504 1505 parts = line.split(None, 1) 1506 1507 if len(parts) < 2: 1508 sys.stderr.write("%s:%d: " % (infile.name, i + 1) + 1509 "Option lines should have space between field name and options. 
" + 1510 "Skipping line: '%s'\n" % line) 1511 continue 1512 1513 opts = nanopb_pb2.NanoPBOptions() 1514 1515 try: 1516 text_format.Merge(parts[1], opts) 1517 except Exception as e: 1518 sys.stderr.write("%s:%d: " % (infile.name, i + 1) + 1519 "Unparseable option line: '%s'. " % line + 1520 "Error: %s\n" % str(e)) 1521 continue 1522 results.append((parts[0], opts)) 1523 1524 return results 1525 1526class Globals: 1527 '''Ugly global variables, should find a good way to pass these.''' 1528 verbose_options = False 1529 separate_options = [] 1530 matched_namemasks = set() 1531 1532def get_nanopb_suboptions(subdesc, options, name): 1533 '''Get copy of options, and merge information from subdesc.''' 1534 new_options = nanopb_pb2.NanoPBOptions() 1535 new_options.CopyFrom(options) 1536 1537 if hasattr(subdesc, 'syntax') and subdesc.syntax == "proto3": 1538 new_options.proto3 = True 1539 1540 # Handle options defined in a separate file 1541 dotname = '.'.join(name.parts) 1542 for namemask, options in Globals.separate_options: 1543 if fnmatch(dotname, namemask): 1544 Globals.matched_namemasks.add(namemask) 1545 new_options.MergeFrom(options) 1546 1547 # Handle options defined in .proto 1548 if isinstance(subdesc.options, descriptor.FieldOptions): 1549 ext_type = nanopb_pb2.nanopb 1550 elif isinstance(subdesc.options, descriptor.FileOptions): 1551 ext_type = nanopb_pb2.nanopb_fileopt 1552 elif isinstance(subdesc.options, descriptor.MessageOptions): 1553 ext_type = nanopb_pb2.nanopb_msgopt 1554 elif isinstance(subdesc.options, descriptor.EnumOptions): 1555 ext_type = nanopb_pb2.nanopb_enumopt 1556 else: 1557 raise Exception("Unknown options type") 1558 1559 if subdesc.options.HasExtension(ext_type): 1560 ext = subdesc.options.Extensions[ext_type] 1561 new_options.MergeFrom(ext) 1562 1563 if Globals.verbose_options: 1564 sys.stderr.write("Options for " + dotname + ": ") 1565 sys.stderr.write(text_format.MessageToString(new_options) + "\n") 1566 1567 return new_options 1568 1569 
1570# --------------------------------------------------------------------------- 1571# Command line interface 1572# --------------------------------------------------------------------------- 1573 1574import sys 1575import os.path 1576from optparse import OptionParser 1577 1578optparser = OptionParser( 1579 usage = "Usage: nanopb_generator.py [options] file.pb ...", 1580 epilog = "Compile file.pb from file.proto by: 'protoc -ofile.pb file.proto'. " + 1581 "Output will be written to file.pb.h and file.pb.c.") 1582optparser.add_option("--version", dest="version", action="store_true", 1583 help="Show version info and exit") 1584optparser.add_option("-x", dest="exclude", metavar="FILE", action="append", default=[], 1585 help="Exclude file from generated #include list.") 1586optparser.add_option("-e", "--extension", dest="extension", metavar="EXTENSION", default=".pb", 1587 help="Set extension to use instead of '.pb' for generated files. [default: %default]") 1588optparser.add_option("-H", "--header-extension", dest="header_extension", metavar="EXTENSION", default=".h", 1589 help="Set extension to use for generated header files. [default: %default]") 1590optparser.add_option("-S", "--source-extension", dest="source_extension", metavar="EXTENSION", default=".c", 1591 help="Set extension to use for generated source files. 
[default: %default]") 1592optparser.add_option("-f", "--options-file", dest="options_file", metavar="FILE", default="%s.options", 1593 help="Set name of a separate generator options file.") 1594optparser.add_option("-I", "--options-path", dest="options_path", metavar="DIR", 1595 action="append", default = [], 1596 help="Search for .options files additionally in this path") 1597optparser.add_option("-D", "--output-dir", dest="output_dir", 1598 metavar="OUTPUTDIR", default=None, 1599 help="Output directory of .pb.h and .pb.c files") 1600optparser.add_option("-Q", "--generated-include-format", dest="genformat", 1601 metavar="FORMAT", default='#include "%s"\n', 1602 help="Set format string to use for including other .pb.h files. [default: %default]") 1603optparser.add_option("-L", "--library-include-format", dest="libformat", 1604 metavar="FORMAT", default='#include <%s>\n', 1605 help="Set format string to use for including the nanopb pb.h header. [default: %default]") 1606optparser.add_option("--strip-path", dest="strip_path", action="store_true", default=True, 1607 help="Strip directory path from #included .pb.h file name [default: %default]") 1608optparser.add_option("--no-strip-path", dest="strip_path", action="store_false", 1609 help="Opposite of --strip-path") 1610optparser.add_option("-T", "--no-timestamp", dest="notimestamp", action="store_true", default=False, 1611 help="Don't add timestamp to .pb.h and .pb.c preambles") 1612optparser.add_option("-q", "--quiet", dest="quiet", action="store_true", default=False, 1613 help="Don't print anything except errors.") 1614optparser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False, 1615 help="Print more information.") 1616optparser.add_option("-s", dest="settings", metavar="OPTION:VALUE", action="append", default=[], 1617 help="Set generator option (max_size, max_count etc.).") 1618 1619def parse_file(filename, fdesc, options): 1620 '''Parse a single file. 
Returns a ProtoFile instance.''' 1621 toplevel_options = nanopb_pb2.NanoPBOptions() 1622 for s in options.settings: 1623 text_format.Merge(s, toplevel_options) 1624 1625 if not fdesc: 1626 data = open(filename, 'rb').read() 1627 fdesc = descriptor.FileDescriptorSet.FromString(data).file[0] 1628 1629 # Check if there is a separate .options file 1630 had_abspath = False 1631 try: 1632 optfilename = options.options_file % os.path.splitext(filename)[0] 1633 except TypeError: 1634 # No %s specified, use the filename as-is 1635 optfilename = options.options_file 1636 had_abspath = True 1637 1638 paths = ['.'] + options.options_path 1639 for p in paths: 1640 if os.path.isfile(os.path.join(p, optfilename)): 1641 optfilename = os.path.join(p, optfilename) 1642 if options.verbose: 1643 sys.stderr.write('Reading options from ' + optfilename + '\n') 1644 Globals.separate_options = read_options_file(open(optfilename, 'r', encoding = 'utf-8')) 1645 break 1646 else: 1647 # If we are given a full filename and it does not exist, give an error. 1648 # However, don't give error when we automatically look for .options file 1649 # with the same name as .proto. 1650 if options.verbose or had_abspath: 1651 sys.stderr.write('Options file not found: ' + optfilename + '\n') 1652 Globals.separate_options = [] 1653 1654 Globals.matched_namemasks = set() 1655 1656 # Parse the file 1657 file_options = get_nanopb_suboptions(fdesc, toplevel_options, Names([filename])) 1658 f = ProtoFile(fdesc, file_options) 1659 f.optfilename = optfilename 1660 1661 return f 1662 1663def process_file(filename, fdesc, options, other_files = {}): 1664 '''Process a single file. 1665 filename: The full path to the .proto or .pb source file, as string. 1666 fdesc: The loaded FileDescriptorSet, or None to read from the input file. 1667 options: Command line options as they come from OptionsParser. 
1668 1669 Returns a dict: 1670 {'headername': Name of header file, 1671 'headerdata': Data for the .h header file, 1672 'sourcename': Name of the source code file, 1673 'sourcedata': Data for the .c source code file 1674 } 1675 ''' 1676 f = parse_file(filename, fdesc, options) 1677 1678 # Provide dependencies if available 1679 for dep in f.fdesc.dependency: 1680 if dep in other_files: 1681 f.add_dependency(other_files[dep]) 1682 1683 # Decide the file names 1684 noext = os.path.splitext(filename)[0] 1685 headername = noext + options.extension + options.header_extension 1686 sourcename = noext + options.extension + options.source_extension 1687 1688 if options.strip_path: 1689 headerbasename = os.path.basename(headername) 1690 else: 1691 headerbasename = headername 1692 1693 # List of .proto files that should not be included in the C header file 1694 # even if they are mentioned in the source .proto. 1695 excludes = ['nanopb.proto', 'google/protobuf/descriptor.proto'] + options.exclude 1696 includes = [d for d in f.fdesc.dependency if d not in excludes] 1697 1698 headerdata = ''.join(f.generate_header(includes, headerbasename, options)) 1699 sourcedata = ''.join(f.generate_source(headerbasename, options)) 1700 1701 # Check if there were any lines in .options that did not match a member 1702 unmatched = [n for n,o in Globals.separate_options if n not in Globals.matched_namemasks] 1703 if unmatched and not options.quiet: 1704 sys.stderr.write("Following patterns in " + f.optfilename + " did not match any fields: " 1705 + ', '.join(unmatched) + "\n") 1706 if not Globals.verbose_options: 1707 sys.stderr.write("Use protoc --nanopb-out=-v:. 
to see a list of the field names.\n") 1708 1709 return {'headername': headername, 'headerdata': headerdata, 1710 'sourcename': sourcename, 'sourcedata': sourcedata} 1711 1712def main_cli(): 1713 '''Main function when invoked directly from the command line.''' 1714 1715 options, filenames = optparser.parse_args() 1716 1717 if options.version: 1718 print(nanopb_version) 1719 sys.exit(0) 1720 1721 if not filenames: 1722 optparser.print_help() 1723 sys.exit(1) 1724 1725 if options.quiet: 1726 options.verbose = False 1727 1728 if options.output_dir and not os.path.exists(options.output_dir): 1729 optparser.print_help() 1730 sys.stderr.write("\noutput_dir does not exist: %s\n" % options.output_dir) 1731 sys.exit(1) 1732 1733 if options.verbose: 1734 sys.stderr.write("Nanopb version %s\n" % nanopb_version) 1735 sys.stderr.write('Google Python protobuf library imported from %s, version %s\n' 1736 % (google.protobuf.__file__, google.protobuf.__version__)) 1737 1738 Globals.verbose_options = options.verbose 1739 for filename in filenames: 1740 results = process_file(filename, None, options) 1741 1742 base_dir = options.output_dir or '' 1743 to_write = [ 1744 (os.path.join(base_dir, results['headername']), results['headerdata']), 1745 (os.path.join(base_dir, results['sourcename']), results['sourcedata']), 1746 ] 1747 1748 if not options.quiet: 1749 paths = " and ".join([x[0] for x in to_write]) 1750 sys.stderr.write("Writing to %s\n" % paths) 1751 1752 for path, data in to_write: 1753 with open(path, 'w') as f: 1754 f.write(data) 1755 1756def main_plugin(): 1757 '''Main function when invoked as a protoc plugin.''' 1758 1759 import io, sys 1760 if sys.platform == "win32": 1761 import os, msvcrt 1762 # Set stdin and stdout to binary mode 1763 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY) 1764 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY) 1765 1766 data = io.open(sys.stdin.fileno(), "rb").read() 1767 1768 request = plugin_pb2.CodeGeneratorRequest.FromString(data) 1769 1770 
try: 1771 # Versions of Python prior to 2.7.3 do not support unicode 1772 # input to shlex.split(). Try to convert to str if possible. 1773 params = str(request.parameter) 1774 except UnicodeEncodeError: 1775 params = request.parameter 1776 1777 import shlex 1778 args = shlex.split(params) 1779 1780 if len(args) == 1 and ',' in args[0]: 1781 # For compatibility with other protoc plugins, support options 1782 # separated by comma. 1783 lex = shlex.shlex(params) 1784 lex.whitespace_split = True 1785 lex.whitespace = ',' 1786 args = list(lex) 1787 1788 optparser.usage = "Usage: protoc --nanopb_out=[options][,more_options]:outdir file.proto" 1789 optparser.epilog = "Output will be written to file.pb.h and file.pb.c." 1790 1791 if '-h' in args or '--help' in args: 1792 # By default optparser prints help to stdout, which doesn't work for 1793 # protoc plugins. 1794 optparser.print_help(sys.stderr) 1795 sys.exit(1) 1796 1797 options, dummy = optparser.parse_args(args) 1798 1799 if options.version: 1800 sys.stderr.write('%s\n' % (nanopb_version)) 1801 sys.exit(0) 1802 1803 Globals.verbose_options = options.verbose 1804 1805 if options.verbose: 1806 sys.stderr.write("Nanopb version %s\n" % nanopb_version) 1807 sys.stderr.write('Google Python protobuf library imported from %s, version %s\n' 1808 % (google.protobuf.__file__, google.protobuf.__version__)) 1809 1810 response = plugin_pb2.CodeGeneratorResponse() 1811 1812 # Google's protoc does not currently indicate the full path of proto files. 1813 # Instead always add the main file path to the search dirs, that works for 1814 # the common case. 
1815 import os.path 1816 options.options_path.append(os.path.dirname(request.file_to_generate[0])) 1817 1818 # Process any include files first, in order to have them 1819 # available as dependencies 1820 other_files = {} 1821 for fdesc in request.proto_file: 1822 other_files[fdesc.name] = parse_file(fdesc.name, fdesc, options) 1823 1824 for filename in request.file_to_generate: 1825 for fdesc in request.proto_file: 1826 if fdesc.name == filename: 1827 results = process_file(filename, fdesc, options, other_files) 1828 1829 f = response.file.add() 1830 f.name = results['headername'] 1831 f.content = results['headerdata'] 1832 1833 f = response.file.add() 1834 f.name = results['sourcename'] 1835 f.content = results['sourcedata'] 1836 1837 io.open(sys.stdout.fileno(), "wb").write(response.SerializeToString()) 1838 1839if __name__ == '__main__': 1840 # Check if we are running as a plugin under protoc 1841 if 'protoc-gen-' in sys.argv[0] or '--protoc-plugin' in sys.argv: 1842 main_plugin() 1843 else: 1844 main_cli() 1845