from fontTools.misc.fixedTools import (
    fixedToFloat as fi2fl,
    floatToFixed as fl2fi,
    floatToFixedToStr as fl2str,
    strToFixedToFloat as str2fl,
    otRound,
)
from fontTools.misc.textTools import safeEval
import array
from collections import Counter, defaultdict
import io
import logging
import struct
import sys


# https://www.microsoft.com/typography/otspec/otvarcommonformats.htm

EMBEDDED_PEAK_TUPLE = 0x8000
INTERMEDIATE_REGION = 0x4000
PRIVATE_POINT_NUMBERS = 0x2000

DELTAS_ARE_ZERO = 0x80
DELTAS_ARE_WORDS = 0x40
DELTA_RUN_COUNT_MASK = 0x3F

POINTS_ARE_WORDS = 0x80
POINT_RUN_COUNT_MASK = 0x7F

TUPLES_SHARE_POINT_NUMBERS = 0x8000
TUPLE_COUNT_MASK = 0x0FFF
TUPLE_INDEX_MASK = 0x0FFF

log = logging.getLogger(__name__)


class TupleVariation(object):
    def __init__(self, axes, coordinates):
        self.axes = axes.copy()
        self.coordinates = list(coordinates)

    def __repr__(self):
        axes = ",".join(
            sorted(["%s=%s" % (name, value) for (name, value) in self.axes.items()])
        )
        return "<TupleVariation %s %s>" % (axes, self.coordinates)

    def __eq__(self, other):
        return self.coordinates == other.coordinates and self.axes == other.axes

    def getUsedPoints(self):
        # Empty set means "all points used".
        if None not in self.coordinates:
            return frozenset()
        used = frozenset([i for i, p in enumerate(self.coordinates) if p is not None])
        # Return None if no points used.
        return used if used else None

    def hasImpact(self):
        """Returns True if this TupleVariation has any visible impact.

        If the result is False, the TupleVariation can be omitted from the font
        without making any visible difference.
        """
        return any(c is not None for c in self.coordinates)
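    # Illustrative usage sketch (the 'wght' axis tag and the 4-point glyph are
    # assumptions made up for this example, not part of any real font):
    #
    #     var = TupleVariation({"wght": (0.0, 1.0, 1.0)},
    #                          [(10, 0), (0, 5), None, (0, 0)])
    #     var.hasImpact()      # True: at least one delta is not None
    #     var.getUsedPoints()  # frozenset({0, 1, 3}); point 2 is left to be inferred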
"delta": 118 if "pt" in attrs: 119 point = safeEval(attrs["pt"]) 120 x = safeEval(attrs["x"]) 121 y = safeEval(attrs["y"]) 122 self.coordinates[point] = (x, y) 123 elif "cvt" in attrs: 124 cvt = safeEval(attrs["cvt"]) 125 value = safeEval(attrs["value"]) 126 self.coordinates[cvt] = value 127 else: 128 log.warning("bad delta format: %s" % ", ".join(sorted(attrs.keys()))) 129 130 def compile(self, axisTags, sharedCoordIndices={}, pointData=None): 131 assert set(self.axes.keys()) <= set(axisTags), ( 132 "Unknown axis tag found.", 133 self.axes.keys(), 134 axisTags, 135 ) 136 137 tupleData = [] 138 auxData = [] 139 140 if pointData is None: 141 usedPoints = self.getUsedPoints() 142 if usedPoints is None: # Nothing to encode 143 return b"", b"" 144 pointData = self.compilePoints(usedPoints) 145 146 coord = self.compileCoord(axisTags) 147 flags = sharedCoordIndices.get(coord) 148 if flags is None: 149 flags = EMBEDDED_PEAK_TUPLE 150 tupleData.append(coord) 151 152 intermediateCoord = self.compileIntermediateCoord(axisTags) 153 if intermediateCoord is not None: 154 flags |= INTERMEDIATE_REGION 155 tupleData.append(intermediateCoord) 156 157 # pointData of b'' implies "use shared points". 158 if pointData: 159 flags |= PRIVATE_POINT_NUMBERS 160 auxData.append(pointData) 161 162 auxData.append(self.compileDeltas()) 163 auxData = b"".join(auxData) 164 165 tupleData.insert(0, struct.pack(">HH", len(auxData), flags)) 166 return b"".join(tupleData), auxData 167 168 def compileCoord(self, axisTags): 169 result = [] 170 axes = self.axes 171 for axis in axisTags: 172 triple = axes.get(axis) 173 if triple is None: 174 result.append(b"\0\0") 175 else: 176 result.append(struct.pack(">h", fl2fi(triple[1], 14))) 177 return b"".join(result) 178 179 def compileIntermediateCoord(self, axisTags): 180 needed = False 181 for axis in axisTags: 182 minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0)) 183 defaultMinValue = min(value, 0.0) # -0.3 --> -0.3; 0.7 --> 0.0 184 defaultMaxValue = max(value, 0.0) # -0.3 --> 0.0; 0.7 --> 0.7 185 if (minValue != defaultMinValue) or (maxValue != defaultMaxValue): 186 needed = True 187 break 188 if not needed: 189 return None 190 minCoords = [] 191 maxCoords = [] 192 for axis in axisTags: 193 minValue, value, maxValue = self.axes.get(axis, (0.0, 0.0, 0.0)) 194 minCoords.append(struct.pack(">h", fl2fi(minValue, 14))) 195 maxCoords.append(struct.pack(">h", fl2fi(maxValue, 14))) 196 return b"".join(minCoords + maxCoords) 197 198 @staticmethod 199 def decompileCoord_(axisTags, data, offset): 200 coord = {} 201 pos = offset 202 for axis in axisTags: 203 coord[axis] = fi2fl(struct.unpack(">h", data[pos : pos + 2])[0], 14) 204 pos += 2 205 return coord, pos 206 207 @staticmethod 208 def compilePoints(points): 209 # If the set consists of all points in the glyph, it gets encoded with 210 # a special encoding: a single zero byte. 211 # 212 # To use this optimization, points passed in must be empty set. 213 # The following two lines are not strictly necessary as the main code 214 # below would emit the same. But this is most common and faster. 215 if not points: 216 return b"\0" 217 218 # In the 'gvar' table, the packing of point numbers is a little surprising. 219 # It consists of multiple runs, each being a delta-encoded list of integers. 220 # For example, the point set {17, 18, 19, 20, 21, 22, 23} gets encoded as 221 # [6, 17, 1, 1, 1, 1, 1, 1]. The first value (6) is the run length minus 1. 
        points = list(points)
        points.sort()
        numPoints = len(points)

        result = bytearray()
        # The binary representation starts with the total number of points in the set,
        # encoded into one or two bytes depending on the value.
        if numPoints < 0x80:
            result.append(numPoints)
        else:
            result.append((numPoints >> 8) | 0x80)
            result.append(numPoints & 0xFF)

        MAX_RUN_LENGTH = 127
        pos = 0
        lastValue = 0
        while pos < numPoints:
            runLength = 0

            headerPos = len(result)
            result.append(0)

            useByteEncoding = None
            while pos < numPoints and runLength <= MAX_RUN_LENGTH:
                curValue = points[pos]
                delta = curValue - lastValue
                if useByteEncoding is None:
                    useByteEncoding = 0 <= delta <= 0xFF
                if useByteEncoding and (delta > 0xFF or delta < 0):
                    # we need to start a new run (which will not use byte encoding)
                    break
                # TODO This never switches back to a byte-encoding from a short-encoding.
                # That's suboptimal.
                if useByteEncoding:
                    result.append(delta)
                else:
                    result.append(delta >> 8)
                    result.append(delta & 0xFF)
                lastValue = curValue
                pos += 1
                runLength += 1
            if useByteEncoding:
                result[headerPos] = runLength - 1
            else:
                result[headerPos] = (runLength - 1) | POINTS_ARE_WORDS

        return result

    @staticmethod
    def decompilePoints_(numPoints, data, offset, tableTag):
        """(numPoints, data, offset, tableTag) --> ([point1, point2, ...], newOffset)"""
        assert tableTag in ("cvar", "gvar")
        pos = offset
        numPointsInData = data[pos]
        pos += 1
        if (numPointsInData & POINTS_ARE_WORDS) != 0:
            numPointsInData = (numPointsInData & POINT_RUN_COUNT_MASK) << 8 | data[pos]
            pos += 1
        if numPointsInData == 0:
            return (range(numPoints), pos)

        result = []
        while len(result) < numPointsInData:
            runHeader = data[pos]
            pos += 1
            numPointsInRun = (runHeader & POINT_RUN_COUNT_MASK) + 1
            point = 0
            if (runHeader & POINTS_ARE_WORDS) != 0:
                points = array.array("H")
                pointsSize = numPointsInRun * 2
            else:
                points = array.array("B")
                pointsSize = numPointsInRun
            points.frombytes(data[pos : pos + pointsSize])
            if sys.byteorder != "big":
                points.byteswap()

            assert len(points) == numPointsInRun
            pos += pointsSize

            result.extend(points)

        # Convert relative to absolute
        absolute = []
        current = 0
        for delta in result:
            current += delta
            absolute.append(current)
        result = absolute
        del absolute

        badPoints = {str(p) for p in result if p < 0 or p >= numPoints}
        if badPoints:
            log.warning(
                "point %s out of range in '%s' table"
                % (",".join(sorted(badPoints)), tableTag)
            )
        return (result, pos)

    def compileDeltas(self):
        deltaX = []
        deltaY = []
        if self.getCoordWidth() == 2:
            for c in self.coordinates:
                if c is None:
                    continue
                deltaX.append(c[0])
                deltaY.append(c[1])
        else:
            for c in self.coordinates:
                if c is None:
                    continue
                deltaX.append(c)
        bytearr = bytearray()
        self.compileDeltaValues_(deltaX, bytearr)
        self.compileDeltaValues_(deltaY, bytearr)
        return bytearr

    @staticmethod
    def compileDeltaValues_(deltas, bytearr=None):
        """[value1, value2, value3, ...] --> bytearray

        Emits a sequence of runs. Each run starts with a
        byte-sized header whose 6 least significant bits
        (header & 0x3F) indicate how many values are encoded
        in this run. The stored length is the actual length
        minus one; run lengths are thus in the range [1..64].
        If the header byte has its most significant bit (0x80)
        set, all values in this run are zero, and no data
        follows. Otherwise, the header byte is followed by
        ((header & 0x3F) + 1) signed values. If (header &
        0x40) is clear, the delta values are stored as signed
        bytes; if (header & 0x40) is set, the delta values are
        signed 16-bit integers.
        """  # Explaining the format because the 'gvar' spec is hard to understand.
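        # Worked example (an illustrative sketch, not part of the spec text):
        # [0, 0, 3, 4, 300] would be emitted as the bytes 81 01 03 04 40 01 2C:
        # a run of two zeroes (0x81), a run of two signed bytes (0x01) holding
        # 3 and 4, and a run of one signed word (0x40) holding 300 (0x012C).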
        if bytearr is None:
            bytearr = bytearray()
        pos = 0
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            if value == 0:
                pos = TupleVariation.encodeDeltaRunAsZeroes_(deltas, pos, bytearr)
            elif -128 <= value <= 127:
                pos = TupleVariation.encodeDeltaRunAsBytes_(deltas, pos, bytearr)
            else:
                pos = TupleVariation.encodeDeltaRunAsWords_(deltas, pos, bytearr)
        return bytearr

    @staticmethod
    def encodeDeltaRunAsZeroes_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas and deltas[pos] == 0:
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(DELTAS_ARE_ZERO | 63)
            runLength -= 64
        if runLength:
            bytearr.append(DELTAS_ARE_ZERO | (runLength - 1))
        return pos

    @staticmethod
    def encodeDeltaRunAsBytes_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            if not (-128 <= value <= 127):
                break
            # Within a byte-encoded run of deltas, a single zero
            # is best stored literally as a 0x00 value. However,
            # if there are two or more zeroes in a sequence, it is
            # better to start a new run. For example, the sequence
            # of deltas [15, 15, 0, 15, 15] becomes 6 bytes
            # (04 0F 0F 00 0F 0F) when storing the zero value
            # literally, but 7 bytes (01 0F 0F 80 01 0F 0F)
            # when starting a new run.
            if value == 0 and pos + 1 < numDeltas and deltas[pos + 1] == 0:
                break
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(63)
            bytearr.extend(array.array("b", deltas[offset : offset + 64]))
            offset += 64
            runLength -= 64
        if runLength:
            bytearr.append(runLength - 1)
            bytearr.extend(array.array("b", deltas[offset:pos]))
        return pos

    @staticmethod
    def encodeDeltaRunAsWords_(deltas, offset, bytearr):
        pos = offset
        numDeltas = len(deltas)
        while pos < numDeltas:
            value = deltas[pos]
            # Within a word-encoded run of deltas, it is easiest
            # to start a new run (with a different encoding)
            # whenever we encounter a zero value. For example,
            # the sequence [0x6666, 0, 0x7777] needs 7 bytes when
            # storing the zero literally (42 66 66 00 00 77 77),
            # and equally 7 bytes when starting a new run
            # (40 66 66 80 40 77 77).
            if value == 0:
                break

            # Within a word-encoded run of deltas, a single value
            # in the range (-128..127) should be encoded literally
            # because it is more compact. For example, the sequence
            # [0x6666, 2, 0x7777] becomes 7 bytes when storing
            # the value literally (42 66 66 00 02 77 77), but 8 bytes
            # when starting a new run (40 66 66 00 02 40 77 77).
            if (
                (-128 <= value <= 127)
                and pos + 1 < numDeltas
                and (-128 <= deltas[pos + 1] <= 127)
            ):
                break
            pos += 1
        runLength = pos - offset
        while runLength >= 64:
            bytearr.append(DELTAS_ARE_WORDS | 63)
            a = array.array("h", deltas[offset : offset + 64])
            if sys.byteorder != "big":
                a.byteswap()
            bytearr.extend(a)
            offset += 64
            runLength -= 64
        if runLength:
            bytearr.append(DELTAS_ARE_WORDS | (runLength - 1))
            a = array.array("h", deltas[offset:pos])
            if sys.byteorder != "big":
                a.byteswap()
            bytearr.extend(a)
        return pos

    @staticmethod
    def decompileDeltas_(numDeltas, data, offset):
        """(numDeltas, data, offset) --> ([delta, delta, ...], newOffset)"""
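        # Illustrative round trip (a sketch, not from the spec): feeding the
        # bytes 81 01 03 04 40 01 2C from the compileDeltaValues_ example back
        # through decompileDeltas_(5, data, 0) recovers ([0, 0, 3, 4, 300], 7).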
        result = []
        pos = offset
        while len(result) < numDeltas:
            runHeader = data[pos]
            pos += 1
            numDeltasInRun = (runHeader & DELTA_RUN_COUNT_MASK) + 1
            if (runHeader & DELTAS_ARE_ZERO) != 0:
                result.extend([0] * numDeltasInRun)
            else:
                if (runHeader & DELTAS_ARE_WORDS) != 0:
                    deltas = array.array("h")
                    deltasSize = numDeltasInRun * 2
                else:
                    deltas = array.array("b")
                    deltasSize = numDeltasInRun
                deltas.frombytes(data[pos : pos + deltasSize])
                if sys.byteorder != "big":
                    deltas.byteswap()
                assert len(deltas) == numDeltasInRun
                pos += deltasSize
                result.extend(deltas)
        assert len(result) == numDeltas
        return (result, pos)

    @staticmethod
    def getTupleSize_(flags, axisCount):
        size = 4
        if (flags & EMBEDDED_PEAK_TUPLE) != 0:
            size += axisCount * 2
        if (flags & INTERMEDIATE_REGION) != 0:
            size += axisCount * 4
        return size
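    # For instance (illustration only): a TupleVariationHeader whose flags set
    # both EMBEDDED_PEAK_TUPLE and INTERMEDIATE_REGION in a 3-axis font takes
    # getTupleSize_ = 4 + 3 * 2 + 3 * 4 = 22 bytes.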
502 """ 503 firstDelta = next((c for c in self.coordinates if c is not None), None) 504 if firstDelta is None: 505 return 0 # empty or has no impact 506 if type(firstDelta) in (int, float): 507 return 1 508 if type(firstDelta) is tuple and len(firstDelta) == 2: 509 return 2 510 raise TypeError( 511 "invalid type of delta; expected (int or float) number, or " 512 "Tuple[number, number]: %r" % firstDelta 513 ) 514 515 def scaleDeltas(self, scalar): 516 if scalar == 1.0: 517 return # no change 518 coordWidth = self.getCoordWidth() 519 self.coordinates = [ 520 ( 521 None 522 if d is None 523 else d * scalar if coordWidth == 1 else (d[0] * scalar, d[1] * scalar) 524 ) 525 for d in self.coordinates 526 ] 527 528 def roundDeltas(self): 529 coordWidth = self.getCoordWidth() 530 self.coordinates = [ 531 ( 532 None 533 if d is None 534 else otRound(d) if coordWidth == 1 else (otRound(d[0]), otRound(d[1])) 535 ) 536 for d in self.coordinates 537 ] 538 539 def calcInferredDeltas(self, origCoords, endPts): 540 from fontTools.varLib.iup import iup_delta 541 542 if self.getCoordWidth() == 1: 543 raise TypeError("Only 'gvar' TupleVariation can have inferred deltas") 544 if None in self.coordinates: 545 if len(self.coordinates) != len(origCoords): 546 raise ValueError( 547 "Expected len(origCoords) == %d; found %d" 548 % (len(self.coordinates), len(origCoords)) 549 ) 550 self.coordinates = iup_delta(self.coordinates, origCoords, endPts) 551 552 def optimize(self, origCoords, endPts, tolerance=0.5, isComposite=False): 553 from fontTools.varLib.iup import iup_delta_optimize 554 555 if None in self.coordinates: 556 return # already optimized 557 558 deltaOpt = iup_delta_optimize( 559 self.coordinates, origCoords, endPts, tolerance=tolerance 560 ) 561 if None in deltaOpt: 562 if isComposite and all(d is None for d in deltaOpt): 563 # Fix for macOS composites 564 # https://github.com/fonttools/fonttools/issues/1381 565 deltaOpt = [(0, 0)] + [None] * (len(deltaOpt) - 1) 566 # Use "optimized" version only if smaller... 567 varOpt = TupleVariation(self.axes, deltaOpt) 568 569 # Shouldn't matter that this is different from fvar...? 570 axisTags = sorted(self.axes.keys()) 571 tupleData, auxData = self.compile(axisTags) 572 unoptimizedLength = len(tupleData) + len(auxData) 573 tupleData, auxData = varOpt.compile(axisTags) 574 optimizedLength = len(tupleData) + len(auxData) 575 576 if optimizedLength < unoptimizedLength: 577 self.coordinates = varOpt.coordinates 578 579 def __imul__(self, scalar): 580 self.scaleDeltas(scalar) 581 return self 582 583 def __iadd__(self, other): 584 if not isinstance(other, TupleVariation): 585 return NotImplemented 586 deltas1 = self.coordinates 587 length = len(deltas1) 588 deltas2 = other.coordinates 589 if len(deltas2) != length: 590 raise ValueError("cannot sum TupleVariation deltas with different lengths") 591 # 'None' values have different meanings in gvar vs cvar TupleVariations: 592 # within the gvar, when deltas are not provided explicitly for some points, 593 # they need to be inferred; whereas for the 'cvar' table, if deltas are not 594 # provided for some CVT values, then no adjustments are made (i.e. None == 0). 595 # Thus, we cannot sum deltas for gvar TupleVariations if they contain 596 # inferred inferred deltas (the latter need to be computed first using 597 # 'calcInferredDeltas' method), but we can treat 'None' values in cvar 598 # deltas as if they are zeros. 
        if self.getCoordWidth() == 2:
            for i, d2 in zip(range(length), deltas2):
                d1 = deltas1[i]
                try:
                    deltas1[i] = (d1[0] + d2[0], d1[1] + d2[1])
                except TypeError:
                    raise ValueError("cannot sum gvar deltas with inferred points")
        else:
            for i, d2 in zip(range(length), deltas2):
                d1 = deltas1[i]
                if d1 is not None and d2 is not None:
                    deltas1[i] = d1 + d2
                elif d1 is None and d2 is not None:
                    deltas1[i] = d2
                # elif d2 is None do nothing
        return self


def decompileSharedTuples(axisTags, sharedTupleCount, data, offset):
    result = []
    for _ in range(sharedTupleCount):
        t, offset = TupleVariation.decompileCoord_(axisTags, data, offset)
        result.append(t)
    return result


def compileSharedTuples(
    axisTags, variations, MAX_NUM_SHARED_COORDS=TUPLE_INDEX_MASK + 1
):
    coordCount = Counter()
    for var in variations:
        coord = var.compileCoord(axisTags)
        coordCount[coord] += 1
    # In python < 3.7, most_common() ordering is non-deterministic
    # so apply a sort to make sure the ordering is consistent.
    sharedCoords = sorted(
        coordCount.most_common(MAX_NUM_SHARED_COORDS),
        key=lambda item: (-item[1], item[0]),
    )
    return [c[0] for c in sharedCoords if c[1] > 1]


def compileTupleVariationStore(
    variations, pointCount, axisTags, sharedTupleIndices, useSharedPoints=True
):
    # pointCount is actually unused. Keeping for API compat.
    del pointCount
    newVariations = []
    pointDatas = []
    # Compile all points and figure out sharing if desired
    sharedPoints = None

    # Collect, count, and compile point-sets for all variation sets
    pointSetCount = defaultdict(int)
    for v in variations:
        points = v.getUsedPoints()
        if points is None:  # Empty variations
            continue
        pointSetCount[points] += 1
        newVariations.append(v)
        pointDatas.append(points)
    variations = newVariations
    del newVariations

    if not variations:
        return (0, b"", b"")

    n = len(variations[0].coordinates)
    assert all(
        len(v.coordinates) == n for v in variations
    ), "Variation sets have different sizes"

    compiledPoints = {
        pointSet: TupleVariation.compilePoints(pointSet) for pointSet in pointSetCount
    }

    tupleVariationCount = len(variations)
    tuples = []
    data = []

    if useSharedPoints:
        # Find point-set which saves most bytes.
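        # For example (illustration only): a point set whose encoding takes 5
        # bytes and is used by 4 variations saves 5 * (4 - 1) = 15 bytes when
        # stored once as shared points instead of privately in each variation.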
        def key(pn):
            pointSet = pn[0]
            count = pn[1]
            return len(compiledPoints[pointSet]) * (count - 1)

        sharedPoints = max(pointSetCount.items(), key=key)[0]

        data.append(compiledPoints[sharedPoints])
        tupleVariationCount |= TUPLES_SHARE_POINT_NUMBERS

        # b'' implies "use shared points"
        pointDatas = [
            compiledPoints[points] if points != sharedPoints else b""
            for points in pointDatas
        ]

    for v, p in zip(variations, pointDatas):
        thisTuple, thisData = v.compile(axisTags, sharedTupleIndices, pointData=p)

        tuples.append(thisTuple)
        data.append(thisData)

    tuples = b"".join(tuples)
    data = b"".join(data)
    return tupleVariationCount, tuples, data


def decompileTupleVariationStore(
    tableTag,
    axisTags,
    tupleVariationCount,
    pointCount,
    sharedTuples,
    data,
    pos,
    dataPos,
):
    numAxes = len(axisTags)
    result = []
    if (tupleVariationCount & TUPLES_SHARE_POINT_NUMBERS) != 0:
        sharedPoints, dataPos = TupleVariation.decompilePoints_(
            pointCount, data, dataPos, tableTag
        )
    else:
        sharedPoints = []
    for _ in range(tupleVariationCount & TUPLE_COUNT_MASK):
        dataSize, flags = struct.unpack(">HH", data[pos : pos + 4])
        tupleSize = TupleVariation.getTupleSize_(flags, numAxes)
        tupleData = data[pos : pos + tupleSize]
        pointDeltaData = data[dataPos : dataPos + dataSize]
        result.append(
            decompileTupleVariation_(
                pointCount,
                sharedTuples,
                sharedPoints,
                tableTag,
                axisTags,
                tupleData,
                pointDeltaData,
            )
        )
        pos += tupleSize
        dataPos += dataSize
    return result


def decompileTupleVariation_(
    pointCount, sharedTuples, sharedPoints, tableTag, axisTags, data, tupleData
):
    assert tableTag in ("cvar", "gvar"), tableTag
    flags = struct.unpack(">H", data[2:4])[0]
    pos = 4
    if (flags & EMBEDDED_PEAK_TUPLE) == 0:
        peak = sharedTuples[flags & TUPLE_INDEX_MASK]
    else:
        peak, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
    if (flags & INTERMEDIATE_REGION) != 0:
        start, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
        end, pos = TupleVariation.decompileCoord_(axisTags, data, pos)
    else:
        start, end = inferRegion_(peak)
    axes = {}
    for axis in axisTags:
        region = start[axis], peak[axis], end[axis]
        if region != (0.0, 0.0, 0.0):
            axes[axis] = region
    pos = 0
    if (flags & PRIVATE_POINT_NUMBERS) != 0:
        points, pos = TupleVariation.decompilePoints_(
            pointCount, tupleData, pos, tableTag
        )
    else:
        points = sharedPoints

    deltas = [None] * pointCount

    if tableTag == "cvar":
        deltas_cvt, pos = TupleVariation.decompileDeltas_(len(points), tupleData, pos)
        for p, delta in zip(points, deltas_cvt):
            if 0 <= p < pointCount:
                deltas[p] = delta

    elif tableTag == "gvar":
        deltas_x, pos = TupleVariation.decompileDeltas_(len(points), tupleData, pos)
        deltas_y, pos = TupleVariation.decompileDeltas_(len(points), tupleData, pos)
        for p, x, y in zip(points, deltas_x, deltas_y):
            if 0 <= p < pointCount:
                deltas[p] = (x, y)

    return TupleVariation(axes, deltas)


def inferRegion_(peak):
    """Infer start and end for a (non-intermediate) region

    This helper function computes the applicability region for
    variation tuples whose INTERMEDIATE_REGION flag is not set in the
    TupleVariationHeader structure. Variation tuples apply only to
    certain regions of the variation space; outside that region, the
    tuple has no effect. To make the binary encoding more compact,
    TupleVariationHeaders can omit the intermediateStartTuple and
    intermediateEndTuple fields.
    """
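    # For example (illustration only): a peak of {"wght": 0.7, "wdth": -0.3}
    # infers start = {"wght": 0.0, "wdth": -0.3} and
    # end = {"wght": 0.7, "wdth": 0.0}.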
    start, end = {}, {}
    for axis, value in peak.items():
        start[axis] = min(value, 0.0)  # -0.3 --> -0.3; 0.7 --> 0.0
        end[axis] = max(value, 0.0)  # -0.3 --> 0.0; 0.7 --> 0.7
    return (start, end)