1 /* 2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc. 3 * Copyright © 2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 24 * 25 * Red Hat Author(s): Behdad Esfahbod 26 * Google Author(s): Behdad Esfahbod 27 */ 28 29 #ifndef HB_OPEN_TYPE_HH 30 #define HB_OPEN_TYPE_HH 31 32 #include "hb.hh" 33 #include "hb-blob.hh" 34 #include "hb-face.hh" 35 #include "hb-machinery.hh" 36 #include "hb-meta.hh" 37 #include "hb-subset.hh" 38 39 40 namespace OT { 41 42 43 /* 44 * 45 * The OpenType Font File: Data Types 46 */ 47 48 49 /* "The following data types are used in the OpenType font file. 
* All OpenType fonts use Motorola-style byte ordering (Big Endian):" */

/*
 * Int types
 */

/* Integer types in big-endian order and no alignment requirement.
 * Type is the in-memory C type; Size may be smaller than sizeof (Type)
 * for packed representations such as HBUINT24. */
template <typename Type,
	  unsigned int Size = sizeof (Type)>
struct IntType
{
  typedef Type type;

  IntType () = default;
  explicit constexpr IntType (Type V) : v {V} {}
  IntType& operator = (Type i) { v = i; return *this; }
  /* For reason we define cast out operator for signed/unsigned, instead of Type, see:
   * https://github.com/harfbuzz/harfbuzz/pull/2875/commits/09836013995cab2b9f07577a179ad7b024130467 */
  operator typename std::conditional<std::is_signed<Type>::value, signed, unsigned>::type () const { return v; }

  bool operator == (const IntType &o) const { return (Type) v == (Type) o.v; }
  bool operator != (const IntType &o) const { return !(*this == o); }

  IntType& operator += (unsigned count) { *this = *this + count; return *this; }
  IntType& operator -= (unsigned count) { *this = *this - count; return *this; }
  IntType& operator ++ () { *this += 1; return *this; }
  IntType& operator -- () { *this -= 1; return *this; }
  IntType operator ++ (int) { IntType c (*this); ++*this; return c; }
  IntType operator -- (int) { IntType c (*this); --*this; return c; }

  /* qsort-style comparers.  Note the reversed operand order in the body:
   * they delegate to the member cmp() with arguments swapped. */
  HB_INTERNAL static int cmp (const IntType *a, const IntType *b)
  { return b->cmp (*a); }
  HB_INTERNAL static int cmp (const void *a, const void *b)
  {
    IntType *pa = (IntType *) a;
    IntType *pb = (IntType *) b;

    return pb->cmp (*pa);
  }
  /* Fast path: both operand types are narrower than int, so plain
   * int subtraction cannot overflow. */
  template <typename Type2,
	    hb_enable_if (std::is_integral<Type2>::value &&
			  sizeof (Type2) < sizeof (int) &&
			  sizeof (Type) < sizeof (int))>
  int cmp (Type2 a) const
  {
    Type b = v;
    return (int) a - (int) b;
  }
  /* General path: three-way compare without subtraction. */
  template <typename Type2,
	    hb_enable_if (hb_is_convertible (Type2, Type))>
  int cmp (Type2 a) const
  {
    Type b = v;
    return a < b ? -1 : a == b ? 0 : +1;
  }
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }
  protected:
  BEInt<Type, Size> v;	/* Big-endian storage; safe to access unaligned. */
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef IntType<uint8_t>  HBUINT8;	/* 8-bit unsigned integer. */
typedef IntType<int8_t>   HBINT8;	/* 8-bit signed integer. */
typedef IntType<uint16_t> HBUINT16;	/* 16-bit unsigned integer. */
typedef IntType<int16_t>  HBINT16;	/* 16-bit signed integer. */
typedef IntType<uint32_t> HBUINT32;	/* 32-bit unsigned integer. */
typedef IntType<int32_t>  HBINT32;	/* 32-bit signed integer. */
/* Note: we cannot define a signed HBINT24 because there's no corresponding C type.
 * Works for unsigned, but not signed, since we rely on compiler for sign-extension. */
typedef IntType<uint32_t, 3> HBUINT24;	/* 24-bit unsigned integer. */

/* 15-bit unsigned number; top bit used for extension. */
struct HBUINT15 : HBUINT16
{
  /* TODO Flesh out; actually mask top bit. */
  HBUINT15& operator = (uint16_t i ) { HBUINT16::operator= (i); return *this; }
  public:
  DEFINE_SIZE_STATIC (2);
};

/* 32-bit unsigned integer with variable encoding.
*/
struct HBUINT32VAR
{
  /* Encoded length in bytes, decoded from the prefix bits of the lead byte. */
  unsigned get_size () const
  {
    unsigned b0 = v[0];
    if (b0 < 0x80)
      return 1;
    else if (b0 < 0xC0)
      return 2;
    else if (b0 < 0xE0)
      return 3;
    else if (b0 < 0xF0)
      return 4;
    else
      return 5;
  }

  /* Number of bytes needed to encode value v (7/14/21/28 payload bits,
   * then a full 5-byte form). */
  static unsigned get_size (uint32_t v)
  {
    if (v < 0x80)
      return 1;
    else if (v < 0x4000)
      return 2;
    else if (v < 0x200000)
      return 3;
    else if (v < 0x10000000)
      return 4;
    else
      return 5;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* First make sure the lead byte is readable, then check the full
     * encoded length it implies. */
    return_trace (c->check_range (v, 1) &&
		  hb_barrier () &&
		  c->check_range (v, get_size ()));
  }

  /* Decode: strip the length-prefix bits from the lead byte and
   * assemble the remaining big-endian payload bytes. */
  operator uint32_t () const
  {
    unsigned b0 = v[0];
    if (b0 < 0x80)
      return b0;
    else if (b0 < 0xC0)
      return ((b0 & 0x3F) << 8) | v[1];
    else if (b0 < 0xE0)
      return ((b0 & 0x1F) << 16) | (v[1] << 8) | v[2];
    else if (b0 < 0xF0)
      return ((b0 & 0x0F) << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
    else
      return (v[1] << 24) | (v[2] << 16) | (v[3] << 8) | v[4];
  }

  static bool serialize (hb_serialize_context_t *c, uint32_t v)
  {
    unsigned len = get_size (v);

    unsigned char *buf = c->allocate_size<unsigned char> (len, false);
    if (unlikely (!buf))
      return false;

    /* Write the value big-endian, filling the buffer back to front. */
    unsigned char *p = buf + len;
    for (unsigned i = 0; i < len; i++)
    {
      *--p = v & 0xFF;
      v >>= 8;
    }

    /* Stamp the length-prefix bits onto the lead byte
     * (len 2..5 -> 0x80/0xC0/0xE0/0xF0). */
    if (len > 1)
      buf[0] |= ((1 << (len - 1)) - 1) << (9 - len);

    return true;
  }

  protected:
  unsigned char v[5];

  public:
  DEFINE_SIZE_MIN (1);
};

/* 16-bit signed integer (HBINT16) that describes a quantity in FUnits. */
typedef HBINT16 FWORD;

/* 32-bit signed integer (HBINT32) that describes a quantity in FUnits.
*/
typedef HBINT32 FWORD32;

/* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
typedef HBUINT16 UFWORD;

/* Fixed-point number stored in integer base type Type, with
 * `fraction_bits` low bits of fraction. */
template <typename Type, unsigned fraction_bits>
struct HBFixed : Type
{
  static constexpr float shift = (float) (1 << fraction_bits);
  static_assert (Type::static_size * 8 > fraction_bits, "");

  /* Disable the integer cast-out operators inherited from IntType;
   * callers must use to_int()/to_float() explicitly. */
  operator signed () const = delete;
  operator unsigned () const = delete;
  explicit operator float () const { return to_float (); }
  typename Type::type to_int () const { return Type::v; }
  void set_int (typename Type::type i ) { Type::v = i; }
  float to_float (float offset = 0) const  { return ((int32_t) Type::v + offset) / shift; }
  void set_float (float f) { Type::v = roundf (f * shift); }
  public:
  DEFINE_SIZE_STATIC (Type::static_size);
};

/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
using F2DOT14 = HBFixed<HBINT16, 14>;
using F4DOT12 = HBFixed<HBINT16, 12>;
using F6DOT10 = HBFixed<HBINT16, 10>;

/* 32-bit signed fixed-point number (16.16). */
using F16DOT16 = HBFixed<HBINT32, 16>;

/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer.
*/
struct LONGDATETIME
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }
  protected:
  /* Stored as two 32-bit halves since there is no big-endian 64-bit IntType. */
  HBINT32 major;
  HBUINT32 minor;
  public:
  DEFINE_SIZE_STATIC (8);
};

/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : HBUINT32
{
  Tag& operator = (hb_tag_t i) { HBUINT32::operator= (i); return *this; }
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  operator const char* () const { return reinterpret_cast<const char *> (this); }
  operator char* ()             { return reinterpret_cast<char *> (this); }
  public:
  DEFINE_SIZE_STATIC (4);
};

/* Glyph index number, same as uint16 (length = 16 bits) */
struct HBGlyphID16 : HBUINT16
{
  HBGlyphID16& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
/* Glyph index number, 24-bit. */
struct HBGlyphID24 : HBUINT24
{
  HBGlyphID24& operator = (uint32_t i) { HBUINT24::operator= (i); return *this; }
};

/* Script/language-system/feature index */
struct Index : HBUINT16 {
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu;
  Index& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, Index);

typedef Index NameID;

/* Variation-store index.  NO_VARIATION is a sentinel meaning "no delta". */
struct VarIdx : HBUINT32 {
  static constexpr unsigned NO_VARIATION = 0xFFFFFFFFu;
  static_assert (NO_VARIATION == HB_OT_LAYOUT_NO_VARIATIONS_INDEX, "");
  static uint32_t add (uint32_t i, unsigned short v)
  {
    /* Addition must preserve the NO_VARIATION sentinel. */
    if (i == NO_VARIATION) return i;
    return i + v;
  }
  VarIdx& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, VarIdx);

/* Offset, Null offset = 0 */
template <typename Type, bool has_null=true>
struct Offset : Type
{
  Offset& operator = (typename Type::type i) { Type::operator= (i); return *this; }

  typedef Type type;

  /* A zero offset is "null" only for nullable offset flavors. */
  bool is_null () const { return has_null && 0 == *this; }

  public:
  DEFINE_SIZE_STATIC (sizeof (Type));
};

typedef Offset<HBUINT16> Offset16;
typedef Offset<HBUINT24> Offset24;
typedef Offset<HBUINT32> Offset32;


/* CheckSum */
struct CheckSum : HBUINT32
{
  CheckSum& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }

  /* This is reference implementation from the spec. */
  static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    assert (0 == (Length & 3));	/* Length must be a multiple of four bytes. */
    const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4byte aligned and have 4byte padding at the end. */
  void set_for_data (const void *data, unsigned int length)
  { *this = CalcTableChecksum ((const HBUINT32 *) data, length); }

  public:
  DEFINE_SIZE_STATIC (4);
};


/*
 * Version Numbers
 */

template <typename FixedType=HBUINT16>
struct FixedVersion
{
  /* Pack major/minor into a single integer for easy comparison. */
  uint32_t to_int () const { return (major << (sizeof (FixedType) * 8)) + minor; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof (FixedType));
};


/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

/* Helper choosing the null/crap objects returned for a null offset:
 * nullable offsets return the shared Null/Crap pools; non-nullable
 * offsets return nullptr (a null offset is then a caller bug). */
template <typename Type, bool has_null>
struct _hb_has_null
{
  static const Type *get_null () { return nullptr; }
  static Type *get_crap ()       { return nullptr; }
};
template <typename Type>
struct _hb_has_null<Type, true>
{
  static const Type *get_null () { return &Null (Type); }
  static       Type *get_crap () { return &Crap (Type); }
};

template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct OffsetTo : Offset<OffsetType, has_null>
{
  using target_t = Type;

  // Make sure Type is not unbounded; works only for types that are fully defined at OffsetTo time.
  static_assert (has_null == false ||
		 (hb_has_null_size (Type) || !hb_has_min_size (Type)), "");

  HB_DELETE_COPY_ASSIGN (OffsetTo);
  OffsetTo () = default;

  OffsetTo& operator = (typename OffsetType::type i) { OffsetType::operator= (i); return *this; }

  /* Dereference relative to `base`. */
  const Type& operator () (const void *base) const
  {
    if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_null ();
    return StructAtOffset<const Type> (base, *this);
  }
  Type& operator () (void *base) const
  {
    if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_crap ();
    return StructAtOffset<Type> (base, *this);
  }

  /* `base+offset` and `offset+base` sugar, in const and non-const flavors. */
  template <typename Base,
	    hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
  friend const Type& operator + (const Base &base, const OffsetTo &offset) { return offset ((const void *) base); }
  template <typename Base,
	    hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
  friend const Type& operator + (const OffsetTo &offset, const Base &base) { return offset ((const void *) base); }
  template <typename Base,
	    hb_enable_if (hb_is_convertible (Base, BaseType *))>
  friend Type& operator + (Base &&base, OffsetTo &offset) { return offset ((void *) base); }
  template <typename Base,
	    hb_enable_if (hb_is_convertible (Base, BaseType *))>
  friend Type& operator + (OffsetTo &offset, Base &&base) { return offset ((void *) base); }


  /* Subset the pointed-to object and link this offset to the copy. */
  template <typename Base, typename ...Ts>
  bool serialize_subset (hb_subset_context_t *c, const OffsetTo& src,
			 const Base *src_base, Ts&&... ds)
  {
    *this = 0;
    if (src.is_null ())
      return false;

    auto *s = c->serializer;

    s->push ();

    bool ret = c->dispatch (src_base+src, std::forward<Ts> (ds)...);

    /* Non-nullable offsets keep the object even on failure; nullable
     * ones discard it and stay null. */
    if (ret || !has_null)
      s->add_link (*this, s->pop_pack ());
    else
      s->pop_discard ();

    return ret;
  }


  /* Serialize a fresh object and link this offset to it. */
  template <typename ...Ts>
  bool serialize_serialize (hb_serialize_context_t *c, Ts&&... ds)
  {
    *this = 0;

    Type* obj = c->push<Type> ();
    bool ret = obj->serialize (c, std::forward<Ts> (ds)...);

    if (ret)
      c->add_link (*this, c->pop_pack ());
    else
      c->pop_discard ();

    return ret;
  }

  /* TODO: Somehow merge this with previous function into a serialize_dispatch(). */
  /* Workaround clang bug: https://bugs.llvm.org/show_bug.cgi?id=23029
   * Can't compile: whence = hb_serialize_context_t::Head followed by Ts&&...
   */
  template <typename ...Ts>
  bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src,
		       const void *src_base, unsigned dst_bias,
		       hb_serialize_context_t::whence_t whence,
		       Ts&&... ds)
  {
    *this = 0;
    if (src.is_null ())
      return false;

    c->push ();

    bool ret = c->copy (src_base+src, std::forward<Ts> (ds)...);

    c->add_link (*this, c->pop_pack (), whence, dst_bias);

    return ret;
  }

  bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src,
		       const void *src_base, unsigned dst_bias = 0)
  { return serialize_copy (c, src, src_base, dst_bias, hb_serialize_context_t::Head); }

  bool sanitize_shallow (hb_sanitize_context_t *c, const BaseType *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    hb_barrier ();
    //if (unlikely (this->is_null ())) return_trace (true);
    /* Reject offsets that would wrap around the address space. */
    if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
    return_trace (true);
  }

  template <typename ...Ts>
#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  bool sanitize (hb_sanitize_context_t *c, const BaseType *base, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    /* If the target fails to sanitize, try neutering the offset instead
     * of failing the whole parent. */
    return_trace (sanitize_shallow (c, base) &&
		  hb_barrier () &&
		  (this->is_null () ||
		   c->dispatch (StructAtOffset<Type> (base, *this), std::forward<Ts> (ds)...) ||
		   neuter (c)));
  }

  /* Set the offset to Null */
  bool neuter (hb_sanitize_context_t *c) const
  {
    if (!has_null) return false;
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof (OffsetType));
};
/* Partial specializations.
*/
template <typename Type, typename BaseType=void, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, BaseType, has_null>;

template <typename Type, typename OffsetType, typename BaseType=void> using NNOffsetTo = OffsetTo<Type, OffsetType, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset16To = Offset16To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset24To = Offset24To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset32To = Offset32To<Type, BaseType, false>;


/*
 * Array Types
 */

/* Array without a stored length; the caller supplies the length on
 * every access. */
template <typename Type>
struct UnsizedArrayOf
{
  typedef Type item_t;
  static constexpr unsigned item_size = hb_static_size (Type);

  HB_DELETE_CREATE_COPY_ASSIGN (UnsizedArrayOf);

  /* Unchecked access: bounds are the caller's responsibility. */
  const Type& operator [] (unsigned int i) const
  {
    return arrayZ[i];
  }
  Type& operator [] (unsigned int i)
  {
    return arrayZ[i];
  }

  static unsigned int get_size (unsigned int len)
  { return len * Type::static_size; }

  template <typename T> operator T * () { return arrayZ; }
  template <typename T> operator const T * () const { return arrayZ; }
  hb_array_t<Type> as_array (unsigned int len)
  { return hb_array (arrayZ, len); }
  hb_array_t<const Type> as_array (unsigned int len) const
  { return hb_array (arrayZ, len); }

  template <typename T>
  Type &lsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
  { return *as_array (len).lsearch (x, &not_found); }
  template <typename T>
  const Type &lsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
  { return *as_array (len).lsearch (x, &not_found); }
  template <typename T>
  bool lfind (unsigned int len, const T &x, unsigned int *i = nullptr,
	      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
	      unsigned int to_store = (unsigned int) -1) const
  { return as_array (len).lfind (x, i, not_found, to_store); }

  void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
  { as_array (len).qsort (start, end); }

  bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_size (this, get_size (items_len), clear))) return_trace (false);
    return_trace (true);
  }
  template <typename Iterator,
	    hb_requires (hb_is_source_of (Iterator, Type))>
  bool serialize (hb_serialize_context_t *c, Iterator items)
  {
    TRACE_SERIALIZE (this);
    unsigned count = hb_len (items);
    if (unlikely (!serialize (c, count, false))) return_trace (false);
    /* TODO Umm. Just exhaust the iterator instead?  Being extra
     * cautious right now.. */
    for (unsigned i = 0; i < count; i++, ++items)
      arrayZ[i] = *items;
    return_trace (true);
  }

  UnsizedArrayOf* copy (hb_serialize_context_t *c, unsigned count) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!as_array (count).copy (c))) return_trace (nullptr);
    return_trace (out);
  }

  template <typename ...Ts>
  HB_ALWAYS_INLINE
  bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
    /* Trivially-copyable items need no per-item dispatch. */
    if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
    hb_barrier ();
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
	return_trace (false);
    return_trace (true);
  }

  bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_array (arrayZ, count));
  }

  public:
  Type arrayZ[HB_VAR_ARRAY];
  public:
  DEFINE_SIZE_UNBOUNDED (0);
};

/* Unsized array of offset's */
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, BaseType, has_null>>;

/* Unsized array of offsets relative to the beginning of the array itself. */
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
{
  const Type& operator [] (int i_) const
  {
    unsigned int i = (unsigned int) i_;
    const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
    if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Null (Type); /* Overflowed. */
    hb_barrier ();
    return this+*p;
  }
  Type& operator [] (int i_)
  {
    unsigned int i = (unsigned int) i_;
    const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
    if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Crap (Type); /* Overflowed. */
    hb_barrier ();
    return this+*p;
  }

  template <typename ...Ts>
  bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    /* Offsets are relative to the array itself, hence `this` as base. */
    return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
		   ::sanitize (c, count, this, std::forward<Ts> (ds)...)));
  }
};

/* An array with sorted elements.  Supports binary searching. */
template <typename Type>
struct SortedUnsizedArrayOf : UnsizedArrayOf<Type>
{
  hb_sorted_array_t<Type> as_array (unsigned int len)
  { return hb_sorted_array (this->arrayZ, len); }
  hb_sorted_array_t<const Type> as_array (unsigned int len) const
  { return hb_sorted_array (this->arrayZ, len); }
  operator hb_sorted_array_t<Type> ()             { return as_array (); }
  operator hb_sorted_array_t<const Type> () const { return as_array (); }

  template <typename T>
  Type &bsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
  { return *as_array (len).bsearch (x, &not_found); }
  template <typename T>
  const Type &bsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
  { return *as_array (len).bsearch (x, &not_found); }
  template <typename T>
  bool bfind (unsigned int len, const T &x, unsigned int *i = nullptr,
	      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
	      unsigned int to_store = (unsigned int) -1) const
  { return as_array (len).bfind (x, i, not_found, to_store); }
};


/* An array with a number of elements.
*/
template <typename Type, typename LenType>
struct ArrayOf
{
  typedef Type item_t;
  static constexpr unsigned item_size = hb_static_size (Type);

  HB_DELETE_CREATE_COPY_ASSIGN (ArrayOf);

  /* Bounds-checked access: out-of-range reads yield Null, writes Crap. */
  const Type& operator [] (int i_) const
  {
    unsigned int i = (unsigned int) i_;
    if (unlikely (i >= len)) return Null (Type);
    hb_barrier ();
    return arrayZ[i];
  }
  Type& operator [] (int i_)
  {
    unsigned int i = (unsigned int) i_;
    if (unlikely (i >= len)) return Crap (Type);
    hb_barrier ();
    return arrayZ[i];
  }

  unsigned int get_size () const
  { return len.static_size + len * Type::static_size; }

  explicit operator bool () const { return len; }

  void pop () { len--; }

  hb_array_t<      Type> as_array ()       { return hb_array (arrayZ, len); }
  hb_array_t<const Type> as_array () const { return hb_array (arrayZ, len); }

  /* Iterator. */
  typedef hb_array_t<const Type>   iter_t;
  typedef hb_array_t<      Type> writer_t;
  iter_t   iter () const { return as_array (); }
  writer_t writer ()     { return as_array (); }
  operator   iter_t () const { return iter (); }
  operator writer_t ()       { return writer (); }

  /* Faster range-based for loop. */
  const Type *begin () const { return arrayZ; }
  const Type *end () const { return arrayZ + len; }

  template <typename T>
  Type &lsearch (const T &x, Type &not_found = Crap (Type))
  { return *as_array ().lsearch (x, &not_found); }
  template <typename T>
  const Type &lsearch (const T &x, const Type &not_found = Null (Type)) const
  { return *as_array ().lsearch (x, &not_found); }
  template <typename T>
  bool lfind (const T &x, unsigned int *i = nullptr,
	      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
	      unsigned int to_store = (unsigned int) -1) const
  { return as_array ().lfind (x, i, not_found, to_store); }

  void qsort ()
  { as_array ().qsort (); }

  HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len, bool clear = true)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
    if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
    return_trace (true);
  }
  template <typename Iterator,
	    hb_requires (hb_is_source_of (Iterator, Type))>
  HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
  {
    TRACE_SERIALIZE (this);
    unsigned count = hb_len (items);
    if (unlikely (!serialize (c, count, false))) return_trace (false);
    /* TODO Umm. Just exhaust the iterator instead?  Being extra
     * cautious right now.. */
    for (unsigned i = 0; i < count; i++, ++items)
      arrayZ[i] = *items;
    return_trace (true);
  }

  /* Grow the array by one element and return a pointer to it,
   * or nullptr (with len restored) on allocation failure / len overflow. */
  Type* serialize_append (hb_serialize_context_t *c)
  {
    TRACE_SERIALIZE (this);
    len++;
    if (unlikely (!len || !c->extend (this)))
    {
      len--;
      return_trace (nullptr);
    }
    return_trace (&arrayZ[len - 1]);
  }

  ArrayOf* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!c->extend_min (out))) return_trace (nullptr);
    c->check_assign (out->len, len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
    if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
    return_trace (out);
  }

  template <typename ...Ts>
  HB_ALWAYS_INLINE
  bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);
    /* Trivially-copyable items need no per-item dispatch. */
    if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
    hb_barrier ();
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
	return_trace (false);
    return_trace (true);
  }

  bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Check len first, then the array it implies. */
    return_trace (len.sanitize (c) &&
		  hb_barrier () &&
		  c->check_array_sized (arrayZ, len, sizeof (LenType)));
  }

  public:
  LenType	len;
  Type		arrayZ[HB_VAR_ARRAY];
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
};
template <typename Type> using Array16Of = ArrayOf<Type, HBUINT16>;
template <typename Type> using Array24Of = ArrayOf<Type, HBUINT24>;
template <typename Type> using Array32Of = ArrayOf<Type, HBUINT32>;
using PString = ArrayOf<HBUINT8, HBUINT8>;

/* Array of Offset's */
template <typename Type> using Array8OfOffset24To = ArrayOf<OffsetTo<Type, HBUINT24>, HBUINT8>;
template <typename Type> using Array16OfOffset16To = ArrayOf<OffsetTo<Type, HBUINT16>, HBUINT16>;
template <typename Type> using Array16OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT16>;
template <typename Type> using Array32OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;

/* Array of offsets relative to the beginning of the array itself.
*/
template <typename Type, typename OffsetType>
struct List16OfOffsetTo : ArrayOf<OffsetTo<Type, OffsetType>, HBUINT16>
{
  /* Index, then dereference the offset relative to the list itself. */
  const Type& operator [] (int i_) const
  {
    unsigned int i = (unsigned int) i_;
    if (unlikely (i >= this->len)) return Null (Type);
    hb_barrier ();
    return this+this->arrayZ[i];
  }
  const Type& operator [] (int i_)
  {
    unsigned int i = (unsigned int) i_;
    if (unlikely (i >= this->len)) return Crap (Type);
    hb_barrier ();
    return this+this->arrayZ[i];
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    struct List16OfOffsetTo *out = c->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);
    unsigned int count = this->len;
    /* Subset each target; failed targets leave a null offset in `out`. */
    for (unsigned int i = 0; i < count; i++)
      out->arrayZ[i].serialize_subset (c, this->arrayZ[i], this, out);
    return_trace (true);
  }

  template <typename ...Ts>
  bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    /* Offsets are relative to the list itself, hence `this` as base. */
    return_trace ((Array16Of<OffsetTo<Type, OffsetType>>::sanitize (c, this, std::forward<Ts> (ds)...)));
  }
};

template <typename Type>
using List16OfOffset16To = List16OfOffsetTo<Type, HBUINT16>;

/* An array starting at second element.
*/ 890 template <typename Type, typename LenType> 891 struct HeadlessArrayOf 892 { 893 static constexpr unsigned item_size = Type::static_size; 894 895 HB_DELETE_CREATE_COPY_ASSIGN (HeadlessArrayOf); 896 operator []OT::HeadlessArrayOf897 const Type& operator [] (int i_) const 898 { 899 unsigned int i = (unsigned int) i_; 900 if (unlikely (i >= lenP1 || !i)) return Null (Type); 901 hb_barrier (); 902 return arrayZ[i-1]; 903 } operator []OT::HeadlessArrayOf904 Type& operator [] (int i_) 905 { 906 unsigned int i = (unsigned int) i_; 907 if (unlikely (i >= lenP1 || !i)) return Crap (Type); 908 hb_barrier (); 909 return arrayZ[i-1]; 910 } get_sizeOT::HeadlessArrayOf911 unsigned int get_size () const 912 { return lenP1.static_size + get_length () * Type::static_size; } 913 get_lengthOT::HeadlessArrayOf914 unsigned get_length () const { return lenP1 ? lenP1 - 1 : 0; } 915 as_arrayOT::HeadlessArrayOf916 hb_array_t< Type> as_array () { return hb_array (arrayZ, get_length ()); } as_arrayOT::HeadlessArrayOf917 hb_array_t<const Type> as_array () const { return hb_array (arrayZ, get_length ()); } 918 919 /* Iterator. */ 920 typedef hb_array_t<const Type> iter_t; 921 typedef hb_array_t< Type> writer_t; iterOT::HeadlessArrayOf922 iter_t iter () const { return as_array (); } writerOT::HeadlessArrayOf923 writer_t writer () { return as_array (); } operator iter_tOT::HeadlessArrayOf924 operator iter_t () const { return iter (); } operator writer_tOT::HeadlessArrayOf925 operator writer_t () { return writer (); } 926 927 /* Faster range-based for loop. 
*/ beginOT::HeadlessArrayOf928 const Type *begin () const { return arrayZ; } endOT::HeadlessArrayOf929 const Type *end () const { return arrayZ + get_length (); } 930 serializeOT::HeadlessArrayOf931 HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true) 932 { 933 TRACE_SERIALIZE (this); 934 if (unlikely (!c->extend_min (this))) return_trace (false); 935 c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW); 936 if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false); 937 return_trace (true); 938 } 939 template <typename Iterator, 940 hb_requires (hb_is_source_of (Iterator, Type))> serializeOT::HeadlessArrayOf941 HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items) 942 { 943 TRACE_SERIALIZE (this); 944 unsigned count = hb_len (items); 945 if (unlikely (!serialize (c, count, false))) return_trace (false); 946 /* TODO Umm. Just exhaust the iterator instead? Being extra 947 * cautious right now.. */ 948 for (unsigned i = 0; i < count; i++, ++items) 949 arrayZ[i] = *items; 950 return_trace (true); 951 } 952 953 template <typename ...Ts> 954 HB_ALWAYS_INLINE sanitizeOT::HeadlessArrayOf955 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const 956 { 957 TRACE_SANITIZE (this); 958 if (unlikely (!sanitize_shallow (c))) return_trace (false); 959 if (!sizeof... 
(Ts) && hb_is_trivially_copyable(Type)) return_trace (true); 960 hb_barrier (); 961 unsigned int count = get_length (); 962 for (unsigned int i = 0; i < count; i++) 963 if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...))) 964 return_trace (false); 965 return_trace (true); 966 } 967 968 private: sanitize_shallowOT::HeadlessArrayOf969 bool sanitize_shallow (hb_sanitize_context_t *c) const 970 { 971 TRACE_SANITIZE (this); 972 return_trace (lenP1.sanitize (c) && 973 hb_barrier () && 974 (!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType)))); 975 } 976 977 public: 978 LenType lenP1; 979 Type arrayZ[HB_VAR_ARRAY]; 980 public: 981 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); 982 }; 983 template <typename Type> using HeadlessArray16Of = HeadlessArrayOf<Type, HBUINT16>; 984 985 /* An array storing length-1. */ 986 template <typename Type, typename LenType=HBUINT16> 987 struct ArrayOfM1 988 { 989 HB_DELETE_CREATE_COPY_ASSIGN (ArrayOfM1); 990 operator []OT::ArrayOfM1991 const Type& operator [] (int i_) const 992 { 993 unsigned int i = (unsigned int) i_; 994 if (unlikely (i > lenM1)) return Null (Type); 995 hb_barrier (); 996 return arrayZ[i]; 997 } operator []OT::ArrayOfM1998 Type& operator [] (int i_) 999 { 1000 unsigned int i = (unsigned int) i_; 1001 if (unlikely (i > lenM1)) return Crap (Type); 1002 hb_barrier (); 1003 return arrayZ[i]; 1004 } get_sizeOT::ArrayOfM11005 unsigned int get_size () const 1006 { return lenM1.static_size + (lenM1 + 1) * Type::static_size; } 1007 1008 template <typename ...Ts> 1009 HB_ALWAYS_INLINE sanitizeOT::ArrayOfM11010 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const 1011 { 1012 TRACE_SANITIZE (this); 1013 if (unlikely (!sanitize_shallow (c))) return_trace (false); 1014 if (!sizeof... 
(Ts) && hb_is_trivially_copyable(Type)) return_trace (true); 1015 hb_barrier (); 1016 unsigned int count = lenM1 + 1; 1017 for (unsigned int i = 0; i < count; i++) 1018 if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...))) 1019 return_trace (false); 1020 return_trace (true); 1021 } 1022 1023 private: sanitize_shallowOT::ArrayOfM11024 bool sanitize_shallow (hb_sanitize_context_t *c) const 1025 { 1026 TRACE_SANITIZE (this); 1027 return_trace (lenM1.sanitize (c) && 1028 hb_barrier () && 1029 (c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType)))); 1030 } 1031 1032 public: 1033 LenType lenM1; 1034 Type arrayZ[HB_VAR_ARRAY]; 1035 public: 1036 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ); 1037 }; 1038 1039 /* An array with sorted elements. Supports binary searching. */ 1040 template <typename Type, typename LenType> 1041 struct SortedArrayOf : ArrayOf<Type, LenType> 1042 { as_arrayOT::SortedArrayOf1043 hb_sorted_array_t< Type> as_array () { return hb_sorted_array (this->arrayZ, this->len); } as_arrayOT::SortedArrayOf1044 hb_sorted_array_t<const Type> as_array () const { return hb_sorted_array (this->arrayZ, this->len); } 1045 1046 /* Iterator. */ 1047 typedef hb_sorted_array_t<const Type> iter_t; 1048 typedef hb_sorted_array_t< Type> writer_t; iterOT::SortedArrayOf1049 iter_t iter () const { return as_array (); } writerOT::SortedArrayOf1050 writer_t writer () { return as_array (); } operator iter_tOT::SortedArrayOf1051 operator iter_t () const { return iter (); } operator writer_tOT::SortedArrayOf1052 operator writer_t () { return writer (); } 1053 1054 /* Faster range-based for loop. 
*/ beginOT::SortedArrayOf1055 const Type *begin () const { return this->arrayZ; } endOT::SortedArrayOf1056 const Type *end () const { return this->arrayZ + this->len; } 1057 serializeOT::SortedArrayOf1058 bool serialize (hb_serialize_context_t *c, unsigned int items_len) 1059 { 1060 TRACE_SERIALIZE (this); 1061 bool ret = ArrayOf<Type, LenType>::serialize (c, items_len); 1062 return_trace (ret); 1063 } 1064 template <typename Iterator, 1065 hb_requires (hb_is_sorted_source_of (Iterator, Type))> serializeOT::SortedArrayOf1066 bool serialize (hb_serialize_context_t *c, Iterator items) 1067 { 1068 TRACE_SERIALIZE (this); 1069 bool ret = ArrayOf<Type, LenType>::serialize (c, items); 1070 return_trace (ret); 1071 } 1072 copyOT::SortedArrayOf1073 SortedArrayOf* copy (hb_serialize_context_t *c) const 1074 { 1075 TRACE_SERIALIZE (this); 1076 SortedArrayOf* out = reinterpret_cast<SortedArrayOf *> (ArrayOf<Type, LenType>::copy (c)); 1077 return_trace (out); 1078 } 1079 1080 template <typename T> bsearchOT::SortedArrayOf1081 Type &bsearch (const T &x, Type ¬_found = Crap (Type)) 1082 { return *as_array ().bsearch (x, ¬_found); } 1083 template <typename T> bsearchOT::SortedArrayOf1084 const Type &bsearch (const T &x, const Type ¬_found = Null (Type)) const 1085 { return *as_array ().bsearch (x, ¬_found); } 1086 template <typename T> bfindOT::SortedArrayOf1087 bool bfind (const T &x, unsigned int *i = nullptr, 1088 hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE, 1089 unsigned int to_store = (unsigned int) -1) const 1090 { return as_array ().bfind (x, i, not_found, to_store); } 1091 }; 1092 1093 template <typename Type> using SortedArray16Of = SortedArrayOf<Type, HBUINT16>; 1094 template <typename Type> using SortedArray24Of = SortedArrayOf<Type, HBUINT24>; 1095 template <typename Type> using SortedArray32Of = SortedArrayOf<Type, HBUINT32>; 1096 1097 /* 1098 * Binary-search arrays 1099 */ 1100 1101 template <typename LenType=HBUINT16> 1102 struct BinSearchHeader 1103 { 
operator uint32_tOT::BinSearchHeader1104 operator uint32_t () const { return len; } 1105 sanitizeOT::BinSearchHeader1106 bool sanitize (hb_sanitize_context_t *c) const 1107 { 1108 TRACE_SANITIZE (this); 1109 return_trace (c->check_struct (this)); 1110 } 1111 operator =OT::BinSearchHeader1112 BinSearchHeader& operator = (unsigned int v) 1113 { 1114 len = v; 1115 assert (len == v); 1116 entrySelector = hb_max (1u, hb_bit_storage (v)) - 1; 1117 searchRange = 16 * (1u << entrySelector); 1118 rangeShift = v * 16 > searchRange 1119 ? 16 * v - searchRange 1120 : 0; 1121 return *this; 1122 } 1123 1124 protected: 1125 LenType len; 1126 LenType searchRange; 1127 LenType entrySelector; 1128 LenType rangeShift; 1129 1130 public: 1131 DEFINE_SIZE_STATIC (8); 1132 }; 1133 1134 template <typename Type, typename LenType=HBUINT16> 1135 using BinSearchArrayOf = SortedArrayOf<Type, BinSearchHeader<LenType>>; 1136 1137 1138 struct VarSizedBinSearchHeader 1139 { 1140 sanitizeOT::VarSizedBinSearchHeader1141 bool sanitize (hb_sanitize_context_t *c) const 1142 { 1143 TRACE_SANITIZE (this); 1144 return_trace (c->check_struct (this)); 1145 } 1146 1147 HBUINT16 unitSize; /* Size of a lookup unit for this search in bytes. */ 1148 HBUINT16 nUnits; /* Number of units of the preceding size to be searched. */ 1149 HBUINT16 searchRange; /* The value of unitSize times the largest power of 2 1150 * that is less than or equal to the value of nUnits. */ 1151 HBUINT16 entrySelector; /* The log base 2 of the largest power of 2 less than 1152 * or equal to the value of nUnits. */ 1153 HBUINT16 rangeShift; /* The value of unitSize times the difference of the 1154 * value of nUnits minus the largest power of 2 less 1155 * than or equal to the value of nUnits. 
*/ 1156 public: 1157 DEFINE_SIZE_STATIC (10); 1158 }; 1159 1160 template <typename Type> 1161 struct VarSizedBinSearchArrayOf 1162 { 1163 static constexpr unsigned item_size = Type::static_size; 1164 1165 HB_DELETE_CREATE_COPY_ASSIGN (VarSizedBinSearchArrayOf); 1166 last_is_terminatorOT::VarSizedBinSearchArrayOf1167 bool last_is_terminator () const 1168 { 1169 if (unlikely (!header.nUnits)) return false; 1170 1171 /* Gah. 1172 * 1173 * "The number of termination values that need to be included is table-specific. 1174 * The value that indicates binary search termination is 0xFFFF." */ 1175 const HBUINT16 *words = &StructAtOffset<HBUINT16> (&bytesZ, (header.nUnits - 1) * header.unitSize); 1176 unsigned int count = Type::TerminationWordCount; 1177 for (unsigned int i = 0; i < count; i++) 1178 if (words[i] != 0xFFFFu) 1179 return false; 1180 return true; 1181 } 1182 operator []OT::VarSizedBinSearchArrayOf1183 const Type& operator [] (int i_) const 1184 { 1185 unsigned int i = (unsigned int) i_; 1186 if (unlikely (i >= get_length ())) return Null (Type); 1187 hb_barrier (); 1188 return StructAtOffset<Type> (&bytesZ, i * header.unitSize); 1189 } operator []OT::VarSizedBinSearchArrayOf1190 Type& operator [] (int i_) 1191 { 1192 unsigned int i = (unsigned int) i_; 1193 if (unlikely (i >= get_length ())) return Crap (Type); 1194 hb_barrier (); 1195 return StructAtOffset<Type> (&bytesZ, i * header.unitSize); 1196 } get_lengthOT::VarSizedBinSearchArrayOf1197 unsigned int get_length () const 1198 { return header.nUnits - last_is_terminator (); } get_sizeOT::VarSizedBinSearchArrayOf1199 unsigned int get_size () const 1200 { return header.static_size + header.nUnits * header.unitSize; } 1201 1202 template <typename ...Ts> 1203 HB_ALWAYS_INLINE sanitizeOT::VarSizedBinSearchArrayOf1204 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const 1205 { 1206 TRACE_SANITIZE (this); 1207 if (unlikely (!sanitize_shallow (c))) return_trace (false); 1208 if (!sizeof... 
(Ts) && hb_is_trivially_copyable(Type)) return_trace (true); 1209 hb_barrier (); 1210 unsigned int count = get_length (); 1211 for (unsigned int i = 0; i < count; i++) 1212 if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...))) 1213 return_trace (false); 1214 return_trace (true); 1215 } 1216 1217 template <typename T> bsearchOT::VarSizedBinSearchArrayOf1218 const Type *bsearch (const T &key) const 1219 { 1220 unsigned pos; 1221 return hb_bsearch_impl (&pos, 1222 key, 1223 (const void *) bytesZ, 1224 get_length (), 1225 header.unitSize, 1226 _hb_cmp_method<T, Type>) 1227 ? (const Type *) (((const char *) &bytesZ) + (pos * header.unitSize)) 1228 : nullptr; 1229 } 1230 1231 private: sanitize_shallowOT::VarSizedBinSearchArrayOf1232 bool sanitize_shallow (hb_sanitize_context_t *c) const 1233 { 1234 TRACE_SANITIZE (this); 1235 return_trace (header.sanitize (c) && 1236 hb_barrier () && 1237 Type::static_size <= header.unitSize && 1238 c->check_range (bytesZ.arrayZ, 1239 header.nUnits, 1240 header.unitSize)); 1241 } 1242 1243 protected: 1244 VarSizedBinSearchHeader header; 1245 UnsizedArrayOf<HBUINT8> bytesZ; 1246 public: 1247 DEFINE_SIZE_ARRAY (10, bytesZ); 1248 }; 1249 1250 1251 /* CFF INDEX */ 1252 1253 template <typename COUNT> 1254 struct CFFIndex 1255 { offset_array_sizeOT::CFFIndex1256 unsigned int offset_array_size () const 1257 { return offSize * (count + 1); } 1258 1259 template <typename Iterable, 1260 hb_requires (hb_is_iterable (Iterable))> serializeOT::CFFIndex1261 bool serialize (hb_serialize_context_t *c, 1262 const Iterable &iterable, 1263 const unsigned *p_data_size = nullptr, 1264 unsigned min_off_size = 0) 1265 { 1266 TRACE_SERIALIZE (this); 1267 unsigned data_size; 1268 if (p_data_size) 1269 data_size = *p_data_size; 1270 else 1271 total_size (iterable, &data_size); 1272 1273 auto it = hb_iter (iterable); 1274 if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false); 1275 unsigned char *ret = 
c->allocate_size<unsigned char> (data_size, false); 1276 if (unlikely (!ret)) return_trace (false); 1277 for (const auto &_ : +it) 1278 { 1279 unsigned len = _.length; 1280 if (!len) 1281 continue; 1282 if (len <= 1) 1283 { 1284 *ret++ = *_.arrayZ; 1285 continue; 1286 } 1287 hb_memcpy (ret, _.arrayZ, len); 1288 ret += len; 1289 } 1290 return_trace (true); 1291 } 1292 1293 template <typename Iterator, 1294 hb_requires (hb_is_iterator (Iterator))> serialize_headerOT::CFFIndex1295 bool serialize_header (hb_serialize_context_t *c, 1296 Iterator it, 1297 unsigned data_size, 1298 unsigned min_off_size = 0) 1299 { 1300 TRACE_SERIALIZE (this); 1301 1302 unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8; 1303 off_size = hb_max(min_off_size, off_size); 1304 1305 /* serialize CFFIndex header */ 1306 if (unlikely (!c->extend_min (this))) return_trace (false); 1307 this->count = hb_len (it); 1308 if (!this->count) return_trace (true); 1309 if (unlikely (!c->extend (this->offSize))) return_trace (false); 1310 this->offSize = off_size; 1311 if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false))) 1312 return_trace (false); 1313 1314 /* serialize indices */ 1315 unsigned int offset = 1; 1316 if (HB_OPTIMIZE_SIZE_VAL) 1317 { 1318 unsigned int i = 0; 1319 for (const auto &_ : +it) 1320 { 1321 set_offset_at (i++, offset); 1322 offset += hb_len_of (_); 1323 } 1324 set_offset_at (i, offset); 1325 } 1326 else 1327 switch (off_size) 1328 { 1329 case 1: 1330 { 1331 HBUINT8 *p = (HBUINT8 *) offsets; 1332 for (const auto &_ : +it) 1333 { 1334 *p++ = offset; 1335 offset += hb_len_of (_); 1336 } 1337 *p = offset; 1338 } 1339 break; 1340 case 2: 1341 { 1342 HBUINT16 *p = (HBUINT16 *) offsets; 1343 for (const auto &_ : +it) 1344 { 1345 *p++ = offset; 1346 offset += hb_len_of (_); 1347 } 1348 *p = offset; 1349 } 1350 break; 1351 case 3: 1352 { 1353 HBUINT24 *p = (HBUINT24 *) offsets; 1354 for (const auto &_ : +it) 1355 { 1356 *p++ = offset; 1357 offset += 
hb_len_of (_); 1358 } 1359 *p = offset; 1360 } 1361 break; 1362 case 4: 1363 { 1364 HBUINT32 *p = (HBUINT32 *) offsets; 1365 for (const auto &_ : +it) 1366 { 1367 *p++ = offset; 1368 offset += hb_len_of (_); 1369 } 1370 *p = offset; 1371 } 1372 break; 1373 default: 1374 break; 1375 } 1376 1377 assert (offset == data_size + 1); 1378 return_trace (true); 1379 } 1380 1381 template <typename Iterable, 1382 hb_requires (hb_is_iterable (Iterable))> total_sizeOT::CFFIndex1383 static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0) 1384 { 1385 auto it = + hb_iter (iterable); 1386 if (!it) 1387 { 1388 if (data_size) *data_size = 0; 1389 return min_size; 1390 } 1391 1392 unsigned total = 0; 1393 for (const auto &_ : +it) 1394 total += hb_len_of (_); 1395 1396 if (data_size) *data_size = total; 1397 1398 unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8; 1399 off_size = hb_max(min_off_size, off_size); 1400 1401 return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total; 1402 } 1403 set_offset_atOT::CFFIndex1404 void set_offset_at (unsigned int index, unsigned int offset) 1405 { 1406 assert (index <= count); 1407 1408 unsigned int size = offSize; 1409 const HBUINT8 *p = offsets; 1410 switch (size) 1411 { 1412 case 1: ((HBUINT8 *) p)[index] = offset; break; 1413 case 2: ((HBUINT16 *) p)[index] = offset; break; 1414 case 3: ((HBUINT24 *) p)[index] = offset; break; 1415 case 4: ((HBUINT32 *) p)[index] = offset; break; 1416 default: return; 1417 } 1418 } 1419 1420 private: offset_atOT::CFFIndex1421 unsigned int offset_at (unsigned int index) const 1422 { 1423 assert (index <= count); 1424 1425 unsigned int size = offSize; 1426 const HBUINT8 *p = offsets; 1427 switch (size) 1428 { 1429 case 1: return ((HBUINT8 *) p)[index]; 1430 case 2: return ((HBUINT16 *) p)[index]; 1431 case 3: return ((HBUINT24 *) p)[index]; 1432 case 4: return ((HBUINT32 *) p)[index]; 1433 default: return 0; 1434 } 1435 } 1436 
data_baseOT::CFFIndex1437 const unsigned char *data_base () const 1438 { return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); } 1439 public: 1440 operator []OT::CFFIndex1441 hb_ubytes_t operator [] (unsigned int index) const 1442 { 1443 if (unlikely (index >= count)) return hb_ubytes_t (); 1444 hb_barrier (); 1445 unsigned offset0 = offset_at (index); 1446 unsigned offset1 = offset_at (index + 1); 1447 if (unlikely (offset1 < offset0 || offset1 > offset_at (count))) 1448 return hb_ubytes_t (); 1449 return hb_ubytes_t (data_base () + offset0, offset1 - offset0); 1450 } 1451 get_sizeOT::CFFIndex1452 unsigned int get_size () const 1453 { 1454 if (count) 1455 return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1); 1456 return min_size; /* empty CFFIndex contains count only */ 1457 } 1458 sanitizeOT::CFFIndex1459 bool sanitize (hb_sanitize_context_t *c) const 1460 { 1461 TRACE_SANITIZE (this); 1462 return_trace (likely (c->check_struct (this) && 1463 hb_barrier () && 1464 (count == 0 || /* empty INDEX */ 1465 (count < count + 1u && 1466 c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 && 1467 c->check_array (offsets, offSize, count + 1u) && 1468 c->check_range (data_base (), offset_at (count)))))); 1469 } 1470 1471 public: 1472 COUNT count; /* Number of object data. Note there are (count+1) offsets */ 1473 private: 1474 HBUINT8 offSize; /* The byte size of each offset in the offsets array. */ 1475 HBUINT8 offsets[HB_VAR_ARRAY]; 1476 /* The array of (count + 1) offsets into objects array (1-base). 
*/ 1477 /* HBUINT8 data[HB_VAR_ARRAY]; Object data */ 1478 public: 1479 DEFINE_SIZE_MIN (COUNT::static_size); 1480 }; 1481 typedef CFFIndex<HBUINT16> CFF1Index; 1482 typedef CFFIndex<HBUINT32> CFF2Index; 1483 1484 1485 /* TupleValues */ 1486 struct TupleValues 1487 { 1488 enum packed_value_flag_t 1489 { 1490 VALUES_ARE_ZEROS = 0x80, 1491 VALUES_ARE_BYTES = 0x00, 1492 VALUES_ARE_WORDS = 0x40, 1493 VALUES_ARE_LONGS = 0xC0, 1494 VALUES_SIZE_MASK = 0xC0, 1495 VALUE_RUN_COUNT_MASK = 0x3F 1496 }; 1497 compileOT::TupleValues1498 static unsigned compile (hb_array_t<const int> values, /* IN */ 1499 hb_array_t<unsigned char> encoded_bytes /* OUT */) 1500 { 1501 unsigned num_values = values.length; 1502 unsigned encoded_len = 0; 1503 unsigned i = 0; 1504 while (i < num_values) 1505 { 1506 int val = values.arrayZ[i]; 1507 if (val == 0) 1508 encoded_len += encode_value_run_as_zeroes (i, encoded_bytes.sub_array (encoded_len), values); 1509 else if (val >= -128 && val <= 127) 1510 encoded_len += encode_value_run_as_bytes (i, encoded_bytes.sub_array (encoded_len), values); 1511 else if (val >= -32768 && val <= 32767) 1512 encoded_len += encode_value_run_as_words (i, encoded_bytes.sub_array (encoded_len), values); 1513 else 1514 encoded_len += encode_value_run_as_longs (i, encoded_bytes.sub_array (encoded_len), values); 1515 } 1516 return encoded_len; 1517 } 1518 encode_value_run_as_zeroesOT::TupleValues1519 static unsigned encode_value_run_as_zeroes (unsigned& i, 1520 hb_array_t<unsigned char> encoded_bytes, 1521 hb_array_t<const int> values) 1522 { 1523 unsigned num_values = values.length; 1524 unsigned run_length = 0; 1525 auto it = encoded_bytes.iter (); 1526 unsigned encoded_len = 0; 1527 while (i < num_values && values.arrayZ[i] == 0) 1528 { 1529 i++; 1530 run_length++; 1531 } 1532 1533 while (run_length >= 64) 1534 { 1535 *it++ = char (VALUES_ARE_ZEROS | 63); 1536 run_length -= 64; 1537 encoded_len++; 1538 } 1539 1540 if (run_length) 1541 { 1542 *it++ = char 
(VALUES_ARE_ZEROS | (run_length - 1)); 1543 encoded_len++; 1544 } 1545 return encoded_len; 1546 } 1547 encode_value_run_as_bytesOT::TupleValues1548 static unsigned encode_value_run_as_bytes (unsigned &i, 1549 hb_array_t<unsigned char> encoded_bytes, 1550 hb_array_t<const int> values) 1551 { 1552 unsigned start = i; 1553 unsigned num_values = values.length; 1554 while (i < num_values) 1555 { 1556 int val = values.arrayZ[i]; 1557 if (val > 127 || val < -128) 1558 break; 1559 1560 /* from fonttools: if there're 2 or more zeros in a sequence, 1561 * it is better to start a new run to save bytes. */ 1562 if (val == 0 && i + 1 < num_values && values.arrayZ[i+1] == 0) 1563 break; 1564 1565 i++; 1566 } 1567 unsigned run_length = i - start; 1568 1569 unsigned encoded_len = 0; 1570 auto it = encoded_bytes.iter (); 1571 1572 while (run_length >= 64) 1573 { 1574 *it++ = (VALUES_ARE_BYTES | 63); 1575 encoded_len++; 1576 1577 for (unsigned j = 0; j < 64; j++) 1578 { 1579 *it++ = static_cast<char> (values.arrayZ[start + j]); 1580 encoded_len++; 1581 } 1582 1583 start += 64; 1584 run_length -= 64; 1585 } 1586 1587 if (run_length) 1588 { 1589 *it++ = (VALUES_ARE_BYTES | (run_length - 1)); 1590 encoded_len++; 1591 1592 while (start < i) 1593 { 1594 *it++ = static_cast<char> (values.arrayZ[start++]); 1595 encoded_len++; 1596 } 1597 } 1598 1599 return encoded_len; 1600 } 1601 encode_value_run_as_wordsOT::TupleValues1602 static unsigned encode_value_run_as_words (unsigned &i, 1603 hb_array_t<unsigned char> encoded_bytes, 1604 hb_array_t<const int> values) 1605 { 1606 unsigned start = i; 1607 unsigned num_values = values.length; 1608 while (i < num_values) 1609 { 1610 int val = values.arrayZ[i]; 1611 1612 /* start a new run for a single zero value*/ 1613 if (val == 0) break; 1614 1615 /* from fonttools: continue word-encoded run if there's only one 1616 * single value in the range [-128, 127] because it is more compact. 1617 * Only start a new run when there're 2 continuous such values. 
*/ 1618 if (val >= -128 && val <= 127 && 1619 i + 1 < num_values && 1620 values.arrayZ[i+1] >= -128 && values.arrayZ[i+1] <= 127) 1621 break; 1622 1623 i++; 1624 } 1625 1626 unsigned run_length = i - start; 1627 auto it = encoded_bytes.iter (); 1628 unsigned encoded_len = 0; 1629 while (run_length >= 64) 1630 { 1631 *it++ = (VALUES_ARE_WORDS | 63); 1632 encoded_len++; 1633 1634 for (unsigned j = 0; j < 64; j++) 1635 { 1636 int16_t value_val = values.arrayZ[start + j]; 1637 *it++ = static_cast<char> (value_val >> 8); 1638 *it++ = static_cast<char> (value_val & 0xFF); 1639 1640 encoded_len += 2; 1641 } 1642 1643 start += 64; 1644 run_length -= 64; 1645 } 1646 1647 if (run_length) 1648 { 1649 *it++ = (VALUES_ARE_WORDS | (run_length - 1)); 1650 encoded_len++; 1651 while (start < i) 1652 { 1653 int16_t value_val = values.arrayZ[start++]; 1654 *it++ = static_cast<char> (value_val >> 8); 1655 *it++ = static_cast<char> (value_val & 0xFF); 1656 1657 encoded_len += 2; 1658 } 1659 } 1660 return encoded_len; 1661 } 1662 encode_value_run_as_longsOT::TupleValues1663 static unsigned encode_value_run_as_longs (unsigned &i, 1664 hb_array_t<unsigned char> encoded_bytes, 1665 hb_array_t<const int> values) 1666 { 1667 unsigned start = i; 1668 unsigned num_values = values.length; 1669 while (i < num_values) 1670 { 1671 int val = values.arrayZ[i]; 1672 1673 if (val >= -32768 && val <= 32767) 1674 break; 1675 1676 i++; 1677 } 1678 1679 unsigned run_length = i - start; 1680 auto it = encoded_bytes.iter (); 1681 unsigned encoded_len = 0; 1682 while (run_length >= 64) 1683 { 1684 *it++ = (VALUES_ARE_LONGS | 63); 1685 encoded_len++; 1686 1687 for (unsigned j = 0; j < 64; j++) 1688 { 1689 int32_t value_val = values.arrayZ[start + j]; 1690 *it++ = static_cast<char> (value_val >> 24); 1691 *it++ = static_cast<char> (value_val >> 16); 1692 *it++ = static_cast<char> (value_val >> 8); 1693 *it++ = static_cast<char> (value_val & 0xFF); 1694 1695 encoded_len += 4; 1696 } 1697 1698 start += 64; 1699 
run_length -= 64; 1700 } 1701 1702 if (run_length) 1703 { 1704 *it++ = (VALUES_ARE_LONGS | (run_length - 1)); 1705 encoded_len++; 1706 while (start < i) 1707 { 1708 int32_t value_val = values.arrayZ[start++]; 1709 *it++ = static_cast<char> (value_val >> 24); 1710 *it++ = static_cast<char> (value_val >> 16); 1711 *it++ = static_cast<char> (value_val >> 8); 1712 *it++ = static_cast<char> (value_val & 0xFF); 1713 1714 encoded_len += 4; 1715 } 1716 } 1717 return encoded_len; 1718 } 1719 1720 template <typename T> decompileOT::TupleValues1721 static bool decompile (const HBUINT8 *&p /* IN/OUT */, 1722 hb_vector_t<T> &values /* IN/OUT */, 1723 const HBUINT8 *end, 1724 bool consume_all = false) 1725 { 1726 unsigned i = 0; 1727 unsigned count = consume_all ? UINT_MAX : values.length; 1728 if (consume_all) 1729 values.alloc ((end - p) / 2); 1730 while (i < count) 1731 { 1732 if (unlikely (p + 1 > end)) return consume_all; 1733 unsigned control = *p++; 1734 unsigned run_count = (control & VALUE_RUN_COUNT_MASK) + 1; 1735 if (consume_all) 1736 { 1737 if (unlikely (!values.resize (values.length + run_count, false))) 1738 return false; 1739 } 1740 unsigned stop = i + run_count; 1741 if (unlikely (stop > count)) return false; 1742 if ((control & VALUES_SIZE_MASK) == VALUES_ARE_ZEROS) 1743 { 1744 for (; i < stop; i++) 1745 values.arrayZ[i] = 0; 1746 } 1747 else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_WORDS) 1748 { 1749 if (unlikely (p + run_count * HBINT16::static_size > end)) return false; 1750 for (; i < stop; i++) 1751 { 1752 values.arrayZ[i] = * (const HBINT16 *) p; 1753 p += HBINT16::static_size; 1754 } 1755 } 1756 else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_LONGS) 1757 { 1758 if (unlikely (p + run_count * HBINT32::static_size > end)) return false; 1759 for (; i < stop; i++) 1760 { 1761 values.arrayZ[i] = * (const HBINT32 *) p; 1762 p += HBINT32::static_size; 1763 } 1764 } 1765 else if ((control & VALUES_SIZE_MASK) == VALUES_ARE_BYTES) 1766 { 1767 if (unlikely 
(p + run_count > end)) return false; 1768 for (; i < stop; i++) 1769 { 1770 values.arrayZ[i] = * (const HBINT8 *) p++; 1771 } 1772 } 1773 } 1774 return true; 1775 } 1776 1777 struct iter_t : hb_iter_with_fallback_t<iter_t, int> 1778 { iter_tOT::TupleValues::iter_t1779 iter_t (const unsigned char *p_, unsigned len_) 1780 : p (p_), end (p_ + len_) 1781 { if (ensure_run ()) read_value (); } 1782 1783 private: 1784 const unsigned char *p; 1785 const unsigned char * const end; 1786 int current_value = 0; 1787 signed run_count = 0; 1788 unsigned width = 0; 1789 ensure_runOT::TupleValues::iter_t1790 bool ensure_run () 1791 { 1792 if (likely (run_count > 0)) return true; 1793 1794 if (unlikely (p >= end)) 1795 { 1796 run_count = 0; 1797 current_value = 0; 1798 return false; 1799 } 1800 1801 unsigned control = *p++; 1802 run_count = (control & VALUE_RUN_COUNT_MASK) + 1; 1803 width = control & VALUES_SIZE_MASK; 1804 switch (width) 1805 { 1806 case VALUES_ARE_ZEROS: width = 0; break; 1807 case VALUES_ARE_BYTES: width = HBINT8::static_size; break; 1808 case VALUES_ARE_WORDS: width = HBINT16::static_size; break; 1809 case VALUES_ARE_LONGS: width = HBINT32::static_size; break; 1810 default: assert (false); 1811 } 1812 1813 if (unlikely (p + run_count * width > end)) 1814 { 1815 run_count = 0; 1816 current_value = 0; 1817 return false; 1818 } 1819 1820 return true; 1821 } read_valueOT::TupleValues::iter_t1822 void read_value () 1823 { 1824 switch (width) 1825 { 1826 case 0: current_value = 0; break; 1827 case 1: current_value = * (const HBINT8 *) p; break; 1828 case 2: current_value = * (const HBINT16 *) p; break; 1829 case 4: current_value = * (const HBINT32 *) p; break; 1830 } 1831 p += width; 1832 } 1833 1834 public: 1835 1836 typedef int __item_t__; __item__OT::TupleValues::iter_t1837 __item_t__ __item__ () const 1838 { return current_value; } 1839 __more__OT::TupleValues::iter_t1840 bool __more__ () const { return run_count || p < end; } __next__OT::TupleValues::iter_t1841 
void __next__ () 1842 { 1843 run_count--; 1844 if (unlikely (!ensure_run ())) 1845 return; 1846 read_value (); 1847 } __forward__OT::TupleValues::iter_t1848 void __forward__ (unsigned n) 1849 { 1850 if (unlikely (!ensure_run ())) 1851 return; 1852 while (n) 1853 { 1854 unsigned i = hb_min (n, (unsigned) run_count); 1855 run_count -= i; 1856 n -= i; 1857 p += (i - 1) * width; 1858 if (unlikely (!ensure_run ())) 1859 return; 1860 read_value (); 1861 } 1862 } operator !=OT::TupleValues::iter_t1863 bool operator != (const iter_t& o) const 1864 { return p != o.p || run_count != o.run_count; } __end__OT::TupleValues::iter_t1865 iter_t __end__ () const 1866 { 1867 iter_t it (end, 0); 1868 return it; 1869 } 1870 }; 1871 }; 1872 1873 struct TupleList : CFF2Index 1874 { operator []OT::TupleList1875 TupleValues::iter_t operator [] (unsigned i) const 1876 { 1877 auto bytes = CFF2Index::operator [] (i); 1878 return TupleValues::iter_t (bytes.arrayZ, bytes.length); 1879 } 1880 }; 1881 1882 1883 } /* namespace OT */ 1884 1885 1886 #endif /* HB_OPEN_TYPE_HH */ 1887