/*
 * Copyright © 2007,2008,2009 Red Hat, Inc.
 * Copyright © 2010,2012 Google, Inc.
 *
 * This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_COMMON_HH
#define HB_OT_LAYOUT_COMMON_HH

#include "hb.hh"
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
#include "hb-bimap.hh"

#include "OT/Layout/Common/Coverage.hh"
#include "OT/Layout/types.hh"

// TODO(garretrieger): cleanup these after migration.
using OT::Layout::Common::Coverage;
using OT::Layout::Common::RangeRecord;
using OT::Layout::SmallTypes;
using OT::Layout::MediumTypes;

namespace OT {

template<typename Iterator>
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
                                       Iterator it);

static bool ClassDef_remap_and_serialize (
    hb_serialize_context_t *c,
    const hb_set_t &klasses,
    bool use_class_zero,
    hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
    hb_map_t *klass_map /*IN/OUT*/);

struct hb_collect_feature_substitutes_with_var_context_t
{
  const hb_map_t *axes_index_tag_map;
  const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
  hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
  hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
  hb_set_t& catch_all_record_feature_idxes;

  // Not stored in subset_plan.
  hb_set_t *feature_indices;
  bool apply;
  bool variation_applied;
  bool universal;
  unsigned cur_record_idx;
  hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
};

struct hb_prune_langsys_context_t
{
  hb_prune_langsys_context_t (const void *table_,
                              hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
                              const hb_map_t *duplicate_feature_map_,
                              hb_set_t *new_collected_feature_indexes_)
      : table (table_),
        script_langsys_map (script_langsys_map_),
        duplicate_feature_map (duplicate_feature_map_),
        new_feature_indexes (new_collected_feature_indexes_),
        script_count (0), langsys_feature_count (0) {}

  bool visitScript ()
  { return script_count++ < HB_MAX_SCRIPTS; }

  bool visitLangsys (unsigned feature_count)
  {
    langsys_feature_count += feature_count;
    return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT;
  }

  public:
  const void *table;
  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
  const hb_map_t *duplicate_feature_map;
  hb_set_t *new_feature_indexes;

  private:
  unsigned script_count;
  unsigned langsys_feature_count;
};

struct hb_subset_layout_context_t :
  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
  const char *get_name () { return "SUBSET_LAYOUT"; }
  static return_t default_return_value () { return hb_empty_t (); }

  bool visitScript ()
  {
    return script_count++ < HB_MAX_SCRIPTS;
  }

  bool visitLangSys ()
  {
    return langsys_count++ < HB_MAX_LANGSYS;
  }

  bool visitFeatureIndex (int count)
  {
    feature_index_count += count;
    return feature_index_count < HB_MAX_FEATURE_INDICES;
  }

  bool visitLookupIndex ()
  {
    lookup_index_count++;
    return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
  }

  hb_subset_context_t *subset_context;
  const hb_tag_t table_tag;
  const hb_map_t *lookup_index_map;
  const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
  const hb_map_t *feature_index_map;
  const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
  hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
  const hb_set_t *catch_all_record_feature_idxes;
  const hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>> *feature_idx_tag_map;

  unsigned cur_script_index;
  unsigned cur_feature_var_record_idx;

  hb_subset_layout_context_t (hb_subset_context_t *c_,
                              hb_tag_t tag_) :
      subset_context (c_),
      table_tag (tag_),
      cur_script_index (0xFFFFu),
      cur_feature_var_record_idx (0u),
      script_count (0),
      langsys_count (0),
      feature_index_count (0),
      lookup_index_count (0)
  {
    if (tag_ == HB_OT_TAG_GSUB)
    {
      lookup_index_map = &c_->plan->gsub_lookups;
      script_langsys_map = &c_->plan->gsub_langsys;
      feature_index_map = &c_->plan->gsub_features;
      feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map;
      feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map;
      catch_all_record_feature_idxes = &c_->plan->gsub_old_features;
      feature_idx_tag_map = &c_->plan->gsub_old_feature_idx_tag_map;
    }
    else
    {
      lookup_index_map = &c_->plan->gpos_lookups;
      script_langsys_map = &c_->plan->gpos_langsys;
      feature_index_map = &c_->plan->gpos_features;
      feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map;
      feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map;
      catch_all_record_feature_idxes = &c_->plan->gpos_old_features;
      feature_idx_tag_map = &c_->plan->gpos_old_feature_idx_tag_map;
    }
  }

  private:
  unsigned script_count;
  unsigned langsys_count;
  unsigned feature_index_count;
  unsigned lookup_index_count;
};

struct ItemVariationStore;
struct hb_collect_variation_indices_context_t :
  hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;
  const hb_set_t *glyph_set;
  const hb_map_t *gpos_lookups;

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
                                          const hb_set_t *glyph_set_,
                                          const hb_map_t *gpos_lookups_) :
      layout_variation_indices (layout_variation_indices_),
      glyph_set (glyph_set_),
      gpos_lookups (gpos_lookups_) {}
};

template<typename OutputArray>
struct subset_offset_array_t
{
  subset_offset_array_t (hb_subset_context_t *subset_context_,
                         OutputArray& out_,
                         const void *base_) : subset_context (subset_context_),
                                              out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto snap = subset_context->serializer->snapshot ();
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    bool ret = o->serialize_subset (subset_context, offset, base);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
};

template<typename OutputArray, typename Arg>
struct subset_offset_array_arg_t
{
  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
                             OutputArray& out_,
                             const void *base_,
                             Arg &&arg_) : subset_context (subset_context_), out (out_),
                                           base (base_), arg (arg_) {}

  template <typename T>
  bool operator () (T&& offset)
  {
    auto snap = subset_context->serializer->snapshot ();
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
  Arg &&arg;
};

/*
 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
 * and discards the offset in the array if the subset operation results in an empty
 * thing.
 */
struct
{
  template<typename OutputArray>
  subset_offset_array_t<OutputArray>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
               const void *base) const
  { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

  /* Variant with one extra argument passed to serialize_subset */
  template<typename OutputArray, typename Arg>
  subset_offset_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_context_t *subset_context, OutputArray& out,
               const void *base, Arg &&arg) const
  { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
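
/* A minimal usage sketch, assuming `c` is an hb_subset_context_t* and
 * `glyphset` the retained-glyph set; this mirrors the call site in
 * Lookup::subset later in this file:
 *
 *   + hb_iter (get_subtables<TSubTable> ())
 *   | hb_filter ([&] (const Offset16To<TSubTable> &o)
 *                { return (this+o).intersects (glyphset, lookup_type); })
 *   | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
 *   ;
 *
 * Each surviving offset is serialize_subset'ed; if its target subsets to
 * nothing, the serializer snapshot is reverted and the offset is popped
 * back off the output array. */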

template<typename OutputArray>
struct subset_record_array_t
{
  subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
                         const void *base_) : subset_layout_context (c_),
                                              out (out_), base (base_) {}

  template <typename T>
  void
  operator () (T&& record)
  {
    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    bool ret = record.subset (subset_layout_context, base);
    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    else out->len++;
  }

  private:
  hb_subset_layout_context_t *subset_layout_context;
  OutputArray *out;
  const void *base;
};

template<typename OutputArray, typename Arg>
struct subset_record_array_arg_t
{
  subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_,
                             const void *base_,
                             Arg &&arg_) : subset_layout_context (c_),
                                           out (out_), base (base_), arg (arg_) {}

  template <typename T>
  void
  operator () (T&& record)
  {
    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    bool ret = record.subset (subset_layout_context, base, arg);
    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    else out->len++;
  }

  private:
  hb_subset_layout_context_t *subset_layout_context;
  OutputArray *out;
  const void *base;
  Arg &&arg;
};

/*
 * Helper to subset a RecordList/record array. Subsets each Record in the array and
 * discards the record if the subset operation returns false.
 */
struct
{
  template<typename OutputArray>
  subset_record_array_t<OutputArray>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
               const void *base) const
  { return subset_record_array_t<OutputArray> (c, out, base); }

  /* Variant with one extra argument passed to subset */
  template<typename OutputArray, typename Arg>
  subset_record_array_arg_t<OutputArray, Arg>
  operator () (hb_subset_layout_context_t *c, OutputArray* out,
               const void *base, Arg &&arg) const
  { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); }
}
HB_FUNCOBJ (subset_record_array);
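
/* A minimal usage sketch, mirroring RecordListOf<Type>::subset later in this
 * file; `l` is an hb_subset_layout_context_t* and `out` the output list:
 *
 *   + this->iter ()
 *   | hb_apply (subset_record_array (l, out, this))
 *   ;
 *
 * A record whose subset () returns false is reverted via serializer snapshot;
 * each success bumps out->len. */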


template<typename OutputArray>
struct serialize_math_record_array_t
{
  serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
                                 OutputArray& out_,
                                 const void *base_) : serialize_context (serialize_context_),
                                                      out (out_), base (base_) {}

  template <typename T>
  bool operator () (T&& record)
  {
    if (!serialize_context->copy (record, base)) return false;
    out.len++;
    return true;
  }

  private:
  hb_serialize_context_t *serialize_context;
  OutputArray &out;
  const void *base;
};

/*
 * Helper to serialize an array of MATH records.
 */
struct
{
  template<typename OutputArray>
  serialize_math_record_array_t<OutputArray>
  operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
               const void *base) const
  { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }

}
HB_FUNCOBJ (serialize_math_record_array);
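
/* A minimal usage sketch (names hypothetical), assuming `c` is an
 * hb_serialize_context_t* and `records` a MATH record array being copied
 * into `out`:
 *
 *   + records.iter ()
 *   | hb_apply (serialize_math_record_array (c, out->records, this))
 *   ;
 *
 * Each record is copied with c->copy () relative to `base`, bumping out.len
 * on success. */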

/*
 *
 * OpenType Layout Common Table Formats
 *
 */


/*
 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
 */

struct IndexArray : Array16Of<Index>
{
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  template <typename Iterator,
            hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  hb_subset_layout_context_t *l,
                  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex ()) break;

      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  unsigned int get_indexes (unsigned int start_offset,
                            unsigned int *_count /* IN/OUT */,
                            unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->as_array ().sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (as_array ());
  }
};


/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
struct FeatureParamsSize
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    hb_barrier ();

    /* This subtable has some "history", if you will. Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table! Now, that is dealt with in the
     * Feature implementation. But we still need to be able to tell junk from
     * real data. Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on [email protected] :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *   fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *          "range start" == 0 and
     *          "range end" == 0 and
     *          "menu name ID" == 0)
     *   passes check: this is the format used when there is a design size
     *   specified, but there is no recommended size range.
     *
     * Else if ("design size" < "range start" or
     *          "design size" > "range end" or
     *          "range end" <= "range start" or
     *          "menu name ID" < 256 or
     *          "menu name ID" > 32767 or
     *          menu name ID is not a name ID which is actually in the name table)
     *   fails test
     * Else
     *   passes test.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
             subfamilyNameID == 0 &&
             rangeStart == 0 &&
             rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
             designSize > rangeEnd ||
             subfamilyNameID < 256 ||
             subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }

  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  { nameids_to_retain->add (subfamilyNameID); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16      designSize;     /* Represents the design size in 720/inch
                                 * units (decipoints). The design size entry
                                 * must be non-zero. When there is a design
                                 * size but no recommended size range, the
                                 * rest of the array will consist of zeros. */
  HBUINT16      subfamilyID;    /* Has no independent meaning, but serves
                                 * as an identifier that associates fonts
                                 * in a subfamily. All fonts which share a
                                 * Preferred or Font Family name and which
                                 * differ only by size range shall have the
                                 * same subfamily value, and no fonts which
                                 * differ in weight or style shall have the
                                 * same subfamily value. If this value is
                                 * zero, the remaining fields in the array
                                 * will be ignored. */
  NameID        subfamilyNameID;/* If the preceding value is non-zero, this
                                 * value must be set in the range 256 - 32767
                                 * (inclusive). It records the value of a
                                 * field in the name table, which must
                                 * contain English-language strings encoded
                                 * in Windows Unicode and Macintosh Roman,
                                 * and may contain additional strings
                                 * localized to other scripts and languages.
                                 * Each of these strings is the name an
                                 * application should use, in combination
                                 * with the family name, to represent the
                                 * subfamily in a menu. Applications will
                                 * choose the appropriate version based on
                                 * their selection criteria. */
  HBUINT16      rangeStart;     /* Small end of the recommended usage range
                                 * (exclusive), stored in 720/inch units
                                 * (decipoints). */
  HBUINT16      rangeEnd;       /* Large end of the recommended usage range
                                 * (inclusive), stored in 720/inch units
                                 * (decipoints). */
  public:
  DEFINE_SIZE_STATIC (10);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
struct FeatureParamsStylisticSet
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Right now minorVersion is at zero. Which means, any table supports
     * the uiNameID field. */
    return_trace (c->check_struct (this));
  }

  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  { nameids_to_retain->add (uiNameID); }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16      version;        /* (set to 0): This corresponds to a “minor”
                                 * version number. Additional data may be
                                 * added to the end of this Feature Parameters
                                 * table in the future. */

  NameID        uiNameID;       /* The 'name' table name ID that specifies a
                                 * string (or strings, for multiple languages)
                                 * for a user-interface label for this
                                 * feature. The values of uiLabelNameId and
                                 * sampleTextNameId are expected to be in the
                                 * font-specific name ID range (256-32767),
                                 * though that is not a requirement in this
                                 * Feature Parameters specification. The
                                 * user-interface label for the feature can
                                 * be provided in multiple languages. An
                                 * English string should be included as a
                                 * fallback. The string should be kept to a
                                 * minimal length to fit comfortably with
                                 * different application interfaces. */
  public:
  DEFINE_SIZE_STATIC (4);
};

/* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
struct FeatureParamsCharacterVariants
{
  unsigned
  get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
  {
    if (char_count)
    {
      + characters.as_array ().sub_array (start_offset, char_count)
      | hb_sink (hb_array (chars, *char_count))
      ;
    }
    return characters.len;
  }

  unsigned get_size () const
  { return min_size + characters.len * HBUINT24::static_size; }

  void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
  {
    if (featUILableNameID) nameids_to_retain->add (featUILableNameID);
    if (featUITooltipTextNameID) nameids_to_retain->add (featUITooltipTextNameID);
    if (sampleTextNameID) nameids_to_retain->add (sampleTextNameID);

    if (!firstParamUILabelNameID || !numNamedParameters || numNamedParameters >= 0x7FFF)
      return;

    unsigned last_name_id = (unsigned) firstParamUILabelNameID + (unsigned) numNamedParameters - 1;
    nameids_to_retain->add_range (firstParamUILabelNameID, last_name_id);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  characters.sanitize (c));
  }

  HBUINT16      format;                 /* Format number is set to 0. */
  NameID        featUILableNameID;      /* The ‘name’ table name ID that
                                         * specifies a string (or strings,
                                         * for multiple languages) for a
                                         * user-interface label for this
                                         * feature. (May be NULL.) */
  NameID        featUITooltipTextNameID;/* The ‘name’ table name ID that
                                         * specifies a string (or strings,
                                         * for multiple languages) that an
                                         * application can use for tooltip
                                         * text for this feature. (May be
                                         * nullptr.) */
  NameID        sampleTextNameID;       /* The ‘name’ table name ID that
                                         * specifies sample text that
                                         * illustrates the effect of this
                                         * feature. (May be NULL.) */
  HBUINT16      numNamedParameters;     /* Number of named parameters. (May
                                         * be zero.) */
  NameID        firstParamUILabelNameID;/* The first ‘name’ table name ID
                                         * used to specify strings for
                                         * user-interface labels for the
                                         * feature parameters. (Must be zero
                                         * if numParameters is zero.) */
  Array16Of<HBUINT24>
                characters;             /* Array of the Unicode Scalar Value
                                         * of the characters for which this
                                         * feature provides glyph variants.
                                         * (May be zero.) */
  public:
  DEFINE_SIZE_ARRAY (14, characters);
};

struct FeatureParams
{
  bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return true;
#endif
    TRACE_SANITIZE (this);
    if (tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.sanitize (c));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.sanitize (c));
    return_trace (true);
  }

  void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
  {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
    return;
#endif
    if (tag == HB_TAG ('s','i','z','e'))
      return (u.size.collect_name_ids (nameids_to_retain));
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return (u.stylisticSet.collect_name_ids (nameids_to_retain));
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return (u.characterVariants.collect_name_ids (nameids_to_retain));
  }

  bool subset (hb_subset_context_t *c, const Tag* tag) const
  {
    TRACE_SUBSET (this);
    if (!tag) return_trace (false);
    if (*tag == HB_TAG ('s','i','z','e'))
      return_trace (u.size.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return_trace (u.stylisticSet.subset (c));
    if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return_trace (u.characterVariants.subset (c));
    return_trace (false);
  }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
  const FeatureParamsSize& get_size_params (hb_tag_t tag) const
  {
    if (tag == HB_TAG ('s','i','z','e'))
      return u.size;
    return Null (FeatureParamsSize);
  }
  const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
      return u.stylisticSet;
    return Null (FeatureParamsStylisticSet);
  }
  const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
  {
    if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
      return u.characterVariants;
    return Null (FeatureParamsCharacterVariants);
  }
#endif

  private:
  union {
    FeatureParamsSize              size;
    FeatureParamsStylisticSet      stylisticSet;
    FeatureParamsCharacterVariants characterVariants;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};

struct Record_sanitize_closure_t {
  hb_tag_t tag;
  const void *list_base;
};

struct Feature
{
  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  unsigned int get_lookup_indexes (unsigned int start_index,
                                   unsigned int *lookup_count /* IN/OUT */,
                                   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
  { return this+featureParams; }

  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
  { return lookupIndex.intersects (lookup_indexes); }

  void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
  {
    if (featureParams)
      get_feature_params ().collect_name_ids (tag, nameids_to_retain);
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               const Tag *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->featureParams.serialize_subset (c, featureParams, this, tag);

    auto it =
    + hb_iter (lookupIndex)
    | hb_filter (l->lookup_index_map)
    | hb_map (l->lookup_index_map)
    ;

    out->lookupIndex.serialize (c->serializer, l, it);
    // The decision to keep or drop this feature is already made before we get
    // here, so always retain it.
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);
    hb_barrier ();

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location. We would know sanitize "failed" if the old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */
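
    /* Worked example of the rebasing below (values hypothetical): if this
     * Feature starts 0x20 bytes after the FeatureList and the stored offset
     * is 0x30 relative to the FeatureList, the retried offset is
     * 0x30 - 0x20 = 0x10, now relative to the Feature itself. */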

    if (likely (featureParams.is_null ()))
      return_trace (true);

    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);
    hb_barrier ();

    if (featureParams == 0 && closure &&
        closure->tag == HB_TAG ('s','i','z','e') &&
        closure->list_base && closure->list_base < this)
    {
      unsigned int new_offset_int = orig_offset -
                                    (((char *) this) - ((char *) closure->list_base));

      Offset16To<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
          c->try_set (&featureParams, new_offset_int) &&
          !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
        return_trace (false);
    }

    return_trace (true);
  }

  Offset16To<FeatureParams>
                featureParams;  /* Offset to Feature Parameters table (if one
                                 * has been defined for the feature), relative
                                 * to the beginning of the Feature Table; = Null
                                 * if not required */
  IndexArray    lookupIndex;    /* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};

template <typename Type>
struct Record
{
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    if (!f_sub)
      return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag));

    const Feature& f = *reinterpret_cast<const Feature *> (f_sub);
    auto *s = c->subset_context->serializer;
    s->push ();

    out->offset = 0;
    bool ret = f.subset (c->subset_context, c, &tag);
    if (ret)
      s->add_link (out->offset, s->pop_pack ());
    else
      s->pop_discard ();

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) &&
                  offset.sanitize (c, base, &closure));
  }

  Tag           tag;            /* 4-byte Tag identifier */
  Offset16To<Type>
                offset;         /* Offset from beginning of object holding
                                 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};

template <typename Type>
struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
  const Offset16To<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  Offset16To<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  unsigned int get_tags (unsigned int start_offset,
                         unsigned int *record_count /* IN/OUT */,
                         hb_tag_t *record_tags /* OUT */) const
  {
    if (record_count)
    {
      + this->as_array ().sub_array (start_offset, record_count)
      | hb_map (&Record<Type>::tag)
      | hb_sink (hb_array (record_tags, *record_count))
      ;
    }
    return this->len;
  }
  bool find_index (hb_tag_t tag, unsigned int *index) const
  {
    return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
  }
};

template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + this->iter ()
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (RecordArrayOf<Type>::sanitize (c, this));
  }
};

struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + hb_enumerate (*this)
    | hb_filter (l->feature_index_map, hb_first)
    | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _)
                {
                  const Feature *f_sub = nullptr;
                  const Feature **f = nullptr;
                  if (l->feature_substitutes_map->has (_.first, &f))
                    f_sub = *f;

                  subset_record_array (l, out, this, f_sub) (_.second);
                })
    ;

    return_trace (true);
  }
};

typedef RecordListOf<Feature> FeatureList;

struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
                                    unsigned int *feature_count /* IN/OUT */,
                                    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
    return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    for (; iter && o_iter; iter++, o_iter++)
    {
      unsigned a = *iter;
      unsigned b = *o_iter;
      if (a != b) return false;
    }

    if (iter || o_iter) return false;

    return true;
  }

  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               const Tag *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    const uint32_t *v;
    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16      lookupOrderZ;   /* = Null (reserved for an offset to a
                                 * reordering table) */
  HBUINT16      reqFeatureIndex;/* Index of a feature required for this
                                 * language system--if no required features
                                 * = 0xFFFFu */
  IndexArray    featureIndex;   /* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);

struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
                                  unsigned int *lang_sys_count /* IN/OUT */,
                                  hb_tag_t *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (!c->visitScript ()) return;

    if (!c->script_langsys_map->has (script_index))
    {
      if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
        return;
    }

    if (has_default_lang_sys ())
    {
      // Only collect features from non-redundant langsys.
      const LangSys& d = get_default_lang_sys ();
      if (c->visitLangsys (d.get_feature_count ())) {
        d.collect_features (c);
      }

      for (auto _ : + hb_enumerate (langSys))
      {
        const LangSys& l = this+_.second.offset;
        if (!c->visitLangsys (l.get_feature_count ())) continue;
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.first);
      }
    }
    else
    {
      for (auto _ : + hb_enumerate (langSys))
      {
        const LangSys& l = this+_.second.offset;
        if (!c->visitLangsys (l.get_feature_count ())) continue;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.first);
      }
    }
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               const Tag *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);
    if (tag && !c->plan->layout_scripts.has (*tag))
      return false;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
        c->serializer->pop_discard ();
        out->defaultLangSys = 0;
      }
      else
      {
        c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
        defaultLang = true;
      }
    }

    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      + hb_enumerate (langSys)
      | hb_filter (active_langsys, hb_first)
      | hb_map (hb_second)
      | hb_filter ([=] (const Record<LangSys>& record) { return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
                 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
                defaultLangSys; /* Offset to DefaultLangSys table--from
                                 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
                langSys;        /* Array of LangSysRecords--listed
                                 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};

struct RecordListOfScript : RecordListOf<Script>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (auto _ : + hb_enumerate (*this))
    {
      auto snap = c->serializer->snapshot ();
      l->cur_script_index = _.first;
      bool ret = _.second.subset (l, this);
      if (!ret) c->serializer->revert (snap);
      else out->len++;
    }

    return_trace (true);
  }
};

typedef RecordListOfScript ScriptList;


struct LookupFlag : HBUINT16
{
  enum Flags {
    RightToLeft         = 0x0001u,
    IgnoreBaseGlyphs    = 0x0002u,
    IgnoreLigatures     = 0x0004u,
    IgnoreMarks         = 0x0008u,
    IgnoreFlags         = 0x000Eu,
    UseMarkFilteringSet = 0x0010u,
    Reserved            = 0x00E0u,
    MarkAttachmentType  = 0xFF00u
  };
  public:
  DEFINE_SIZE_STATIC (2);
};

} /* namespace OT */
/* This has to be outside the namespace. */
HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
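
/* For instance, IgnoreFlags (0x000Eu) is exactly
 * IgnoreBaseGlyphs | IgnoreLigatures | IgnoreMarks
 * (0x0002u | 0x0004u | 0x0008u); HB_MARK_AS_FLAG_T is what lets such bitwise
 * combinations of LookupFlag::Flags type-check. */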
namespace OT {

struct Lookup
{
  unsigned int get_subtable_count () const { return subTable.len; }

  template <typename TSubTable>
  const Array16OfOffset16To<TSubTable>& get_subtables () const
  { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
  template <typename TSubTable>
  Array16OfOffset16To<TSubTable>& get_subtables ()
  { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+get_subtables<TSubTable> ()[i]; }
  template <typename TSubTable>
  TSubTable& get_subtable (unsigned int i)
  { return this+get_subtables<TSubTable> ()[i]; }

  unsigned int get_size () const
  {
    const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
      return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
    return (const char *) &markFilteringSet - (const char *) this;
  }

  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16 bits are the LookupFlag
   * and the higher 16 bits are the mark-filtering-set index, if the lookup uses
   * one. Not to be confused with glyph_props, which is very similar. */
  uint32_t get_props () const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }
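
  /* For example, a lookup with lookupFlag = UseMarkFilteringSet (0x0010u) and
   * mark filtering set index 3 yields
   * get_props () == (3u << 16) | 0x0010u == 0x00030010u. */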

  template <typename TSubTable, typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, std::forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
        return_trace (r);
    }
    return_trace (c->default_return_value ());
  }

  bool serialize (hb_serialize_context_t *c,
                  unsigned int lookup_type,
                  uint32_t lookup_props,
                  unsigned int num_subtables)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    lookupType = lookup_type;
    lookupFlag = lookup_props & 0xFFFFu;
    if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      if (unlikely (!c->extend (this))) return_trace (false);
      HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      markFilteringSet = lookup_props >> 16;
    }
    return_trace (true);
  }

  template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->lookupType = lookupType;
    out->lookupFlag = lookupFlag;

    const hb_set_t *glyphset = c->plan->glyphset_gsub ();
    unsigned int lookup_type = get_type ();
    + hb_iter (get_subtables <TSubTable> ())
    | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
    | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
    ;

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      hb_codepoint_t *idx;
      if (!c->plan->used_mark_sets_map.has (markFilteringSet, &idx))
      {
        unsigned new_flag = lookupFlag;
        new_flag &= ~LookupFlag::UseMarkFilteringSet;
        out->lookupFlag = new_flag;
      }
      else
      {
        if (unlikely (!c->serializer->extend (out))) return_trace (false);
        HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
        outMarkFilteringSet = *idx;
      }
    }

    // Always keep the lookup even if it's empty. The rest of layout subsetting
    // depends on lookup indices being consistent with those computed during
    // planning. So if an empty lookup is discarded during the subset phase it
    // will invalidate all subsequent lookup indices. Generally we shouldn't end
    // up with an empty lookup, as we pre-prune them during the planning phase,
    // but it can happen in rare cases, such as when a subtable is considered
    // degenerate during closure (see: https://github.com/harfbuzz/harfbuzz/issues/3853).
    return_trace (true);
  }

  template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
    hb_barrier ();

    unsigned subtables = get_subtable_count ();
    if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      if (!markFilteringSet.sanitize (c)) return_trace (false);
    }

    if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
      return_trace (false);

    if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
    {
      hb_barrier ();

      /* The spec says all subtables of an Extension lookup should
       * have the same type, which shall not be the Extension type
       * itself (but we already checked for that).
       * This is especially important if one has a reverse type!
       *
       * We only do this if sanitizer edit_count is zero. Otherwise,
       * some of the subtables might have become insane after they
       * were sanity-checked by the edits of subsequent subtables.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
       */
      unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
      for (unsigned int i = 1; i < subtables; i++)
        if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
          return_trace (false);
    }
    return_trace (true);
  }

  protected:
  HBUINT16      lookupType;     /* Different enumerations for GSUB and GPOS */
  HBUINT16      lookupFlag;     /* Lookup qualifiers */
  Array16Of<Offset16>
                subTable;       /* Array of SubTables */
/*HBUINT16      markFilteringSetX[HB_VAR_ARRAY];*/
                                /* Index (base 0) into GDEF mark glyph sets
                                 * structure. This field is only present if bit
                                 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY (6, subTable);
};

template <typename Types>
using LookupList = List16OfOffsetTo<Lookup, typename Types::HBUINT>;

template <typename TLookup, typename OffsetType>
struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + hb_enumerate (*this)
    | hb_filter (l->lookup_index_map, hb_first)
    | hb_map (hb_second)
    | hb_apply (subset_offset_array (c, *out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
  }
};

/*
 * Coverage Table
 */


static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
                                          const hb_set_t &klasses,
                                          bool use_class_zero,
                                          hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
                                          hb_map_t *klass_map /*IN/OUT*/)
{
  if (!klass_map)
    return ClassDef_serialize (c, glyph_and_klass.iter ());

  /* Any glyph not assigned a class value falls into class zero (0).
   * If any glyph is assigned class 0, the remapping must start with 0->0. */
  if (!use_class_zero)
    klass_map->set (0, 0);

  unsigned idx = klass_map->has (0) ? 1 : 0;
  for (const unsigned k: klasses)
  {
    if (klass_map->has (k)) continue;
    klass_map->set (k, idx);
    idx++;
  }

  for (unsigned i = 0; i < glyph_and_klass.length; i++)
  {
    hb_codepoint_t klass = glyph_and_klass[i].second;
    glyph_and_klass[i].second = klass_map->get (klass);
  }

  c->propagate_error (glyph_and_klass, klasses);
  return ClassDef_serialize (c, glyph_and_klass.iter ());
}
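
/* A worked example (values hypothetical): with klasses = {2, 5} and
 * use_class_zero == false, klass_map is seeded with 0->0 and then filled in
 * class order, giving {0->0, 2->1, 5->2}; every (glyph, 5) entry in
 * glyph_and_klass is rewritten to (glyph, 2) before serialization. */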

/*
 * Class Definition Table
 */

template <typename Types>
struct ClassDefFormat1_3
{
  friend struct ClassDef;

  private:
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return classValue[(unsigned int) (glyph_id - startGlyph)];
  }

  unsigned get_population () const
  {
    return classValue.len;
  }

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    if (unlikely (!it))
    {
      classFormat = 1;
      startGlyph = 0;
      classValue.len = 0;
      return_trace (true);
    }

    hb_codepoint_t glyph_min = (*it).first;
    hb_codepoint_t glyph_max = + it
                               | hb_map (hb_first)
                               | hb_reduce (hb_max, 0u);
    unsigned glyph_count = glyph_max - glyph_min + 1;

    startGlyph = glyph_min;
    if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
    for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it)
    {
      unsigned idx = gid_klass_pair.first - glyph_min;
      classValue[idx] = gid_klass_pair.second;
    }
    return_trace (true);
  }

  bool subset (hb_subset_context_t *c,
               hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_map_t &glyph_map = c->plan->glyph_map_gsub;

    hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
    hb_set_t orig_klasses;

    hb_codepoint_t start = startGlyph;
    hb_codepoint_t end = start + classValue.len;

    for (const hb_codepoint_t gid : + hb_range (start, end))
    {
      hb_codepoint_t new_gid = glyph_map[gid];
      if (new_gid == HB_MAP_VALUE_INVALID) continue;
      if (glyph_filter && !glyph_filter->has (gid)) continue;

      unsigned klass = classValue[gid - start];
      if (!klass) continue;

      glyph_and_klass.push (hb_pair (new_gid, klass));
      orig_klasses.add (klass);
    }

    if (use_class_zero)
    {
      unsigned glyph_count = glyph_filter
                             ? hb_len (hb_iter (glyph_map.keys ()) | hb_filter (glyph_filter))
                             : glyph_map.get_population ();
      use_class_zero = glyph_count <= glyph_and_klass.length;
    }
    if (!ClassDef_remap_and_serialize (c->serializer,
                                       orig_klasses,
                                       use_class_zero,
                                       glyph_and_klass,
                                       klass_map))
      return_trace (false);
    return_trace (keep_empty_table || (bool) glyph_and_klass);
  }

sanitizeOT::ClassDefFormat1_31624 bool sanitize (hb_sanitize_context_t *c) const
1625 {
1626 TRACE_SANITIZE (this);
1627 return_trace (c->check_struct (this) && classValue.sanitize (c));
1628 }
1629
costOT::ClassDefFormat1_31630 unsigned cost () const { return 1; }
1631
1632 template <typename set_t>
collect_coverageOT::ClassDefFormat1_31633 bool collect_coverage (set_t *glyphs) const
1634 {
1635 unsigned int start = 0;
1636 unsigned int count = classValue.len;
1637 for (unsigned int i = 0; i < count; i++)
1638 {
1639 if (classValue[i])
1640 continue;
1641
1642 if (start != i)
1643 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
1644 return false;
1645
1646 start = i + 1;
1647 }
1648 if (start != count)
1649 if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
1650 return false;
1651
1652 return true;
1653 }
1654
1655 template <typename set_t>
collect_classOT::ClassDefFormat1_31656 bool collect_class (set_t *glyphs, unsigned klass) const
1657 {
1658 unsigned int count = classValue.len;
1659 for (unsigned int i = 0; i < count; i++)
1660 if (classValue[i] == klass) glyphs->add (startGlyph + i);
1661 return true;
1662 }
1663
intersectsOT::ClassDefFormat1_31664 bool intersects (const hb_set_t *glyphs) const
1665 {
1666 hb_codepoint_t start = startGlyph;
1667 hb_codepoint_t end = startGlyph + classValue.len;
1668 for (hb_codepoint_t iter = startGlyph - 1;
1669 glyphs->next (&iter) && iter < end;)
1670 if (classValue[iter - start]) return true;
1671 return false;
1672 }
intersects_classOT::ClassDefFormat1_31673 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
1674 {
1675 unsigned int count = classValue.len;
1676 if (klass == 0)
1677 {
1678 /* Match if there's any glyph that is not listed! */
1679 hb_codepoint_t g = HB_SET_VALUE_INVALID;
1680 if (!glyphs->next (&g)) return false;
1681 if (g < startGlyph) return true;
1682 g = startGlyph + count - 1;
1683 if (glyphs->next (&g)) return true;
1684 /* Fall through. */
1685 }
1686 /* TODO Speed up, using set overlap first? */
1687 /* TODO(iter) Rewrite as dagger. */
1688 const HBUINT16 *arr = classValue.arrayZ;
1689 for (unsigned int i = 0; i < count; i++)
1690 if (arr[i] == klass && glyphs->has (startGlyph + i))
1691 return true;
1692 return false;
1693 }
1694
intersected_class_glyphsOT::ClassDefFormat1_31695 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
1696 {
1697 unsigned count = classValue.len;
1698 if (klass == 0)
1699 {
1700 unsigned start_glyph = startGlyph;
1701 for (uint32_t g = HB_SET_VALUE_INVALID;
1702 glyphs->next (&g) && g < start_glyph;)
1703 intersect_glyphs->add (g);
1704
1705 for (uint32_t g = startGlyph + count - 1;
1706 glyphs-> next (&g);)
1707 intersect_glyphs->add (g);
1708
1709 return;
1710 }
1711
1712 for (unsigned i = 0; i < count; i++)
1713 if (classValue[i] == klass && glyphs->has (startGlyph + i))
1714 intersect_glyphs->add (startGlyph + i);
1715
1716 #if 0
1717 /* The following implementation is faster asymptotically, but slower
1718 * in practice. */
1719 unsigned start_glyph = startGlyph;
1720 unsigned end_glyph = start_glyph + count;
1721 for (unsigned g = startGlyph - 1;
1722 glyphs->next (&g) && g < end_glyph;)
1723 if (classValue.arrayZ[g - start_glyph] == klass)
1724 intersect_glyphs->add (g);
1725 #endif
1726 }
1727
intersected_classesOT::ClassDefFormat1_31728 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
1729 {
1730 if (glyphs->is_empty ()) return;
1731 hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
1732 if (glyphs->get_min () < startGlyph ||
1733 glyphs->get_max () > end_glyph)
1734 intersect_classes->add (0);
1735
1736 for (const auto& _ : + hb_enumerate (classValue))
1737 {
1738 hb_codepoint_t g = startGlyph + _.first;
1739 if (glyphs->has (g))
1740 intersect_classes->add (_.second);
1741 }
1742 }
1743
1744 protected:
1745 HBUINT16 classFormat; /* Format identifier--format = 1 */
1746 typename Types::HBGlyphID
1747 startGlyph; /* First GlyphID of the classValueArray */
1748 typename Types::template ArrayOf<HBUINT16>
1749 classValue; /* Array of Class Values--one per GlyphID */
1750 public:
1751 DEFINE_SIZE_ARRAY (2 + 2 * Types::size, classValue);
1752 };
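
/* Illustrative sketch (not part of the build): Format 1 stores one class
 * value per glyph in a dense array starting at startGlyph, so lookup is a
 * single index:
 *
 *   // startGlyph = 100, classValue = [0, 2, 2, 1]
 *   // get_class (101) -> classValue[1] == 2
 *   // get_class (99) is out of range; the bounds-checked array accessor
 *   // yields the Null value, i.e. class 0.
 *
 * This format wins when glyphs with non-zero classes are contiguous. */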

template <typename Types>
struct ClassDefFormat2_4
{
  friend struct ClassDef;

  private:
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return rangeRecord.bsearch (glyph_id).value;
  }

  unsigned get_population () const
  {
    typename Types::large_int ret = 0;
    for (const auto &r : rangeRecord)
      ret += r.get_population ();
    return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
  }

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c,
                  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    if (unlikely (!it))
    {
      classFormat = 2;
      rangeRecord.len = 0;
      return_trace (true);
    }

    bool unsorted = false;
    unsigned num_ranges = 1;
    hb_codepoint_t prev_gid = (*it).first;
    unsigned prev_klass = (*it).second;

    RangeRecord<Types> range_rec;
    range_rec.first = prev_gid;
    range_rec.last = prev_gid;
    range_rec.value = prev_klass;

    auto *record = c->copy (range_rec);
    if (unlikely (!record)) return_trace (false);

    for (const auto gid_klass_pair : + (++it))
    {
      hb_codepoint_t cur_gid = gid_klass_pair.first;
      unsigned cur_klass = gid_klass_pair.second;

      if (cur_gid != prev_gid + 1 ||
          cur_klass != prev_klass)
      {
        if (unlikely (cur_gid < prev_gid))
          unsorted = true;

        if (unlikely (!record)) break;
        record->last = prev_gid;
        num_ranges++;

        range_rec.first = cur_gid;
        range_rec.last = cur_gid;
        range_rec.value = cur_klass;

        record = c->copy (range_rec);
      }

      prev_klass = cur_klass;
      prev_gid = cur_gid;
    }

    if (unlikely (c->in_error ())) return_trace (false);

    if (likely (record)) record->last = prev_gid;
    rangeRecord.len = num_ranges;

    if (unlikely (unsorted))
      rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);

    return_trace (true);
  }

  bool subset (hb_subset_context_t *c,
               hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
    const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();

    hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
    hb_set_t orig_klasses;

    if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2
        < get_population ())
    {
      for (hb_codepoint_t g : glyph_set)
      {
        unsigned klass = get_class (g);
        if (!klass) continue;
        hb_codepoint_t new_gid = glyph_map[g];
        if (new_gid == HB_MAP_VALUE_INVALID) continue;
        if (glyph_filter && !glyph_filter->has (g)) continue;
        glyph_and_klass.push (hb_pair (new_gid, klass));
        orig_klasses.add (klass);
      }
    }
    else
    {
      unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
      for (auto &range : rangeRecord)
      {
        unsigned klass = range.value;
        if (!klass) continue;
        hb_codepoint_t start = range.first;
        hb_codepoint_t end = hb_min (range.last + 1, num_source_glyphs);
        for (hb_codepoint_t g = start; g < end; g++)
        {
          hb_codepoint_t new_gid = glyph_map[g];
          if (new_gid == HB_MAP_VALUE_INVALID) continue;
          if (glyph_filter && !glyph_filter->has (g)) continue;

          glyph_and_klass.push (hb_pair (new_gid, klass));
          orig_klasses.add (klass);
        }
      }
    }

    const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
    unsigned glyph_count = glyph_filter
                           ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                           : glyph_map.get_population ();
    use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
    if (!ClassDef_remap_and_serialize (c->serializer,
                                       orig_klasses,
                                       use_class_zero,
                                       glyph_and_klass,
                                       klass_map))
      return_trace (false);
    return_trace (keep_empty_table || (bool) glyph_and_klass);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    for (auto &range : rangeRecord)
      if (range.value)
        if (unlikely (!range.collect_coverage (glyphs)))
          return false;
    return true;
  }

  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    for (auto &range : rangeRecord)
    {
      if (range.value == klass)
        if (unlikely (!range.collect_coverage (glyphs)))
          return false;
    }
    return true;
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len) / 2)
    {
      for (auto g : *glyphs)
        if (get_class (g))
          return true;
      return false;
    }

    return hb_any (+ hb_iter (rangeRecord)
                   | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
  }
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      hb_codepoint_t last = HB_SET_VALUE_INVALID;
      auto it = hb_iter (rangeRecord);
      for (auto &range : it)
      {
        if (it->first == last + 1)
        {
          it++;
          continue;
        }

        if (!glyphs->next (&g))
          break;
        if (g < range.first)
          return true;
        g = range.last;
        last = g;
      }
      if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
        return true;
      /* Fall through. */
    }
    for (const auto &range : rangeRecord)
      if (range.value == klass && range.intersects (*glyphs))
        return true;
    return false;
  }

  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    if (klass == 0)
    {
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (auto &range : rangeRecord)
      {
        if (!glyphs->next (&g))
          goto done;
        while (g < range.first)
        {
          intersect_glyphs->add (g);
          if (!glyphs->next (&g))
            goto done;
        }
        g = range.last;
      }
      while (glyphs->next (&g))
        intersect_glyphs->add (g);
      done:

      return;
    }

    unsigned count = rangeRecord.len;
    if (count > glyphs->get_population () * hb_bit_storage (count) * 8)
    {
      for (auto g : *glyphs)
      {
        unsigned i;
        if (rangeRecord.as_array ().bfind (g, &i) &&
            rangeRecord.arrayZ[i].value == klass)
          intersect_glyphs->add (g);
      }
      return;
    }

    for (auto &range : rangeRecord)
    {
      if (range.value != klass) continue;

      unsigned end = range.last + 1;
      for (hb_codepoint_t g = range.first - 1;
           glyphs->next (&g) && g < end;)
        intersect_glyphs->add (g);
    }
  }

  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    if (glyphs->is_empty ()) return;

    hb_codepoint_t g = HB_SET_VALUE_INVALID;
    for (auto &range : rangeRecord)
    {
      if (!glyphs->next (&g))
        break;
      if (g < range.first)
      {
        intersect_classes->add (0);
        break;
      }
      g = range.last;
    }
    if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
      intersect_classes->add (0);

    for (const auto& range : rangeRecord)
      if (range.intersects (*glyphs))
        intersect_classes->add (range.value);
  }

  protected:
  HBUINT16      classFormat;    /* Format identifier--format = 2 */
  typename Types::template SortedArrayOf<RangeRecord<Types>>
                rangeRecord;    /* Array of glyph ranges--ordered by
                                 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (2 + Types::size, rangeRecord);
};
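
/* Illustrative sketch (not part of the build): Format 2 stores sorted
 * (first, last, value) ranges, so get_class() is a binary search:
 *
 *   // rangeRecord = [{10..19 -> 1}, {40..44 -> 3}]
 *   // get_class (15) -> 1   (hit in the first range)
 *   // get_class (30) -> 0   (bsearch miss returns the Null record)
 *
 * cost() reports the bsearch depth, hb_bit_storage (len), which callers can
 * use to probe the cheaper of two ClassDefs first. */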

struct ClassDef
{
  /* Has interface. */
  unsigned operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k]; }
  /* Projection. */
  hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }

  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.get_class (glyph_id);
    case 2: hb_barrier (); return u.format2.get_class (glyph_id);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.get_class (glyph_id);
    case 4: hb_barrier (); return u.format4.get_class (glyph_id);
#endif
    default:return 0;
    }
  }

  unsigned get_population () const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.get_population ();
    case 2: hb_barrier (); return u.format2.get_population ();
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.get_population ();
    case 4: hb_barrier (); return u.format4.get_population ();
#endif
    default:return NOT_COVERED;
    }
  }

  template<typename Iterator,
           hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    auto it = + it_with_class_zero | hb_filter (hb_second);

    unsigned format = 2;
    hb_codepoint_t glyph_max = 0;
    if (likely (it))
    {
      hb_codepoint_t glyph_min = (*it).first;
      glyph_max = glyph_min;

      unsigned num_glyphs = 0;
      unsigned num_ranges = 1;
      hb_codepoint_t prev_gid = glyph_min;
      unsigned prev_klass = (*it).second;

      for (const auto gid_klass_pair : it)
      {
        hb_codepoint_t cur_gid = gid_klass_pair.first;
        unsigned cur_klass = gid_klass_pair.second;
        num_glyphs++;
        if (cur_gid == glyph_min) continue;
        if (cur_gid > glyph_max) glyph_max = cur_gid;
        if (cur_gid != prev_gid + 1 ||
            cur_klass != prev_klass)
          num_ranges++;

        prev_gid = cur_gid;
        prev_klass = cur_klass;
      }

      if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
        format = 1;
    }

#ifndef HB_NO_BEYOND_64K
    if (glyph_max > 0xFFFFu)
      format += 2; /* Promote to the extended formats 3/4; u.format is assigned below. */
    if (unlikely (glyph_max > 0xFFFFFFu))
#else
    if (unlikely (glyph_max > 0xFFFFu))
#endif
    {
      c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW);
      return_trace (false);
    }

    u.format = format;

    switch (u.format)
    {
    case 1: hb_barrier (); return_trace (u.format1.serialize (c, it));
    case 2: hb_barrier (); return_trace (u.format2.serialize (c, it));
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return_trace (u.format3.serialize (c, it));
    case 4: hb_barrier (); return_trace (u.format4.serialize (c, it));
#endif
    default:return_trace (false);
    }
  }

  bool subset (hb_subset_context_t *c,
               hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: hb_barrier (); return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    case 2: hb_barrier (); return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return_trace (u.format3.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    case 4: hb_barrier (); return_trace (u.format4.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
#endif
    default:return_trace (false);
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format) {
    case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
    case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
    case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
#endif
    default:return_trace (true);
    }
  }

  unsigned cost () const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.cost ();
    case 2: hb_barrier (); return u.format2.cost ();
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.cost ();
    case 4: hb_barrier (); return u.format4.cost ();
#endif
    default:return 0u;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.collect_coverage (glyphs);
    case 2: hb_barrier (); return u.format2.collect_coverage (glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.collect_coverage (glyphs);
    case 4: hb_barrier (); return u.format4.collect_coverage (glyphs);
#endif
    default:return false;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.collect_class (glyphs, klass);
    case 2: hb_barrier (); return u.format2.collect_class (glyphs, klass);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.collect_class (glyphs, klass);
    case 4: hb_barrier (); return u.format4.collect_class (glyphs, klass);
#endif
    default:return false;
    }
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.intersects (glyphs);
    case 2: hb_barrier (); return u.format2.intersects (glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.intersects (glyphs);
    case 4: hb_barrier (); return u.format4.intersects (glyphs);
#endif
    default:return false;
    }
  }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.intersects_class (glyphs, klass);
    case 2: hb_barrier (); return u.format2.intersects_class (glyphs, klass);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.intersects_class (glyphs, klass);
    case 4: hb_barrier (); return u.format4.intersects_class (glyphs, klass);
#endif
    default:return false;
    }
  }

  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    case 2: hb_barrier (); return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    case 4: hb_barrier (); return u.format4.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
#endif
    default:return;
    }
  }

  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.intersected_classes (glyphs, intersect_classes);
    case 2: hb_barrier (); return u.format2.intersected_classes (glyphs, intersect_classes);
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.intersected_classes (glyphs, intersect_classes);
    case 4: hb_barrier (); return u.format4.intersected_classes (glyphs, intersect_classes);
#endif
    default:return;
    }
  }

  protected:
  union {
  HBUINT16                      format;         /* Format identifier */
  ClassDefFormat1_3<SmallTypes> format1;
  ClassDefFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BEYOND_64K
  ClassDefFormat1_3<MediumTypes> format3;
  ClassDefFormat2_4<MediumTypes> format4;
#endif
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
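
/* Illustrative usage sketch (not part of the build), assuming a sanitized
 * ClassDef reached through a GSUB/GPOS subtable (e.g. PairPosFormat2's
 * classDef1):
 *
 *   const ClassDef &class_def = ...;
 *   unsigned klass = class_def.get_class (gid);          // 0 if unlisted
 *   bool hit = class_def.intersects_class (&glyphs, klass);
 *
 * Every accessor dispatches on u.format; the hb_barrier() calls keep the
 * format read ordered before the per-format field accesses. */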

template<typename Iterator>
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
                                       Iterator it)
{ return (c->start_embed<ClassDef> ()->serialize (c, it)); }


/*
 * Item Variation Store
 */

/* Ported from fonttools (class _Encoding). */
struct delta_row_encoding_t
{
  /* Each byte in chars represents a region; its value is one of 0/1/2/4,
   * the number of bytes needed to store that region's deltas. */
  hb_vector_t<uint8_t> chars;
  unsigned width = 0;
  hb_vector_t<uint8_t> columns;
  unsigned overhead = 0;
  hb_vector_t<const hb_vector_t<int>*> items;

  delta_row_encoding_t () = default;
  delta_row_encoding_t (hb_vector_t<uint8_t>&& chars_,
                        const hb_vector_t<int>* row = nullptr) :
                        delta_row_encoding_t ()
  {
    chars = std::move (chars_);
    width = get_width ();
    columns = get_columns ();
    overhead = get_chars_overhead (columns);
    if (row) items.push (row);
  }

  bool is_empty () const
  { return !items; }

  static hb_vector_t<uint8_t> get_row_chars (const hb_vector_t<int>& row)
  {
    hb_vector_t<uint8_t> ret;
    if (!ret.alloc (row.length)) return ret;

    bool long_words = false;

    /* 0/1/2-byte encoding */
    for (int i = row.length - 1; i >= 0; i--)
    {
      int v = row.arrayZ[i];
      if (v == 0)
        ret.push (0);
      else if (v > 32767 || v < -32768)
      {
        long_words = true;
        break;
      }
      else if (v > 127 || v < -128)
        ret.push (2);
      else
        ret.push (1);
    }

    if (!long_words)
      return ret;

    /* Redo with 0/2/4-byte encoding. */
    ret.reset ();
    for (int i = row.length - 1; i >= 0; i--)
    {
      int v = row.arrayZ[i];
      if (v == 0)
        ret.push (0);
      else if (v > 32767 || v < -32768)
        ret.push (4);
      else
        ret.push (2);
    }
    return ret;
  }

  inline unsigned get_width ()
  {
    unsigned ret = + hb_iter (chars)
                   | hb_reduce (hb_add, 0u)
                   ;
    return ret;
  }

  hb_vector_t<uint8_t> get_columns ()
  {
    hb_vector_t<uint8_t> cols;
    cols.alloc (chars.length);
    for (auto v : chars)
    {
      uint8_t flag = v ? 1 : 0;
      cols.push (flag);
    }
    return cols;
  }

  static inline unsigned get_chars_overhead (const hb_vector_t<uint8_t>& cols)
  {
    unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header
    unsigned cols_bit_count = 0;
    for (auto v : cols)
      if (v) cols_bit_count++;
    return c + cols_bit_count * 2;
  }

  unsigned get_gain () const
  {
    int count = items.length;
    return hb_max (0, (int) overhead - count);
  }

  int gain_from_merging (const delta_row_encoding_t& other_encoding) const
  {
    int combined_width = 0;
    for (unsigned i = 0; i < chars.length; i++)
      combined_width += hb_max (chars.arrayZ[i], other_encoding.chars.arrayZ[i]);

    hb_vector_t<uint8_t> combined_columns;
    combined_columns.alloc (columns.length);
    for (unsigned i = 0; i < columns.length; i++)
      combined_columns.push (columns.arrayZ[i] | other_encoding.columns.arrayZ[i]);

    int combined_overhead = get_chars_overhead (combined_columns);
    int combined_gain = (int) overhead + (int) other_encoding.overhead - combined_overhead
                        - (combined_width - (int) width) * items.length
                        - (combined_width - (int) other_encoding.width) * other_encoding.items.length;

    return combined_gain;
  }

  static int cmp (const void *pa, const void *pb)
  {
    const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
    const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;

    int gain_a = a->get_gain ();
    int gain_b = b->get_gain ();

    if (gain_a != gain_b)
      return gain_a - gain_b;

    return (b->chars).as_array ().cmp ((a->chars).as_array ());
  }

  static int cmp_width (const void *pa, const void *pb)
  {
    const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
    const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;

    if (a->width != b->width)
      return (int) a->width - (int) b->width;

    return (b->chars).as_array ().cmp ((a->chars).as_array ());
  }

  bool add_row (const hb_vector_t<int>* row)
  { return items.push (row); }
};
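
/* Illustrative worked example (not part of the build): for the delta row
 * {0, 128, -5}, get_row_chars() assigns 0 bytes to the zero delta, 2 bytes
 * to 128 (outside the int8 range) and 1 byte to -5, giving width = 3.  If
 * any delta fell outside the int16 range, the whole row would be re-coded
 * with the 0/2/4-byte scheme.  gain_from_merging() weighs the overhead
 * freed by combining two encodings against the widening cost imposed on
 * each side's rows; a merge is worthwhile when the result is positive. */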

struct VarRegionAxis
{
  float evaluate (int coord) const
  {
    int peak = peakCoord.to_int ();
    if (peak == 0 || coord == peak)
      return 1.f;
    else if (coord == 0) // Faster
      return 0.f;

    int start = startCoord.to_int (), end = endCoord.to_int ();

    /* TODO Move these to sanitize(). */
    if (unlikely (start > peak || peak > end))
      return 1.f;
    if (unlikely (start < 0 && end > 0 && peak != 0))
      return 1.f;

    if (coord <= start || end <= coord)
      return 0.f;

    /* Interpolate */
    if (coord < peak)
      return float (coord - start) / (peak - start);
    else
      return float (end - coord) / (end - peak);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool serialize (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (this));
  }

  public:
  F2DOT14       startCoord;
  F2DOT14       peakCoord;
  F2DOT14       endCoord;
  public:
  DEFINE_SIZE_STATIC (6);
};
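
/* Illustrative worked example (not part of the build): evaluate() is the
 * standard OpenType "tent" scalar over normalized 2.14 coordinates.  For a
 * region axis with start = 0, peak = 0.5, end = 1.0 (stored as 0, 8192,
 * 16384 in F2DOT14):
 *
 *   evaluate (4096)  -> (4096 - 0) / (8192 - 0)         == 0.5
 *   evaluate (8192)  -> 1.0   (at the peak)
 *   evaluate (12288) -> (16384 - 12288) / (16384 - 8192) == 0.5
 *   evaluate (0)     -> 0.0   (outside support; fast path)
 */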
struct SparseVarRegionAxis
{
  float evaluate (const int *coords, unsigned int coord_len) const
  {
    unsigned i = axisIndex;
    int coord = i < coord_len ? coords[i] : 0;
    return axis.evaluate (coord);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  bool serialize (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (this));
  }

  public:
  HBUINT16      axisIndex;
  VarRegionAxis axis;
  public:
  DEFINE_SIZE_STATIC (8);
};

#define REGION_CACHE_ITEM_CACHE_INVALID 2.f

struct VarRegionList
{
  using cache_t = float;

  float evaluate (unsigned int region_index,
                  const int *coords, unsigned int coord_len,
                  cache_t *cache = nullptr) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    float *cached_value = nullptr;
    if (cache)
    {
      cached_value = &(cache[region_index]);
      if (likely (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID))
        return *cached_value;
    }

    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
      {
        if (cache)
          *cached_value = 0.;
        return 0.;
      }
      v *= factor;
    }

    if (cache)
      *cached_value = v;
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  hb_barrier () &&
                  axesZ.sanitize (c, axisCount * regionCount));
  }

  bool serialize (hb_serialize_context_t *c,
                  const hb_vector_t<hb_tag_t>& axis_tags,
                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions)
  {
    TRACE_SERIALIZE (this);
    unsigned axis_count = axis_tags.length;
    unsigned region_count = regions.length;
    if (!axis_count || !region_count) return_trace (false);
    if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
                                             VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = axis_count;
    regionCount = region_count;

    for (unsigned r = 0; r < region_count; r++)
    {
      const auto& region = regions[r];
      for (unsigned i = 0; i < axis_count; i++)
      {
        hb_tag_t tag = axis_tags.arrayZ[i];
        VarRegionAxis var_region_rec;
        Triple *coords;
        if (region->has (tag, &coords))
        {
          var_region_rec.startCoord.set_float (coords->minimum);
          var_region_rec.peakCoord.set_float (coords->middle);
          var_region_rec.endCoord.set_float (coords->maximum);
        }
        else
        {
          var_region_rec.startCoord.set_int (0);
          var_region_rec.peakCoord.set_int (0);
          var_region_rec.endCoord.set_int (0);
        }
        if (!var_region_rec.serialize (c))
          return_trace (false);
      }
    }
    return_trace (true);
  }

  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
                                             VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend (this))) return_trace (false);
    unsigned int region_count = src->regionCount;
    for (unsigned int r = 0; r < regionCount; r++)
    {
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  bool get_var_region (unsigned region_index,
                       const hb_map_t& axes_old_index_tag_map,
                       hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const
  {
    if (region_index >= regionCount) return false;
    const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount);
    for (unsigned i = 0; i < axisCount; i++)
    {
      hb_tag_t *axis_tag;
      if (!axes_old_index_tag_map.has (i, &axis_tag))
        return false;

      float min_val = axis_region->startCoord.to_float ();
      float def_val = axis_region->peakCoord.to_float ();
      float max_val = axis_region->endCoord.to_float ();

      if (def_val != 0.f)
        axis_tuples.set (*axis_tag, Triple ((double) min_val, (double) def_val, (double) max_val));
      axis_region++;
    }
    return !axis_tuples.in_error ();
  }

  bool get_var_regions (const hb_map_t& axes_old_index_tag_map,
                        hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const
  {
    if (!regions.alloc (regionCount))
      return false;

    for (unsigned i = 0; i < regionCount; i++)
    {
      hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
      if (!get_var_region (i, axes_old_index_tag_map, axis_tuples))
        return false;
      regions.push (std::move (axis_tuples));
    }
    return !regions.in_error ();
  }

  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }

  public:
  HBUINT16      axisCount;
  HBUINT15      regionCount;
  protected:
  UnsizedArrayOf<VarRegionAxis>
                axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
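
/* Illustrative note (not part of the build): a region's scalar is the
 * product of its per-axis tents, so evaluation short-circuits to zero as
 * soon as any axis scalar is zero; an axis whose peak is pinned at 0
 * contributes a constant 1.f and effectively drops out.  The optional
 * cache, sized regionCount and primed with REGION_CACHE_ITEM_CACHE_INVALID,
 * memoizes one scalar per region for a fixed set of coordinates. */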

struct SparseVariationRegion : Array16Of<SparseVarRegionAxis>
{
  float evaluate (const int *coords, unsigned int coord_len) const
  {
    float v = 1.f;
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      float factor = arrayZ[i].evaluate (coords, coord_len);
      if (factor == 0.f)
        return 0.;
      v *= factor;
    }
    return v;
  }
};

struct SparseVarRegionList
{
  using cache_t = float;

  float evaluate (unsigned int region_index,
                  const int *coords, unsigned int coord_len,
                  cache_t *cache = nullptr) const
  {
    if (unlikely (region_index >= regions.len))
      return 0.;

    float *cached_value = nullptr;
    if (cache)
    {
      cached_value = &(cache[region_index]);
      if (likely (*cached_value != REGION_CACHE_ITEM_CACHE_INVALID))
        return *cached_value;
    }

    const SparseVariationRegion &region = this+regions[region_index];

    float v = region.evaluate (coords, coord_len);

    if (cache)
      *cached_value = v;
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (regions.sanitize (c, this));
  }

  public:
  Array16Of<Offset32To<SparseVariationRegion>>
                regions;
  public:
  DEFINE_SIZE_ARRAY (2, regions);
};

struct VarData
{
  unsigned int get_item_count () const
  { return itemCount; }

  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  unsigned get_region_index (unsigned i) const
  { return i >= regionIndices.len ? -1 : regionIndices[i]; }

  unsigned int get_row_size () const
  { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }

  unsigned int get_size () const
  { return min_size
         - regionIndices.min_size + regionIndices.get_size ()
         + itemCount * get_row_size ();
  }

  float get_delta (unsigned int inner,
                   const int *coords, unsigned int coord_count,
                   const VarRegionList &regions,
                   VarRegionList::cache_t *cache = nullptr) const
  {
    if (unlikely (inner >= itemCount))
      return 0.;

    unsigned int count = regionIndices.len;
    bool is_long = longWords ();
    unsigned word_count = wordCount ();
    unsigned int scount = is_long ? count : word_count;
    unsigned int lcount = is_long ? word_count : 0;

    const HBUINT8 *bytes = get_delta_bytes ();
    const HBUINT8 *row = bytes + inner * get_row_size ();

    float delta = 0.;
    unsigned int i = 0;

    const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row);
    for (; i < lcount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      delta += scalar * *lcursor++;
    }
    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor);
    for (; i < scount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      delta += scalar * *scursor++;
    }
    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
    for (; i < count; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      delta += scalar * *bcursor++;
    }

    return delta;
  }

  void get_region_scalars (const int *coords, unsigned int coord_count,
                           const VarRegionList &regions,
                           float *scalars /*OUT*/,
                           unsigned int num_scalars) const
  {
    unsigned count = hb_min (num_scalars, regionIndices.len);
    for (unsigned int i = 0; i < count; i++)
      scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
    for (unsigned int i = count; i < num_scalars; i++)
      scalars[i] = 0.f;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  regionIndices.sanitize (c) &&
                  hb_barrier () &&
                  wordCount () <= regionIndices.len &&
                  c->check_range (get_delta_bytes (),
                                  itemCount,
                                  get_row_size ()));
  }

  bool serialize (hb_serialize_context_t *c,
                  bool has_long,
                  const hb_vector_t<const hb_vector_t<int>*>& rows)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    unsigned row_count = rows.length;
    itemCount = row_count;

    int min_threshold = has_long ? -65536 : -128;
    int max_threshold = has_long ? +65535 : +127;
    enum delta_size_t { kZero=0, kNonWord, kWord };
    hb_vector_t<delta_size_t> delta_sz;
    unsigned num_regions = rows[0]->length;
    if (!delta_sz.resize (num_regions))
      return_trace (false);

    unsigned word_count = 0;
    for (unsigned r = 0; r < num_regions; r++)
    {
      for (unsigned i = 0; i < row_count; i++)
      {
        int delta = rows[i]->arrayZ[r];
        if (delta < min_threshold || delta > max_threshold)
        {
          delta_sz[r] = kWord;
          word_count++;
          break;
        }
        else if (delta != 0)
        {
          delta_sz[r] = kNonWord;
        }
      }
    }

    /* Reorder regions: words first, then non-words. */
    unsigned word_index = 0;
    unsigned non_word_index = word_count;
    hb_map_t ri_map;
    for (unsigned r = 0; r < num_regions; r++)
    {
      if (!delta_sz[r]) continue;
      unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
      if (!ri_map.set (new_r, r))
        return_trace (false);
    }

    wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);

    unsigned ri_count = ri_map.get_population ();
    regionIndices.len = ri_count;
    if (unlikely (!c->extend (this))) return_trace (false);

    for (unsigned r = 0; r < ri_count; r++)
    {
      hb_codepoint_t *idx;
      if (!ri_map.has (r, &idx))
        return_trace (false);
      regionIndices[r] = *idx;
    }

    HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();
    for (unsigned int i = 0; i < row_count; i++)
    {
      for (unsigned int r = 0; r < ri_count; r++)
      {
        int delta = rows[i]->arrayZ[ri_map[r]];
        set_item_delta_fast (i, r, delta, delta_bytes, row_size);
      }
    }
    return_trace (true);
  }

  bool serialize (hb_serialize_context_t *c,
                  const VarData *src,
                  const hb_inc_bimap_t &inner_map,
                  const hb_inc_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    itemCount = inner_map.get_next_value ();

    /* Optimize word count */
    unsigned ri_count = src->regionIndices.len;
    enum delta_size_t { kZero=0, kNonWord, kWord };
    hb_vector_t<delta_size_t> delta_sz;
    hb_vector_t<unsigned int> ri_map;   /* maps new index to old index */
    delta_sz.resize (ri_count);
    ri_map.resize (ri_count);
    unsigned int new_word_count = 0;
    unsigned int r;

    const HBUINT8 *src_delta_bytes = src->get_delta_bytes ();
    unsigned src_row_size = src->get_row_size ();
    unsigned src_word_count = src->wordCount ();
    bool src_long_words = src->longWords ();

    bool has_long = false;
    if (src_long_words)
    {
      for (r = 0; r < src_word_count; r++)
      {
        for (unsigned old_gid : inner_map.keys())
        {
          int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
          if (delta < -65536 || 65535 < delta)
          {
            has_long = true;
            break;
          }
        }
      }
    }

    signed min_threshold = has_long ? -65536 : -128;
    signed max_threshold = has_long ? +65535 : +127;
    for (r = 0; r < ri_count; r++)
    {
      bool short_circuit = src_long_words == has_long && src_word_count <= r;

      delta_sz[r] = kZero;
      for (unsigned old_gid : inner_map.keys())
      {
        int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
        if (delta < min_threshold || max_threshold < delta)
        {
          delta_sz[r] = kWord;
          new_word_count++;
          break;
        }
        else if (delta != 0)
        {
          delta_sz[r] = kNonWord;
          if (short_circuit)
            break;
        }
      }
    }

    unsigned int word_index = 0;
    unsigned int non_word_index = new_word_count;
    unsigned int new_ri_count = 0;
    for (r = 0; r < ri_count; r++)
      if (delta_sz[r])
      {
        unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
        ri_map[new_r] = r;
        new_ri_count++;
      }

    wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);

    regionIndices.len = new_ri_count;

    if (unlikely (!c->extend (this))) return_trace (false);

    for (r = 0; r < new_ri_count; r++)
      regionIndices[r] = region_map[src->regionIndices[ri_map[r]]];

    HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();
    unsigned count = itemCount;
    for (unsigned int i = 0; i < count; i++)
    {
      unsigned int old = inner_map.backward (i);
      for (unsigned int r = 0; r < new_ri_count; r++)
        set_item_delta_fast (i, r,
                             src->get_item_delta_fast (old, ri_map[r],
                                                       src_delta_bytes, src_row_size),
                             delta_bytes, row_size);
    }

    return_trace (true);
  }

  void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
  {
    const HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();

    for (unsigned int r = 0; r < regionIndices.len; r++)
    {
      unsigned int region = regionIndices.arrayZ[r];
      if (region_indices.has (region)) continue;
      for (hb_codepoint_t old_gid : inner_map.keys())
        if (get_item_delta_fast (old_gid, r, delta_bytes, row_size) != 0)
        {
          region_indices.add (region);
          break;
        }
    }
  }

  public:
  const HBUINT8 *get_delta_bytes () const
  { return &StructAfter<HBUINT8> (regionIndices); }

  protected:
  HBUINT8 *get_delta_bytes ()
  { return &StructAfter<HBUINT8> (regionIndices); }

  public:
  int32_t get_item_delta_fast (unsigned int item, unsigned int region,
                               const HBUINT8 *delta_bytes, unsigned row_size) const
  {
    if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0;

    const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size;
    unsigned word_count = wordCount ();
    bool is_long = longWords ();
    if (is_long)
    {
      if (region < word_count)
        return ((const HBINT32 *) p)[region];
      else
        return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count];
    }
    else
    {
      if (region < word_count)
        return ((const HBINT16 *) p)[region];
      else
        return (p + HBINT16::static_size * word_count)[region - word_count];
    }
  }
  int32_t get_item_delta (unsigned int item, unsigned int region) const
  {
    return get_item_delta_fast (item, region,
                                get_delta_bytes (),
                                get_row_size ());
  }

  protected:
  void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
                            HBUINT8 *delta_bytes, unsigned row_size)
  {
    HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size;
    unsigned word_count = wordCount ();
    bool is_long = longWords ();
    if (is_long)
    {
      if (region < word_count)
        ((HBINT32 *) p)[region] = delta;
      else
        ((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta;
    }
    else
    {
      if (region < word_count)
        ((HBINT16 *) p)[region] = delta;
      else
        (p + HBINT16::static_size * word_count)[region - word_count] = delta;
    }
  }
  void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
  {
    set_item_delta_fast (item, region, delta,
                         get_delta_bytes (),
                         get_row_size ());
  }

  bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
  unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }

  protected:
  HBUINT16              itemCount;
  HBUINT16              wordSizeCount;
  Array16Of<HBUINT16>   regionIndices;
/*UnsizedArrayOf<HBUINT8>bytesX;*/
  public:
  DEFINE_SIZE_ARRAY (6, regionIndices);
};
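
/* Illustrative sketch (not part of the build): each VarData row packs its
 * deltas with the "word" regions first.  With wordSizeCount = 2 (two int16
 * words, LONG_WORDS clear) and four region indices, a row is laid out as:
 *
 *   [int16 d0][int16 d1][int8 d2][int8 d3]   // get_row_size () == (2+4)*1 == 6
 *
 * and get_item_delta_fast () picks the cursor width from the region's
 * position relative to wordCount ().  With LONG_WORDS set, words are int32,
 * the remainder int16, and the row size doubles. */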

struct MultiVarData
{
  unsigned int get_size () const
  { return min_size
         - regionIndices.min_size + regionIndices.get_size ()
         + StructAfter<CFF2Index> (regionIndices).get_size ();
  }

  void get_delta (unsigned int inner,
                  const int *coords, unsigned int coord_count,
                  const SparseVarRegionList &regions,
                  hb_array_t<float> out,
                  SparseVarRegionList::cache_t *cache = nullptr) const
  {
    auto &deltaSets = StructAfter<decltype (deltaSetsX)> (regionIndices);

    auto values_iter = deltaSets[inner];

    unsigned regionCount = regionIndices.len;
    unsigned count = out.length;
    for (unsigned regionIndex = 0; regionIndex < regionCount; regionIndex++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[regionIndex],
                                       coords, coord_count,
                                       cache);
      if (scalar == 1.f)
        for (unsigned i = 0; i < count; i++)
          out.arrayZ[i] += *values_iter++;
      else if (scalar)
        for (unsigned i = 0; i < count; i++)
          out.arrayZ[i] += *values_iter++ * scalar;
      else
        values_iter += count;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (format.sanitize (c) &&
                  hb_barrier () &&
                  format == 1 &&
                  regionIndices.sanitize (c) &&
                  hb_barrier () &&
                  StructAfter<decltype (deltaSetsX)> (regionIndices).sanitize (c));
  }

  protected:
  HBUINT8               format; // 1
  Array16Of<HBUINT16>   regionIndices;
  TupleList             deltaSetsX;
  public:
  DEFINE_SIZE_MIN (8);
};

struct ItemVariationStore
{
  friend struct item_variations_t;
  using cache_t = VarRegionList::cache_t;

  cache_t *create_cache () const
  {
#ifdef HB_NO_VAR
    return nullptr;
#endif
    auto &r = this+regions;
    unsigned count = r.regionCount;

    float *cache = (float *) hb_malloc (sizeof (float) * count);
    if (unlikely (!cache)) return nullptr;

    for (unsigned i = 0; i < count; i++)
      cache[i] = REGION_CACHE_ITEM_CACHE_INVALID;

    return cache;
  }

  static void destroy_cache (cache_t *cache) { hb_free (cache); }

  private:
  float get_delta (unsigned int outer, unsigned int inner,
                   const int *coords, unsigned int coord_count,
                   VarRegionList::cache_t *cache = nullptr) const
  {
#ifdef HB_NO_VAR
    return 0.f;
#endif

    if (unlikely (outer >= dataSets.len))
      return 0.f;

    return (this+dataSets[outer]).get_delta (inner,
                                             coords, coord_count,
                                             this+regions,
                                             cache);
  }

  public:
  float get_delta (unsigned int index,
                   const int *coords, unsigned int coord_count,
                   VarRegionList::cache_t *cache = nullptr) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    return get_delta (outer, inner, coords, coord_count, cache);
  }
  float get_delta (unsigned int index,
                   hb_array_t<const int> coords,
                   VarRegionList::cache_t *cache = nullptr) const
  {
    return get_delta (index,
                      coords.arrayZ, coords.length,
                      cache);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
#ifdef HB_NO_VAR
    return true;
#endif

    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  hb_barrier () &&
                  format == 1 &&
                  regions.sanitize (c, this) &&
                  dataSets.sanitize (c, this));
  }

  bool serialize (hb_serialize_context_t *c,
                  bool has_long,
                  const hb_vector_t<hb_tag_t>& axis_tags,
                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
                  const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
  {
    TRACE_SERIALIZE (this);
#ifdef HB_NO_VAR
    return_trace (false);
#endif
    if (unlikely (!c->extend_min (this))) return_trace (false);

    format = 1;
    if (!regions.serialize_serialize (c, axis_tags, region_list))
      return_trace (false);

    unsigned num_var_data = vardata_encodings.length;
    if (!num_var_data) return_trace (false);
    if (unlikely (!c->check_assign (dataSets.len, num_var_data,
                                    HB_SERIALIZE_ERROR_INT_OVERFLOW)))
      return_trace (false);

    if (unlikely (!c->extend (dataSets))) return_trace (false);
    for (unsigned i = 0; i < num_var_data; i++)
      if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
        return_trace (false);

    return_trace (true);
  }

  bool serialize (hb_serialize_context_t *c,
                  const ItemVariationStore *src,
                  const hb_array_t <const hb_inc_bimap_t> &inner_maps)
  {
    TRACE_SERIALIZE (this);
#ifdef HB_NO_VAR
    return_trace (false);
#endif

    if (unlikely (!c->extend_min (this))) return_trace (false);

    unsigned int set_count = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      if (inner_maps[i].get_population ())
        set_count++;

    format = 1;

    const auto &src_regions = src+src->regions;

    hb_set_t region_indices;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);

    if (region_indices.in_error ())
      return_trace (false);

    region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);

    /* TODO use constructor when our data-structures support that. */
    hb_inc_bimap_t region_map;
    + hb_iter (region_indices)
    | hb_apply ([&region_map] (unsigned _) { region_map.add(_); })
    ;
    if (region_map.in_error())
      return_trace (false);

    if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
      return_trace (false);

    dataSets.len = set_count;
    if (unlikely (!c->extend (dataSets))) return_trace (false);

    /* TODO: The following code could be simplified when
     * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
    unsigned int set_index = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
    {
      if (!inner_maps[i].get_population ()) continue;
      if (unlikely (!dataSets[set_index++]
                     .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
        return_trace (false);
    }

    return_trace (true);
  }

  ItemVariationStore *copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    hb_vector_t <hb_inc_bimap_t> inner_maps;
    unsigned count = dataSets.len;
    for (unsigned i = 0; i < count; i++)
    {
      hb_inc_bimap_t *map = inner_maps.push ();
      if (!c->propagate_error(inner_maps))
        return_trace(nullptr);
      auto &data = this+dataSets[i];

      unsigned itemCount = data.get_item_count ();
      for (unsigned j = 0; j < itemCount; j++)
        map->add (j);
    }

    if (unlikely (!out->serialize (c, this, inner_maps))) return_trace (nullptr);

    return_trace (out);
  }

  bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const
  {
    TRACE_SUBSET (this);
#ifdef HB_NO_VAR
    return_trace (false);
#endif

    ItemVariationStore *varstore_prime = c->serializer->start_embed<ItemVariationStore> ();
    if (unlikely (!varstore_prime)) return_trace (false);

    varstore_prime->serialize (c->serializer, this, inner_maps);

    return_trace (
        !c->serializer->in_error()
        && varstore_prime->dataSets);
  }

  unsigned int get_region_index_count (unsigned int major) const
  {
#ifdef HB_NO_VAR
    return 0;
#endif
    return (this+dataSets[major]).get_region_index_count ();
  }

  void get_region_scalars (unsigned int major,
                           const int *coords, unsigned int coord_count,
                           float *scalars /*OUT*/,
                           unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[major]).get_region_scalars (coords, coord_count,
                                               this+regions,
                                               &scalars[0], num_scalars);
  }

  unsigned int get_sub_table_count () const
  {
#ifdef HB_NO_VAR
    return 0;
#endif
    return dataSets.len;
  }

  const VarData& get_sub_table (unsigned i) const
  {
#ifdef HB_NO_VAR
    return Null (VarData);
#endif
    return this+dataSets[i];
  }

  const VarRegionList& get_region_list () const
  {
#ifdef HB_NO_VAR
    return Null (VarRegionList);
#endif
    return this+regions;
  }

  protected:
  HBUINT16                      format;
  Offset32To<VarRegionList>     regions;
  Array16OfOffset32To<VarData>  dataSets;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
};
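
/* Illustrative usage sketch (not part of the build): a VariationIndex packs
 * (outer, inner) into one 32-bit value; the cache amortizes region-scalar
 * evaluation across many lookups at the same coordinates:
 *
 *   const ItemVariationStore &store = ...; // e.g. from GDEF
 *   auto *cache = store.create_cache ();   // may be nullptr; that's fine
 *   unsigned varIdx = (outer << 16) | inner;
 *   float delta = store.get_delta (varIdx, coords, coord_count, cache);
 *   ItemVariationStore::destroy_cache (cache);
 */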
3437
3438 struct MultiItemVariationStore
3439 {
3440 using cache_t = SparseVarRegionList::cache_t;
3441
create_cacheOT::MultiItemVariationStore3442 cache_t *create_cache () const
3443 {
3444 #ifdef HB_NO_VAR
3445 return nullptr;
3446 #endif
3447 auto &r = this+regions;
3448 unsigned count = r.regions.len;
3449
3450 float *cache = (float *) hb_malloc (sizeof (float) * count);
3451 if (unlikely (!cache)) return nullptr;
3452
3453 for (unsigned i = 0; i < count; i++)
3454 cache[i] = REGION_CACHE_ITEM_CACHE_INVALID;
3455
3456 return cache;
3457 }
3458
3459 static void destroy_cache (cache_t *cache) { hb_free (cache); }
3460
3461 private:
3462 void get_delta (unsigned int outer, unsigned int inner,
3463 const int *coords, unsigned int coord_count,
3464 hb_array_t<float> out,
3465 VarRegionList::cache_t *cache = nullptr) const
3466 {
3467 #ifdef HB_NO_VAR
3468 return;
3469 #endif
3470
3471 if (unlikely (outer >= dataSets.len))
3472 return;
3473
3474 return (this+dataSets[outer]).get_delta (inner,
3475 coords, coord_count,
3476 this+regions,
3477 out,
3478 cache);
3479 }
3480
3481 public:
3482 void get_delta (unsigned int index,
3483 const int *coords, unsigned int coord_count,
3484 hb_array_t<float> out,
3485 VarRegionList::cache_t *cache = nullptr) const
3486 {
3487 unsigned int outer = index >> 16;
3488 unsigned int inner = index & 0xFFFF;
3489 get_delta (outer, inner, coords, coord_count, out, cache);
3490 }
3491 void get_delta (unsigned int index,
3492 hb_array_t<const int> coords,
3493 hb_array_t<float> out,
3494 VarRegionList::cache_t *cache = nullptr) const
3495 {
3496 return get_delta (index,
3497 coords.arrayZ, coords.length,
3498 out,
3499 cache);
3500 }
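/* Note: as in ItemVariationStore, a 32-bit variation index packs the outer
 * (sub-table) index in its high 16 bits and the inner (row) index in its
 * low 16 bits, so e.g. varIdx 0x0003000A selects row 10 of dataSets[3]. */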
3501
3502 bool sanitize (hb_sanitize_context_t *c) const
3503 {
3504 #ifdef HB_NO_VAR
3505 return true;
3506 #endif
3507
3508 TRACE_SANITIZE (this);
3509 return_trace (c->check_struct (this) &&
3510 hb_barrier () &&
3511 format == 1 &&
3512 regions.sanitize (c, this) &&
3513 dataSets.sanitize (c, this));
3514 }
3515
3516 protected:
3517 HBUINT16 format; // 1
3518 Offset32To<SparseVarRegionList> regions;
3519 Array16OfOffset32To<MultiVarData> dataSets;
3520 public:
3521 DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
3522 };
3523
3524 #undef REGION_CACHE_ITEM_CACHE_INVALID
3525
3526 template <typename MapCountT>
3527 struct DeltaSetIndexMapFormat01
3528 {
3529 friend struct DeltaSetIndexMap;
3530
3531 unsigned get_size () const
3532 { return min_size + mapCount * get_width (); }
3533
3534 private:
3535 DeltaSetIndexMapFormat01* copy (hb_serialize_context_t *c) const
3536 {
3537 TRACE_SERIALIZE (this);
3538 return_trace (c->embed (this));
3539 }
3540
3541 template <typename T>
3542 bool serialize (hb_serialize_context_t *c, const T &plan)
3543 {
3544 unsigned int width = plan.get_width ();
3545 unsigned int inner_bit_count = plan.get_inner_bit_count ();
3546 const hb_array_t<const uint32_t> output_map = plan.get_output_map ();
3547
3548 TRACE_SERIALIZE (this);
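/* Sanity-check the packing parameters before encoding entryFormat below:
 * both are stored biased by one, so inner_bit_count must be in [1, 16] and
 * width in [1, 4] for the round-trip through the accessors to hold. */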
3549 if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
3550 return_trace (false);
3551 if (unlikely (!c->extend_min (this))) return_trace (false);
3552
3553 entryFormat = ((width-1)<<4)|(inner_bit_count-1);
3554 mapCount = output_map.length;
3555 HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
3556 if (unlikely (!p)) return_trace (false);
3557 for (unsigned int i = 0; i < output_map.length; i++)
3558 {
3559 unsigned int v = output_map.arrayZ[i];
3560 if (v)
3561 {
3562 unsigned int outer = v >> 16;
3563 unsigned int inner = v & 0xFFFF;
3564 unsigned int u = (outer << inner_bit_count) | inner;
3565 for (unsigned int w = width; w > 0;)
3566 {
3567 p[--w] = u;
3568 u >>= 8;
3569 }
3570 }
3571 p += width;
3572 }
3573 return_trace (true);
3574 }
3575
3576 uint32_t map (unsigned int v) const /* Returns 16.16 outer.inner. */
3577 {
3578 /* If count is zero, pass value unchanged. This takes
3579 * care of direct mapping for advance map. */
3580 if (!mapCount)
3581 return v;
3582
3583 if (v >= mapCount)
3584 v = mapCount - 1;
3585
3586 unsigned int u = 0;
3587 { /* Fetch it. */
3588 unsigned int w = get_width ();
3589 const HBUINT8 *p = mapDataZ.arrayZ + w * v;
3590 for (; w; w--)
3591 u = (u << 8) + *p++;
3592 }
3593
3594 { /* Repack it. */
3595 unsigned int n = get_inner_bit_count ();
3596 unsigned int outer = u >> n;
3597 unsigned int inner = u & ((1 << n) - 1);
3598 u = (outer<<16) | inner;
3599 }
3600
3601 return u;
3602 }
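/* Worked example (illustrative values): with entryFormat = 0x16, get_width ()
 * is ((0x16 >> 4) & 3) + 1 = 2 bytes per entry and get_inner_bit_count () is
 * (0x16 & 0xF) + 1 = 7 bits. A stored entry of 0x01 0x8A is fetched as
 * u = 0x018A; repacking yields outer = u >> 7 = 3 and inner = u & 0x7F = 10,
 * so map () returns 0x0003000A in 16.16 outer.inner form. */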
3603
3604 unsigned get_map_count () const { return mapCount; }
3605 unsigned get_width () const { return ((entryFormat >> 4) & 3) + 1; }
3606 unsigned get_inner_bit_count () const { return (entryFormat & 0xF) + 1; }
3607
3608
3609 bool sanitize (hb_sanitize_context_t *c) const
3610 {
3611 TRACE_SANITIZE (this);
3612 return_trace (c->check_struct (this) &&
3613 hb_barrier () &&
3614 c->check_range (mapDataZ.arrayZ,
3615 mapCount,
3616 get_width ()));
3617 }
3618
3619 protected:
3620 HBUINT8 format; /* Format identifier--format = 0 or 1 */
3621 HBUINT8 entryFormat; /* A packed field that describes the compressed
3622 * representation of delta-set indices. */
3623 MapCountT mapCount; /* The number of mapping entries. */
3624 UnsizedArrayOf<HBUINT8>
3625 mapDataZ; /* The delta-set index mapping data. */
3626
3627 public:
3628 DEFINE_SIZE_ARRAY (2+MapCountT::static_size, mapDataZ);
3629 };
3630
3631 struct DeltaSetIndexMap
3632 {
3633 template <typename T>
3634 bool serialize (hb_serialize_context_t *c, const T &plan)
3635 {
3636 TRACE_SERIALIZE (this);
3637 unsigned length = plan.get_output_map ().length;
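/* Format 0 stores mapCount in 16 bits; format 1, with a 32-bit count, is
 * only needed when the output map has more than 0xFFFF entries. */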
3638 u.format = length <= 0xFFFF ? 0 : 1;
3639 switch (u.format) {
3640 case 0: hb_barrier (); return_trace (u.format0.serialize (c, plan));
3641 case 1: hb_barrier (); return_trace (u.format1.serialize (c, plan));
3642 default:return_trace (false);
3643 }
3644 }
3645
3646 uint32_t map (unsigned v) const
3647 {
3648 switch (u.format) {
3649 case 0: hb_barrier (); return (u.format0.map (v));
3650 case 1: hb_barrier (); return (u.format1.map (v));
3651 default:return v;
3652 }
3653 }
3654
3655 unsigned get_map_count () const
3656 {
3657 switch (u.format) {
3658 case 0: hb_barrier (); return u.format0.get_map_count ();
3659 case 1: hb_barrier (); return u.format1.get_map_count ();
3660 default:return 0;
3661 }
3662 }
3663
3664 unsigned get_width () const
3665 {
3666 switch (u.format) {
3667 case 0: hb_barrier (); return u.format0.get_width ();
3668 case 1: hb_barrier (); return u.format1.get_width ();
3669 default:return 0;
3670 }
3671 }
3672
3673 unsigned get_inner_bit_count () const
3674 {
3675 switch (u.format) {
3676 case 0: hb_barrier (); return u.format0.get_inner_bit_count ();
3677 case 1: hb_barrier (); return u.format1.get_inner_bit_count ();
3678 default:return 0;
3679 }
3680 }
3681
3682 bool sanitize (hb_sanitize_context_t *c) const
3683 {
3684 TRACE_SANITIZE (this);
3685 if (!u.format.sanitize (c)) return_trace (false);
3686 hb_barrier ();
3687 switch (u.format) {
3688 case 0: hb_barrier (); return_trace (u.format0.sanitize (c));
3689 case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
3690 default:return_trace (true);
3691 }
3692 }
3693
3694 DeltaSetIndexMap* copy (hb_serialize_context_t *c) const
3695 {
3696 TRACE_SERIALIZE (this);
3697 switch (u.format) {
3698 case 0: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format0.copy (c)));
3699 case 1: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format1.copy (c)));
3700 default:return_trace (nullptr);
3701 }
3702 }
3703
3704 protected:
3705 union {
3706 HBUINT8 format; /* Format identifier */
3707 DeltaSetIndexMapFormat01<HBUINT16> format0;
3708 DeltaSetIndexMapFormat01<HBUINT32> format1;
3709 } u;
3710 public:
3711 DEFINE_SIZE_UNION (1, format);
3712 };
3713
3714
3715 struct ItemVarStoreInstancer
3716 {
3717 ItemVarStoreInstancer (const ItemVariationStore *varStore_,
3718 const DeltaSetIndexMap *varIdxMap,
3719 hb_array_t<const int> coords,
3720 VarRegionList::cache_t *cache = nullptr) :
3721 varStore (varStore_), varIdxMap (varIdxMap), coords (coords), cache (cache)
3722 {
3723 if (!varStore)
3724 varStore = &Null(ItemVariationStore);
3725 }
3726
3727 operator bool () const { return varStore && bool (coords); }
3728
3729 float operator[] (uint32_t varIdx) const
3730 { return (*this) (varIdx); }
3731
3732 float operator() (uint32_t varIdx, unsigned short offset = 0) const
3733 {
3734 if (varIdxMap)
3735 varIdx = varIdxMap->map (VarIdx::add (varIdx, offset));
3736 else
3737 varIdx += offset;
3738 return coords ? varStore->get_delta (varIdx, coords, cache) : 0.f;
3739 }
3740
3741 const ItemVariationStore *varStore;
3742 const DeltaSetIndexMap *varIdxMap;
3743 hb_array_t<const int> coords;
3744 VarRegionList::cache_t *cache;
3745 };
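/* Usage sketch (hypothetical 'store', 'map' and 'coords' values): an
 * instancer bundles a variation store, an optional DeltaSetIndexMap and the
 * normalized design-space coordinates so that callers resolve deltas with a
 * single subscript:
 *
 *   ItemVarStoreInstancer instancer (&store, &map, coords);
 *   float delta = instancer[varIdx]; // 0.f when coords are empty
 */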
3746
3747 struct MultiItemVarStoreInstancer
3748 {
3749 MultiItemVarStoreInstancer (const MultiItemVariationStore *varStore_,
3750 const DeltaSetIndexMap *varIdxMap,
3751 hb_array_t<const int> coords,
3752 SparseVarRegionList::cache_t *cache = nullptr) :
3753 varStore (varStore_), varIdxMap (varIdxMap), coords (coords), cache (cache)
3754 {
3755 if (!varStore)
3756 varStore = &Null(MultiItemVariationStore);
3757 }
3758
3759 operator bool () const { return varStore && bool (coords); }
3760
3761 float operator[] (uint32_t varIdx) const
3762 {
3763 float v = 0;
3764 (*this) (hb_array (&v, 1), varIdx);
3765 return v;
3766 }
3767
3768 void operator() (hb_array_t<float> out, uint32_t varIdx, unsigned short offset = 0) const
3769 {
3770 if (coords)
3771 {
3772 if (varIdxMap)
3773 varIdx = varIdxMap->map (VarIdx::add (varIdx, offset));
3774 else
3775 varIdx += offset;
3776 varStore->get_delta (varIdx, coords, out, cache);
3777 }
3778 else
3779 for (unsigned i = 0; i < out.length; i++)
3780 out.arrayZ[i] = 0.f;
3781 }
3782
3783 const MultiItemVariationStore *varStore;
3784 const DeltaSetIndexMap *varIdxMap;
3785 hb_array_t<const int> coords;
3786 SparseVarRegionList::cache_t *cache;
3787 };
3788
3789
3790 /*
3791 * Feature Variations
3792 */
3793 enum Cond_with_Var_flag_t
3794 {
3795 KEEP_COND_WITH_VAR = 0,
3796 KEEP_RECORD_WITH_VAR = 1,
3797 DROP_COND_WITH_VAR = 2,
3798 DROP_RECORD_WITH_VAR = 3,
3799 };
3800
3801 struct Condition;
3802
3803 template <typename Instancer>
3804 static bool
3805 _hb_recurse_condition_evaluate (const struct Condition &condition,
3806 const int *coords,
3807 unsigned int coord_len,
3808 Instancer *instancer);
3809
3810 struct ConditionAxisRange
3811 {
3812 friend struct Condition;
3813
3814 bool subset (hb_subset_context_t *c) const
3815 {
3816 TRACE_SUBSET (this);
3817 auto *out = c->serializer->embed (this);
3818 if (unlikely (!out)) return_trace (false);
3819
3820 const hb_map_t *index_map = &c->plan->axes_index_map;
3821 if (index_map->is_empty ()) return_trace (true);
3822
3823 const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map;
3824 hb_codepoint_t *axis_tag;
3825 if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) ||
3826 !index_map->has (axisIndex))
3827 return_trace (false);
3828
3829 const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location;
3830 Triple axis_limit{-1.0, 0.0, 1.0};
3831 Triple *normalized_limit;
3832 if (normalized_axes_location.has (*axis_tag, &normalized_limit))
3833 axis_limit = *normalized_limit;
3834
3835 const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances;
3836 TripleDistances axis_triple_distances{1.0, 1.0};
3837 TripleDistances *triple_dists;
3838 if (axes_triple_distances.has (*axis_tag, &triple_dists))
3839 axis_triple_distances = *triple_dists;
3840
3841 float normalized_min = renormalizeValue ((double) filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false);
3842 float normalized_max = renormalizeValue ((double) filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false);
3843 out->filterRangeMinValue.set_float (normalized_min);
3844 out->filterRangeMaxValue.set_float (normalized_max);
3845
3846 return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
3847 HB_SERIALIZE_ERROR_INT_OVERFLOW));
3848 }
3849
3850 private:
3851 Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
3852 hb_map_t *condition_map /* OUT */) const
3853 {
3854 // Invalid axis index: drop the entire record.
3855 if (!c->axes_index_tag_map->has (axisIndex))
3856 return DROP_RECORD_WITH_VAR;
3857
3858 hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);
3859
3860 Triple axis_range (-1.0, 0.0, 1.0);
3861 Triple *axis_limit;
3862 bool axis_set_by_user = false;
3863 if (c->axes_location->has (axis_tag, &axis_limit))
3864 {
3865 axis_range = *axis_limit;
3866 axis_set_by_user = true;
3867 }
3868
3869 float axis_min_val = axis_range.minimum;
3870 float axis_default_val = axis_range.middle;
3871 float axis_max_val = axis_range.maximum;
3872
3873 float filter_min_val = filterRangeMinValue.to_float ();
3874 float filter_max_val = filterRangeMaxValue.to_float ();
3875
3876 if (axis_default_val < filter_min_val ||
3877 axis_default_val > filter_max_val)
3878 c->apply = false;
3879
3880 // Condition not met: drop the entire record.
3881 if (axis_min_val > filter_max_val || axis_max_val < filter_min_val ||
3882 filter_min_val > filter_max_val)
3883 return DROP_RECORD_WITH_VAR;
3884
3885 // Condition met and axis pinned: drop the condition.
3886 if (axis_set_by_user && axis_range.is_point ())
3887 return DROP_COND_WITH_VAR;
3888
3889 if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
3890 {
3891 // add axisIndex->value into the hashmap so we can check if the record is
3892 // unique with variations
3893 int16_t int_filter_max_val = filterRangeMaxValue.to_int ();
3894 int16_t int_filter_min_val = filterRangeMinValue.to_int ();
3895 hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val;
3896
3897 condition_map->set (axisIndex, val);
3898 return KEEP_COND_WITH_VAR;
3899 }
3900 return KEEP_RECORD_WITH_VAR;
3901 }
3902
3903 template <typename Instancer>
3904 bool evaluate (const int *coords, unsigned int coord_len,
3905 Instancer *instancer HB_UNUSED) const
3906 {
3907 int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
3908 return filterRangeMinValue.to_int () <= coord && coord <= filterRangeMaxValue.to_int ();
3909 }
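/* Example (illustrative): coordinates are normalized 2.14 fixed-point
 * values, so a condition with filterRangeMinValue = 0.25 (4096) and
 * filterRangeMaxValue = 0.75 (12288) matches an axis coordinate of 0.5
 * (8192); an axis beyond coord_len is treated as its default value, 0. */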
3910
3911 bool sanitize (hb_sanitize_context_t *c) const
3912 {
3913 TRACE_SANITIZE (this);
3914 return_trace (c->check_struct (this));
3915 }
3916
3917 protected:
3918 HBUINT16 format; /* Format identifier--format = 1 */
3919 HBUINT16 axisIndex;
3920 F2DOT14 filterRangeMinValue;
3921 F2DOT14 filterRangeMaxValue;
3922 public:
3923 DEFINE_SIZE_STATIC (8);
3924 };
3925
3926 struct ConditionValue
3927 {
3928 friend struct Condition;
3929
3930 bool subset (hb_subset_context_t *c) const
3931 {
3932 TRACE_SUBSET (this);
3933 // TODO(subset)
3934 return_trace (false);
3935 }
3936
3937 private:
3938 template <typename Instancer>
3939 bool evaluate (const int *coords, unsigned int coord_len,
3940 Instancer *instancer) const
3941 {
3942 signed value = defaultValue;
3943 value += (*instancer)[varIdx];
3944 return value > 0;
3945 }
3946
3947 bool subset (hb_subset_context_t *c,
3948 hb_subset_layout_context_t *l,
3949 bool insert_catch_all) const
3950 {
3951 TRACE_SUBSET (this);
3952 // TODO(subset)
3953 return_trace (false);
3954 }
3955
3956 bool sanitize (hb_sanitize_context_t *c) const
3957 {
3958 TRACE_SANITIZE (this);
3959 return_trace (c->check_struct (this));
3960 }
3961
3962 protected:
3963 HBUINT16 format; /* Format identifier--format = 2 */
3964 HBINT16 defaultValue; /* Value at default instance. */
3965 VarIdx varIdx; /* Variation index */
3966 public:
3967 DEFINE_SIZE_STATIC (8);
3968 };
3969
3970 struct ConditionAnd
3971 {
3972 friend struct Condition;
3973
3974 bool subset (hb_subset_context_t *c) const
3975 {
3976 TRACE_SUBSET (this);
3977 // TODO(subset)
3978 return_trace (false);
3979 }
3980
3981 private:
3982 template <typename Instancer>
3983 bool evaluate (const int *coords, unsigned int coord_len,
3984 Instancer *instancer) const
3985 {
3986 unsigned int count = conditions.len;
3987 for (unsigned int i = 0; i < count; i++)
3988 if (!_hb_recurse_condition_evaluate (this+conditions.arrayZ[i],
3989 coords, coord_len,
3990 instancer))
3991 return false;
3992 return true;
3993 }
3994
3995 bool subset (hb_subset_context_t *c,
3996 hb_subset_layout_context_t *l,
3997 bool insert_catch_all) const
3998 {
3999 TRACE_SUBSET (this);
4000 // TODO(subset)
4001 return_trace (false);
4002 }
4003
4004 bool sanitize (hb_sanitize_context_t *c) const
4005 {
4006 TRACE_SANITIZE (this);
4007 return_trace (conditions.sanitize (c, this));
4008 }
4009
4010 protected:
4011 HBUINT16 format; /* Format identifier--format = 3 */
4012 Array8OfOffset24To<struct Condition> conditions;
4013 public:
4014 DEFINE_SIZE_ARRAY (3, conditions);
4015 };
4016
4017 struct ConditionOr
4018 {
4019 friend struct Condition;
4020
4021 bool subset (hb_subset_context_t *c) const
4022 {
4023 TRACE_SUBSET (this);
4024 // TODO(subset)
4025 return_trace (false);
4026 }
4027
4028 private:
4029 template <typename Instancer>
4030 bool evaluate (const int *coords, unsigned int coord_len,
4031 Instancer *instancer) const
4032 {
4033 unsigned int count = conditions.len;
4034 for (unsigned int i = 0; i < count; i++)
4035 if (_hb_recurse_condition_evaluate (this+conditions.arrayZ[i],
4036 coords, coord_len,
4037 instancer))
4038 return true;
4039 return false;
4040 }
4041
4042 bool subset (hb_subset_context_t *c,
4043 hb_subset_layout_context_t *l,
4044 bool insert_catch_all) const
4045 {
4046 TRACE_SUBSET (this);
4047 // TODO(subset)
4048 return_trace (false);
4049 }
4050
4051 bool sanitize (hb_sanitize_context_t *c) const
4052 {
4053 TRACE_SANITIZE (this);
4054 return_trace (conditions.sanitize (c, this));
4055 }
4056
4057 protected:
4058 HBUINT16 format; /* Format identifier--format = 4 */
4059 Array8OfOffset24To<struct Condition> conditions;
4060 public:
4061 DEFINE_SIZE_ARRAY (3, conditions);
4062 };
4063
4064 struct ConditionNegate
4065 {
4066 friend struct Condition;
4067
4068 bool subset (hb_subset_context_t *c) const
4069 {
4070 TRACE_SUBSET (this);
4071 // TODO(subset)
4072 return_trace (false);
4073 }
4074
4075 private:
4076 template <typename Instancer>
4077 bool evaluate (const int *coords, unsigned int coord_len,
4078 Instancer *instancer) const
4079 {
4080 return !_hb_recurse_condition_evaluate (this+condition,
4081 coords, coord_len,
4082 instancer);
4083 }
4084
4085 bool subset (hb_subset_context_t *c,
4086 hb_subset_layout_context_t *l,
4087 bool insert_catch_all) const
4088 {
4089 TRACE_SUBSET (this);
4090 // TODO(subset)
4091 return_trace (false);
4092 }
4093
4094 bool sanitize (hb_sanitize_context_t *c) const
4095 {
4096 TRACE_SANITIZE (this);
4097 return_trace (condition.sanitize (c, this));
4098 }
4099
4100 protected:
4101 HBUINT16 format; /* Format identifier--format = 5 */
4102 Offset24To<struct Condition> condition;
4103 public:
4104 DEFINE_SIZE_STATIC (5);
4105 };
4106
4107 struct Condition
4108 {
4109 template <typename Instancer>
4110 bool evaluate (const int *coords, unsigned int coord_len,
4111 Instancer *instancer) const
4112 {
4113 switch (u.format) {
4114 case 1: hb_barrier (); return u.format1.evaluate (coords, coord_len, instancer);
4115 case 2: hb_barrier (); return u.format2.evaluate (coords, coord_len, instancer);
4116 case 3: hb_barrier (); return u.format3.evaluate (coords, coord_len, instancer);
4117 case 4: hb_barrier (); return u.format4.evaluate (coords, coord_len, instancer);
4118 case 5: hb_barrier (); return u.format5.evaluate (coords, coord_len, instancer);
4119 default:return false;
4120 }
4121 }
4122
4123 Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
4124 hb_map_t *condition_map /* OUT */) const
4125 {
4126 switch (u.format) {
4127 case 1: hb_barrier (); return u.format1.keep_with_variations (c, condition_map);
4128 // TODO(subset)
4129 default: c->apply = false; return KEEP_COND_WITH_VAR;
4130 }
4131 }
4132
4133 template <typename context_t, typename ...Ts>
4134 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
4135 {
4136 if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
4137 TRACE_DISPATCH (this, u.format);
4138 switch (u.format) {
4139 case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
4140 case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
4141 case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
4142 case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
4143 case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
4144 default:return_trace (c->default_return_value ());
4145 }
4146 }
4147
4148 bool sanitize (hb_sanitize_context_t *c) const
4149 {
4150 TRACE_SANITIZE (this);
4151 if (!u.format.sanitize (c)) return_trace (false);
4152 hb_barrier ();
4153 switch (u.format) {
4154 case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
4155 case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
4156 case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
4157 case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
4158 case 5: hb_barrier (); return_trace (u.format5.sanitize (c));
4159 default:return_trace (true);
4160 }
4161 }
4162
4163 protected:
4164 union {
4165 HBUINT16 format; /* Format identifier */
4166 ConditionAxisRange format1;
4167 ConditionValue format2;
4168 ConditionAnd format3;
4169 ConditionOr format4;
4170 ConditionNegate format5;
4171 } u;
4172 public:
4173 DEFINE_SIZE_UNION (2, format);
4174 };
4175
4176 template <typename Instancer>
4177 bool
4178 _hb_recurse_condition_evaluate (const struct Condition &condition,
4179 const int *coords,
4180 unsigned int coord_len,
4181 Instancer *instancer)
4182 {
4183 return condition.evaluate (coords, coord_len, instancer);
4184 }
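/* This trampoline lets ConditionAnd, ConditionOr and ConditionNegate recurse
 * into Condition, which is only forward-declared at the point where their
 * evaluate () methods are defined; the template is instantiated here, once
 * Condition is complete. */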
4185
4186 struct ConditionList
4187 {
4188 const Condition& operator[] (unsigned i) const
4189 { return this+conditions[i]; }
4190
4191 bool sanitize (hb_sanitize_context_t *c) const
4192 {
4193 TRACE_SANITIZE (this);
4194 return_trace (conditions.sanitize (c, this));
4195 }
4196
4197 protected:
4198 Array32OfOffset32To<Condition> conditions;
4199 public:
4200 DEFINE_SIZE_ARRAY (4, conditions);
4201 };
4202
4203 struct ConditionSet
4204 {
4205 bool evaluate (const int *coords, unsigned int coord_len,
4206 ItemVarStoreInstancer *instancer) const
4207 {
4208 unsigned int count = conditions.len;
4209 for (unsigned int i = 0; i < count; i++)
4210 if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len, instancer))
4211 return false;
4212 return true;
4213 }
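/* Note: a ConditionSet is a conjunction; it matches only when every
 * condition matches, so an empty set always evaluates to true. */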
4214
4215 void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
4216 {
4217 hb_map_t *condition_map = hb_map_create ();
4218 if (unlikely (!condition_map)) return;
4219 hb::shared_ptr<hb_map_t> p {condition_map};
4220
4221 hb_set_t *cond_set = hb_set_create ();
4222 if (unlikely (!cond_set)) return;
4223 hb::shared_ptr<hb_set_t> s {cond_set};
4224
4225 c->apply = true;
4226 bool should_keep = false;
4227 unsigned num_kept_cond = 0, cond_idx = 0;
4228 for (const auto& offset : conditions)
4229 {
4230 Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
4231 // Condition not met or out of range: drop the entire record.
4232 if (ret == DROP_RECORD_WITH_VAR)
4233 return;
4234
4235 if (ret == KEEP_COND_WITH_VAR)
4236 {
4237 should_keep = true;
4238 cond_set->add (cond_idx);
4239 num_kept_cond++;
4240 }
4241
4242 if (ret == KEEP_RECORD_WITH_VAR)
4243 should_keep = true;
4244
4245 cond_idx++;
4246 }
4247
4248 if (!should_keep) return;
4249
4250 // Check if this condition set is unique with variations.
4251 if (c->conditionset_map->has (p))
4252 // Duplicate found: drop the entire record.
4253 return;
4254
4255 c->conditionset_map->set (p, 1);
4256 c->record_cond_idx_map->set (c->cur_record_idx, s);
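/* If the record is kept but no individual condition needs to be retained,
 * the record applies unconditionally; mark the whole collection universal. */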
4257 if (should_keep && num_kept_cond == 0)
4258 c->universal = true;
4259 }
4260
4261 bool subset (hb_subset_context_t *c,
4262 hb_subset_layout_context_t *l,
4263 bool insert_catch_all) const
4264 {
4265 TRACE_SUBSET (this);
4266 auto *out = c->serializer->start_embed (this);
4267 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4268
4269 if (insert_catch_all) return_trace (true);
4270
4271 hb_set_t *retained_cond_set = nullptr;
4272 if (l->feature_record_cond_idx_map != nullptr)
4273 retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);
4274
4275 unsigned int count = conditions.len;
4276 for (unsigned int i = 0; i < count; i++)
4277 {
4278 if (retained_cond_set != nullptr && !retained_cond_set->has (i))
4279 continue;
4280 subset_offset_array (c, out->conditions, this) (conditions[i]);
4281 }
4282
4283 return_trace (bool (out->conditions));
4284 }
4285
4286 bool sanitize (hb_sanitize_context_t *c) const
4287 {
4288 TRACE_SANITIZE (this);
4289 return_trace (conditions.sanitize (c, this));
4290 }
4291
4292 protected:
4293 Array16OfOffset32To<Condition> conditions;
4294 public:
4295 DEFINE_SIZE_ARRAY (2, conditions);
4296 };
4297
4298 struct FeatureTableSubstitutionRecord
4299 {
4300 friend struct FeatureTableSubstitution;
4301
4302 void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
4303 {
4304 return (base+feature).add_lookup_indexes_to (lookup_indexes);
4305 }
4306
4307 void closure_features (const void *base,
4308 const hb_map_t *lookup_indexes,
4309 hb_set_t *feature_indexes /* OUT */) const
4310 {
4311 if ((base+feature).intersects_lookup_indexes (lookup_indexes))
4312 feature_indexes->add (featureIndex);
4313 }
4314
4315 void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
4316 hb_set_t& catch_all_record_feature_idxes,
4317 const hb_set_t *feature_indices,
4318 const void *base) const
4319 {
4320 if (feature_indices->has (featureIndex))
4321 {
4322 feature_substitutes_map->set (featureIndex, &(base+feature));
4323 catch_all_record_feature_idxes.add (featureIndex);
4324 }
4325 }
4326
4327 bool serialize (hb_subset_layout_context_t *c,
4328 unsigned feature_index,
4329 const Feature *f, const Tag *tag)
4330 {
4331 TRACE_SERIALIZE (this);
4332 hb_serialize_context_t *s = c->subset_context->serializer;
4333 if (unlikely (!s->extend_min (this))) return_trace (false);
4334
4335 uint32_t *new_feature_idx;
4336 if (!c->feature_index_map->has (feature_index, &new_feature_idx))
4337 return_trace (false);
4338
4339 if (!s->check_assign (featureIndex, *new_feature_idx, HB_SERIALIZE_ERROR_INT_OVERFLOW))
4340 return_trace (false);
4341
4342 s->push ();
4343 bool ret = f->subset (c->subset_context, c, tag);
4344 if (ret) s->add_link (feature, s->pop_pack ());
4345 else s->pop_discard ();
4346
4347 return_trace (ret);
4348 }
4349
4350 bool subset (hb_subset_layout_context_t *c, const void *base) const
4351 {
4352 TRACE_SUBSET (this);
4353 uint32_t *new_feature_index;
4354 if (!c->feature_index_map->has (featureIndex, &new_feature_index))
4355 return_trace (false);
4356
4357 auto *out = c->subset_context->serializer->embed (this);
4358 if (unlikely (!out)) return_trace (false);
4359
4360 out->featureIndex = *new_feature_index;
4361 return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
4362 }
4363
4364 bool sanitize (hb_sanitize_context_t *c, const void *base) const
4365 {
4366 TRACE_SANITIZE (this);
4367 return_trace (c->check_struct (this) && feature.sanitize (c, base));
4368 }
4369
4370 protected:
4371 HBUINT16 featureIndex;
4372 Offset32To<Feature> feature;
4373 public:
4374 DEFINE_SIZE_STATIC (6);
4375 };
4376
4377 struct FeatureTableSubstitution
4378 {
4379 const Feature *find_substitute (unsigned int feature_index) const
4380 {
4381 unsigned int count = substitutions.len;
4382 for (unsigned int i = 0; i < count; i++)
4383 {
4384 const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
4385 if (record.featureIndex == feature_index)
4386 return &(this+record.feature);
4387 }
4388 return nullptr;
4389 }
4390
4391 void collect_lookups (const hb_set_t *feature_indexes,
4392 hb_set_t *lookup_indexes /* OUT */) const
4393 {
4394 + hb_iter (substitutions)
4395 | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
4396 | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
4397 { r.collect_lookups (this, lookup_indexes); })
4398 ;
4399 }
4400
4401 void closure_features (const hb_map_t *lookup_indexes,
4402 hb_set_t *feature_indexes /* OUT */) const
4403 {
4404 for (const FeatureTableSubstitutionRecord& record : substitutions)
4405 record.closure_features (this, lookup_indexes, feature_indexes);
4406 }
4407
4408 bool intersects_features (const hb_map_t *feature_index_map) const
4409 {
4410 for (const FeatureTableSubstitutionRecord& record : substitutions)
4411 {
4412 if (feature_index_map->has (record.featureIndex)) return true;
4413 }
4414 return false;
4415 }
4416
4417 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
4418 {
4419 for (const FeatureTableSubstitutionRecord& record : substitutions)
4420 record.collect_feature_substitutes_with_variations (c->feature_substitutes_map,
4421 c->catch_all_record_feature_idxes,
4422 c->feature_indices, this);
4423 }
4424
4425 bool subset (hb_subset_context_t *c,
4426 hb_subset_layout_context_t *l,
4427 bool insert_catch_all) const
4428 {
4429 TRACE_SUBSET (this);
4430 auto *out = c->serializer->start_embed (*this);
4431 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4432
4433 out->version.major = version.major;
4434 out->version.minor = version.minor;
4435
4436 if (insert_catch_all)
4437 {
4438 for (unsigned feature_index : *(l->catch_all_record_feature_idxes))
4439 {
4440 hb_pair_t<const void*, const void*> *p;
4441 if (!l->feature_idx_tag_map->has (feature_index, &p))
4442 return_trace (false);
4443 auto *o = out->substitutions.serialize_append (c->serializer);
4444 if (!o->serialize (l, feature_index,
4445 reinterpret_cast<const Feature*> (p->first),
4446 reinterpret_cast<const Tag*> (p->second)))
4447 return_trace (false);
4448 }
4449 return_trace (true);
4450 }
4451
4452 + substitutions.iter ()
4453 | hb_apply (subset_record_array (l, &(out->substitutions), this))
4454 ;
4455
4456 return_trace (bool (out->substitutions));
4457 }
4458
4459 bool sanitize (hb_sanitize_context_t *c) const
4460 {
4461 TRACE_SANITIZE (this);
4462 return_trace (version.sanitize (c) &&
4463 hb_barrier () &&
4464 likely (version.major == 1) &&
4465 substitutions.sanitize (c, this));
4466 }
4467
4468 protected:
4469 FixedVersion<> version; /* Version--0x00010000u */
4470 Array16Of<FeatureTableSubstitutionRecord>
4471 substitutions;
4472 public:
4473 DEFINE_SIZE_ARRAY (6, substitutions);
4474 };
4475
4476 struct FeatureVariationRecord
4477 {
4478 friend struct FeatureVariations;
4479
4480 void collect_lookups (const void *base,
4481 const hb_set_t *feature_indexes,
4482 hb_set_t *lookup_indexes /* OUT */) const
4483 {
4484 return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
4485 }
4486
4487 void closure_features (const void *base,
4488 const hb_map_t *lookup_indexes,
4489 hb_set_t *feature_indexes /* OUT */) const
4490 {
4491 (base+substitutions).closure_features (lookup_indexes, feature_indexes);
4492 }
4493
4494 bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
4495 {
4496 return (base+substitutions).intersects_features (feature_index_map);
4497 }
4498
4499 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
4500 const void *base) const
4501 {
4502 (base+conditions).keep_with_variations (c);
4503 if (c->apply && !c->variation_applied)
4504 {
4505 (base+substitutions).collect_feature_substitutes_with_variations (c);
4506 c->variation_applied = true; // set variations only once
4507 }
4508 }
4509
4510 bool subset (hb_subset_layout_context_t *c, const void *base,
4511 bool insert_catch_all = false) const
4512 {
4513 TRACE_SUBSET (this);
4514 auto *out = c->subset_context->serializer->embed (this);
4515 if (unlikely (!out)) return_trace (false);
4516
4517 out->conditions.serialize_subset (c->subset_context, conditions, base, c, insert_catch_all);
4518 out->substitutions.serialize_subset (c->subset_context, substitutions, base, c, insert_catch_all);
4519
4520 return_trace (true);
4521 }
4522
4523 bool sanitize (hb_sanitize_context_t *c, const void *base) const
4524 {
4525 TRACE_SANITIZE (this);
4526 return_trace (conditions.sanitize (c, base) &&
4527 substitutions.sanitize (c, base));
4528 }
4529
4530 protected:
4531 Offset32To<ConditionSet>
4532 conditions;
4533 Offset32To<FeatureTableSubstitution>
4534 substitutions;
4535 public:
4536 DEFINE_SIZE_STATIC (8);
4537 };
4538
4539 struct FeatureVariations
4540 {
4541 static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;
4542
4543 bool find_index (const int *coords, unsigned int coord_len,
4544 unsigned int *index,
4545 ItemVarStoreInstancer *instancer) const
4546 {
4547 unsigned int count = varRecords.len;
4548 for (unsigned int i = 0; i < count; i++)
4549 {
4550 const FeatureVariationRecord &record = varRecords.arrayZ[i];
4551 if ((this+record.conditions).evaluate (coords, coord_len, instancer))
4552 {
4553 *index = i;
4554 return true;
4555 }
4556 }
4557 *index = NOT_FOUND_INDEX;
4558 return false;
4559 }
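/* Records are tried in order and the first whose ConditionSet matches wins;
 * a later record is only reachable when every earlier one fails. */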
4560
4561 const Feature *find_substitute (unsigned int variations_index,
4562 unsigned int feature_index) const
4563 {
4564 const FeatureVariationRecord &record = varRecords[variations_index];
4565 return (this+record.substitutions).find_substitute (feature_index);
4566 }
4567
4568 void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
4569 {
4570 unsigned int count = varRecords.len;
4571 for (unsigned int i = 0; i < count; i++)
4572 {
4573 c->cur_record_idx = i;
4574 varRecords[i].collect_feature_substitutes_with_variations (c, this);
4575 if (c->universal)
4576 break;
4577 }
4578 if (c->universal || c->record_cond_idx_map->is_empty ())
4579 c->catch_all_record_feature_idxes.reset ();
4580 }
4581
4582 FeatureVariations* copy (hb_serialize_context_t *c) const
4583 {
4584 TRACE_SERIALIZE (this);
4585 return_trace (c->embed (*this));
4586 }
4587
4588 void collect_lookups (const hb_set_t *feature_indexes,
4589 const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
4590 hb_set_t *lookup_indexes /* OUT */) const
4591 {
4592 unsigned count = varRecords.len;
4593 for (unsigned int i = 0; i < count; i++)
4594 {
4595 if (feature_record_cond_idx_map &&
4596 !feature_record_cond_idx_map->has (i))
4597 continue;
4598 varRecords[i].collect_lookups (this, feature_indexes, lookup_indexes);
4599 }
4600 }
4601
4602 void closure_features (const hb_map_t *lookup_indexes,
4603 const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
4604 hb_set_t *feature_indexes /* OUT */) const
4605 {
4606 unsigned int count = varRecords.len;
4607 for (unsigned int i = 0; i < count; i++)
4608 {
4609 if (feature_record_cond_idx_map != nullptr &&
4610 !feature_record_cond_idx_map->has (i))
4611 continue;
4612 varRecords[i].closure_features (this, lookup_indexes, feature_indexes);
4613 }
4614 }
4615
4616 bool subset (hb_subset_context_t *c,
4617 hb_subset_layout_context_t *l) const
4618 {
4619 TRACE_SUBSET (this);
4620 auto *out = c->serializer->start_embed (*this);
4621 if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
4622
4623 out->version.major = version.major;
4624 out->version.minor = version.minor;
4625
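/* Find the last record that still matches a retained feature: records after
 * it can never affect the subset output and are dropped, while records up
 * to it are kept (subject to the retained-condition filter) so that the
 * first-match semantics of find_index () are preserved. */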
4626 int keep_up_to = -1;
4627 for (int i = varRecords.len - 1; i >= 0; i--) {
4628 if (varRecords[i].intersects_features (this, l->feature_index_map)) {
4629 keep_up_to = i;
4630 break;
4631 }
4632 }
4633
4634 unsigned count = (unsigned) (keep_up_to + 1);
4635 for (unsigned i = 0; i < count; i++)
4636 {
4637 if (l->feature_record_cond_idx_map != nullptr &&
4638 !l->feature_record_cond_idx_map->has (i))
4639 continue;
4640
4641 l->cur_feature_var_record_idx = i;
4642 subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
4643 }
4644
4645 if (out->varRecords.len && !l->catch_all_record_feature_idxes->is_empty ())
4646 {
4647 bool insert_catch_all_record = true;
4648 subset_record_array (l, &(out->varRecords), this, insert_catch_all_record) (varRecords[0]);
4649 }
4650
4651 return_trace (bool (out->varRecords));
4652 }
4653
4654 bool sanitize (hb_sanitize_context_t *c) const
4655 {
4656 TRACE_SANITIZE (this);
4657 return_trace (version.sanitize (c) &&
4658 hb_barrier () &&
4659 likely (version.major == 1) &&
4660 varRecords.sanitize (c, this));
4661 }
4662
4663 protected:
4664 FixedVersion<> version; /* Version--0x00010000u */
4665 Array32Of<FeatureVariationRecord>
4666 varRecords;
4667 public:
4668 DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
4669 };
4670
4671
4672 /*
4673 * Device Tables
4674 */
4675
4676 struct HintingDevice
4677 {
4678 friend struct Device;
4679
4680 private:
4681
4682 hb_position_t get_x_delta (hb_font_t *font) const
4683 { return get_delta (font->x_ppem, font->x_scale); }
4684
4685 hb_position_t get_y_delta (hb_font_t *font) const
4686 { return get_delta (font->y_ppem, font->y_scale); }
4687
4688 public:
4689
4690 unsigned int get_size () const
4691 {
4692 unsigned int f = deltaFormat;
4693 if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
4694 return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
4695 }
4696
4697 bool sanitize (hb_sanitize_context_t *c) const
4698 {
4699 TRACE_SANITIZE (this);
4700 return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
4701 }
4702
4703 HintingDevice* copy (hb_serialize_context_t *c) const
4704 {
4705 TRACE_SERIALIZE (this);
4706 return_trace (c->embed<HintingDevice> (this));
4707 }
4708
4709 private:
4710
4711 int get_delta (unsigned int ppem, int scale) const
4712 {
4713 if (!ppem) return 0;
4714
4715 int pixels = get_delta_pixels (ppem);
4716
4717 if (!pixels) return 0;
4718
4719 return (int) (pixels * (int64_t) scale / ppem);
4720 }
4721 int get_delta_pixels (unsigned int ppem_size) const
4722 {
4723 unsigned int f = deltaFormat;
4724 if (unlikely (f < 1 || f > 3))
4725 return 0;
4726
4727 if (ppem_size < startSize || ppem_size > endSize)
4728 return 0;
4729
4730 unsigned int s = ppem_size - startSize;
4731
4732 unsigned int byte = deltaValueZ[s >> (4 - f)];
4733 unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
4734 unsigned int mask = (0xFFFFu >> (16 - (1 << f)));
4735
4736 int delta = bits & mask;
4737
4738 if ((unsigned int) delta >= ((mask + 1) >> 1))
4739 delta -= mask + 1;
4740
4741 return delta;
4742 }
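/* Worked example (illustrative values): with deltaFormat = 2 (signed 4-bit
 * deltas, four per uint16), startSize = 10 and ppem_size = 12, s = 2 and the
 * delta is the third nibble of deltaValueZ[0]; a raw nibble of 0xC (12) is
 * >= (mask + 1) >> 1 = 8, so it sign-extends to 12 - 16 = -4 pixels. */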
4743
4744 protected:
4745 HBUINT16 startSize; /* Smallest size to correct--in ppem */
4746 HBUINT16 endSize; /* Largest size to correct--in ppem */
4747 HBUINT16 deltaFormat; /* Format of DeltaValue array data: 1, 2, or 3
4748 * 1 Signed 2-bit value, 8 values per uint16
4749 * 2 Signed 4-bit value, 4 values per uint16
4750 * 3 Signed 8-bit value, 2 values per uint16
4751 */
4752 UnsizedArrayOf<HBUINT16>
4753 deltaValueZ; /* Array of compressed data */
4754 public:
4755 DEFINE_SIZE_ARRAY (6, deltaValueZ);
4756 };
4757
4758 struct VariationDevice
4759 {
4760 friend struct Device;
4761
4762 private:
4763
4764 hb_position_t get_x_delta (hb_font_t *font,
4765 const ItemVariationStore &store,
4766 ItemVariationStore::cache_t *store_cache = nullptr) const
4767 { return font->em_scalef_x (get_delta (font, store, store_cache)); }
4768
4769 hb_position_t get_y_delta (hb_font_t *font,
4770 const ItemVariationStore &store,
4771 ItemVariationStore::cache_t *store_cache = nullptr) const
4772 { return font->em_scalef_y (get_delta (font, store, store_cache)); }
4773
4774 VariationDevice* copy (hb_serialize_context_t *c,
4775 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
4776 {
4777 TRACE_SERIALIZE (this);
4778 if (!layout_variation_idx_delta_map) return_trace (nullptr);
4779
4780 hb_pair_t<unsigned, int> *v;
4781 if (!layout_variation_idx_delta_map->has (varIdx, &v))
4782 return_trace (nullptr);
4783
4784 c->start_zerocopy (this->static_size);
4785 auto *out = c->embed (this);
4786 if (unlikely (!out)) return_trace (nullptr);
4787
4788 if (!c->check_assign (out->varIdx, hb_first (*v), HB_SERIALIZE_ERROR_INT_OVERFLOW))
4789 return_trace (nullptr);
4790 return_trace (out);
4791 }
4792
4793 void collect_variation_index (hb_collect_variation_indices_context_t *c) const
4794 { c->layout_variation_indices->add (varIdx); }
4795
4796 bool sanitize (hb_sanitize_context_t *c) const
4797 {
4798 TRACE_SANITIZE (this);
4799 return_trace (c->check_struct (this));
4800 }
4801
4802 private:
4803
4804 float get_delta (hb_font_t *font,
4805 const ItemVariationStore &store,
4806 ItemVariationStore::cache_t *store_cache = nullptr) const
4807 {
4808 return store.get_delta (varIdx, font->coords, font->num_coords, (ItemVariationStore::cache_t *) store_cache);
4809 }
4810
4811 protected:
4812 VarIdx varIdx; /* Variation index */
4813 HBUINT16 deltaFormat; /* Format identifier for this table: 0x8000 */
4814 public:
4815 DEFINE_SIZE_STATIC (6);
4816 };
4817
4818 struct DeviceHeader
4819 {
4820 protected:
4821 HBUINT16 reserved1;
4822 HBUINT16 reserved2;
4823 public:
4824 HBUINT16 format; /* Format identifier */
4825 public:
4826 DEFINE_SIZE_STATIC (6);
4827 };
4828
4829 struct Device
4830 {
4831 hb_position_t get_x_delta (hb_font_t *font,
4832 const ItemVariationStore &store=Null (ItemVariationStore),
4833 ItemVariationStore::cache_t *store_cache = nullptr) const
4834 {
4835 switch (u.b.format)
4836 {
4837 #ifndef HB_NO_HINTING
4838 case 1: case 2: case 3:
4839 return u.hinting.get_x_delta (font);
4840 #endif
4841 #ifndef HB_NO_VAR
4842 case 0x8000:
4843 return u.variation.get_x_delta (font, store, store_cache);
4844 #endif
4845 default:
4846 return 0;
4847 }
4848 }
4849 hb_position_t get_y_delta (hb_font_t *font,
4850 const ItemVariationStore &store=Null (ItemVariationStore),
4851 ItemVariationStore::cache_t *store_cache = nullptr) const
4852 {
4853 switch (u.b.format)
4854 {
4855 #ifndef HB_NO_HINTING
4856 case 1: case 2: case 3:
4857 return u.hinting.get_y_delta (font);
4858 #endif
4859 #ifndef HB_NO_VAR
4860 case 0x8000:
4861 return u.variation.get_y_delta (font, store, store_cache);
4862 #endif
4863 default:
4864 return 0;
4865 }
4866 }
4867
4868 bool sanitize (hb_sanitize_context_t *c) const
4869 {
4870 TRACE_SANITIZE (this);
4871 if (!u.b.format.sanitize (c)) return_trace (false);
4872 switch (u.b.format) {
4873 #ifndef HB_NO_HINTING
4874 case 1: case 2: case 3:
4875 return_trace (u.hinting.sanitize (c));
4876 #endif
4877 #ifndef HB_NO_VAR
4878 case 0x8000:
4879 return_trace (u.variation.sanitize (c));
4880 #endif
4881 default:
4882 return_trace (true);
4883 }
4884 }
4885
4886 Device* copy (hb_serialize_context_t *c,
4887 const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const
4888 {
4889 TRACE_SERIALIZE (this);
4890 switch (u.b.format) {
4891 #ifndef HB_NO_HINTING
4892 case 1:
4893 case 2:
4894 case 3:
4895 return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
4896 #endif
4897 #ifndef HB_NO_VAR
4898 case 0x8000:
4899 return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map)));
4900 #endif
4901 default:
4902 return_trace (nullptr);
4903 }
4904 }
4905
4906 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
4907 {
4908 switch (u.b.format) {
4909 #ifndef HB_NO_HINTING
4910 case 1:
4911 case 2:
4912 case 3:
4913 return;
4914 #endif
4915 #ifndef HB_NO_VAR
4916 case 0x8000:
4917 u.variation.collect_variation_index (c);
4918 return;
4919 #endif
4920 default:
4921 return;
4922 }
4923 }
4924
4925 unsigned get_variation_index () const
4926 {
4927 switch (u.b.format) {
4928 #ifndef HB_NO_VAR
4929 case 0x8000:
4930 return u.variation.varIdx;
4931 #endif
4932 default:
4933 return HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
4934 }
4935 }
4936
4937 protected:
4938 union {
4939 DeviceHeader b;
4940 HintingDevice hinting;
4941 #ifndef HB_NO_VAR
4942 VariationDevice variation;
4943 #endif
4944 } u;
4945 public:
4946 DEFINE_SIZE_UNION (6, b);
4947 };
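/* Usage sketch (hypothetical 'device', 'font' and 'var_store' values): the
 * union is consumed the same way whichever variant it holds; hinting
 * formats ignore the variation store, and variation devices ignore ppem:
 *
 *   hb_position_t dx = device.get_x_delta (font, var_store);
 *   hb_position_t dy = device.get_y_delta (font, var_store);
 */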
4948
4949
4950 } /* namespace OT */
4951
4952
4953 #endif /* HB_OT_LAYOUT_COMMON_HH */
4954