/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GSUBGPOS_HH
#define HB_OT_LAYOUT_GSUBGPOS_HH

#include "hb.hh"
#include "hb-buffer.hh"
#include "hb-map.hh"
#include "hb-set.hh"
#include "hb-ot-map.hh"
#include "hb-ot-layout-common.hh"
#include "hb-ot-layout-gdef-table.hh"


namespace OT {

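/* The hb_dispatch_context_t-derived contexts below each implement one
 * traversal over the GSUB/GPOS lookup/subtable graph: intersection testing,
 * glyph closure, lookup closure, glyph collection, would-apply testing,
 * coverage collection, and actual application.  A lookup dispatches its
 * subtables through the context, roughly like this (a sketch, not the
 * actual Lookup code):
 *
 *   for (unsigned i = 0; i < subtable_count; i++)
 *   {
 *     return_t r = subtable[i].dispatch (c);
 *     if (c->stop_sublookup_iteration (r))
 *       return r;
 *   }
 *   return c->default_return_value ();
 */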
struct hb_intersects_context_t :
       hb_dispatch_context_t<hb_intersects_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  const hb_set_t *glyphs;

  hb_intersects_context_t (const hb_set_t *glyphs_) :
                            glyphs (glyphs_) {}
};

struct hb_have_non_1to1_context_t :
       hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
};

struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  void reset_lookup_visit_count ()
  { lookup_count = 0; }

  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }

  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

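  /* Memoization: a lookup need not be revisited if it was already closed
   * over a superset of the current glyphs.  We track, per lookup, the
   * glyph-set population at the last visit plus the union of parent active
   * glyph sets seen so far; if nothing new has appeared, the lookup is done. */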
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (unlikely (done_lookups_glyph_count->in_error () ||
		  done_lookups_glyph_set->in_error ()))
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      if (!done_lookups_glyph_set->has (lookup_index))
      {
	if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
	  return true;
      }

      done_lookups_glyph_set->get (lookup_index)->clear ();
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    if (parent_active_glyphs ().is_subset (*covered_glyph_set))
      return true;

    covered_glyph_set->union_ (parent_active_glyphs ());
    return false;
  }

  const hb_set_t& previous_parent_active_glyphs () {
    if (active_glyphs_stack.length <= 1)
      return *glyphs;

    return active_glyphs_stack[active_glyphs_stack.length - 2];
  }

  const hb_set_t& parent_active_glyphs ()
  {
    if (!active_glyphs_stack)
      return *glyphs;

    return active_glyphs_stack.tail ();
  }

  hb_set_t* push_cur_active_glyphs ()
  {
    hb_set_t *s = active_glyphs_stack.push ();
    if (unlikely (active_glyphs_stack.in_error ()))
      return nullptr;
    return s;
  }

  bool pop_cur_done_glyphs ()
  {
    if (!active_glyphs_stack)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;
  hb_set_t output[1];
  hb_vector_t<hb_set_t> active_glyphs_stack;
  recurse_func_t recurse_func = nullptr;
  unsigned int nesting_level_left;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			hb_map_t *done_lookups_glyph_count_,
			hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  nesting_level_left (nesting_level_left_),
			  done_lookups_glyph_count (done_lookups_glyph_count_),
			  done_lookups_glyph_set (done_lookups_glyph_set_)
  {}

  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  void flush ()
  {
    output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID);	/* Remove invalid glyphs. */
    glyphs->union_ (*output);
    output->clear ();
    active_glyphs_stack.pop ();
    active_glyphs_stack.reset ();
  }

  private:
  hb_map_t *done_lookups_glyph_count;
  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;
  unsigned int lookup_count = 0;
};



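/* Computes the set of lookups reachable (transitively, through contextual
 * recursion) from a given lookup, and records lookups that cannot apply to
 * the current glyph set as inactive. */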
struct hb_closure_lookups_context_t :
       hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (lookup_limit_exceeded ()
        || visited_lookups->in_error ()
        || visited_lookups->has (lookup_index))
      // Don't increment lookup count here, that will be done in the call to closure_lookups()
      // made by recurse_func.
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  bool lookup_limit_exceeded ()
  {
    bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
    if (ret)
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret;
  }

  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
    {
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
                 lookup_count, lookup_index);
      return true;
    }

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_lookups_context_t (hb_face_t *face_,
				const hb_set_t *glyphs_,
				hb_set_t *visited_lookups_,
				hb_set_t *inactive_lookups_,
				unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
				face (face_),
				glyphs (glyphs_),
				recurse_func (nullptr),
				nesting_level_left (nesting_level_left_),
				visited_lookups (visited_lookups_),
				inactive_lookups (inactive_lookups_),
				lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;
  hb_set_t *inactive_lookups;
  unsigned int lookup_count;
};

struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;
  unsigned int len;
  bool zero_context;

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_) {}
};

struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;
  unsigned int nesting_level_left;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT.  May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_output, /* OUT.  May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input  (glyphs_input  ? glyphs_input  : hb_set_get_empty ()),
			      after  (glyphs_after  ? glyphs_after  : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (hb_set_create ()),
			      nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};



template <typename set_t>
struct hb_collect_coverage_context_t :
       hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
{
  typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
  template <typename T>
  return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value () { return Null (Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    r.collect_coverage (set);
    return false;
  }

  hb_collect_coverage_context_t (set_t *set_) :
				   set (set_) {}

  set_t *set;
};

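/* The context used while actually applying GSUB/GPOS lookups to a buffer
 * at shaping time.  table_index is 0 for GSUB and 1 for GPOS. */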
struct hb_ot_apply_context_t :
       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
  struct matcher_t
  {
    typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_ignore_hidden (bool ignore_hidden_) { ignore_hidden = ignore_hidden_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; }
    void set_syllable (uint8_t syllable_)  { syllable = per_syllable ? syllable_ : 0; }
    void set_match_func (match_func_t match_func_,
			 const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE
    };

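    /* MATCH_YES/MATCH_NO are definitive answers from the match function;
     * MATCH_MAYBE means no match function is set, so the decision falls
     * back to the skipping logic in match() below. */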
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_match_t may_match (hb_glyph_info_t &info,
			   hb_codepoint_t glyph_data) const
    {
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,
      SKIP_MAYBE
    };

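    /* SKIP_YES: the glyph fails the lookup-flags property check and must be
     * skipped; SKIP_MAYBE: a default-ignorable that we would rather skip,
     * unless the match function explicitly matches it; SKIP_NO: a regular
     * glyph that is never skipped. */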
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_skip_t may_skip (const hb_ot_apply_context_t *c,
			 const hb_glyph_info_t       &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
		    (ignore_hidden || !_hb_glyph_info_is_hidden (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props = 0;
    hb_mask_t mask = -1;
    bool ignore_zwnj = false;
    bool ignore_zwj = false;
    bool ignore_hidden = false;
    bool per_syllable = false;
    uint8_t syllable = 0;
    match_func_t match_func = nullptr;
    const void *match_data = nullptr;
  };

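  /* Iterates buffer glyphs while transparently skipping glyphs the current
   * lookup ignores (per lookup flags and the ZWJ/ZWNJ/hidden handling above),
   * matching each visited glyph against an optional value array. */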
  struct skipping_iterator_t
  {
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      end = c->buffer->len;
      match_glyph_data16 = nullptr;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching context, or asked to. */
      matcher.set_ignore_zwj  (context_match || c->auto_zwj);
      /* Ignore hidden glyphs (like CGJ) during GPOS. */
      matcher.set_ignore_hidden (c->table_index == 1);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
      /* Per syllable matching is only for GSUB. */
      matcher.set_per_syllable (c->table_index == 0 && c->per_syllable);
      matcher.set_syllable (0);
    }
    void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    void set_match_func (matcher_t::match_func_t match_func_,
			 const void *match_data_)
    {
      matcher.set_match_func (match_func_, match_data_);
    }
    void set_glyph_data (const HBUINT16 glyph_data[])
    {
      match_glyph_data16 = glyph_data;
#ifndef HB_NO_BEYOND_64K
      match_glyph_data24 = nullptr;
#endif
    }
#ifndef HB_NO_BEYOND_64K
    void set_glyph_data (const HBUINT24 glyph_data[])
    {
      match_glyph_data16 = nullptr;
      match_glyph_data24 = glyph_data;
    }
#endif

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    void reset (unsigned int start_index_)
    {
      idx = start_index_;
      end = c->buffer->len;
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    void reset_fast (unsigned int start_index_)
    {
      // Doesn't set end or syllable. Used by GPOS which doesn't care / change.
      idx = start_index_;
    }

    void reject ()
    {
      backup_glyph_data ();
    }

    matcher_t::may_skip_t
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }

    enum match_t {
      MATCH,
      NOT_MATCH,
      SKIP
    };

#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    match_t match (hb_glyph_info_t &info)
    {
      matcher_t::may_skip_t skip = matcher.may_skip (c, info);
      if (unlikely (skip == matcher_t::SKIP_YES))
	return SKIP;

      matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
      if (match == matcher_t::MATCH_YES ||
	  (match == matcher_t::MATCH_MAYBE &&
	   skip == matcher_t::SKIP_NO))
	return MATCH;

      if (skip == matcher_t::SKIP_NO)
        return NOT_MATCH;

      return SKIP;
    }

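    /* next()/prev() advance to the next/previous non-skipped glyph and report
     * whether it matches.  On failure they also report the position up to
     * (or from) which the match is known not to apply, so the caller can
     * flag those positions unsafe-to-break. */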
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    bool next (unsigned *unsafe_to = nullptr)
    {
      const signed stop = (signed) end - 1;
      while ((signed) idx < stop)
      {
	idx++;
	switch (match (c->buffer->info[idx]))
	{
	  case MATCH:
	  {
	    advance_glyph_data ();
	    return true;
	  }
	  case NOT_MATCH:
	  {
	    if (unsafe_to)
	      *unsafe_to = idx + 1;
	    return false;
	  }
	  case SKIP:
	    continue;
	}
      }
      if (unsafe_to)
        *unsafe_to = end;
      return false;
    }
#ifndef HB_OPTIMIZE_SIZE
    HB_ALWAYS_INLINE
#endif
    bool prev (unsigned *unsafe_from = nullptr)
    {
      const unsigned stop = 0;
      while (idx > stop)
      {
	idx--;
	switch (match (c->buffer->out_info[idx]))
	{
	  case MATCH:
	  {
	    advance_glyph_data ();
	    return true;
	  }
	  case NOT_MATCH:
	  {
	    if (unsafe_from)
	      *unsafe_from = hb_max (1u, idx) - 1u;
	    return false;
	  }
	  case SKIP:
	    continue;
	}
      }
      if (unsafe_from)
        *unsafe_from = 0;
      return false;
    }

    HB_ALWAYS_INLINE
    hb_codepoint_t
    get_glyph_data ()
    {
      if (match_glyph_data16) return *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) return *match_glyph_data24;
#endif
      return 0;
    }
    HB_ALWAYS_INLINE
    void
    advance_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16++;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24++;
#endif
    }
    void
    backup_glyph_data ()
    {
      if (match_glyph_data16) match_glyph_data16--;
#ifndef HB_NO_BEYOND_64K
      else
      if (match_glyph_data24) match_glyph_data24--;
#endif
    }

    unsigned int idx;
    protected:
    hb_ot_apply_context_t *c;
    matcher_t matcher;
    const HBUINT16 *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
    const HBUINT24 *match_glyph_data24;
#endif

    unsigned int end;
  };


  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  return_t recurse (unsigned int sub_lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
    {
      buffer->shaping_failed = true;
      return default_return_value ();
    }

    nesting_level_left--;
    bool ret = recurse_func (this, sub_lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  unsigned int table_index; /* GSUB/GPOS */
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_sanitize_context_t sanitizer;
  recurse_func_t recurse_func = nullptr;
  const GDEF &gdef;
  const GDEF::accelerator_t &gdef_accel;
  const ItemVariationStore &var_store;
  ItemVariationStore::cache_t *var_store_cache;
  hb_set_digest_t digest;

  hb_direction_t direction;
  hb_mask_t lookup_mask = 1;
  unsigned int lookup_index = (unsigned) -1;
  unsigned int lookup_props = 0;
  unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;

  bool has_glyph_classes;
  bool auto_zwnj = true;
  bool auto_zwj = true;
  bool per_syllable = false;
  bool random = false;
  unsigned new_syllables = (unsigned) -1;

  signed last_base = -1; // GPOS uses
  unsigned last_base_until = 0; // GPOS uses

  hb_ot_apply_context_t (unsigned int table_index_,
			 hb_font_t *font_,
			 hb_buffer_t *buffer_,
			 hb_blob_t *table_blob_) :
			table_index (table_index_),
			font (font_), face (font->face), buffer (buffer_),
			sanitizer (table_blob_),
			gdef (
#ifndef HB_NO_OT_LAYOUT
			      *face->table.GDEF->table
#else
			      Null (GDEF)
#endif
			     ),
			gdef_accel (
#ifndef HB_NO_OT_LAYOUT
			      *face->table.GDEF
#else
			      Null (GDEF::accelerator_t)
#endif
			     ),
			var_store (gdef.get_var_store ()),
			var_store_cache (
#ifndef HB_NO_VAR
					 table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr
#else
					 nullptr
#endif
					),
			digest (buffer_->digest ()),
			direction (buffer_->props.direction),
			has_glyph_classes (gdef.has_glyph_classes ())
  { init_iters (); }

  ~hb_ot_apply_context_t ()
  {
#ifndef HB_NO_VAR
    ItemVariationStore::destroy_cache (var_store_cache);
#endif
  }

  void init_iters ()
  {
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  void set_lookup_mask (hb_mask_t mask, bool init = true) { lookup_mask = mask; last_base = -1; last_base_until = 0; if (init) init_iters (); }
  void set_auto_zwj (bool auto_zwj_, bool init = true) { auto_zwj = auto_zwj_; if (init) init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_, bool init = true) { auto_zwnj = auto_zwnj_; if (init) init_iters (); }
  void set_per_syllable (bool per_syllable_, bool init = true) { per_syllable = per_syllable_; if (init) init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }

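  /* Lehmer/Park-Miller linear congruential generator (minstd_rand):
   * state = state * 48271 mod (2^31 - 1).  Used when the 'rand' feature
   * requests randomized alternates. */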
  uint32_t random_number ()
  {
    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    buffer->random_state = buffer->random_state * 48271 % 2147483647;
    return buffer->random_state;
  }

  bool match_properties_mark (hb_codepoint_t  glyph,
			      unsigned int    glyph_props,
			      unsigned int    match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef_accel.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  bool check_glyph_property (const hb_glyph_info_t *info,
			     unsigned int  match_props) const
  {
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (info->codepoint, glyph_props, match_props);

    return true;
  }

  void _set_glyph_class (hb_codepoint_t glyph_index,
			  unsigned int class_guess = 0,
			  bool ligature = false,
			  bool component = false)
  {
    digest.add (glyph_index);

    if (new_syllables != (unsigned) -1)
      buffer->cur().syllable() = new_syllables;

    unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
    props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef_accel.get_glyph_props (glyph_index));
    }
    else if (class_guess)
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
    }
    else
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
  }

  void replace_glyph (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    (void) buffer->replace_glyph (glyph_index);
  }
  void replace_glyph_inplace (hb_codepoint_t glyph_index)
  {
    _set_glyph_class (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
				    unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, true);
    (void) buffer->replace_glyph (glyph_index);
  }
  void output_glyph_for_component (hb_codepoint_t glyph_index,
				   unsigned int class_guess)
  {
    _set_glyph_class (glyph_index, class_guess, false, true);
    (void) buffer->output_glyph (glyph_index);
  }
};


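/* Precomputes, for each subtable of a lookup, a type-erased apply function
 * plus a coverage digest, so lookup application can cheaply reject glyphs
 * that a subtable cannot possibly match. */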
struct hb_accelerate_subtables_context_t :
       hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
  template <typename Type>
  static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return typed_obj->apply (c);
  }

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply_cached (c) )
  template <typename T>
  static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
  template <typename Type>
  static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return apply_cached_ (typed_obj, c, hb_prioritize);
  }

  template <typename T>
  static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
  template <typename T>
  static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
  template <typename Type>
  static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
  {
    const Type *typed_obj = (const Type *) obj;
    return cache_func_ (typed_obj, c, enter, hb_prioritize);
  }
#endif

  typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
  typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);

  struct hb_applicable_t
  {
    friend struct hb_accelerate_subtables_context_t;
    friend struct hb_ot_layout_lookup_accelerator_t;

    template <typename T>
    void init (const T &obj_,
	       hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
	       , hb_apply_func_t apply_cached_func_
	       , hb_cache_func_t cache_func_
#endif
		)
    {
      obj = &obj_;
      apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
      apply_cached_func = apply_cached_func_;
      cache_func = cache_func_;
#endif
      digest.init ();
      obj_.get_coverage ().collect_coverage (&digest);
    }

    bool apply (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
    }
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    bool apply_cached (hb_ot_apply_context_t *c) const
    {
      return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
    }
    bool cache_enter (hb_ot_apply_context_t *c) const
    {
      return cache_func (obj, c, true);
    }
    void cache_leave (hb_ot_apply_context_t *c) const
    {
      cache_func (obj, c, false);
    }
#endif

    private:
    const void *obj;
    hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    hb_apply_func_t apply_cached_func;
    hb_cache_func_t cache_func;
#endif
    hb_set_digest_t digest;
  };

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
  template <typename T>
  auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )
#endif

  /* Dispatch interface. */
  template <typename T>
  return_t dispatch (const T &obj)
  {
    hb_applicable_t *entry = &array[i++];

    entry->init (obj,
		 apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
		 , apply_cached_to<T>
		 , cache_func_to<T>
#endif
		 );

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    /* Cache handling
     *
     * We allow one subtable from each lookup to use a cache. The assumption
     * being that multiple subtables of the same lookup cannot use a cache
     * because the resources they would use will collide.  As such, we ask
     * each subtable to tell us how much it costs (which a cache would avoid),
     * and we allocate the cache opportunity to the costliest subtable.
     */
    unsigned cost = cache_cost (obj, hb_prioritize);
    if (cost > cache_user_cost)
    {
      cache_user_idx = i - 1;
      cache_user_cost = cost;
    }
#endif

    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_accelerate_subtables_context_t (hb_applicable_t *array_) :
				     array (array_) {}

  hb_applicable_t *array;
  unsigned i = 0;

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  unsigned cache_user_idx = (unsigned) -1;
  unsigned cache_user_cost = 0;
#endif
};


typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

struct ContextClosureFuncs
{
  intersects_func_t intersects;
  intersected_glyphs_func_t intersected_glyphs;
};
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
  match_func_t match;
};
struct ChainContextApplyFuncs
{
  match_func_t match[3];
};


static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
  return glyphs->has (value);
}
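/* Class-based intersection is comparatively expensive, so results are
 * memoized in an hb_map_t keyed by class value. */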
static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  hb_map_t *map = (hb_map_t *) cache;

  hb_codepoint_t *cached_v;
  if (map->has (value, &cached_v))
    return *cached_v;

  bool v = class_def.intersects_class (glyphs, value);
  map->set (value, v);

  return v;
}
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).intersects (glyphs);
}


static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
  unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
  intersected_glyphs->add (g);
}

using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;

static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);

  intersected_class_cache_t *map = (intersected_class_cache_t *) cache;

  hb_set_t *cached_v;
  if (map->has (value, &cached_v))
  {
    intersected_glyphs->union_ (*cached_v);
    return;
  }

  hb_set_t v;
  class_def.intersected_class_glyphs (glyphs, value, &v);

  intersected_glyphs->union_ (v);

  map->set (value, std::move (v));
}

static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).intersect_set (*glyphs, *intersected_glyphs);
}


template <typename HBUINT>
static inline bool array_is_subset_of (const hb_set_t *glyphs,
				       unsigned int count,
				       const HBUINT values[],
				       intersects_func_t intersects_func,
				       const void *intersects_data,
				       void *cache)
{
  for (const auto &_ : + hb_iter (values, count))
    if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
  return true;
}


static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
{
  glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  class_def.collect_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  (data+coverage).collect_coverage (glyphs);
}
template <typename HBUINT>
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
				  hb_set_t *glyphs,
				  unsigned int count,
				  const HBUINT values[],
				  collect_glyphs_func_t collect_func,
				  const void *collect_data)
{
  return
  + hb_iter (values, count)
  | hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); })
  ;
}


static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
{
  return true;
}
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
{
  return info.codepoint == value;
}
static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data)
{
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  return class_def.get_class (info.codepoint) == value;
}
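/* The cached variants stash the computed glyph class in info.syllable(),
 * which is otherwise unused while these matchers run: match_class_cached
 * uses the whole byte (255 = not cached), while cached1/cached2 use the
 * low/high nibble respectively (15 = not cached), allowing two distinct
 * ClassDefs to be cached at once. */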
static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = info.syllable();
  if (klass < 255)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 255))
    info.syllable() = klass;
  return klass == value;
}
static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = info.syllable() & 0x0F;
  if (klass < 15)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 15))
    info.syllable() = (info.syllable() & 0xF0) | klass;
  return klass == value;
}
static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data)
{
  unsigned klass = (info.syllable() & 0xF0) >> 4;
  if (klass < 15)
    return klass == value;
  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
  klass = class_def.get_class (info.codepoint);
  if (likely (klass < 15))
    info.syllable() = (info.syllable() & 0x0F) | (klass << 4);
  return klass == value;
}
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
{
  Offset16To<Coverage> coverage;
  coverage = value;
  return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}

template <typename HBUINT>
static inline bool would_match_input (hb_would_apply_context_t *c,
				      unsigned int count, /* Including the first glyph (not matched) */
				      const HBUINT input[], /* Array of input values--start with second glyph */
				      match_func_t match_func,
				      const void *match_data)
{
  if (count != c->len)
    return false;

  for (unsigned int i = 1; i < count; i++)
  {
    hb_glyph_info_t info;
    info.codepoint = c->glyphs[i];
    if (likely (!match_func (info, input[i - 1], match_data)))
      return false;
  }

  return true;
}
template <typename HBUINT>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
static bool match_input (hb_ot_apply_context_t *c,
			 unsigned int count, /* Including the first glyph (not matched) */
			 const HBUINT input[], /* Array of input values--start with second glyph */
			 match_func_t match_func,
			 const void *match_data,
			 unsigned int *end_position,
			 unsigned int *match_positions,
			 unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx);
  skippy_iter.set_match_func (match_func, match_data);
  skippy_iter.set_glyph_data (input);

  /*
   * This is perhaps the trickiest part of OpenType...  Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct),
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */

  unsigned int total_component_count = 0;

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  for (unsigned int i = 1; i < count; i++)
  {
    unsigned unsafe_to;
    if (!skippy_iter.next (&unsafe_to))
    {
      *end_position = unsafe_to;
      return_trace (false);
    }

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
	/* ...unless, we are attached to a base ligature and that base
	 * ligature is ignorable. */
	if (ligbase == LIGBASE_NOT_CHECKED)
	{
	  bool found = false;
	  const auto *out = buffer->out_info;
	  unsigned int j = buffer->out_len;
	  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
	  {
	    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
	    {
	      j--;
	      found = true;
	      break;
	    }
	    j--;
	  }

	  if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
	    ligbase = LIGBASE_MAY_SKIP;
	  else
	    ligbase = LIGBASE_MAY_NOT_SKIP;
	}

	if (ligbase == LIGBASE_MAY_NOT_SKIP)
	  return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_position = skippy_iter.idx + 1;

  if (p_total_component_count)
  {
    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    *p_total_component_count = total_component_count;
  }

  match_positions[0] = buffer->idx;

  return_trace (true);
}
static inline bool ligate_input (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 const unsigned int *match_positions, /* Including the first glyph */
				 unsigned int match_end,
				 hb_codepoint_t lig_glyph,
				 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, match_end);

1397   /* - If a base and one or more marks ligate, consider that as a base, NOT
1398    *   ligature, such that all following marks can still attach to it.
1399    *   https://github.com/harfbuzz/harfbuzz/issues/1109
1400    *
1401    * - If all components of the ligature were marks, we call this a mark ligature.
1402    *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
1403    *   the ligature to keep its old ligature id.  This will allow it to attach to
1404    *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
1405    *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
1406    *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
1407    *   later, we don't want them to lose their ligature id/component, otherwise
1408    *   GPOS will fail to correctly position the mark ligature on top of the
1409    *   LAM,LAM,HEH ligature.  See:
1410    *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
1411    *
1412    * - If a ligature is formed of components, some of which are themselves
1413    *   ligatures, and those component ligatures had marks attached to *their*
1414    *   components, we have to attach the marks to the new ligature component
1415    *   positions!  Now *that*'s tricky!  And these marks may be following the
1416    *   last component of the whole sequence, so we should loop forward looking
1417    *   for them and update them.
1418    *
1419    *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
1420    *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
1421    *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
1422    *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
1423    *   the new ligature with a component value of 2.
1424    *
1425    *   This in fact happened to a font...  See:
1426    *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
1427    */
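  /* Worked example of the component renumbering done below (numbers derived
   * from the LAM,LAM,SHADDA,FATHA,HEH case above; illustrative, not from the
   * upstream comment): when LAM and the two-component LAM-HEH ligature fuse,
   * components_so_far == 3 and last_num_components == 2, so a SHADDA carrying
   * component 1 of LAM-HEH gets
   * new_lig_comp = 3 - 2 + min (1, 2) == 2, i.e. component 2 of the new
   * LAM-LAM-HEH ligature. */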
1428 
1429   bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
1430   bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
1431   for (unsigned int i = 1; i < count; i++)
1432     if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
1433     {
1434       is_base_ligature = false;
1435       is_mark_ligature = false;
1436       break;
1437     }
1438   bool is_ligature = !is_base_ligature && !is_mark_ligature;
1439 
1440   unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
1441   unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
1442   unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1443   unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1444   unsigned int components_so_far = last_num_components;
1445 
1446   if (is_ligature)
1447   {
1448     _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
1449     if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
1450     {
1451       _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
1452     }
1453   }
1454   c->replace_glyph_with_ligature (lig_glyph, klass);
1455 
1456   for (unsigned int i = 1; i < count; i++)
1457   {
1458     while (buffer->idx < match_positions[i] && buffer->successful)
1459     {
1460       if (is_ligature)
1461       {
1462 	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1463 	if (this_comp == 0)
1464 	  this_comp = last_num_components;
1465 	unsigned int new_lig_comp = components_so_far - last_num_components +
1466 				    hb_min (this_comp, last_num_components);
1467 	_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
1468       }
1469       (void) buffer->next_glyph ();
1470     }
1471 
1472     last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1473     last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
1474     components_so_far += last_num_components;
1475 
1476     /* Skip the base glyph */
1477     buffer->idx++;
1478   }
1479 
1480   if (!is_mark_ligature && last_lig_id)
1481   {
1482     /* Re-adjust components for any marks following. */
1483     for (unsigned i = buffer->idx; i < buffer->len; ++i)
1484     {
1485       if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
1486 
1487       unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
1488       if (!this_comp) break;
1489 
1490       unsigned new_lig_comp = components_so_far - last_num_components +
1491 			      hb_min (this_comp, last_num_components);
1492       _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
1493     }
1494   }
1495   return_trace (true);
1496 }
1497 
1498 template <typename HBUINT>
1499 #ifndef HB_OPTIMIZE_SIZE
1500 HB_ALWAYS_INLINE
1501 #endif
1502 static bool match_backtrack (hb_ot_apply_context_t *c,
1503 			     unsigned int count,
1504 			     const HBUINT backtrack[],
1505 			     match_func_t match_func,
1506 			     const void *match_data,
1507 			     unsigned int *match_start)
1508 {
1509   TRACE_APPLY (nullptr);
1510 
1511   hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1512   skippy_iter.reset (c->buffer->backtrack_len ());
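  /* Backtrack matching runs over glyphs already copied to the out buffer,
   * so the iterator starts at the out-buffer position of the current glyph
   * (backtrack_len) and walks backwards via prev(). */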
1513   skippy_iter.set_match_func (match_func, match_data);
1514   skippy_iter.set_glyph_data (backtrack);
1515 
1516   for (unsigned int i = 0; i < count; i++)
1517   {
1518     unsigned unsafe_from;
1519     if (!skippy_iter.prev (&unsafe_from))
1520     {
1521       *match_start = unsafe_from;
1522       return_trace (false);
1523     }
1524   }
1525 
1526   *match_start = skippy_iter.idx;
1527   return_trace (true);
1528 }
1529 
1530 template <typename HBUINT>
1531 #ifndef HB_OPTIMIZE_SIZE
1532 HB_ALWAYS_INLINE
1533 #endif
1534 static bool match_lookahead (hb_ot_apply_context_t *c,
1535 			     unsigned int count,
1536 			     const HBUINT lookahead[],
1537 			     match_func_t match_func,
1538 			     const void *match_data,
1539 			     unsigned int start_index,
1540 			     unsigned int *end_index)
1541 {
1542   TRACE_APPLY (nullptr);
1543 
1544   hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1545   skippy_iter.reset (start_index - 1);
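  /* start_index is the first position past the matched input; resetting to
   * start_index - 1 makes the first next() call land on the first lookahead
   * candidate. */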
1546   skippy_iter.set_match_func (match_func, match_data);
1547   skippy_iter.set_glyph_data (lookahead);
1548 
1549   for (unsigned int i = 0; i < count; i++)
1550   {
1551     unsigned unsafe_to;
1552     if (!skippy_iter.next (&unsafe_to))
1553     {
1554       *end_index = unsafe_to;
1555       return_trace (false);
1556     }
1557   }
1558 
1559   *end_index = skippy_iter.idx + 1;
1560   return_trace (true);
1561 }
1562 
1563 
1564 
1565 struct LookupRecord
1566 {
1567   bool serialize (hb_serialize_context_t *c,
1568 		  const hb_map_t         *lookup_map) const
1569   {
1570     TRACE_SERIALIZE (this);
1571     auto *out = c->embed (*this);
1572     if (unlikely (!out)) return_trace (false);
1573 
1574     return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
1575   }
1576 
1577   bool sanitize (hb_sanitize_context_t *c) const
1578   {
1579     TRACE_SANITIZE (this);
1580     return_trace (c->check_struct (this));
1581   }
1582 
1583   HBUINT16	sequenceIndex;		/* Index into current glyph
1584 					 * sequence--first glyph = 0 */
1585   HBUINT16	lookupListIndex;	/* Lookup to apply to that
1586 					 * position--zero-based */
1587   public:
1588   DEFINE_SIZE_STATIC (4);
1589 };
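/* For illustration: a LookupRecord with sequenceIndex == 1 and
 * lookupListIndex == 4 applies lookup index 4 of the lookup list at the
 * second glyph of the matched sequence. */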
1590 
1591 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1592 					      const hb_array_t<const LookupRecord> lookupRecords,
1593 					      const hb_map_t *lookup_map)
1594 {
1595   unsigned count = 0;
1596   for (const LookupRecord& r : lookupRecords)
1597   {
1598     if (!lookup_map->has (r.lookupListIndex))
1599       continue;
1600 
1601     if (!r.serialize (c, lookup_map))
1602       return 0;
1603 
1604     count++;
1605   }
1606   return count;
1607 }
1608 
1609 enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
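/* These match the OpenType SequenceContext formats: format 1 keys rules off
 * the first glyph via Coverage, format 2 off glyph classes via ClassDef, and
 * format 3 describes a single rule as a sequence of Coverage tables. */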
1610 
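/* For each LookupRecord, this computes the set of glyphs that can occur at
 * the record's sequence position (pos_glyphs), pushes that set as the
 * active-glyph set, and recurses into the nested lookup for closure. */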
1611 template <typename HBUINT>
1612 static void context_closure_recurse_lookups (hb_closure_context_t *c,
1613 					     unsigned inputCount, const HBUINT input[],
1614 					     unsigned lookupCount,
1615 					     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
1616 					     unsigned value,
1617 					     ContextFormat context_format,
1618 					     const void *data,
1619 					     intersected_glyphs_func_t intersected_glyphs_func,
1620 					     void *cache)
1621 {
1622   hb_set_t covered_seq_indicies;
1623   hb_set_t pos_glyphs;
1624   for (unsigned int i = 0; i < lookupCount; i++)
1625   {
1626     unsigned seqIndex = lookupRecord[i].sequenceIndex;
1627     if (seqIndex >= inputCount) continue;
1628 
1629     bool has_pos_glyphs = false;
1630 
1631     if (!covered_seq_indicies.has (seqIndex))
1632     {
1633       has_pos_glyphs = true;
1634       pos_glyphs.clear ();
1635       if (seqIndex == 0)
1636       {
1637         switch (context_format) {
1638         case ContextFormat::SimpleContext:
1639           pos_glyphs.add (value);
1640           break;
1641         case ContextFormat::ClassBasedContext:
1642           intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
1643           break;
1644         case ContextFormat::CoverageBasedContext:
1645           pos_glyphs.set (c->parent_active_glyphs ());
1646           break;
1647         }
1648       }
1649       else
1650       {
1651         const void *input_data = input;
1652         unsigned input_value = seqIndex - 1;
1653         if (context_format != ContextFormat::SimpleContext)
1654         {
1655           input_data = data;
1656           input_value = input[seqIndex - 1];
1657         }
1658 
1659         intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
1660       }
1661     }
1662 
1663     covered_seq_indicies.add (seqIndex);
1664     hb_set_t *cur_active_glyphs = c->push_cur_active_glyphs ();
1665     if (unlikely (!cur_active_glyphs))
1666       return;
1667     if (has_pos_glyphs) {
1668       *cur_active_glyphs = std::move (pos_glyphs);
1669     } else {
1670       *cur_active_glyphs = *c->glyphs;
1671     }
1672 
1673     unsigned endIndex = inputCount;
1674     if (context_format == ContextFormat::CoverageBasedContext)
1675       endIndex += 1;
1676 
1677     c->recurse (lookupRecord[i].lookupListIndex, &covered_seq_indicies, seqIndex, endIndex);
1678 
1679     c->pop_cur_done_glyphs ();
1680   }
1681 }
1682 
1683 template <typename context_t>
1684 static inline void recurse_lookups (context_t *c,
1685                                     unsigned int lookupCount,
1686                                     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1687 {
1688   for (unsigned int i = 0; i < lookupCount; i++)
1689     c->recurse (lookupRecord[i].lookupListIndex);
1690 }
1691 
1692 static inline void apply_lookup (hb_ot_apply_context_t *c,
1693 				 unsigned int count, /* Including the first glyph */
1694 				 unsigned int *match_positions, /* Including the first glyph */
1695 				 unsigned int lookupCount,
1696 				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
1697 				 unsigned int match_end)
1698 {
1699   hb_buffer_t *buffer = c->buffer;
1700   int end;
1701 
1702   unsigned int *match_positions_input = match_positions;
1703   unsigned int match_positions_count = count;
1704 
1705   /* All positions are distance from beginning of *output* buffer.
1706    * Adjust. */
1707   {
1708     unsigned int bl = buffer->backtrack_len ();
1709     end = bl + match_end - buffer->idx;
1710 
1711     int delta = bl - buffer->idx;
1712     /* Convert positions to new indexing. */
1713     for (unsigned int j = 0; j < count; j++)
1714       match_positions[j] += delta;
1715   }
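  /* Illustrative numbers: with backtrack_len () == 5 and buffer->idx == 3,
   * delta == 2, so the current glyph at input position 3 maps to output
   * position 5 -- exactly the backtrack length. */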
1716 
1717   for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
1718   {
1719     unsigned int idx = lookupRecord[i].sequenceIndex;
1720     if (idx >= count)
1721       continue;
1722 
1723     unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
1724 
1725     /* This can happen if earlier recursed lookups deleted many entries. */
1726     if (unlikely (match_positions[idx] >= orig_len))
1727       continue;
1728 
1729     if (unlikely (!buffer->move_to (match_positions[idx])))
1730       break;
1731 
1732     if (unlikely (buffer->max_ops <= 0))
1733       break;
1734 
1735     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
1736     {
1737       if (buffer->have_output)
1738         c->buffer->sync_so_far ();
1739       c->buffer->message (c->font,
1740 			  "recursing to lookup %u at %u",
1741 			  (unsigned) lookupRecord[i].lookupListIndex,
1742 			  buffer->idx);
1743     }
1744 
1745     if (!c->recurse (lookupRecord[i].lookupListIndex))
1746       continue;
1747 
1748     if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
1749     {
1750       if (buffer->have_output)
1751         c->buffer->sync_so_far ();
1752       c->buffer->message (c->font,
1753 			  "recursed to lookup %u",
1754 			  (unsigned) lookupRecord[i].lookupListIndex);
1755     }
1756 
1757     unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
1758     int delta = new_len - orig_len;
1759 
1760     if (!delta)
1761       continue;
1762 
1763     /* Recursed lookup changed buffer len.  Adjust.
1764      *
1765      * TODO:
1766      *
1767      * Right now, if buffer length increased by n, we assume n new glyphs
1768      * were added right after the current position, and if buffer length
1769      * was decreased by n, we assume n match positions after the current
1770      * one were removed.  The former (buffer length increased) case is
1771      * fine, but the decrease case can be improved in at least two ways,
1772      * both of which are significant:
1773      *
1774      *   - If the recursed-to lookup is MultipleSubst and buffer length
1775      *     decreased, then it's the current match position that was deleted,
1776      *     NOT the one after it.
1777      *
1778      *   - If buffer length was decreased by n, it does not necessarily
1779      *     mean that n match positions were removed, as the recursed-to
1780      *     lookup might have had a different LookupFlag.  Here's a constructed
1781      *     case of that:
1782      *     https://github.com/harfbuzz/harfbuzz/discussions/3538
1783      *
1784      * It should be possible to construct tests for both of these cases.
1785      */
1786 
1787     end += delta;
1788     if (end < int (match_positions[idx]))
1789     {
1790       /* End may end up smaller than match_positions[idx] if the recursed
1791        * lookup removed many items.
1792        * Just never rewind end beyond the start of the current position, since
1793        * that is not possible in the recursed lookup.  Also adjust delta accordingly.
1794        *
1795        * https://bugs.chromium.org/p/chromium/issues/detail?id=659496
1796        * https://github.com/harfbuzz/harfbuzz/issues/1611
1797        */
1798       delta += match_positions[idx] - end;
1799       end = match_positions[idx];
1800     }
1801 
1802     unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
1803 
1804     if (delta > 0)
1805     {
1806       if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
1807 	break;
1808       if (unlikely (delta + count > match_positions_count))
1809       {
1810         unsigned new_match_positions_count = hb_max (delta + count, hb_max (match_positions_count, 4u) * 1.5);
1811         if (match_positions == match_positions_input)
1812 	{
1813 	  match_positions = (unsigned int *) hb_malloc (new_match_positions_count * sizeof (match_positions[0]));
1814 	  if (unlikely (!match_positions))
1815 	    break;
1816 	  memcpy (match_positions, match_positions_input, count * sizeof (match_positions[0]));
1817 	  match_positions_count = new_match_positions_count;
1818 	}
1819 	else
1820 	{
1821 	  unsigned int *new_match_positions = (unsigned int *) hb_realloc (match_positions, new_match_positions_count * sizeof (match_positions[0]));
1822 	  if (unlikely (!new_match_positions))
1823 	    break;
1824 	  match_positions = new_match_positions;
1825 	  match_positions_count = new_match_positions_count;
1826 	}
1827       }
1828 
1829     }
1830     else
1831     {
1832       /* NOTE: delta is non-positive. */
1833       delta = hb_max (delta, (int) next - (int) count);
1834       next -= delta;
1835     }
1836 
1837     /* Shift! */
1838     memmove (match_positions + next + delta, match_positions + next,
1839 	     (count - next) * sizeof (match_positions[0]));
1840     next += delta;
1841     count += delta;
1842 
1843     /* Fill in new entries. */
1844     for (unsigned int j = idx + 1; j < next; j++)
1845       match_positions[j] = match_positions[j - 1] + 1;
1846 
1847     /* And fixup the rest. */
1848     for (; next < count; next++)
1849       match_positions[next] += delta;
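    /* Illustrative trace of the two loops above: with count == 3, idx == 1
     * and delta == +2, positions [p0, p1, p2] become
     * [p0, p1, p1+1, p1+2, p2+2] and count becomes 5. */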
1850   }
1851 
1852   if (match_positions != match_positions_input)
1853     hb_free (match_positions);
1854 
1855   (void) buffer->move_to (end);
1856 }
1857 
1858 
1859 
1860 /* Contextual lookups */
1861 
1862 struct ContextClosureLookupContext
1863 {
1864   ContextClosureFuncs funcs;
1865   ContextFormat context_format;
1866   const void *intersects_data;
1867   void *intersects_cache;
1868   void *intersected_glyphs_cache;
1869 };
1870 
1871 struct ContextCollectGlyphsLookupContext
1872 {
1873   ContextCollectGlyphsFuncs funcs;
1874   const void *collect_data;
1875 };
1876 
1877 struct ContextApplyLookupContext
1878 {
1879   ContextApplyFuncs funcs;
1880   const void *match_data;
1881 };
1882 
1883 template <typename HBUINT>
1884 static inline bool context_intersects (const hb_set_t *glyphs,
1885 				       unsigned int inputCount, /* Including the first glyph (not matched) */
1886 				       const HBUINT input[], /* Array of input values--start with second glyph */
1887 				       ContextClosureLookupContext &lookup_context)
1888 {
1889   return array_is_subset_of (glyphs,
1890 			     inputCount ? inputCount - 1 : 0, input,
1891 			     lookup_context.funcs.intersects,
1892 			     lookup_context.intersects_data,
1893 			     lookup_context.intersects_cache);
1894 }
1895 
1896 template <typename HBUINT>
1897 static inline void context_closure_lookup (hb_closure_context_t *c,
1898 					   unsigned int inputCount, /* Including the first glyph (not matched) */
1899 					   const HBUINT input[], /* Array of input values--start with second glyph */
1900 					   unsigned int lookupCount,
1901 					   const LookupRecord lookupRecord[],
1902 					   unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1903 					   ContextClosureLookupContext &lookup_context)
1904 {
1905   if (context_intersects (c->glyphs,
1906 			  inputCount, input,
1907 			  lookup_context))
1908     context_closure_recurse_lookups (c,
1909 				     inputCount, input,
1910 				     lookupCount, lookupRecord,
1911 				     value,
1912 				     lookup_context.context_format,
1913 				     lookup_context.intersects_data,
1914 				     lookup_context.funcs.intersected_glyphs,
1915 				     lookup_context.intersected_glyphs_cache);
1916 }
1917 
1918 template <typename HBUINT>
1919 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1920 						  unsigned int inputCount, /* Including the first glyph (not matched) */
1921 						  const HBUINT input[], /* Array of input values--start with second glyph */
1922 						  unsigned int lookupCount,
1923 						  const LookupRecord lookupRecord[],
1924 						  ContextCollectGlyphsLookupContext &lookup_context)
1925 {
1926   collect_array (c, c->input,
1927 		 inputCount ? inputCount - 1 : 0, input,
1928 		 lookup_context.funcs.collect, lookup_context.collect_data);
1929   recurse_lookups (c,
1930 		   lookupCount, lookupRecord);
1931 }
1932 
1933 template <typename HBUINT>
1934 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1935 					       unsigned int inputCount, /* Including the first glyph (not matched) */
1936 					       const HBUINT input[], /* Array of input values--start with second glyph */
1937 					       unsigned int lookupCount HB_UNUSED,
1938 					       const LookupRecord lookupRecord[] HB_UNUSED,
1939 					       const ContextApplyLookupContext &lookup_context)
1940 {
1941   return would_match_input (c,
1942 			    inputCount, input,
1943 			    lookup_context.funcs.match, lookup_context.match_data);
1944 }
1945 
1946 template <typename HBUINT>
1947 HB_ALWAYS_INLINE
1948 static bool context_apply_lookup (hb_ot_apply_context_t *c,
1949 				  unsigned int inputCount, /* Including the first glyph (not matched) */
1950 				  const HBUINT input[], /* Array of input values--start with second glyph */
1951 				  unsigned int lookupCount,
1952 				  const LookupRecord lookupRecord[],
1953 				  const ContextApplyLookupContext &lookup_context)
1954 {
1955   if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
1956   unsigned match_positions_stack[4];
1957   unsigned *match_positions = match_positions_stack;
1958   if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
1959   {
1960     match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
1961     if (unlikely (!match_positions))
1962       return false;
1963   }
1964 
1965   unsigned match_end = 0;
1966   bool ret = false;
1967   if (match_input (c,
1968 		   inputCount, input,
1969 		   lookup_context.funcs.match, lookup_context.match_data,
1970 		   &match_end, match_positions))
1971   {
1972     c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1973     apply_lookup (c,
1974 		  inputCount, match_positions,
1975 		  lookupCount, lookupRecord,
1976 		  match_end);
1977     ret = true;
1978   }
1979   else
1980   {
1981     c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
1982     ret = false;
1983   }
1984 
1985   if (unlikely (match_positions != match_positions_stack))
1986     hb_free (match_positions);
1987 
1988   return ret;
1989 }
1990 
1991 template <typename Types>
1992 struct Rule
1993 {
1994   template <typename T>
1995   friend struct RuleSet;
1996 
1997   bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1998   {
1999     return context_intersects (glyphs,
2000 			       inputCount, inputZ.arrayZ,
2001 			       lookup_context);
2002   }
2003 
2004   void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
2005   {
2006     if (unlikely (c->lookup_limit_exceeded ())) return;
2007 
2008     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2009 					   (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
2010     context_closure_lookup (c,
2011 			    inputCount, inputZ.arrayZ,
2012 			    lookupCount, lookupRecord.arrayZ,
2013 			    value, lookup_context);
2014   }
2015 
2016   void closure_lookups (hb_closure_lookups_context_t *c,
2017                         ContextClosureLookupContext &lookup_context) const
2018   {
2019     if (unlikely (c->lookup_limit_exceeded ())) return;
2020     if (!intersects (c->glyphs, lookup_context)) return;
2021 
2022     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2023 					   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2024     recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
2025   }
2026 
2027   void collect_glyphs (hb_collect_glyphs_context_t *c,
2028 		       ContextCollectGlyphsLookupContext &lookup_context) const
2029   {
2030     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2031 					   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2032     context_collect_glyphs_lookup (c,
2033 				   inputCount, inputZ.arrayZ,
2034 				   lookupCount, lookupRecord.arrayZ,
2035 				   lookup_context);
2036   }
2037 
2038   bool would_apply (hb_would_apply_context_t *c,
2039 		    const ContextApplyLookupContext &lookup_context) const
2040   {
2041     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2042 					   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2043     return context_would_apply_lookup (c,
2044 				       inputCount, inputZ.arrayZ,
2045 				       lookupCount, lookupRecord.arrayZ,
2046 				       lookup_context);
2047   }
2048 
2049   bool apply (hb_ot_apply_context_t *c,
2050 	      const ContextApplyLookupContext &lookup_context) const
2051   {
2052     TRACE_APPLY (this);
2053     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2054 					   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
2055     return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
2056   }
2057 
2058   bool serialize (hb_serialize_context_t *c,
2059 		  const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
2060 		  const hb_map_t *lookup_map) const
2061   {
2062     TRACE_SERIALIZE (this);
2063     auto *out = c->start_embed (this);
2064     if (unlikely (!c->extend_min (out))) return_trace (false);
2065 
2066     out->inputCount = inputCount;
2067     const auto input = inputZ.as_array (inputCount - 1);
2068     for (const auto org : input)
2069     {
2070       HBUINT16 d;
2071       d = input_mapping->get (org);
2072       c->copy (d);
2073     }
2074 
2075     const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
2076 					   (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
2077 
2078     unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
2079     return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2080   }
2081 
2082   bool subset (hb_subset_context_t *c,
2083 	       const hb_map_t *lookup_map,
2084 	       const hb_map_t *klass_map = nullptr) const
2085   {
2086     TRACE_SUBSET (this);
2087     if (unlikely (!inputCount)) return_trace (false);
2088     const auto input = inputZ.as_array (inputCount - 1);
2089 
2090     const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
2091     if (!hb_all (input, mapping)) return_trace (false);
2092     return_trace (serialize (c->serializer, mapping, lookup_map));
2093   }
2094 
2095   public:
2096   bool sanitize (hb_sanitize_context_t *c) const
2097   {
2098     TRACE_SANITIZE (this);
2099     return_trace (c->check_struct (this) &&
2100 		  hb_barrier () &&
2101 		  c->check_range (inputZ.arrayZ,
2102 				  inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
2103 				  LookupRecord::static_size * lookupCount));
2104   }
2105 
2106   protected:
2107   HBUINT16	inputCount;		/* Total number of glyphs in input
2108 					 * glyph sequence--includes the first
2109 					 * glyph */
2110   HBUINT16	lookupCount;		/* Number of LookupRecords */
2111   UnsizedArrayOf<typename Types::HBUINT>
2112 		inputZ;			/* Array of match inputs--start with
2113 					 * second glyph */
2114 /*UnsizedArrayOf<LookupRecord>
2115 		lookupRecordX;*/	/* Array of LookupRecords--in
2116 					 * design order */
2117   public:
2118   DEFINE_SIZE_ARRAY (4, inputZ);
2119 };
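/* For reference, the layout behind the StructAfter<> accesses above:
 * inputCount, lookupCount, (inputCount - 1) input values, then lookupCount
 * LookupRecords, with no padding in between. */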
2120 
2121 template <typename Types>
2122 struct RuleSet
2123 {
2124   using Rule = OT::Rule<Types>;
2125 
2126   bool intersects (const hb_set_t *glyphs,
2127 		   ContextClosureLookupContext &lookup_context) const
2128   {
2129     return
2130     + hb_iter (rule)
2131     | hb_map (hb_add (this))
2132     | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
2133     | hb_any
2134     ;
2135   }
2136 
2137   void closure (hb_closure_context_t *c, unsigned value,
2138 		ContextClosureLookupContext &lookup_context) const
2139   {
2140     if (unlikely (c->lookup_limit_exceeded ())) return;
2141 
2142     return
2143     + hb_iter (rule)
2144     | hb_map (hb_add (this))
2145     | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
2146     ;
2147   }
2148 
2149   void closure_lookups (hb_closure_lookups_context_t *c,
2150                         ContextClosureLookupContext &lookup_context) const
2151   {
2152     if (unlikely (c->lookup_limit_exceeded ())) return;
2153     + hb_iter (rule)
2154     | hb_map (hb_add (this))
2155     | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
2156     ;
2157   }
2158 
2159   void collect_glyphs (hb_collect_glyphs_context_t *c,
2160 		       ContextCollectGlyphsLookupContext &lookup_context) const
2161   {
2162     return
2163     + hb_iter (rule)
2164     | hb_map (hb_add (this))
2165     | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
2166     ;
2167   }
2168 
2169   bool would_apply (hb_would_apply_context_t *c,
2170 		    const ContextApplyLookupContext &lookup_context) const
2171   {
2172     return
2173     + hb_iter (rule)
2174     | hb_map (hb_add (this))
2175     | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
2176     | hb_any
2177     ;
2178   }
2179 
2180   bool apply (hb_ot_apply_context_t *c,
2181 	      const ContextApplyLookupContext &lookup_context) const
2182   {
2183     TRACE_APPLY (this);
2184 
2185     unsigned num_rules = rule.len;
2186 
2187 #ifndef HB_NO_OT_RULESETS_FAST_PATH
2188     if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
2189 #endif
2190     {
2191     slow:
2192       return_trace (
2193       + hb_iter (rule)
2194       | hb_map (hb_add (this))
2195       | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
2196       | hb_any
2197       )
2198       ;
2199     }
2200 
2201     /* This version is optimized for speed by matching the first & second
2202      * components of the rule here, instead of calling into the matching code.
2203      *
2204      * Replicated from LigatureSet::apply(). */
2205 
2206     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2207     skippy_iter.reset (c->buffer->idx);
2208     skippy_iter.set_match_func (match_always, nullptr);
2209     skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
2210     unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
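    /* unsafe_to1/unsafe_to2 record the positions just past the first and
     * second candidate glyphs; when a rule fails to match one of them,
     * unsafe_to is widened so the range can be flagged unsafe-to-concat
     * below. */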
2211     hb_glyph_info_t *first = nullptr, *second = nullptr;
2212     bool matched = skippy_iter.next ();
2213     if (likely (matched))
2214     {
2215       first = &c->buffer->info[skippy_iter.idx];
2216       unsafe_to1 = skippy_iter.idx + 1;
2217 
2218       if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
2219       {
2220 	/* Can't use the fast path if eg. the next char is a default-ignorable
2221 	 * or other skippable. */
2222         goto slow;
2223       }
2224     }
2225     else
2226     {
2227       /* Failed to match a next glyph. Only try applying rules that have
2228        * no further input. */
2229       return_trace (
2230       + hb_iter (rule)
2231       | hb_map (hb_add (this))
2232       | hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; })
2233       | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
2234       | hb_any
2235       )
2236       ;
2237     }
2238     matched = skippy_iter.next ();
2239     if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
2240     {
2241       second = &c->buffer->info[skippy_iter.idx];
2242       unsafe_to2 = skippy_iter.idx + 1;
2243     }
2244 
2245     auto match_input = lookup_context.funcs.match;
2246     auto *input_data = lookup_context.match_data;
2247     for (unsigned int i = 0; i < num_rules; i++)
2248     {
2249       const auto &r = this+rule.arrayZ[i];
2250 
2251       const auto &input = r.inputZ;
2252 
2253       if (r.inputCount <= 1 ||
2254 	  (!match_input ||
2255 	   match_input (*first, input.arrayZ[0], input_data)))
2256       {
2257         if (!second ||
2258 	    (r.inputCount <= 2 ||
2259 	     (!match_input ||
2260 	      match_input (*second, input.arrayZ[1], input_data)))
2261 	   )
2262 	{
2263 	  if (r.apply (c, lookup_context))
2264 	  {
2265 	    if (unsafe_to != (unsigned) -1)
2266 	      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
2267 	    return_trace (true);
2268 	  }
2269 	}
2270 	else
2271 	  unsafe_to = unsafe_to2;
2272       }
2273       else
2274       {
2275 	if (unsafe_to == (unsigned) -1)
2276 	  unsafe_to = unsafe_to1;
2277       }
2278     }
2279     if (likely (unsafe_to != (unsigned) -1))
2280       c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
2281 
2282     return_trace (false);
2283   }
2284 
2285   bool subset (hb_subset_context_t *c,
2286 	       const hb_map_t *lookup_map,
2287 	       const hb_map_t *klass_map = nullptr) const
2288   {
2289     TRACE_SUBSET (this);
2290 
2291     auto snap = c->serializer->snapshot ();
2292     auto *out = c->serializer->start_embed (*this);
2293     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2294 
2295     for (const Offset16To<Rule>& _ : rule)
2296     {
2297       if (!_) continue;
2298       auto o_snap = c->serializer->snapshot ();
2299       auto *o = out->rule.serialize_append (c->serializer);
2300       if (unlikely (!o)) continue;
2301 
2302       if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
2303       {
2304 	out->rule.pop ();
2305 	c->serializer->revert (o_snap);
2306       }
2307     }
2308 
2309     bool ret = bool (out->rule);
2310     if (!ret) c->serializer->revert (snap);
2311 
2312     return_trace (ret);
2313   }
2314 
2315   bool sanitize (hb_sanitize_context_t *c) const
2316   {
2317     TRACE_SANITIZE (this);
2318     return_trace (rule.sanitize (c, this));
2319   }
2320 
2321   protected:
2322   Array16OfOffset16To<Rule>
2323 		rule;			/* Array of Rule tables
2324 					 * ordered by preference */
2325   public:
2326   DEFINE_SIZE_ARRAY (2, rule);
2327 };
2328 
2329 
2330 template <typename Types>
2331 struct ContextFormat1_4
2332 {
2333   using RuleSet = OT::RuleSet<Types>;
2334 
2335   bool intersects (const hb_set_t *glyphs) const
2336   {
2337     struct ContextClosureLookupContext lookup_context = {
2338       {intersects_glyph, intersected_glyph},
2339       ContextFormat::SimpleContext,
2340       nullptr
2341     };
2342 
2343     return
2344     + hb_zip (this+coverage, ruleSet)
2345     | hb_filter (*glyphs, hb_first)
2346     | hb_map (hb_second)
2347     | hb_map (hb_add (this))
2348     | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
2349     | hb_any
2350     ;
2351   }
2352 
2353   bool may_have_non_1to1 () const
2354   { return true; }
2355 
2356   void closure (hb_closure_context_t *c) const
2357   {
2358     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2359     if (unlikely (!cur_active_glyphs)) return;
2360     get_coverage ().intersect_set (c->previous_parent_active_glyphs (), *cur_active_glyphs);
2361 
2362     struct ContextClosureLookupContext lookup_context = {
2363       {intersects_glyph, intersected_glyph},
2364       ContextFormat::SimpleContext,
2365       nullptr
2366     };
2367 
2368     + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2369     | hb_filter ([&] (hb_codepoint_t _) {
2370       return c->previous_parent_active_glyphs ().has (_);
2371     }, hb_first)
2372     | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
2373     | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2374     ;
2375 
2376     c->pop_cur_done_glyphs ();
2377   }
2378 
2379   void closure_lookups (hb_closure_lookups_context_t *c) const
2380   {
2381     struct ContextClosureLookupContext lookup_context = {
2382       {intersects_glyph, nullptr},
2383       ContextFormat::SimpleContext,
2384       nullptr
2385     };
2386 
2387     + hb_zip (this+coverage, ruleSet)
2388     | hb_filter (*c->glyphs, hb_first)
2389     | hb_map (hb_second)
2390     | hb_map (hb_add (this))
2391     | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
2392     ;
2393   }
2394 
2395   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2396 
2397   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2398   {
2399     (this+coverage).collect_coverage (c->input);
2400 
2401     struct ContextCollectGlyphsLookupContext lookup_context = {
2402       {collect_glyph},
2403       nullptr
2404     };
2405 
2406     + hb_iter (ruleSet)
2407     | hb_map (hb_add (this))
2408     | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2409     ;
2410   }
2411 
2412   bool would_apply (hb_would_apply_context_t *c) const
2413   {
2414     const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2415     struct ContextApplyLookupContext lookup_context = {
2416       {match_glyph},
2417       nullptr
2418     };
2419     return rule_set.would_apply (c, lookup_context);
2420   }
2421 
2422   const Coverage &get_coverage () const { return this+coverage; }
2423 
2424   bool apply (hb_ot_apply_context_t *c) const
2425   {
2426     TRACE_APPLY (this);
2427     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2428     if (likely (index == NOT_COVERED))
2429       return_trace (false);
2430 
2431     const RuleSet &rule_set = this+ruleSet[index];
2432     struct ContextApplyLookupContext lookup_context = {
2433       {match_glyph},
2434       nullptr
2435     };
2436     return_trace (rule_set.apply (c, lookup_context));
2437   }
2438 
2439   bool subset (hb_subset_context_t *c) const
2440   {
2441     TRACE_SUBSET (this);
2442     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2443     const hb_map_t &glyph_map = *c->plan->glyph_map;
2444 
2445     auto *out = c->serializer->start_embed (*this);
2446     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2447     out->format = format;
2448 
2449     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2450     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2451     + hb_zip (this+coverage, ruleSet)
2452     | hb_filter (glyphset, hb_first)
2453     | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2454     | hb_map (hb_first)
2455     | hb_map (glyph_map)
2456     | hb_sink (new_coverage)
2457     ;
2458 
2459     out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2460     return_trace (bool (new_coverage));
2461   }
2462 
2463   bool sanitize (hb_sanitize_context_t *c) const
2464   {
2465     TRACE_SANITIZE (this);
2466     return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2467   }
2468 
2469   protected:
2470   HBUINT16	format;			/* Format identifier--format = 1 */
2471   typename Types::template OffsetTo<Coverage>
2472 		coverage;		/* Offset to Coverage table--from
2473 					 * beginning of table */
2474   Array16Of<typename Types::template OffsetTo<RuleSet>>
2475 		ruleSet;		/* Array of RuleSet tables
2476 					 * ordered by Coverage Index */
2477   public:
2478   DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
2479 };
2480 
2481 
2482 template <typename Types>
2483 struct ContextFormat2_5
2484 {
2485   using RuleSet = OT::RuleSet<SmallTypes>;
2486 
2487   bool intersects (const hb_set_t *glyphs) const
2488   {
2489     if (!(this+coverage).intersects (glyphs))
2490       return false;
2491 
2492     const ClassDef &class_def = this+classDef;
2493 
2494     hb_map_t cache;
2495     struct ContextClosureLookupContext lookup_context = {
2496       {intersects_class, nullptr},
2497       ContextFormat::ClassBasedContext,
2498       &class_def,
2499       &cache
2500     };
2501 
2502     hb_set_t retained_coverage_glyphs;
2503     (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
2504 
2505     hb_set_t coverage_glyph_classes;
2506     class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2507 
2508 
2509     return
2510     + hb_iter (ruleSet)
2511     | hb_map (hb_add (this))
2512     | hb_enumerate
2513     | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2514 	      { return class_def.intersects_class (glyphs, p.first) &&
2515 		       coverage_glyph_classes.has (p.first) &&
2516 		       p.second.intersects (glyphs, lookup_context); })
2517     | hb_any
2518     ;
2519   }
2520 
2521   bool may_have_non_1to1 () const
2522   { return true; }
2523 
2524   void closure (hb_closure_context_t *c) const
2525   {
2526     if (!(this+coverage).intersects (c->glyphs))
2527       return;
2528 
2529     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2530     if (unlikely (!cur_active_glyphs)) return;
2531     get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
2532 				   *cur_active_glyphs);
2533 
2534     const ClassDef &class_def = this+classDef;
2535 
2536     hb_map_t cache;
2537     intersected_class_cache_t intersected_cache;
2538     struct ContextClosureLookupContext lookup_context = {
2539       {intersects_class, intersected_class_glyphs},
2540       ContextFormat::ClassBasedContext,
2541       &class_def,
2542       &cache,
2543       &intersected_cache
2544     };
2545 
2546     + hb_enumerate (ruleSet)
2547     | hb_filter ([&] (unsigned _)
2548     { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
2549 		 hb_first)
2550     | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _)
2551                 {
2552                   const RuleSet& rule_set = this+_.second;
2553                   rule_set.closure (c, _.first, lookup_context);
2554                 })
2555     ;
2556 
2557     c->pop_cur_done_glyphs ();
2558   }
2559 
2560   void closure_lookups (hb_closure_lookups_context_t *c) const
2561   {
2562     if (!(this+coverage).intersects (c->glyphs))
2563       return;
2564 
2565     const ClassDef &class_def = this+classDef;
2566 
2567     hb_map_t cache;
2568     struct ContextClosureLookupContext lookup_context = {
2569       {intersects_class, nullptr},
2570       ContextFormat::ClassBasedContext,
2571       &class_def,
2572       &cache
2573     };
2574 
2575     + hb_iter (ruleSet)
2576     | hb_map (hb_add (this))
2577     | hb_enumerate
2578     | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
2579     { return class_def.intersects_class (c->glyphs, p.first); })
2580     | hb_map (hb_second)
2581     | hb_apply ([&] (const RuleSet & _)
2582     { _.closure_lookups (c, lookup_context); });
2583   }
2584 
2585   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2586 
2587   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2588   {
2589     (this+coverage).collect_coverage (c->input);
2590 
2591     const ClassDef &class_def = this+classDef;
2592     struct ContextCollectGlyphsLookupContext lookup_context = {
2593       {collect_class},
2594       &class_def
2595     };
2596 
2597     + hb_iter (ruleSet)
2598     | hb_map (hb_add (this))
2599     | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
2600     ;
2601   }
2602 
2603   bool would_apply (hb_would_apply_context_t *c) const
2604   {
2605     const ClassDef &class_def = this+classDef;
2606     unsigned int index = class_def.get_class (c->glyphs[0]);
2607     const RuleSet &rule_set = this+ruleSet[index];
2608     struct ContextApplyLookupContext lookup_context = {
2609       {match_class},
2610       &class_def
2611     };
2612     return rule_set.would_apply (c, lookup_context);
2613   }
2614 
2615   const Coverage &get_coverage () const { return this+coverage; }
2616 
2617   unsigned cache_cost () const
2618   {
2619     unsigned c = (this+classDef).cost () * ruleSet.len;
2620     return c >= 4 ? c : 0;
2621   }
2622   bool cache_func (hb_ot_apply_context_t *c, bool enter) const
2623   {
2624     if (enter)
2625     {
2626       if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
2627 	return false;
2628       auto &info = c->buffer->info;
2629       unsigned count = c->buffer->len;
2630       for (unsigned i = 0; i < count; i++)
2631 	info[i].syllable() = 255;
2632       c->new_syllables = 255;
2633       return true;
2634     }
2635     else
2636     {
2637       c->new_syllables = (unsigned) -1;
2638       HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
2639       return true;
2640     }
2641   }
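  /* Note: the cache repurposes the per-glyph syllable field.  cache_func()
   * resets every syllable to 255 ("no class cached yet"); match_class_cached
   * stores computed classes there, and _apply() below reuses a stored value
   * when syllable() < 255 instead of re-querying the ClassDef. */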
2642 
2643   bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
2644   bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
2645   bool _apply (hb_ot_apply_context_t *c, bool cached) const
2646   {
2647     TRACE_APPLY (this);
2648     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2649     if (likely (index == NOT_COVERED)) return_trace (false);
2650 
2651     const ClassDef &class_def = this+classDef;
2652 
2653     struct ContextApplyLookupContext lookup_context = {
2654       {cached ? match_class_cached : match_class},
2655       &class_def
2656     };
2657 
2658     if (cached && c->buffer->cur().syllable() < 255)
2659       index = c->buffer->cur().syllable ();
2660     else
2661       index = class_def.get_class (c->buffer->cur().codepoint);
2662     const RuleSet &rule_set = this+ruleSet[index];
2663     return_trace (rule_set.apply (c, lookup_context));
2664   }
2665 
2666   bool subset (hb_subset_context_t *c) const
2667   {
2668     TRACE_SUBSET (this);
2669     auto *out = c->serializer->start_embed (*this);
2670     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2671     out->format = format;
2672     if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
2673       return_trace (false);
2674 
2675     hb_map_t klass_map;
2676     out->classDef.serialize_subset (c, classDef, this, &klass_map);
2677 
2678     const hb_set_t* glyphset = c->plan->glyphset_gsub ();
2679     hb_set_t retained_coverage_glyphs;
2680     (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
2681 
2682     hb_set_t coverage_glyph_classes;
2683     (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
2684 
2685     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2686     bool ret = true;
2687     int non_zero_index = -1, index = 0;
2688     auto snapshot = c->serializer->snapshot();
2689     for (const auto& _ : + hb_enumerate (ruleSet)
2690 			 | hb_filter (klass_map, hb_first))
2691     {
2692       auto *o = out->ruleSet.serialize_append (c->serializer);
2693       if (unlikely (!o))
2694       {
2695 	ret = false;
2696 	break;
2697       }
2698 
2699       if (coverage_glyph_classes.has (_.first) &&
2700 	  o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
2701 	non_zero_index = index;
2702         snapshot = c->serializer->snapshot();
2703       }
2704 
2705       index++;
2706     }
2707 
2708     if (!ret || non_zero_index == -1) return_trace (false);
2709 
2710     /* Prune empty trailing ruleSets. */
2711     --index;
2712     while (index > non_zero_index)
2713     {
2714       out->ruleSet.pop ();
2715       index--;
2716     }
2717     c->serializer->revert (snapshot);
2718 
2719     return_trace (bool (out->ruleSet));
2720   }
2721 
2722   bool sanitize (hb_sanitize_context_t *c) const
2723   {
2724     TRACE_SANITIZE (this);
2725     return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
2726   }
2727 
2728   protected:
2729   HBUINT16	format;			/* Format identifier--format = 2 */
2730   typename Types::template OffsetTo<Coverage>
2731 		coverage;		/* Offset to Coverage table--from
2732 					 * beginning of table */
2733   typename Types::template OffsetTo<ClassDef>
2734 		classDef;		/* Offset to glyph ClassDef table--from
2735 					 * beginning of table */
2736   Array16Of<typename Types::template OffsetTo<RuleSet>>
2737 		ruleSet;		/* Array of RuleSet tables
2738 					 * ordered by class */
2739   public:
2740   DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet);
2741 };
2742 
2743 
2744 struct ContextFormat3
2745 {
2746   using RuleSet = OT::RuleSet<SmallTypes>;
2747 
2748   bool intersects (const hb_set_t *glyphs) const
2749   {
2750     if (!(this+coverageZ[0]).intersects (glyphs))
2751       return false;
2752 
2753     struct ContextClosureLookupContext lookup_context = {
2754       {intersects_coverage, nullptr},
2755       ContextFormat::CoverageBasedContext,
2756       this
2757     };
2758     return context_intersects (glyphs,
2759 			       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2760 			       lookup_context);
2761   }
2762 
2763   bool may_have_non_1to1 () const
2764   { return true; }
2765 
2766   void closure (hb_closure_context_t *c) const
2767   {
2768     if (!(this+coverageZ[0]).intersects (c->glyphs))
2769       return;
2770 
2771     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
2772     if (unlikely (!cur_active_glyphs)) return;
2773     get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
2774 				   *cur_active_glyphs);
2775 
2776     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2777     struct ContextClosureLookupContext lookup_context = {
2778       {intersects_coverage, intersected_coverage_glyphs},
2779       ContextFormat::CoverageBasedContext,
2780       this
2781     };
2782     context_closure_lookup (c,
2783 			    glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2784 			    lookupCount, lookupRecord,
2785 			    0, lookup_context);
2786 
2787     c->pop_cur_done_glyphs ();
2788   }
2789 
2790   void closure_lookups (hb_closure_lookups_context_t *c) const
2791   {
2792     if (!intersects (c->glyphs))
2793       return;
2794     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2795     recurse_lookups (c, lookupCount, lookupRecord);
2796   }
2797 
2798   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2799 
2800   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2801   {
2802     (this+coverageZ[0]).collect_coverage (c->input);
2803 
2804     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2805     struct ContextCollectGlyphsLookupContext lookup_context = {
2806       {collect_coverage},
2807       this
2808     };
2809 
2810     context_collect_glyphs_lookup (c,
2811 				   glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2812 				   lookupCount, lookupRecord,
2813 				   lookup_context);
2814   }
2815 
2816   bool would_apply (hb_would_apply_context_t *c) const
2817   {
2818     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2819     struct ContextApplyLookupContext lookup_context = {
2820       {match_coverage},
2821       this
2822     };
2823     return context_would_apply_lookup (c,
2824 				       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
2825 				       lookupCount, lookupRecord,
2826 				       lookup_context);
2827   }
2828 
2829   const Coverage &get_coverage () const { return this+coverageZ[0]; }
2830 
2831   bool apply (hb_ot_apply_context_t *c) const
2832   {
2833     TRACE_APPLY (this);
2834     unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
2835     if (likely (index == NOT_COVERED)) return_trace (false);
2836 
2837     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2838     struct ContextApplyLookupContext lookup_context = {
2839       {match_coverage},
2840       this
2841     };
2842     return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
2843   }
2844 
2845   bool subset (hb_subset_context_t *c) const
2846   {
2847     TRACE_SUBSET (this);
2848     auto *out = c->serializer->start_embed (this);
2849     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2850 
2851     out->format = format;
2852     out->glyphCount = glyphCount;
2853 
2854     auto coverages = coverageZ.as_array (glyphCount);
2855 
2856     for (const Offset16To<Coverage>& offset : coverages)
2857     {
2858       /* TODO(subset) This looks like it should not be necessary to write this way. */
2859       auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
2860       if (unlikely (!o)) return_trace (false);
2861       if (!o->serialize_subset (c, offset, this)) return_trace (false);
2862     }
2863 
2864     const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
2865     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
2866 
2867 
2868     unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
2869     return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2870   }
2871 
2872   bool sanitize (hb_sanitize_context_t *c) const
2873   {
2874     TRACE_SANITIZE (this);
2875     if (unlikely (!c->check_struct (this))) return_trace (false);
2876     hb_barrier ();
2877     unsigned int count = glyphCount;
2878     if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
2879     if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
2880     for (unsigned int i = 0; i < count; i++)
2881       if (unlikely (!coverageZ[i].sanitize (c, this))) return_trace (false);
2882     const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
2883     return_trace (likely (c->check_array (lookupRecord, lookupCount)));
2884   }
2885 
2886   protected:
2887   HBUINT16	format;			/* Format identifier--format = 3 */
2888   HBUINT16	glyphCount;		/* Number of glyphs in the input glyph
2889 					 * sequence */
2890   HBUINT16	lookupCount;		/* Number of LookupRecords */
2891   UnsizedArrayOf<Offset16To<Coverage>>
2892 		coverageZ;		/* Array of offsets to Coverage
2893 					 * table in glyph sequence order */
2894 /*UnsizedArrayOf<LookupRecord>
2895 		lookupRecordX;*/	/* Array of LookupRecords--in
2896 					 * design order */
2897   public:
2898   DEFINE_SIZE_ARRAY (6, coverageZ);
2899 };
2900 
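/* A sketch of the ContextFormat3 wire layout, reconstructed from the member
 * declarations above (illustrative, not normative):
 *
 *   HBUINT16             format;                    // = 3
 *   HBUINT16             glyphCount;                // input positions, incl. the first
 *   HBUINT16             lookupCount;
 *   Offset16To<Coverage> coverageZ[glyphCount];     // one Coverage per position
 *   LookupRecord         lookupRecord[lookupCount];
 *
 * Only coverageZ is a named member; the LookupRecord array is trailing data,
 * which is why the methods above locate it with StructAfter<>. */
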
2901 struct Context
2902 {
2903   template <typename context_t, typename ...Ts>
2904   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2905   {
2906     if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
2907     TRACE_DISPATCH (this, u.format);
2908     switch (u.format) {
2909     case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2910     case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
2911     case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
2912 #ifndef HB_NO_BEYOND_64K
2913     case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
2914     case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
2915 #endif
2916     default:return_trace (c->default_return_value ());
2917     }
2918   }
2919 
2920   protected:
2921   union {
2922   HBUINT16			format;		/* Format identifier */
2923   ContextFormat1_4<SmallTypes>	format1;
2924   ContextFormat2_5<SmallTypes>	format2;
2925   ContextFormat3		format3;
2926 #ifndef HB_NO_BEYOND_64K
2927   ContextFormat1_4<MediumTypes>	format4;
2928   ContextFormat2_5<MediumTypes>	format5;
2929 #endif
2930   } u;
2931 };
2932 
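/* The Context/ChainContext wrappers share one dispatch pattern: the union is
 * tagged by its leading HBUINT16 format field, and dispatch() forwards the
 * context object to the matching member.  A minimal caller sketch, assuming a
 * dispatch context `c` built elsewhere:
 *
 *   const Context &ctx = ...;         // mapped from the font blob
 *   auto result = ctx.dispatch (&c);  // routes to u.format1/2/3/...
 *
 * The hb_barrier() after each format check keeps the sanitizer's validation
 * of the format field ordered before any access to the chosen member. */
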
2933 
2934 /* Chaining Contextual lookups */
2935 
2936 struct ChainContextClosureLookupContext
2937 {
2938   ContextClosureFuncs funcs;
2939   ContextFormat context_format;
2940   const void *intersects_data[3];
2941   void *intersects_cache[3];
2942   void *intersected_glyphs_cache;
2943 };
2944 
2945 struct ChainContextCollectGlyphsLookupContext
2946 {
2947   ContextCollectGlyphsFuncs funcs;
2948   const void *collect_data[3];
2949 };
2950 
2951 struct ChainContextApplyLookupContext
2952 {
2953   ChainContextApplyFuncs funcs;
2954   const void *match_data[3];
2955 };
2956 
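/* In all three lookup-context structs above, the three array slots follow the
 * same convention: [0] backtrack, [1] input, [2] lookahead.  A minimal sketch
 * of how a glyph-based caller fills one in, mirroring
 * ChainContextFormat1_4::would_apply() below:
 *
 *   struct ChainContextApplyLookupContext lookup_context = {
 *     {{match_glyph, match_glyph, match_glyph}},
 *     {nullptr, nullptr, nullptr}  // glyph matching needs no per-slot data
 *   };
 */
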
2957 template <typename HBUINT>
2958 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2959 					     unsigned int backtrackCount,
2960 					     const HBUINT backtrack[],
2961 					     unsigned int inputCount, /* Including the first glyph (not matched) */
2962 					     const HBUINT input[], /* Array of input values--start with second glyph */
2963 					     unsigned int lookaheadCount,
2964 					     const HBUINT lookahead[],
2965 					     ChainContextClosureLookupContext &lookup_context)
2966 {
2967   return array_is_subset_of (glyphs,
2968 			     backtrackCount, backtrack,
2969 			     lookup_context.funcs.intersects,
2970 			     lookup_context.intersects_data[0],
2971 			     lookup_context.intersects_cache[0])
2972       && array_is_subset_of (glyphs,
2973 			     inputCount ? inputCount - 1 : 0, input,
2974 			     lookup_context.funcs.intersects,
2975 			     lookup_context.intersects_data[1],
2976 			     lookup_context.intersects_cache[1])
2977       && array_is_subset_of (glyphs,
2978 			     lookaheadCount, lookahead,
2979 			     lookup_context.funcs.intersects,
2980 			     lookup_context.intersects_data[2],
2981 			     lookup_context.intersects_cache[2]);
2982 }
2983 
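/* chain_context_intersects() answers "could this rule ever fire against the
 * given glyph set": every backtrack, input, and lookahead element must match
 * at least one glyph in `glyphs`.  Note the `inputCount ? inputCount - 1 : 0`
 * term: inputCount includes the first glyph, but that glyph is matched by the
 * enclosing Coverage/ClassDef, so the input array holds only the remaining
 * inputCount - 1 entries. */
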
2984 template <typename HBUINT>
2985 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
2986 						 unsigned int backtrackCount,
2987 						 const HBUINT backtrack[],
2988 						 unsigned int inputCount, /* Including the first glyph (not matched) */
2989 						 const HBUINT input[], /* Array of input values--start with second glyph */
2990 						 unsigned int lookaheadCount,
2991 						 const HBUINT lookahead[],
2992 						 unsigned int lookupCount,
2993 						 const LookupRecord lookupRecord[],
2994 						 unsigned value,
2995 						 ChainContextClosureLookupContext &lookup_context)
2996 {
2997   if (chain_context_intersects (c->glyphs,
2998 				backtrackCount, backtrack,
2999 				inputCount, input,
3000 				lookaheadCount, lookahead,
3001 				lookup_context))
3002     context_closure_recurse_lookups (c,
3003 		     inputCount, input,
3004 		     lookupCount, lookupRecord,
3005 		     value,
3006 		     lookup_context.context_format,
3007 		     lookup_context.intersects_data[1],
3008 		     lookup_context.funcs.intersected_glyphs,
3009 		     lookup_context.intersected_glyphs_cache);
3010 }
3011 
3012 template <typename HBUINT>
3013 static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
3014 							unsigned int backtrackCount,
3015 							const HBUINT backtrack[],
3016 							unsigned int inputCount, /* Including the first glyph (not matched) */
3017 							const HBUINT input[], /* Array of input values--start with second glyph */
3018 							unsigned int lookaheadCount,
3019 							const HBUINT lookahead[],
3020 							unsigned int lookupCount,
3021 							const LookupRecord lookupRecord[],
3022 							ChainContextCollectGlyphsLookupContext &lookup_context)
3023 {
3024   collect_array (c, c->before,
3025 		 backtrackCount, backtrack,
3026 		 lookup_context.funcs.collect, lookup_context.collect_data[0]);
3027   collect_array (c, c->input,
3028 		 inputCount ? inputCount - 1 : 0, input,
3029 		 lookup_context.funcs.collect, lookup_context.collect_data[1]);
3030   collect_array (c, c->after,
3031 		 lookaheadCount, lookahead,
3032 		 lookup_context.funcs.collect, lookup_context.collect_data[2]);
3033   recurse_lookups (c,
3034 		   lookupCount, lookupRecord);
3035 }
3036 
3037 template <typename HBUINT>
3038 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
3039 						     unsigned int backtrackCount,
3040 						     const HBUINT backtrack[] HB_UNUSED,
3041 						     unsigned int inputCount, /* Including the first glyph (not matched) */
3042 						     const HBUINT input[], /* Array of input values--start with second glyph */
3043 						     unsigned int lookaheadCount,
3044 						     const HBUINT lookahead[] HB_UNUSED,
3045 						     unsigned int lookupCount HB_UNUSED,
3046 						     const LookupRecord lookupRecord[] HB_UNUSED,
3047 						     const ChainContextApplyLookupContext &lookup_context)
3048 {
3049   return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
3050       && would_match_input (c,
3051 			    inputCount, input,
3052 			    lookup_context.funcs.match[1], lookup_context.match_data[1]);
3053 }
3054 
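/* Note that would-apply testing only checks the input sequence; backtrack and
 * lookahead content is ignored, except that with c->zero_context set any rule
 * that has surrounding context at all is rejected up front. */
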
3055 template <typename HBUINT>
3056 HB_ALWAYS_INLINE
3057 static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
3058 					unsigned int backtrackCount,
3059 					const HBUINT backtrack[],
3060 					unsigned int inputCount, /* Including the first glyph (not matched) */
3061 					const HBUINT input[], /* Array of input values--start with second glyph */
3062 					unsigned int lookaheadCount,
3063 					const HBUINT lookahead[],
3064 					unsigned int lookupCount,
3065 					const LookupRecord lookupRecord[],
3066 					const ChainContextApplyLookupContext &lookup_context)
3067 {
3068   if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
3069   unsigned match_positions_stack[4];
3070   unsigned *match_positions = match_positions_stack;
3071   if (unlikely (inputCount > ARRAY_LENGTH (match_positions_stack)))
3072   {
3073     match_positions = (unsigned *) hb_malloc (hb_max (inputCount, 1u) * sizeof (match_positions[0]));
3074     if (unlikely (!match_positions))
3075       return false;
3076   }
3077 
3078   unsigned start_index = c->buffer->out_len;
3079   unsigned end_index = c->buffer->idx;
3080   unsigned match_end = 0;
3081   bool ret = true;
3082   if (!(match_input (c,
3083 		     inputCount, input,
3084 		     lookup_context.funcs.match[1], lookup_context.match_data[1],
3085 		     &match_end, match_positions) && (end_index = match_end)
3086        && match_lookahead (c,
3087 			   lookaheadCount, lookahead,
3088 			   lookup_context.funcs.match[2], lookup_context.match_data[2],
3089 			   match_end, &end_index)))
3090   {
3091     c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
3092     ret = false;
3093     goto done;
3094   }
3095 
3096   if (!match_backtrack (c,
3097 			backtrackCount, backtrack,
3098 			lookup_context.funcs.match[0], lookup_context.match_data[0],
3099 			&start_index))
3100   {
3101     c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
3102     ret = false;
3103     goto done;
3104   }
3105 
3106   c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
3107   apply_lookup (c,
3108 		inputCount, match_positions,
3109 		lookupCount, lookupRecord,
3110 		match_end);
3111   done:
3112 
3113   if (unlikely (match_positions != match_positions_stack))
3114     hb_free (match_positions);
3115 
3116   return ret;
3117 }
3118 
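/* Matching above proceeds input first, then lookahead, then backtrack.  On
 * failure the inspected span is flagged unsafe_to_concat, and on success the
 * matched region is flagged unsafe_to_break, so that clients re-shaping the
 * text in smaller runs know where results may differ. */
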
3119 template <typename Types>
3120 struct ChainRule
3121 {
3122   template <typename T>
3123   friend struct ChainRuleSet;
3124 
3125   bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
3126   {
3127     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3128     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3129     return chain_context_intersects (glyphs,
3130 				     backtrack.len, backtrack.arrayZ,
3131 				     input.lenP1, input.arrayZ,
3132 				     lookahead.len, lookahead.arrayZ,
3133 				     lookup_context);
3134   }
3135 
3136   void closure (hb_closure_context_t *c, unsigned value,
3137 		ChainContextClosureLookupContext &lookup_context) const
3138   {
3139     if (unlikely (c->lookup_limit_exceeded ())) return;
3140 
3141     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3142     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3143     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3144     chain_context_closure_lookup (c,
3145 				  backtrack.len, backtrack.arrayZ,
3146 				  input.lenP1, input.arrayZ,
3147 				  lookahead.len, lookahead.arrayZ,
3148 				  lookup.len, lookup.arrayZ,
3149 				  value,
3150 				  lookup_context);
3151   }
3152 
3153   void closure_lookups (hb_closure_lookups_context_t *c,
3154                         ChainContextClosureLookupContext &lookup_context) const
3155   {
3156     if (unlikely (c->lookup_limit_exceeded ())) return;
3157     if (!intersects (c->glyphs, lookup_context)) return;
3158 
3159     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3160     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3161     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3162     recurse_lookups (c, lookup.len, lookup.arrayZ);
3163   }
3164 
3165   void collect_glyphs (hb_collect_glyphs_context_t *c,
3166 		       ChainContextCollectGlyphsLookupContext &lookup_context) const
3167   {
3168     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3169     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3170     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3171     chain_context_collect_glyphs_lookup (c,
3172 					 backtrack.len, backtrack.arrayZ,
3173 					 input.lenP1, input.arrayZ,
3174 					 lookahead.len, lookahead.arrayZ,
3175 					 lookup.len, lookup.arrayZ,
3176 					 lookup_context);
3177   }
3178 
3179   bool would_apply (hb_would_apply_context_t *c,
3180 		    const ChainContextApplyLookupContext &lookup_context) const
3181   {
3182     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3183     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3184     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3185     return chain_context_would_apply_lookup (c,
3186 					     backtrack.len, backtrack.arrayZ,
3187 					     input.lenP1, input.arrayZ,
3188 					     lookahead.len, lookahead.arrayZ, lookup.len,
3189 					     lookup.arrayZ, lookup_context);
3190   }
3191 
3192   bool apply (hb_ot_apply_context_t *c,
3193 	      const ChainContextApplyLookupContext &lookup_context) const
3194   {
3195     TRACE_APPLY (this);
3196     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3197     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3198     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3199     return_trace (chain_context_apply_lookup (c,
3200 					      backtrack.len, backtrack.arrayZ,
3201 					      input.lenP1, input.arrayZ,
3202 					      lookahead.len, lookahead.arrayZ, lookup.len,
3203 					      lookup.arrayZ, lookup_context));
3204   }
3205 
3206   template<typename Iterator,
3207 	   hb_requires (hb_is_iterator (Iterator))>
3208   void serialize_array (hb_serialize_context_t *c,
3209 			HBUINT16 len,
3210 			Iterator it) const
3211   {
3212     c->copy (len);
3213     for (const auto g : it)
3214       c->copy ((HBUINT16) g);
3215   }
3216 
3217   bool serialize (hb_serialize_context_t *c,
3218 		  const hb_map_t *lookup_map,
3219 		  const hb_map_t *backtrack_map,
3220 		  const hb_map_t *input_map = nullptr,
3221 		  const hb_map_t *lookahead_map = nullptr) const
3222   {
3223     TRACE_SERIALIZE (this);
3224 
3225     const hb_map_t *mapping = backtrack_map;
3226     serialize_array (c, backtrack.len, + backtrack.iter ()
3227 				       | hb_map (mapping));
3228 
3229     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3230     if (input_map) mapping = input_map;
3231     serialize_array (c, input.lenP1, + input.iter ()
3232 				     | hb_map (mapping));
3233 
3234     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3235     if (lookahead_map) mapping = lookahead_map;
3236     serialize_array (c, lookahead.len, + lookahead.iter ()
3237 				       | hb_map (mapping));
3238 
3239     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3240 
3241     HBUINT16* lookupCount = c->embed (&(lookup.len));
3242     if (!lookupCount) return_trace (false);
3243 
3244     unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map);
3245     return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
3246   }
3247 
3248   bool subset (hb_subset_context_t *c,
3249 	       const hb_map_t *lookup_map,
3250 	       const hb_map_t *backtrack_map = nullptr,
3251 	       const hb_map_t *input_map = nullptr,
3252 	       const hb_map_t *lookahead_map = nullptr) const
3253   {
3254     TRACE_SUBSET (this);
3255 
3256     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3257     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3258 
3259     if (!backtrack_map)
3260     {
3261       const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
3262       if (!hb_all (backtrack, glyphset) ||
3263 	  !hb_all (input, glyphset) ||
3264 	  !hb_all (lookahead, glyphset))
3265 	return_trace (false);
3266 
3267       serialize (c->serializer, lookup_map, c->plan->glyph_map);
3268     }
3269     else
3270     {
3271       if (!hb_all (backtrack, backtrack_map) ||
3272 	  !hb_all (input, input_map) ||
3273 	  !hb_all (lookahead, lookahead_map))
3274 	return_trace (false);
3275 
3276       serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
3277     }
3278 
3279     return_trace (true);
3280   }
3281 
3282   bool sanitize (hb_sanitize_context_t *c) const
3283   {
3284     TRACE_SANITIZE (this);
3285     /* Hyper-optimized sanitize, because this path is really hot. */
3286     if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
3287     hb_barrier ();
3288     const auto &input = StructAfter<decltype (inputX)> (backtrack);
3289     if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
3290     hb_barrier ();
3291     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
3292     if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
3293     hb_barrier ();
3294     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
3295     return_trace (likely (lookup.sanitize (c)));
3296   }
3297 
3298   protected:
3299   Array16Of<typename Types::HBUINT>
3300 		backtrack;		/* Array of backtracking values
3301 					 * (to be matched before the input
3302 					 * sequence) */
3303   HeadlessArray16Of<typename Types::HBUINT>
3304 		inputX;			/* Array of input values (start with
3305 					 * second glyph) */
3306   Array16Of<typename Types::HBUINT>
3307 		lookaheadX;		/* Array of lookahead values (to be
3308 					 * matched after the input sequence) */
3309   Array16Of<LookupRecord>
3310 		lookupX;		/* Array of LookupRecords--in
3311 					 * design order */
3312   public:
3313   DEFINE_SIZE_MIN (8);
3314 };
3315 
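/* ChainRule is a variable-layout struct: the four arrays are stored back to
 * back, so only `backtrack` sits at a fixed offset and the rest are reached
 * via StructAfter<>.  For SmallTypes the serialized form is roughly (field
 * names approximately per the OpenType ChainSequenceRule definition):
 *
 *   HBUINT16 backtrackGlyphCount;  HBUINT16 backtrack[backtrackGlyphCount];
 *   HBUINT16 inputGlyphCount;      HBUINT16 input[inputGlyphCount - 1];
 *   HBUINT16 lookaheadGlyphCount;  HBUINT16 lookahead[lookaheadGlyphCount];
 *   HBUINT16 lookupCount;          LookupRecord lookupRecords[lookupCount];
 *
 * HeadlessArray16Of captures the inputGlyphCount/input[lenP1 - 1] pairing. */
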
3316 template <typename Types>
3317 struct ChainRuleSet
3318 {
3319   using ChainRule = OT::ChainRule<Types>;
3320 
3321   bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
3322   {
3323     return
3324     + hb_iter (rule)
3325     | hb_map (hb_add (this))
3326     | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
3327     | hb_any
3328     ;
3329   }
3330   void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
3331   {
3332     if (unlikely (c->lookup_limit_exceeded ())) return;
3333 
3334     return
3335     + hb_iter (rule)
3336     | hb_map (hb_add (this))
3337     | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
3338     ;
3339   }
3340 
3341   void closure_lookups (hb_closure_lookups_context_t *c,
3342                         ChainContextClosureLookupContext &lookup_context) const
3343   {
3344     if (unlikely (c->lookup_limit_exceeded ())) return;
3345 
3346     + hb_iter (rule)
3347     | hb_map (hb_add (this))
3348     | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
3349     ;
3350   }
3351 
3352   void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
3353   {
3354     return
3355     + hb_iter (rule)
3356     | hb_map (hb_add (this))
3357     | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
3358     ;
3359   }
3360 
3361   bool would_apply (hb_would_apply_context_t *c,
3362 		    const ChainContextApplyLookupContext &lookup_context) const
3363   {
3364     return
3365     + hb_iter (rule)
3366     | hb_map (hb_add (this))
3367     | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
3368     | hb_any
3369     ;
3370   }
3371 
3372   bool apply (hb_ot_apply_context_t *c,
3373 	      const ChainContextApplyLookupContext &lookup_context) const
3374   {
3375     TRACE_APPLY (this);
3376 
3377     unsigned num_rules = rule.len;
3378 
3379 #ifndef HB_NO_OT_RULESETS_FAST_PATH
3380     if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
3381 #endif
3382     {
3383     slow:
3384       return_trace (
3385       + hb_iter (rule)
3386       | hb_map (hb_add (this))
3387       | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
3388       | hb_any
3389       )
3390       ;
3391     }
3392 
3393     /* This version is optimized for speed by matching the first & second
3394      * components of the rule here, instead of calling into the matching code.
3395      *
3396      * Replicated from LigatureSet::apply(). */
3397 
3398     /* If the input skippy (skipping iterator) has non-auto joiner behavior (as in Indic shapers),
3399      * skip this fast path, as we don't distinguish between input & lookahead
3400      * matching in the fast path.
3401      *
3402      * https://github.com/harfbuzz/harfbuzz/issues/4813
3403      */
3404     if (!c->auto_zwnj || !c->auto_zwj)
3405       goto slow;
3406 
3407     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
3408     skippy_iter.reset (c->buffer->idx);
3409     skippy_iter.set_match_func (match_always, nullptr);
3410     skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
3411     unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
3412     hb_glyph_info_t *first = nullptr, *second = nullptr;
3413     bool matched = skippy_iter.next ();
3414     if (likely (matched))
3415     {
3416       first = &c->buffer->info[skippy_iter.idx];
3417       unsafe_to1 = skippy_iter.idx + 1;
3418 
3419       if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
3420       {
3421 	/* Can't use the fast path if e.g. the next char is a default-ignorable
3422 	 * or other skippable. */
3423         goto slow;
3424       }
3425     }
3426     else
3427     {
3428       /* Failed to match the next glyph. Only try applying rules that have
3429        * no further input and no lookahead. */
3430       return_trace (
3431       + hb_iter (rule)
3432       | hb_map (hb_add (this))
3433       | hb_filter ([&] (const ChainRule &_)
3434 		   {
3435 		     const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack);
3436 		     const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input);
3437 		     return input.lenP1 <= 1 && lookahead.len == 0;
3438 		   })
3439       | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
3440       | hb_any
3441       )
3442       ;
3443     }
3444     matched = skippy_iter.next ();
3445     if (likely (matched && !skippy_iter.may_skip (c->buffer->info[skippy_iter.idx])))
3446     {
3447       second = &c->buffer->info[skippy_iter.idx];
3448       unsafe_to2 = skippy_iter.idx + 1;
3449     }
3450 
3451     auto match_input = lookup_context.funcs.match[1];
3452     auto match_lookahead = lookup_context.funcs.match[2];
3453     auto *input_data = lookup_context.match_data[1];
3454     auto *lookahead_data = lookup_context.match_data[2];
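    /* Pre-screen each rule using the already-fetched `first` (and `second`):
     * a rule with more than one input glyph must match `first` against
     * input[0]; a single-glyph rule must instead match `first` against its
     * first lookahead glyph.  Only rules that pass this cheap screen pay for
     * a full ChainRule::apply(). */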
3455     for (unsigned int i = 0; i < num_rules; i++)
3456     {
3457       const auto &r = this+rule.arrayZ[i];
3458 
3459       const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack);
3460       const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input);
3461 
3462       unsigned lenP1 = hb_max ((unsigned) input.lenP1, 1u);
3463       if (lenP1 > 1 ?
3464 	   (!match_input ||
3465 	    match_input (*first, input.arrayZ[0], input_data))
3466 	  :
3467 	   (!lookahead.len || !match_lookahead ||
3468 	    match_lookahead (*first, lookahead.arrayZ[0], lookahead_data)))
3469       {
3470         if (!second ||
3471 	    (lenP1 > 2 ?
3472 	     (!match_input ||
3473 	      match_input (*second, input.arrayZ[1], input_data))
3474 	     :
3475 	     (lookahead.len <= 2 - lenP1 || !match_lookahead ||
3476 	      match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data))))
3477 	{
3478 	  if (r.apply (c, lookup_context))
3479 	  {
3480 	    if (unsafe_to != (unsigned) -1)
3481 	      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
3482 	    return_trace (true);
3483 	  }
3484 	}
3485 	else
3486 	  unsafe_to = unsafe_to2;
3487       }
3488       else
3489       {
3490 	if (unsafe_to == (unsigned) -1)
3491 	  unsafe_to = unsafe_to1;
3492       }
3493     }
3494     if (likely (unsafe_to != (unsigned) -1))
3495       c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
3496 
3497     return_trace (false);
3498   }
3499 
3500   bool subset (hb_subset_context_t *c,
3501 	       const hb_map_t *lookup_map,
3502 	       const hb_map_t *backtrack_klass_map = nullptr,
3503 	       const hb_map_t *input_klass_map = nullptr,
3504 	       const hb_map_t *lookahead_klass_map = nullptr) const
3505   {
3506     TRACE_SUBSET (this);
3507 
3508     auto snap = c->serializer->snapshot ();
3509     auto *out = c->serializer->start_embed (*this);
3510     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3511 
3512     for (const Offset16To<ChainRule>& _ : rule)
3513     {
3514       if (!_) continue;
3515       auto o_snap = c->serializer->snapshot ();
3516       auto *o = out->rule.serialize_append (c->serializer);
3517       if (unlikely (!o)) continue;
3518 
3519       if (!o->serialize_subset (c, _, this,
3520 				lookup_map,
3521 				backtrack_klass_map,
3522 				input_klass_map,
3523 				lookahead_klass_map))
3524       {
3525 	out->rule.pop ();
3526 	c->serializer->revert (o_snap);
3527       }
3528     }
3529 
3530     bool ret = bool (out->rule);
3531     if (!ret) c->serializer->revert (snap);
3532 
3533     return_trace (ret);
3534   }
3535 
3536   bool sanitize (hb_sanitize_context_t *c) const
3537   {
3538     TRACE_SANITIZE (this);
3539     return_trace (rule.sanitize (c, this));
3540   }
3541 
3542   protected:
3543   Array16OfOffset16To<ChainRule>
3544 		rule;			/* Array of ChainRule tables
3545 					 * ordered by preference */
3546   public:
3547   DEFINE_SIZE_ARRAY (2, rule);
3548 };
3549 
3550 template <typename Types>
3551 struct ChainContextFormat1_4
3552 {
3553   using ChainRuleSet = OT::ChainRuleSet<Types>;
3554 
3555   bool intersects (const hb_set_t *glyphs) const
3556   {
3557     struct ChainContextClosureLookupContext lookup_context = {
3558       {intersects_glyph, intersected_glyph},
3559       ContextFormat::SimpleContext,
3560       {nullptr, nullptr, nullptr}
3561     };
3562 
3563     return
3564     + hb_zip (this+coverage, ruleSet)
3565     | hb_filter (*glyphs, hb_first)
3566     | hb_map (hb_second)
3567     | hb_map (hb_add (this))
3568     | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
3569     | hb_any
3570     ;
3571   }
3572 
3573   bool may_have_non_1to1 () const
3574   { return true; }
3575 
3576   void closure (hb_closure_context_t *c) const
3577   {
3578     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
3579     if (unlikely (!cur_active_glyphs)) return;
3580     get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
3581 				   *cur_active_glyphs);
3582 
3583     struct ChainContextClosureLookupContext lookup_context = {
3584       {intersects_glyph, intersected_glyph},
3585       ContextFormat::SimpleContext,
3586       {nullptr, nullptr, nullptr}
3587     };
3588 
3589     + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
3590     | hb_filter ([&] (hb_codepoint_t _) {
3591       return c->previous_parent_active_glyphs ().has (_);
3592     }, hb_first)
3593     | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
3594     | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
3595     ;
3596 
3597     c->pop_cur_done_glyphs ();
3598   }
3599 
3600   void closure_lookups (hb_closure_lookups_context_t *c) const
3601   {
3602     struct ChainContextClosureLookupContext lookup_context = {
3603       {intersects_glyph, nullptr},
3604       ContextFormat::SimpleContext,
3605       {nullptr, nullptr, nullptr}
3606     };
3607 
3608     + hb_zip (this+coverage, ruleSet)
3609     | hb_filter (*c->glyphs, hb_first)
3610     | hb_map (hb_second)
3611     | hb_map (hb_add (this))
3612     | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
3613     ;
3614   }
3615 
3616   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3617 
3618   void collect_glyphs (hb_collect_glyphs_context_t *c) const
3619   {
3620     (this+coverage).collect_coverage (c->input);
3621 
3622     struct ChainContextCollectGlyphsLookupContext lookup_context = {
3623       {collect_glyph},
3624       {nullptr, nullptr, nullptr}
3625     };
3626 
3627     + hb_iter (ruleSet)
3628     | hb_map (hb_add (this))
3629     | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3630     ;
3631   }
3632 
3633   bool would_apply (hb_would_apply_context_t *c) const
3634   {
3635     const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
3636     struct ChainContextApplyLookupContext lookup_context = {
3637       {{match_glyph, match_glyph, match_glyph}},
3638       {nullptr, nullptr, nullptr}
3639     };
3640     return rule_set.would_apply (c, lookup_context);
3641   }
3642 
3643   const Coverage &get_coverage () const { return this+coverage; }
3644 
3645   bool apply (hb_ot_apply_context_t *c) const
3646   {
3647     TRACE_APPLY (this);
3648     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3649     if (likely (index == NOT_COVERED)) return_trace (false);
3650 
3651     const ChainRuleSet &rule_set = this+ruleSet[index];
3652     struct ChainContextApplyLookupContext lookup_context = {
3653       {{match_glyph, match_glyph, match_glyph}},
3654       {nullptr, nullptr, nullptr}
3655     };
3656     return_trace (rule_set.apply (c, lookup_context));
3657   }
3658 
3659   bool subset (hb_subset_context_t *c) const
3660   {
3661     TRACE_SUBSET (this);
3662     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
3663     const hb_map_t &glyph_map = *c->plan->glyph_map;
3664 
3665     auto *out = c->serializer->start_embed (*this);
3666     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3667     out->format = format;
3668 
3669     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
3670     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
3671     + hb_zip (this+coverage, ruleSet)
3672     | hb_filter (glyphset, hb_first)
3673     | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
3674     | hb_map (hb_first)
3675     | hb_map (glyph_map)
3676     | hb_sink (new_coverage)
3677     ;
3678 
3679     out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
3680     return_trace (bool (new_coverage));
3681   }
3682 
3683   bool sanitize (hb_sanitize_context_t *c) const
3684   {
3685     TRACE_SANITIZE (this);
3686     return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
3687   }
3688 
3689   protected:
3690   HBUINT16	format;			/* Format identifier--format = 1 */
3691   typename Types::template OffsetTo<Coverage>
3692 		coverage;		/* Offset to Coverage table--from
3693 					 * beginning of table */
3694   Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
3695 		ruleSet;		/* Array of ChainRuleSet tables
3696 					 * ordered by Coverage Index */
3697   public:
3698   DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
3699 };
3700 
3701 template <typename Types>
3702 struct ChainContextFormat2_5
3703 {
3704   using ChainRuleSet = OT::ChainRuleSet<SmallTypes>;
3705 
3706   bool intersects (const hb_set_t *glyphs) const
3707   {
3708     if (!(this+coverage).intersects (glyphs))
3709       return false;
3710 
3711     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3712     const ClassDef &input_class_def = this+inputClassDef;
3713     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3714 
3715     hb_map_t caches[3] = {};
3716     struct ChainContextClosureLookupContext lookup_context = {
3717       {intersects_class, nullptr},
3718       ContextFormat::ClassBasedContext,
3719       {&backtrack_class_def,
3720        &input_class_def,
3721        &lookahead_class_def},
3722       {&caches[0], &caches[1], &caches[2]}
3723     };
3724 
3725     hb_set_t retained_coverage_glyphs;
3726     (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
3727 
3728     hb_set_t coverage_glyph_classes;
3729     input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3730 
3731     return
3732     + hb_iter (ruleSet)
3733     | hb_map (hb_add (this))
3734     | hb_enumerate
3735     | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
3736 	      { return input_class_def.intersects_class (glyphs, p.first) &&
3737 		       coverage_glyph_classes.has (p.first) &&
3738 		       p.second.intersects (glyphs, lookup_context); })
3739     | hb_any
3740     ;
3741   }
3742 
3743   bool may_have_non_1to1 () const
3744   { return true; }
3745 
3746   void closure (hb_closure_context_t *c) const
3747   {
3748     if (!(this+coverage).intersects (c->glyphs))
3749       return;
3750 
3751     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
3752     if (unlikely (!cur_active_glyphs)) return;
3753     get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
3754 				   *cur_active_glyphs);
3755 
3756     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3757     const ClassDef &input_class_def = this+inputClassDef;
3758     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3759 
3760     hb_map_t caches[3] = {};
3761     intersected_class_cache_t intersected_cache;
3762     struct ChainContextClosureLookupContext lookup_context = {
3763       {intersects_class, intersected_class_glyphs},
3764       ContextFormat::ClassBasedContext,
3765       {&backtrack_class_def,
3766        &input_class_def,
3767        &lookahead_class_def},
3768       {&caches[0], &caches[1], &caches[2]},
3769       &intersected_cache
3770     };
3771 
3772     + hb_enumerate (ruleSet)
3773     | hb_filter ([&] (unsigned _)
3774     { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
3775 		 hb_first)
3776     | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _)
3777                 {
3778                   const ChainRuleSet& chainrule_set = this+_.second;
3779                   chainrule_set.closure (c, _.first, lookup_context);
3780                 })
3781     ;
3782 
3783     c->pop_cur_done_glyphs ();
3784   }
3785 
3786   void closure_lookups (hb_closure_lookups_context_t *c) const
3787   {
3788     if (!(this+coverage).intersects (c->glyphs))
3789       return;
3790 
3791     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3792     const ClassDef &input_class_def = this+inputClassDef;
3793     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3794 
3795     hb_map_t caches[3] = {};
3796     struct ChainContextClosureLookupContext lookup_context = {
3797       {intersects_class, nullptr},
3798       ContextFormat::ClassBasedContext,
3799       {&backtrack_class_def,
3800        &input_class_def,
3801        &lookahead_class_def},
3802       {&caches[0], &caches[1], &caches[2]}
3803     };
3804 
3805     + hb_iter (ruleSet)
3806     | hb_map (hb_add (this))
3807     | hb_enumerate
3808     | hb_filter([&] (unsigned klass)
3809     { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3810     | hb_map (hb_second)
3811     | hb_apply ([&] (const ChainRuleSet &_)
3812     { _.closure_lookups (c, lookup_context); })
3813     ;
3814   }
3815 
3816   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3817 
3818   void collect_glyphs (hb_collect_glyphs_context_t *c) const
3819   {
3820     (this+coverage).collect_coverage (c->input);
3821 
3822     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3823     const ClassDef &input_class_def = this+inputClassDef;
3824     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3825 
3826     struct ChainContextCollectGlyphsLookupContext lookup_context = {
3827       {collect_class},
3828       {&backtrack_class_def,
3829        &input_class_def,
3830        &lookahead_class_def}
3831     };
3832 
3833     + hb_iter (ruleSet)
3834     | hb_map (hb_add (this))
3835     | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3836     ;
3837   }
3838 
3839   bool would_apply (hb_would_apply_context_t *c) const
3840   {
3841     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3842     const ClassDef &input_class_def = this+inputClassDef;
3843     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3844 
3845     unsigned int index = input_class_def.get_class (c->glyphs[0]);
3846     const ChainRuleSet &rule_set = this+ruleSet[index];
3847     struct ChainContextApplyLookupContext lookup_context = {
3848       {{match_class, match_class, match_class}},
3849       {&backtrack_class_def,
3850        &input_class_def,
3851        &lookahead_class_def}
3852     };
3853     return rule_set.would_apply (c, lookup_context);
3854   }
3855 
3856   const Coverage &get_coverage () const { return this+coverage; }
3857 
3858   unsigned cache_cost () const
3859   {
3860     unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
3861     return c >= 4 ? c : 0;
3862   }
3863   bool cache_func (hb_ot_apply_context_t *c, bool enter) const
3864   {
3865     if (enter)
3866     {
3867       if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
3868 	return false;
3869       auto &info = c->buffer->info;
3870       unsigned count = c->buffer->len;
3871       for (unsigned i = 0; i < count; i++)
3872 	info[i].syllable() = 255;
3873       c->new_syllables = 255;
3874       return true;
3875     }
3876     else
3877     {
3878       c->new_syllables = (unsigned) -1;
3879       HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
3880       return true;
3881     }
3882   }
3883 
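  /* Cache encoding sketch (as used by the match_class_cached* funcs chosen in
   * _apply() below): while this subtable holds the cache, each glyph's
   * `syllable` var doubles as two 4-bit class caches, initialized to 255,
   * i.e. both nibbles 15 = "not cached".  The high nibble caches the input
   * class (match_class_cached2) and the low nibble the lookahead class
   * (match_class_cached1, also reused for backtrack when it shares the
   * lookahead ClassDef), so only classes 0..14 are cacheable. */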
3884   bool apply_cached (hb_ot_apply_context_t *c) const { return _apply (c, true); }
3885   bool apply (hb_ot_apply_context_t *c) const { return _apply (c, false); }
3886   bool _apply (hb_ot_apply_context_t *c, bool cached) const
3887   {
3888     TRACE_APPLY (this);
3889     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3890     if (likely (index == NOT_COVERED)) return_trace (false);
3891 
3892     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3893     const ClassDef &input_class_def = this+inputClassDef;
3894     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3895 
3896     /* match_class_caches1 is slightly faster. Use it for lookahead,
3897      * which is typically longer. */
3898     struct ChainContextApplyLookupContext lookup_context = {
3899       {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class,
3900         cached ? match_class_cached2 : match_class,
3901         cached ? match_class_cached1 : match_class}},
3902       {&backtrack_class_def,
3903        &input_class_def,
3904        &lookahead_class_def}
3905     };
3906 
3907     // Note: Corresponds to match_class_cached2
3908     if (cached && ((c->buffer->cur().syllable() & 0xF0) >> 4) < 15)
3909       index = (c->buffer->cur().syllable () & 0xF0) >> 4;
3910     else
3911       index = input_class_def.get_class (c->buffer->cur().codepoint);
3912     const ChainRuleSet &rule_set = this+ruleSet[index];
3913     return_trace (rule_set.apply (c, lookup_context));
3914   }
3915 
3916   bool subset (hb_subset_context_t *c) const
3917   {
3918     TRACE_SUBSET (this);
3919     auto *out = c->serializer->start_embed (*this);
3920     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3921     out->format = format;
3922     out->coverage.serialize_subset (c, coverage, this);
3923 
3924     hb_map_t backtrack_klass_map;
3925     hb_map_t input_klass_map;
3926     hb_map_t lookahead_klass_map;
3927 
3928     out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3929     // TODO: subset inputClassDef based on the glyphs that survived Coverage subsetting
3930     out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3931     out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3932 
3933     if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3934 						   input_klass_map,
3935 						   lookahead_klass_map)))
3936       return_trace (false);
3937 
3938     const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3939     hb_set_t retained_coverage_glyphs;
3940     (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
3941 
3942     hb_set_t coverage_glyph_classes;
3943     (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3944 
3945     int non_zero_index = -1, index = 0;
3946     bool ret = true;
3947     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
3948     auto last_non_zero = c->serializer->snapshot ();
3949     for (const auto& _ : + hb_enumerate (ruleSet)
3950 			 | hb_filter (input_klass_map, hb_first))
3951     {
3952       auto *o = out->ruleSet.serialize_append (c->serializer);
3953       if (unlikely (!o))
3954       {
3955 	ret = false;
3956 	break;
3957       }
3958       if (coverage_glyph_classes.has (_.first) &&
3959           o->serialize_subset (c, _.second, this,
3960 			       lookup_map,
3961 			       &backtrack_klass_map,
3962 			       &input_klass_map,
3963 			       &lookahead_klass_map))
3964       {
3965         last_non_zero = c->serializer->snapshot ();
3966 	non_zero_index = index;
3967       }
3968 
3969       index++;
3970     }
3971 
3972     if (!ret || non_zero_index == -1) return_trace (false);
3973 
3974     // prune empty trailing ruleSets
3975     if (index > non_zero_index) {
3976       c->serializer->revert (last_non_zero);
3977       out->ruleSet.len = non_zero_index + 1;
3978     }
3979 
3980     return_trace (bool (out->ruleSet));
3981   }
3982 
3983   bool sanitize (hb_sanitize_context_t *c) const
3984   {
3985     TRACE_SANITIZE (this);
3986     return_trace (coverage.sanitize (c, this) &&
3987 		  backtrackClassDef.sanitize (c, this) &&
3988 		  inputClassDef.sanitize (c, this) &&
3989 		  lookaheadClassDef.sanitize (c, this) &&
3990 		  ruleSet.sanitize (c, this));
3991   }
3992 
3993   protected:
3994   HBUINT16	format;			/* Format identifier--format = 2 */
3995   typename Types::template OffsetTo<Coverage>
3996 		coverage;		/* Offset to Coverage table--from
3997 					 * beginning of table */
3998   typename Types::template OffsetTo<ClassDef>
3999 		backtrackClassDef;	/* Offset to glyph ClassDef table
4000 					 * containing backtrack sequence
4001 					 * data--from beginning of table */
4002   typename Types::template OffsetTo<ClassDef>
4003 		inputClassDef;		/* Offset to glyph ClassDef
4004 					 * table containing input sequence
4005 					 * data--from beginning of table */
4006   typename Types::template OffsetTo<ClassDef>
4007 		lookaheadClassDef;	/* Offset to glyph ClassDef table
4008 					 * containing lookahead sequence
4009 					 * data--from beginning of table */
4010   Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
4011 		ruleSet;		/* Array of ChainRuleSet tables
4012 					 * ordered by class */
4013   public:
4014   DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet);
4015 };
4016 
4017 struct ChainContextFormat3
4018 {
4019   using RuleSet = OT::RuleSet<SmallTypes>;
4020 
4021   bool intersects (const hb_set_t *glyphs) const
4022   {
4023     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4024 
4025     if (!(this+input[0]).intersects (glyphs))
4026       return false;
4027 
4028     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4029     struct ChainContextClosureLookupContext lookup_context = {
4030       {intersects_coverage, nullptr},
4031       ContextFormat::CoverageBasedContext,
4032       {this, this, this}
4033     };
4034     return chain_context_intersects (glyphs,
4035 				     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4036 				     input.len, (const HBUINT16 *) input.arrayZ + 1,
4037 				     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4038 				     lookup_context);
4039   }
4040 
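  /* Throughout this struct the Offset16To<Coverage> arrays are handed to the
   * shared chain_context_* helpers as plain (const HBUINT16 *) arrays: each
   * value is resolved as an offset to a Coverage table relative to `this`,
   * which is passed via the context's data slots.  `arrayZ + 1` skips the
   * first input coverage, already tested directly, while input.len still
   * counts it, since the helpers expect a count that includes the first
   * glyph. */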
4041   bool may_have_non_1to1 () const
4042   { return true; }
4043 
4044   void closure (hb_closure_context_t *c) const
4045   {
4046     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4047 
4048     if (!(this+input[0]).intersects (c->glyphs))
4049       return;
4050 
4051     hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
4052     if (unlikely (!cur_active_glyphs))
4053       return;
4054     get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
4055 				   *cur_active_glyphs);
4056 
4057     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4058     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4059     struct ChainContextClosureLookupContext lookup_context = {
4060       {intersects_coverage, intersected_coverage_glyphs},
4061       ContextFormat::CoverageBasedContext,
4062       {this, this, this}
4063     };
4064     chain_context_closure_lookup (c,
4065 				  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4066 				  input.len, (const HBUINT16 *) input.arrayZ + 1,
4067 				  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4068 				  lookup.len, lookup.arrayZ,
4069 				  0, lookup_context);
4070 
4071     c->pop_cur_done_glyphs ();
4072   }
4073 
4074   void closure_lookups (hb_closure_lookups_context_t *c) const
4075   {
4076     if (!intersects (c->glyphs))
4077       return;
4078 
4079     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4080     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4081     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4082     recurse_lookups (c, lookup.len, lookup.arrayZ);
4083   }
4084 
4085   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
4086 
4087   void collect_glyphs (hb_collect_glyphs_context_t *c) const
4088   {
4089     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4090 
4091     (this+input[0]).collect_coverage (c->input);
4092 
4093     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4094     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4095 
4096     struct ChainContextCollectGlyphsLookupContext lookup_context = {
4097       {collect_coverage},
4098       {this, this, this}
4099     };
4100     chain_context_collect_glyphs_lookup (c,
4101 					 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4102 					 input.len, (const HBUINT16 *) input.arrayZ + 1,
4103 					 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4104 					 lookup.len, lookup.arrayZ,
4105 					 lookup_context);
4106   }
4107 
4108   bool would_apply (hb_would_apply_context_t *c) const
4109   {
4110     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4111     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4112     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4113     struct ChainContextApplyLookupContext lookup_context = {
4114       {{match_coverage, match_coverage, match_coverage}},
4115       {this, this, this}
4116     };
4117     return chain_context_would_apply_lookup (c,
4118 					     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4119 					     input.len, (const HBUINT16 *) input.arrayZ + 1,
4120 					     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4121 					     lookup.len, lookup.arrayZ, lookup_context);
4122   }
4123 
4124   const Coverage &get_coverage () const
4125   {
4126     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4127     return this+input[0];
4128   }
4129 
4130   bool apply (hb_ot_apply_context_t *c) const
4131   {
4132     TRACE_APPLY (this);
4133     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4134 
4135     unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
4136     if (likely (index == NOT_COVERED)) return_trace (false);
4137 
4138     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4139     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4140     struct ChainContextApplyLookupContext lookup_context = {
4141       {{match_coverage, match_coverage, match_coverage}},
4142       {this, this, this}
4143     };
4144     return_trace (chain_context_apply_lookup (c,
4145 					      backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
4146 					      input.len, (const HBUINT16 *) input.arrayZ + 1,
4147 					      lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
4148 					      lookup.len, lookup.arrayZ, lookup_context));
4149   }
4150 
4151   template<typename Iterator,
4152 	   hb_requires (hb_is_iterator (Iterator))>
4153   bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
4154   {
4155     TRACE_SERIALIZE (this);
4156     auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
4157 
4158     if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
4159       return_trace (false);
4160 
4161     for (auto& offset : it) {
4162       auto *o = out->serialize_append (c->serializer);
4163       if (unlikely (!o) || !o->serialize_subset (c, offset, base))
4164         return_trace (false);
4165     }
4166 
4167     return_trace (true);
4168   }
4169 
4170   bool subset (hb_subset_context_t *c) const
4171   {
4172     TRACE_SUBSET (this);
4173 
4174     if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
4175 
4176     if (!serialize_coverage_offsets (c, backtrack.iter (), this))
4177       return_trace (false);
4178 
4179     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4180     if (!serialize_coverage_offsets (c, input.iter (), this))
4181       return_trace (false);
4182 
4183     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4184     if (!serialize_coverage_offsets (c, lookahead.iter (), this))
4185       return_trace (false);
4186 
4187     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4188     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
4189 
4190     HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len);
4191     if (!lookupCount) return_trace (false);
4192 
4193     unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map);
4194     return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
4195   }
4196 
4197   bool sanitize (hb_sanitize_context_t *c) const
4198   {
4199     TRACE_SANITIZE (this);
4200     if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
4201     hb_barrier ();
4202     const auto &input = StructAfter<decltype (inputX)> (backtrack);
4203     if (unlikely (!input.sanitize (c, this))) return_trace (false);
4204     hb_barrier ();
4205     if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
4206     const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
4207     if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
4208     hb_barrier ();
4209     const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
4210     return_trace (likely (lookup.sanitize (c)));
4211   }
4212 
4213   protected:
4214   HBUINT16	format;			/* Format identifier--format = 3 */
4215   Array16OfOffset16To<Coverage>
4216 		backtrack;		/* Array of coverage tables
4217 					 * in backtracking sequence, in glyph
4218 					 * sequence order */
4219   Array16OfOffset16To<Coverage>
4220 		inputX;			/* Array of coverage
4221 					 * tables in input sequence, in glyph
4222 					 * sequence order */
4223   Array16OfOffset16To<Coverage>
4224 		lookaheadX;		/* Array of coverage tables
4225 					 * in lookahead sequence, in glyph
4226 					 * sequence order */
4227   Array16Of<LookupRecord>
4228 		lookupX;		/* Array of LookupRecords--in
4229 					 * design order */
4230   public:
4231   DEFINE_SIZE_MIN (10);
4232 };
4233 
4234 struct ChainContext
4235 {
4236   template <typename context_t, typename ...Ts>
4237   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
4238   {
4239     if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
4240     TRACE_DISPATCH (this, u.format);
4241     switch (u.format) {
4242     case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
4243     case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
4244     case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
4245 #ifndef HB_NO_BEYOND_64K
4246     case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
4247     case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
4248 #endif
4249     default:return_trace (c->default_return_value ());
4250     }
4251   }
4252 
4253   protected:
4254   union {
4255   HBUINT16				format;	/* Format identifier */
4256   ChainContextFormat1_4<SmallTypes>	format1;
4257   ChainContextFormat2_5<SmallTypes>	format2;
4258   ChainContextFormat3			format3;
4259 #ifndef HB_NO_BEYOND_64K
4260   ChainContextFormat1_4<MediumTypes>	format4;
4261   ChainContextFormat2_5<MediumTypes>	format5;
4262 #endif
4263   } u;
4264 };
4265 
4266 
template <typename T>
struct ExtensionFormat1
{
  unsigned int get_type () const { return extensionLookupType; }

  template <typename X>
  const X& get_subtable () const
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, this))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, format);
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
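  /* Rejecting extensionLookupType == T::SubTable::Extension here keeps an
   * Extension subtable from pointing at another Extension, so dispatch ()
   * cannot recurse unboundedly through nested extensions. */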
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  extensionLookupType != T::SubTable::Extension);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    const auto& src_offset =
        reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
        reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16	format;			/* Format identifier. Set to 1. */
  HBUINT16	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type extensionLookupType. */
  public:
  DEFINE_SIZE_STATIC (8);
};

template <typename T>
struct Extension
{
  unsigned int get_type () const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  const X& get_subtable () const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null (typename T::SubTable);
    }
  }

  // Specialization of dispatch for subset. dispatch() normally just
  // dispatches to the subtable this points to, but for subset
  // we need to run subset on this subtable too.
  template <typename ...Ts>
  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
  {
    switch (u.format) {
    case 1: hb_barrier (); return u.format1.subset (c);
    default: return c->default_return_value ();
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, u.format);
    switch (u.format) {
    case 1: hb_barrier (); return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ExtensionFormat1<T>	format1;
  } u;
};
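
/* Background note: extension lookups (GSUB lookup type 7 / GPOS lookup
 * type 9) exist so that a lookup can reach subtables beyond the 64KB range
 * of the usual 16-bit offsets: the small ExtensionFormat1 record stays
 * close to the Lookup, and its 32-bit extensionOffset points at the real
 * subtable.  The Extension wrapper above makes that indirection invisible
 * to every dispatch context except subset. */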

/*
 * GSUB/GPOS Common
 */

struct hb_ot_layout_lookup_accelerator_t
{
  template <typename TLookup>
  static hb_ot_layout_lookup_accelerator_t *create (const TLookup &lookup)
  {
    unsigned count = lookup.get_subtable_count ();

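    /* Size of the object including its trailing variable-length `subtables`
     * array: start from sizeof (the struct), subtract the HB_VAR_ARRAY
     * placeholder entries counted into it, then add one hb_applicable_t
     * per actual subtable. */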
    unsigned size = sizeof (hb_ot_layout_lookup_accelerator_t) -
		    HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) +
		    count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t);

    /* The following is a calloc because when we are collecting subtables,
     * some of them might be invalid and hence not collected; as a result,
     * we might not fill in all the count entries of the subtables array.
     * Zeroing it allows the set digest to gatekeep it without having to
     * initialize it further. */
    auto *thiz = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (1, size);
    if (unlikely (!thiz))
      return nullptr;

    hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables);
    lookup.dispatch (&c_accelerate_subtables);

    thiz->digest.init ();
    for (auto& subtable : hb_iter (thiz->subtables, count))
      thiz->digest.union_ (subtable.digest);

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx;
    for (unsigned i = 0; i < count; i++)
      if (i != thiz->cache_user_idx)
	thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func;
#endif

    return thiz;
  }

  bool may_have (hb_codepoint_t g) const
  { return digest.may_have (g); }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    if (use_cache)
    {
      return
      + hb_iter (hb_iter (subtables, subtables_count))
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
      | hb_any
      ;
    }
    else
#endif
    {
      return
      + hb_iter (hb_iter (subtables, subtables_count))
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
      | hb_any
      ;
    }
    return false;
  }
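  /* The pipelines above are HarfBuzz's iterator dataflow idiom; each one is
   * equivalent to this plain loop (sketch, for exposition only):
   *
   *   for (unsigned i = 0; i < subtables_count; i++)
   *     if (subtables[i].apply (c))   // or apply_cached (c)
   *       return true;
   *   return false;
   *
   * hb_any short-circuits on the first match, just like the early return. */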

  bool cache_enter (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    return cache_user_idx != (unsigned) -1 &&
	   subtables[cache_user_idx].cache_enter (c);
#else
    return false;
#endif
  }
  void cache_leave (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    subtables[cache_user_idx].cache_leave (c);
#endif
  }


  hb_set_digest_t digest;
  private:
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  unsigned cache_user_idx = (unsigned) -1;
#endif
  hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY];
};

template <typename Types>
struct GSUBGPOSVersion1_2
{
  friend struct GSUBGPOS;

  protected:
  FixedVersion<> version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  typename Types::template OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  typename Types::template OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  typename Types::template OffsetTo<LookupList<Types>>
		lookupList;	/* LookupList table */
  Offset32To<FeatureVariations>
		featureVars;	/* Offset to Feature Variations
				 * table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (4 + 3 * Types::size);

  unsigned int get_size () const
  {
    return min_size +
	   (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

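  /* Illustration (assuming SmallTypes, i.e. 16-bit offsets): min_size is
   * 4 + 3 * 2 = 10 bytes; a version 1.1 table appends the 4-byte
   * featureVars offset, so get_size () returns 14 instead of 10. */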
  const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const
  {
    return &lookupList;
  }

  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList;
    if (unlikely (!(scriptList.sanitize (c, this) &&
		    featureList.sanitize (c, this) &&
		    reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->subset_context->serializer->start_embed (this);
    if (unlikely (!c->subset_context->serializer->extend_min (out))) return_trace (false);

    out->version = version;

    typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList;
    reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList),
			   this,
			   c);

    reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList),
			   this,
			   c);

    out->scriptList.serialize_subset (c->subset_context,
				      scriptList,
				      this,
				      c);

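    /* featureVars handling below: take a serializer snapshot first; if the
     * subsetted FeatureVariations ends up dropped (e.g. because all
     * variation axes are pinned), revert to the snapshot and downgrade the
     * output table to version 1.0, which has no featureVars field at all. */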
#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      auto snapshot = c->subset_context->serializer->snapshot ();
      if (!c->subset_context->serializer->extend_min (&out->featureVars))
        return_trace (false);

      // If all axes are pinned, all feature variations are dropped.
      bool ret = !c->subset_context->plan->all_axes_pinned
                 && out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      if (!ret && version.major == 1)
      {
        c->subset_context->serializer->revert (snapshot);
        out->version.major = 1;
        out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }
};

struct GSUBGPOS
{
  unsigned int get_size () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return u.version1.get_size ();
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return u.version2.get_size ();
#endif
    default: return u.version.static_size;
    }
  }

  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!u.version.sanitize (c))) return_trace (false);
    hb_barrier ();
    switch (u.version.major) {
    case 1: hb_barrier (); return_trace (u.version1.sanitize<TLookup> (c));
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return_trace (u.version2.sanitize<TLookup> (c));
#endif
    default: return_trace (true);
    }
  }

  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return u.version1.subset<TLookup> (c);
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return u.version2.subset<TLookup> (c);
#endif
    default: return false;
    }
  }

  const ScriptList &get_script_list () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return this+u.version1.scriptList;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.scriptList;
#endif
    default: return Null (ScriptList);
    }
  }
  const FeatureList &get_feature_list () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return this+u.version1.featureList;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.featureList;
#endif
    default: return Null (FeatureList);
    }
  }
  unsigned int get_lookup_count () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (this+u.version1.lookupList).len;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return (this+u.version2.lookupList).len;
#endif
    default: return 0;
    }
  }
  const Lookup& get_lookup (unsigned int i) const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (this+u.version1.lookupList)[i];
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return (this+u.version2.lookupList)[i];
#endif
    default: return Null (Lookup);
    }
  }
  const FeatureVariations &get_feature_variations () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (u.version.to_int () >= 0x00010001u && hb_barrier () ? this+u.version1.featureVars : Null (FeatureVariations));
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.featureVars;
#endif
    default: return Null (FeatureVariations);
    }
  }

  bool has_data () const { return u.version.to_int (); }
  unsigned int get_script_count () const
  { return get_script_list ().len; }
  const Tag& get_script_tag (unsigned int i) const
  { return get_script_list ().get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
				unsigned int *script_count /* IN/OUT */,
				hb_tag_t     *script_tags /* OUT */) const
  { return get_script_list ().get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return get_script_list ()[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return get_script_list ().find_index (tag, index); }

  unsigned int get_feature_count () const
  { return get_feature_list ().len; }
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
				 unsigned int *feature_count /* IN/OUT */,
				 hb_tag_t     *feature_tags /* OUT */) const
  { return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return get_feature_list ()[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return get_feature_list ().find_index (tag, index); }

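  /* Typical use of the getters above (sketch only; `table` is assumed to be
   * a sanitized reference to a GSUB or GPOS table, which share this base):
   *
   *   unsigned script_index;
   *   if (table.find_script_index (HB_TAG ('l','a','t','n'), &script_index))
   *   {
   *     const Script &script = table.get_script (script_index);
   *     ...
   *   }
   */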
  bool find_variations_index (const int *coords, unsigned int num_coords,
			      unsigned int *index,
			      ItemVarStoreInstancer *instancer) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return get_feature_variations ().find_index (coords, num_coords, index, instancer);
  }
  const Feature& get_feature_variation (unsigned int feature_index,
					unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	u.version.to_int () >= 0x00010001u)
    {
      const Feature *feature = get_feature_variations ().find_substitute (variations_index,
									  feature_index);
      if (feature)
	return *feature;
    }
#endif
    return get_feature (feature_index);
  }

  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
					  const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
					  hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    get_feature_variations ().collect_lookups (feature_indexes, feature_record_cond_idx_map, lookup_indexes);
#endif
  }

#ifndef HB_NO_VAR
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif

  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
			const hb_set_t *glyphs,
			hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);

    for (unsigned lookup_index : *lookup_indexes)
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }

  void prune_langsys (const hb_map_t *duplicate_feature_map,
                      const hb_set_t *layout_scripts,
                      hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
                      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Tag& tag = get_script_tag (script_index);
      if (!layout_scripts->has (tag)) continue;
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }

  void prune_features (const hb_map_t *lookup_indices, /* IN */
		       const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
		       const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
		       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // This is the set of feature indices which have alternate versions
    // defined in the FeatureVariations table, where the alternate
    // version(s) intersect the set of lookup indices.
    hb_set_t alternate_feature_indices;
    get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error ()))
    {
      feature_indices->err ();
      return;
    }
#endif

    for (unsigned i : hb_iter (feature_indices))
    {
      hb_tag_t tag = get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses the shaper for Khmer based on the presence of
        // this feature.  See thread at:
        // http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;

      const Feature *f = &(get_feature (i));
      const Feature** p = nullptr;
      if (feature_substitutes_map->has (i, &p))
        f = *p;

      if (!f->featureParams.is_null () &&
          tag == HB_TAG ('s', 'i', 'z', 'e'))
        continue;

      if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
	  )
	feature_indices->del (i);
    }
  }

  void collect_name_ids (const hb_map_t *feature_index_map,
                         hb_set_t *nameids_to_retain /* OUT */) const
  {
    unsigned count = get_feature_count ();
    for (unsigned i = 0; i < count; i++)
    {
      if (!feature_index_map->has (i)) continue;
      hb_tag_t tag = get_feature_tag (i);
      get_feature (i).collect_name_ids (tag, nameids_to_retain);
    }
  }

  template <typename T>
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      hb_sanitize_context_t sc;
      sc.lazy_some_gpos = true;
      this->table = sc.reference_table<T> (face);

      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
	hb_blob_destroy (this->table.get_blob ());
	this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *) hb_calloc (this->lookup_count, sizeof (*accels));
      if (unlikely (!this->accels))
      {
	this->lookup_count = 0;
	this->table.destroy ();
	this->table = hb_blob_get_empty ();
      }
    }
    ~accelerator_t ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
	hb_free (this->accels[i]);
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_t *get_blob () const { return table.get_blob (); }

    hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
    {
      if (unlikely (lookup_index >= lookup_count)) return nullptr;

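      /* Lazy, lock-free creation of per-lookup accelerators: build a
       * candidate off to the side, then try to publish it with a single
       * compare-and-exchange.  If another thread won the race, free our
       * candidate and retry, which then picks up the winner's pointer. */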
    retry:
      auto *accel = accels[lookup_index].get_acquire ();
      if (unlikely (!accel))
      {
	accel = hb_ot_layout_lookup_accelerator_t::create (table->get_lookup (lookup_index));
	if (unlikely (!accel))
	  return nullptr;

	if (unlikely (!accels[lookup_index].cmpexch (nullptr, accel)))
	{
	  hb_free (accel);
	  goto retry;
	}
      }

      return accel;
    }

    hb_blob_ptr_t<T> table;
    unsigned int lookup_count;
    hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *accels;
  };

  protected:
  union {
  FixedVersion<>			version;	/* Version identifier */
  GSUBGPOSVersion1_2<SmallTypes>	version1;
#ifndef HB_NO_BEYOND_64K
  GSUBGPOSVersion1_2<MediumTypes>	version2;
#endif
  } u;
  public:
  DEFINE_SIZE_MIN (4);
};
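
/* Sketch of how the accelerator is typically consumed during lookup
 * application (illustrative only; `accel_cache`, `lookup_index`, and the
 * subtable count are assumed to come from the caller's shaping plan, and
 * `c` is an hb_ot_apply_context_t):
 *
 *   hb_ot_layout_lookup_accelerator_t *accel = accel_cache.get_accel (lookup_index);
 *   if (accel && accel->may_have (c->buffer->cur ().codepoint))
 *     accel->apply (c, subtables_count, false);
 *
 * may_have () is a set-digest test: it can report false positives but never
 * false negatives, so skipping a lookup when it returns false is safe. */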


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */