tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

hb-ot-layout-gsubgpos.hh (158834B)


      1 /*
      2 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
      3 * Copyright © 2010,2012  Google, Inc.
      4 *
      5 *  This is part of HarfBuzz, a text shaping library.
      6 *
      7 * Permission is hereby granted, without written agreement and without
      8 * license or royalty fees, to use, copy, modify, and distribute this
      9 * software and its documentation for any purpose, provided that the
     10 * above copyright notice and the following two paragraphs appear in
     11 * all copies of this software.
     12 *
     13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
     14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
     15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
     16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
     17 * DAMAGE.
     18 *
     19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
     20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
     21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
     22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
     23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
     24 *
     25 * Red Hat Author(s): Behdad Esfahbod
     26 * Google Author(s): Behdad Esfahbod
     27 */
     28 
     29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
     30 #define HB_OT_LAYOUT_GSUBGPOS_HH
     31 
     32 #include "hb.hh"
     33 #include "hb-buffer.hh"
     34 #include "hb-map.hh"
     35 #include "hb-set.hh"
     36 #include "hb-ot-map.hh"
     37 #include "hb-ot-layout-common.hh"
     38 #include "hb-ot-layout-gdef-table.hh"
     39 
     40 
     41 namespace OT {
     42 
     43 
     44 struct hb_intersects_context_t :
     45       hb_dispatch_context_t<hb_intersects_context_t, bool>
     46 {
     47  template <typename T>
     48  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
     49  static return_t default_return_value () { return false; }
     50  bool stop_sublookup_iteration (return_t r) const { return r; }
     51 
     52  const hb_set_t *glyphs;
     53 
     54  hb_intersects_context_t (const hb_set_t *glyphs_) :
     55                            glyphs (glyphs_) {}
     56 };
     57 
     58 struct hb_have_non_1to1_context_t :
     59       hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
     60 {
     61  template <typename T>
     62  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
     63  static return_t default_return_value () { return false; }
     64  bool stop_sublookup_iteration (return_t r) const { return r; }
     65 };
     66 
/* Dispatch context for glyph closure: starting from an input glyph set,
 * apply GSUB lookups and collect every glyph they can produce, iterating
 * toward a fixed point.  Visits are memoized per lookup so a lookup is
 * only re-examined when the glyph set has grown since its last visit. */
struct hb_closure_context_t :
      hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  /* Recurse into a nested lookup; bounded by remaining nesting depth and
   * a no-op when no recurse function has been installed. */
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  void reset_lookup_visit_count ()
  { lookup_count = 0; }

  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }

  /* Visit gate: false when the visit budget is exhausted or the lookup
   * is already done for the current glyphs.  Note: increments the visit
   * counter as a side effect. */
  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

  /* Returns true when this lookup need not be processed again: the
   * memoization maps are in error (fail safe: treat as done), or the
   * currently active parent glyphs are a subset of the glyphs this
   * lookup was already processed with.  Otherwise records the current
   * active glyphs as covered and returns false. */
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (unlikely (done_lookups_glyph_count->in_error () ||
		  done_lookups_glyph_set->in_error ()))
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      if (!done_lookups_glyph_set->has (lookup_index))
      {
	if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
	  return true;
      }

      /* Glyph population changed since last visit: restart coverage
       * tracking for this lookup. */
      done_lookups_glyph_set->get (lookup_index)->clear ();
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    if (parent_active_glyphs ().is_subset (*covered_glyph_set))
      return true;

    covered_glyph_set->union_ (parent_active_glyphs ());
    return false;
  }

  /* Active glyphs one recursion level above the current frame; falls
   * back to the full glyph set when the stack is too shallow. */
  const hb_set_t& previous_parent_active_glyphs () {
    if (active_glyphs_stack.length <= 1)
      return *glyphs;

    return active_glyphs_stack[active_glyphs_stack.length - 2];
  }

  /* Active glyphs of the current recursion frame, or the full glyph set
   * when no frame has been pushed. */
  const hb_set_t& parent_active_glyphs ()
  {
    if (!active_glyphs_stack)
      return *glyphs;

    return active_glyphs_stack.tail ();
  }

  /* Push a fresh active-glyph set for the current recursion level.
   * Returns nullptr on allocation failure. */
  hb_set_t* push_cur_active_glyphs ()
  {
    hb_set_t *s = active_glyphs_stack.push ();
    if (unlikely (active_glyphs_stack.in_error ()))
      return nullptr;
    return s;
  }

  bool pop_cur_done_glyphs ()
  {
    if (!active_glyphs_stack)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;		/* In/out: the closure being computed. */
  hb_set_t output[1];		/* Staging set; merged into glyphs by flush(). */
  hb_vector_t<hb_set_t> active_glyphs_stack;
  recurse_func_t recurse_func = nullptr;
  unsigned int nesting_level_left;

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			hb_map_t *done_lookups_glyph_count_,
			hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  nesting_level_left (nesting_level_left_),
			  done_lookups_glyph_count (done_lookups_glyph_count_),
			  done_lookups_glyph_set (done_lookups_glyph_set_)
  {}

  /* Destructor flushes, so pending output is never lost. */
  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  /* Merge staged output into the main glyph set (dropping glyph ids
   * beyond the face's glyph count) and reset per-pass state. */
  void flush ()
  {
    output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID);	/* Remove invalid glyphs. */
    glyphs->union_ (*output);
    output->clear ();
    active_glyphs_stack.pop ();
    active_glyphs_stack.reset ();
  }

  private:
  hb_map_t *done_lookups_glyph_count;	/* lookup index -> glyph population at last visit */
  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;	/* lookup index -> glyphs already covered */
  unsigned int lookup_count = 0;	/* visit budget; see HB_MAX_LOOKUP_VISIT_COUNT */
};
    200 
    201 
    202 
/* Dispatch context for computing the set of reachable lookups: which
 * lookups can ever be triggered, directly or through contextual
 * recursion, for a given glyph set.  Results are accumulated into the
 * caller-provided visited/inactive sets. */
struct hb_closure_lookups_context_t :
      hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  /* Recurse into a nested lookup, bounded by nesting depth, the total
   * visit budget, and memoization of already-visited lookups. */
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (lookup_limit_exceeded ()
	|| visited_lookups->in_error ()
	|| visited_lookups->has (lookup_index))
      // Don't increment lookup count here, that will be done in the call to closure_lookups()
      // made by recurse_func.
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  /* True when the total visit budget is spent; logs in debug builds. */
  bool lookup_limit_exceeded ()
  {
    bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
    if (ret)
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret; }

  /* True when the lookup should be skipped: visit budget spent, visited
   * set in error, or already visited.  Note: increments the visit
   * counter as a side effect. */
  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
    {
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
                 lookup_count, lookup_index);
      return true;
    }

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;

  hb_closure_lookups_context_t (hb_face_t *face_,
				const hb_set_t *glyphs_,
				hb_set_t *visited_lookups_,
				hb_set_t *inactive_lookups_,
				unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
				face (face_),
				glyphs (glyphs_),
				recurse_func (nullptr),
				nesting_level_left (nesting_level_left_),
				visited_lookups (visited_lookups_),
				inactive_lookups (inactive_lookups_),
				lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;	/* Not owned; filled by set_lookup_visited(). */
  hb_set_t *inactive_lookups;	/* Not owned; filled by set_lookup_inactive(). */
  unsigned int lookup_count;	/* Visit budget; see HB_MAX_LOOKUP_VISIT_COUNT. */
};
    281 
    282 struct hb_would_apply_context_t :
    283       hb_dispatch_context_t<hb_would_apply_context_t, bool>
    284 {
    285  template <typename T>
    286  return_t dispatch (const T &obj) { return obj.would_apply (this); }
    287  static return_t default_return_value () { return false; }
    288  bool stop_sublookup_iteration (return_t r) const { return r; }
    289 
    290  hb_face_t *face;
    291  const hb_codepoint_t *glyphs;
    292  unsigned int len;
    293  bool zero_context;
    294 
    295  hb_would_apply_context_t (hb_face_t *face_,
    296 		    const hb_codepoint_t *glyphs_,
    297 		    unsigned int len_,
    298 		    bool zero_context_) :
    299 		      face (face_),
    300 		      glyphs (glyphs_),
    301 		      len (len_),
    302 		      zero_context (zero_context_) {}
    303 };
    304 
/* Dispatch context that accumulates the glyphs a lookup can consume or
 * produce into four caller-supplied sets (before/input/after/output).
 * Null output arguments are replaced by the shared empty set, so
 * collection into them is a no-op. */
struct hb_collect_glyphs_context_t :
      hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  /* Recurse into a nested (GSUB) lookup, collecting only its output
   * glyphs.  Memoized via recursed_lookups. */
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check.  For GSUB, we only want to collect the output
     * glyphs in the recursion.  If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct.  A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built.  It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    /* Temporarily swap in empty sets for everything except output, so
     * the recursed lookup contributes only its output glyphs. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;
  hb_set_t *input;
  hb_set_t *after;
  hb_set_t *output;
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;	/* Owned; created in ctor, destroyed in dtor. */
  unsigned int nesting_level_left;

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT.  May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_output, /* OUT.  May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input  (glyphs_input  ? glyphs_input  : hb_set_get_empty ()),
			      after  (glyphs_after  ? glyphs_after  : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (hb_set_create ()),
			      nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
    377 
    378 
    379 
    380 template <typename set_t>
    381 struct hb_collect_coverage_context_t :
    382       hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
    383 {
    384  typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
    385  template <typename T>
    386  return_t dispatch (const T &obj) { return obj.get_coverage (); }
    387  static return_t default_return_value () { return Null (Coverage); }
    388  bool stop_sublookup_iteration (return_t r) const
    389  {
    390    r.collect_coverage (set);
    391    return false;
    392  }
    393 
    394  hb_collect_coverage_context_t (set_t *set_) :
    395 			   set (set_) {}
    396 
    397  set_t *set;
    398 };
    399 
/* Per-iterator match configuration: decides for each glyph whether it
 * matches the expected value (may_match) and whether it may be skipped
 * per lookup flags and default-ignorable rules (may_skip). */
struct matcher_t
{
  typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);

  /* Configure from an apply context.  context_match is true when we are
   * matching backtrack/lookahead context rather than the input sequence. */
  template <typename context_t>
  void init (const context_t *c, bool context_match = false)
  {
    set_match_func (nullptr, nullptr);
    lookup_props = c->lookup_props;
    /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
    ignore_zwnj = c->table_index == 1 || (context_match && c->auto_zwnj);
    /* Ignore ZWJ if we are matching context, or asked to. */
    ignore_zwj = context_match || c->auto_zwj;
    /* Ignore hidden glyphs (like CGJ) during GPOS. */
    ignore_hidden = c->table_index == 1;
    mask = context_match ? -1 : c->lookup_mask;
    /* Per syllable matching is only for GSUB. */
    per_syllable = c->table_index == 0 && c->per_syllable;
    syllable = 0;
  }

  void set_match_func (match_func_t match_func_,
		       const void *match_data_)
  { match_func = match_func_; match_data = match_data_; }

  enum may_match_t {
    MATCH_NO,
    MATCH_YES,
    MATCH_MAYBE	/* No match_func installed; caller decides (see may_match). */
  };

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Mask and syllable filtering first; then defer to the subtable's
   * match_func if one is set. */
  may_match_t may_match (hb_glyph_info_t &info,
			 hb_codepoint_t glyph_data) const
  {
    if (!(info.mask & mask) ||
	(per_syllable && syllable && syllable != info.syllable ()))
      return MATCH_NO;

    if (match_func)
      return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO;

    return MATCH_MAYBE;
  }

  enum may_skip_t {
    SKIP_NO,
    SKIP_YES,
    SKIP_MAYBE	/* Default-ignorable: skippable unless it matches. */
  };

  template <typename context_t>
#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* SKIP_YES when lookup flags exclude the glyph outright; SKIP_MAYBE
   * for default ignorables not exempted by the ZWJ/ZWNJ/hidden flags. */
  may_skip_t may_skip (const context_t *c,
		       const hb_glyph_info_t &info) const
  {
    if (!c->check_glyph_property (&info, lookup_props))
      return SKIP_YES;

    if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
		  (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		  (ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
		  (ignore_hidden || !_hb_glyph_info_is_hidden (&info))))
      return SKIP_MAYBE;

    return SKIP_NO;
  }

  public:
  unsigned int lookup_props = 0;
  hb_mask_t mask = -1;		/* -1 == match everything (context matching). */
  bool ignore_zwnj = false;
  bool ignore_zwj = false;
  bool ignore_hidden = false;
  bool per_syllable = false;
  uint8_t syllable = 0;		/* 0 == no syllable restriction. */
  match_func_t match_func = nullptr;
  const void *match_data = nullptr;
};
    483 
/* Buffer iterator that walks glyphs forward (next) or backward (prev),
 * transparently skipping glyphs the matcher deems ignorable, and testing
 * the remaining glyphs against optional per-position expected glyph data
 * (16- or 24-bit glyph-id arrays supplied by the subtable). */
template <typename context_t>
struct skipping_iterator_t
{
  void init (context_t *c_, bool context_match = false)
  {
    c = c_;
    end = c->buffer->len;
    match_glyph_data16 = nullptr;
#ifndef HB_NO_BEYOND_64K
    match_glyph_data24 = nullptr;
#endif
    matcher.init (c, context_match);
  }
  void set_lookup_props (unsigned int lookup_props)
  {
    matcher.lookup_props = lookup_props;
  }
  void set_match_func (matcher_t::match_func_t match_func_,
		       const void *match_data_)
  {
    matcher.set_match_func (match_func_, match_data_);
  }
  /* Expected glyph sequence, one entry per position; advanced on each
   * successful match (see advance_glyph_data). */
  void set_glyph_data (const HBUINT16 glyph_data[])
  {
    match_glyph_data16 = glyph_data;
#ifndef HB_NO_BEYOND_64K
    match_glyph_data24 = nullptr;
#endif
  }
#ifndef HB_NO_BEYOND_64K
  void set_glyph_data (const HBUINT24 glyph_data[])
  {
    match_glyph_data16 = nullptr;
    match_glyph_data24 = glyph_data;
  }
#endif

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  void reset (unsigned int start_index_)
  {
    // For GSUB forward iterator
    idx = start_index_;
    end = c->buffer->len;
    matcher.syllable = c->buffer->cur().syllable();
  }
  /* NOTE(review): from_out_buffer is unused in this body — presumably a
   * signature kept for callers; confirm against call sites. */
  void reset_back (unsigned int start_index_, bool from_out_buffer = false)
  {
    // For GSUB backward iterator
    idx = start_index_;
    matcher.syllable = c->buffer->cur().syllable();
  }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  void reset_fast (unsigned int start_index_)
  {
    // Doesn't set end or syllable. Used by GPOS which doesn't care / change.
    idx = start_index_;
  }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  matcher_t::may_skip_t may_skip (const hb_glyph_info_t &info) const
  { return matcher.may_skip (c, info); }

  enum match_t {
    MATCH,
    NOT_MATCH,
    SKIP
  };

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Classify one glyph: definite skip wins; then a positive match (or a
   * "maybe" match on a non-skippable glyph) counts as MATCH; a
   * non-skippable non-match is NOT_MATCH; anything else is skipped. */
  match_t match (hb_glyph_info_t &info)
  {
    matcher_t::may_skip_t skip = matcher.may_skip (c, info);
    if (unlikely (skip == matcher_t::SKIP_YES))
      return SKIP;

    matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
    if (match == matcher_t::MATCH_YES ||
	(match == matcher_t::MATCH_MAYBE &&
	 skip == matcher_t::SKIP_NO))
      return MATCH;

    if (skip == matcher_t::SKIP_NO)
      return NOT_MATCH;

    return SKIP;
  }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Advance idx to the next matching glyph in buffer->info.  Returns
   * false on a definite non-match or end of range, reporting in
   * *unsafe_to the position up to which the buffer must not be broken. */
  bool next (unsigned *unsafe_to = nullptr)
  {
    const signed stop = (signed) end - 1;
    while ((signed) idx < stop)
    {
      idx++;
      switch (match (c->buffer->info[idx]))
      {
	case MATCH:
	{
	  advance_glyph_data ();
	  return true;
	}
	case NOT_MATCH:
	{
	  if (unsafe_to)
	    *unsafe_to = idx + 1;
	  return false;
	}
	case SKIP:
	  continue;
      }
    }
    if (unsafe_to)
      *unsafe_to = end;
    return false;
  }
#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Move idx back to the previous matching glyph in buffer->out_info.
   * Returns false on a definite non-match or start of buffer, reporting
   * in *unsafe_from the earliest position of the unbreakable span. */
  bool prev (unsigned *unsafe_from = nullptr)
  {
    const unsigned stop = 0;
    while (idx > stop)
    {
      idx--;
      switch (match (c->buffer->out_info[idx]))
      {
	case MATCH:
	{
	  advance_glyph_data ();
	  return true;
	}
	case NOT_MATCH:
	{
	  if (unsafe_from)
	    *unsafe_from = hb_max (1u, idx) - 1u;
	  return false;
	}
	case SKIP:
	  continue;
      }
    }
    if (unsafe_from)
      *unsafe_from = 0;
    return false;
  }

  HB_ALWAYS_INLINE
  /* Current expected glyph id, or 0 when no glyph data is set. */
  hb_codepoint_t
  get_glyph_data ()
  {
    if (match_glyph_data16) return *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
    else
    if (match_glyph_data24) return *match_glyph_data24;
#endif
    return 0;
  }
  HB_ALWAYS_INLINE
  /* Step to the next expected glyph id after a successful match. */
  void
  advance_glyph_data ()
  {
    if (match_glyph_data16) match_glyph_data16++;
#ifndef HB_NO_BEYOND_64K
    else
    if (match_glyph_data24) match_glyph_data24++;
#endif
  }

  unsigned int idx;		/* Current position in the buffer. */
  protected:
  context_t *c;
  matcher_t matcher;
  const HBUINT16 *match_glyph_data16;	/* At most one of the data pointers is set. */
#ifndef HB_NO_BEYOND_64K
  const HBUINT24 *match_glyph_data24;
#endif

  unsigned int end;		/* One past the last position next() may visit. */
};
    674 
    675 struct hb_ot_apply_context_t :
    676       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
    677 {
    678  const char *get_name () { return "APPLY"; }
    679  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
    680 
    681  template <typename T>
    682  static inline auto apply_ (const T &obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (return_t, obj.apply (c, nullptr) )
    683  template <typename T>
    684  static inline auto apply_ (const T &obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (return_t, obj.apply (c) )
    685  template <typename T>
    686  return_t dispatch (const T &obj) { return apply_(obj, this, hb_prioritize); }
    687 
    688  static return_t default_return_value () { return false; }
    689  bool stop_sublookup_iteration (return_t r) const { return r; }
    690  return_t recurse (unsigned int sub_lookup_index)
    691  {
    692    assert (recurse_func);
    693    if (unlikely (nesting_level_left == 0))
    694    {
    695      buffer->successful = false;
    696      return default_return_value ();
    697    }
    698 
    699    buffer->max_ops--;
    700    if (unlikely (buffer->max_ops < 0))
    701    {
    702      buffer->successful = false;
    703      return default_return_value ();
    704    }
    705 
    706    nesting_level_left--;
    707    bool ret = recurse_func (this, sub_lookup_index);
    708    nesting_level_left++;
    709    return ret;
    710  }
    711 
    712  skipping_iterator_t<hb_ot_apply_context_t> iter_input, iter_context;
    713 
    714  unsigned int table_index; /* GSUB/GPOS */
    715  hb_font_t *font;
    716  hb_face_t *face;
    717  hb_buffer_t *buffer;
    718  hb_sanitize_context_t sanitizer;
    719  recurse_func_t recurse_func = nullptr;
    720  const GDEF &gdef;
    721  const GDEF::accelerator_t &gdef_accel;
    722  const hb_ot_layout_lookup_accelerator_t *lookup_accel = nullptr;
    723  const ItemVariationStore &var_store;
    724  hb_scalar_cache_t *var_store_cache;
    725 
    726  hb_direction_t direction;
    727  hb_mask_t lookup_mask = 1;
    728  unsigned int lookup_index = (unsigned) -1;
    729  unsigned int lookup_props = 0;
    730  unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;
    731 
    732  bool has_glyph_classes;
    733  bool auto_zwnj = true;
    734  bool auto_zwj = true;
    735  bool per_syllable = false;
    736  bool random = false;
    737  unsigned new_syllables = (unsigned) -1;
    738 
    739  signed last_base = -1; // GPOS uses
    740  unsigned last_base_until = 0; // GPOS uses
    741 
    742  hb_vector_t<uint32_t> match_positions;
    743  uint32_t stack_match_positions[8];
    744 
    745  hb_ot_apply_context_t (unsigned int table_index_,
    746 		 hb_font_t *font_,
    747 		 hb_buffer_t *buffer_,
    748 		 hb_blob_t *table_blob_,
    749 		 hb_scalar_cache_t *var_store_cache_ = nullptr) :
    750 		table_index (table_index_),
    751 		font (font_), face (font->face), buffer (buffer_),
    752 		sanitizer (table_blob_),
    753 		gdef (
    754 #ifndef HB_NO_OT_LAYOUT
    755 		      *face->table.GDEF->table
    756 #else
    757 		      Null (GDEF)
    758 #endif
    759 		     ),
    760 		gdef_accel (
    761 #ifndef HB_NO_OT_LAYOUT
    762 		      *face->table.GDEF
    763 #else
    764 		      Null (GDEF::accelerator_t)
    765 #endif
    766 		     ),
    767 		var_store (gdef.get_var_store ()),
    768 		var_store_cache (var_store_cache_),
    769 		direction (buffer_->props.direction),
    770 		has_glyph_classes (gdef.has_glyph_classes ())
    771  {
    772    init_iters ();
    773    match_positions.set_storage (stack_match_positions);
    774  }
    775 
    776  void init_iters ()
    777  {
    778    iter_input.init (this, false);
    779    iter_context.init (this, true);
    780  }
    781 
    782  void set_lookup_mask (hb_mask_t mask, bool init = true) { lookup_mask = mask; last_base = -1; last_base_until = 0; if (init) init_iters (); }
    783  void set_auto_zwj (bool auto_zwj_, bool init = true) { auto_zwj = auto_zwj_; if (init) init_iters (); }
    784  void set_auto_zwnj (bool auto_zwnj_, bool init = true) { auto_zwnj = auto_zwnj_; if (init) init_iters (); }
    785  void set_per_syllable (bool per_syllable_, bool init = true) { per_syllable = per_syllable_; if (init) init_iters (); }
    786  void set_random (bool random_) { random = random_; }
    787  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
    788  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
    789  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
    790 
    791  uint32_t random_number ()
    792  {
    793    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    794    buffer->random_state = buffer->random_state * 48271 % 2147483647;
    795    return buffer->random_state;
    796  }
    797 
    798  HB_ALWAYS_INLINE
    799  HB_HOT
    800  bool match_properties_mark (const hb_glyph_info_t *info,
    801 		      unsigned int    glyph_props,
    802 		      unsigned int    match_props) const
    803  {
    804    /* If using mark filtering sets, the high short of
    805     * match_props has the set index.
    806     */
    807    if (match_props & LookupFlag::UseMarkFilteringSet)
    808      return gdef_accel.mark_set_covers (match_props >> 16, info->codepoint);
    809 
    810    /* The second byte of match_props has the meaning
    811     * "ignore marks of attachment type different than
    812     * the attachment type specified."
    813     */
    814    if (match_props & LookupFlag::MarkAttachmentType)
    815      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
    816 
    817    return true;
    818  }
    819 
    820 #ifndef HB_OPTIMIZE_SIZE
    821  HB_ALWAYS_INLINE
    822 #endif
    823  bool check_glyph_property (const hb_glyph_info_t *info,
    824 		     unsigned match_props) const
    825  {
    826    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
    827 
    828    /* Not covered, if, for example, glyph class is ligature and
    829     * match_props includes LookupFlags::IgnoreLigatures
    830     */
    831    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
    832      return false;
    833 
    834    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
    835      return match_properties_mark (info, glyph_props, match_props);
    836 
    837    return true;
    838  }
    839 
 /* Update the glyph-props of the current glyph in preparation for replacing
  * it with glyph_index.  Records the new glyph in the buffer digest, applies
  * any pending syllable override, and recomputes the GDEF glyph class either
  * from the GDEF accelerator (if the font has glyph classes) or from the
  * caller's class_guess.
  *
  * ligature  -- the replacement is the result of a ligature substitution
  * component -- the replacement is one output of a multiple substitution */
 void _set_glyph_class (hb_codepoint_t glyph_index,
		  unsigned int class_guess = 0,
		  bool ligature = false,
		  bool component = false)
 {
   buffer->digest.add (glyph_index);

   /* (unsigned) -1 means "no syllable override pending". */
   if (new_syllables != (unsigned) -1)
     buffer->cur().syllable() = new_syllables;

   unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
   props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
   if (ligature)
   {
     props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
     /* In the only place that the MULTIPLIED bit is used, Uniscribe
      * seems to only care about the "last" transformation between
      * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
      * and ligate again, it forgives the multiplication and acts as
      * if only ligation happened.  As such, clear MULTIPLIED bit.
      */
     props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
   }
   if (component)
     props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
   if (likely (has_glyph_classes))
   {
     /* Keep only the PRESERVE bits; the authoritative class comes from GDEF. */
     props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
     _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef_accel.get_glyph_props (glyph_index));
   }
   else if (class_guess)
   {
     /* No GDEF classes available: fall back to the caller-supplied guess. */
     props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
     _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
   }
   else
     _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
 }
    878 
 /* Replace the current glyph with glyph_index, advancing the buffer.
  * Props are set first, since _set_glyph_class reads/writes buffer->cur(). */
 void replace_glyph (hb_codepoint_t glyph_index)
 {
   _set_glyph_class (glyph_index);
   (void) buffer->replace_glyph (glyph_index);
 }
 /* Replace the current glyph's codepoint without advancing the buffer. */
 void replace_glyph_inplace (hb_codepoint_t glyph_index)
 {
   _set_glyph_class (glyph_index);
   buffer->cur().codepoint = glyph_index;
 }
 /* Replace the current glyph with a ligature glyph; class_guess is used when
  * the font lacks GDEF glyph classes (see _set_glyph_class). */
 void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
			    unsigned int class_guess)
 {
   _set_glyph_class (glyph_index, class_guess, true);
   (void) buffer->replace_glyph (glyph_index);
 }
 /* Emit one output glyph of a multiple substitution (marks it MULTIPLIED)
  * without consuming the current input glyph. */
 void output_glyph_for_component (hb_codepoint_t glyph_index,
			   unsigned int class_guess)
 {
   _set_glyph_class (glyph_index, class_guess, false, true);
   (void) buffer->output_glyph (glyph_index);
 }
    901 };
    902 
/* Operation selector passed to a subtable's cache_func: ENTER sets the
 * per-lookup acceleration cache up, LEAVE tears it down. */
enum class hb_ot_subtable_cache_op_t
{
 ENTER,
 LEAVE,
};
    908 
/* Dispatch context that builds an array of hb_applicable_t entries, one per
 * lookup subtable, capturing for each a type-erased apply function, optional
 * cached-apply / cache-management functions, and a coverage digest used as a
 * fast pre-filter at apply time. */
struct hb_accelerate_subtables_context_t :
      hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
 /* Overload resolution via hb_priority: prefer the subtable's
  * apply(c, external_cache) overload if it exists, else plain apply(c). */
 template <typename T>
 static inline auto apply_ (const T *obj, hb_ot_apply_context_t *c, void *external_cache, hb_priority<1>) HB_RETURN (bool, obj->apply (c, external_cache) )
 template <typename T>
 static inline auto apply_ (const T *obj, hb_ot_apply_context_t *c, void *external_cache, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
 template <typename T>
 static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c, void *external_cache)
 {
   const T *typed_obj = (const T *) obj;
   return apply_ (typed_obj, c, external_cache, hb_prioritize);
 }

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
 /* Same pattern for the cached-apply path: apply_cached(c, cache) if present,
  * then apply(c, cache), then plain apply(c). */
 template <typename T>
 static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, void *external_cache, hb_priority<2>) HB_RETURN (bool, obj->apply_cached (c, external_cache) )
 template <typename T>
 static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, void *external_cache, hb_priority<1>) HB_RETURN (bool, obj->apply (c, external_cache) )
 template <typename T>
 static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, void *external_cache, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
 template <typename T>
 static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c, void *external_cache)
 {
   const T *typed_obj = (const T *) obj;
   return apply_cached_ (typed_obj, c, external_cache, hb_prioritize);
 }

 /* Static cache enter/leave: use T::cache_func if the subtable type defines
  * one, otherwise a no-op returning false (cache not usable). */
 template <typename T>
 static inline auto cache_func_ (hb_ot_apply_context_t *c,
			  hb_ot_subtable_cache_op_t op,
			  hb_priority<1>) HB_RETURN (bool, T::cache_func (c, op) )
 template <typename T=void>
 static inline bool cache_func_ (hb_ot_apply_context_t *c,
			  hb_ot_subtable_cache_op_t op HB_UNUSED,
			  hb_priority<0>) { return false; }
 template <typename Type>
 static inline bool cache_func_to (hb_ot_apply_context_t *c,
			    hb_ot_subtable_cache_op_t op)
 {
   return cache_func_<Type> (c, op, hb_prioritize);
 }
#endif

 typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c, void *external_cache);
 typedef bool (*hb_cache_func_t) (hb_ot_apply_context_t *c, hb_ot_subtable_cache_op_t op);

 /* Type-erased record for one subtable: object pointer, apply entry points,
  * optional external cache, and a set digest of the subtable's coverage. */
 struct hb_applicable_t
 {
   friend struct hb_accelerate_subtables_context_t;
   friend struct hb_ot_layout_lookup_accelerator_t;

   template <typename T>
   void init (const T &obj_,
       hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
       , hb_apply_func_t apply_cached_func_
       , hb_cache_func_t cache_func_
       , void *external_cache_
#endif
	)
   {
     obj = &obj_;
     apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
     apply_cached_func = apply_cached_func_;
     cache_func = cache_func_;
     external_cache = external_cache_;
#endif
     digest.init ();
     obj_.get_coverage ().collect_coverage (&digest);
   }

#ifdef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   /* Digest check first: cheaply rejects glyphs the subtable cannot cover. */
   bool apply (hb_ot_apply_context_t *c) const
   {
     return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c, nullptr);
   }
#else
   bool apply (hb_ot_apply_context_t *c) const
   {
     return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c, external_cache);
   }
   bool apply_cached (hb_ot_apply_context_t *c) const
   {
     return digest.may_have (c->buffer->cur().codepoint) &&  apply_cached_func (obj, c, external_cache);
   }

   /* Returns whether the cache was successfully entered. */
   bool cache_enter (hb_ot_apply_context_t *c) const
   {
     return cache_func (c, hb_ot_subtable_cache_op_t::ENTER);
   }
   void cache_leave (hb_ot_apply_context_t *c) const
   {
     cache_func (c, hb_ot_subtable_cache_op_t::LEAVE);
   }
#endif

   private:
   const void *obj;
   hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   hb_apply_func_t apply_cached_func;
   hb_cache_func_t cache_func;
   void *external_cache;
#endif
   hb_set_digest_t digest;
 };

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
 /* cache_cost: the subtable's self-reported cost a cache would save;
  * defaults to 0 when the type provides no cache_cost(). */
 template <typename T>
 auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
 template <typename T>
 auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )

 /* external_cache_create: subtable-allocated cache object, or nullptr when
  * the type does not support one. */
 template <typename T>
 auto external_cache_create (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.external_cache_create () )
 template <typename T>
 auto external_cache_create (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( nullptr )
#endif

 /* Dispatch interface. */
 template <typename T>
 return_t dispatch (const T &obj)
 {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   /* Only the first 8 subtables get an external cache allocated. */
   void *external_cache = nullptr;
   if (i < 8)
     external_cache = external_cache_create (obj, hb_prioritize);
#endif

   hb_applicable_t *entry = &array[i++];

   entry->init (obj,
	 apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
	 , apply_cached_to<T>
	 , cache_func_to<T>
	 , external_cache
#endif
	 );

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   /* Cache handling
    *
    * We allow one subtable from each lookup to use a cache. The assumption
    * being that multiple subtables of the same lookup cannot use a cache
    * because the resources they would use will collide.  As such, we ask
    * each subtable to tell us how much it costs (which a cache would avoid),
    * and we allocate the cache opportunity to the costliest subtable.
    */
   unsigned cost = cache_cost (obj, hb_prioritize);
   if (cost > subtable_cache_user_cost)
   {
     subtable_cache_user_idx = i - 1;
     subtable_cache_user_cost = cost;
   }
#endif

   return hb_empty_t ();
 }
 static return_t default_return_value () { return hb_empty_t (); }

 hb_accelerate_subtables_context_t (hb_applicable_t *array_) :
			     array (array_) {}

 hb_applicable_t *array;   /* caller-owned storage; one entry per subtable */
 unsigned i = 0;           /* next free slot in array */

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
 /* Index of the subtable granted the per-lookup cache ((unsigned) -1 = none)
  * and its winning cost. */
 unsigned subtable_cache_user_idx = (unsigned) -1;
 unsigned subtable_cache_user_cost = 0;
#endif
};
   1083 
   1084 
/* Callback signatures shared by the (Chain)Context closure / collect / apply
 * machinery.  `value` is interpreted per match type: a glyph id, a class
 * number, or a coverage-table offset.  `cache` is an optional memoization
 * object owned by the caller. */
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
   1089 
/* Function-pointer bundle used during glyph-closure computation. */
struct ContextClosureFuncs
{
 intersects_func_t intersects;
 intersected_glyphs_func_t intersected_glyphs;
};
/* Function-pointer bundle used during collect_glyphs. */
struct ContextCollectGlyphsFuncs
{
 collect_glyphs_func_t collect;
};
/* Matcher used when applying a Context lookup. */
struct ContextApplyFuncs
{
 match_func_t match;
};
/* Matchers used when applying a ChainContext lookup:
 * one each for backtrack, input, and lookahead sequences. */
struct ChainContextApplyFuncs
{
 match_func_t match[3];
};
   1107 
   1108 
   1109 static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
   1110 {
   1111  return glyphs->has (value);
   1112 }
   1113 static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
   1114 {
   1115  const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
   1116  hb_map_t *map = (hb_map_t *) cache;
   1117 
   1118  hb_codepoint_t *cached_v;
   1119  if (map->has (value, &cached_v))
   1120    return *cached_v;
   1121 
   1122  bool v = class_def.intersects_class (glyphs, value);
   1123  map->set (value, v);
   1124 
   1125  return v;
   1126 }
/* Coverage matcher for closure: value is a 16-bit offset to a Coverage
 * table; materialize the offset and resolve it relative to `data`. */
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
 Offset16To<Coverage> coverage;
 coverage = value;
 return (data+coverage).intersects (glyphs);
}
   1133 
   1134 
/* Glyph-array variant of intersected-glyphs: `data` is an HBUINT16 array and
 * `value` indexes it; the indexed glyph id is added to the output set. */
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
 intersected_glyphs->add (g);
}
   1140 
/* Memoization cache: class value -> set of glyphs of that class that
 * intersect the closure glyph set. */
using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;

/* Class variant of intersected-glyphs: union into the output set all glyphs
 * of class `value` that are in `glyphs`, memoizing per class value. */
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);

 intersected_class_cache_t *map = (intersected_class_cache_t *) cache;

 hb_set_t *cached_v;
 if (map->has (value, &cached_v))
 {
   intersected_glyphs->union_ (*cached_v);
   return;
 }

 hb_set_t v;
 class_def.intersected_class_glyphs (glyphs, value, &v);

 /* Union first; v is moved into the cache afterwards. */
 intersected_glyphs->union_ (v);

 map->set (value, std::move (v));
}
   1163 
/* Coverage variant of intersected-glyphs: intersect the referenced Coverage
 * table with `glyphs` and accumulate into the output set. */
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, HB_UNUSED void *cache)
{
 Offset16To<Coverage> coverage;
 coverage = value;
 (data+coverage).intersect_set (*glyphs, *intersected_glyphs);
}
   1170 
   1171 
   1172 template <typename HBUINT>
   1173 static inline bool array_is_subset_of (const hb_set_t *glyphs,
   1174 			       unsigned int count,
   1175 			       const HBUINT values[],
   1176 			       intersects_func_t intersects_func,
   1177 			       const void *intersects_data,
   1178 			       void *cache)
   1179 {
   1180  for (const auto &_ : + hb_iter (values, count))
   1181    if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
   1182  return true;
   1183 }
   1184 
   1185 
/* Collect variant for bare glyph ids: value is the glyph itself. */
static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
{
 glyphs->add (value);
}
/* Collect variant for class values: add all glyphs of the given class. */
static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
 class_def.collect_class (glyphs, value);
}
/* Collect variant for coverage offsets: add all glyphs in the referenced
 * Coverage table. */
static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data)
{
 Offset16To<Coverage> coverage;
 coverage = value;
 (data+coverage).collect_coverage (glyphs);
}
   1201 template <typename HBUINT>
   1202 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
   1203 			  hb_set_t *glyphs,
   1204 			  unsigned int count,
   1205 			  const HBUINT values[],
   1206 			  collect_glyphs_func_t collect_func,
   1207 			  const void *collect_data)
   1208 {
   1209  return
   1210  + hb_iter (values, count)
   1211  | hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); })
   1212  ;
   1213 }
   1214 
   1215 
/* Matcher that accepts any glyph; used where a position must match
 * unconditionally. */
static inline bool match_always (hb_glyph_info_t &info HB_UNUSED, unsigned value HB_UNUSED, const void *data HB_UNUSED)
{
 return true;
}
/* Matcher for glyph-id sequences: value is the expected glyph id. */
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
{
 return info.codepoint == value;
}
/* Matcher for class sequences: value is the expected ClassDef class. */
static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
 return class_def.get_class (info.codepoint) == value;
}
/* Class lookup with the glyph's syllable byte repurposed as a one-entry
 * cache: 255 is the "not cached" sentinel, so only classes 0..254 are
 * cacheable.  Larger classes are recomputed on every call. */
static inline unsigned get_class_cached (const ClassDef &class_def, hb_glyph_info_t &info)
{
 unsigned klass = info.syllable();
 if (klass < 255)
   return klass;
 klass = class_def.get_class (info.codepoint);
 if (likely (klass < 255))
   info.syllable() = klass;
 return klass;
}
/* Class matcher using the full-byte syllable cache (see get_class_cached). */
static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
 return get_class_cached (class_def, info) == value;
}
/* As get_class_cached but caching in the LOW nibble of the syllable byte,
 * leaving the high nibble for a second ClassDef (see get_class_cached2).
 * 15 is the "not cached" sentinel; only classes 0..14 are cacheable. */
static inline unsigned get_class_cached1 (const ClassDef &class_def, hb_glyph_info_t &info)
{
 unsigned klass = info.syllable() & 0x0F;
 if (klass < 15)
   return klass;
 klass = class_def.get_class (info.codepoint);
 if (likely (klass < 15))
   info.syllable() = (info.syllable() & 0xF0) | klass;
 return klass;
}
/* Class matcher using the low-nibble syllable cache (see get_class_cached1). */
static inline bool match_class_cached1 (hb_glyph_info_t &info, unsigned value, const void *data)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
 return get_class_cached1 (class_def, info) == value;
}
/* As get_class_cached1 but caching in the HIGH nibble of the syllable byte;
 * lets two different ClassDefs share one glyph's cache byte. */
static inline unsigned get_class_cached2 (const ClassDef &class_def, hb_glyph_info_t &info)
{
 unsigned klass = (info.syllable() & 0xF0) >> 4;
 if (klass < 15)
   return klass;
 klass = class_def.get_class (info.codepoint);
 if (likely (klass < 15))
   info.syllable() = (info.syllable() & 0x0F) | (klass << 4);
 return klass;
}
/* Class matcher using the high-nibble syllable cache (see get_class_cached2). */
static inline bool match_class_cached2 (hb_glyph_info_t &info, unsigned value, const void *data)
{
 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
 return get_class_cached2 (class_def, info) == value;
}
/* Matcher for coverage sequences: value is a 16-bit offset to a Coverage
 * table resolved relative to `data`; matches if the glyph is covered. */
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
{
 Offset16To<Coverage> coverage;
 coverage = value;
 return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}
   1280 
/* would_apply check: does the queried glyph sequence (c->glyphs, length
 * c->len) match this rule's input sequence exactly?  Unlike match_input,
 * no skipping is performed -- lengths must be equal. */
template <typename HBUINT>
static inline bool would_match_input (hb_would_apply_context_t *c,
			      unsigned int count, /* Including the first glyph (not matched) */
			      const HBUINT input[], /* Array of input values--start with second glyph */
			      match_func_t match_func,
			      const void *match_data)
{
 if (count != c->len)
   return false;

 for (unsigned int i = 1; i < count; i++)
 {
   /* NOTE(review): only `codepoint` is initialized here; assumes the
    * match_func used on this path reads nothing else -- confirm for any
    * new cached matchers. */
   hb_glyph_info_t info;
   info.codepoint = c->glyphs[i];
   if (likely (!match_func (info, input[i - 1], match_data)))
     return false;
 }

 return true;
}
/* Match this rule's input sequence against the buffer starting at the
 * current glyph, using the skippy input iterator (so ignorable glyphs may
 * be skipped).  On success, fills c->match_positions with the matched
 * indices and *end_position with one-past the last matched glyph; on
 * failure *end_position receives the unsafe-to-break boundary.  If
 * p_total_component_count is given, also accumulates the ligature
 * component counts of the matched glyphs (used by ligate_input). */
template <typename HBUINT>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
static bool match_input (hb_ot_apply_context_t *c,
		 unsigned int count, /* Including the first glyph (not matched) */
		 const HBUINT input[], /* Array of input values--start with second glyph */
		 match_func_t match_func,
		 const void *match_data,
		 unsigned int *end_position,
		 unsigned int *p_total_component_count = nullptr)
{
 TRACE_APPLY (nullptr);

 hb_buffer_t *buffer = c->buffer;

 /* Single-glyph rule: the first glyph is already matched by coverage. */
 if (count == 1)
 {
   *end_position = buffer->idx + 1;
   c->match_positions[0] = buffer->idx;
   if (p_total_component_count)
     *p_total_component_count = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
   return_trace (true);
 }

 if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

 auto &skippy_iter = c->iter_input;
 skippy_iter.reset (buffer->idx);
 skippy_iter.set_match_func (match_func, match_data);
 skippy_iter.set_glyph_data (input);

 /*
  * This is perhaps the trickiest part of OpenType...  Remarks:
  *
  * - If all components of the ligature were marks, we call this a mark ligature.
  *
  * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
  *   it as a ligature glyph.
  *
  * - Ligatures cannot be formed across glyphs attached to different components
  *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
  *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
  *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
  *   There are a couple of exceptions to this:
  *
  *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
  *     assuming that the font designer knows what they are doing (otherwise it can
  *     break Indic stuff when a matra wants to ligate with a conjunct,
  *
  *   o If two marks want to ligate and they belong to different components of the
  *     same ligature glyph, and said ligature glyph is to be ignored according to
  *     mark-filtering rules, then allow.
  *     https://github.com/harfbuzz/harfbuzz/issues/545
  */

 unsigned int total_component_count = 0;

 unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
 unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

 /* Lazily computed: may we skip over the base ligature the first glyph's
  * marks are attached to? */
 enum {
   LIGBASE_NOT_CHECKED,
   LIGBASE_MAY_NOT_SKIP,
   LIGBASE_MAY_SKIP
 } ligbase = LIGBASE_NOT_CHECKED;

 for (unsigned int i = 1; i < count; i++)
 {
   unsigned unsafe_to;
   if (!skippy_iter.next (&unsafe_to))
   {
     *end_position = unsafe_to;
     return_trace (false);
   }

   /* Grow match_positions on demand; contents are overwritten below. */
   if (unlikely (i + 1 > c->match_positions.length &&
	  !c->match_positions.resize_dirty  (i + 1)))
     return_trace (false);
   c->match_positions.arrayZ[i] = skippy_iter.idx;

   unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
   unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

   if (first_lig_id && first_lig_comp)
   {
     /* If first component was attached to a previous ligature component,
      * all subsequent components should be attached to the same ligature
      * component, otherwise we shouldn't ligate them... */
     if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
     {
/* ...unless, we are attached to a base ligature and that base
 * ligature is ignorable. */
if (ligbase == LIGBASE_NOT_CHECKED)
{
  bool found = false;
  const auto *out = buffer->out_info;
  unsigned int j = buffer->out_len;
  /* Scan backwards in the output for the base (lig_comp == 0) glyph of
   * the first glyph's ligature. */
  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
  {
    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
    {
      j--;
      found = true;
      break;
    }
    j--;
  }

  if (found && skippy_iter.may_skip (out[j]) == matcher_t::SKIP_YES)
    ligbase = LIGBASE_MAY_SKIP;
  else
    ligbase = LIGBASE_MAY_NOT_SKIP;
}

if (ligbase == LIGBASE_MAY_NOT_SKIP)
  return_trace (false);
     }
   }
   else
   {
     /* If first component was NOT attached to a previous ligature component,
      * all subsequent components should also NOT be attached to any ligature
      * component, unless they are attached to the first component itself! */
     if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
return_trace (false);
   }

   total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
 }

 *end_position = skippy_iter.idx + 1;

 if (p_total_component_count)
 {
   total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
   *p_total_component_count = total_component_count;
 }

 c->match_positions.arrayZ[0] = buffer->idx;

 return_trace (true);
}
/* Form a ligature from the glyphs at c->match_positions[0..count), replacing
 * the first with lig_glyph and reassigning ligature ids/components of any
 * in-between and trailing marks so GPOS mark attachment keeps working.
 * match_end and total_component_count come from match_input. */
static inline bool ligate_input (hb_ot_apply_context_t *c,
			 unsigned int count, /* Including the first glyph */
			 unsigned int match_end,
			 hb_codepoint_t lig_glyph,
			 unsigned int total_component_count)
{
 TRACE_APPLY (nullptr);

 hb_buffer_t *buffer = c->buffer;

 buffer->merge_clusters (buffer->idx, match_end);

 /* - If a base and one or more marks ligate, consider that as a base, NOT
  *   ligature, such that all following marks can still attach to it.
  *   https://github.com/harfbuzz/harfbuzz/issues/1109
  *
  * - If all components of the ligature were marks, we call this a mark ligature.
  *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
  *   the ligature to keep its old ligature id.  This will allow it to attach to
  *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
  *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
  *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
  *   later, we don't want them to lose their ligature id/component, otherwise
  *   GPOS will fail to correctly position the mark ligature on top of the
  *   LAM,LAM,HEH ligature.  See:
  *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
  *
  * - If a ligature is formed of components that some of which are also ligatures
  *   themselves, and those ligature components had marks attached to *their*
  *   components, we have to attach the marks to the new ligature component
  *   positions!  Now *that*'s tricky!  And these marks may be following the
  *   last component of the whole sequence, so we should loop forward looking
  *   for them and update them.
  *
  *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
  *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
  *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
  *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
  *   the new ligature with a component value of 2.
  *
  *   This in fact happened to a font...  See:
  *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
  */

 /* Classify: base ligature (base + marks), mark ligature (all marks),
  * or a true ligature (anything else). */
 bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[c->match_positions.arrayZ[0]]);
 bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[c->match_positions.arrayZ[0]]);
 for (unsigned int i = 1; i < count; i++)
   if (!_hb_glyph_info_is_mark (&buffer->info[c->match_positions.arrayZ[i]]))
   {
     is_base_ligature = false;
     is_mark_ligature = false;
     break;
   }
 bool is_ligature = !is_base_ligature && !is_mark_ligature;

 unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
 unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
 unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
 unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
 unsigned int components_so_far = last_num_components;

 if (is_ligature)
 {
   _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
   if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
   {
     _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
   }
 }
 c->replace_glyph_with_ligature (lig_glyph, klass);

 /* Walk the remaining matched components, re-parenting any skipped marks
  * in between onto the new ligature's component numbering. */
 for (unsigned int i = 1; i < count; i++)
 {
   while (buffer->idx < c->match_positions.arrayZ[i] && buffer->successful)
   {
     if (is_ligature)
     {
unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
if (this_comp == 0)
  this_comp = last_num_components;
assert (components_so_far >= last_num_components);
unsigned int new_lig_comp = components_so_far - last_num_components +
			    hb_min (this_comp, last_num_components);
  _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
     }
     (void) buffer->next_glyph ();
   }

   last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
   last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
   components_so_far += last_num_components;

   /* Skip the base glyph */
   buffer->idx++;
 }

 if (!is_mark_ligature && last_lig_id)
 {
   /* Re-adjust components for any marks following. */
   for (unsigned i = buffer->idx; i < buffer->len; ++i)
   {
     if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;

     unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
     if (!this_comp) break;

     assert (components_so_far >= last_num_components);
     unsigned new_lig_comp = components_so_far - last_num_components +
		      hb_min (this_comp, last_num_components);
     _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
   }
 }
 return_trace (true);
}
   1558 
/* Match a (Chain)Context rule's backtrack sequence, walking backwards from
 * the output buffer's end with the skippy context iterator.  On success
 * *match_start receives the index of the furthest-back matched glyph; on
 * failure it receives the unsafe-from boundary. */
template <typename HBUINT>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
static bool match_backtrack (hb_ot_apply_context_t *c,
		     unsigned int count,
		     const HBUINT backtrack[],
		     match_func_t match_func,
		     const void *match_data,
		     unsigned int *match_start)
{
 TRACE_APPLY (nullptr);

 /* Empty backtrack sequence matches trivially. */
 if (!count)
 {
   *match_start = c->buffer->backtrack_len ();
   return_trace (true);
 }

 auto &skippy_iter = c->iter_context;
 skippy_iter.reset_back (c->buffer->backtrack_len ());
 skippy_iter.set_match_func (match_func, match_data);
 skippy_iter.set_glyph_data (backtrack);

 for (unsigned int i = 0; i < count; i++)
 {
   unsigned unsafe_from;
   if (!skippy_iter.prev (&unsafe_from))
   {
     *match_start = unsafe_from;
     return_trace (false);
   }
 }

 *match_start = skippy_iter.idx;
 return_trace (true);
}
   1596 
/* Match a (Chain)Context rule's lookahead sequence, walking forward from
 * start_index (one-past the matched input).  On success *end_index receives
 * one-past the last lookahead glyph; on failure it receives the unsafe-to
 * boundary. */
template <typename HBUINT>
#ifndef HB_OPTIMIZE_SIZE
HB_ALWAYS_INLINE
#endif
static bool match_lookahead (hb_ot_apply_context_t *c,
		     unsigned int count,
		     const HBUINT lookahead[],
		     match_func_t match_func,
		     const void *match_data,
		     unsigned int start_index,
		     unsigned int *end_index)
{
 TRACE_APPLY (nullptr);

 /* Empty lookahead sequence matches trivially. */
 if (!count)
 {
   *end_index = start_index;
   return_trace (true);
 }

 auto &skippy_iter = c->iter_context;
 assert (start_index >= 1);
 skippy_iter.reset (start_index - 1);
 skippy_iter.set_match_func (match_func, match_data);
 skippy_iter.set_glyph_data (lookahead);

 for (unsigned int i = 0; i < count; i++)
 {
   unsigned unsafe_to;
   if (!skippy_iter.next (&unsafe_to))
   {
     *end_index = unsafe_to;
     return_trace (false);
   }
 }

 *end_index = skippy_iter.idx + 1;
 return_trace (true);
}
   1636 
   1637 
   1638 
/* A (sequence index, lookup index) pair: apply the given lookup at the given
 * position of the matched sequence (OpenType SequenceLookupRecord). */
struct LookupRecord
{
 /* Serialize a copy, remapping lookupListIndex through lookup_map (used by
  * the subsetter).  Fails if the mapped index overflows 16 bits. */
 bool serialize (hb_serialize_context_t *c,
	  const hb_map_t         *lookup_map) const
 {
   TRACE_SERIALIZE (this);
   auto *out = c->embed (*this);
   if (unlikely (!out)) return_trace (false);

   return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this));
 }

 HBUINT16	sequenceIndex;		/* Index into current glyph
				 * sequence--first glyph = 0 */
 HBUINT16	lookupListIndex;	/* Lookup to apply to that
				 * position--zero--based */
 public:
 DEFINE_SIZE_STATIC (4);
};
   1664 
   1665 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
   1666 				      const hb_array_t<const LookupRecord> lookupRecords,
   1667 				      const hb_map_t *lookup_map)
   1668 {
   1669  unsigned count = 0;
   1670  for (const LookupRecord& r : lookupRecords)
   1671  {
   1672    if (!lookup_map->has (r.lookupListIndex))
   1673      continue;
   1674 
   1675    if (!r.serialize (c, lookup_map))
   1676      return 0;
   1677 
   1678    count++;
   1679  }
   1680  return count;
   1681 }
   1682 
/* Which flavor of (Chain)Context subtable the rule data comes from; decides
 * how per-position values are interpreted (glyph id / class / coverage). */
enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
   1684 
/* Recurse glyph closure into each lookup referenced by a contextual rule,
 * narrowing the "currently active" glyph set to the glyphs that can occur
 * at that lookup's sequence position.
 *
 * value: key of the first glyph (glyph id for SimpleContext, class value
 * for ClassBasedContext; unused for CoverageBasedContext).
 * data:  format-dependent match data (e.g. ClassDef) — passed through to
 * intersected_glyphs_func for non-first positions. */
template <typename HBUINT>
static void context_closure_recurse_lookups (hb_closure_context_t *c,
				     unsigned inputCount, const HBUINT input[],
				     unsigned lookupCount,
				     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
				     unsigned value,
				     ContextFormat context_format,
				     const void *data,
				     intersected_glyphs_func_t intersected_glyphs_func,
				     void *cache)
{
  hb_set_t covered_seq_indicies;
  hb_set_t pos_glyphs;
  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned seqIndex = lookupRecord[i].sequenceIndex;
    if (seqIndex >= inputCount) continue;  /* Malformed record; ignore. */

    bool has_pos_glyphs = false;

    /* Compute the position's glyph set only the first time this sequence
     * index is seen; repeats fall through to the full current glyph set. */
    if (!covered_seq_indicies.has (seqIndex))
    {
      has_pos_glyphs = true;
      pos_glyphs.clear ();
      if (seqIndex == 0)
      {
	/* First position: keyed by `value`, interpretation per format. */
	switch (context_format) {
	case ContextFormat::SimpleContext:
	  pos_glyphs.add (value);
	  break;
	case ContextFormat::ClassBasedContext:
	  intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
	  break;
	case ContextFormat::CoverageBasedContext:
	  pos_glyphs.set (c->parent_active_glyphs ());
	  break;
	}
      }
      else
      {
	/* Later positions: input[] starts at the second glyph, hence the
	 * seqIndex - 1 shift.  For SimpleContext the value is the position
	 * itself; otherwise look the class value up through `data`. */
	const void *input_data = input;
	unsigned input_value = seqIndex - 1;
	if (context_format != ContextFormat::SimpleContext)
	{
	  input_data = data;
	  input_value = input[seqIndex - 1];
	}

	intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
      }
    }

    covered_seq_indicies.add (seqIndex);
    /* Push the narrowed set as the active set for the recursion; popped
     * below after recurse() returns. */
    hb_set_t *cur_active_glyphs = c->push_cur_active_glyphs ();
    if (unlikely (!cur_active_glyphs))
      return;
    if (has_pos_glyphs) {
      *cur_active_glyphs = std::move (pos_glyphs);
    } else {
      *cur_active_glyphs = *c->glyphs;
    }

    /* CoverageBasedContext counts the first glyph outside input[]. */
    unsigned endIndex = inputCount;
    if (context_format == ContextFormat::CoverageBasedContext)
      endIndex += 1;

    c->recurse (lookupRecord[i].lookupListIndex, &covered_seq_indicies, seqIndex, endIndex);

    c->pop_cur_done_glyphs ();
  }
}
   1756 
   1757 template <typename context_t>
   1758 static inline void recurse_lookups (context_t *c,
   1759                                    unsigned int lookupCount,
   1760                                    const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
   1761 {
   1762  for (unsigned int i = 0; i < lookupCount; i++)
   1763    c->recurse (lookupRecord[i].lookupListIndex);
   1764 }
   1765 
/* Apply each LookupRecord of a matched (chain)context rule at its recorded
 * match position, fixing up c->match_positions as recursed lookups grow or
 * shrink the buffer.
 *
 * count:      number of matched positions, including the first glyph.
 * match_end:  index just past the match, in input-buffer indexing.
 * On return the buffer is positioned at the (adjusted) end of the match. */
static inline void apply_lookup (hb_ot_apply_context_t *c,
			 unsigned int count, /* Including the first glyph */
			 unsigned int lookupCount,
			 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
			 unsigned int match_end)
{
  hb_buffer_t *buffer = c->buffer;
  int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_end - buffer->idx;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      c->match_positions.arrayZ[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Total buffer length before recursing; used to detect growth/shrink. */
    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();

    /* This can happen if earlier recursed lookups deleted many entries. */
    if (unlikely (c->match_positions.arrayZ[idx] >= orig_len))
      continue;

    if (unlikely (!buffer->move_to (c->match_positions.arrayZ[idx])))
      break;

    if (unlikely (buffer->max_ops <= 0))
      break;

    if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
    {
      if (buffer->have_output)
        c->buffer->sync_so_far ();
      c->buffer->message (c->font,
			  "recursing to lookup %u at %u",
			  (unsigned) lookupRecord[i].lookupListIndex,
			  buffer->idx);
    }

    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
    {
      if (buffer->have_output)
        c->buffer->sync_so_far ();
      c->buffer->message (c->font,
			  "recursed to lookup %u",
			  (unsigned) lookupRecord[i].lookupListIndex);
    }

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one where removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's current match position that was deleted,
     *     NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions where removed, as there recursed-to
     *     lookup might had a different LookupFlag.  Here's a constructed
     *     case of that:
     *     https://github.com/harfbuzz/harfbuzz/discussions/3538
     *
     * It should be possible to construct tests for both of these cases.
     */

    end += delta;
    if (end < int (c->match_positions.arrayZ[idx]))
    {
      /* End might end up being smaller than match_positions.arrayZ[idx] if the recursed
       * lookup ended up removing many items.
       * Just never rewind end beyond start of current position, since that is
       * not possible in the recursed lookup.  Also adjust delta as such.
       *
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496
       * https://github.com/harfbuzz/harfbuzz/issues/1611
       */
      delta += c->match_positions.arrayZ[idx] - end;
      end = c->match_positions.arrayZ[idx];
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      /* Buffer grew: make room for the inserted positions. */
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
	break;
      if (unlikely (count + delta > c->match_positions.length &&
		    !c->match_positions.resize_dirty  (count + delta)))
	return;
    }
    else
    {
      /* NOTE: delta is non-positive. */
      delta = hb_max (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (c->match_positions + next + delta, c->match_positions + next,
	     (count - next) * sizeof (c->match_positions.arrayZ[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      c->match_positions.arrayZ[j] = c->match_positions.arrayZ[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      c->match_positions.arrayZ[next] += delta;
  }

  assert (end >= 0);
  (void) buffer->move_to (end);
}
   1907 
   1908 
   1909 
   1910 /* Contextual lookups */
   1911 
/* Callback bundle for glyph-closure over contextual rules.
 * intersects_data carries the format-specific match data (e.g. ClassDef);
 * the two cache pointers are optional accelerators passed to the funcs. */
struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;
  const void *intersects_data;
  void *intersects_cache;
  void *intersected_glyphs_cache;
};
   1920 
/* Callback bundle for collecting all glyphs a contextual rule can touch. */
struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};
   1926 
/* Callback bundle for matching/applying a contextual rule at runtime. */
struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};
   1932 
   1933 template <typename HBUINT>
   1934 static inline bool context_intersects (const hb_set_t *glyphs,
   1935 			       unsigned int inputCount, /* Including the first glyph (not matched) */
   1936 			       const HBUINT input[], /* Array of input values--start with second glyph */
   1937 			       ContextClosureLookupContext &lookup_context)
   1938 {
   1939  return array_is_subset_of (glyphs,
   1940 		     inputCount ? inputCount - 1 : 0, input,
   1941 		     lookup_context.funcs.intersects,
   1942 		     lookup_context.intersects_data,
   1943 		     lookup_context.intersects_cache);
   1944 }
   1945 
   1946 template <typename HBUINT>
   1947 static inline void context_closure_lookup (hb_closure_context_t *c,
   1948 				   unsigned int inputCount, /* Including the first glyph (not matched) */
   1949 				   const HBUINT input[], /* Array of input values--start with second glyph */
   1950 				   unsigned int lookupCount,
   1951 				   const LookupRecord lookupRecord[],
   1952 				   unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
   1953 				   ContextClosureLookupContext &lookup_context)
   1954 {
   1955  if (context_intersects (c->glyphs,
   1956 		  inputCount, input,
   1957 		  lookup_context))
   1958    context_closure_recurse_lookups (c,
   1959 			     inputCount, input,
   1960 			     lookupCount, lookupRecord,
   1961 			     value,
   1962 			     lookup_context.context_format,
   1963 			     lookup_context.intersects_data,
   1964 			     lookup_context.funcs.intersected_glyphs,
   1965 			     lookup_context.intersected_glyphs_cache);
   1966 }
   1967 
   1968 template <typename HBUINT>
   1969 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
   1970 					  unsigned int inputCount, /* Including the first glyph (not matched) */
   1971 					  const HBUINT input[], /* Array of input values--start with second glyph */
   1972 					  unsigned int lookupCount,
   1973 					  const LookupRecord lookupRecord[],
   1974 					  ContextCollectGlyphsLookupContext &lookup_context)
   1975 {
   1976  collect_array (c, c->input,
   1977 	 inputCount ? inputCount - 1 : 0, input,
   1978 	 lookup_context.funcs.collect, lookup_context.collect_data);
   1979  recurse_lookups (c,
   1980 	   lookupCount, lookupRecord);
   1981 }
   1982 
   1983 template <typename HBUINT>
   1984 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
   1985 				       unsigned int inputCount, /* Including the first glyph (not matched) */
   1986 				       const HBUINT input[], /* Array of input values--start with second glyph */
   1987 				       unsigned int lookupCount HB_UNUSED,
   1988 				       const LookupRecord lookupRecord[] HB_UNUSED,
   1989 				       const ContextApplyLookupContext &lookup_context)
   1990 {
   1991  return would_match_input (c,
   1992 		    inputCount, input,
   1993 		    lookup_context.funcs.match, lookup_context.match_data);
   1994 }
   1995 
   1996 template <typename HBUINT>
   1997 HB_ALWAYS_INLINE
   1998 static bool context_apply_lookup (hb_ot_apply_context_t *c,
   1999 			  unsigned int inputCount, /* Including the first glyph (not matched) */
   2000 			  const HBUINT input[], /* Array of input values--start with second glyph */
   2001 			  unsigned int lookupCount,
   2002 			  const LookupRecord lookupRecord[],
   2003 			  const ContextApplyLookupContext &lookup_context)
   2004 {
   2005  if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;
   2006 
   2007  unsigned match_end = 0;
   2008  bool ret = false;
   2009  if (match_input (c,
   2010 	   inputCount, input,
   2011 	   lookup_context.funcs.match, lookup_context.match_data,
   2012 	   &match_end))
   2013  {
   2014    c->buffer->unsafe_to_break (c->buffer->idx, match_end);
   2015    apply_lookup (c,
   2016 	  inputCount,
   2017 	  lookupCount, lookupRecord,
   2018 	  match_end);
   2019    ret = true;
   2020  }
   2021  else
   2022  {
   2023    c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
   2024    ret = false;
   2025  }
   2026 
   2027  return ret;
   2028 }
   2029 
   2030 static inline bool context_cache_func (hb_ot_apply_context_t *c, hb_ot_subtable_cache_op_t op)
   2031 {
   2032  switch (op)
   2033  {
   2034    case hb_ot_subtable_cache_op_t::ENTER:
   2035    {
   2036      if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
   2037 return false;
   2038      auto &info = c->buffer->info;
   2039      unsigned count = c->buffer->len;
   2040      for (unsigned i = 0; i < count; i++)
   2041 info[i].syllable() = 255;
   2042      c->new_syllables = 255;
   2043      return true;
   2044    }
   2045    case hb_ot_subtable_cache_op_t::LEAVE:
   2046    {
   2047      c->new_syllables = (unsigned) -1;
   2048      HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
   2049      break;
   2050    }
   2051  }
   2052  return false;
   2053 }
   2054 
/* One contextual rule: an input sequence of match values (glyph ids for
 * format 1, classes for format 2; width per Types::HBUINT), followed in
 * memory by lookupCount LookupRecords.  The record array has no field of
 * its own; it is reached with StructAfter past the inputZ array. */
template <typename Types>
struct Rule
{
  template <typename T>
  friend struct RuleSet;

  /* True iff every input position can still match under `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
  {
    return context_intersects (glyphs,
			       inputCount, inputZ.arrayZ,
			       lookup_context);
  }

  /* Glyph closure: recurse into this rule's lookups if it can match. */
  void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
    context_closure_lookup (c,
			    inputCount, inputZ.arrayZ,
			    lookupCount, lookupRecord.arrayZ,
			    value, lookup_context);
  }

  /* Mark reachable lookups, but only if this rule can match c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c,
                        ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    if (!intersects (c->glyphs, lookup_context)) return;

    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
  }

  /* Collect every glyph this rule can consume or produce. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    context_collect_glyphs_lookup (c,
				   inputCount, inputZ.arrayZ,
				   lookupCount, lookupRecord.arrayZ,
				   lookup_context);
  }

  bool would_apply (hb_would_apply_context_t *c,
		    const ContextApplyLookupContext &lookup_context) const
  {
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    return context_would_apply_lookup (c,
				       inputCount, inputZ.arrayZ,
				       lookupCount, lookupRecord.arrayZ,
				       lookup_context);
  }

  /* Match at the current buffer position and apply the lookup records. */
  bool apply (hb_ot_apply_context_t *c,
	      const ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array (inputCount ? inputCount - 1 : 0));
    return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
  }

  /* Serialize for subsetting, remapping input values through input_mapping
   * and lookup indices through lookup_map.
   * NOTE(review): uses inputCount - 1 unguarded; assumes inputCount >= 1,
   * which subset() checks before calling — confirm for any other caller. */
  bool serialize (hb_serialize_context_t *c,
		  const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
		  const hb_map_t *lookup_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!c->extend_min (out))) return_trace (false);

    out->inputCount = inputCount;
    const auto input = inputZ.as_array (inputCount - 1);
    for (const auto org : input)
    {
      HBUINT16 d;
      d = input_mapping->get (org);
      c->copy (d);
    }

    const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
				   (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));

    unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
    return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  /* Subset: drop the rule if any input value does not survive the plan. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);
    if (unlikely (!inputCount)) return_trace (false);
    const auto input = inputZ.as_array (inputCount - 1);

    /* Format 1 remaps glyph ids; format 2 remaps class values. */
    const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
    if (!hb_all (input, mapping)) return_trace (false);
    return_trace (serialize (c->serializer, mapping, lookup_map));
  }

  public:
  /* Checks the header plus the full variable-length tail (inputs followed
   * by lookup records) in one range check. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  c->check_range (inputZ.arrayZ,
				  inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
				  LookupRecord::static_size * lookupCount));
  }

  protected:
  HBUINT16	inputCount;		/* Total number of glyphs in input
					 * glyph sequence--includes the first
					 * glyph */
  HBUINT16	lookupCount;		/* Number of LookupRecords */
  UnsizedArrayOf<typename Types::HBUINT>
		inputZ;			/* Array of match inputs--start with
					 * second glyph */
/*UnsizedArrayOf<LookupRecord>
		lookupRecordX;*/	/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_ARRAY (4, inputZ);
};
   2184 
/* A set of Rules sharing the same first-glyph key (coverage index or
 * class), tried in order of preference until one applies. */
template <typename Types>
struct RuleSet
{
  using Rule = OT::Rule<Types>;

  /* Any rule can still match under `glyphs`? */
  bool intersects (const hb_set_t *glyphs,
		   ContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Glyph closure over every rule in the set. */
  void closure (hb_closure_context_t *c, unsigned value,
		ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  void closure_lookups (hb_closure_lookups_context_t *c,
                        ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c,
		    const ContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Try rules in order until one applies.  For larger rule sets a fast
   * path pre-fetches the first two input glyphs once and screens rules
   * against them before paying for a full match; any complication (few
   * rules, skippable next glyph) falls back to the `slow` path. */
  bool apply (hb_ot_apply_context_t *c,
	      const ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);

    unsigned num_rules = rule.len;

#ifndef HB_NO_OT_RULESETS_FAST_PATH
    if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
#endif
    {
    slow:
      return_trace (
      + hb_iter (rule)
      | hb_map (hb_add (this))
      | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
      | hb_any
      )
      ;
    }

    /* This version is optimized for speed by matching the first & second
     * components of the rule here, instead of calling into the matching code.
     *
     * Replicated from LigatureSet::apply(). */

    /* We use the iter_context instead of iter_input, to avoid skipping
     * default-ignorables and such.
     *
     * Related: https://github.com/harfbuzz/harfbuzz/issues/4813
     */
    auto &skippy_iter = c->iter_context;
    skippy_iter.reset (c->buffer->idx);
    skippy_iter.set_match_func (match_always, nullptr);
    skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
    unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
    hb_glyph_info_t *first = nullptr, *second = nullptr;
    bool matched = skippy_iter.next ();
    if (likely (matched))
    {
      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
      {
	/* Can't use the fast path if eg. the next char is a default-ignorable
	 * or other skippable. */
	goto slow;
      }

      first = &c->buffer->info[skippy_iter.idx];
      unsafe_to = skippy_iter.idx + 1;
    }
    else
    {
      /* Failed to match a next glyph. Only try applying rules that have
       * no further input. */
      return_trace (
      + hb_iter (rule)
      | hb_map (hb_add (this))
      | hb_filter ([&] (const Rule &_) { return _.inputCount <= 1; })
      | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
      | hb_any
      )
      ;
    }
    matched = skippy_iter.next ();
    if (likely (matched))
    {
      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
      {
	/* Can't use the fast path if eg. the next char is a default-ignorable
	 * or other skippable. */
	goto slow;
      }

      second = &c->buffer->info[skippy_iter.idx];
      unsafe_to2 = skippy_iter.idx + 1;
    }

    auto match_input = lookup_context.funcs.match;
    auto *input_data = lookup_context.match_data;
    for (unsigned int i = 0; i < num_rules; i++)
    {
      const auto &r = this+rule.arrayZ[i];

      const auto &input = r.inputZ;

      /* Screen on first input glyph before attempting a full match. */
      if (r.inputCount <= 1 ||
	  (!match_input ||
	   match_input (*first, input.arrayZ[0], input_data)))
      {
	/* Screen on second input glyph as well, when available. */
	if (!second ||
	    (r.inputCount <= 2 ||
	     (!match_input ||
	      match_input (*second, input.arrayZ[1], input_data)))
	   )
	{
	  if (r.apply (c, lookup_context))
	  {
	    if (unsafe_to != (unsigned) -1)
	      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
	    return_trace (true);
	  }
	}
	else
	  unsafe_to = unsafe_to2;
      }
      else
      {
	if (unsafe_to == (unsigned) -1)
	  unsafe_to = unsafe_to1;

	// Skip ahead to next possible first glyph match.
	for (; i + 1 < num_rules; i++)
	{
	  const auto &r2 = this+rule.arrayZ[i + 1];
	  const auto &input2 = r2.inputZ;
	  if (r2.inputCount <= 1 || input2.arrayZ[0] != input.arrayZ[0])
	    break;
	}
      }
    }
    if (likely (unsafe_to != (unsigned) -1))
      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);

    return_trace (false);
  }

  /* Subset: keep only rules that survive; revert entirely if none do. */
  bool subset (hb_subset_context_t *c,
	       const hb_map_t *lookup_map,
	       const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<Rule>& _ : rule)
    {
      if (!_) continue;
      /* Per-rule snapshot so a failed rule can be rolled back alone. */
      auto o_snap = c->serializer->snapshot ();
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
      {
	out->rule.pop ();
	c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<Rule>
		rule;			/* Array of Rule tables
					 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
   2413 
   2414 
   2415 template <typename Types>
   2416 struct ContextFormat1_4
   2417 {
   2418  using RuleSet = OT::RuleSet<Types>;
   2419 
   2420  bool intersects (const hb_set_t *glyphs) const
   2421  {
   2422    struct ContextClosureLookupContext lookup_context = {
   2423      {intersects_glyph, intersected_glyph},
   2424      ContextFormat::SimpleContext,
   2425      nullptr
   2426    };
   2427 
   2428    return
   2429    + hb_zip (this+coverage, ruleSet)
   2430    | hb_filter (*glyphs, hb_first)
   2431    | hb_map (hb_second)
   2432    | hb_map (hb_add (this))
   2433    | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
   2434    | hb_any
   2435    ;
   2436  }
   2437 
   2438  bool may_have_non_1to1 () const
   2439  { return true; }
   2440 
   2441  void closure (hb_closure_context_t *c) const
   2442  {
   2443    hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
   2444    if (unlikely (!cur_active_glyphs)) return;
   2445    get_coverage ().intersect_set (c->previous_parent_active_glyphs (), *cur_active_glyphs);
   2446 
   2447    struct ContextClosureLookupContext lookup_context = {
   2448      {intersects_glyph, intersected_glyph},
   2449      ContextFormat::SimpleContext,
   2450      nullptr
   2451    };
   2452 
   2453    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
   2454    | hb_filter ([&] (hb_codepoint_t _) {
   2455      return c->previous_parent_active_glyphs ().has (_);
   2456    }, hb_first)
   2457    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
   2458    | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
   2459    ;
   2460 
   2461    c->pop_cur_done_glyphs ();
   2462  }
   2463 
   2464  void closure_lookups (hb_closure_lookups_context_t *c) const
   2465  {
   2466    struct ContextClosureLookupContext lookup_context = {
   2467      {intersects_glyph, nullptr},
   2468      ContextFormat::SimpleContext,
   2469      nullptr
   2470    };
   2471 
   2472    + hb_zip (this+coverage, ruleSet)
   2473    | hb_filter (*c->glyphs, hb_first)
   2474    | hb_map (hb_second)
   2475    | hb_map (hb_add (this))
   2476    | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
   2477    ;
   2478  }
   2479 
   2480  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
   2481 
   2482  void collect_glyphs (hb_collect_glyphs_context_t *c) const
   2483  {
   2484    (this+coverage).collect_coverage (c->input);
   2485 
   2486    struct ContextCollectGlyphsLookupContext lookup_context = {
   2487      {collect_glyph},
   2488      nullptr
   2489    };
   2490 
   2491    + hb_iter (ruleSet)
   2492    | hb_map (hb_add (this))
   2493    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
   2494    ;
   2495  }
   2496 
   2497  bool would_apply (hb_would_apply_context_t *c) const
   2498  {
   2499    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
   2500    struct ContextApplyLookupContext lookup_context = {
   2501      {match_glyph},
   2502      nullptr
   2503    };
   2504    return rule_set.would_apply (c, lookup_context);
   2505  }
   2506 
   2507  const Coverage &get_coverage () const { return this+coverage; }
   2508 
   2509  bool apply (hb_ot_apply_context_t *c) const
   2510  {
   2511    TRACE_APPLY (this);
   2512    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
   2513    if (likely (index == NOT_COVERED))
   2514      return_trace (false);
   2515 
   2516    const RuleSet &rule_set = this+ruleSet[index];
   2517    struct ContextApplyLookupContext lookup_context = {
   2518      {match_glyph},
   2519      nullptr
   2520    };
   2521    return_trace (rule_set.apply (c, lookup_context));
   2522  }
   2523 
 /* Subset this format-1 subtable: keep only rule sets whose coverage
  * glyph survives in the subset glyphset, remap glyph ids, and rebuild
  * the coverage table from the survivors. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
   const hb_map_t &glyph_map = *c->plan->glyph_map;

   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
   out->format = format;

   const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
   hb_sorted_vector_t<hb_codepoint_t> new_coverage;
   /* For each (coverage glyph, rule-set offset) pair: drop glyphs not in
    * the subset, drop rule sets that fail to serialize, and collect the
    * remapped glyph ids of the survivors. */
   + hb_zip (this+coverage, ruleSet)
   | hb_filter (glyphset, hb_first)
   | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
   | hb_map (hb_first)
   | hb_map (glyph_map)
   | hb_sink (new_coverage)
   ;

   out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
   /* Empty coverage means nothing survived: report failure so the caller
    * drops the subtable. */
   return_trace (bool (new_coverage));
 }
   2547 
   2548  bool sanitize (hb_sanitize_context_t *c) const
   2549  {
   2550    TRACE_SANITIZE (this);
   2551    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
   2552  }
   2553 
 protected:
 HBUINT16	format;			/* Format identifier--format = 1 */
 typename Types::template OffsetTo<Coverage>
	coverage;		/* Offset to Coverage table--from
				 * beginning of table */
 Array16Of<typename Types::template OffsetTo<RuleSet>>
	ruleSet;		/* Array of RuleSet tables
				 * ordered by Coverage Index */
 public:
 /* Fixed header (format + coverage offset) followed by the variable
  * ruleSet array; keeps the in-memory size in sync with the wire format. */
 DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
   2565 
   2566 
/* SequenceContextFormat2 (and its beyond-64k variant, format 5):
 * class-based contextual lookup.  Input positions are matched by glyph
 * class per `classDef`, and `ruleSet` is indexed by class value. */
template <typename Types>
struct ContextFormat2_5
{
 using RuleSet = OT::RuleSet<SmallTypes>;

 /* True if some rule can match entirely within `glyphs`.  A rule set is
  * only reachable when its class both intersects `glyphs` and is the
  * class of at least one retained coverage glyph. */
 bool intersects (const hb_set_t *glyphs) const
 {
   if (!(this+coverage).intersects (glyphs))
     return false;

   const ClassDef &class_def = this+classDef;

   hb_map_t cache;
   struct ContextClosureLookupContext lookup_context = {
     {intersects_class, nullptr},
     ContextFormat::ClassBasedContext,
     &class_def,
     &cache
   };

   hb_set_t retained_coverage_glyphs;
   (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);

   hb_set_t coverage_glyph_classes;
   class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);


   return
   + hb_iter (ruleSet)
   | hb_map (hb_add (this))
   | hb_enumerate
   | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
      { return class_def.intersects_class (glyphs, p.first) &&
	       coverage_glyph_classes.has (p.first) &&
	       p.second.intersects (glyphs, lookup_context); })
   | hb_any
   ;
 }

 bool may_have_non_1to1 () const
 { return true; }

 /* Glyph-closure: recurse into rule sets whose class is active in the
  * parent's glyph set, tracking the active set via push/pop. */
 void closure (hb_closure_context_t *c) const
 {
   if (!(this+coverage).intersects (c->glyphs))
     return;

   hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
   if (unlikely (!cur_active_glyphs)) return;
   get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
			   *cur_active_glyphs);

   const ClassDef &class_def = this+classDef;

   hb_map_t cache;
   intersected_class_cache_t intersected_cache;
   struct ContextClosureLookupContext lookup_context = {
     {intersects_class, intersected_class_glyphs},
     ContextFormat::ClassBasedContext,
     &class_def,
     &cache,
     &intersected_cache
   };

   + hb_enumerate (ruleSet)
   | hb_filter ([&] (unsigned _)
   { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
	 hb_first)
   | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _)
               {
                 const RuleSet& rule_set = this+_.second;
                 rule_set.closure (c, _.first, lookup_context);
               })
   ;

   c->pop_cur_done_glyphs ();
 }

 /* Mark lookups reachable from rule sets whose class intersects c->glyphs. */
 void closure_lookups (hb_closure_lookups_context_t *c) const
 {
   if (!(this+coverage).intersects (c->glyphs))
     return;

   const ClassDef &class_def = this+classDef;

   hb_map_t cache;
   struct ContextClosureLookupContext lookup_context = {
     {intersects_class, nullptr},
     ContextFormat::ClassBasedContext,
     &class_def,
     &cache
   };

   + hb_iter (ruleSet)
   | hb_map (hb_add (this))
   | hb_enumerate
   | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
   { return class_def.intersects_class (c->glyphs, p.first); })
   | hb_map (hb_second)
   | hb_apply ([&] (const RuleSet & _)
   { _.closure_lookups (c, lookup_context); });
 }

 /* No-op: no variation indices stored in this subtable itself. */
 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

 /* Collect every glyph this subtable can reference; classes are expanded
  * through class_def by collect_class. */
 void collect_glyphs (hb_collect_glyphs_context_t *c) const
 {
   (this+coverage).collect_coverage (c->input);

   const ClassDef &class_def = this+classDef;
   struct ContextCollectGlyphsLookupContext lookup_context = {
     {collect_class},
     &class_def
   };

   + hb_iter (ruleSet)
   | hb_map (hb_add (this))
   | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
   ;
 }

 /* Hypothetical match test: pick the rule set for the first glyph's class. */
 bool would_apply (hb_would_apply_context_t *c) const
 {
   const ClassDef &class_def = this+classDef;
   unsigned int index = class_def.get_class (c->glyphs[0]);
   const RuleSet &rule_set = this+ruleSet[index];
   struct ContextApplyLookupContext lookup_context = {
     {match_class},
     &class_def
   };
   return rule_set.would_apply (c, lookup_context);
 }

 const Coverage &get_coverage () const { return this+coverage; }

 /* Caching hooks: cost estimate drives whether the apply machinery
  * enables the cached code path for this subtable. */
 unsigned cache_cost () const
 {
   return (this+classDef).cost ();
 }
 static bool cache_func (hb_ot_apply_context_t *c, hb_ot_subtable_cache_op_t op)
 {
   return context_cache_func (c, op);
 }

 /* Per-subtable external cache: memoizes coverage binary-search state. */
 struct external_cache_t
 {
   hb_ot_layout_binary_cache_t coverage;
 };
 void *external_cache_create () const
 {
   external_cache_t *cache = (external_cache_t *) hb_malloc (sizeof (external_cache_t));
   if (likely (cache))
   {
     cache->coverage.clear ();
   }
   return cache;
 }
 bool apply_cached (hb_ot_apply_context_t *c, void *external_cache) const { return _apply (c, true, external_cache); }
 bool apply (hb_ot_apply_context_t *c, void *external_cache) const { return _apply (c, false, external_cache); }
 /* Shared apply body; `cached` selects the class-cached match functions. */
 bool _apply (hb_ot_apply_context_t *c, bool cached, void *external_cache) const
 {
   TRACE_APPLY (this);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
   external_cache_t *cache = (external_cache_t *) external_cache;
   unsigned int index = (this+coverage).get_coverage_binary (c->buffer->cur().codepoint, cache ? &cache->coverage : nullptr);
#else
   unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
#endif
   if (index == NOT_COVERED) return_trace (false);

   const ClassDef &class_def = this+classDef;

   struct ContextApplyLookupContext lookup_context = {
     {cached ? match_class_cached : match_class},
     &class_def
   };

   /* `index` is reused: first a coverage index (only checked against
    * NOT_COVERED), then the class of the current glyph. */
   index = cached ? get_class_cached (class_def, c->buffer->cur()) : class_def.get_class (c->buffer->cur().codepoint);
   const RuleSet &rule_set = this+ruleSet[index];
   return_trace (rule_set.apply (c, lookup_context));
 }

 /* Subset: remap classes via klass_map, serialize surviving rule sets,
  * then prune empty trailing rule sets and revert the serializer to the
  * last snapshot that contained real data. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
   out->format = format;
   if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
     return_trace (false);

   hb_map_t klass_map;
   out->classDef.serialize_subset (c, classDef, this, &klass_map);

   const hb_set_t* glyphset = c->plan->glyphset_gsub ();
   hb_set_t retained_coverage_glyphs;
   (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);

   hb_set_t coverage_glyph_classes;
   (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

   const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
   bool ret = true;
   int non_zero_index = -1, index = 0;
   auto snapshot = c->serializer->snapshot();
   for (const auto& _ : + hb_enumerate (ruleSet)
		 | hb_filter (klass_map, hb_first))
   {
     auto *o = out->ruleSet.serialize_append (c->serializer);
     if (unlikely (!o))
     {
	ret = false;
	break;
     }

     /* Only rule sets whose class is actually reachable from the retained
      * coverage glyphs are worth serializing. */
     if (coverage_glyph_classes.has (_.first) &&
	 o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
	non_zero_index = index;
       snapshot = c->serializer->snapshot();
     }

     index++;
   }

   if (!ret || non_zero_index == -1) return_trace (false);

   //prune empty trailing ruleSets
   --index;
   while (index > non_zero_index)
   {
     out->ruleSet.pop ();
     index--;
   }
   c->serializer->revert (snapshot);

   return_trace (bool (out->ruleSet));
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
 }

 protected:
 HBUINT16	format;			/* Format identifier--format = 2 */
 typename Types::template OffsetTo<Coverage>
	coverage;		/* Offset to Coverage table--from
				 * beginning of table */
 typename Types::template OffsetTo<ClassDef>
	classDef;		/* Offset to glyph ClassDef table--from
				 * beginning of table */
 Array16Of<typename Types::template OffsetTo<RuleSet>>
	ruleSet;		/* Array of RuleSet tables
				 * ordered by class */
 public:
 DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet);
};
   2825 
   2826 
/* SequenceContextFormat3: a single implicit rule with one Coverage table
 * per input position (coverageZ), followed in memory by the LookupRecords. */
struct ContextFormat3
{
 using RuleSet = OT::RuleSet<SmallTypes>;

 bool intersects (const hb_set_t *glyphs) const
 {
   /* coverageZ[0] covers the first input position; if it cannot match,
    * nothing can. */
   if (!(this+coverageZ[0]).intersects (glyphs))
     return false;

   struct ContextClosureLookupContext lookup_context = {
     {intersects_coverage, nullptr},
     ContextFormat::CoverageBasedContext,
     this
   };
   /* Remaining positions start at coverageZ + 1, passed as raw offsets
    * resolved against `this`. */
   return context_intersects (glyphs,
		       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
		       lookup_context);
 }

 bool may_have_non_1to1 () const
 { return true; }

 /* Glyph-closure over the single implicit rule; tracks active glyphs via
  * push/pop for recursion culling. */
 void closure (hb_closure_context_t *c) const
 {
   if (!(this+coverageZ[0]).intersects (c->glyphs))
     return;

   hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
   if (unlikely (!cur_active_glyphs)) return;
   get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
			   *cur_active_glyphs);

   /* LookupRecords live immediately after the coverage-offset array. */
   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   struct ContextClosureLookupContext lookup_context = {
     {intersects_coverage, intersected_coverage_glyphs},
     ContextFormat::CoverageBasedContext,
     this
   };
   context_closure_lookup (c,
		    glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
		    lookupCount, lookupRecord,
		    0, lookup_context);

   c->pop_cur_done_glyphs ();
 }

 void closure_lookups (hb_closure_lookups_context_t *c) const
 {
   if (!intersects (c->glyphs))
     return;
   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   recurse_lookups (c, lookupCount, lookupRecord);
 }

 /* No-op: no variation indices stored in this subtable itself. */
 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

 void collect_glyphs (hb_collect_glyphs_context_t *c) const
 {
   (this+coverageZ[0]).collect_coverage (c->input);

   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   struct ContextCollectGlyphsLookupContext lookup_context = {
     {collect_coverage},
     this
   };

   context_collect_glyphs_lookup (c,
			   glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
			   lookupCount, lookupRecord,
			   lookup_context);
 }

 bool would_apply (hb_would_apply_context_t *c) const
 {
   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   struct ContextApplyLookupContext lookup_context = {
     {match_coverage},
     this
   };
   return context_would_apply_lookup (c,
			       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
			       lookupCount, lookupRecord,
			       lookup_context);
 }

 const Coverage &get_coverage () const { return this+coverageZ[0]; }

 bool apply (hb_ot_apply_context_t *c) const
 {
   TRACE_APPLY (this);
   unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
   if (index == NOT_COVERED) return_trace (false);

   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   struct ContextApplyLookupContext lookup_context = {
     {match_coverage},
     this
   };
   return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
 }

 /* Subset: copy header, subset each per-position coverage table in place,
  * then re-serialize the lookup records through lookup_map. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   out->format = format;
   out->glyphCount = glyphCount;

   auto coverages = coverageZ.as_array (glyphCount);

   for (const Offset16To<Coverage>& offset : coverages)
   {
     /* TODO(subset) This looks like should not be necessary to write this way. */
     auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
     if (unlikely (!o)) return_trace (false);
     if (!o->serialize_subset (c, offset, this)) return_trace (false);
   }

   const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
   const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;


   unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
   return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
 }

 /* Sanitize in dependency order: header first (then a barrier so later
  * reads of glyphCount/lookupCount are not hoisted), then each coverage
  * offset, then the trailing LookupRecord array. */
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!c->check_struct (this))) return_trace (false);
   hb_barrier ();
   unsigned int count = glyphCount;
   if (unlikely (!count)) return_trace (false); /* We want to access coverageZ[0] freely. */
   if (unlikely (!c->check_array (coverageZ.arrayZ, count))) return_trace (false);
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!coverageZ[i].sanitize (c, this))) return_trace (false);
   const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
   return_trace (likely (c->check_array (lookupRecord, lookupCount)));
 }

 protected:
 HBUINT16	format;			/* Format identifier--format = 3 */
 HBUINT16	glyphCount;		/* Number of glyphs in the input glyph
				 * sequence */
 HBUINT16	lookupCount;		/* Number of LookupRecords */
 UnsizedArrayOf<Offset16To<Coverage>>
	coverageZ;		/* Array of offsets to Coverage
				 * table in glyph sequence order */
/*UnsizedArrayOf<LookupRecord>
	lookupRecordX;*/	/* Array of LookupRecords--in
				 * design order */
 public:
 DEFINE_SIZE_ARRAY (6, coverageZ);
};
   2983 
/* Dispatcher over the Context subtable formats: reads the leading format
 * field and forwards to the matching union member.  Formats 4 and 5 are
 * the beyond-64k (MediumTypes) variants of formats 1 and 2. */
struct Context
{
 template <typename context_t, typename ...Ts>
 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
 {
   if (unlikely (!c->may_dispatch (this, &u.format.v))) return c->no_dispatch_return_value ();
   TRACE_DISPATCH (this, u.format.v);
   /* hb_barrier() keeps the format check ordered before access to the
    * selected variant (sanitizer discipline). */
   switch (u.format.v) {
   case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
   case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
   case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
   case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
   case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
   default:return_trace (c->default_return_value ());
   }
 }

 protected:
 union {
 struct { HBUINT16 v; }	format;		/* Format identifier */
 ContextFormat1_4<SmallTypes>	format1;
 ContextFormat2_5<SmallTypes>	format2;
 ContextFormat3		format3;
#ifndef HB_NO_BEYOND_64K
 ContextFormat1_4<MediumTypes>	format4;
 ContextFormat2_5<MediumTypes>	format5;
#endif
 } u;
};
   3015 
   3016 
   3017 /* Chaining Contextual lookups */
   3018 
/* Closure-time context for chained lookups.  The [3] arrays are indexed
 * 0=backtrack, 1=input, 2=lookahead, matching the argument order of
 * chain_context_intersects below. */
struct ChainContextClosureLookupContext
{
 ContextClosureFuncs funcs;
 ContextFormat context_format;
 const void *intersects_data[3];
 void *intersects_cache[3];
 void *intersected_glyphs_cache;
};
   3027 
/* Glyph-collection context for chained lookups; collect_data is indexed
 * 0=backtrack, 1=input, 2=lookahead (see chain_context_collect_glyphs_lookup). */
struct ChainContextCollectGlyphsLookupContext
{
 ContextCollectGlyphsFuncs funcs;
 const void *collect_data[3];
};
   3033 
/* Apply-time context for chained lookups; match_data is indexed
 * 0=backtrack, 1=input, 2=lookahead (see chain_context_apply_lookup). */
struct ChainContextApplyLookupContext
{
 ChainContextApplyFuncs funcs;
 const void *match_data[3];
};
   3039 
   3040 template <typename HBUINT>
   3041 static inline bool chain_context_intersects (const hb_set_t *glyphs,
   3042 				     unsigned int backtrackCount,
   3043 				     const HBUINT backtrack[],
   3044 				     unsigned int inputCount, /* Including the first glyph (not matched) */
   3045 				     const HBUINT input[], /* Array of input values--start with second glyph */
   3046 				     unsigned int lookaheadCount,
   3047 				     const HBUINT lookahead[],
   3048 				     ChainContextClosureLookupContext &lookup_context)
   3049 {
   3050  return array_is_subset_of (glyphs,
   3051 		     backtrackCount, backtrack,
   3052 		     lookup_context.funcs.intersects,
   3053 		     lookup_context.intersects_data[0],
   3054 		     lookup_context.intersects_cache[0])
   3055      && array_is_subset_of (glyphs,
   3056 		     inputCount ? inputCount - 1 : 0, input,
   3057 		     lookup_context.funcs.intersects,
   3058 		     lookup_context.intersects_data[1],
   3059 		     lookup_context.intersects_cache[1])
   3060      && array_is_subset_of (glyphs,
   3061 		     lookaheadCount, lookahead,
   3062 		     lookup_context.funcs.intersects,
   3063 		     lookup_context.intersects_data[2],
   3064 		     lookup_context.intersects_cache[2]);
   3065 }
   3066 
   3067 template <typename HBUINT>
   3068 static inline void chain_context_closure_lookup (hb_closure_context_t *c,
   3069 					 unsigned int backtrackCount,
   3070 					 const HBUINT backtrack[],
   3071 					 unsigned int inputCount, /* Including the first glyph (not matched) */
   3072 					 const HBUINT input[], /* Array of input values--start with second glyph */
   3073 					 unsigned int lookaheadCount,
   3074 					 const HBUINT lookahead[],
   3075 					 unsigned int lookupCount,
   3076 					 const LookupRecord lookupRecord[],
   3077 					 unsigned value,
   3078 					 ChainContextClosureLookupContext &lookup_context)
   3079 {
   3080  if (chain_context_intersects (c->glyphs,
   3081 			backtrackCount, backtrack,
   3082 			inputCount, input,
   3083 			lookaheadCount, lookahead,
   3084 			lookup_context))
   3085    context_closure_recurse_lookups (c,
   3086 	     inputCount, input,
   3087 	     lookupCount, lookupRecord,
   3088 	     value,
   3089 	     lookup_context.context_format,
   3090 	     lookup_context.intersects_data[1],
   3091 	     lookup_context.funcs.intersected_glyphs,
   3092 	     lookup_context.intersected_glyphs_cache);
   3093 }
   3094 
/* Collect every glyph one chain rule can reference, routing each sequence
 * into its set (backtrack->before, input->input, lookahead->after), then
 * descend into the lookups the rule would trigger. */
template <typename HBUINT>
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
						unsigned int backtrackCount,
						const HBUINT backtrack[],
						unsigned int inputCount, /* Including the first glyph (not matched) */
						const HBUINT input[], /* Array of input values--start with second glyph */
						unsigned int lookaheadCount,
						const HBUINT lookahead[],
						unsigned int lookupCount,
						const LookupRecord lookupRecord[],
						ChainContextCollectGlyphsLookupContext &lookup_context)
{
 collect_array (c, c->before,
	 backtrackCount, backtrack,
	 lookup_context.funcs.collect, lookup_context.collect_data[0]);
 /* inputCount includes the first (coverage-matched) glyph, which is not
  * stored in input[]; hence the -1. */
 collect_array (c, c->input,
	 inputCount ? inputCount - 1 : 0, input,
	 lookup_context.funcs.collect, lookup_context.collect_data[1]);
 collect_array (c, c->after,
	 lookaheadCount, lookahead,
	 lookup_context.funcs.collect, lookup_context.collect_data[2]);
 recurse_lookups (c,
	   lookupCount, lookupRecord);
}
   3119 
   3120 template <typename HBUINT>
   3121 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
   3122 					     unsigned int backtrackCount,
   3123 					     const HBUINT backtrack[] HB_UNUSED,
   3124 					     unsigned int inputCount, /* Including the first glyph (not matched) */
   3125 					     const HBUINT input[], /* Array of input values--start with second glyph */
   3126 					     unsigned int lookaheadCount,
   3127 					     const HBUINT lookahead[] HB_UNUSED,
   3128 					     unsigned int lookupCount HB_UNUSED,
   3129 					     const LookupRecord lookupRecord[] HB_UNUSED,
   3130 					     const ChainContextApplyLookupContext &lookup_context)
   3131 {
   3132  return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
   3133      && would_match_input (c,
   3134 		    inputCount, input,
   3135 		    lookup_context.funcs.match[1], lookup_context.match_data[1]);
   3136 }
   3137 
/* Apply one chain rule at the current buffer position: match input, then
 * lookahead, then backtrack; on full success mark the matched span
 * unsafe-to-break and run the rule's nested lookups. */
template <typename HBUINT>
HB_ALWAYS_INLINE
static bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
				unsigned int backtrackCount,
				const HBUINT backtrack[],
				unsigned int inputCount, /* Including the first glyph (not matched) */
				const HBUINT input[], /* Array of input values--start with second glyph */
				unsigned int lookaheadCount,
				const HBUINT lookahead[],
				unsigned int lookupCount,
				const LookupRecord lookupRecord[],
				const ChainContextApplyLookupContext &lookup_context)
{
 if (unlikely (inputCount > HB_MAX_CONTEXT_LENGTH)) return false;

 unsigned start_index = c->buffer->out_len;
 unsigned end_index = c->buffer->idx;
 unsigned match_end = 0;
 bool ret = true;
 /* Note the embedded assignment `end_index = match_end`: once the input
  * matches, end_index is advanced so a later failure flags the correct
  * unsafe-to-concat span. */
 if (!(match_input (c,
	     inputCount, input,
	     lookup_context.funcs.match[1], lookup_context.match_data[1],
	     &match_end) && (end_index = match_end)
      && match_lookahead (c,
		   lookaheadCount, lookahead,
		   lookup_context.funcs.match[2], lookup_context.match_data[2],
		   match_end, &end_index)))
 {
   c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
   return false;
 }

 /* Backtrack matches against already-output glyphs, so failures mark the
  * out-buffer span instead. */
 if (!match_backtrack (c,
		backtrackCount, backtrack,
		lookup_context.funcs.match[0], lookup_context.match_data[0],
		&start_index))
 {
   c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
   return false;
 }

 c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
 apply_lookup (c,
	inputCount,
	lookupCount, lookupRecord,
	match_end);

 return ret;
}
   3187 
   3188 template <typename Types>
   3189 struct ChainRule
   3190 {
   3191  template <typename T>
   3192  friend struct ChainRuleSet;
   3193 
   3194  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
   3195  {
   3196    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3197    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3198    return chain_context_intersects (glyphs,
   3199 			     backtrack.len, backtrack.arrayZ,
   3200 			     input.lenP1, input.arrayZ,
   3201 			     lookahead.len, lookahead.arrayZ,
   3202 			     lookup_context);
   3203  }
   3204 
   3205  void closure (hb_closure_context_t *c, unsigned value,
   3206 	ChainContextClosureLookupContext &lookup_context) const
   3207  {
   3208    if (unlikely (c->lookup_limit_exceeded ())) return;
   3209 
   3210    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3211    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3212    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
   3213    chain_context_closure_lookup (c,
   3214 			  backtrack.len, backtrack.arrayZ,
   3215 			  input.lenP1, input.arrayZ,
   3216 			  lookahead.len, lookahead.arrayZ,
   3217 			  lookup.len, lookup.arrayZ,
   3218 			  value,
   3219 			  lookup_context);
   3220  }
   3221 
   3222  void closure_lookups (hb_closure_lookups_context_t *c,
   3223                        ChainContextClosureLookupContext &lookup_context) const
   3224  {
   3225    if (unlikely (c->lookup_limit_exceeded ())) return;
   3226    if (!intersects (c->glyphs, lookup_context)) return;
   3227 
   3228    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3229    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3230    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
   3231    recurse_lookups (c, lookup.len, lookup.arrayZ);
   3232  }
   3233 
   3234  void collect_glyphs (hb_collect_glyphs_context_t *c,
   3235 	       ChainContextCollectGlyphsLookupContext &lookup_context) const
   3236  {
   3237    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3238    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3239    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
   3240    chain_context_collect_glyphs_lookup (c,
   3241 				 backtrack.len, backtrack.arrayZ,
   3242 				 input.lenP1, input.arrayZ,
   3243 				 lookahead.len, lookahead.arrayZ,
   3244 				 lookup.len, lookup.arrayZ,
   3245 				 lookup_context);
   3246  }
   3247 
   3248  bool would_apply (hb_would_apply_context_t *c,
   3249 	    const ChainContextApplyLookupContext &lookup_context) const
   3250  {
   3251    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3252    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3253    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
   3254    return chain_context_would_apply_lookup (c,
   3255 				     backtrack.len, backtrack.arrayZ,
   3256 				     input.lenP1, input.arrayZ,
   3257 				     lookahead.len, lookahead.arrayZ, lookup.len,
   3258 				     lookup.arrayZ, lookup_context);
   3259  }
   3260 
   3261  bool apply (hb_ot_apply_context_t *c,
   3262       const ChainContextApplyLookupContext &lookup_context) const
   3263  {
   3264    TRACE_APPLY (this);
   3265    const auto &input = StructAfter<decltype (inputX)> (backtrack);
   3266    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   3267    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
   3268    return_trace (chain_context_apply_lookup (c,
   3269 				      backtrack.len, backtrack.arrayZ,
   3270 				      input.lenP1, input.arrayZ,
   3271 				      lookahead.len, lookahead.arrayZ, lookup.len,
   3272 				      lookup.arrayZ, lookup_context));
   3273  }
   3274 
   3275  template<typename Iterator,
   3276    hb_requires (hb_is_iterator (Iterator))>
   3277  void serialize_array (hb_serialize_context_t *c,
   3278 		HBUINT16 len,
   3279 		Iterator it) const
   3280  {
   3281    c->copy (len);
   3282    for (const auto g : it)
   3283      c->copy ((HBUINT16) g);
   3284  }
   3285 
 /* Serialize this rule, remapping values through the given maps:
  * backtrack_map for backtrack, then input_map / lookahead_map when
  * provided (otherwise the previous map carries over), and lookup
  * indices through lookup_map. */
 bool serialize (hb_serialize_context_t *c,
	  const hb_map_t *lookup_map,
	  const hb_map_t *backtrack_map,
	  const hb_map_t *input_map = nullptr,
	  const hb_map_t *lookahead_map = nullptr) const
 {
   TRACE_SERIALIZE (this);

   const hb_map_t *mapping = backtrack_map;
   serialize_array (c, backtrack.len, + backtrack.iter ()
			       | hb_map (mapping));

   const auto &input = StructAfter<decltype (inputX)> (backtrack);
   if (input_map) mapping = input_map;
   serialize_array (c, input.lenP1, + input.iter ()
			     | hb_map (mapping));

   const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
   if (lookahead_map) mapping = lookahead_map;
   serialize_array (c, lookahead.len, + lookahead.iter ()
			       | hb_map (mapping));

   const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);

   /* Embed the stored lookup count first, then overwrite it with the
    * number of records that actually survived the lookup_map filter. */
   HBUINT16* lookupCount = c->embed (&(lookup.len));
   if (!lookupCount) return_trace (false);

   unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map);
   return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
 }
   3316 
  /* Subset this ChainRule.  Two modes:
   *  - no backtrack_map (glyph-based rule): every referenced glyph must be
   *    in the retained glyph set, and values are remapped via the plan's
   *    glyph map;
   *  - with backtrack_map (class-based rule): every referenced class must be
   *    present in the corresponding class map.
   * Returns false (dropping the rule) if any referenced value did not
   * survive subsetting. */
  bool subset (hb_subset_context_t *c,
       const hb_map_t *lookup_map,
       const hb_map_t *backtrack_map = nullptr,
       const hb_map_t *input_map = nullptr,
       const hb_map_t *lookahead_map = nullptr) const
  {
    TRACE_SUBSET (this);

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);

    if (!backtrack_map)
    {
      /* Glyph-based: validate against the retained glyph set. */
      const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
      if (!hb_all (backtrack, glyphset) ||
	  !hb_all (input, glyphset) ||
	  !hb_all (lookahead, glyphset))
	return_trace (false);

      serialize (c->serializer, lookup_map, c->plan->glyph_map);
    }
    else
    {
      /* Class-based: validate against the per-array class maps. */
      if (!hb_all (backtrack, backtrack_map) ||
	  !hb_all (input, input_map) ||
	  !hb_all (lookahead, lookahead_map))
	return_trace (false);

      serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
    }

    return_trace (true);
  }
   3350 
  /* Validate the four back-to-back variable-sized arrays.  Each length field
   * is checked before StructAfter walks past that array; the hb_barrier()
   * calls keep the compiler from hoisting the next access above its bounds
   * check.  Do not reorder these statements. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Hyper-optimized sanitized because this is really hot. */
    if (unlikely (!backtrack.len.sanitize (c))) return_trace (false);
    hb_barrier ();
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (unlikely (!input.lenP1.sanitize (c))) return_trace (false);
    hb_barrier ();
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (unlikely (!lookahead.len.sanitize (c))) return_trace (false);
    hb_barrier ();
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return_trace (likely (lookup.sanitize (c)));
  }
   3366 
  protected:
  /* NOTE: only `backtrack` sits at a fixed offset.  The following three
   * members are placeholders: because each array is variable-sized, they
   * must be located at runtime with StructAfter<> (as every method above
   * does), never accessed directly. */
  Array16Of<typename Types::HBUINT>
	backtrack;		/* Array of backtracking values
				 * (to be matched before the input
				 * sequence) */
  HeadlessArray16Of<typename Types::HBUINT>
	inputX;			/* Array of input values (starts with
				 * the second glyph; the first is implied
				 * by the enclosing rule-set key) */
  Array16Of<typename Types::HBUINT>
	lookaheadX;		/* Array of lookahead values (to be
				 * matched after the input sequence) */
  Array16Of<LookupRecord>
	lookupX;		/* Array of LookupRecords--in
				 * design order */
  public:
  DEFINE_SIZE_MIN (8);
};
   3384 
template <typename Types>
/* A set of ChainRule tables sharing the same first-component key.
 * Rules are stored in preference order; apply() returns on the first
 * rule that matches. */
struct ChainRuleSet
{
  using ChainRule = OT::ChainRule<Types>;

  /* True if any rule in the set can match within `glyphs`. */
  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }
  /* Glyph closure: recurse into every rule unless the closure budget is
   * already exhausted. */
  void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Collect the lookup indices reachable from any rule. */
  void closure_lookups (hb_closure_lookups_context_t *c,
                        ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Collect every glyph any rule may reference. */
  void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* True if any rule would apply to the glyph sequence in `c`. */
  bool would_apply (hb_would_apply_context_t *c,
	    const ChainContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Try each rule in order; returns true on the first rule that applies.
   * For larger rule sets a fast path pre-matches the first two sequence
   * components inline before delegating to ChainRule::apply(). */
  bool apply (hb_ot_apply_context_t *c,
       const ChainContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);

    unsigned num_rules = rule.len;

#ifndef HB_NO_OT_RULESETS_FAST_PATH
    if (HB_OPTIMIZE_SIZE_VAL || num_rules <= 4)
#endif
    {
    slow:
      /* Plain path: try every rule until one applies. */
      return_trace (
      + hb_iter (rule)
      | hb_map (hb_add (this))
      | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
      | hb_any
      )
      ;
    }

    /* This version is optimized for speed by matching the first & second
     * components of the rule here, instead of calling into the matching code.
     *
     * Replicated from LigatureSet::apply(). */

    /* We use the iter_context instead of iter_input, to avoid skipping
     * default-ignorables and such.
     *
     * Related: https://github.com/harfbuzz/harfbuzz/issues/4813
     */
    auto &skippy_iter = c->iter_context;
    skippy_iter.reset (c->buffer->idx);
    skippy_iter.set_match_func (match_always, nullptr);
    skippy_iter.set_glyph_data ((HBUINT16 *) nullptr);
    /* unsafe_to == (unsigned)-1 means "no unsafe range recorded yet".
     * unsafe_to1/unsafe_to2 remember where a first/second-glyph mismatch
     * would make concatenation unsafe. */
    unsigned unsafe_to = (unsigned) -1, unsafe_to1 = 0, unsafe_to2 = 0;
    hb_glyph_info_t *first = nullptr, *second = nullptr;
    bool matched = skippy_iter.next ();
    if (likely (matched))
    {
      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
      {
	/* Can't use the fast path if eg. the next char is a default-ignorable
	 * or other skippable. */
	goto slow;
      }

      first = &c->buffer->info[skippy_iter.idx];
      unsafe_to1 = skippy_iter.idx + 1;
    }
    else
    {
      /* Failed to match a next glyph. Only try applying rules that have
       * no further input and lookahead. */
      return_trace (
      + hb_iter (rule)
      | hb_map (hb_add (this))
      | hb_filter ([&] (const ChainRule &_)
		   {
		     const auto &input = StructAfter<decltype (_.inputX)> (_.backtrack);
		     const auto &lookahead = StructAfter<decltype (_.lookaheadX)> (input);
		     return input.lenP1 <= 1 && lookahead.len == 0;
		   })
      | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
      | hb_any
      )
      ;
    }
    matched = skippy_iter.next ();
    if (likely (matched))
    {
      if (skippy_iter.may_skip (c->buffer->info[skippy_iter.idx]))
      {
	/* Can't use the fast path if eg. the next char is a default-ignorable
	 * or other skippable. */
	goto slow;
      }

      second = &c->buffer->info[skippy_iter.idx];
      unsafe_to2 = skippy_iter.idx + 1;
    }

    /* Indices 1/2 are the input/lookahead match funcs+data of the
     * lookup-context triple (0 is backtrack, not needed here). */
    auto match_input = lookup_context.funcs.match[1];
    auto match_lookahead = lookup_context.funcs.match[2];
    auto *input_data = lookup_context.match_data[1];
    auto *lookahead_data = lookup_context.match_data[2];
    for (unsigned int i = 0; i < num_rules; i++)
    {
      const auto &r = this+rule.arrayZ[i];

      const auto &input = StructAfter<decltype (r.inputX)> (r.backtrack);
      const auto &lookahead = StructAfter<decltype (r.lookaheadX)> (input);

      unsigned lenP1 = input.lenP1;
      /* Match `first` against the rule's second input glyph when the input
       * sequence has one, otherwise against its first lookahead glyph. */
      if (lenP1 > 1 ?
	   (!match_input ||
	    match_input (*first, input.arrayZ[0], input_data))
	  :
	   (!lookahead.len || !match_lookahead ||
	    match_lookahead (*first, lookahead.arrayZ[0], lookahead_data)))
      {
	lenP1 = hb_max (lenP1, 1u);
	/* Same idea for `second`: input.arrayZ[1] if the input sequence is
	 * long enough, else the appropriate lookahead slot. */
	if (!second ||
	    (lenP1 > 2 ?
	     (!match_input ||
	      match_input (*second, input.arrayZ[1], input_data))
	     :
	     (lookahead.len <= 2 - lenP1 || !match_lookahead ||
	      match_lookahead (*second, lookahead.arrayZ[2 - lenP1], lookahead_data))))
	{
	  if (r.apply (c, lookup_context))
	  {
	    if (unsafe_to != (unsigned) -1)
	      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);
	    return_trace (true);
	  }
	}
	else
	  unsafe_to = unsafe_to2;
      }
      else
      {
	if (unsafe_to == (unsigned) -1)
	  unsafe_to = unsafe_to1;

	if (lenP1 > 1)
	{
	  // Skip ahead to next possible first glyph match.
	  for (; i + 1 < num_rules; i++)
	  {
	    const auto &r2 = this+rule.arrayZ[i + 1];
	    const auto &input2 = StructAfter<decltype (r2.inputX)> (r2.backtrack);
	    if (input2.lenP1 <= 1 || input2.arrayZ[0] != input.arrayZ[0])
	      break;
	  }
	}
      }
    }
    if (likely (unsafe_to != (unsigned) -1))
      c->buffer->unsafe_to_concat (c->buffer->idx, unsafe_to);

    return_trace (false);
  }

  /* Subset: copy each rule that survives subsetting; revert the serializer
   * to its snapshot (and fail) if no rule survived. */
  bool subset (hb_subset_context_t *c,
       const hb_map_t *lookup_map,
       const hb_map_t *backtrack_klass_map = nullptr,
       const hb_map_t *input_klass_map = nullptr,
       const hb_map_t *lookahead_klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<ChainRule>& _ : rule)
    {
      if (!_) continue;
      auto o_snap = c->serializer->snapshot ();
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      if (!o->serialize_subset (c, _, this,
				lookup_map,
				backtrack_klass_map,
				input_klass_map,
				lookahead_klass_map))
      {
	/* Rule dropped by subsetting: undo the appended offset slot. */
	out->rule.pop ();
	c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<ChainRule>
	rule;			/* Array of ChainRule tables
				 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
   3634 
template <typename Types>
/* Chained Sequence Context, Format 1: simple glyph-based context.
 * A Coverage table selects the first glyph; the parallel ruleSet array
 * holds the ChainRuleSet for each covered glyph.  All matching uses
 * plain glyph IDs (ContextFormat::SimpleContext / match_glyph). */
struct ChainContextFormat1_4
{
  using ChainRuleSet = OT::ChainRuleSet<Types>;

  bool intersects (const hb_set_t *glyphs) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    /* Only rule-sets whose first (coverage) glyph is in `glyphs` matter. */
    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Chained context can substitute multiple glyphs. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    /* Push the set of glyphs active at this subtable (parent active glyphs
     * restricted to our coverage); popped before returning. */
    hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
    if (unlikely (!cur_active_glyphs)) return;
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
			   *cur_active_glyphs);

    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter ([&] (hb_codepoint_t _) {
      return c->previous_parent_active_glyphs ().has (_);
    }, hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;

    c->pop_cur_done_glyphs ();
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_glyph, nullptr},
      ContextFormat::SimpleContext,
      {nullptr, nullptr, nullptr}
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* No device/variation data in context subtables themselves. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      {nullptr, nullptr, nullptr}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_glyph, match_glyph, match_glyph}},
      {nullptr, nullptr, nullptr}
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    /* Coverage lookup on the current glyph picks the rule set to try. */
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    if (index == NOT_COVERED) return_trace (false);

    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_glyph, match_glyph, match_glyph}},
      {nullptr, nullptr, nullptr}
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    /* Keep a (coverage glyph, rule set) pair only if the glyph survives and
     * its rule set serializes non-empty; collect remapped glyphs for the
     * new Coverage table. */
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  typename Types::template OffsetTo<Coverage>
	coverage;		/* Offset to Coverage table--from
				 * beginning of table */
  Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
	ruleSet;		/* Array of ChainRuleSet tables
				 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
   3785 
template <typename Types>
/* Chained Sequence Context, Format 2: class-based context.  A Coverage
 * table gates the first glyph; three ClassDef tables classify the
 * backtrack, input and lookahead sequences, and ruleSet is indexed by the
 * input class of the current glyph.  Matching uses match_class (optionally
 * cached variants). */
struct ChainContextFormat2_5
{
  using ChainRuleSet = OT::ChainRuleSet<SmallTypes>;

  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Per-ClassDef memoization caches for intersects_class. */
    hb_map_t caches[3] = {};
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]}
    };

    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);

    /* Classes that the surviving coverage glyphs actually belong to; a
     * rule set whose class has no covered glyph can never trigger. */
    hb_set_t coverage_glyph_classes;
    input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
       { return input_class_def.intersects_class (glyphs, p.first) &&
	        coverage_glyph_classes.has (p.first) &&
	        p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Chained context can substitute multiple glyphs. */
  bool may_have_non_1to1 () const
  { return true; }

  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    /* Push the active-glyph set (parent active glyphs restricted to our
     * coverage); popped before returning. */
    hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
    if (unlikely (!cur_active_glyphs)) return;
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
			   *cur_active_glyphs);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    hb_map_t caches[3] = {};
    intersected_class_cache_t intersected_cache;
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]},
      &intersected_cache
    };

    /* Only recurse into rule sets whose input class intersects the
     * currently-active glyphs. */
    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
    { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
	 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _)
                {
                  const ChainRuleSet& chainrule_set = this+_.second;
                  chainrule_set.closure (c, _.first, lookup_context);
                })
    ;

    c->pop_cur_done_glyphs ();
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    hb_map_t caches[3] = {};
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_class, nullptr},
      ContextFormat::ClassBasedContext,
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def},
      {&caches[0], &caches[1], &caches[2]}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter([&] (unsigned klass)
    { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const ChainRuleSet &_)
    { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* No device/variation data in context subtables themselves. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* Rule set is selected by the input class of the first glyph. */
    unsigned int index = input_class_def.get_class (c->glyphs[0]);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_class, match_class, match_class}},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  /* Estimated cost of uncached matching; used to decide whether caching
   * this subtable is worthwhile. */
  unsigned cache_cost () const
  {
    return (this+inputClassDef).cost () + (this+lookaheadClassDef).cost ();
  }
  static bool cache_func (hb_ot_apply_context_t *c, hb_ot_subtable_cache_op_t op)
  {
    return context_cache_func (c, op);
  }

  /* Per-subtable external cache: a binary-search cache for the Coverage
   * lookup done at the top of _apply(). */
  struct external_cache_t
  {
    hb_ot_layout_binary_cache_t coverage;
  };
  void *external_cache_create () const
  {
    /* May return nullptr on allocation failure; _apply() handles that. */
    external_cache_t *cache = (external_cache_t *) hb_malloc (sizeof (external_cache_t));
    if (likely (cache))
    {
      cache->coverage.clear ();
    }
    return cache;
  }
  bool apply_cached (hb_ot_apply_context_t *c, void *external_cache) const { return _apply (c, true, external_cache); }
  bool apply (hb_ot_apply_context_t *c, void *external_cache) const { return _apply (c, false, external_cache); }
  bool _apply (hb_ot_apply_context_t *c, bool cached, void *external_cache) const
  {
    TRACE_APPLY (this);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    external_cache_t *cache = (external_cache_t *) external_cache;
    unsigned int index = (this+coverage).get_coverage_binary (c->buffer->cur().codepoint, cache ? &cache->coverage : nullptr);
#else
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
#endif
    if (index == NOT_COVERED) return_trace (false);

    const ClassDef &backtrack_class_def = this+backtrackClassDef;
    const ClassDef &input_class_def = this+inputClassDef;
    const ClassDef &lookahead_class_def = this+lookaheadClassDef;

    /* match_class_caches1 is slightly faster. Use it for lookahead,
     * which is typically longer. */
    struct ChainContextApplyLookupContext lookup_context = {
      {{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached1 : match_class,
        cached ? match_class_cached2 : match_class,
        cached ? match_class_cached1 : match_class}},
      {&backtrack_class_def,
       &input_class_def,
       &lookahead_class_def}
    };

    index = cached
	    ? get_class_cached2 (input_class_def, c->buffer->cur())
	    : input_class_def.get_class (c->buffer->cur().codepoint);
    const ChainRuleSet &rule_set = this+ruleSet[index];
    return_trace (rule_set.apply (c, lookup_context));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    out->coverage.serialize_subset (c, coverage, this);

    /* Old-class -> new-class maps produced by ClassDef subsetting. */
    hb_map_t backtrack_klass_map;
    hb_map_t input_klass_map;
    hb_map_t lookahead_klass_map;

    out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
    // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
    out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
    out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);

    if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
					   input_klass_map,
					   lookahead_klass_map)))
      return_trace (false);

    const hb_set_t* glyphset = c->plan->glyphset_gsub ();
    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);

    hb_set_t coverage_glyph_classes;
    (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    /* Track the last rule set that serialized non-empty so trailing empty
     * ones can be pruned afterwards. */
    int non_zero_index = -1, index = 0;
    bool ret = true;
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
    auto last_non_zero = c->serializer->snapshot ();
    for (const auto& _ : + hb_enumerate (ruleSet)
		 | hb_filter (input_klass_map, hb_first))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
	ret = false;
	break;
      }
      if (coverage_glyph_classes.has (_.first) &&
          o->serialize_subset (c, _.second, this,
			       lookup_map,
			       &backtrack_klass_map,
			       &input_klass_map,
			       &lookahead_klass_map))
      {
        last_non_zero = c->serializer->snapshot ();
	non_zero_index = index;
      }

      index++;
    }

    if (!ret || non_zero_index == -1) return_trace (false);

    // prune empty trailing ruleSets
    if (index > non_zero_index) {
      c->serializer->revert (last_non_zero);
      out->ruleSet.len = non_zero_index + 1;
    }

    return_trace (bool (out->ruleSet));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) &&
	  backtrackClassDef.sanitize (c, this) &&
	  inputClassDef.sanitize (c, this) &&
	  lookaheadClassDef.sanitize (c, this) &&
	  ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 2 */
  typename Types::template OffsetTo<Coverage>
	coverage;		/* Offset to Coverage table--from
				 * beginning of table */
  typename Types::template OffsetTo<ClassDef>
	backtrackClassDef;	/* Offset to glyph ClassDef table
				 * containing backtrack sequence
				 * data--from beginning of table */
  typename Types::template OffsetTo<ClassDef>
	inputClassDef;		/* Offset to glyph ClassDef
				 * table containing input sequence
				 * data--from beginning of table */
  typename Types::template OffsetTo<ClassDef>
	lookaheadClassDef;	/* Offset to glyph ClassDef table
				 * containing lookahead sequence
				 * data--from beginning of table */
  Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
	ruleSet;		/* Array of ChainRuleSet tables
				 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet);
};
   4100 
/* Chained Sequence Context Format 3: coverage-based contexts.
 *
 * The wire format is four variable-length arrays laid out back to back:
 * backtrack coverage offsets, input coverage offsets, lookahead coverage
 * offsets, then the lookup records.  Only `backtrack` is a declared
 * member; the later arrays are located at runtime with StructAfter<>
 * once the preceding array's length is known — the X-suffixed members
 * below exist only to carry the types and must not be accessed directly. */
struct ChainContextFormat3
{
  using RuleSet = OT::RuleSet<SmallTypes>;

  /* Returns true if a sequence matched by this subtable can be formed
   * entirely from `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    /* input[0] doubles as the subtable's primary coverage: if it does not
     * intersect the glyph set, nothing can match. */
    if (!(this+input[0]).intersects (glyphs))
      return false;

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, nullptr},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    /* Convention: the helpers take the input count including the first
     * glyph, with the array advanced past that first coverage. */
    return chain_context_intersects (glyphs,
				     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				     input.len, (const HBUINT16 *) input.arrayZ + 1,
				     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				     lookup_context);
  }

  /* Chaining contexts recurse into other lookups, so the mapping from
   * input glyphs to output glyphs is not necessarily one-to-one. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph-closure: recurse into the referenced lookups for every context
   * that can match within c->glyphs. */
  void closure (hb_closure_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    if (!(this+input[0]).intersects (c->glyphs))
      return;

    /* Push a narrowed active-glyph set (parent set ∩ our coverage) for
     * the recursion; popped again below. */
    hb_set_t* cur_active_glyphs = c->push_cur_active_glyphs ();
    if (unlikely (!cur_active_glyphs))
      return;
    get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
				   *cur_active_glyphs);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      {this, this, this}
    };
    chain_context_closure_lookup (c,
				  backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
				  input.len, (const HBUINT16 *) input.arrayZ + 1,
				  lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
				  lookup.len, lookup.arrayZ,
				  0, lookup_context);

    c->pop_cur_done_glyphs ();
  }

  /* Mark the lookups referenced by our lookup records as reachable, but
   * only if this subtable can match at all under c->glyphs. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    recurse_lookups (c, lookup.len, lookup.arrayZ);
  }

  /* Contexts carry no device/variation data of their own. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collect every glyph this subtable can touch into the context's
   * input/backtrack/lookahead sets. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    (this+input[0]).collect_coverage (c->input);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);

    struct ChainContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      {this, this, this}
    };
    chain_context_collect_glyphs_lookup (c,
					 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					 input.len, (const HBUINT16 *) input.arrayZ + 1,
					 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					 lookup.len, lookup.arrayZ,
					 lookup_context);
  }

  /* Would this subtable apply to the glyph sequence in `c`?  (No buffer
   * mutation; used e.g. by would_substitute queries.) */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_coverage, match_coverage, match_coverage}},
      {this, this, this}
    };
    return chain_context_would_apply_lookup (c,
					     backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					     input.len, (const HBUINT16 *) input.arrayZ + 1,
					     lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					     lookup.len, lookup.arrayZ, lookup_context);
  }

  /* The first input coverage acts as the subtable's coverage table. */
  const Coverage &get_coverage () const
  {
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    return this+input[0];
  }

  /* Apply at the current buffer position: check coverage of the current
   * glyph, then match backtrack/input/lookahead and run the nested
   * lookup records on success. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    const auto &input = StructAfter<decltype (inputX)> (backtrack);

    unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
    if (index == NOT_COVERED) return_trace (false);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    struct ChainContextApplyLookupContext lookup_context = {
      {{match_coverage, match_coverage, match_coverage}},
      {this, this, this}
    };
    return_trace (chain_context_apply_lookup (c,
					      backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
					      input.len, (const HBUINT16 *) input.arrayZ + 1,
					      lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
					      lookup.len, lookup.arrayZ, lookup_context));
  }

  /* Subset helper: emit one Array16OfOffset16To<Coverage> (count written
   * first via the raw HBUINT16 allocation, then one subsetted coverage
   * offset per input element). */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();

    if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
      return_trace (false);

    for (auto& offset : it) {
      auto *o = out->serialize_append (c->serializer);
      if (unlikely (!o) || !o->serialize_subset (c, offset, base))
	return_trace (false);
    }

    return_trace (true);
  }

  /* Subset: copy format, re-serialize the three coverage arrays, then
   * copy the lookup records with lookup indices remapped through the
   * plan's GSUB/GPOS lookup map. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

    if (!serialize_coverage_offsets (c, backtrack.iter (), this))
      return_trace (false);

    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (!serialize_coverage_offsets (c, input.iter (), this))
      return_trace (false);

    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (!serialize_coverage_offsets (c, lookahead.iter (), this))
      return_trace (false);

    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;

    /* Write the count now, fix it up after we know how many records
     * actually survived the remap. */
    HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len);
    if (!lookupCount) return_trace (false);

    unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map);
    return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  /* Validate each trailing array in order; hb_barrier() after each step
   * keeps the compiler from reordering reads past the bounds checks. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!backtrack.sanitize (c, this))) return_trace (false);
    hb_barrier ();
    const auto &input = StructAfter<decltype (inputX)> (backtrack);
    if (unlikely (!input.sanitize (c, this))) return_trace (false);
    hb_barrier ();
    if (unlikely (!input.len)) return_trace (false); /* To be consistent with Context. */
    const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
    if (unlikely (!lookahead.sanitize (c, this))) return_trace (false);
    hb_barrier ();
    const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
    return_trace (likely (lookup.sanitize (c)));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 3 */
  Array16OfOffset16To<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in  glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		inputX		;	/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order) */
  public:
  DEFINE_SIZE_MIN (10);
};
   4317 
/* Polymorphic ChainContext subtable: a tagged union over the supported
 * wire formats.  Formats 1/2 have 16-bit (SmallTypes) variants and, when
 * HB_NO_BEYOND_64K is not defined, wider (MediumTypes) variants numbered
 * 4/5; format 3 is coverage-based and has no wide variant. */
struct ChainContext
{
  /* Dispatch `c` to the concrete format struct.  The hb_barrier() after
   * each format check prevents speculative reads of union members before
   * the format byte has been validated. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, &u.format.v))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, u.format.v);
    switch (u.format.v) {
    case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
    case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
    case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  struct { HBUINT16 v; }		format;	/* Format identifier */
  ChainContextFormat1_4<SmallTypes>	format1;
  ChainContextFormat2_5<SmallTypes>	format2;
  ChainContextFormat3			format3;
#ifndef HB_NO_BEYOND_64K
  ChainContextFormat1_4<MediumTypes>	format4;
  ChainContextFormat2_5<MediumTypes>	format5;
#endif
  } u;
};
   4349 
   4350 
/* Extension subtable, format 1: a small fixed-size wrapper that points to
 * the real subtable through a 32-bit offset, letting lookups escape the
 * 16-bit offset range.  T is the owning table (GSUB or GPOS), which
 * supplies the SubTable union type. */
template <typename T>
struct ExtensionFormat1
{
  /* Lookup type of the wrapped subtable. */
  unsigned int get_type () const { return extensionLookupType; }

  /* Resolve the 32-bit offset to the wrapped subtable.  extensionOffset
   * is stored as a plain Offset32; the cast gives it its pointee type. */
  template <typename X>
  const X& get_subtable () const
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  /* Forward any dispatch straight to the wrapped subtable, passing the
   * real lookup type along so the SubTable union can switch on it. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, this))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, format);
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  /* Note: rejects nested Extension subtables (infinite recursion guard). */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  extensionLookupType != T::SubTable::Extension);
  }

  /* Subset: copy the header fields, then recursively subset the wrapped
   * subtable and fix up the 32-bit offset to point at the copy. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    const auto& src_offset =
        reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
        reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16	format;			/* Format identifier. Set to 1. */
  HBUINT16	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
   4408 
   4409 template <typename T>
   4410 struct Extension
   4411 {
   4412  unsigned int get_type () const
   4413  {
   4414    switch (u.format.v) {
   4415    case 1: hb_barrier (); return u.format1.get_type ();
   4416    default:return 0;
   4417    }
   4418  }
   4419  template <typename X>
   4420  const X& get_subtable () const
   4421  {
   4422    switch (u.format.v) {
   4423    case 1: hb_barrier (); return u.format1.template get_subtable<typename T::SubTable> ();
   4424    default:return Null (typename T::SubTable);
   4425    }
   4426  }
   4427 
   4428  // Specialization of dispatch for subset. dispatch() normally just
   4429  // dispatches to the sub table this points too, but for subset
   4430  // we need to run subset on this subtable too.
   4431  template <typename ...Ts>
   4432  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
   4433  {
   4434    switch (u.format.v) {
   4435    case 1: hb_barrier (); return u.format1.subset (c);
   4436    default: return c->default_return_value ();
   4437    }
   4438  }
   4439 
   4440  template <typename context_t, typename ...Ts>
   4441  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
   4442  {
   4443    if (unlikely (!c->may_dispatch (this, &u.format.v))) return c->no_dispatch_return_value ();
   4444    TRACE_DISPATCH (this, u.format.v);
   4445    switch (u.format.v) {
   4446    case 1: hb_barrier (); return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
   4447    default:return_trace (c->default_return_value ());
   4448    }
   4449  }
   4450 
   4451  protected:
   4452  union {
   4453  struct { HBUINT16 v; }	format;		/* Format identifier */
   4454  ExtensionFormat1<T>	format1;
   4455  } u;
   4456 };
   4457 
   4458 
   4459 /*
   4460 * GSUB/GPOS Common
   4461 */
   4462 
/* Per-lookup acceleration data: a set-digest that gatekeeps the whole
 * lookup plus one hb_applicable_t entry per subtable, stored in a
 * trailing flexible array.  Instances are created with create() (never
 * constructed directly) and released with fini() + hb_free(). */
struct hb_ot_layout_lookup_accelerator_t
{
  template <typename TLookup>
  static hb_ot_layout_lookup_accelerator_t *create (const TLookup &lookup)
  {
    unsigned count = lookup.get_subtable_count ();

    /* Size of the struct with its HB_VAR_ARRAY placeholder replaced by
     * `count` real entries. */
    unsigned size = sizeof (hb_ot_layout_lookup_accelerator_t) -
		    HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) +
		    count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t);

    /* The following is a calloc because when we are collecting subtables,
     * some of them might be invalid and hence not collect; as a result,
     * we might not fill in all the count entries of the subtables array.
     * Zeroing it allows the set digest to gatekeep it without having to
     * initialize it further. */
    auto *thiz = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (1, size);
    if (unlikely (!thiz))
      return nullptr;

    /* Populate the subtables array by dispatching over the lookup. */
    hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables);
    lookup.dispatch (&c_accelerate_subtables);

    /* The lookup-level digest is the union of all subtable digests. */
    thiz->digest.init ();
    for (auto& subtable : hb_iter (thiz->subtables, count))
      thiz->digest.union_ (subtable.digest);

    thiz->count = count;

#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    /* At most one subtable owns the cache; every other subtable's
     * "cached" apply falls back to the plain apply function. */
    thiz->subtable_cache_user_idx = c_accelerate_subtables.subtable_cache_user_idx;

    for (unsigned i = 0; i < count; i++)
      if (i != thiz->subtable_cache_user_idx)
	thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func;
#endif

    return thiz;
  }

  /* Release per-subtable external caches.  Does not free `this`. */
  void fini ()
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    for (unsigned i = 0; i < count; i++)
      hb_free (subtables[i].external_cache);
#endif
  }

  /* Cheap pre-filter: can this lookup possibly affect glyph `g`? */
  bool may_have (hb_codepoint_t g) const
  { return digest.may_have (g); }

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Try each subtable in order until one applies.  `use_cache` selects
   * the cached apply path when the lookup-cache feature is compiled in. */
  bool apply (hb_ot_apply_context_t *c, bool use_cache) const
  {
    c->lookup_accel = this;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    if (use_cache)
    {
      return
      + hb_iter (hb_iter (subtables, count))
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
      | hb_any
      ;
    }
    else
#endif
    {
      return
      + hb_iter (hb_iter (subtables, count))
      | hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
      | hb_any
      ;
    }
    return false; /* Unreachable; both branches above return. */
  }

  /* Enter cached-apply mode; returns false when no subtable uses a cache
   * (or caching is compiled out), in which case cache_leave() must not
   * be called. */
  bool cache_enter (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    return subtable_cache_user_idx != (unsigned) -1 &&
	   subtables[subtable_cache_user_idx].cache_enter (c);
#else
    return false;
#endif
  }
  /* Counterpart of a successful cache_enter(); assumes
   * subtable_cache_user_idx is valid. */
  void cache_leave (hb_ot_apply_context_t *c) const
  {
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
    subtables[subtable_cache_user_idx].cache_leave (c);
#endif
  }


  hb_set_digest_t digest;
  private:
  unsigned count = 0; /* Number of subtables in the array. */
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
  unsigned subtable_cache_user_idx = (unsigned) -1;
#endif
  hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY];
};
   4566 
/* Body of a version-1.x GSUB/GPOS table.  Types selects the offset width
 * for the script/feature/lookup list offsets.  The featureVars offset is
 * only present on disk when the minor version is >= 1. */
template <typename Types>
struct GSUBGPOSVersion1_2
{
  friend struct GSUBGPOS;

  protected:
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  typename Types:: template OffsetTo<ScriptList>
		scriptList;	/* ScriptList table */
  typename Types::template OffsetTo<FeatureList>
		featureList;	/* FeatureList table */
  typename Types::template OffsetTo<LookupList<Types>>
		lookupList;	/* LookupList table */
  Offset32To<FeatureVariations>
		featureVars;	/* Offset to Feature Variations
				   table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (4 + 3 * Types::size);

  /* On-disk size of this header: featureVars only exists from v1.1 on. */
  unsigned int get_size () const
  {
    return min_size +
	   (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const
  {
    return &lookupList;
  }

  /* Validate the three list offsets (the lookup list is re-typed so its
   * entries sanitize as TLookup), and featureVars when present. */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList;
    if (unlikely (!(scriptList.sanitize (c, this) &&
		    featureList.sanitize (c, this) &&
		    reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  /* Subset: serialize lookup list first, then feature list, then script
   * list (later lists reference indices into earlier ones).  If feature
   * variations end up empty, revert them and downgrade to v1.0. */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->subset_context->serializer->start_embed (this);
    if (unlikely (!c->subset_context->serializer->extend_min (out))) return_trace (false);

    out->version = version;

    typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList;
    reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList),
			   this,
			   c);

    reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList),
			   this,
			   c);

    out->scriptList.serialize_subset (c->subset_context,
				      scriptList,
				      this,
				      c);

#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      /* Snapshot so the featureVars offset can be rolled back if the
       * subsetted variations turn out to be droppable. */
      auto snapshot = c->subset_context->serializer->snapshot ();
      if (!c->subset_context->serializer->extend_min (&out->featureVars))
        return_trace (false);

      // if all axes are pinned all feature vars are dropped.
      bool ret = !c->subset_context->plan->all_axes_pinned
                 && out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      if (!ret && version.major == 1)
      {
        c->subset_context->serializer->revert (snapshot);
	out->version.major = 1;
	out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }
};
   4668 
   4669 struct GSUBGPOS
   4670 {
  /* On-disk size of the table header for the stored major version;
   * unknown versions report just the size of the version field. */
  unsigned int get_size () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return u.version1.get_size ();
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return u.version2.get_size ();
#endif
    default: return u.version.static_size;
    }
  }
   4681 
  /* Validate the version field first (barrier: nothing else may be read
   * before it checks out), then the version-specific body.  Unknown
   * major versions are accepted and treated as empty elsewhere. */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!u.version.sanitize (c))) return_trace (false);
    hb_barrier ();
    switch (u.version.major) {
    case 1: hb_barrier (); return_trace (u.version1.sanitize<TLookup> (c));
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return_trace (u.version2.sanitize<TLookup> (c));
#endif
    default: return_trace (true);
    }
  }
   4696 
  /* Subset the table body for the stored major version; unknown
   * versions cannot be subsetted. */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return u.version1.subset<TLookup> (c);
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return u.version2.subset<TLookup> (c);
#endif
    default: return false;
    }
  }
   4708 
  /* Version-dispatched accessors for the top-level lists.  Unknown
   * versions yield the Null object (or zero), so callers need no
   * version checks of their own. */
  const ScriptList &get_script_list () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return this+u.version1.scriptList;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.scriptList;
#endif
    default: return Null (ScriptList);
    }
  }
  const FeatureList &get_feature_list () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return this+u.version1.featureList;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.featureList;
#endif
    default: return Null (FeatureList);
    }
  }
  unsigned int get_lookup_count () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (this+u.version1.lookupList).len;
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return (this+u.version2.lookupList).len;
#endif
    default: return 0;
    }
  }
  const Lookup& get_lookup (unsigned int i) const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (this+u.version1.lookupList)[i];
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return (this+u.version2.lookupList)[i];
#endif
    default: return Null (Lookup);
    }
  }
  /* FeatureVariations were introduced in v1.1, so the v1 case also
   * requires minor >= 1 before dereferencing the offset. */
  const FeatureVariations &get_feature_variations () const
  {
    switch (u.version.major) {
    case 1: hb_barrier (); return (u.version.to_int () >= 0x00010001u && hb_barrier () ? this+u.version1.featureVars : Null (FeatureVariations));
#ifndef HB_NO_BEYOND_64K
    case 2: hb_barrier (); return this+u.version2.featureVars;
#endif
    default: return Null (FeatureVariations);
    }
  }
   4759 
  /* Thin convenience wrappers over the script and feature lists; all of
   * them delegate to get_script_list()/get_feature_list() above. */
  bool has_data () const { return u.version.to_int (); }
  unsigned int get_script_count () const
  { return get_script_list ().len; }
  const Tag& get_script_tag (unsigned int i) const
  { return get_script_list ().get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
				unsigned int *script_count /* IN/OUT */,
				hb_tag_t     *script_tags /* OUT */) const
  { return get_script_list ().get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return get_script_list ()[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return get_script_list ().find_index (tag, index); }

  unsigned int get_feature_count () const
  { return get_feature_list ().len; }
  /* NOT_FOUND_INDEX maps to HB_TAG_NONE rather than indexing the list. */
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
				 unsigned int *feature_count /* IN/OUT */,
				 hb_tag_t     *feature_tags /* OUT */) const
  { return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return get_feature_list ()[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return get_feature_list ().find_index (tag, index); }
   4786 
  /* Find the FeatureVariations record matching the given normalized
   * variation coordinates; with HB_NO_VAR the whole feature is stubbed
   * out and NOT_FOUND_INDEX is reported. */
  bool find_variations_index (const int *coords, unsigned int num_coords,
			      unsigned int *index,
			      ItemVarStoreInstancer *instancer) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return get_feature_variations ().find_index (coords, num_coords, index, instancer);
  }
  /* Resolve a feature, preferring the substitute Feature provided by the
   * FeatureVariations record when one applies; otherwise fall back to
   * the plain feature list entry. */
  const Feature& get_feature_variation (unsigned int feature_index,
					unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	u.version.to_int () >= 0x00010001u)
    {
      const Feature *feature = get_feature_variations ().find_substitute (variations_index,
									  feature_index);
      if (feature)
	return *feature;
    }
#endif
    return get_feature (feature_index);
  }
   4812 
  /* Collect lookups referenced through FeatureVariations substitutes for
   * the given feature indices; a no-op when variations are compiled out. */
  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
					  const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
					  hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    get_feature_variations ().collect_lookups (feature_indexes, feature_record_cond_idx_map, lookup_indexes);
#endif
  }

#ifndef HB_NO_VAR
  /* Forwarded to FeatureVariations; used by the subsetter when pinning
   * variation axes. */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  { get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif
   4826 
  /* Expand `lookup_indexes` with every lookup transitively reachable
   * from it (contextual lookups recurse), then drop lookups that cannot
   * apply to `glyphs` at all. */
  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
			const hb_set_t *glyphs,
			hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);

    for (unsigned lookup_index : *lookup_indexes)
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }
   4843 
  /* Subsetting helper: walk every retained script and prune its language
   * systems, recording surviving feature indices and the per-script
   * langsys map.  Scripts not in `layout_scripts` are skipped entirely. */
  void prune_langsys (const hb_map_t *duplicate_feature_map,
                      const hb_set_t *layout_scripts,
                      hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
                      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Tag& tag = get_script_tag (script_index);
      if (!layout_scripts->has (tag)) continue;
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }
   4860 
  /* Subsetting helper: drop from `feature_indices` every feature that
   * references none of the retained lookups, with carve-outs for 'pref',
   * 'size', and features that gain alternates via FeatureVariations. */
  void prune_features (const hb_map_t *lookup_indices, /* IN */
		       const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
		       const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
		       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // This is the set of feature indices which have alternate versions defined
    // if the FeatureVariation's table and the alternate version(s) intersect the
    // set of lookup indices.
    hb_set_t alternate_feature_indices;
    get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error()))
    {
      feature_indices->err ();
      return;
    }
#endif

    for (unsigned i : hb_iter (feature_indices))
    {
      hb_tag_t tag =  get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
        // Note: Never ever drop feature 'pref', even if it's empty.
        // HarfBuzz chooses shaper for Khmer based on presence of this
        // feature.	See thread at:
	// http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
        continue;


      /* Judge the substitute Feature (if any) rather than the original. */
      const Feature *f = &(get_feature (i));
      const Feature** p = nullptr;
      if (feature_substitutes_map->has (i, &p))
        f = *p;

      /* 'size' features carry data in FeatureParams, not in lookups;
       * keep them even with no lookup references. */
      if (!f->featureParams.is_null () &&
          tag == HB_TAG ('s', 'i', 'z', 'e'))
        continue;

      if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
          && !alternate_feature_indices.has (i)
#endif
	  )
	feature_indices->del (i);
    }
  }
   4907 
  /* Gather the 'name'-table IDs referenced by every retained feature
   * (e.g. from FeatureParams), so the subsetter keeps those strings. */
  void collect_name_ids (const hb_map_t *feature_index_map,
                         hb_set_t *nameids_to_retain /* OUT */) const
  {
    unsigned count = get_feature_count ();
    for (unsigned i = 0 ; i < count; i++)
    {
      if (!feature_index_map->has (i)) continue;
      hb_tag_t tag = get_feature_tag (i);
      get_feature (i).collect_name_ids (tag, nameids_to_retain);
    }
  }
   4919 
  /* Accelerator for a GSUB/GPOS table: holds a sanitized reference to the
   * table blob and a lazily-populated, per-lookup array of
   * hb_ot_layout_lookup_accelerator_t pointers, created on first use in a
   * thread-safe (lock-free) manner. */
  template <typename T>
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      hb_sanitize_context_t sc;
      /* Allow lazy (partial) sanitization of some GPOS subtables. */
      sc.lazy_some_gpos = true;
      this->table = sc.reference_table<T> (face);

      /* Blocklisted tables (known-broken fonts) are replaced with the
       * empty blob so the rest of the code can treat them as absent. */
      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
	hb_blob_destroy (this->table.get_blob ());
	this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      /* One atomic slot per lookup; zero-initialized (all nullptr) so each
       * accelerator can be created lazily on first access. */
      this->accels = (hb_atomic_t<hb_ot_layout_lookup_accelerator_t *> *) hb_calloc (this->lookup_count, sizeof (*accels));
      if (unlikely (!this->accels))
      {
	/* Allocation failed: degrade to an empty table so get_accel()
	 * (and everything else) safely does nothing. */
	this->lookup_count = 0;
	this->table.destroy ();
	this->table = hb_blob_get_empty ();
      }
    }
    ~accelerator_t ()
    {
      /* Relaxed load is sufficient here: destruction implies no other
       * thread is concurrently touching this accelerator. */
      for (unsigned int i = 0; i < this->lookup_count; i++)
      {
	auto *accel = this->accels[i].get_relaxed ();
	if (accel)
	  accel->fini ();
	hb_free (accel);
      }
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_t *get_blob () const { return table.get_blob (); }

    /* Return the accelerator for lookup_index, creating it on first use.
     * Returns nullptr on out-of-range index or allocation failure.
     * Thread-safe: losers of the publication race free their copy and
     * retry, so all callers end up sharing the single published pointer. */
    hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
    {
      if (unlikely (lookup_index >= lookup_count)) return nullptr;

    retry:
      auto *accel = accels[lookup_index].get_acquire ();
      if (unlikely (!accel))
      {
	accel = hb_ot_layout_lookup_accelerator_t::create (table->get_lookup (lookup_index));
	if (unlikely (!accel))
	  return nullptr;

	/* Publish; if another thread beat us to it, discard ours and
	 * re-read the winner's pointer. */
	if (unlikely (!accels[lookup_index].cmpexch (nullptr, accel)))
	{
	  accel->fini ();
	  hb_free (accel);
	  goto retry;
	}
      }

      return accel;
    }

    hb_blob_ptr_t<T> table;                                      /* Sanitized GSUB/GPOS table blob. */
    unsigned int lookup_count;                                   /* Number of slots in accels. */
    hb_atomic_t<hb_ot_layout_lookup_accelerator_t *> *accels;    /* Lazily-filled per-lookup accelerators. */
  };
   4987 
  protected:
  /* Versioned table header: all variants share the leading FixedVersion,
   * which selects which union member is valid. */
  union {
  FixedVersion<>			version;	/* Version identifier */
  GSUBGPOSVersion1_2<SmallTypes>	version1;	/* Version 1.x, 16-bit offsets */
#ifndef HB_NO_BEYOND_64K
  GSUBGPOSVersion1_2<MediumTypes>	version2;	/* Extended version, 24-bit offsets */
#endif
  } u;
  public:
  DEFINE_SIZE_MIN (4);
   4998 };
   4999 
   5000 
   5001 } /* namespace OT */
   5002 
   5003 
   5004 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */