tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

hb-aat-layout-common.hh (41519B)


      1 /*
      2 * Copyright © 2017  Google, Inc.
      3 *
      4 *  This is part of HarfBuzz, a text shaping library.
      5 *
      6 * Permission is hereby granted, without written agreement and without
      7 * license or royalty fees, to use, copy, modify, and distribute this
      8 * software and its documentation for any purpose, provided that the
      9 * above copyright notice and the following two paragraphs appear in
     10 * all copies of this software.
     11 *
     12 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
     13 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
     14 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
     15 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
     16 * DAMAGE.
     17 *
     18 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
     19 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
     20 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
     21 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
     22 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
     23 *
     24 * Google Author(s): Behdad Esfahbod
     25 */
     26 
     27 #ifndef HB_AAT_LAYOUT_COMMON_HH
     28 #define HB_AAT_LAYOUT_COMMON_HH
     29 
     30 #include "hb-aat-layout.hh"
     31 #include "hb-aat-map.hh"
     32 #include "hb-ot-layout-common.hh"
     33 #include "hb-ot-layout-gdef-table.hh"
     34 #include "hb-open-type.hh"
     35 #include "hb-cache.hh"
     36 #include "hb-bit-set.hh"
     37 #include "hb-bit-page.hh"
     38 
     39 
     40 namespace OT {
     41 struct GDEF;
     42 };
     43 
     44 namespace AAT {
     45 
     46 using namespace OT;
     47 
     48 struct ankr;
     49 
     50 using hb_aat_class_cache_t = hb_ot_layout_mapping_cache_t;
     51 
     52 struct hb_aat_scratch_t
     53 {
     54  hb_aat_scratch_t () = default;
     55  hb_aat_scratch_t (const hb_aat_scratch_t &) = delete;
     56 
     57  hb_aat_scratch_t (hb_aat_scratch_t &&o)
     58  {
     59    buffer_glyph_set.set_relaxed (o.buffer_glyph_set.get_relaxed ());
     60    o.buffer_glyph_set.set_relaxed (nullptr);
     61  }
     62  hb_aat_scratch_t & operator = (hb_aat_scratch_t &&o)
     63  {
     64    buffer_glyph_set.set_relaxed (o.buffer_glyph_set.get_relaxed ());
     65    o.buffer_glyph_set.set_relaxed (nullptr);
     66    return *this;
     67  }
     68  ~hb_aat_scratch_t ()
     69  {
     70    auto *s = buffer_glyph_set.get_relaxed ();
     71    if (unlikely (!s))
     72      return;
     73    s->fini ();
     74    hb_free (s);
     75  }
     76 
     77  hb_bit_set_t *create_buffer_glyph_set () const
     78  {
     79    hb_bit_set_t *s = buffer_glyph_set.get_acquire ();
     80    if (s && buffer_glyph_set.cmpexch (s, nullptr))
     81    {
     82      s->clear ();
     83      return s;
     84    }
     85 
     86    s = (hb_bit_set_t *) hb_calloc (1, sizeof (hb_bit_set_t));
     87    if (unlikely (!s))
     88      return nullptr;
     89    s->init ();
     90 
     91    return s;
     92  }
     93  void destroy_buffer_glyph_set (hb_bit_set_t *s) const
     94  {
     95    if (unlikely (!s))
     96      return;
     97    if (buffer_glyph_set.cmpexch (nullptr, s))
     98      return;
     99    s->fini ();
    100    hb_free (s);
    101  }
    102 
    103  mutable hb_atomic_t<hb_bit_set_t *> buffer_glyph_set;
    104 };
    105 
/* Sentinel glyph value AAT machines substitute for glyphs marked deleted. */
enum { DELETED_GLYPH = 0xFFFF };

/* Buffer scratch-flag bit recording that at least one DELETED_GLYPH was produced. */
#define HB_BUFFER_SCRATCH_FLAG_AAT_HAS_DELETED HB_BUFFER_SCRATCH_FLAG_SHAPER0
    109 
/* Context threaded through application of AAT subtables; doubles as the
 * dispatch context handed to each subtable's apply(). */
struct hb_aat_apply_context_t :
       hb_dispatch_context_t<hb_aat_apply_context_t, bool, HB_DEBUG_APPLY>
{
  const char *get_name () { return "APPLY"; }
  /* Forward to the subtable's apply(); the bool result reports whether it applied. */
  template <typename T, typename ...Ts>
  return_t dispatch (const T &obj, Ts&&... ds)
  { return obj.apply (this, std::forward<Ts> (ds)...); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  const hb_ot_shape_plan_t *plan;
  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  hb_sanitize_context_t sanitizer;
  const ankr *ankr_table;	/* Anchor-point table; installed via set_ankr_table(). */
  const OT::GDEF &gdef;
  bool has_glyph_classes;	/* If set, GDEF glyph props are copied onto produced glyphs. */
  const hb_sorted_vector_t<hb_aat_map_t::range_flags_t> *range_flags = nullptr;
  hb_mask_t subtable_flags = 0;
  bool buffer_is_reversed = false;	/* Tracks reverse_buffer() parity. */
  // Caches
  bool using_buffer_glyph_set = false;	/* True when the buffer glyph-set fast path is active. */
  hb_bit_set_t *buffer_glyph_set = nullptr;	/* Codepoints currently present in the buffer. */
  const hb_bit_set_t *first_set = nullptr;	/* NOTE(review): presumably the machine's trigger-glyph set; set by caller — confirm. */
  const hb_bit_set_t *second_set = nullptr;
  hb_aat_class_cache_t *machine_class_cache = nullptr;	/* glyph -> machine class cache. */

  /* Unused. For debug tracing only. */
  unsigned int lookup_index;

  HB_INTERNAL hb_aat_apply_context_t (const hb_ot_shape_plan_t *plan_,
				      hb_font_t *font_,
				      hb_buffer_t *buffer_,
				      hb_blob_t *blob = const_cast<hb_blob_t *> (&Null (hb_blob_t)));

  HB_INTERNAL ~hb_aat_apply_context_t ();

  HB_INTERNAL void set_ankr_table (const AAT::ankr *ankr_table_);

  void set_lookup_index (unsigned int i) { lookup_index = i; }

  /* Reverse the buffer, toggling the parity flag so it can be undone. */
  void reverse_buffer ()
  {
    buffer->reverse ();
    buffer_is_reversed = !buffer_is_reversed;
  }

  /* Populate buffer_glyph_set from the buffer.  Skipped for very short
   * buffers (len < 4), where the linear scan path below is cheaper. */
  void setup_buffer_glyph_set ()
  {
    using_buffer_glyph_set = buffer->len >= 4 && buffer_glyph_set;

    if (likely (using_buffer_glyph_set))
      buffer->collect_codepoints (*buffer_glyph_set);
  }
  /* Quick-reject test: does any glyph in the buffer occur in first_set? */
  bool buffer_intersects_machine () const
  {
    if (likely (using_buffer_glyph_set))
      return buffer_glyph_set->intersects (*first_set);

    // Faster for shorter buffers.
    for (unsigned i = 0; i < buffer->len; i++)
      if (first_set->has (buffer->info[i].codepoint))
	return true;
    return false;
  }

  /* Output `count` glyphs at the current buffer position.  DELETED_GLYPHs
   * are flagged on the buffer; otherwise GDEF props are propagated.
   * Returns false if the buffer fails to output a glyph. */
  template <typename T>
  HB_NODISCARD bool output_glyphs (unsigned int count,
				   const T *glyphs)
  {
    if (likely (using_buffer_glyph_set))
      buffer_glyph_set->add_array (glyphs, count);
    for (unsigned int i = 0; i < count; i++)
    {
      if (glyphs[i] == DELETED_GLYPH)
      {
	buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_AAT_HAS_DELETED;
	_hb_glyph_info_set_aat_deleted (&buffer->cur());
      }
      else
      {
#ifndef HB_NO_OT_LAYOUT
	if (has_glyph_classes)
	  _hb_glyph_info_set_glyph_props (&buffer->cur(),
					  gdef.get_glyph_props (glyphs[i]));
#endif
      }
      if (unlikely (!buffer->output_glyph (glyphs[i]))) return false;
    }
    return true;
  }

  /* Replace the glyph at the buffer cursor, keeping caches/flags consistent. */
  HB_NODISCARD bool replace_glyph (hb_codepoint_t glyph)
  {
    if (glyph == DELETED_GLYPH)
    {
      buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_AAT_HAS_DELETED;
      _hb_glyph_info_set_aat_deleted (&buffer->cur());
    }

    if (likely (using_buffer_glyph_set))
      buffer_glyph_set->add (glyph);
#ifndef HB_NO_OT_LAYOUT
    if (has_glyph_classes)
      _hb_glyph_info_set_glyph_props (&buffer->cur(),
				      gdef.get_glyph_props (glyph));
#endif
    return buffer->replace_glyph (glyph);
  }

  /* Replace the current glyph with the deleted-glyph sentinel. */
  HB_NODISCARD bool delete_glyph ()
  {
    buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_AAT_HAS_DELETED;
    _hb_glyph_info_set_aat_deleted (&buffer->cur());
    return buffer->replace_glyph (DELETED_GLYPH);
  }

  /* Overwrite the glyph at absolute position i, bypassing buffer output
   * machinery (no DELETED_GLYPH bookkeeping here). */
  void replace_glyph_inplace (unsigned i, hb_codepoint_t glyph)
  {
    buffer->info[i].codepoint = glyph;
    if (likely (using_buffer_glyph_set))
      buffer_glyph_set->add (glyph);
#ifndef HB_NO_OT_LAYOUT
    if (has_glyph_classes)
      _hb_glyph_info_set_glyph_props (&buffer->info[i],
				      gdef.get_glyph_props (glyph));
#endif
  }
};
    240 
    241 
    242 /*
    243 * Lookup Table
    244 */
    245 
    246 template <typename T> struct Lookup;
    247 
    248 template <typename T>
    249 struct LookupFormat0
    250 {
    251  friend struct Lookup<T>;
    252 
    253  private:
    254  const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
    255  {
    256    if (unlikely (glyph_id >= num_glyphs)) return nullptr;
    257    return &arrayZ[glyph_id];
    258  }
    259 
    260  template <typename set_t>
    261  void collect_glyphs (set_t &glyphs, unsigned num_glyphs) const
    262  {
    263    glyphs.add_range (0, num_glyphs - 1);
    264  }
    265  template <typename set_t, typename filter_t>
    266  void collect_glyphs_filtered (set_t &glyphs, unsigned num_glyphs, const filter_t &filter) const
    267  {
    268    for (unsigned i = 0; i < num_glyphs; i++)
    269      if (filter (arrayZ[i]))
    270 glyphs.add (i);
    271  }
    272 
    273  bool sanitize (hb_sanitize_context_t *c) const
    274  {
    275    TRACE_SANITIZE (this);
    276    return_trace (arrayZ.sanitize (c, c->get_num_glyphs ()));
    277  }
    278  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    279  {
    280    TRACE_SANITIZE (this);
    281    return_trace (arrayZ.sanitize (c, c->get_num_glyphs (), base));
    282  }
    283 
    284  protected:
    285  HBUINT16	format;		/* Format identifier--format = 0 */
    286  UnsizedArrayOf<T>
    287 	arrayZ;		/* Array of lookup values, indexed by glyph index. */
    288  public:
    289  DEFINE_SIZE_UNBOUNDED (2);
    290 };
    291 
    292 
    293 template <typename T>
    294 struct LookupSegmentSingle
    295 {
    296  static constexpr unsigned TerminationWordCount = 2u;
    297 
    298  int cmp (hb_codepoint_t g) const
    299  { return g < first ? -1 : g <= last ? 0 : +1 ; }
    300 
    301  template <typename set_t>
    302  void collect_glyphs (set_t &glyphs) const
    303  {
    304    if (first == DELETED_GLYPH) return;
    305    glyphs.add_range (first, last);
    306  }
    307  template <typename set_t, typename filter_t>
    308  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
    309  {
    310    if (first == DELETED_GLYPH) return;
    311    if (!filter (value)) return;
    312    glyphs.add_range (first, last);
    313  }
    314 
    315  bool sanitize (hb_sanitize_context_t *c) const
    316  {
    317    TRACE_SANITIZE (this);
    318    return_trace (c->check_struct (this) && value.sanitize (c));
    319  }
    320  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    321  {
    322    TRACE_SANITIZE (this);
    323    return_trace (c->check_struct (this) && value.sanitize (c, base));
    324  }
    325 
    326  HBGlyphID16	last;		/* Last GlyphID in this segment */
    327  HBGlyphID16	first;		/* First GlyphID in this segment */
    328  T		value;		/* The lookup value (only one) */
    329  public:
    330  DEFINE_SIZE_STATIC (4 + T::static_size);
    331 };
    332 
    333 template <typename T>
    334 struct LookupFormat2
    335 {
    336  friend struct Lookup<T>;
    337 
    338  private:
    339  const T* get_value (hb_codepoint_t glyph_id) const
    340  {
    341    const LookupSegmentSingle<T> *v = segments.bsearch (glyph_id);
    342    return v ? &v->value : nullptr;
    343  }
    344 
    345  template <typename set_t>
    346  void collect_glyphs (set_t &glyphs) const
    347  {
    348    unsigned count = segments.get_length ();
    349    for (unsigned int i = 0; i < count; i++)
    350      segments[i].collect_glyphs (glyphs);
    351  }
    352  template <typename set_t, typename filter_t>
    353  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
    354  {
    355    unsigned count = segments.get_length ();
    356    for (unsigned int i = 0; i < count; i++)
    357      segments[i].collect_glyphs_filtered (glyphs, filter);
    358  }
    359 
    360  bool sanitize (hb_sanitize_context_t *c) const
    361  {
    362    TRACE_SANITIZE (this);
    363    return_trace (segments.sanitize (c));
    364  }
    365  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    366  {
    367    TRACE_SANITIZE (this);
    368    return_trace (segments.sanitize (c, base));
    369  }
    370 
    371  protected:
    372  HBUINT16	format;		/* Format identifier--format = 2 */
    373  VarSizedBinSearchArrayOf<LookupSegmentSingle<T>>
    374 	segments;	/* The actual segments. These must already be sorted,
    375 			 * according to the first word in each one (the last
    376 			 * glyph in each segment). */
    377  public:
    378  DEFINE_SIZE_ARRAY (8, segments);
    379 };
    380 
/* One segment of a format-4 lookup: a glyph range [first, last] whose
 * per-glyph values live in an array reached through a 16-bit offset. */
template <typename T>
struct LookupSegmentArray
{
  static constexpr unsigned TerminationWordCount = 2u;

  /* Value for glyph_id when it falls inside [first, last]; the values array
   * (base+valuesZ) is indexed by glyph_id - first.  nullptr otherwise. */
  const T* get_value (hb_codepoint_t glyph_id, const void *base) const
  {
    return first <= glyph_id && glyph_id <= last ? &(base+valuesZ)[glyph_id - first] : nullptr;
  }

  /* Add the segment's whole range, unless it is the deleted-glyph sentinel. */
  template <typename set_t>
  void collect_glyphs (set_t &glyphs) const
  {
    if (first == DELETED_GLYPH) return;
    glyphs.add_range (first, last);
  }
  /* Add only the glyphs whose individual value passes the filter. */
  template <typename set_t, typename filter_t>
  void collect_glyphs_filtered (set_t &glyphs, const void *base, const filter_t &filter) const
  {
    if (first == DELETED_GLYPH) return;
    const auto &values = base+valuesZ;
    for (hb_codepoint_t i = first; i <= last; i++)
      if (filter (values[i - first]))
	glyphs.add (i);
  }

  /* Binary-search comparator over [first, last]. */
  int cmp (hb_codepoint_t g) const
  { return g < first ? -1 : g <= last ? 0 : +1; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* first <= last is checked before computing last - first + 1 below;
     * hb_barrier() follows the struct check, per this file's sanitize idiom. */
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  first <= last &&
		  valuesZ.sanitize (c, base, last - first + 1));
  }
  template <typename ...Ts>
  bool sanitize (hb_sanitize_context_t *c, const void *base, Ts&&... ds) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  first <= last &&
		  valuesZ.sanitize (c, base, last - first + 1, std::forward<Ts> (ds)...));
  }

  HBGlyphID16	last;		/* Last GlyphID in this segment */
  HBGlyphID16	first;		/* First GlyphID in this segment */
  NNOffset16To<UnsizedArrayOf<T>>
		valuesZ;	/* A 16-bit offset from the start of
				 * the table to the data. */
  public:
  DEFINE_SIZE_STATIC (6);
};
    436 
    437 template <typename T>
    438 struct LookupFormat4
    439 {
    440  friend struct Lookup<T>;
    441 
    442  private:
    443  const T* get_value (hb_codepoint_t glyph_id) const
    444  {
    445    const LookupSegmentArray<T> *v = segments.bsearch (glyph_id);
    446    return v ? v->get_value (glyph_id, this) : nullptr;
    447  }
    448 
    449  template <typename set_t>
    450  void collect_glyphs (set_t &glyphs) const
    451  {
    452    unsigned count = segments.get_length ();
    453    for (unsigned i = 0; i < count; i++)
    454      segments[i].collect_glyphs (glyphs);
    455  }
    456  template <typename set_t, typename filter_t>
    457  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
    458  {
    459    unsigned count = segments.get_length ();
    460    for (unsigned i = 0; i < count; i++)
    461      segments[i].collect_glyphs_filtered (glyphs, this, filter);
    462  }
    463 
    464  bool sanitize (hb_sanitize_context_t *c) const
    465  {
    466    TRACE_SANITIZE (this);
    467    return_trace (segments.sanitize (c, this));
    468  }
    469  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    470  {
    471    TRACE_SANITIZE (this);
    472    return_trace (segments.sanitize (c, this, base));
    473  }
    474 
    475  protected:
    476  HBUINT16	format;		/* Format identifier--format = 4 */
    477  VarSizedBinSearchArrayOf<LookupSegmentArray<T>>
    478 	segments;	/* The actual segments. These must already be sorted,
    479 			 * according to the first word in each one (the last
    480 			 * glyph in each segment). */
    481  public:
    482  DEFINE_SIZE_ARRAY (8, segments);
    483 };
    484 
    485 template <typename T>
    486 struct LookupSingle
    487 {
    488  static constexpr unsigned TerminationWordCount = 1u;
    489 
    490  int cmp (hb_codepoint_t g) const { return glyph.cmp (g); }
    491 
    492  template <typename set_t>
    493  void collect_glyphs (set_t &glyphs) const
    494  {
    495    if (glyph == DELETED_GLYPH) return;
    496    glyphs.add (glyph);
    497  }
    498  template <typename set_t, typename filter_t>
    499  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
    500  {
    501    if (glyph == DELETED_GLYPH) return;
    502    if (!filter (value)) return;
    503    glyphs.add (glyph);
    504  }
    505 
    506  bool sanitize (hb_sanitize_context_t *c) const
    507  {
    508    TRACE_SANITIZE (this);
    509    return_trace (c->check_struct (this) && value.sanitize (c));
    510  }
    511  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    512  {
    513    TRACE_SANITIZE (this);
    514    return_trace (c->check_struct (this) && value.sanitize (c, base));
    515  }
    516 
    517  HBGlyphID16	glyph;		/* Last GlyphID */
    518  T		value;		/* The lookup value (only one) */
    519  public:
    520  DEFINE_SIZE_STATIC (2 + T::static_size);
    521 };
    522 
    523 template <typename T>
    524 struct LookupFormat6
    525 {
    526  friend struct Lookup<T>;
    527 
    528  private:
    529  const T* get_value (hb_codepoint_t glyph_id) const
    530  {
    531    const LookupSingle<T> *v = entries.bsearch (glyph_id);
    532    return v ? &v->value : nullptr;
    533  }
    534 
    535  template <typename set_t>
    536  void collect_glyphs (set_t &glyphs) const
    537  {
    538    unsigned count = entries.get_length ();
    539    for (unsigned i = 0; i < count; i++)
    540      entries[i].collect_glyphs (glyphs);
    541  }
    542  template <typename set_t, typename filter_t>
    543  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
    544  {
    545    unsigned count = entries.get_length ();
    546    for (unsigned i = 0; i < count; i++)
    547      entries[i].collect_glyphs_filtered (glyphs, filter);
    548  }
    549 
    550  bool sanitize (hb_sanitize_context_t *c) const
    551  {
    552    TRACE_SANITIZE (this);
    553    return_trace (entries.sanitize (c));
    554  }
    555  bool sanitize (hb_sanitize_context_t *c, const void *base) const
    556  {
    557    TRACE_SANITIZE (this);
    558    return_trace (entries.sanitize (c, base));
    559  }
    560 
    561  protected:
    562  HBUINT16	format;		/* Format identifier--format = 6 */
    563  VarSizedBinSearchArrayOf<LookupSingle<T>>
    564 	entries;	/* The actual entries, sorted by glyph index. */
    565  public:
    566  DEFINE_SIZE_ARRAY (8, entries);
    567 };
    568 
/* Lookup format 8: a trimmed dense array covering the contiguous glyph
 * range [firstGlyph, firstGlyph + glyphCount). */
template <typename T>
struct LookupFormat8
{
  friend struct Lookup<T>;

  private:
  /* Value for glyph_id if it falls inside the trimmed range; the array is
   * indexed by glyph_id - firstGlyph.  nullptr when out of range. */
  const T* get_value (hb_codepoint_t glyph_id) const
  {
    return firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount ?
	   &valueArrayZ[glyph_id - firstGlyph] : nullptr;
  }

  /* Add the covered range, unless empty or the deleted-glyph sentinel. */
  template <typename set_t>
  void collect_glyphs (set_t &glyphs) const
  {
    if (unlikely (!glyphCount)) return;
    if (firstGlyph == DELETED_GLYPH) return;
    glyphs.add_range (firstGlyph, firstGlyph + glyphCount - 1);
  }
  /* Add only the glyphs whose value passes the filter. */
  template <typename set_t, typename filter_t>
  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
  {
    if (unlikely (!glyphCount)) return;
    if (firstGlyph == DELETED_GLYPH) return;
    const T *p = valueArrayZ.arrayZ;
    for (unsigned i = 0; i < glyphCount; i++)
      if (filter (p[i]))
	glyphs.add (firstGlyph + i);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount));
  }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && valueArrayZ.sanitize (c, glyphCount, base));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 8 */
  HBGlyphID16	firstGlyph;	/* First glyph index included in the trimmed array. */
  HBUINT16	glyphCount;	/* Total number of glyphs (equivalent to the last
				 * glyph minus the value of firstGlyph plus 1). */
  UnsizedArrayOf<T>
		valueArrayZ;	/* The lookup values (indexed by the glyph index
				 * minus the value of firstGlyph). */
  public:
  DEFINE_SIZE_ARRAY (6, valueArrayZ);
};
    621 
/* Lookup format 10: like format 8 but with values of valueSize bytes each
 * (at most 4, enforced by sanitize), assembled big-endian into an unsigned. */
template <typename T>
struct LookupFormat10
{
  friend struct Lookup<T>;

  private:
  /* Returns the value for glyph_id, or Null(T) when out of range.  Named
   * "or_null" because this format returns by value, not by pointer. */
  const typename T::type get_value_or_null (hb_codepoint_t glyph_id) const
  {
    if (!(firstGlyph <= glyph_id && glyph_id - firstGlyph < glyphCount))
      return Null (T);

    const HBUINT8 *p = &valueArrayZ[(glyph_id - firstGlyph) * valueSize];

    /* Assemble valueSize big-endian bytes into one unsigned value. */
    unsigned int v = 0;
    unsigned int count = valueSize;
    for (unsigned int i = 0; i < count; i++)
      v = (v << 8) | *p++;

    return v;
  }

  /* Add the covered range, unless empty or the deleted-glyph sentinel. */
  template <typename set_t>
  void collect_glyphs (set_t &glyphs) const
  {
    if (unlikely (!glyphCount)) return;
    if (firstGlyph == DELETED_GLYPH) return;
    glyphs.add_range (firstGlyph, firstGlyph + glyphCount - 1);
  }

  /* Decode each value in turn and add glyphs whose value passes the filter. */
  template <typename set_t, typename filter_t>
  void collect_glyphs_filtered (set_t &glyphs, const filter_t &filter) const
  {
    if (unlikely (!glyphCount)) return;
    if (firstGlyph == DELETED_GLYPH) return;
    const HBUINT8 *p = valueArrayZ.arrayZ;
    for (unsigned i = 0; i < glyphCount; i++)
    {
      unsigned int v = 0;
      unsigned int count = valueSize;
      for (unsigned int j = 0; j < count; j++)
	v = (v << 8) | *p++;
      if (filter (v))
	glyphs.add (firstGlyph + i);
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  valueSize <= 4 &&
		  valueArrayZ.sanitize (c, glyphCount * valueSize));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 10 */
  HBUINT16	valueSize;	/* Byte size of each value. */
  HBGlyphID16	firstGlyph;	/* First glyph index included in the trimmed array. */
  HBUINT16	glyphCount;	/* Total number of glyphs (equivalent to the last
				 * glyph minus the value of firstGlyph plus 1). */
  UnsizedArrayOf<HBUINT8>
		valueArrayZ;	/* The lookup values (indexed by the glyph index
				 * minus the value of firstGlyph). */
  public:
  DEFINE_SIZE_ARRAY (8, valueArrayZ);
};
    689 
/* AAT lookup table: thin dispatcher over formats 0/2/4/6/8/10, switching
 * on the leading format word.  The hb_barrier() calls after each format
 * check follow this file's sanitize-ordering idiom. */
template <typename T>
struct Lookup
{
  /* Pointer to the value for glyph_id, or nullptr when not covered.
   * Format 10 is absent here: it cannot return a pointer. */
  const T* get_value (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); return u.format0.get_value (glyph_id, num_glyphs);
    case 2: hb_barrier (); return u.format2.get_value (glyph_id);
    case 4: hb_barrier (); return u.format4.get_value (glyph_id);
    case 6: hb_barrier (); return u.format6.get_value (glyph_id);
    case 8: hb_barrier (); return u.format8.get_value (glyph_id);
    default:return nullptr;
    }
  }

  /* Value-returning variant: Null(T) when not covered.  Exists so that
   * format 10 (values wider than a T slot) can be supported. */
  const typename T::type get_value_or_null (hb_codepoint_t glyph_id, unsigned int num_glyphs) const
  {
    switch (u.format.v) {
      /* Format 10 cannot return a pointer. */
      case 10: hb_barrier (); return u.format10.get_value_or_null (glyph_id);
      default:
      const T *v = get_value (glyph_id, num_glyphs);
      return v ? *v : Null (T);
    }
  }

  /* Collect every glyph the lookup covers into `glyphs`. */
  template <typename set_t>
  void collect_glyphs (set_t &glyphs, unsigned int num_glyphs) const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); u.format0.collect_glyphs (glyphs, num_glyphs); return;
    case 2: hb_barrier (); u.format2.collect_glyphs (glyphs); return;
    case 4: hb_barrier (); u.format4.collect_glyphs (glyphs); return;
    case 6: hb_barrier (); u.format6.collect_glyphs (glyphs); return;
    case 8: hb_barrier (); u.format8.collect_glyphs (glyphs); return;
    case 10: hb_barrier (); u.format10.collect_glyphs (glyphs); return;
    default:return;
    }
  }
  /* Collect only glyphs whose mapped value passes `filter`. */
  template <typename set_t, typename filter_t>
  void collect_glyphs_filtered (set_t &glyphs, unsigned num_glyphs, const filter_t &filter) const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); u.format0.collect_glyphs_filtered (glyphs, num_glyphs, filter); return;
    case 2: hb_barrier (); u.format2.collect_glyphs_filtered (glyphs, filter); return;
    case 4: hb_barrier (); u.format4.collect_glyphs_filtered (glyphs, filter); return;
    case 6: hb_barrier (); u.format6.collect_glyphs_filtered (glyphs, filter); return;
    case 8: hb_barrier (); u.format8.collect_glyphs_filtered (glyphs, filter); return;
    case 10: hb_barrier (); u.format10.collect_glyphs_filtered (glyphs, filter); return;
    default:return;
    }
  }

  /* Class lookup for state machines: the value if covered, else outOfRange. */
  typename T::type get_class (hb_codepoint_t glyph_id,
			      unsigned int num_glyphs,
			      unsigned int outOfRange) const
  {
    const T *v = get_value (glyph_id, num_glyphs);
    return v ? *v : outOfRange;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.v.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format.v) {
    case 0: hb_barrier (); return_trace (u.format0.sanitize (c));
    case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
    case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
    case 6: hb_barrier (); return_trace (u.format6.sanitize (c));
    case 8: hb_barrier (); return_trace (u.format8.sanitize (c));
    case 10: hb_barrier (); return_trace (u.format10.sanitize (c));
    default:return_trace (true);
    }
  }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.v.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format.v) {
    case 0: hb_barrier (); return_trace (u.format0.sanitize (c, base));
    case 2: hb_barrier (); return_trace (u.format2.sanitize (c, base));
    case 4: hb_barrier (); return_trace (u.format4.sanitize (c, base));
    case 6: hb_barrier (); return_trace (u.format6.sanitize (c, base));
    case 8: hb_barrier (); return_trace (u.format8.sanitize (c, base));
    case 10: return_trace (false); /* We don't support format10 here currently. */
    default:return_trace (true);
    }
  }

  protected:
  union {
  struct { HBUINT16 v; }	format;		/* Format identifier */
  LookupFormat0<T>	format0;
  LookupFormat2<T>	format2;
  LookupFormat4<T>	format4;
  LookupFormat6<T>	format6;
  LookupFormat8<T>	format8;
  LookupFormat10<T>	format10;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format.v);
};
    795 DECLARE_NULL_NAMESPACE_BYTES_TEMPLATE1 (AAT, Lookup, 2);
    796 
    797 /*
    798 * (Extended) State Table
    799 */
    800 
/* A state-machine entry: next state, flags, and a table-specific payload. */
template <typename T>
struct Entry
{
  // This doesn't seem like it's ever called.
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Note, we don't recurse-sanitize data because we don't access it.
     * That said, in our DEFINE_SIZE_STATIC we access T::static_size,
     * which ensures that data has a simple sanitize(). To be determined
     * if I need to remove that as well.
     *
     * HOWEVER! Because we are a template, our DEFINE_SIZE_STATIC
     * assertion wouldn't be checked, hence the line below. */
    static_assert (T::static_size, "");

    return_trace (c->check_struct (this));
  }

  public:
  HBUINT16	newState;	/* Byte offset from beginning of state table
				 * to the new state. Really?!?! Or just state
				 * number?  The latter in morx for sure. */
  HBUINT16	flags;		/* Table specific. */
  T		data;		/* Optional offsets to per-glyph tables. */
  public:
  DEFINE_SIZE_STATIC (4 + T::static_size);
};
    829 
/* Payload-free entry specialization: just newState + flags. */
template <>
struct Entry<void>
{
  // Unlike the generic version above, this one does appear to be called.
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  public:
  HBUINT16	newState;	/* Byte offset from beginning of state table to the new state. */
  HBUINT16	flags;		/* Table specific. */
  public:
  DEFINE_SIZE_STATIC (4);
};
    846 
/* The predefined glyph classes of AAT state machines; font-defined
 * classes follow these four. */
enum Class
{
  CLASS_END_OF_TEXT = 0,	/* Signals end of the glyph run. */
  CLASS_OUT_OF_BOUNDS = 1,	/* Glyph not covered by the class table. */
  CLASS_DELETED_GLYPH = 2,	/* The DELETED_GLYPH sentinel (0xFFFF). */
  CLASS_END_OF_LINE = 3,
};
    854 
/* Generic AAT state-machine table: a class table mapping glyphs to classes,
 * a state array of (state x class) entry indices, and an entry array of
 * Entry<Extra> transitions.  Instantiated with ObsoleteTypes ('mort'/'kern')
 * or ExtendedTypes ('morx'/'kerx'). */
template <typename Types, typename Extra>
struct StateTable
{
 typedef typename Types::HBUINT HBUINT;
 typedef typename Types::HBUSHORT HBUSHORT;
 typedef typename Types::ClassTypeNarrow ClassType;

 enum State
 {
   STATE_START_OF_TEXT = 0,
   STATE_START_OF_LINE = 1,
 };

 /* Collect into `glyphs` every glyph whose class can do something from the
  * start-of-text state (change state or trigger an action). */
 template <typename set_t, typename table_t>
 void collect_initial_glyphs (set_t &glyphs, unsigned num_glyphs, const table_t &table) const
 {
   unsigned num_classes = nClasses;

   /* Too many classes to track with a single bit-page; fall back to
    * collecting every glyph the class table covers. */
   if (unlikely (num_classes > hb_bit_page_t::BITS))
   {
     (this+classTable).collect_glyphs (glyphs, num_glyphs);
     return;
   }

   // Collect all classes going out from the start state.
   hb_bit_page_t filter;

   for (unsigned i = 0; i < num_classes; i++)
   {
     const auto &entry = get_entry (STATE_START_OF_TEXT, i);
     /* Skip classes whose start-state transition loops back to
      * start-of-text with no (initiable) action. */
     if (new_state (entry.newState) == STATE_START_OF_TEXT &&
  !table.is_action_initiable (entry) && !table.is_actionable (entry))
continue;

     filter.add (i);
   }

   // And glyphs in those classes.

   if (filter (CLASS_DELETED_GLYPH))
     glyphs.add (DELETED_GLYPH);

   (this+classTable).collect_glyphs_filtered (glyphs, num_glyphs, filter);
 }

 /* Map a raw newState value to a state index: extended tables store indices
  * directly; obsolete tables store byte offsets into the state array
  * (possibly yielding negative indices; see the note in sanitize()). */
 int new_state (unsigned int newState) const
 { return Types::extended ? newState : ((int) newState - (int) stateArrayTable) / (int) nClasses; }

 /* Class of glyph_id, consulting/updating the optional per-shaper cache;
  * DELETED_GLYPH maps to its dedicated class. */
 unsigned int get_class (hb_codepoint_t glyph_id,
		  unsigned int num_glyphs,
		  hb_aat_class_cache_t *cache = nullptr) const
 {
   unsigned klass;
   if (cache && cache->get (glyph_id, &klass)) return klass;
   if (unlikely (glyph_id == DELETED_GLYPH)) return CLASS_DELETED_GLYPH;
   klass = (this+classTable).get_class (glyph_id, num_glyphs, CLASS_OUT_OF_BOUNDS);
   if (cache) cache->set (glyph_id, klass);
   return klass;
 }

 const Entry<Extra> *get_entries () const
 { return (this+entryTable).arrayZ; }

 /* Transition entry for (state, klass); classes out of range are clamped
  * to CLASS_OUT_OF_BOUNDS.  Array bounds were established by sanitize(). */
 const Entry<Extra> &get_entry (int state, unsigned int klass) const
 {
   unsigned n_classes = nClasses;
   if (unlikely (klass >= n_classes))
     klass = CLASS_OUT_OF_BOUNDS;

   const HBUSHORT *states = (this+stateArrayTable).arrayZ;
   const Entry<Extra> *entries = (this+entryTable).arrayZ;

   unsigned int entry = states[state * n_classes + klass];
   DEBUG_MSG (APPLY, nullptr, "e%u", entry);

   return entries[entry];
 }

 /* Validate the table, discovering reachable states and entries by
  * iterating to a fixed point; optionally reports the entry count. */
 bool sanitize (hb_sanitize_context_t *c,
	 unsigned int *num_entries_out = nullptr) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!(c->check_struct (this) &&
	    hb_barrier () &&
	    nClasses >= 4 /* Ensure pre-defined classes fit.  */ &&
	    classTable.sanitize (c, this)))) return_trace (false);

   const HBUSHORT *states = (this+stateArrayTable).arrayZ;
   const Entry<Extra> *entries = (this+entryTable).arrayZ;

   unsigned int num_classes = nClasses;
   if (unlikely (hb_unsigned_mul_overflows (num_classes, states[0].static_size)))
     return_trace (false);
   unsigned int row_stride = num_classes * states[0].static_size;

   /* Apple 'kern' table has this peculiarity:
    *
    * "Because the stateTableOffset in the state table header is (strictly
    * speaking) redundant, some 'kern' tables use it to record an initial
    * state where that should not be StartOfText. To determine if this is
    * done, calculate what the stateTableOffset should be. If it's different
    * from the actual stateTableOffset, use it as the initial state."
    *
    * We implement this by calling the initial state zero, but allow *negative*
    * states if the start state indeed was not the first state.  Since the code
    * is shared, this will also apply to 'mort' table.  The 'kerx' / 'morx'
    * tables are not affected since those address states by index, not offset.
    */

   int min_state = 0;
   int max_state = 0;
   unsigned int num_entries = 0;

   /* [state_neg, state_pos) is the range of states already swept; the loop
    * extends it until no transition discovers a state outside it. */
   int state_pos = 0;
   int state_neg = 0;
   unsigned int entry = 0;
   while (min_state < state_neg || state_pos <= max_state)
   {
     if (min_state < state_neg)
     {
/* Negative states. */
if (unlikely (hb_unsigned_mul_overflows (min_state, num_classes)))
  return_trace (false);
if (unlikely (!c->check_range (&states[min_state * num_classes],
			       -min_state,
			       row_stride)))
  return_trace (false);
if ((c->max_ops -= state_neg - min_state) <= 0)
  return_trace (false);
{ /* Sweep new states. */
  const HBUSHORT *stop = &states[min_state * num_classes];
  if (unlikely (stop > states))
    return_trace (false);
  for (const HBUSHORT *p = states; stop < p; p--)
    num_entries = hb_max (num_entries, *(p - 1) + 1u);
  state_neg = min_state;
}
     }

     if (state_pos <= max_state)
     {
/* Positive states. */
if (unlikely (!c->check_range (states,
			       max_state + 1,
			       row_stride)))
  return_trace (false);
if ((c->max_ops -= max_state - state_pos + 1) <= 0)
  return_trace (false);
{ /* Sweep new states. */
  if (unlikely (hb_unsigned_mul_overflows ((max_state + 1), num_classes)))
    return_trace (false);
  const HBUSHORT *stop = &states[(max_state + 1) * num_classes];
  if (unlikely (stop < states))
    return_trace (false);
  for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
    num_entries = hb_max (num_entries, *p + 1u);
  state_pos = max_state + 1;
}
     }

     if (unlikely (!c->check_array (entries, num_entries)))
return_trace (false);
     if ((c->max_ops -= num_entries - entry) <= 0)
return_trace (false);
     { /* Sweep new entries. */
const Entry<Extra> *stop = &entries[num_entries];
for (const Entry<Extra> *p = &entries[entry]; p < stop; p++)
{
  int newState = new_state (p->newState);
  min_state = hb_min (min_state, newState);
  max_state = hb_max (max_state, newState);
}
entry = num_entries;
     }
   }

   if (num_entries_out)
     *num_entries_out = num_entries;

   return_trace (true);
 }

 protected:
 HBUINT	nClasses;	/* Number of classes, which is the number of indices
			 * in a single line in the state array. */
 NNOffsetTo<ClassType, HBUINT>
	classTable;	/* Offset to the class table. */
 NNOffsetTo<UnsizedArrayOf<HBUSHORT>, HBUINT>
	stateArrayTable;/* Offset to the state array. */
 NNOffsetTo<UnsizedArrayOf<Entry<Extra>>, HBUINT>
	entryTable;	/* Offset to the entry array. */

 public:
 DEFINE_SIZE_STATIC (4 * sizeof (HBUINT));
};
   1050 
   1051 template <typename HBUCHAR>
   1052 struct ClassTable
   1053 {
   1054  unsigned int get_class (hb_codepoint_t glyph_id, unsigned int outOfRange) const
   1055  {
   1056    unsigned int i = glyph_id - firstGlyph;
   1057    return i >= classArray.len ? outOfRange : classArray.arrayZ[i];
   1058  }
   1059  unsigned int get_class (hb_codepoint_t glyph_id,
   1060 		  unsigned int num_glyphs HB_UNUSED,
   1061 		  unsigned int outOfRange) const
   1062  {
   1063    return get_class (glyph_id, outOfRange);
   1064  }
   1065 
   1066  template <typename set_t>
   1067  void collect_glyphs (set_t &glyphs, unsigned num_glyphs) const
   1068  {
   1069    for (unsigned i = 0; i < classArray.len; i++)
   1070      if (classArray.arrayZ[i] != CLASS_OUT_OF_BOUNDS)
   1071 glyphs.add (firstGlyph + i);
   1072  }
   1073  template <typename set_t, typename filter_t>
   1074  void collect_glyphs_filtered (set_t &glyphs, unsigned num_glyphs, const filter_t &filter) const
   1075  {
   1076    for (unsigned i = 0; i < classArray.len; i++)
   1077      if (filter (classArray.arrayZ[i]))
   1078 glyphs.add (firstGlyph + i);
   1079  }
   1080 
   1081  bool sanitize (hb_sanitize_context_t *c) const
   1082  {
   1083    TRACE_SANITIZE (this);
   1084    return_trace (c->check_struct (this) && classArray.sanitize (c));
   1085  }
   1086  protected:
   1087  HBGlyphID16		firstGlyph;	/* First glyph index included in the trimmed array. */
   1088  Array16Of<HBUCHAR>	classArray;	/* The class codes (indexed by glyph index minus
   1089 				 * firstGlyph). */
   1090  public:
   1091  DEFINE_SIZE_ARRAY (4, classArray);
   1092 };
   1093 
   1094 struct SubtableGlyphCoverage
   1095 {
   1096  bool sanitize (hb_sanitize_context_t *c, unsigned subtable_count) const
   1097  {
   1098    TRACE_SANITIZE (this);
   1099 
   1100    if (unlikely (!c->check_array (&subtableOffsets, subtable_count)))
   1101      return_trace (false);
   1102 
   1103    unsigned bytes = (c->get_num_glyphs () + CHAR_BIT - 1) / CHAR_BIT;
   1104    for (unsigned i = 0; i < subtable_count; i++)
   1105    {
   1106      uint32_t offset = (uint32_t) subtableOffsets[i];
   1107      // A font file called SFNSDisplay.ttf has value 0xFFFFFFFF in the offsets.
   1108      // Just ignore it.
   1109      if (offset == 0 || offset == 0xFFFFFFFF)
   1110        continue;
   1111      if (unlikely (!subtableOffsets[i].sanitize (c, this, bytes)))
   1112        return_trace (false);
   1113    }
   1114 
   1115    return_trace (true);
   1116  }
   1117  protected:
   1118  UnsizedArrayOf<NNOffset32To<UnsizedArrayOf<HBUINT8>>> subtableOffsets;
   1119 				    /* Array of offsets from the beginning of the
   1120 				     * subtable glyph coverage table to the glyph
   1121 				     * coverage bitfield for a given subtable; there
   1122 				     * is one offset for each subtable in the chain */
   1123  /* UnsizedArrayOf<HBUINT8> coverageBitfields; *//* The individual coverage bitfields. */
   1124  public:
   1125  DEFINE_SIZE_ARRAY (0, subtableOffsets);
   1126 };
   1127 
   1128 struct ObsoleteTypes
   1129 {
   1130  static constexpr bool extended = false;
   1131  typedef HBUINT16 HBUINT;
   1132  typedef HBUINT8 HBUSHORT;
   1133  typedef ClassTable<HBUINT8> ClassTypeNarrow;
   1134  typedef ClassTable<HBUINT16> ClassTypeWide;
   1135 
   1136  template <typename T>
   1137  static unsigned int offsetToIndex (unsigned int offset,
   1138 			     const void *base,
   1139 			     const T *array)
   1140  {
   1141    /* https://github.com/harfbuzz/harfbuzz/issues/3483 */
   1142    /* If offset is less than base, return an offset that would
   1143     * result in an address half a 32bit address-space away,
   1144     * to make sure sanitize fails even on 32bit builds. */
   1145    if (unlikely (offset < unsigned ((const char *) array - (const char *) base)))
   1146      return INT_MAX / T::static_size;
   1147 
   1148    /* https://github.com/harfbuzz/harfbuzz/issues/2816 */
   1149    return (offset - unsigned ((const char *) array - (const char *) base)) / T::static_size;
   1150  }
   1151  template <typename T>
   1152  static unsigned int byteOffsetToIndex (unsigned int offset,
   1153 				 const void *base,
   1154 				 const T *array)
   1155  {
   1156    return offsetToIndex (offset, base, array);
   1157  }
   1158  template <typename T>
   1159  static unsigned int wordOffsetToIndex (unsigned int offset,
   1160 				 const void *base,
   1161 				 const T *array)
   1162  {
   1163    return offsetToIndex (2 * offset, base, array);
   1164  }
   1165 };
   1166 struct ExtendedTypes
   1167 {
   1168  static constexpr bool extended = true;
   1169  typedef HBUINT32 HBUINT;
   1170  typedef HBUINT16 HBUSHORT;
   1171  typedef Lookup<HBUINT16> ClassTypeNarrow;
   1172  typedef Lookup<HBUINT16> ClassTypeWide;
   1173 
   1174  template <typename T>
   1175  static unsigned int offsetToIndex (unsigned int offset,
   1176 			     const void *base HB_UNUSED,
   1177 			     const T *array HB_UNUSED)
   1178  {
   1179    return offset;
   1180  }
   1181  template <typename T>
   1182  static unsigned int byteOffsetToIndex (unsigned int offset,
   1183 				 const void *base HB_UNUSED,
   1184 				 const T *array HB_UNUSED)
   1185  {
   1186    return offset / 2;
   1187  }
   1188  template <typename T>
   1189  static unsigned int wordOffsetToIndex (unsigned int offset,
   1190 				 const void *base HB_UNUSED,
   1191 				 const T *array HB_UNUSED)
   1192  {
   1193    return offset;
   1194  }
   1195 };
   1196 
/* Drives a StateTable over a buffer: classifies each glyph, walks the state
 * machine, and calls c->transition() for every entry taken. */
template <typename Types, typename EntryData, typename Flags>
struct StateTableDriver
{
 using StateTableT = StateTable<Types, EntryData>;
 using EntryT = Entry<EntryData>;

 StateTableDriver (const StateTableT &machine_,
	    hb_face_t *face_) :
      machine (machine_),
      num_glyphs (face_->get_num_glyphs ()) {}

 /* Run the machine over ac->buffer.  context_t supplies in_place, table
  * (for is_actionable checks) and transition(). */
 template <typename context_t>
 void drive (context_t *c, hb_aat_apply_context_t *ac)
 {
   hb_buffer_t *buffer = ac->buffer;

   if (!c->in_place)
     buffer->clear_output ();

   int state = StateTableT::STATE_START_OF_TEXT;
   // If there's only one range, we already checked the flag.
   auto *last_range = ac->range_flags && (ac->range_flags->length > 1) ? &(*ac->range_flags)[0] : nullptr;
   /* Whether the start state has no end-of-text action; precondition for
    * the fast path below. */
   const bool start_state_safe_to_break_eot =
     !c->table->is_actionable (machine.get_entry (StateTableT::STATE_START_OF_TEXT, CLASS_END_OF_TEXT));
   for (buffer->idx = 0; buffer->successful;)
   {
     /* Past the last glyph the machine is fed CLASS_END_OF_TEXT. */
     unsigned int klass = likely (buffer->idx < buffer->len) ?
		   machine.get_class (buffer->cur().codepoint, num_glyphs, ac->machine_class_cache) :
		   (unsigned) CLASS_END_OF_TEXT;
   resume:
     DEBUG_MSG (APPLY, nullptr, "c%u at %u", klass, buffer->idx);
     const EntryT &entry = machine.get_entry (state, klass);
     const int next_state = machine.new_state (entry.newState);

     bool is_not_epsilon_transition = !(entry.flags & Flags::DontAdvance);
     bool is_not_actionable = !c->table->is_actionable (entry);

     /* Clusters in ranges where this subtable's flags are off reset the
      * machine and pass through untouched. */
     if (unlikely (last_range))
     {
/* This block is copied in NoncontextualSubtable::apply. Keep in sync. */
auto *range = last_range;
if (buffer->idx < buffer->len)
{
  unsigned cluster = buffer->cur().cluster;
  while (cluster < range->cluster_first)
    range--;
  while (cluster > range->cluster_last)
    range++;


  last_range = range;
}
if (!(range->flags & ac->subtable_flags))
{
  if (buffer->idx == buffer->len)
    break;

  state = StateTableT::STATE_START_OF_TEXT;
  (void) buffer->next_glyph ();
  continue;
}
     }
     else
     {
// Fast path for when transitioning from start-state to start-state with
// no action and advancing. Do so as long as the class remains the same.
// This is common with runs of non-actionable glyphs.

bool is_null_transition = state == StateTableT::STATE_START_OF_TEXT &&
			  next_state == StateTableT::STATE_START_OF_TEXT &&
			  start_state_safe_to_break_eot &&
			  is_not_actionable &&
			  is_not_epsilon_transition &&
			  !last_range;

if (is_null_transition)
{
  unsigned old_klass = klass;
  do
  {
    c->transition (buffer, this, entry);

    if (buffer->idx == buffer->len || !buffer->successful)
      break;

    (void) buffer->next_glyph ();

    klass = likely (buffer->idx < buffer->len) ?
	     machine.get_class (buffer->cur().codepoint, num_glyphs, ac->machine_class_cache) :
	     (unsigned) CLASS_END_OF_TEXT;
  } while (klass == old_klass);

  if (buffer->idx == buffer->len || !buffer->successful)
    break;

  /* Class changed: re-enter the slow path without reclassifying. */
  goto resume;
}
     }

     /* Conditions under which it's guaranteed safe-to-break before current glyph:
      *
      * 1. There was no action in this transition; and
      *
      * 2. If we break before current glyph, the results will be the same. That
      *    is guaranteed if:
      *
      *    2a. We were already in start-of-text state; or
      *
      *    2b. We are epsilon-transitioning to start-of-text state; or
      *
      *    2c. Starting from start-of-text state seeing current glyph:
      *
      *        2c'. There won't be any actions; and
      *
      *        2c". We would end up in the same state that we were going to end up
      *             in now, including whether epsilon-transitioning.
      *
      *    and
      *
      * 3. If we break before current glyph, there won't be any end-of-text action
      *    after previous glyph.
      *
      * This triples the transitions we need to look up, but is worth returning
      * granular unsafe-to-break results. See eg.:
      *
      *   https://github.com/harfbuzz/harfbuzz/issues/2860
      */
     const EntryT *wouldbe_entry;
     bool is_safe_to_break =
     (
         /* 1. */
         !c->table->is_actionable (entry) &&

         /* 2. */
         // This one is meh, I know...
  (
                state == StateTableT::STATE_START_OF_TEXT
             || ((entry.flags & Flags::DontAdvance) && next_state == StateTableT::STATE_START_OF_TEXT)
             || (
	    /* 2c. */
	    wouldbe_entry = &machine.get_entry(StateTableT::STATE_START_OF_TEXT, klass)
	    ,
	    /* 2c'. */
	    !c->table->is_actionable (*wouldbe_entry) &&
	    /* 2c". */
	    (
	      next_state == machine.new_state(wouldbe_entry->newState) &&
	      (entry.flags & Flags::DontAdvance) == (wouldbe_entry->flags & Flags::DontAdvance)
	    )
	 )
  ) &&

         /* 3. */
         !c->table->is_actionable (machine.get_entry (state, CLASS_END_OF_TEXT))
     );

     if (!is_safe_to_break && buffer->backtrack_len () && buffer->idx < buffer->len)
buffer->unsafe_to_break_from_outbuffer (buffer->backtrack_len () - 1, buffer->idx + 1);

     c->transition (buffer, this, entry);

     state = next_state;
     DEBUG_MSG (APPLY, nullptr, "s%d", state);

     if (buffer->idx == buffer->len)
break;

     /* Advance unless DontAdvance; once max_ops is exhausted, force the
      * advance anyway so malicious fonts cannot loop forever. */
     if (is_not_epsilon_transition || buffer->max_ops-- <= 0)
(void) buffer->next_glyph ();
   }

   if (!c->in_place)
     buffer->sync ();
 }

 public:
 const StateTableT &machine;
 unsigned int num_glyphs;
};
   1376 
   1377 
   1378 } /* namespace AAT */
   1379 
   1380 
   1381 #endif /* HB_AAT_LAYOUT_COMMON_HH */