tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

hb-ot-layout-common.hh (148478B)


      1 /*
      2 * Copyright © 2007,2008,2009  Red Hat, Inc.
      3 * Copyright © 2010,2012  Google, Inc.
      4 *
      5 *  This is part of HarfBuzz, a text shaping library.
      6 *
      7 * Permission is hereby granted, without written agreement and without
      8 * license or royalty fees, to use, copy, modify, and distribute this
      9 * software and its documentation for any purpose, provided that the
     10 * above copyright notice and the following two paragraphs appear in
     11 * all copies of this software.
     12 *
     13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
     14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
     15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
     16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
     17 * DAMAGE.
     18 *
     19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
     20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
     21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
     22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
     23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
     24 *
     25 * Red Hat Author(s): Behdad Esfahbod
     26 * Google Author(s): Behdad Esfahbod
     27 */
     28 
     29 #ifndef HB_OT_LAYOUT_COMMON_HH
     30 #define HB_OT_LAYOUT_COMMON_HH
     31 
     32 #include "hb.hh"
     33 #include "hb-ot-layout.hh"
     34 #include "hb-open-type.hh"
     35 #include "hb-set.hh"
     36 #include "hb-bimap.hh"
     37 #include "hb-cache.hh"
     38 
     39 #include "OT/Layout/Common/Coverage.hh"
     40 #include "OT/Layout/types.hh"
     41 
     42 // TODO(garretrieger): cleanup these after migration.
     43 using OT::Layout::Common::Coverage;
     44 using OT::Layout::Common::RangeRecord;
     45 using OT::Layout::SmallTypes;
     46 using OT::Layout::MediumTypes;
     47 
     48 
     49 namespace OT {
     50 
/* Forward declarations: these ClassDef serialization helpers are defined
 * later in this file but are referenced before their definitions. */
template<typename Iterator>
static inline bool ClassDef_serialize (hb_serialize_context_t *c,
			       Iterator it);

/* Remaps class values through klass_map (optionally treating class zero
 * specially) and serializes the result; see the definition below. */
static bool ClassDef_remap_and_serialize (
   hb_serialize_context_t *c,
   const hb_set_t &klasses,
   bool use_class_zero,
   hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
   hb_map_t *klass_map /*IN/OUT*/);
     61 
/* Context threaded through the collection of feature substitutions under
 * variation conditions (FeatureVariations).  The first group of members
 * mirrors state kept by the subset plan; the second group (marked below)
 * is scratch state for the traversal itself. */
struct hb_collect_feature_substitutes_with_var_context_t
{
 const hb_map_t *axes_index_tag_map;   // NOTE(review): presumably axis index -> axis tag; confirm at fill site
 const hb_hashmap_t<hb_tag_t, Triple> *axes_location;
 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *record_cond_idx_map;
 hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
 hb_set_t& catch_all_record_feature_idxes;

 // not stored in subset_plan
 hb_set_t *feature_indices;
 bool apply;
 bool variation_applied;
 bool universal;
 unsigned cur_record_idx;
 hb_hashmap_t<hb::shared_ptr<hb_map_t>, unsigned> *conditionset_map;
};
     78 
     79 struct hb_prune_langsys_context_t
     80 {
     81  hb_prune_langsys_context_t (const void         *table_,
     82                              hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map_,
     83                              const hb_map_t     *duplicate_feature_map_,
     84                              hb_set_t           *new_collected_feature_indexes_)
     85      :table (table_),
     86      script_langsys_map (script_langsys_map_),
     87      duplicate_feature_map (duplicate_feature_map_),
     88      new_feature_indexes (new_collected_feature_indexes_),
     89      script_count (0),langsys_feature_count (0) {}
     90 
     91  bool visitScript ()
     92  { return script_count++ < HB_MAX_SCRIPTS; }
     93 
     94  bool visitLangsys (unsigned feature_count)
     95  {
     96    langsys_feature_count += feature_count;
     97    return langsys_feature_count < HB_MAX_LANGSYS_FEATURE_COUNT;
     98  }
     99 
    100  public:
    101  const void *table;
    102  hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
    103  const hb_map_t     *duplicate_feature_map;
    104  hb_set_t           *new_feature_indexes;
    105 
    106  private:
    107  unsigned script_count;
    108  unsigned langsys_feature_count;
    109 };
    110 
/* Dispatch context carried through subsetting of a layout (GSUB/GPOS)
 * table.  The visit* guards below cap how much of a possibly-malicious
 * table is walked; the constructor wires the per-table maps out of the
 * subset plan according to the table tag. */
struct hb_subset_layout_context_t :
 hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
 const char *get_name () { return "SUBSET_LAYOUT"; }
 static return_t default_return_value () { return hb_empty_t (); }

 /* Each call consumes one unit of the HB_MAX_SCRIPTS budget. */
 bool visitScript ()
 {
   return script_count++ < HB_MAX_SCRIPTS;
 }

 /* Each call consumes one unit of the HB_MAX_LANGSYS budget. */
 bool visitLangSys ()
 {
   return langsys_count++ < HB_MAX_LANGSYS;
 }

 /* Accumulates `count` feature indices against HB_MAX_FEATURE_INDICES. */
 bool visitFeatureIndex (int count)
 {
   feature_index_count += count;
   return feature_index_count < HB_MAX_FEATURE_INDICES;
 }

 /* One lookup index visited per call, capped at HB_MAX_LOOKUP_VISIT_COUNT. */
 bool visitLookupIndex()
 {
   lookup_index_count++;
   return lookup_index_count < HB_MAX_LOOKUP_VISIT_COUNT;
 }

 hb_subset_context_t *subset_context;
 const hb_tag_t table_tag;
 const hb_map_t *lookup_index_map;
 const hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map;
 const hb_map_t *feature_index_map;
 const hb_map_t *feature_map_w_duplicates;
 const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map;
 hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map;
 const hb_set_t *catch_all_record_feature_idxes;
 const hb_hashmap_t<unsigned, hb_pair_t<const void*, const void*>> *feature_idx_tag_map;

 unsigned cur_script_index;
 unsigned cur_feature_var_record_idx;

 /* Selects the GSUB or GPOS flavor of every plan-owned map based on tag_.
  * feature_record_cond_idx_map is left null unless the plan specifies
  * user axes locations (i.e. instancing is requested). */
 hb_subset_layout_context_t (hb_subset_context_t *c_,
		      hb_tag_t tag_) :
			subset_context (c_),
			table_tag (tag_),
			cur_script_index (0xFFFFu),
			cur_feature_var_record_idx (0u),
			script_count (0),
			langsys_count (0),
			feature_index_count (0),
			lookup_index_count (0)
 {
   if (tag_ == HB_OT_TAG_GSUB)
   {
     lookup_index_map = &c_->plan->gsub_lookups;
     script_langsys_map = &c_->plan->gsub_langsys;
     feature_index_map = &c_->plan->gsub_features;
     feature_map_w_duplicates = &c_->plan->gsub_features_w_duplicates;
     feature_substitutes_map = &c_->plan->gsub_feature_substitutes_map;
     feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gsub_feature_record_cond_idx_map;
     catch_all_record_feature_idxes = &c_->plan->gsub_old_features;
     feature_idx_tag_map = &c_->plan->gsub_old_feature_idx_tag_map;
   }
   else
   {
     lookup_index_map = &c_->plan->gpos_lookups;
     script_langsys_map = &c_->plan->gpos_langsys;
     feature_index_map = &c_->plan->gpos_features;
     feature_map_w_duplicates = &c_->plan->gpos_features_w_duplicates;
     feature_substitutes_map = &c_->plan->gpos_feature_substitutes_map;
     feature_record_cond_idx_map = c_->plan->user_axes_location.is_empty () ? nullptr : &c_->plan->gpos_feature_record_cond_idx_map;
     catch_all_record_feature_idxes = &c_->plan->gpos_old_features;
     feature_idx_tag_map = &c_->plan->gpos_old_feature_idx_tag_map;
   }
 }

 private:
 unsigned script_count;
 unsigned langsys_count;
 unsigned feature_index_count;
 unsigned lookup_index_count;
};
    194 
struct ItemVariationStore;
/* Dispatch context for collect_variation_indices: every dispatched object
 * reports the layout variation (delta-set) indices it references into
 * layout_variation_indices.  glyph_set and gpos_lookups are filters the
 * visited implementations may consult (their exact semantics live in those
 * collect_variation_indices implementations, not here). */
struct hb_collect_variation_indices_context_t :
      hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
 template <typename T>
 return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
 static return_t default_return_value () { return hb_empty_t (); }

 hb_set_t *layout_variation_indices;
 const hb_set_t *glyph_set;
 const hb_map_t *gpos_lookups;

 hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
				  const hb_set_t *glyph_set_,
				  const hb_map_t *gpos_lookups_) :
				layout_variation_indices (layout_variation_indices_),
				glyph_set (glyph_set_),
				gpos_lookups (gpos_lookups_) {}
};
    214 
    215 template<typename OutputArray>
    216 struct subset_offset_array_t
    217 {
    218  subset_offset_array_t (hb_subset_context_t *subset_context_,
    219 		 OutputArray& out_,
    220 		 const void *base_) : subset_context (subset_context_),
    221 				      out (out_), base (base_) {}
    222 
    223  template <typename T>
    224  bool operator () (T&& offset)
    225  {
    226    auto snap = subset_context->serializer->snapshot ();
    227    auto *o = out.serialize_append (subset_context->serializer);
    228    if (unlikely (!o)) return false;
    229    bool ret = o->serialize_subset (subset_context, offset, base);
    230    if (!ret)
    231    {
    232      out.pop ();
    233      subset_context->serializer->revert (snap);
    234    }
    235    return ret;
    236  }
    237 
    238  private:
    239  hb_subset_context_t *subset_context;
    240  OutputArray &out;
    241  const void *base;
    242 };
    243 
    244 
    245 template<typename OutputArray, typename Arg>
    246 struct subset_offset_array_arg_t
    247 {
    248  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
    249 		     OutputArray& out_,
    250 		     const void *base_,
    251 		     Arg &&arg_) : subset_context (subset_context_), out (out_),
    252 				  base (base_), arg (arg_) {}
    253 
    254  template <typename T>
    255  bool operator () (T&& offset)
    256  {
    257    auto snap = subset_context->serializer->snapshot ();
    258    auto *o = out.serialize_append (subset_context->serializer);
    259    if (unlikely (!o)) return false;
    260    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    261    if (!ret)
    262    {
    263      out.pop ();
    264      subset_context->serializer->revert (snap);
    265    }
    266    return ret;
    267  }
    268 
    269  private:
    270  hb_subset_context_t *subset_context;
    271  OutputArray &out;
    272  const void *base;
    273  Arg &&arg;
    274 };
    275 
    276 /*
    277 * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
    278 * and discards the offset in the array if the subset operation results in an empty
    279 * thing.
    280 */
struct
{
 /* Binds context/output/base into a subset_offset_array_t functor,
  * suitable for use with hb_iter pipelines. */
 template<typename OutputArray>
 subset_offset_array_t<OutputArray>
 operator () (hb_subset_context_t *subset_context, OutputArray& out,
       const void *base) const
 { return subset_offset_array_t<OutputArray> (subset_context, out, base); }

 /* Variant with one extra argument passed to serialize_subset */
 template<typename OutputArray, typename Arg>
 subset_offset_array_arg_t<OutputArray, Arg>
 operator () (hb_subset_context_t *subset_context, OutputArray& out,
       const void *base, Arg &&arg) const
 { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
}
HB_FUNCOBJ (subset_offset_array);
    297 
    298 template<typename OutputArray>
    299 struct subset_record_array_t
    300 {
    301  subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
    302 		 const void *base_) : subset_layout_context (c_),
    303 				      out (out_), base (base_) {}
    304 
    305  template <typename T>
    306  void
    307  operator () (T&& record)
    308  {
    309    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    310    bool ret = record.subset (subset_layout_context, base);
    311    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    312    else out->len++;
    313  }
    314 
    315  private:
    316  hb_subset_layout_context_t *subset_layout_context;
    317  OutputArray *out;
    318  const void *base;
    319 };
    320 
    321 template<typename OutputArray, typename Arg>
    322 struct subset_record_array_arg_t
    323 {
    324  subset_record_array_arg_t (hb_subset_layout_context_t *c_, OutputArray* out_,
    325 		     const void *base_,
    326 		     Arg &&arg_) : subset_layout_context (c_),
    327 				   out (out_), base (base_), arg (arg_) {}
    328 
    329  template <typename T>
    330  void
    331  operator () (T&& record)
    332  {
    333    auto snap = subset_layout_context->subset_context->serializer->snapshot ();
    334    bool ret = record.subset (subset_layout_context, base, arg);
    335    if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
    336    else out->len++;
    337  }
    338 
    339  private:
    340  hb_subset_layout_context_t *subset_layout_context;
    341  OutputArray *out;
    342  const void *base;
    343  Arg &&arg;
    344 };
    345 
    346 /*
    347 * Helper to subset a RecordList/record array. Subsets each Record in the array and
    348 * discards the record if the subset operation returns false.
    349 */
struct
{
 /* Binds layout context/output/base into a subset_record_array_t functor,
  * suitable for use with hb_iter pipelines. */
 template<typename OutputArray>
 subset_record_array_t<OutputArray>
 operator () (hb_subset_layout_context_t *c, OutputArray* out,
       const void *base) const
 { return subset_record_array_t<OutputArray> (c, out, base); }

 /* Variant with one extra argument passed to subset */
 template<typename OutputArray, typename Arg>
 subset_record_array_arg_t<OutputArray, Arg>
 operator () (hb_subset_layout_context_t *c, OutputArray* out,
              const void *base, Arg &&arg) const
 { return subset_record_array_arg_t<OutputArray, Arg> (c, out, base, arg); }
}
HB_FUNCOBJ (subset_record_array);
    366 
    367 
    368 template<typename OutputArray>
    369 struct serialize_math_record_array_t
    370 {
    371  serialize_math_record_array_t (hb_serialize_context_t *serialize_context_,
    372                         OutputArray& out_,
    373                         const void *base_) : serialize_context (serialize_context_),
    374                                              out (out_), base (base_) {}
    375 
    376  template <typename T>
    377  bool operator () (T&& record)
    378  {
    379    if (!serialize_context->copy (record, base)) return false;
    380    out.len++;
    381    return true;
    382  }
    383 
    384  private:
    385  hb_serialize_context_t *serialize_context;
    386  OutputArray &out;
    387  const void *base;
    388 };
    389 
    390 /*
    391 * Helper to serialize an array of MATH records.
    392 */
struct
{
 /* Binds serializer/output/base into a serialize_math_record_array_t
  * functor, suitable for use with hb_iter pipelines. */
 template<typename OutputArray>
 serialize_math_record_array_t<OutputArray>
 operator () (hb_serialize_context_t *serialize_context, OutputArray& out,
              const void *base) const
 { return serialize_math_record_array_t<OutputArray> (serialize_context, out, base); }

}
HB_FUNCOBJ (serialize_math_record_array);
    403 
    404 /*
    405 *
    406 * OpenType Layout Common Table Formats
    407 *
    408 */
    409 
    410 
    411 /*
    412 * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
    413 */
    414 
/* Array of 16-bit indices (feature/lookup indices) with helpers for
 * intersection tests, subset serialization and bulk extraction. */
struct IndexArray : Array16Of<Index>
{
 /* True if any index in the array tests positive against `indexes`
  * (the map is used as a membership predicate). */
 bool intersects (const hb_map_t *indexes) const
 { return hb_any (*this, indexes); }

 /* Serializes the indices produced by `it`, stopping early once the
  * layout context's lookup-visit budget (visitLookupIndex) runs out. */
 template <typename Iterator,
    hb_requires (hb_is_iterator (Iterator))>
 void serialize (hb_serialize_context_t *c,
	  hb_subset_layout_context_t *l,
	  Iterator it)
 {
   if (!it) return;
   if (unlikely (!c->extend_min ((*this)))) return;

   for (const auto _ : it)
   {
     if (!l->visitLookupIndex()) break;

     Index i;
     i = _;
     c->copy (i);
     this->len++;
   }
 }

 /* Standard two-call API: copies up to *_count indices starting at
  * start_offset into _indexes (sub_array clamps *_count to what is
  * actually available), and returns the total number of indices. */
 unsigned int get_indexes (unsigned int start_offset,
		    unsigned int *_count /* IN/OUT */,
		    unsigned int *_indexes /* OUT */) const
 {
   if (_count)
   {
     + this->as_array ().sub_array (start_offset, _count)
     | hb_sink (hb_array (_indexes, *_count))
     ;
   }
   return this->len;
 }

 /* Adds every index in the array to `output`. */
 void add_indexes_to (hb_set_t* output /* OUT */) const
 {
   output->add_array (as_array ());
 }
};
    458 
    459 
    460 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
/* Feature parameters for the 'size' feature.  Note: the field order and
 * sizes below are wire format; see DEFINE_SIZE_STATIC at the bottom. */
struct FeatureParamsSize
{
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!c->check_struct (this))) return_trace (false);
   hb_barrier ();

   /* This subtable has some "history", if you will.  Some earlier versions of
    * Adobe tools calculated the offset of the FeatureParams subtable from the
    * beginning of the FeatureList table!  Now, that is dealt with in the
    * Feature implementation.  But we still need to be able to tell junk from
    * real data.  Note: We don't check that the nameID actually exists.
    *
    * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
    *
    * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
    * coming out soon, and that the makeotf program will build a font with a
    * 'size' feature that is correct by the specification.
    *
    * The specification for this feature tag is in the "OpenType Layout Tag
    * Registry". You can see a copy of this at:
    * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
    *
    * Here is one set of rules to determine if the 'size' feature is built
    * correctly, or as by the older versions of MakeOTF. You may be able to do
    * better.
    *
    * Assume that the offset to the size feature is according to specification,
    * and make the following value checks. If it fails, assume the size
    * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
    * If this fails, reject the 'size' feature. The older makeOTF's calculated the
    * offset from the beginning of the FeatureList table, rather than from the
    * beginning of the 'size' Feature table.
    *
    * If "design size" == 0:
    *     fails check
    *
    * Else if ("subfamily identifier" == 0 and
    *     "range start" == 0 and
    *     "range end" == 0 and
    *     "range start" == 0 and
    *     "menu name ID" == 0)
    *     passes check: this is the format used when there is a design size
    * specified, but there is no recommended size range.
    *
    * Else if ("design size" <  "range start" or
    *     "design size" >   "range end" or
    *     "range end" <= "range start" or
    *     "menu name ID"  < 256 or
    *     "menu name ID"  > 32767 or
    *     menu name ID is not a name ID which is actually in the name table)
    *     fails test
    * Else
    *     passes test.
    */

   if (!designSize)
     return_trace (false);
   else if (subfamilyID == 0 &&
     subfamilyNameID == 0 &&
     rangeStart == 0 &&
     rangeEnd == 0)
     return_trace (true);
   else if (designSize < rangeStart ||
     designSize > rangeEnd ||
     subfamilyNameID < 256 ||
     subfamilyNameID > 32767)
     return_trace (false);
   else
     return_trace (true);
 }

 /* Retains the subfamily menu-name ID so the 'name' table subsetter
  * keeps its string. */
 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
 { nameids_to_retain->add (subfamilyNameID); }

 /* Fixed-size table: subsetting is a plain byte copy. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   return_trace ((bool) c->serializer->embed (*this));
 }

 HBUINT16	designSize;	/* Represents the design size in 720/inch
			 * units (decipoints).  The design size entry
			 * must be non-zero.  When there is a design
			 * size but no recommended size range, the
			 * rest of the array will consist of zeros. */
 HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
			 * as an identifier that associates fonts
			 * in a subfamily. All fonts which share a
			 * Preferred or Font Family name and which
			 * differ only by size range shall have the
			 * same subfamily value, and no fonts which
			 * differ in weight or style shall have the
			 * same subfamily value. If this value is
			 * zero, the remaining fields in the array
			 * will be ignored. */
 NameID	subfamilyNameID;/* If the preceding value is non-zero, this
			 * value must be set in the range 256 - 32767
			 * (inclusive). It records the value of a
			 * field in the name table, which must
			 * contain English-language strings encoded
			 * in Windows Unicode and Macintosh Roman,
			 * and may contain additional strings
			 * localized to other scripts and languages.
			 * Each of these strings is the name an
			 * application should use, in combination
			 * with the family name, to represent the
			 * subfamily in a menu.  Applications will
			 * choose the appropriate version based on
			 * their selection criteria. */
 HBUINT16	rangeStart;	/* Small end of the recommended usage range
			 * (exclusive), stored in 720/inch units
			 * (decipoints).  (The descriptions here were
			 * previously swapped with rangeEnd; the
			 * sanitize() check `designSize < rangeStart`
			 * above confirms this is the low end.) */
 HBUINT16	rangeEnd;	/* Large end of the recommended usage range
			 * (inclusive), stored in 720/inch units
			 * (decipoints). */
 public:
 DEFINE_SIZE_STATIC (10);
};
    581 
    582 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
/* Feature parameters for the stylistic-set features ss01-ss20. */
struct FeatureParamsStylisticSet
{
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   /* Right now minorVersion is at zero.  Which means, any table supports
    * the uiNameID field. */
   return_trace (c->check_struct (this));
 }

 /* Retains the UI-label name ID so the 'name' table subsetter keeps
  * its string. */
 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
 { nameids_to_retain->add (uiNameID); }

 /* Fixed-size table: subsetting is a plain byte copy. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   return_trace ((bool) c->serializer->embed (*this));
 }

 HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
			 * version number. Additional data may be
			 * added to the end of this Feature Parameters
			 * table in the future. */

 NameID	uiNameID;	/* The 'name' table name ID that specifies a
			 * string (or strings, for multiple languages)
			 * for a user-interface label for this
			 * feature.  The values of uiLabelNameId and
			 * sampleTextNameId are expected to be in the
			 * font-specific name ID range (256-32767),
			 * though that is not a requirement in this
			 * Feature Parameters specification. The
			 * user-interface label for the feature can
			 * be provided in multiple languages. An
			 * English string should be included as a
			 * fallback. The string should be kept to a
			 * minimal length to fit comfortably with
			 * different application interfaces. */
 public:
 DEFINE_SIZE_STATIC (4);
};
    624 
    625 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
/* Feature parameters for the character-variant features cv01-cv99. */
struct FeatureParamsCharacterVariants
{
 /* Standard two-call API: copies up to *char_count characters starting at
  * start_offset into chars (sub_array clamps *char_count), and returns
  * the total number of characters in the table. */
 unsigned
 get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
 {
   if (char_count)
   {
     + characters.as_array ().sub_array (start_offset, char_count)
     | hb_sink (hb_array (chars, *char_count))
     ;
   }
   return characters.len;
 }

 /* Byte size of this table including the trailing characters array. */
 unsigned get_size () const
 { return min_size + characters.len * HBUINT24::static_size; }

 /* Retains every name ID this table references, so the 'name' table
  * subsetter keeps their strings. */
 void collect_name_ids (hb_set_t *nameids_to_retain /* OUT */) const
 {
   if (featUILableNameID) nameids_to_retain->add (featUILableNameID);
   if (featUITooltipTextNameID) nameids_to_retain->add (featUITooltipTextNameID);
   if (sampleTextNameID) nameids_to_retain->add (sampleTextNameID);

   /* Skip the parameter-label range when absent or implausibly large
    * (>= 0x7FFF would walk past the valid font-specific name ID space). */
   if (!firstParamUILabelNameID || !numNamedParameters || numNamedParameters >= 0x7FFF)
     return;

   unsigned last_name_id = (unsigned) firstParamUILabelNameID + (unsigned) numNamedParameters - 1;
   nameids_to_retain->add_range (firstParamUILabelNameID, last_name_id);
 }

 /* Copied wholesale; the characters array is not filtered by the
  * retained glyph/unicode set here. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   return_trace ((bool) c->serializer->embed (*this));
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this) &&
	  characters.sanitize (c));
 }

 HBUINT16	format;			/* Format number is set to 0. */
 NameID	featUILableNameID;	/* The ‘name’ table name ID that
				 * specifies a string (or strings,
				 * for multiple languages) for a
				 * user-interface label for this
				 * feature. (May be NULL.) */
 NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
				 * specifies a string (or strings,
				 * for multiple languages) that an
				 * application can use for tooltip
				 * text for this feature. (May be
				 * nullptr.) */
 NameID	sampleTextNameID;	/* The ‘name’ table name ID that
				 * specifies sample text that
				 * illustrates the effect of this
				 * feature. (May be NULL.) */
 HBUINT16	numNamedParameters;	/* Number of named parameters. (May
				 * be zero.) */
 NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
				 * used to specify strings for
				 * user-interface labels for the
				 * feature parameters. (Must be zero
				 * if numParameters is zero.) */
 Array16Of<HBUINT24>
	characters;		/* Array of the Unicode Scalar Value
				 * of the characters for which this
				 * feature provides glyph variants.
				 * (May be zero.) */
 public:
 DEFINE_SIZE_ARRAY (14, characters);
};
    700 
/* Union over the three defined FeatureParams flavors.  Which member is
 * active is determined by the owning feature's tag: 'size', ssXX, or
 * cvXX; all other tags carry no parameters. */
struct FeatureParams
{
 bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
 {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
   return true;
#endif
   TRACE_SANITIZE (this);
   if (tag == HB_TAG ('s','i','z','e'))
     return_trace (u.size.sanitize (c));
   if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
     return_trace (u.stylisticSet.sanitize (c));
   if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
     return_trace (u.characterVariants.sanitize (c));
   /* Unknown tag: nothing to validate. */
   return_trace (true);
 }

 /* Forwards to the active member's collect_name_ids, selected by tag. */
 void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
 {
#ifdef HB_NO_LAYOUT_FEATURE_PARAMS
   return;
#endif
   if (tag == HB_TAG ('s','i','z','e'))
     return (u.size.collect_name_ids (nameids_to_retain));
   if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
     return (u.stylisticSet.collect_name_ids (nameids_to_retain));
   if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
     return (u.characterVariants.collect_name_ids (nameids_to_retain));
 }

 /* Subsets the active member; returns false (drop the params) when the
  * tag is unknown or missing. */
 bool subset (hb_subset_context_t *c, const Tag* tag) const
 {
   TRACE_SUBSET (this);
   if (!tag) return_trace (false);
   if (*tag == HB_TAG ('s','i','z','e'))
     return_trace (u.size.subset (c));
   if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
     return_trace (u.stylisticSet.subset (c));
   if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
     return_trace (u.characterVariants.subset (c));
   return_trace (false);
 }

#ifndef HB_NO_LAYOUT_FEATURE_PARAMS
 /* Typed accessors; each returns the Null object unless the tag selects
  * that member. */
 const FeatureParamsSize& get_size_params (hb_tag_t tag) const
 {
   if (tag == HB_TAG ('s','i','z','e'))
     return u.size;
   return Null (FeatureParamsSize);
 }
 const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
 {
   if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
     return u.stylisticSet;
   return Null (FeatureParamsStylisticSet);
 }
 const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
 {
   if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
     return u.characterVariants;
   return Null (FeatureParamsCharacterVariants);
 }
#endif

 private:
 union {
 FeatureParamsSize			size;
 FeatureParamsStylisticSet		stylisticSet;
 FeatureParamsCharacterVariants	characterVariants;
 } u;
 public:
 DEFINE_SIZE_MIN (0);
};
    774 
/* Closure handed to sanitize() of tagged records, so tag-dependent
 * members (e.g. FeatureParams, see Feature::sanitize below) can be
 * validated against the owning record's tag. */
struct Record_sanitize_closure_t {
 hb_tag_t tag;
 const void *list_base;
};
    779 
/* OpenType Feature table: optional FeatureParams plus an array of
 * LookupList indices. */
struct Feature
{
 unsigned int get_lookup_count () const
 { return lookupIndex.len; }
 /* NOTE(review): returns hb_tag_t although this is a lookup *index*;
  * the return type quirk is long-standing API and kept as-is. */
 hb_tag_t get_lookup_index (unsigned int i) const
 { return lookupIndex[i]; }
 /* Standard two-call API; see IndexArray::get_indexes. */
 unsigned int get_lookup_indexes (unsigned int start_index,
			   unsigned int *lookup_count /* IN/OUT */,
			   unsigned int *lookup_tags /* OUT */) const
 { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
 void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
 { lookupIndex.add_indexes_to (lookup_indexes); }

 const FeatureParams &get_feature_params () const
 { return this+featureParams; }

 bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
 { return lookupIndex.intersects (lookup_indexes); }

 /* Retains name IDs referenced by this feature's params (if any). */
 void collect_name_ids (hb_tag_t tag, hb_set_t *nameids_to_retain /* OUT */) const
 {
   if (featureParams)
     get_feature_params ().collect_name_ids (tag, nameids_to_retain);
 }

 /* Subsets this feature: the params subtable is subset with the feature
  * tag for dispatch, and the lookup indices are filtered to (and
  * remapped through) the retained-lookup map. */
 bool subset (hb_subset_context_t         *c,
       hb_subset_layout_context_t  *l,
       const Tag                   *tag = nullptr) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   out->featureParams.serialize_subset (c, featureParams, this, tag);

   auto it =
   + hb_iter (lookupIndex)
   | hb_filter (l->lookup_index_map)
   | hb_map (l->lookup_index_map)
   ;

   out->lookupIndex.serialize (c->serializer, l, it);
   // The decision to keep or drop this feature is already made before we get here
   // so always retain it.
   return_trace (true);
 }

 bool sanitize (hb_sanitize_context_t *c,
	 const Record_sanitize_closure_t *closure = nullptr) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this) &&
	  featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE) &&
	  lookupIndex.sanitize (c));
 }

 Offset16To<FeatureParams>
	 featureParams;	/* Offset to Feature Parameters table (if one
			 * has been defined for the feature), relative
			 * to the beginning of the Feature Table; = Null
			 * if not required */
 IndexArray	 lookupIndex;	/* Array of LookupList indices */
 public:
 DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};
    845 
/* A (tag, offset) record; the offset is relative to the object that holds
 * the record array (`base` in the methods below). */
template <typename Type>
struct Record
{
 int cmp (hb_tag_t a) const { return tag.cmp (a); }

 /* Subsets the object this record points at.  If f_sub is non-null it is
  * a substitute Feature (from the feature-substitutes map) that is
  * serialized in place of the original target. */
 bool subset (hb_subset_layout_context_t *c, const void *base, const void *f_sub = nullptr) const
 {
   TRACE_SUBSET (this);
   auto *out = c->subset_context->serializer->embed (this);
   if (unlikely (!out)) return_trace (false);

   if (!f_sub)
     return_trace (out->offset.serialize_subset (c->subset_context, offset, base, c, &tag));

   /* Serialize the substitute feature as a fresh object and link our
    * offset to it; discard the object if its subsetting fails. */
   const Feature& f = *reinterpret_cast<const Feature *> (f_sub);
   auto *s = c->subset_context->serializer;
   s->push ();

   out->offset = 0;
   bool ret = f.subset (c->subset_context, c, &tag);
   if (ret)
     s->add_link (out->offset, s->pop_pack ());
   else
     s->pop_discard ();

   return_trace (ret);
 }

 bool sanitize (hb_sanitize_context_t *c, const void *base) const
 {
   TRACE_SANITIZE (this);
   /* Pass our tag down so tag-dependent content (FeatureParams) can be
    * validated correctly. */
   const Record_sanitize_closure_t closure = {tag, base};
   return_trace (c->check_struct (this) &&
	  offset.sanitize (c, base, &closure));
 }

 Tag           tag;            /* 4-byte Tag identifier */
 Offset16To<Type>
               offset;         /* Offset from beginning of object holding
                                * the Record */
 public:
 DEFINE_SIZE_STATIC (6);
};
    889 
/* Array of Records, sorted by tag (enables binary search in find_index). */
template <typename Type>
struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
 const Offset16To<Type>& get_offset (unsigned int i) const
 { return (*this)[i].offset; }
 Offset16To<Type>& get_offset (unsigned int i)
 { return (*this)[i].offset; }
 const Tag& get_tag (unsigned int i) const
 { return (*this)[i].tag; }
 /* Copies up to *record_count tags starting at start_offset into
  * record_tags (record_count is clamped/updated by sub_array) and returns
  * the total number of records. */
 unsigned int get_tags (unsigned int start_offset,
                        unsigned int *record_count /* IN/OUT */,
                        hb_tag_t     *record_tags /* OUT */) const
 {
   if (record_count)
   {
     + this->as_array ().sub_array (start_offset, record_count)
     | hb_map (&Record<Type>::tag)
     | hb_sink (hb_array (record_tags, *record_count))
     ;
   }
   return this->len;
 }
 /* Binary-searches for `tag`; on a miss stores Index::NOT_FOUND_INDEX in
  * *index and returns false. */
 bool find_index (hb_tag_t tag, unsigned int *index) const
 {
   return this->bfind (tag, index, HB_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
 }
};
    917 
/* A RecordArrayOf whose offsets are relative to the list itself;
 * operator[] resolves a record's offset to the referenced object. */
template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
 const Type& operator [] (unsigned int i) const
 { return this+this->get_offset (i); }

 /* Subsets every record in the list (record-level filtering, if any, is
  * handled by subset_record_array / derived classes). */
 bool subset (hb_subset_context_t *c,
              hb_subset_layout_context_t *l) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   + this->iter ()
   | hb_apply (subset_record_array (l, out, this))
   ;
   return_trace (true);
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (RecordArrayOf<Type>::sanitize (c, this));
 }
};
    943 
/* FeatureList specialization of RecordListOf: keeps only features present
 * in the planner's feature_index_map and honors feature substitutions. */
struct RecordListOfFeature : RecordListOf<Feature>
{
 bool subset (hb_subset_context_t *c,
       hb_subset_layout_context_t *l) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   + hb_enumerate (*this)
   | hb_filter (l->feature_index_map, hb_first)
   | hb_apply ([l, out, this] (const hb_pair_t<unsigned, const Record<Feature>&>& _)
               {
                 /* If the plan substitutes this feature, serialize the
                  * replacement instead of the original record target. */
                 const Feature *f_sub = nullptr;
                 const Feature **f = nullptr;
                 if (l->feature_substitutes_map->has (_.first, &f))
                   f_sub = *f;

                 subset_record_array (l, out, this, f_sub) (_.second);
               })
   ;

   return_trace (true);
 }
};
    969 
    970 typedef RecordListOf<Feature> FeatureList;
    971 
    972 
/* OpenType LangSys table: an optional required-feature index (0xFFFF when
 * absent) plus the feature indices active for this language system. */
struct LangSys
{
 unsigned int get_feature_count () const
 { return featureIndex.len; }
 /* NOTE(review): returns an index despite the hb_tag_t return type;
  * callers treat it as an integer. */
 hb_tag_t get_feature_index (unsigned int i) const
 { return featureIndex[i]; }
 unsigned int get_feature_indexes (unsigned int start_offset,
			    unsigned int *feature_count /* IN/OUT */,
			    unsigned int *feature_indexes /* OUT */) const
 { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
 void add_feature_indexes_to (hb_set_t *feature_indexes) const
 { featureIndex.add_indexes_to (feature_indexes); }

 /* 0xFFFF is the spec's "no required feature" sentinel. */
 bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
 unsigned int get_required_feature_index () const
 {
   if (reqFeatureIndex == 0xFFFFu)
     return Index::NOT_FOUND_INDEX;
  return reqFeatureIndex;
 }

 LangSys* copy (hb_serialize_context_t *c) const
 {
   TRACE_SERIALIZE (this);
   return_trace (c->embed (*this));
 }

 /* True if this langsys and `o` are equivalent after dropping/remapping
  * features through feature_index_map (used to prune duplicate langsys). */
 bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
 {
   if (reqFeatureIndex != o.reqFeatureIndex)
     return false;

   auto iter =
   + hb_iter (featureIndex)
   | hb_filter (feature_index_map)
   | hb_map (feature_index_map)
   ;

   auto o_iter =
   + hb_iter (o.featureIndex)
   | hb_filter (feature_index_map)
   | hb_map (feature_index_map)
   ;

   /* Element-wise comparison of the two remapped sequences... */
   for (; iter && o_iter; iter++, o_iter++)
   {
     unsigned a = *iter;
     unsigned b = *o_iter;
     if (a != b) return false;
   }

   /* ...and they must also have the same length. */
   if (iter || o_iter) return false;

   return true;
 }

 /* Adds this langsys' (deduplicated) feature indices to the prune
  * context's retained-feature set. */
 void collect_features (hb_prune_langsys_context_t *c) const
 {
   if (!has_required_feature () && !get_feature_count ()) return;
   if (has_required_feature () &&
       c->duplicate_feature_map->has (reqFeatureIndex))
     c->new_feature_indexes->add (get_required_feature_index ());

   + hb_iter (featureIndex)
   | hb_filter (c->duplicate_feature_map)
   | hb_sink (c->new_feature_indexes)
   ;
 }

 /* Subsets this langsys, remapping feature indices; returns false (so the
  * caller can revert) when no features survive. */
 bool subset (hb_subset_context_t        *c,
       hb_subset_layout_context_t *l,
       const Tag                  *tag = nullptr) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   /* Remap the required feature, or write the "none" sentinel if it was
    * dropped. */
   const uint32_t *v;
   out->reqFeatureIndex = l->feature_map_w_duplicates->has (reqFeatureIndex, &v) ? *v : 0xFFFFu;

   if (!l->visitFeatureIndex (featureIndex.len))
     return_trace (false);

   auto it =
   + hb_iter (featureIndex)
   | hb_filter (l->feature_map_w_duplicates)
   | hb_map (l->feature_map_w_duplicates)
   ;

   bool ret = bool (it);
   out->featureIndex.serialize (c->serializer, l, it);
   return_trace (ret);
 }

 bool sanitize (hb_sanitize_context_t *c,
	 const Record_sanitize_closure_t * = nullptr) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this) && featureIndex.sanitize (c));
 }

 Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
			 * reordering table) */
 HBUINT16	reqFeatureIndex;/* Index of a feature required for this
			 * language system--if no required features
			 * = 0xFFFFu */
 IndexArray	featureIndex;	/* Array of indices into the FeatureList */
 public:
 DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
   1083 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
   1084 
/* OpenType Script table: an optional default LangSys plus LangSys records
 * sorted by language tag. */
struct Script
{
 unsigned int get_lang_sys_count () const
 { return langSys.len; }
 const Tag& get_lang_sys_tag (unsigned int i) const
 { return langSys.get_tag (i); }
 unsigned int get_lang_sys_tags (unsigned int start_offset,
			  unsigned int *lang_sys_count /* IN/OUT */,
			  hb_tag_t     *lang_sys_tags /* OUT */) const
 { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
 /* i == NOT_FOUND_INDEX designates the default LangSys. */
 const LangSys& get_lang_sys (unsigned int i) const
 {
   if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
   return this+langSys[i].offset;
 }
 bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
 { return langSys.find_index (tag, index); }

 bool has_default_lang_sys () const           { return defaultLangSys != 0; }
 const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

 /* Collects features of non-redundant langsys into the prune context and
  * records (per script) which langsys indices must be kept.  A langsys
  * identical to the default one (after feature dedup) is skipped. */
 void prune_langsys (hb_prune_langsys_context_t *c,
                     unsigned script_index) const
 {
   if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
   if (!c->visitScript ()) return;

   /* Lazily create this script's retained-langsys set. */
   if (!c->script_langsys_map->has (script_index))
   {
     if (unlikely (!c->script_langsys_map->set (script_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
	return;
   }

   if (has_default_lang_sys ())
   {
     //only collect features from non-redundant langsys
     const LangSys& d = get_default_lang_sys ();
     if (c->visitLangsys (d.get_feature_count ())) {
       d.collect_features (c);
     }

     for (auto _ : + hb_enumerate (langSys))
     {
       const LangSys& l = this+_.second.offset;
       if (!c->visitLangsys (l.get_feature_count ())) continue;
       if (l.compare (d, c->duplicate_feature_map)) continue;

       l.collect_features (c);
       c->script_langsys_map->get (script_index)->add (_.first);
     }
   }
   else
   {
     /* No default: every (budget-permitting) langsys is kept. */
     for (auto _ : + hb_enumerate (langSys))
     {
       const LangSys& l = this+_.second.offset;
       if (!c->visitLangsys (l.get_feature_count ())) continue;
       l.collect_features (c);
       c->script_langsys_map->get (script_index)->add (_.first);
     }
   }
 }

 /* Subsets this script.  Keeps the default langsys if it subsets
  * successfully (or unconditionally for the 'DFLT' script); keeps only
  * langsys records the planner marked active for this script.  Returns
  * false when nothing remains — except for GSUB, where empty scripts are
  * retained. */
 bool subset (hb_subset_context_t         *c,
       hb_subset_layout_context_t  *l,
       const Tag                   *tag) const
 {
   TRACE_SUBSET (this);
   if (!l->visitScript ()) return_trace (false);
   if (tag && !c->plan->layout_scripts.has (*tag))
     return false;

   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   bool defaultLang = false;
   if (has_default_lang_sys ())
   {
     c->serializer->push ();
     const LangSys& ls = this+defaultLangSys;
     bool ret = ls.subset (c, l);
     if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
     {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
     }
     else
     {
	/* 'DFLT' keeps its default langsys even when empty. */
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
     }
   }

   const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
   if (active_langsys)
   {
     + hb_enumerate (langSys)
     | hb_filter (active_langsys, hb_first)
     | hb_map (hb_second)
     | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
     | hb_apply (subset_record_array (l, &(out->langSys), this))
     ;
   }

   return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
 }

 bool sanitize (hb_sanitize_context_t *c,
	 const Record_sanitize_closure_t * = nullptr) const
 {
   TRACE_SANITIZE (this);
   return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
 }

 protected:
 Offset16To<LangSys>
	defaultLangSys;	/* Offset to DefaultLangSys table--from
			 * beginning of Script table--may be Null */
 RecordArrayOf<LangSys>
	langSys;	/* Array of LangSysRecords--listed
			 * alphabetically by LangSysTag */
 public:
 DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};
   1209 
/* ScriptList specialization: subsets each script independently, reverting
 * the serializer to a snapshot when a script produces nothing. */
struct RecordListOfScript : RecordListOf<Script>
{
 bool subset (hb_subset_context_t *c,
              hb_subset_layout_context_t *l) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   for (auto _ : + hb_enumerate (*this))
   {
     auto snap = c->serializer->snapshot ();
     /* Record which script we're in so langsys filtering can look up the
      * right active-langsys set. */
     l->cur_script_index = _.first;
     bool ret = _.second.subset (l, this);
     if (!ret) c->serializer->revert (snap);
     else out->len++;  /* Count only the scripts actually emitted. */
   }

   return_trace (true);
 }
};
   1231 
   1232 typedef RecordListOfScript ScriptList;
   1233 
   1234 
   1235 
/* GSUB/GPOS LookupFlag word (bit-flags qualifying a Lookup). */
struct LookupFlag : HBUINT16
{
 enum Flags {
   RightToLeft		= 0x0001u,
   IgnoreBaseGlyphs	= 0x0002u,
   IgnoreLigatures	= 0x0004u,
   IgnoreMarks		= 0x0008u,
   IgnoreFlags		= 0x000Eu,	/* Union of the three Ignore* bits */
   UseMarkFilteringSet	= 0x0010u,	/* A markFilteringSet index follows the
					 * Lookup's subtable array (see Lookup) */
   Reserved		= 0x00E0u,
   MarkAttachmentType	= 0xFF00u	/* Mask selecting the mark-attachment
					 * class filter (per OpenType spec) */
 };
 public:
 DEFINE_SIZE_STATIC (2);
};
   1251 
   1252 } /* namespace OT */
   1253 /* This has to be outside the namespace. */
   1254 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
   1255 namespace OT {
   1256 
/* GSUB/GPOS Lookup table: type, flags, and subtable offsets.  When the
 * UseMarkFilteringSet flag is set, a 16-bit mark-filtering-set index is
 * stored immediately after the subtable array, making this a
 * variable-size struct (accessed via StructAfter). */
struct Lookup
{
 unsigned int get_subtable_count () const { return subTable.len; }

 /* The subtable array is stored as raw Offset16s; these views reinterpret
  * it as typed offsets for the caller's subtable type. */
 template <typename TSubTable>
 const Array16OfOffset16To<TSubTable>& get_subtables () const
 { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
 template <typename TSubTable>
 Array16OfOffset16To<TSubTable>& get_subtables ()
 { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }

 template <typename TSubTable>
 const TSubTable& get_subtable (unsigned int i) const
 { return this+get_subtables<TSubTable> ()[i]; }
 template <typename TSubTable>
 TSubTable& get_subtable (unsigned int i)
 { return this+get_subtables<TSubTable> ()[i]; }

 /* Total byte size, including the optional markFilteringSet tail. */
 unsigned int get_size () const
 {
   const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
   if (lookupFlag & LookupFlag::UseMarkFilteringSet)
     return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
   return (const char *) &markFilteringSet - (const char *) this;
 }

 unsigned int get_type () const { return lookupType; }

 /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
  * higher 16-bit is mark-filtering-set if the lookup uses one.
  * Not to be confused with glyph_props which is very similar. */
 uint32_t get_props () const
 {
   unsigned int flag = lookupFlag;
   if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
   {
     const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
     flag += (markFilteringSet << 16);
   }
   return flag;
 }

 /* Dispatches c over each subtable until one returns a value the context
  * considers final. */
 template <typename TSubTable, typename context_t, typename ...Ts>
 typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
 {
   unsigned int lookup_type = get_type ();
   TRACE_DISPATCH (this, lookup_type);
   unsigned int count = get_subtable_count ();
   for (unsigned int i = 0; i < count; i++) {
     typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, ds...);
     if (c->stop_sublookup_iteration (r))
	return_trace (r);
   }
   return_trace (c->default_return_value ());
 }

 /* Writes the lookup header and an empty subtable-offset array.
  * lookup_props: low 16 bits = LookupFlag, high 16 bits = mark filtering
  * set (stored only when the flag requests it). */
 bool serialize (hb_serialize_context_t *c,
	  unsigned int lookup_type,
	  uint32_t lookup_props,
	  unsigned int num_subtables)
 {
   TRACE_SERIALIZE (this);
   if (unlikely (!c->extend_min (this))) return_trace (false);
   lookupType = lookup_type;
   lookupFlag = lookup_props & 0xFFFFu;
   if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
   if (lookupFlag & LookupFlag::UseMarkFilteringSet)
   {
     if (unlikely (!c->extend (this))) return_trace (false);
     HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
     markFilteringSet = lookup_props >> 16;
   }
   return_trace (true);
 }

 /* Subsets the lookup: keeps subtables intersecting the retained glyph
  * set and remaps (or drops) the mark filtering set. */
 template <typename TSubTable>
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (*this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
   out->lookupType = lookupType;
   out->lookupFlag = lookupFlag;

   const hb_set_t *glyphset = c->plan->glyphset_gsub ();
   unsigned int lookup_type = get_type ();
   + hb_iter (get_subtables <TSubTable> ())
   | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
   | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
   ;

   if (lookupFlag & LookupFlag::UseMarkFilteringSet)
   {
     const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
     hb_codepoint_t *idx;
     if (!c->plan->used_mark_sets_map.has (markFilteringSet, &idx))
     {
       unsigned new_flag = lookupFlag;
       new_flag &= ~LookupFlag::UseMarkFilteringSet;
       // https://github.com/harfbuzz/harfbuzz/issues/5499
       // If we remove UseMarkFilteringSet flag because the set is now empty,
       // we need to add IgnoreMarks flag, otherwise the lookup will not
       // ignore any marks, which changes the behavior.
       new_flag |= LookupFlag::IgnoreMarks;
       out->lookupFlag = new_flag;
     }
     else
     {
       /* Set survived: grow the output to hold the tail and store the
        * remapped set index. */
       if (unlikely (!c->serializer->extend (out))) return_trace (false);
       HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
       outMarkFilteringSet = *idx;
     }
   }

   // Always keep the lookup even if it's empty. The rest of layout subsetting depends on lookup
   // indices being consistent with those computed during planning. So if an empty lookup is
   // discarded during the subset phase it will invalidate all subsequent lookup indices.
   // Generally we shouldn't end up with an empty lookup as we pre-prune them during the planning
   // phase, but it can happen in rare cases such as when during closure subtable is considered
   // degenerate (see: https://github.com/harfbuzz/harfbuzz/issues/3853)
   return_trace (true);
 }

 template <typename TSubTable>
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
   hb_barrier ();

   /* Charge the subtable count against the sanitizer's work budget. */
   unsigned subtables = get_subtable_count ();
   if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

   if (lookupFlag & LookupFlag::UseMarkFilteringSet)
   {
     const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
     if (!markFilteringSet.sanitize (c)) return_trace (false);
   }

   if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
     return_trace (false);

   if (unlikely (get_type () == TSubTable::Extension))
   {
     hb_barrier ();

     /* The spec says all subtables of an Extension lookup should
      * have the same type, which shall not be the Extension type
      * itself (but we already checked for that).
      * This is specially important if one has a reverse type!
      */
     unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
     for (unsigned int i = 1; i < subtables; i++)
	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
	  return_trace (false);
   }
   return_trace (true);
 }

 protected:
 HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
 HBUINT16	lookupFlag;		/* Lookup qualifiers */
 Array16Of<Offset16>
	subTable;		/* Array of SubTables */
/*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
				 * structure. This field is only present if bit
				 * UseMarkFilteringSet of lookup flags is set. */
 public:
 DEFINE_SIZE_ARRAY (6, subTable);
};
   1427 
   1428 template <typename Types>
   1429 using LookupList = List16OfOffsetTo<Lookup, typename Types::HBUINT>;
   1430 
/* Offset list of lookups; subsetting keeps only lookups present in the
 * planner's lookup_index_map (in original order, already renumbered by
 * the map's construction). */
template <typename TLookup, typename OffsetType>
struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
{
 bool subset (hb_subset_context_t        *c,
       hb_subset_layout_context_t *l) const
 {
   TRACE_SUBSET (this);
   auto *out = c->serializer->start_embed (this);
   if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

   + hb_enumerate (*this)
   | hb_filter (l->lookup_index_map, hb_first)
   | hb_map (hb_second)
   | hb_apply (subset_offset_array (c, *out, this))
   ;
   return_trace (true);
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   /* NOTE(review): sanitizes through the Offset16 flavour of the base;
    * only exactly matches the base when OffsetType is HBUINT16 — confirm
    * against the instantiations used. */
   return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
 }
};
   1455 
   1456 
   1457 /*
   1458 * Coverage Table
   1459 */
   1460 
   1461 
/* Densely renumbers the classes in `klasses` into klass_map (old -> new),
 * rewrites glyph_and_klass in place to the new numbering, and serializes
 * the result as a ClassDef.  With a null klass_map the pairs are
 * serialized as-is. */
static bool ClassDef_remap_and_serialize (hb_serialize_context_t *c,
				  const hb_set_t &klasses,
                                         bool use_class_zero,
                                         hb_sorted_vector_t<hb_codepoint_pair_t> &glyph_and_klass, /* IN/OUT */
				  hb_map_t *klass_map /*IN/OUT*/)
{
 if (!klass_map)
   return ClassDef_serialize (c, glyph_and_klass.iter ());

 /* any glyph not assigned a class value falls into Class zero (0),
  * if any glyph assigned to class 0, remapping must start with 0->0*/
 if (!use_class_zero)
   klass_map->set (0, 0);

 /* Hand out consecutive new class numbers, starting at 1 if class 0 is
  * already claimed above (or pre-seeded by the caller). */
 unsigned idx = klass_map->has (0) ? 1 : 0;
 for (const unsigned k: klasses)
 {
   if (klass_map->has (k)) continue;
   klass_map->set (k, idx);
   idx++;
 }


 /* Rewrite every pair's class to its new number. */
 for (unsigned i = 0; i < glyph_and_klass.length; i++)
 {
   hb_codepoint_t klass = glyph_and_klass[i].second;
   glyph_and_klass[i].second = klass_map->get (klass);
 }

 /* Surface any allocation failures from the containers into the
  * serializer's error state. */
 c->propagate_error (glyph_and_klass, klasses);
 return ClassDef_serialize (c, glyph_and_klass.iter ());
}
   1494 
   1495 /*
   1496 * Class Definition Table
   1497 */
   1498 
   1499 template <typename Types>
   1500 struct ClassDefFormat1_3
   1501 {
   1502  friend struct ClassDef;
   1503 
   1504  private:
   1505  unsigned int get_class (hb_codepoint_t glyph_id) const
   1506  {
   1507    return classValue[(unsigned int) (glyph_id - startGlyph)];
   1508  }
   1509 
 /* Number of glyphs with an explicit class entry (the array length;
  * entries holding class 0 are counted too). */
 unsigned get_population () const
 {
   return classValue.len;
 }
   1514 
 /* Serializes a format-1 ClassDef from a sorted (glyph, class) iterator:
  * one class value per glyph in [glyph_min, glyph_max].  Glyphs in that
  * range absent from the iterator keep the serializer's initial value
  * (presumably 0 — confirm serializer zero-fills extended memory).
  * An empty iterator produces an empty, valid table. */
 template<typename Iterator,
   hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
 bool serialize (hb_serialize_context_t *c,
	  Iterator it)
 {
   TRACE_SERIALIZE (this);
   if (unlikely (!c->extend_min (this))) return_trace (false);

   if (unlikely (!it))
   {
     classFormat = 1;
     startGlyph = 0;
     classValue.len = 0;
     return_trace (true);
   }

   /* `it` is sorted, so the first glyph is the minimum. */
   hb_codepoint_t glyph_min = (*it).first;
   hb_codepoint_t glyph_max = + it
		       | hb_map (hb_first)
		       | hb_reduce (hb_max, 0u);
   unsigned glyph_count = glyph_max - glyph_min + 1;

   startGlyph = glyph_min;
   if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
   for (const hb_pair_t<hb_codepoint_t, uint32_t> gid_klass_pair : + it)
   {
     unsigned idx = gid_klass_pair.first - glyph_min;
     classValue[idx] = gid_klass_pair.second;
   }
   return_trace (true);
 }
   1546 
 /* Subsets this ClassDef: collects (new_gid, class) pairs for retained
  * glyphs with a non-zero class (optionally restricted to glyph_filter),
  * then remaps classes densely and serializes.  Returns false when the
  * result is empty and keep_empty_table is false. */
 bool subset (hb_subset_context_t *c,
       hb_map_t *klass_map = nullptr /*OUT*/,
              bool keep_empty_table = true,
              bool use_class_zero = true,
              const Coverage* glyph_filter = nullptr) const
 {
   TRACE_SUBSET (this);
   const hb_map_t &glyph_map = c->plan->glyph_map_gsub;

   hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
   hb_set_t orig_klasses;

   hb_codepoint_t start = startGlyph;
   hb_codepoint_t end   = start + classValue.len;

   for (const hb_codepoint_t gid : + hb_range (start, end))
   {
     hb_codepoint_t new_gid = glyph_map[gid];
     if (new_gid == HB_MAP_VALUE_INVALID) continue;
     if (glyph_filter && !glyph_filter->has(gid)) continue;

     /* Class 0 is implicit for unlisted glyphs; no need to store it. */
     unsigned klass = classValue[gid - start];
     if (!klass) continue;

     glyph_and_klass.push (hb_pair (new_gid, klass));
     orig_klasses.add (klass);
   }

   if (use_class_zero)
   {
     /* Class 0 may only double as a real class when every retained glyph
      * has an entry; otherwise it must stay "everything else". */
     unsigned glyph_count = glyph_filter
		     ? hb_len (hb_iter (glyph_map.keys()) | hb_filter (glyph_filter))
		     : glyph_map.get_population ();
     use_class_zero = glyph_count <= glyph_and_klass.length;
   }
   if (!ClassDef_remap_and_serialize (c->serializer,
                                      orig_klasses,
                                      use_class_zero,
                                      glyph_and_klass,
                                      klass_map))
     return_trace (false);
   return_trace (keep_empty_table || (bool) glyph_and_klass);
 }
   1590 
   1591  bool sanitize (hb_sanitize_context_t *c) const
   1592  {
   1593    TRACE_SANITIZE (this);
   1594    return_trace (c->check_struct (this) && classValue.sanitize (c));
   1595  }
   1596 
   1597  unsigned cost () const { return 1; }
   1598 
 /* Adds ranges spanning the glyphs whose class is non-zero to `glyphs`.
  * Runs of non-zero entries are flushed each time a zero entry is hit.
  * NOTE(review): the ranges end at startGlyph + i / startGlyph + count,
  * one past the last non-zero entry — confirm against the set's
  * add_range () endpoint convention (inclusive vs exclusive). */
 template <typename set_t>
 bool collect_coverage (set_t *glyphs) const
 {
   unsigned int start = 0;
   unsigned int count = classValue.len;
   for (unsigned int i = 0; i < count; i++)
   {
     if (classValue[i])
	continue;

     if (start != i)
	if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
	  return false;

     start = i + 1;
   }
   /* Flush the trailing run, if any. */
   if (start != count)
     if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
	return false;

   return true;
 }
   1621 
   1622  template <typename set_t>
   1623  bool collect_class (set_t *glyphs, unsigned klass) const
   1624  {
   1625    unsigned int count = classValue.len;
   1626    for (unsigned int i = 0; i < count; i++)
   1627      if (classValue[i] == klass) glyphs->add (startGlyph + i);
   1628    return true;
   1629  }
   1630 
 /* True if any glyph in `glyphs` carries a non-zero class here.  The set
  * iteration is seeded with startGlyph - 1 so next () yields the first
  * candidate >= startGlyph; when startGlyph is 0 this wraps to the
  * invalid-value sentinel, i.e. "start from the beginning". */
 bool intersects (const hb_set_t *glyphs) const
 {
   hb_codepoint_t start = startGlyph;
   hb_codepoint_t end = startGlyph + classValue.len;
   for (hb_codepoint_t iter = startGlyph - 1;
 glyphs->next (&iter) && iter < end;)
     if (classValue[iter - start]) return true;
   return false;
 }
 /* True if any member of `glyphs' has class `klass'.  Class 0 also
  * matches glyphs outside the covered range, since uncovered glyphs
  * implicitly belong to class 0. */
 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
 {
   unsigned int count = classValue.len;
   if (klass == 0)
   {
     /* Match if there's any glyph that is not listed! */
     hb_codepoint_t g = HB_SET_VALUE_INVALID;
     if (!glyphs->next (&g)) return false;
     if (g < startGlyph) return true;
     /* Any set member past the last covered glyph is uncovered too. */
     g = startGlyph + count - 1;
     if (glyphs->next (&g)) return true;
     /* Fall through. */
   }
   /* TODO Speed up, using set overlap first? */
   /* TODO(iter) Rewrite as dagger. */
   const HBUINT16 *arr = classValue.arrayZ;
   for (unsigned int i = 0; i < count; i++)
     if (arr[i] == klass && glyphs->has (startGlyph + i))
       return true;
   return false;
 }
   1661 
 /* Collects into `intersect_glyphs' the members of `glyphs' whose class
  * is `klass'.  For class 0 that means every set member outside the
  * covered span, plus covered glyphs with an explicit 0 entry. */
 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
 {
   unsigned count = classValue.len;
   if (klass == 0)
   {
     /* Set members below the covered span are implicitly class 0. */
     unsigned start_glyph = startGlyph;
     for (uint32_t g = HB_SET_VALUE_INVALID;
          glyphs->next (&g) && g < start_glyph;)
       intersect_glyphs->add (g);

     /* ...and so are members past the last covered glyph. */
     for (uint32_t g = startGlyph + count - 1;
          glyphs-> next (&g);)
       intersect_glyphs->add (g);

     return;
   }

   /* Covered glyphs with a matching explicit class entry. */
   for (unsigned i = 0; i < count; i++)
     if (classValue[i] == klass && glyphs->has (startGlyph + i))
       intersect_glyphs->add (startGlyph + i);

#if 0
   /* The following implementation is faster asymptotically, but slower
    * in practice. */
   unsigned start_glyph = startGlyph;
   unsigned end_glyph = start_glyph + count;
   for (unsigned g = startGlyph - 1;
        glyphs->next (&g) && g < end_glyph;)
     if (classValue.arrayZ[g - start_glyph] == klass)
       intersect_glyphs->add (g);
#endif
 }
   1694 
   1695  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
   1696  {
   1697    if (glyphs->is_empty ()) return;
   1698    hb_codepoint_t end_glyph = startGlyph + classValue.len - 1;
   1699    if (glyphs->get_min () < startGlyph ||
   1700        glyphs->get_max () > end_glyph)
   1701      intersect_classes->add (0);
   1702 
   1703    for (const auto& _ : + hb_enumerate (classValue))
   1704    {
   1705      hb_codepoint_t g = startGlyph + _.first;
   1706      if (glyphs->has (g))
   1707        intersect_classes->add (_.second);
   1708    }
   1709  }
   1710 
   1711  protected:
   1712  HBUINT16	classFormat;	/* Format identifier--format = 1 */
   1713  typename Types::HBGlyphID
   1714 	 startGlyph;	/* First GlyphID of the classValueArray */
   1715  typename Types::template ArrayOf<HBUINT16>
   1716 	classValue;	/* Array of Class Values--one per GlyphID */
   1717  public:
   1718  DEFINE_SIZE_ARRAY (2 + 2 * Types::size, classValue);
   1719 };
   1720 
/* ClassDefFormat2_4: range-based Class Definition table.
 * Stores (first, last, classValue) glyph ranges sorted by first glyph;
 * lookup is a binary search.  Instantiated with SmallTypes (format 2,
 * 16-bit glyph IDs) or MediumTypes (format 4, 24-bit glyph IDs). */
template <typename Types>
struct ClassDefFormat2_4
{
 friend struct ClassDef;

 private:
 /* Class of glyph_id; 0 when no range covers it (bsearch yields the
  * Null record whose value is 0). */
 unsigned int get_class (hb_codepoint_t glyph_id) const
 {
   return rangeRecord.bsearch (glyph_id).value;
 }

 /* Total number of glyphs covered by all ranges, saturated at UINT_MAX. */
 unsigned get_population () const
 {
   typename Types::large_int ret = 0;
   for (const auto &r : rangeRecord)
     ret += r.get_population ();
   return ret > UINT_MAX ? UINT_MAX : (unsigned) ret;
 }

 /* Serializes a sorted (gid, class) sequence as ranges, merging runs of
  * consecutive gids that share a class.  Out-of-order input is detected
  * and the emitted ranges are re-sorted at the end. */
 template<typename Iterator,
   hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
 bool serialize (hb_serialize_context_t *c,
	  Iterator it)
 {
   TRACE_SERIALIZE (this);
   if (unlikely (!c->extend_min (this))) return_trace (false);

   /* Empty input: valid zero-range table. */
   if (unlikely (!it))
   {
     classFormat = 2;
     rangeRecord.len = 0;
     return_trace (true);
   }

   unsigned unsorted = false;
   unsigned num_ranges = 1;
   hb_codepoint_t prev_gid = (*it).first;
   unsigned prev_klass = (*it).second;

   /* Open the first range with the first pair. */
   RangeRecord<Types> range_rec;
   range_rec.first = prev_gid;
   range_rec.last = prev_gid;
   range_rec.value = prev_klass;

   auto *record = c->copy (range_rec);
   if (unlikely (!record)) return_trace (false);

   for (const auto gid_klass_pair : + (++it))
   {
     hb_codepoint_t cur_gid = gid_klass_pair.first;
     unsigned cur_klass = gid_klass_pair.second;

     /* A gap in gids or a class change closes the current range and
      * opens a new one. */
     if (cur_gid != prev_gid + 1 ||
         cur_klass != prev_klass)
     {

       if (unlikely (cur_gid < prev_gid))
         unsorted = true;

       if (unlikely (!record)) break;
       record->last = prev_gid;
       num_ranges++;

       range_rec.first = cur_gid;
       range_rec.last = cur_gid;
       range_rec.value = cur_klass;

       record = c->copy (range_rec);
     }

     prev_klass = cur_klass;
     prev_gid = cur_gid;
   }

   if (unlikely (c->in_error ())) return_trace (false);

   /* Close the final range. */
   if (likely (record)) record->last = prev_gid;
   rangeRecord.len = num_ranges;

   /* Keep the array bsearch-able even if the input was out of order. */
   if (unlikely (unsorted))
     rangeRecord.as_array ().qsort (RangeRecord<Types>::cmp_range);

   return_trace (true);
 }

 /* Subsets into the plan's new glyph space.  klass_map (optional OUT)
  * receives the old->new class remapping.  Picks the cheaper of
  * iterating the retained glyph set (bsearch per glyph) or walking the
  * ranges, based on a population heuristic. */
 bool subset (hb_subset_context_t *c,
       hb_map_t *klass_map = nullptr /*OUT*/,
              bool keep_empty_table = true,
              bool use_class_zero = true,
              const Coverage* glyph_filter = nullptr) const
 {
   TRACE_SUBSET (this);
   const hb_map_t &glyph_map = c->plan->glyph_map_gsub;
   const hb_set_t &glyph_set = *c->plan->glyphset_gsub ();

   hb_sorted_vector_t<hb_codepoint_pair_t> glyph_and_klass;
   hb_set_t orig_klasses;

   if (glyph_set.get_population () * hb_bit_storage ((unsigned) rangeRecord.len)
       < get_population ())
   {
     /* Fewer set members than covered glyphs: bsearch per set member. */
     for (hb_codepoint_t g : glyph_set)
     {
       unsigned klass = get_class (g);
       if (!klass) continue;
       hb_codepoint_t new_gid = glyph_map[g];
       if (new_gid == HB_MAP_VALUE_INVALID) continue;
       if (glyph_filter && !glyph_filter->has (g)) continue;
       glyph_and_klass.push (hb_pair (new_gid, klass));
       orig_klasses.add (klass);
     }
   }
   else
   {
     /* Otherwise walk the ranges, clamped to the source glyph count. */
     unsigned num_source_glyphs = c->plan->source->get_num_glyphs ();
     for (auto &range : rangeRecord)
     {
       unsigned klass = range.value;
       if (!klass) continue;
       hb_codepoint_t start = range.first;
       hb_codepoint_t end   = hb_min (range.last + 1, num_source_glyphs);
       for (hb_codepoint_t g = start; g < end; g++)
       {
         hb_codepoint_t new_gid = glyph_map[g];
         if (new_gid == HB_MAP_VALUE_INVALID) continue;
         if (glyph_filter && !glyph_filter->has (g)) continue;

         glyph_and_klass.push (hb_pair (new_gid, klass));
         orig_klasses.add (klass);
       }
     }
   }

   /* Class 0 can only be reused if every retained glyph got a class. */
   const hb_set_t& glyphset = *c->plan->glyphset_gsub ();
   unsigned glyph_count = glyph_filter
                          ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                          : glyph_map.get_population ();
   use_class_zero = use_class_zero && glyph_count <= glyph_and_klass.length;
   if (!ClassDef_remap_and_serialize (c->serializer,
                                      orig_klasses,
                                      use_class_zero,
                                      glyph_and_klass,
                                      klass_map))
     return_trace (false);
   return_trace (keep_empty_table || (bool) glyph_and_klass);
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (rangeRecord.sanitize (c));
 }

 unsigned cost () const { return hb_bit_storage ((unsigned) rangeRecord.len); /* bsearch cost */ }

 /* Adds all glyphs of ranges with a non-zero class to `glyphs'. */
 template <typename set_t>
 bool collect_coverage (set_t *glyphs) const
 {
   for (auto &range : rangeRecord)
     if (range.value)
       if (unlikely (!range.collect_coverage (glyphs)))
         return false;
   return true;
 }

 /* Adds all glyphs of ranges whose class equals `klass' to `glyphs'. */
 template <typename set_t>
 bool collect_class (set_t *glyphs, unsigned int klass) const
 {
   for (auto &range : rangeRecord)
   {
     if (range.value == klass)
       if (unlikely (!range.collect_coverage (glyphs)))
         return false;
   }
   return true;
 }

 /* True if any member of `glyphs' has a non-zero class; chooses per-glyph
  * bsearch or per-range intersection based on relative sizes. */
 bool intersects (const hb_set_t *glyphs) const
 {
   if (rangeRecord.len > glyphs->get_population () * hb_bit_storage ((unsigned) rangeRecord.len))
   {
     for (auto g : *glyphs)
       if (get_class (g))
         return true;
     return false;
   }

   return hb_any (+ hb_iter (rangeRecord)
                  | hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (*glyphs) && range.value; }));
 }
 /* True if any member of `glyphs' has class `klass'; class 0 also
  * matches any glyph falling in a gap between (or outside) ranges. */
 bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
 {
   if (klass == 0)
   {
     /* Match if there's any glyph that is not listed! */
     hb_codepoint_t g = HB_SET_VALUE_INVALID;
     hb_codepoint_t last = HB_SET_VALUE_INVALID;
     auto it = hb_iter (rangeRecord);
     for (auto &range : it)
     {
       /* Adjacent ranges leave no gap; skip ahead. */
       if (it->first == last + 1)
       {
         it++;
         continue;
       }

       if (!glyphs->next (&g))
         break;
       /* A set member before this range's start sits in a gap: class 0. */
       if (g < range.first)
         return true;
       g = range.last;
       last = g;
     }
     /* Any member past the final range is also class 0. */
     if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
       return true;
     /* Fall through. */
   }
   for (const auto &range : rangeRecord)
     if (range.value == klass && range.intersects (*glyphs))
       return true;
   return false;
 }

 /* Collects into `intersect_glyphs' the members of `glyphs' with class
  * `klass'.  For class 0: every member in a gap between / outside ranges. */
 void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
 {
   if (klass == 0)
   {
     hb_codepoint_t g = HB_SET_VALUE_INVALID;
     for (auto &range : rangeRecord)
     {
       if (!glyphs->next (&g))
         goto done;
       /* Members before this range's start are uncovered: class 0. */
       while (g < range.first)
       {
         intersect_glyphs->add (g);
         if (!glyphs->next (&g))
           goto done;
       }
       g = range.last;
     }
     /* Members past the final range are uncovered too. */
     while (glyphs->next (&g))
       intersect_glyphs->add (g);
     done:

     return;
   }

   unsigned count = rangeRecord.len;
   if (count > glyphs->get_population () * hb_bit_storage (count))
   {
     /* Small set: bsearch each member. */
     for (auto g : *glyphs)
     {
       unsigned i;
       if (rangeRecord.as_array ().bfind (g, &i) &&
           rangeRecord.arrayZ[i].value == klass)
         intersect_glyphs->add (g);
     }
     return;
   }

   /* Otherwise walk matching ranges and enumerate set members inside. */
   for (auto &range : rangeRecord)
   {
     if (range.value != klass) continue;

     unsigned end = range.last + 1;
     for (hb_codepoint_t g = range.first - 1;
          glyphs->next (&g) && g < end;)
       intersect_glyphs->add (g);
   }
 }

 /* Collects into `intersect_classes' every class represented by some
  * member of `glyphs'; class 0 is added when a member falls in a gap. */
 void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
 {
   if (glyphs->is_empty ()) return;

   hb_codepoint_t g = HB_SET_VALUE_INVALID;
   for (auto &range : rangeRecord)
   {
     if (!glyphs->next (&g))
       break;
     if (g < range.first)
     {
       intersect_classes->add (0);
       break;
     }
     g = range.last;
   }
   if (g != HB_SET_VALUE_INVALID && glyphs->next (&g))
     intersect_classes->add (0);

   for (const auto& range : rangeRecord)
     if (range.intersects (*glyphs))
       intersect_classes->add (range.value);
 }

 protected:
 HBUINT16	classFormat;	/* Format identifier--format = 2 */
 typename Types::template SortedArrayOf<RangeRecord<Types>>
	rangeRecord;	/* Array of glyph ranges--ordered by
			 * Start GlyphID */
 public:
 DEFINE_SIZE_ARRAY (2 + Types::size, rangeRecord);
};
   2024 
   2025 struct ClassDef
   2026 {
   2027  /* Has interface. */
   2028  unsigned operator [] (hb_codepoint_t k) const { return get (k); }
   2029  bool has (hb_codepoint_t k) const { return (*this)[k]; }
   2030  /* Projection. */
   2031  hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
   2032 
   2033  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
   2034  unsigned int get_class (hb_codepoint_t glyph_id) const
   2035  {
   2036    switch (u.format.v) {
   2037    case 1: hb_barrier (); return u.format1.get_class (glyph_id);
   2038    case 2: hb_barrier (); return u.format2.get_class (glyph_id);
   2039 #ifndef HB_NO_BEYOND_64K
   2040    case 3: hb_barrier (); return u.format3.get_class (glyph_id);
   2041    case 4: hb_barrier (); return u.format4.get_class (glyph_id);
   2042 #endif
   2043    default:return 0;
   2044    }
   2045  }
   2046  unsigned int get_class (hb_codepoint_t glyph_id,
   2047 		  hb_ot_layout_mapping_cache_t *cache) const
   2048  {
   2049    unsigned klass;
   2050    if (cache && cache->get (glyph_id, &klass)) return klass;
   2051    klass = get_class (glyph_id);
   2052    if (cache) cache->set (glyph_id, klass);
   2053    return klass;
   2054  }
   2055 
   2056  unsigned get_population () const
   2057  {
   2058    switch (u.format.v) {
   2059    case 1: hb_barrier (); return u.format1.get_population ();
   2060    case 2: hb_barrier (); return u.format2.get_population ();
   2061 #ifndef HB_NO_BEYOND_64K
   2062    case 3: hb_barrier (); return u.format3.get_population ();
   2063    case 4: hb_barrier (); return u.format4.get_population ();
   2064 #endif
   2065    default:return NOT_COVERED;
   2066    }
   2067  }
   2068 
   2069  template<typename Iterator,
   2070    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
   2071  bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
   2072  {
   2073    TRACE_SERIALIZE (this);
   2074    if (unlikely (!c->extend_min (this))) return_trace (false);
   2075 
   2076    auto it = + it_with_class_zero | hb_filter (hb_second);
   2077 
   2078    unsigned format = 2;
   2079    hb_codepoint_t glyph_max = 0;
   2080    if (likely (it))
   2081    {
   2082      hb_codepoint_t glyph_min = (*it).first;
   2083      glyph_max = glyph_min;
   2084 
   2085      unsigned num_glyphs = 0;
   2086      unsigned num_ranges = 1;
   2087      hb_codepoint_t prev_gid = glyph_min;
   2088      unsigned prev_klass = (*it).second;
   2089 
   2090      for (const auto gid_klass_pair : it)
   2091      {
   2092 hb_codepoint_t cur_gid = gid_klass_pair.first;
   2093 unsigned cur_klass = gid_klass_pair.second;
   2094        num_glyphs++;
   2095 if (cur_gid == glyph_min) continue;
   2096        if (cur_gid > glyph_max) glyph_max = cur_gid;
   2097 if (cur_gid != prev_gid + 1 ||
   2098     cur_klass != prev_klass)
   2099   num_ranges++;
   2100 
   2101 prev_gid = cur_gid;
   2102 prev_klass = cur_klass;
   2103      }
   2104 
   2105      if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
   2106 format = 1;
   2107    }
   2108 
   2109 #ifndef HB_NO_BEYOND_64K
   2110    if (glyph_max > 0xFFFFu)
   2111      u.format.v += 2;
   2112    if (unlikely (glyph_max > 0xFFFFFFu))
   2113 #else
   2114    if (unlikely (glyph_max > 0xFFFFu))
   2115 #endif
   2116    {
   2117      c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW);
   2118      return_trace (false);
   2119    }
   2120 
   2121    u.format.v = format;
   2122 
   2123    switch (u.format.v)
   2124    {
   2125    case 1: hb_barrier (); return_trace (u.format1.serialize (c, it));
   2126    case 2: hb_barrier (); return_trace (u.format2.serialize (c, it));
   2127 #ifndef HB_NO_BEYOND_64K
   2128    case 3: hb_barrier (); return_trace (u.format3.serialize (c, it));
   2129    case 4: hb_barrier (); return_trace (u.format4.serialize (c, it));
   2130 #endif
   2131    default:return_trace (false);
   2132    }
   2133  }
   2134 
   2135  bool subset (hb_subset_context_t *c,
   2136        hb_map_t *klass_map = nullptr /*OUT*/,
   2137               bool keep_empty_table = true,
   2138               bool use_class_zero = true,
   2139               const Coverage* glyph_filter = nullptr) const
   2140  {
   2141    TRACE_SUBSET (this);
   2142    switch (u.format.v) {
   2143    case 1: hb_barrier (); return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
   2144    case 2: hb_barrier (); return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
   2145 #ifndef HB_NO_BEYOND_64K
   2146    case 3: hb_barrier (); return_trace (u.format3.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
   2147    case 4: hb_barrier (); return_trace (u.format4.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
   2148 #endif
   2149    default:return_trace (false);
   2150    }
   2151  }
   2152 
   2153  bool sanitize (hb_sanitize_context_t *c) const
   2154  {
   2155    TRACE_SANITIZE (this);
   2156    if (!u.format.v.sanitize (c)) return_trace (false);
   2157    hb_barrier ();
   2158    switch (u.format.v) {
   2159    case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
   2160    case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
   2161 #ifndef HB_NO_BEYOND_64K
   2162    case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
   2163    case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
   2164 #endif
   2165    default:return_trace (true);
   2166    }
   2167  }
   2168 
   2169  unsigned cost () const
   2170  {
   2171    switch (u.format.v) {
   2172    case 1: hb_barrier (); return u.format1.cost ();
   2173    case 2: hb_barrier (); return u.format2.cost ();
   2174 #ifndef HB_NO_BEYOND_64K
   2175    case 3: hb_barrier (); return u.format3.cost ();
   2176    case 4: hb_barrier (); return u.format4.cost ();
   2177 #endif
   2178    default:return 0u;
   2179    }
   2180  }
   2181 
   2182  /* Might return false if array looks unsorted.
   2183   * Used for faster rejection of corrupt data. */
   2184  template <typename set_t>
   2185  bool collect_coverage (set_t *glyphs) const
   2186  {
   2187    switch (u.format.v) {
   2188    case 1: hb_barrier (); return u.format1.collect_coverage (glyphs);
   2189    case 2: hb_barrier (); return u.format2.collect_coverage (glyphs);
   2190 #ifndef HB_NO_BEYOND_64K
   2191    case 3: hb_barrier (); return u.format3.collect_coverage (glyphs);
   2192    case 4: hb_barrier (); return u.format4.collect_coverage (glyphs);
   2193 #endif
   2194    default:return false;
   2195    }
   2196  }
   2197 
   2198  /* Might return false if array looks unsorted.
   2199   * Used for faster rejection of corrupt data. */
   2200  template <typename set_t>
   2201  bool collect_class (set_t *glyphs, unsigned int klass) const
   2202  {
   2203    switch (u.format.v) {
   2204    case 1: hb_barrier (); return u.format1.collect_class (glyphs, klass);
   2205    case 2: hb_barrier (); return u.format2.collect_class (glyphs, klass);
   2206 #ifndef HB_NO_BEYOND_64K
   2207    case 3: hb_barrier (); return u.format3.collect_class (glyphs, klass);
   2208    case 4: hb_barrier (); return u.format4.collect_class (glyphs, klass);
   2209 #endif
   2210    default:return false;
   2211    }
   2212  }
   2213 
   2214  bool intersects (const hb_set_t *glyphs) const
   2215  {
   2216    switch (u.format.v) {
   2217    case 1: hb_barrier (); return u.format1.intersects (glyphs);
   2218    case 2: hb_barrier (); return u.format2.intersects (glyphs);
   2219 #ifndef HB_NO_BEYOND_64K
   2220    case 3: hb_barrier (); return u.format3.intersects (glyphs);
   2221    case 4: hb_barrier (); return u.format4.intersects (glyphs);
   2222 #endif
   2223    default:return false;
   2224    }
   2225  }
   2226  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
   2227  {
   2228    switch (u.format.v) {
   2229    case 1: hb_barrier (); return u.format1.intersects_class (glyphs, klass);
   2230    case 2: hb_barrier (); return u.format2.intersects_class (glyphs, klass);
   2231 #ifndef HB_NO_BEYOND_64K
   2232    case 3: hb_barrier (); return u.format3.intersects_class (glyphs, klass);
   2233    case 4: hb_barrier (); return u.format4.intersects_class (glyphs, klass);
   2234 #endif
   2235    default:return false;
   2236    }
   2237  }
   2238 
   2239  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
   2240  {
   2241    switch (u.format.v) {
   2242    case 1: hb_barrier (); return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
   2243    case 2: hb_barrier (); return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
   2244 #ifndef HB_NO_BEYOND_64K
   2245    case 3: hb_barrier (); return u.format3.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
   2246    case 4: hb_barrier (); return u.format4.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
   2247 #endif
   2248    default:return;
   2249    }
   2250  }
   2251 
   2252  void intersected_classes (const hb_set_t *glyphs, hb_set_t *intersect_classes) const
   2253  {
   2254    switch (u.format.v) {
   2255    case 1: hb_barrier (); return u.format1.intersected_classes (glyphs, intersect_classes);
   2256    case 2: hb_barrier (); return u.format2.intersected_classes (glyphs, intersect_classes);
   2257 #ifndef HB_NO_BEYOND_64K
   2258    case 3: hb_barrier (); return u.format3.intersected_classes (glyphs, intersect_classes);
   2259    case 4: hb_barrier (); return u.format4.intersected_classes (glyphs, intersect_classes);
   2260 #endif
   2261    default:return;
   2262    }
   2263  }
   2264 
   2265 
   2266  protected:
   2267  union {
   2268  struct { HBUINT16 v; }	format;		/* Format identifier */
   2269  ClassDefFormat1_3<SmallTypes>	format1;
   2270  ClassDefFormat2_4<SmallTypes>	format2;
   2271 #ifndef HB_NO_BEYOND_64K
   2272  ClassDefFormat1_3<MediumTypes>format3;
   2273  ClassDefFormat2_4<MediumTypes>format4;
   2274 #endif
   2275  } u;
   2276  public:
   2277  DEFINE_SIZE_UNION (2, format.v);
   2278 };
   2279 
   2280 template<typename Iterator>
   2281 static inline bool ClassDef_serialize (hb_serialize_context_t *c,
   2282 			       Iterator it)
   2283 { return (c->start_embed<ClassDef> ()->serialize (c, it)); }
   2284 
   2285 
   2286 /*
   2287 * Item Variation Store
   2288 */
   2289 
   2290 /* ported from fonttools (class _Encoding) */
/* Groups delta rows that can share one VarData sub-table; tracks the
 * per-column byte widths and the serialization overhead so candidate
 * merges can be costed.  Ported from fonttools' _Encoding class. */
struct delta_row_encoding_t
{
 /* each byte represents a region, value is one of 0/1/2/4, which means bytes
  * needed for this region */
 struct chars_t : hb_vector_t<uint8_t>
 {
   int cmp (const chars_t& other) const
   {
     return as_array ().cmp (other.as_array ());
   }

   /* Returns (total byte width per row, number of non-zero columns). */
   hb_pair_t<unsigned, unsigned> get_width ()
   {
     unsigned width = 0;
     unsigned columns = 0;
     for (unsigned i = 0; i < length; i++)
     {
       unsigned v = arrayZ[i];
       width += v;
       columns += (v != 0);
     }
     return hb_pair (width, columns);
   }

   /* Width/columns of the element-wise max with `other', i.e. what the
    * encoding would cost after a merge.  Assumes equal lengths. */
   HB_HOT
   hb_pair_t<unsigned, unsigned> combine_width (const chars_t& other) const
   {
     unsigned combined_width = 0;
     unsigned combined_columns = 0;
     for (unsigned i = 0; i < length; i++)
     {
       unsigned v = hb_max (arrayZ[i], other.arrayZ[i]);
       combined_width += v;
       combined_columns += (v != 0);
     }
     return hb_pair (combined_width, combined_columns);
   }
 };

 hb_pair_t<unsigned, unsigned> combine_width (const delta_row_encoding_t& other_encoding) const { return chars.combine_width (other_encoding.chars); }

 // Actual data

 chars_t chars;      // per-column byte widths (0/1/2/4)
 unsigned width = 0;     // sum of chars: bytes per row
 unsigned overhead = 0;  // fixed serialization cost of this encoding
 hb_vector_t<const hb_vector_t<int>*> items;  // non-owning delta rows

 delta_row_encoding_t () = default;
 /* Takes ownership of `rows' and derives chars/width/overhead from them. */
 delta_row_encoding_t (hb_vector_t<const hb_vector_t<int>*> &&rows, unsigned num_cols)
 {
   assert (rows);

   items = std::move (rows);

   if (unlikely (!chars.resize (num_cols)))
     return;

   calculate_chars ();
 }

 /* Absorbs another encoding: appends its rows and widens our columns to
  * the element-wise max of the two. */
 void merge (const delta_row_encoding_t& other)
 {
   items.alloc (items.length + other.items.length);
   for (auto &row : other.items)
     add_row (row);

   // Merge chars
   assert (chars.length == other.chars.length);
   for (unsigned i = 0; i < chars.length; i++)
     chars.arrayZ[i] = hb_max (chars.arrayZ[i], other.chars.arrayZ[i]);
   chars_changed ();
 }

 /* Recomputes the cached width/overhead after chars was modified. */
 void chars_changed ()
 {
   auto _ = chars.get_width ();
   width = _.first;
   overhead = get_chars_overhead (_.second);
 }

 /* Derives the per-column byte widths from the current rows. */
 void calculate_chars ()
 {
   assert (items);

   bool long_words = false;

   for (auto &row : items)
   {
     assert (row->length == chars.length);

     /* 0/1/2 byte encoding */
     for (unsigned i = 0; i < row->length; i++)
     {
       int v =  row->arrayZ[i];
       if (v == 0)
         continue;
       else if (v > 32767 || v < -32768)
       {
         long_words = true;
         chars.arrayZ[i] = hb_max (chars.arrayZ[i], 4);
       }
       else if (v > 127 || v < -128)
         chars.arrayZ[i] = hb_max (chars.arrayZ[i], 2);
       else
         chars.arrayZ[i] = hb_max (chars.arrayZ[i], 1);
     }
   }

   /* With long words present, the format has no 1-byte columns:
    * everything below a word is stored as 2 bytes. */
   if (long_words)
   {
     // Convert 1s to 2s
     for (auto &v : chars)
       if (v == 1)
         v = 2;
   }

   chars_changed ();
 }

 bool is_empty () const
 { return !items; }

 /* Fixed cost of a VarData sub-table with `num_columns' used regions. */
 static inline unsigned get_chars_overhead (unsigned num_columns)
 {
   unsigned c = 4 + 6; // 4 bytes for LOffset, 6 bytes for VarData header
   return c + num_columns * 2;
 }

 /* Bytes saved by dissolving this encoding into one that charges
  * `additional_bytes_per_rows' extra per row; clamped at 0. */
 unsigned get_gain (unsigned additional_bytes_per_rows = 1) const
 {
   int count = items.length;
   return hb_max (0, (int) overhead - count * (int) additional_bytes_per_rows);
 }

 /* Net bytes saved (possibly negative) by merging `other_encoding' into
  * this one: both overheads are recovered, minus the widening cost on
  * every row and the combined encoding's own overhead. */
 int gain_from_merging (const delta_row_encoding_t& other_encoding) const
 {
   // Back of the envelope calculations to reject early.
   signed additional_bytes_per_rows = other_encoding.width - width;
   if (additional_bytes_per_rows > 0)
   {
     if (get_gain (additional_bytes_per_rows) == 0)
       return 0;
   }
   else
   {
     if (other_encoding.get_gain (-additional_bytes_per_rows) == 0)
       return 0;
   }

   auto pair = combine_width (other_encoding);
   unsigned combined_width = pair.first;
   unsigned combined_columns = pair.second;

   int combined_gain = (int) overhead + (int) other_encoding.overhead;
   combined_gain -= (combined_width - (int) width) * items.length;
   combined_gain -= (combined_width - (int) other_encoding.width) * other_encoding.items.length;
   combined_gain -= get_chars_overhead (combined_columns);

   return combined_gain;
 }

 bool add_row (const hb_vector_t<int>* row)
 { return items.push (row); }

 /* qsort comparator: ascending by width, then by chars in *descending*
  * order (note the swapped operands), matching fonttools' sort. */
 static int cmp (const void *pa, const void *pb)
 {
   const delta_row_encoding_t *a = (const delta_row_encoding_t *)pa;
   const delta_row_encoding_t *b = (const delta_row_encoding_t *)pb;

   if (a->width != b->width)
     return (int) a->width - (int) b->width;

   return b->chars.cmp (a->chars);
 }
};
   2467 
   2468 struct VarRegionAxis
   2469 {
   2470  float evaluate (int coord) const
   2471  {
   2472    int peak = peakCoord.to_int ();
   2473    if (peak == 0 || coord == peak)
   2474      return 1.f;
   2475    else if (coord == 0) // Faster
   2476      return 0.f;
   2477 
   2478    int start = startCoord.to_int (), end = endCoord.to_int ();
   2479 
   2480    /* TODO Move these to sanitize(). */
   2481    if (unlikely (start > peak || peak > end))
   2482      return 1.f;
   2483    if (unlikely (start < 0 && end > 0 && peak != 0))
   2484      return 1.f;
   2485 
   2486    if (coord <= start || end <= coord)
   2487      return 0.f;
   2488 
   2489    /* Interpolate */
   2490    if (coord < peak)
   2491      return float (coord - start) / (peak - start);
   2492    else
   2493      return float (end - coord) / (end - peak);
   2494  }
   2495 
   2496  bool sanitize (hb_sanitize_context_t *c) const
   2497  {
   2498    TRACE_SANITIZE (this);
   2499    return_trace (c->check_struct (this));
   2500  }
   2501 
   2502  bool serialize (hb_serialize_context_t *c) const
   2503  {
   2504    TRACE_SERIALIZE (this);
   2505    return_trace (c->embed (this));
   2506  }
   2507 
   2508  public:
   2509  F2DOT14	startCoord;
   2510  F2DOT14	peakCoord;
   2511  F2DOT14	endCoord;
   2512  public:
   2513  DEFINE_SIZE_STATIC (6);
   2514 };
   2515 struct SparseVarRegionAxis
   2516 {
   2517  float evaluate (const int *coords, unsigned int coord_len) const
   2518  {
   2519    unsigned i = axisIndex;
   2520    int coord = i < coord_len ? coords[i] : 0;
   2521    return axis.evaluate (coord);
   2522  }
   2523 
   2524  bool sanitize (hb_sanitize_context_t *c) const
   2525  {
   2526    TRACE_SANITIZE (this);
   2527    return_trace (c->check_struct (this));
   2528  }
   2529 
   2530  bool serialize (hb_serialize_context_t *c) const
   2531  {
   2532    TRACE_SERIALIZE (this);
   2533    return_trace (c->embed (this));
   2534  }
   2535 
   2536  public:
   2537  HBUINT16 axisIndex;
   2538  VarRegionAxis axis;
   2539  public:
   2540  DEFINE_SIZE_STATIC (8);
   2541 };
   2542 
/* A small cache mapping region index -> evaluated scalar, used to avoid
 * recomputing variation-region scalars across repeated delta lookups.
 * Scalars are stored as fixed-point ints; INT_MIN marks an empty slot.
 * Instances either use the inline STATIC_LENGTH-slot array or are
 * heap-allocated by create() with a larger trailing array in its place. */
struct hb_scalar_cache_t
{
  private:
  static constexpr unsigned STATIC_LENGTH = 16;
  /* Sentinel for "slot not computed yet". */
  static constexpr int INVALID = INT_MIN;
  /* Fixed-point scale: 2^(bits-2), i.e. 2^30 for a 32-bit int, so a
   * scalar round-trips as roundf(value * MULTIPLIER) * DIVISOR. */
  static constexpr float MULTIPLIER = 1 << ((sizeof (int) * 8) - 2);
  static constexpr float DIVISOR = 1.f / MULTIPLIER;

  public:
  hb_scalar_cache_t () : length (STATIC_LENGTH) { clear (); }

  /* Non-copyable/non-movable: heap instances are over-allocated past
   * static_values, so default copies/moves would slice them. */
  hb_scalar_cache_t (const hb_scalar_cache_t&) = delete;
  hb_scalar_cache_t (hb_scalar_cache_t&&) = delete;
  hb_scalar_cache_t& operator= (const hb_scalar_cache_t&) = delete;
  hb_scalar_cache_t& operator= (hb_scalar_cache_t&&) = delete;

  /* Return a cache with `count` slots.  A zero count or a failed
   * allocation yields the shared Null singleton; a sufficiently large
   * `scratch_cache` is cleared and reused instead of allocating. */
  static hb_scalar_cache_t *create (unsigned int count,
			    hb_scalar_cache_t *scratch_cache = nullptr)
  {
    if (!count) return (hb_scalar_cache_t *) &Null(hb_scalar_cache_t);

    if (scratch_cache && count <= scratch_cache->length)
    {
      scratch_cache->clear ();
      return scratch_cache;
    }

    /* Header plus `count` slots substituted for static_values. */
    auto *cache = (hb_scalar_cache_t *) hb_malloc (sizeof (hb_scalar_cache_t) - sizeof (static_values) + sizeof (static_values[0]) * count);
    if (unlikely (!cache)) return (hb_scalar_cache_t *) &Null(hb_scalar_cache_t);

    cache->length = count;
    cache->clear ();

    return cache;
  }

  /* Release a cache from create(); the Null singleton and a reused
   * scratch cache are deliberately not freed. */
  static void destroy (hb_scalar_cache_t *cache,
	       hb_scalar_cache_t *scratch_cache = nullptr)
  {
    if (cache != &Null(hb_scalar_cache_t) && cache != scratch_cache)
      hb_free (cache);
  }

  /* Mark every slot as not-yet-computed. */
  void clear ()
  {
    auto *values = &static_values[0];
    unsigned i = 0;
#ifndef HB_OPTIMIZE_SIZE
    /* Manually unrolled x4; tail handled by the scalar loop below. */
    for (; i + 3 < length; i += 4)
    {
      values[i + 0] = INVALID;
      values[i + 1] = INVALID;
      values[i + 2] = INVALID;
      values[i + 3] = INVALID;
    }
#endif
    for (; i < length; i++)
      values[i] = INVALID;
  }

  /* Fetch slot i into *value; returns false only when the slot has
   * never been set.  Out-of-range indices report a cached 0. */
  HB_ALWAYS_INLINE
  bool get (unsigned i, float *value) const
  {
    if (unlikely (i >= length))
    {
      *value = 0.f;
      return true;
    }
    auto *values = &static_values[0];
    auto *cached_value = &values[i];
    // Super hot. Most common path is that we have a cached value of 0.
    int v = *cached_value;
    if (likely (!v))
    {
      *value = 0.f;
      return true;
    }
    if (v == INVALID)
      return false;
    *value = v * DIVISOR;
    return true;
  }

  /* Store a scalar into slot i as rounded fixed-point; out-of-range
   * indices are silently ignored. */
  HB_ALWAYS_INLINE
  void set (unsigned i, float value)
  {
    if (unlikely (i >= length)) return;
    auto *values = &static_values[0];
    auto *cached_value = &values[i];
    *cached_value = roundf(value * MULTIPLIER);
  }

  private:
  unsigned length;
  /* Atomic slots; `mutable` so slots can be written through const
   * access paths.  NOTE(review): presumably shared across threads
   * during shaping — confirm intended memory-order guarantees. */
  mutable hb_atomic_t<int> static_values[STATIC_LENGTH];
};
   2639 
/* VariationRegionList: the region definitions shared by the subtables
 * of an ItemVariationStore.  Regions are stored as a flat
 * regionCount x axisCount matrix of VarRegionAxis records. */
struct VarRegionList
{
  private:
  /* Scalar for one region: product of every axis' scalar; coordinates
   * past `coord_len` are treated as the default position (0). */
  float evaluate_impl (unsigned int region_index,
	       const int *coords, unsigned int coord_len) const
  {
    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);
    float v = 1.f;

    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
      {
	/* One zero factor zeroes the whole product; stop early. */
	v = 0.f;
	break;
      }
      v *= factor;
    }

    return v;
  }

  public:
  /* Cached front end for evaluate_impl(); out-of-range region indices
   * evaluate to 0. */
  HB_ALWAYS_INLINE
  float evaluate (unsigned int region_index,
	  const int *coords, unsigned int coord_len,
	  hb_scalar_cache_t *cache = nullptr) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    float v;
    if (cache && cache->get (region_index, &v))
      return v;

    v = evaluate_impl (region_index, coords, coord_len);

    if (cache)
      cache->set (region_index, v);
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
	  hb_barrier () &&
	  axesZ.sanitize (c, axisCount * regionCount));
  }

  /* Serialize from region descriptions given as axis-tag -> Triple
   * (min/peak/max) maps.  An axis absent from a region's map is
   * written as (0,0,0), i.e. "always 1" for that axis. */
  bool serialize (hb_serialize_context_t *c,
                  const hb_vector_t<hb_tag_t>& axis_tags,
                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& regions)
  {
    TRACE_SERIALIZE (this);
    unsigned axis_count = axis_tags.length;
    unsigned region_count = regions.length;
    if (!axis_count || !region_count) return_trace (false);
    /* Guard the total matrix size before extending the buffer. */
    if (unlikely (hb_unsigned_mul_overflows (axis_count * region_count,
                                             VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = axis_count;
    regionCount = region_count;

    for (unsigned r = 0; r < region_count; r++)
    {
      const auto& region = regions[r];
      for (unsigned i = 0; i < axis_count; i++)
      {
        hb_tag_t tag = axis_tags.arrayZ[i];
        VarRegionAxis var_region_rec;
        Triple *coords;
        if (region->has (tag, &coords))
        {
          var_region_rec.startCoord.set_float (coords->minimum);
          var_region_rec.peakCoord.set_float (coords->middle);
          var_region_rec.endCoord.set_float (coords->maximum);
        }
        else
        {
          var_region_rec.startCoord.set_int (0);
          var_region_rec.peakCoord.set_int (0);
          var_region_rec.endCoord.set_int (0);
        }
        if (!var_region_rec.serialize (c))
          return_trace (false);
      }
    }
    return_trace (true);
  }

  /* Serialize a pruned copy of `src`: keep only the regions present in
   * `region_map`, in new-index order (new -> old via backward()). */
  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_inc_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    if (unlikely (hb_unsigned_mul_overflows (axisCount * regionCount,
				     VarRegionAxis::static_size))) return_trace (false);
    if (unlikely (!c->extend (this))) return_trace (false);
    unsigned int region_count = src->regionCount;
    for (unsigned int r = 0; r < regionCount; r++)
    {
      unsigned int backward = region_map.backward (r);
      /* Reject maps referring past the source's region count. */
      if (backward >= region_count) return_trace (false);
      hb_memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  /* Reconstruct one region as an axis-tag -> Triple map.  Axes with a
   * zero peak are omitted: they do not constrain the region. */
  bool get_var_region (unsigned region_index,
                       const hb_map_t& axes_old_index_tag_map,
                       hb_hashmap_t<hb_tag_t, Triple>& axis_tuples /* OUT */) const
  {
    if (region_index >= regionCount) return false;
    const VarRegionAxis* axis_region = axesZ.arrayZ + (region_index * axisCount);
    for (unsigned i = 0; i < axisCount; i++)
    {
      hb_tag_t *axis_tag;
      /* Every axis index must have a known tag. */
      if (!axes_old_index_tag_map.has (i, &axis_tag))
        return false;

      float min_val = axis_region->startCoord.to_float ();
      float def_val = axis_region->peakCoord.to_float ();
      float max_val = axis_region->endCoord.to_float ();

      if (def_val != 0.f)
        axis_tuples.set (*axis_tag, Triple ((double) min_val, (double) def_val, (double) max_val));
      axis_region++;
    }
    return !axis_tuples.in_error ();
  }

  /* Reconstruct all regions via get_var_region(). */
  bool get_var_regions (const hb_map_t& axes_old_index_tag_map,
                        hb_vector_t<hb_hashmap_t<hb_tag_t, Triple>>& regions /* OUT */) const
  {
    if (!regions.alloc (regionCount))
      return false;

    for (unsigned i = 0; i < regionCount; i++)
    {
      hb_hashmap_t<hb_tag_t, Triple> axis_tuples;
      if (!get_var_region (i, axes_old_index_tag_map, axis_tuples))
        return false;
      regions.push (std::move (axis_tuples));
    }
    return !regions.in_error ();
  }

  /* Header plus the full axis matrix. */
  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }

  public:
  HBUINT16	axisCount;
  HBUINT15	regionCount;
  protected:
  UnsizedArrayOf<VarRegionAxis>
		axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
   2804 
   2805 struct SparseVariationRegion : Array16Of<SparseVarRegionAxis>
   2806 {
   2807  float evaluate (const int *coords, unsigned int coord_len) const
   2808  {
   2809    float v = 1.f;
   2810    unsigned int count = len;
   2811    for (unsigned int i = 0; i < count; i++)
   2812    {
   2813      float factor = arrayZ[i].evaluate (coords, coord_len);
   2814      if (factor == 0.f)
   2815 return 0.;
   2816      v *= factor;
   2817    }
   2818    return v;
   2819  }
   2820 };
   2821 
   2822 struct SparseVarRegionList
   2823 {
   2824  HB_ALWAYS_INLINE
   2825  float evaluate (unsigned int region_index,
   2826 	  const int *coords, unsigned int coord_len,
   2827 	  hb_scalar_cache_t *cache = nullptr) const
   2828  {
   2829    if (unlikely (region_index >= regions.len))
   2830      return 0.;
   2831 
   2832    float v;
   2833    if (cache && cache->get (region_index, &v))
   2834      return v;
   2835 
   2836    const SparseVariationRegion &region = this+regions[region_index];
   2837 
   2838    v = region.evaluate (coords, coord_len);
   2839    if (cache)
   2840      cache->set (region_index, v);
   2841 
   2842    return v;
   2843  }
   2844 
   2845  bool sanitize (hb_sanitize_context_t *c) const
   2846  {
   2847    TRACE_SANITIZE (this);
   2848    return_trace (regions.sanitize (c, this));
   2849  }
   2850 
   2851  public:
   2852  Array16Of<Offset32To<SparseVariationRegion>>
   2853 	regions;
   2854  public:
   2855  DEFINE_SIZE_ARRAY (2, regions);
   2856 };
   2857 
   2858 
/* ItemVariationData: one delta-set subtable of an ItemVariationStore.
 * Each of `itemCount` rows holds one delta per referenced region,
 * packed widest-first: long words (32-bit), then words (16-bit), then
 * bytes — or words then bytes when LONG_WORDS is unset.  wordSizeCount
 * packs the wide-column count with the LONG_WORDS flag. */
struct VarData
{
  unsigned int get_item_count () const
  { return itemCount; }

  unsigned int get_region_index_count () const
  { return regionIndices.len; }

  /* Returns (unsigned)-1 for an out-of-range column. */
  unsigned get_region_index (unsigned i) const
  { return i >= regionIndices.len ? -1 : regionIndices[i]; }

  /* Bytes per row: every column is 1 byte wide (2 if LONG_WORDS) and
   * the first wordCount() columns are double width. */
  unsigned int get_row_size () const
  { return (wordCount () + regionIndices.len) * (longWords () ? 2 : 1); }

  unsigned int get_size () const
  { return min_size
  - regionIndices.min_size + regionIndices.get_size ()
  + itemCount * get_row_size ();
  }

  /* Sum scalar * delta across the row for item `inner`, walking the
   * columns in storage order: 32-bit, then 16-bit, then 8-bit deltas. */
  float _get_delta (unsigned int inner,
	    const int *coords, unsigned int coord_count,
	    const VarRegionList &regions,
	    hb_scalar_cache_t *cache = nullptr) const
  {
    if (unlikely (inner >= itemCount))
      return 0.;
    bool is_long = longWords ();
    unsigned int count = regionIndices.len;
    unsigned word_count = wordCount ();
    /* Column layout: lcount 32-bit entries, then 16-bit entries up to
     * scount, then 8-bit entries up to count. */
    unsigned int scount = is_long ? count : word_count;
    unsigned int lcount = is_long ? word_count : 0;

    const HBUINT8 *bytes = get_delta_bytes ();
    const HBUINT8 *row = bytes + inner * get_row_size ();

    float delta = 0.;
    unsigned int i = 0;

    const HBINT32 *lcursor = reinterpret_cast<const HBINT32 *> (row);
    for (; i < lcount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      if (scalar)
        delta += scalar * *lcursor;
      lcursor++;
    }
    const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (lcursor);
    for (; i < scount; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      if (scalar)
       delta += scalar * *scursor;
      scursor++;
    }
    const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
    for (; i < count; i++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count, cache);
      if (scalar)
        delta += scalar * *bcursor;
      bcursor++;
    }

    return delta;
  }

  /* Public entry point; fast-paths the common empty subtable. */
  HB_ALWAYS_INLINE
  float get_delta (unsigned int inner,
	   const int *coords, unsigned int coord_count,
	   const VarRegionList &regions,
	   hb_scalar_cache_t *cache = nullptr) const
  {
    unsigned int count = regionIndices.len;
    if (!count) return 0.f; // This is quite common, so optimize it.
    return _get_delta (inner, coords, coord_count, regions, cache);
  }

  /* Evaluate the scalar of every referenced region into `scalars`;
   * entries past the region count are zero-filled. */
  void get_region_scalars (const int *coords, unsigned int coord_count,
		   const VarRegionList &regions,
		   float *scalars /*OUT */,
		   unsigned int num_scalars) const
  {
    unsigned count = hb_min (num_scalars, regionIndices.len);
    for (unsigned int i = 0; i < count; i++)
      scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
    for (unsigned int i = count; i < num_scalars; i++)
      scalars[i] = 0.f;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
	  regionIndices.sanitize (c) &&
	  hb_barrier () &&
	  wordCount () <= regionIndices.len &&
	  c->check_range (get_delta_bytes (),
			  itemCount,
			  get_row_size ()));
  }

  /* Serialize from raw delta rows (one int vector per item), choosing
   * per-column widths and reordering columns words-first. */
  bool serialize (hb_serialize_context_t *c,
                  bool has_long,
                  const hb_vector_t<const hb_vector_t<int>*>& rows)
  {
    TRACE_SERIALIZE (this);
    unsigned row_count = rows.length;
    if (!row_count) {
      // Nothing to serialize, will be empty.
      // NOTE(review): plain `return false` here skips return_trace(),
      // unlike every other exit — confirm trace-depth bookkeeping is OK.
      return false;
    }

    if (unlikely (!c->extend_min (this))) return_trace (false);
    itemCount = row_count;

    /* Classify each column: zero, fits in the narrow width, or needs
     * the wide (word) width. */
    int min_threshold = has_long ? -65536 : -128;
    int max_threshold = has_long ? +65535 : +127;
    enum delta_size_t { kZero=0, kNonWord, kWord };
    hb_vector_t<delta_size_t> delta_sz;
    unsigned num_regions = rows[0]->length;
    if (!delta_sz.resize (num_regions))
      return_trace (false);

    unsigned word_count = 0;
    for (unsigned r = 0; r < num_regions; r++)
    {
      for (unsigned i = 0; i < row_count; i++)
      {
        int delta = rows[i]->arrayZ[r];
        if (delta < min_threshold || delta > max_threshold)
        {
          delta_sz[r] = kWord;
          word_count++;
          break;
        }
        else if (delta != 0)
        {
          delta_sz[r] = kNonWord;
        }
      }
    }

    /* reorder regions: words and then non-words*/
    unsigned word_index = 0;
    unsigned non_word_index = word_count;
    hb_map_t ri_map;    /* new column index -> original region index */
    for (unsigned r = 0; r < num_regions; r++)
    {
      if (!delta_sz[r]) continue;  /* all-zero columns are dropped */
      unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
      if (!ri_map.set (new_r, r))
        return_trace (false);
    }

    wordSizeCount = word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);

    unsigned ri_count = ri_map.get_population ();
    regionIndices.len = ri_count;
    if (unlikely (!c->extend (this))) return_trace (false);

    for (unsigned r = 0; r < ri_count; r++)
    {
      hb_codepoint_t *idx;
      if (!ri_map.has (r, &idx))
        return_trace (false);
      regionIndices[r] = *idx;
    }

    /* Emit the delta matrix in the new column order. */
    HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();
    for (unsigned int i = 0; i < row_count; i++)
    {
      for (unsigned int r = 0; r < ri_count; r++)
      {
        int delta = rows[i]->arrayZ[ri_map[r]];
        set_item_delta_fast (i, r, delta, delta_bytes, row_size);
      }
    }
    return_trace (true);
  }

  /* Serialize a pruned/remapped copy of `src`: keep only the items in
   * `inner_map`, renumber regions through `region_map`, and re-optimize
   * the per-column widths for the surviving deltas. */
  bool serialize (hb_serialize_context_t *c,
	  const VarData *src,
	  const hb_inc_bimap_t &inner_map,
	  const hb_inc_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);
    itemCount = inner_map.get_next_value ();

    /* Optimize word count */
    unsigned ri_count = src->regionIndices.len;
    enum delta_size_t { kZero=0, kNonWord, kWord };
    hb_vector_t<delta_size_t> delta_sz;
    hb_vector_t<unsigned int> ri_map;	/* maps new index to old index */
    delta_sz.resize (ri_count);
    ri_map.resize (ri_count);
    unsigned int new_word_count = 0;
    unsigned int r;

    const HBUINT8 *src_delta_bytes = src->get_delta_bytes ();
    unsigned src_row_size = src->get_row_size ();
    unsigned src_word_count = src->wordCount ();
    bool     src_long_words = src->longWords ();

    /* Decide whether 32-bit columns are still needed once dropped
     * items are excluded. */
    bool has_long = false;
    if (src_long_words)
    {
      for (r = 0; r < src_word_count; r++)
      {
	for (unsigned old_gid : inner_map.keys())
	{
	  int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
	  if (delta < -65536 || 65535 < delta)
	  {
	    has_long = true;
	    break;
	  }
	}
      }
    }

    signed min_threshold = has_long ? -65536 : -128;
    signed max_threshold = has_long ? +65535 : +127;
    for (r = 0; r < ri_count; r++)
    {
      /* If widths match the source and this column was already narrow,
       * we can stop scanning as soon as any non-zero delta is seen. */
      bool short_circuit = src_long_words == has_long && src_word_count <= r;

      delta_sz[r] = kZero;
      for (unsigned old_gid : inner_map.keys())
      {
	int32_t delta = src->get_item_delta_fast (old_gid, r, src_delta_bytes, src_row_size);
	if (delta < min_threshold || max_threshold < delta)
	{
	  delta_sz[r] = kWord;
	  new_word_count++;
	  break;
	}
	else if (delta != 0)
	{
	  delta_sz[r] = kNonWord;
	  if (short_circuit)
	    break;
	}
      }
    }

    /* Reorder surviving columns: wide first, then narrow. */
    unsigned int word_index = 0;
    unsigned int non_word_index = new_word_count;
    unsigned int new_ri_count = 0;
    for (r = 0; r < ri_count; r++)
      if (delta_sz[r])
      {
	unsigned new_r = (delta_sz[r] == kWord)? word_index++ : non_word_index++;
	ri_map[new_r] = r;
	new_ri_count++;
      }

    wordSizeCount = new_word_count | (has_long ? 0x8000u /* LONG_WORDS */ : 0);

    regionIndices.len = new_ri_count;

    if (unlikely (!c->extend (this))) return_trace (false);

    for (r = 0; r < new_ri_count; r++)
      regionIndices[r] = region_map[src->regionIndices[ri_map[r]]];

    /* Copy deltas row by row through both maps. */
    HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();
    unsigned count = itemCount;
    for (unsigned int i = 0; i < count; i++)
    {
      unsigned int old = inner_map.backward (i);
      for (unsigned int r = 0; r < new_ri_count; r++)
	set_item_delta_fast (i, r,
		     src->get_item_delta_fast (old, ri_map[r],
					       src_delta_bytes, src_row_size),
		     delta_bytes, row_size);
    }

    return_trace (true);
  }

  /* Add to `region_indices` every region that has at least one
   * non-zero delta among the items kept by `inner_map`. */
  void collect_region_refs (hb_set_t &region_indices, const hb_inc_bimap_t &inner_map) const
  {
    const HBUINT8 *delta_bytes = get_delta_bytes ();
    unsigned row_size = get_row_size ();

    for (unsigned int r = 0; r < regionIndices.len; r++)
    {
      unsigned int region = regionIndices.arrayZ[r];
      if (region_indices.has (region)) continue;
      for (hb_codepoint_t old_gid : inner_map.keys())
	if (get_item_delta_fast (old_gid, r, delta_bytes, row_size) != 0)
	{
	  region_indices.add (region);
	  break;
	}
    }
  }

  public:
  /* Start of the packed delta matrix (immediately after regionIndices). */
  const HBUINT8 *get_delta_bytes () const
  { return &StructAfter<HBUINT8> (regionIndices); }

  protected:
  HBUINT8 *get_delta_bytes ()
  { return &StructAfter<HBUINT8> (regionIndices); }

  public:
  /* Read one delta; callers pre-fetch delta_bytes/row_size so the hot
   * loop avoids recomputing them.  Out-of-range access yields 0. */
  int32_t get_item_delta_fast (unsigned int item, unsigned int region,
		       const HBUINT8 *delta_bytes, unsigned row_size) const
  {
    if (unlikely (item >= itemCount || region >= regionIndices.len)) return 0;

    const HBINT8 *p = (const HBINT8 *) delta_bytes + item * row_size;
    unsigned word_count = wordCount ();
    bool is_long = longWords ();
    if (is_long)
    {
      if (region < word_count)
	return ((const HBINT32 *) p)[region];
      else
	return ((const HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count];
    }
    else
    {
      if (region < word_count)
	return ((const HBINT16 *) p)[region];
      else
	return (p + HBINT16::static_size * word_count)[region - word_count];
    }
  }
  int32_t get_item_delta (unsigned int item, unsigned int region) const
  {
     return get_item_delta_fast (item, region,
			 get_delta_bytes (),
			 get_row_size ());
  }

  protected:
  /* Write one delta using the same width layout as the fast getter.
   * No bounds check: only called from serialize() with valid indices. */
  void set_item_delta_fast (unsigned int item, unsigned int region, int32_t delta,
		    HBUINT8 *delta_bytes, unsigned row_size)
  {
    HBINT8 *p = (HBINT8 *) delta_bytes + item * row_size;
    unsigned word_count = wordCount ();
    bool is_long = longWords ();
    if (is_long)
    {
      if (region < word_count)
	((HBINT32 *) p)[region] = delta;
      else
	((HBINT16 *)(p + HBINT32::static_size * word_count))[region - word_count] = delta;
    }
    else
    {
      if (region < word_count)
	((HBINT16 *) p)[region] = delta;
      else
	(p + HBINT16::static_size * word_count)[region - word_count] = delta;
    }
  }
  void set_item_delta (unsigned int item, unsigned int region, int32_t delta)
  {
    set_item_delta_fast (item, region, delta,
		 get_delta_bytes (),
		 get_row_size ());
  }

  /* wordSizeCount: high bit = LONG_WORDS flag, low 15 bits = number of
   * wide (word) delta columns. */
  bool longWords () const { return wordSizeCount & 0x8000u /* LONG_WORDS */; }
  unsigned wordCount () const { return wordSizeCount & 0x7FFFu /* WORD_DELTA_COUNT_MASK */; }

  protected:
  HBUINT16		itemCount;
  HBUINT16		wordSizeCount;
  Array16Of<HBUINT16>	regionIndices;
/*UnsizedArrayOf<HBUINT8>bytesX;*/
  public:
  DEFINE_SIZE_ARRAY (6, regionIndices);
};
   3240 
/* MultiItemVariationData (format 1): delta *tuples* per item, stored in
 * a TupleList that follows the region-index array, with regions drawn
 * from a SparseVarRegionList.  Unlike VarData, each item contributes a
 * whole vector of deltas rather than a single scalar. */
struct MultiVarData
{
  unsigned int get_size () const
  { return min_size
  - regionIndices.min_size + regionIndices.get_size ()
  + StructAfter<CFF2Index> (regionIndices).get_size ();
  }

  /* Accumulate scalar-weighted delta tuples for item `inner` into
   * `out` (one accumulator per output value).  Region scalars may be
   * memoized through `cache`. */
  void get_delta (unsigned int inner,
	  const int *coords, unsigned int coord_count,
	  const SparseVarRegionList &regions,
	  hb_array_t<float> out,
	  hb_scalar_cache_t *cache = nullptr) const
  {
    /* The delta-set index lives immediately after regionIndices. */
    auto &deltaSets = StructAfter<decltype (deltaSetsX)> (regionIndices);

    auto values_iter = deltaSets.fetcher (inner);
    unsigned regionCount = regionIndices.len;
    for (unsigned regionIndex = 0; regionIndex < regionCount; regionIndex++)
    {
      float scalar = regions.evaluate (regionIndices.arrayZ[regionIndex],
			       coords, coord_count,
			       cache);
      values_iter.add_to (out, scalar);
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (format.sanitize (c) &&
	  hb_barrier () &&
	  format == 1 &&
	  regionIndices.sanitize (c) &&
	  hb_barrier () &&
	  StructAfter<decltype (deltaSetsX)> (regionIndices).sanitize (c));
  }

  protected:
  HBUINT8	      format; // 1
  Array16Of<HBUINT16> regionIndices;
  TupleList	      deltaSetsX;
  public:
  DEFINE_SIZE_MIN (8);
};
   3286 
   3287 struct ItemVariationStore
   3288 {
   3289  friend struct item_variations_t;
   3290 
   3291  hb_scalar_cache_t *create_cache () const
   3292  {
   3293 #ifdef HB_NO_VAR
   3294    return hb_scalar_cache_t::create (0);
   3295 #endif
   3296    return hb_scalar_cache_t::create ((this+regions).regionCount);
   3297  }
   3298 
   3299  static void destroy_cache (hb_scalar_cache_t *cache)
   3300  {
   3301    hb_scalar_cache_t::destroy (cache);
   3302  }
   3303 
   3304  private:
   3305  float get_delta (unsigned int outer, unsigned int inner,
   3306 	   const int *coords, unsigned int coord_count,
   3307 	   hb_scalar_cache_t *cache = nullptr) const
   3308  {
   3309 #ifdef HB_NO_VAR
   3310    return 0.f;
   3311 #endif
   3312 
   3313    if (unlikely (outer >= dataSets.len))
   3314      return 0.f;
   3315 
   3316    return (this+dataSets[outer]).get_delta (inner,
   3317 				     coords, coord_count,
   3318 				     this+regions,
   3319 				     cache);
   3320  }
   3321 
   3322  public:
   3323  float get_delta (unsigned int index,
   3324 	   const int *coords, unsigned int coord_count,
   3325 	   hb_scalar_cache_t *cache = nullptr) const
   3326  {
   3327    unsigned int outer = index >> 16;
   3328    unsigned int inner = index & 0xFFFF;
   3329    return get_delta (outer, inner, coords, coord_count, cache);
   3330  }
   3331  float get_delta (unsigned int index,
   3332 	   hb_array_t<const int> coords,
   3333 	   hb_scalar_cache_t *cache = nullptr) const
   3334  {
   3335    return get_delta (index,
   3336 	      coords.arrayZ, coords.length,
   3337 	      cache);
   3338  }
   3339 
   3340  bool sanitize (hb_sanitize_context_t *c) const
   3341  {
   3342 #ifdef HB_NO_VAR
   3343    return true;
   3344 #endif
   3345 
   3346    TRACE_SANITIZE (this);
   3347    return_trace (c->check_struct (this) &&
   3348 	  hb_barrier () &&
   3349 	  format == 1 &&
   3350 	  regions.sanitize (c, this) &&
   3351 	  dataSets.sanitize (c, this));
   3352  }
   3353 
   3354  bool serialize (hb_serialize_context_t *c,
   3355                  bool has_long,
   3356                  const hb_vector_t<hb_tag_t>& axis_tags,
   3357                  const hb_vector_t<const hb_hashmap_t<hb_tag_t, Triple>*>& region_list,
   3358                  const hb_vector_t<delta_row_encoding_t>& vardata_encodings)
   3359  {
   3360    TRACE_SERIALIZE (this);
   3361 #ifdef HB_NO_VAR
   3362    return_trace (false);
   3363 #endif
   3364    if (unlikely (!c->extend_min (this))) return_trace (false);
   3365 
   3366    format = 1;
   3367    if (!regions.serialize_serialize (c, axis_tags, region_list))
   3368      return_trace (false);
   3369 
   3370    unsigned num_var_data = vardata_encodings.length;
   3371    if (!num_var_data) return_trace (false);
   3372    if (unlikely (!c->check_assign (dataSets.len, num_var_data,
   3373                                    HB_SERIALIZE_ERROR_INT_OVERFLOW)))
   3374      return_trace (false);
   3375 
   3376    if (unlikely (!c->extend (dataSets))) return_trace (false);
   3377    for (unsigned i = 0; i < num_var_data; i++)
   3378      if (!dataSets[i].serialize_serialize (c, has_long, vardata_encodings[i].items))
   3379        return_trace (false);
   3380 
   3381    return_trace (true);
   3382  }
   3383 
   3384  bool serialize (hb_serialize_context_t *c,
   3385 	  const ItemVariationStore *src,
   3386 	  const hb_array_t <const hb_inc_bimap_t> &inner_maps)
   3387  {
   3388    TRACE_SERIALIZE (this);
   3389 #ifdef HB_NO_VAR
   3390    return_trace (false);
   3391 #endif
   3392 
   3393    if (unlikely (!c->extend_min (this))) return_trace (false);
   3394 
   3395    unsigned int set_count = 0;
   3396    for (unsigned int i = 0; i < inner_maps.length; i++)
   3397      if (inner_maps[i].get_population ())
   3398 set_count++;
   3399 
   3400    format = 1;
   3401 
   3402    const auto &src_regions = src+src->regions;
   3403 
   3404    hb_set_t region_indices;
   3405    for (unsigned int i = 0; i < inner_maps.length; i++)
   3406      (src+src->dataSets[i]).collect_region_refs (region_indices, inner_maps[i]);
   3407 
   3408    if (region_indices.in_error ())
   3409      return_trace (false);
   3410 
   3411    region_indices.del_range ((src_regions).regionCount, hb_set_t::INVALID);
   3412 
   3413    /* TODO use constructor when our data-structures support that. */
   3414    hb_inc_bimap_t region_map;
   3415    + hb_iter (region_indices)
   3416    | hb_apply ([&region_map] (unsigned _) { region_map.add(_); })
   3417    ;
   3418    if (region_map.in_error())
   3419      return_trace (false);
   3420 
   3421    if (unlikely (!regions.serialize_serialize (c, &src_regions, region_map)))
   3422      return_trace (false);
   3423 
   3424    dataSets.len = set_count;
   3425    if (unlikely (!c->extend (dataSets))) return_trace (false);
   3426 
   3427    /* TODO: The following code could be simplified when
   3428     * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize () */
   3429    unsigned int set_index = 0;
   3430    for (unsigned int i = 0; i < inner_maps.length; i++)
   3431    {
   3432      if (!inner_maps[i].get_population ()) continue;
   3433      if (unlikely (!dataSets[set_index++]
   3434 	     .serialize_serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
   3435 return_trace (false);
   3436    }
   3437 
   3438    return_trace (true);
   3439  }
   3440 
  /* Duplicate this variation store into the serializer unchanged:
   * build an identity inner map (0..itemCount-1) for every data set,
   * then delegate to serialize ().  Returns the copy, or nullptr on
   * allocation/serialization failure. */
  ItemVariationStore *copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    hb_vector_t <hb_inc_bimap_t> inner_maps;
    unsigned count = dataSets.len;
    for (unsigned i = 0; i < count; i++)
    {
      hb_inc_bimap_t *map = inner_maps.push ();
      /* push () may have failed to grow the vector; surface that
       * through the serializer's error state. */
      if (unlikely (!c->propagate_error(inner_maps)))
        return_trace(nullptr);
      auto &data = this+dataSets[i];

      /* Identity mapping: keep every item of this data set. */
      unsigned itemCount = data.get_item_count ();
      for (unsigned j = 0; j < itemCount; j++)
	map->add (j);
    }

    if (unlikely (!out->serialize (c, this, inner_maps))) return_trace (nullptr);

    return_trace (out);
  }
   3465 
  /* Serialize a compacted copy of this store for subsetting, using the
   * caller-provided per-major inner maps to drop/renumber items.
   * Succeeds only if serialization worked and at least one data set
   * survived.  Under HB_NO_VAR this always reports failure. */
  bool subset (hb_subset_context_t *c, const hb_array_t<const hb_inc_bimap_t> &inner_maps) const
  {
    TRACE_SUBSET (this);
#ifdef HB_NO_VAR
    return_trace (false);
#endif

    ItemVariationStore *varstore_prime = c->serializer->start_embed<ItemVariationStore> ();
    if (unlikely (!varstore_prime)) return_trace (false);

    varstore_prime->serialize (c->serializer, this, inner_maps);

    return_trace (
        !c->serializer->in_error()
        && varstore_prime->dataSets);
  }
   3482 
  /* Number of region indexes referenced by data set `major`.
   * Compiled out to 0 under HB_NO_VAR. */
  unsigned int get_region_index_count (unsigned int major) const
  {
#ifdef HB_NO_VAR
    return 0;
#endif
    return (this+dataSets[major]).get_region_index_count ();
  }
   3490 
  /* Evaluate the region scalars of data set `major` at the given
   * normalized coordinates into `scalars`.  Under HB_NO_VAR every
   * scalar is written as 0 instead. */
  void get_region_scalars (unsigned int major,
			   const int *coords, unsigned int coord_count,
			   float *scalars /*OUT*/,
			   unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[major]).get_region_scalars (coords, coord_count,
					       this+regions,
					       &scalars[0], num_scalars);
  }
   3506 
  /* Number of VarData subtables (i.e. valid "outer" indices). */
  unsigned int get_sub_table_count () const
  {
#ifdef HB_NO_VAR
    return 0;
#endif
    return dataSets.len;
  }
   3514 
  /* The i'th VarData subtable; the Null object under HB_NO_VAR. */
  const VarData& get_sub_table (unsigned i) const
  {
#ifdef HB_NO_VAR
    return Null (VarData);
#endif
    return this+dataSets[i];
  }
   3522 
  /* The shared variation-region list; the Null object under HB_NO_VAR. */
  const VarRegionList& get_region_list () const
  {
#ifdef HB_NO_VAR
    return Null (VarRegionList);
#endif
    return this+regions;
  }
   3530 
  protected:
  HBUINT16				format;		/* Format identifier--format = 1 */
  Offset32To<VarRegionList>		regions;	/* Offset to the variation region list,
							 * from beginning of this table. */
  Array16OfOffset32To<VarData>		dataSets;	/* Offsets to the per-major item
							 * variation data subtables. */
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
};
   3538 
/* Variant of ItemVariationStore that yields a tuple of float deltas per
 * variation index (via MultiVarData) and stores its regions sparsely
 * (SparseVarRegionList).  Layout: format (=1), 32-bit offset to the
 * region list, then a 16-bit array of 32-bit offsets to data subtables. */
struct MultiItemVariationStore
{
  /* Allocate a scalar cache sized to the region count; `static_cache`
   * lets the caller supply pre-allocated storage.  Under HB_NO_VAR a
   * zero-sized cache is returned and the rest is dead code. */
  hb_scalar_cache_t *create_cache (hb_scalar_cache_t *static_cache = nullptr) const
  {
#ifdef HB_NO_VAR
    return hb_scalar_cache_t::create (0);
#endif
    auto &r = this+regions;
    unsigned count = r.regions.len;

    return hb_scalar_cache_t::create (count, static_cache);
  }

  static void destroy_cache (hb_scalar_cache_t *cache,
			     hb_scalar_cache_t *static_cache = nullptr)
  {
    hb_scalar_cache_t::destroy (cache, static_cache);
  }

  private:
  /* Fetch the delta tuple for (outer, inner) at the given normalized
   * coordinates into `out`.  An out-of-range outer index is a no-op;
   * under HB_NO_VAR the whole function is a no-op. */
  void get_delta (unsigned int outer, unsigned int inner,
		  const int *coords, unsigned int coord_count,
		  hb_array_t<float> out,
		  hb_scalar_cache_t *cache = nullptr) const
  {
#ifdef HB_NO_VAR
    return;
#endif

    if (unlikely (outer >= dataSets.len))
      return;

    return (this+dataSets[outer]).get_delta (inner,
					     coords, coord_count,
					     this+regions,
					     out,
					     cache);
  }

  public:
  /* Same, taking a packed 16.16 outer.inner variation index. */
  void get_delta (unsigned int index,
		  const int *coords, unsigned int coord_count,
		  hb_array_t<float> out,
		  hb_scalar_cache_t *cache = nullptr) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    get_delta (outer, inner, coords, coord_count, out, cache);
  }
  /* Convenience overload taking the coords as an hb_array_t. */
  void get_delta (unsigned int index,
		  hb_array_t<const int> coords,
		  hb_array_t<float> out,
		  hb_scalar_cache_t *cache = nullptr) const
  {
    return get_delta (index,
		      coords.arrayZ, coords.length,
		      out,
		      cache);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
#ifdef HB_NO_VAR
    return true;
#endif

    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
		  format == 1 &&
		  regions.sanitize (c, this) &&
		  dataSets.sanitize (c, this));
  }

  protected:
  HBUINT16				format; // 1
  Offset32To<SparseVarRegionList>	regions;	/* Offset to sparse region list. */
  Array16OfOffset32To<MultiVarData>	dataSets;	/* Per-outer-index data subtables. */
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, dataSets);
};
   3620 
/* Shared layout of DeltaSetIndexMap formats 0 and 1; they differ only in
 * the width of mapCount (HBUINT16 vs HBUINT32), selected via MapCountT.
 * Each entry packs an outer/inner delta-set index pair into `width`
 * (1..4) big-endian bytes; the low `innerBitCount` (1..16) bits hold the
 * inner index, the remaining high bits the outer index. */
template <typename MapCountT>
struct DeltaSetIndexMapFormat01
{
  friend struct DeltaSetIndexMap;

  unsigned get_size () const
  { return min_size + mapCount * get_width (); }

  private:
  DeltaSetIndexMapFormat01* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (this));
  }

  /* Serialize from a plan exposing get_width (), get_inner_bit_count ()
   * and get_output_map ().  Each output value is a packed 16.16
   * outer.inner index; zero values leave their bytes all-zero. */
  template <typename T>
  bool serialize (hb_serialize_context_t *c, const T &plan)
  {
    unsigned int width = plan.get_width ();
    unsigned int inner_bit_count = plan.get_inner_bit_count ();
    const hb_array_t<const uint32_t> output_map = plan.get_output_map ();

    TRACE_SERIALIZE (this);
    /* Reject widths outside 1..4 and inner bit counts outside 1..16. */
    if (unlikely (output_map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
      return_trace (false);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    /* High nibble: width-1; low nibble: innerBitCount-1. */
    entryFormat = ((width-1)<<4)|(inner_bit_count-1);
    mapCount = output_map.length;
    HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
    if (unlikely (!p)) return_trace (false);
    for (unsigned int i = 0; i < output_map.length; i++)
    {
      unsigned int v = output_map.arrayZ[i];
      if (v)
      {
	unsigned int outer = v >> 16;
	unsigned int inner = v & 0xFFFF;
	unsigned int u = (outer << inner_bit_count) | inner;
	/* Store u big-endian into `width` bytes. */
	for (unsigned int w = width; w > 0;)
	{
	  p[--w] = u;
	  u >>= 8;
	}
      }
      p += width;
    }
    return_trace (true);
  }

  HB_ALWAYS_INLINE
  uint32_t map (unsigned int v) const /* Returns 16.16 outer.inner. */
  {
    /* If count is zero, pass value unchanged.  This takes
     * care of direct mapping for advance map. */
    if (!mapCount)
      return v;
    return _map (v);
  }

  HB_HOT
  uint32_t _map (unsigned int v) const /* Returns 16.16 outer.inner. */
  {
    /* Out-of-range indices use the last mapping entry. */
    if (v >= mapCount)
      v = mapCount - 1;

    unsigned int u = 0;
    { /* Fetch it. */
      unsigned int w = get_width ();
      const HBUINT8 *p = mapDataZ.arrayZ + w * v;
      for (; w; w--)
	u = (u << 8) + *p++;
    }

    { /* Repack it: split at innerBitCount, re-pack as 16.16. */
      unsigned int n = get_inner_bit_count ();
      unsigned int outer = u >> n;
      unsigned int inner = u & ((1 << n) - 1);
      u = (outer<<16) | inner;
    }

    return u;
  }

  unsigned get_map_count () const       { return mapCount; }
  unsigned get_width () const           { return ((entryFormat >> 4) & 3) + 1; }
  unsigned get_inner_bit_count () const { return (entryFormat & 0xF) + 1; }


  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  hb_barrier () &&
                  c->check_range (mapDataZ.arrayZ,
                                  mapCount,
                                  get_width ()));
  }

  protected:
  HBUINT8       format;         /* Format identifier--format = 0 */
  HBUINT8       entryFormat;    /* A packed field that describes the compressed
                                 * representation of delta-set indices. */
  MapCountT     mapCount;       /* The number of mapping entries. */
  UnsizedArrayOf<HBUINT8>
                mapDataZ;       /* The delta-set index mapping data. */

  public:
  DEFINE_SIZE_ARRAY (2+MapCountT::static_size, mapDataZ);
};
   3731 
/* Dispatcher over the two DeltaSetIndexMap formats: format 0 carries a
 * 16-bit map count, format 1 a 32-bit one.  Serialization picks the
 * smallest format that fits the output map length. */
struct DeltaSetIndexMap
{
  template <typename T>
  bool serialize (hb_serialize_context_t *c, const T &plan)
  {
    TRACE_SERIALIZE (this);
    unsigned length = plan.get_output_map ().length;
    /* Format 0 if the count fits in 16 bits, else format 1. */
    u.format.v = length <= 0xFFFF ? 0 : 1;
    switch (u.format.v) {
    case 0: hb_barrier (); return_trace (u.format0.serialize (c, plan));
    case 1: hb_barrier (); return_trace (u.format1.serialize (c, plan));
    default:return_trace (false);
    }
  }

  /* Map a delta-set index to packed 16.16 outer.inner; an unknown
   * format passes the value through unchanged. */
  uint32_t map (unsigned v) const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); return (u.format0.map (v));
    case 1: hb_barrier (); return (u.format1.map (v));
    default:return v;
    }
  }

  unsigned get_map_count () const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); return u.format0.get_map_count ();
    case 1: hb_barrier (); return u.format1.get_map_count ();
    default:return 0;
    }
  }

  unsigned get_width () const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); return u.format0.get_width ();
    case 1: hb_barrier (); return u.format1.get_width ();
    default:return 0;
    }
  }

  unsigned get_inner_bit_count () const
  {
    switch (u.format.v) {
    case 0: hb_barrier (); return u.format0.get_inner_bit_count ();
    case 1: hb_barrier (); return u.format1.get_inner_bit_count ();
    default:return 0;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Validate the format byte before trusting the union arm. */
    if (!u.format.v.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format.v) {
    case 0: hb_barrier (); return_trace (u.format0.sanitize (c));
    case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
    default:return_trace (true);
    }
  }

  DeltaSetIndexMap* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    switch (u.format.v) {
    case 0: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format0.copy (c)));
    case 1: hb_barrier (); return_trace (reinterpret_cast<DeltaSetIndexMap *> (u.format1.copy (c)));
    default:return_trace (nullptr);
    }
  }

  protected:
  union {
  struct { HBUINT8 v; }             format;         /* Format identifier */
  DeltaSetIndexMapFormat01<HBUINT16> format0;       /* 16-bit mapCount */
  DeltaSetIndexMapFormat01<HBUINT32> format1;       /* 32-bit mapCount */
  } u;
  public:
  DEFINE_SIZE_UNION (1, format.v);
};
   3814 
   3815 
   3816 struct ItemVarStoreInstancer
   3817 {
   3818  ItemVarStoreInstancer (const ItemVariationStore *varStore_,
   3819 		 const DeltaSetIndexMap *varIdxMap,
   3820 		 hb_array_t<const int> coords,
   3821 		 hb_scalar_cache_t *cache = nullptr) :
   3822    varStore (varStore_), varIdxMap (varIdxMap), coords (coords), cache (cache)
   3823  {
   3824    if (!varStore)
   3825      varStore = &Null(ItemVariationStore);
   3826  }
   3827 
   3828  operator bool () const { return varStore && bool (coords); }
   3829 
   3830  float operator[] (uint32_t varIdx) const
   3831  { return (*this) (varIdx); }
   3832 
   3833  float operator() (uint32_t varIdx, unsigned short offset = 0) const
   3834  {
   3835   if (!coords || varIdx == VarIdx::NO_VARIATION)
   3836     return 0.f;
   3837 
   3838    varIdx += offset;
   3839    if (varIdxMap)
   3840      varIdx = varIdxMap->map (varIdx);
   3841    return varStore->get_delta (varIdx, coords, cache);
   3842  }
   3843 
   3844  const ItemVariationStore *varStore;
   3845  const DeltaSetIndexMap *varIdxMap;
   3846  hb_array_t<const int> coords;
   3847  hb_scalar_cache_t *cache;
   3848 };
   3849 
   3850 struct MultiItemVarStoreInstancer
   3851 {
   3852  MultiItemVarStoreInstancer (const MultiItemVariationStore *varStore,
   3853 		      const DeltaSetIndexMap *varIdxMap,
   3854 		      hb_array_t<const int> coords,
   3855 		      hb_scalar_cache_t *cache = nullptr) :
   3856    varStore (varStore), varIdxMap (varIdxMap), coords (coords), cache (cache)
   3857  {
   3858    if (!varStore)
   3859      varStore = &Null(MultiItemVariationStore);
   3860  }
   3861 
   3862  operator bool () const { return varStore && bool (coords); }
   3863 
   3864  float operator[] (uint32_t varIdx) const
   3865  {
   3866    float v = 0;
   3867    (*this) (hb_array (&v, 1), varIdx);
   3868    return v;
   3869  }
   3870 
   3871  void operator() (hb_array_t<float> out, uint32_t varIdx, unsigned short offset = 0) const
   3872  {
   3873    if (coords && varIdx != VarIdx::NO_VARIATION)
   3874    {
   3875      varIdx += offset;
   3876      if (varIdxMap)
   3877 varIdx = varIdxMap->map (varIdx);
   3878      varStore->get_delta (varIdx, coords, out, cache);
   3879    }
   3880    else
   3881      for (unsigned i = 0; i < out.length; i++)
   3882        out.arrayZ[i] = 0.f;
   3883  }
   3884 
   3885  const MultiItemVariationStore *varStore;
   3886  const DeltaSetIndexMap *varIdxMap;
   3887  hb_array_t<const int> coords;
   3888  hb_scalar_cache_t *cache;
   3889 };
   3890 
   3891 
   3892 /*
   3893 * Feature Variations
   3894 */
/* Verdicts returned by keep_with_variations () when instancing a
 * ConditionSet (see ConditionAxisRange::keep_with_variations). */
enum Cond_with_Var_flag_t
{
  KEEP_COND_WITH_VAR = 0,	/* Filter range still narrower than the axis range:
				 * keep the condition. */
  KEEP_RECORD_WITH_VAR = 1,	/* Condition holds over the whole remaining axis
				 * range: keep the record, condition droppable. */
  DROP_COND_WITH_VAR = 2,	/* Condition met and axis pinned to a point:
				 * drop the condition. */
  DROP_RECORD_WITH_VAR = 3,	/* Condition can never be met: drop the whole
				 * record. */
};
   3902 
   3903 struct Condition;
   3904 
   3905 template <typename Instancer>
   3906 static bool
   3907 _hb_recurse_condition_evaluate (const struct Condition &condition,
   3908 			const int *coords,
   3909 			unsigned int coord_len,
   3910 			Instancer *instancer);
   3911 
/* Condition format 1: true when the coordinate of `axisIndex` lies within
 * [filterRangeMinValue, filterRangeMaxValue]. */
struct ConditionAxisRange
{
  friend struct Condition;

  /* Subset: remap axisIndex into the subset plan's axis space and
   * renormalize the filter range against the axis's new limits. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    const hb_map_t *index_map = &c->plan->axes_index_map;
    /* No axis remapping in this plan: keep the condition as-is. */
    if (index_map->is_empty ()) return_trace (true);

    const hb_map_t& axes_old_index_tag_map = c->plan->axes_old_index_tag_map;
    hb_codepoint_t *axis_tag;
    /* Axis unknown or dropped by the plan: signal failure so the caller
     * drops this condition. */
    if (!axes_old_index_tag_map.has (axisIndex, &axis_tag) ||
        !index_map->has (axisIndex))
      return_trace (false);

    const hb_hashmap_t<hb_tag_t, Triple>& normalized_axes_location = c->plan->axes_location;
    Triple axis_limit{-1.0, 0.0, 1.0};
    Triple *normalized_limit;
    if (normalized_axes_location.has (*axis_tag, &normalized_limit))
      axis_limit = *normalized_limit;

    const hb_hashmap_t<hb_tag_t, TripleDistances>& axes_triple_distances = c->plan->axes_triple_distances;
    TripleDistances axis_triple_distances{1.0, 1.0};
    TripleDistances *triple_dists;
    if (axes_triple_distances.has (*axis_tag, &triple_dists))
      axis_triple_distances = *triple_dists;

    /* Re-express the filter bounds in the instanced axis's normalized space. */
    float normalized_min = renormalizeValue ((double) filterRangeMinValue.to_float (), axis_limit, axis_triple_distances, false);
    float normalized_max = renormalizeValue ((double) filterRangeMaxValue.to_float (), axis_limit, axis_triple_distances, false);
    out->filterRangeMinValue.set_float (normalized_min);
    out->filterRangeMaxValue.set_float (normalized_max);

    return_trace (c->serializer->check_assign (out->axisIndex, index_map->get (axisIndex),
                                               HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  private:
  /* Classify this condition against the instancing plan (see
   * Cond_with_Var_flag_t); records the filter range in condition_map
   * so callers can detect duplicate condition sets. */
  Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                             hb_map_t *condition_map /* OUT */) const
  {
    //invalid axis index, drop the entire record
    if (!c->axes_index_tag_map->has (axisIndex))
      return DROP_RECORD_WITH_VAR;

    hb_tag_t axis_tag = c->axes_index_tag_map->get (axisIndex);

    /* Full default range unless the user limited/pinned this axis. */
    Triple axis_range (-1.0, 0.0, 1.0);
    Triple *axis_limit;
    bool axis_set_by_user = false;
    if (c->axes_location->has (axis_tag, &axis_limit))
    {
      axis_range = *axis_limit;
      axis_set_by_user = true;
    }

    float axis_min_val = axis_range.minimum;
    float axis_default_val = axis_range.middle;
    float axis_max_val = axis_range.maximum;

    float filter_min_val = filterRangeMinValue.to_float ();
    float filter_max_val = filterRangeMaxValue.to_float ();

    /* Condition does not hold at the default location: the substitution
     * cannot be applied statically. */
    if (axis_default_val < filter_min_val ||
        axis_default_val > filter_max_val)
      c->apply = false;

    //condition not met, drop the entire record
    if (axis_min_val > filter_max_val || axis_max_val < filter_min_val ||
        filter_min_val > filter_max_val)
      return DROP_RECORD_WITH_VAR;

    //condition met and axis pinned, drop the condition
    if (axis_set_by_user && axis_range.is_point ())
      return DROP_COND_WITH_VAR;

    if (filter_max_val != axis_max_val || filter_min_val != axis_min_val)
    {
      // add axisIndex->value into the hashmap so we can check if the record is
      // unique with variations
      uint16_t int_filter_max_val = (uint16_t) filterRangeMaxValue.to_int ();
      uint16_t int_filter_min_val = (uint16_t) filterRangeMinValue.to_int ();
      hb_codepoint_t val = (int_filter_max_val << 16) + int_filter_min_val;

      condition_map->set (axisIndex, val);
      return KEEP_COND_WITH_VAR;
    }
    return KEEP_RECORD_WITH_VAR;
  }

  /* A missing coordinate (axisIndex >= coord_len) evaluates at 0,
   * i.e. the axis default position. */
  template <typename Instancer>
  bool evaluate (const int *coords, unsigned int coord_len,
		 Instancer *instancer HB_UNUSED) const
  {
    int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
    return filterRangeMinValue.to_int () <= coord && coord <= filterRangeMaxValue.to_int ();
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 1 */
  HBUINT16	axisIndex;	/* Index of the variation axis. */
  F2DOT14	filterRangeMinValue;	/* Minimum normalized axis value. */
  F2DOT14	filterRangeMaxValue;	/* Maximum normalized axis value. */
  public:
  DEFINE_SIZE_STATIC (8);
};
   4027 
/* Condition format 2: true when defaultValue plus the instanced delta at
 * varIdx is positive. */
struct ConditionValue
{
  friend struct Condition;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  private:
  template <typename Instancer>
  bool evaluate (const int *coords, unsigned int coord_len,
		 Instancer *instancer) const
  {
    signed value = defaultValue;
    /* The float delta is accumulated into a signed int, so the sum is
     * truncated toward zero before the comparison. */
    value += (*instancer)[varIdx];
    return value > 0;
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               bool insert_catch_all) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 2 */
  HBINT16	defaultValue;   /* Value at default instance. */
  VarIdx	varIdx;		/* Variation index */
  public:
  DEFINE_SIZE_STATIC (8);
};
   4071 
   4072 struct ConditionAnd
   4073 {
   4074  friend struct Condition;
   4075 
   4076  bool subset (hb_subset_context_t *c) const
   4077  {
   4078    TRACE_SUBSET (this);
   4079    // TODO(subset)
   4080    return_trace (false);
   4081  }
   4082 
   4083  private:
   4084  template <typename Instancer>
   4085  bool evaluate (const int *coords, unsigned int coord_len,
   4086 	 Instancer *instancer) const
   4087  {
   4088    unsigned int count = conditions.len;
   4089    for (unsigned int i = 0; i < count; i++)
   4090      if (!_hb_recurse_condition_evaluate (this+conditions.arrayZ[i],
   4091 				   coords, coord_len,
   4092 				   instancer))
   4093 return false;
   4094    return true;
   4095  }
   4096 
   4097  bool subset (hb_subset_context_t *c,
   4098               hb_subset_layout_context_t *l,
   4099               bool insert_catch_all) const
   4100  {
   4101    TRACE_SUBSET (this);
   4102    // TODO(subset)
   4103    return_trace (false);
   4104  }
   4105 
   4106  bool sanitize (hb_sanitize_context_t *c) const
   4107  {
   4108    TRACE_SANITIZE (this);
   4109    return_trace (conditions.sanitize (c, this));
   4110  }
   4111 
   4112  protected:
   4113  HBUINT16	format;		/* Format identifier--format = 3 */
   4114  Array8OfOffset24To<struct Condition>	conditions;
   4115  public:
   4116  DEFINE_SIZE_ARRAY (3, conditions);
   4117 };
   4118 
   4119 struct ConditionOr
   4120 {
   4121  friend struct Condition;
   4122 
   4123  bool subset (hb_subset_context_t *c) const
   4124  {
   4125    TRACE_SUBSET (this);
   4126    // TODO(subset)
   4127    return_trace (false);
   4128  }
   4129 
   4130  private:
   4131  template <typename Instancer>
   4132  bool evaluate (const int *coords, unsigned int coord_len,
   4133 	 Instancer *instancer) const
   4134  {
   4135    unsigned int count = conditions.len;
   4136    for (unsigned int i = 0; i < count; i++)
   4137      if (_hb_recurse_condition_evaluate (this+conditions.arrayZ[i],
   4138 				  coords, coord_len,
   4139 				  instancer))
   4140 return true;
   4141    return false;
   4142  }
   4143 
   4144  bool subset (hb_subset_context_t *c,
   4145               hb_subset_layout_context_t *l,
   4146               bool insert_catch_all) const
   4147  {
   4148    TRACE_SUBSET (this);
   4149    // TODO(subset)
   4150    return_trace (false);
   4151  }
   4152 
   4153  bool sanitize (hb_sanitize_context_t *c) const
   4154  {
   4155    TRACE_SANITIZE (this);
   4156    return_trace (conditions.sanitize (c, this));
   4157  }
   4158 
   4159  protected:
   4160  HBUINT16	format;		/* Format identifier--format = 4 */
   4161  Array8OfOffset24To<struct Condition>	conditions;
   4162  public:
   4163  DEFINE_SIZE_ARRAY (3, conditions);
   4164 };
   4165 
/* Condition format 5: logical negation of the referenced condition. */
struct ConditionNegate
{
  friend struct Condition;

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  private:
  template <typename Instancer>
  bool evaluate (const int *coords, unsigned int coord_len,
		 Instancer *instancer) const
  {
    /* Recurse through the out-of-line helper since Condition is
     * incomplete at this point. */
    return !_hb_recurse_condition_evaluate (this+condition,
					    coords, coord_len,
					    instancer);
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               bool insert_catch_all) const
  {
    TRACE_SUBSET (this);
    // TODO(subset)
    return_trace (false);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (condition.sanitize (c, this));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 5 */
  Offset24To<struct Condition>	condition;	/* Condition being negated. */
  public:
  DEFINE_SIZE_STATIC (5);
};
   4208 
/* Tagged dispatcher over the condition formats: 1 = axis range,
 * 2 = value, 3 = and, 4 = or, 5 = negate.  Unknown formats evaluate
 * to false but sanitize as valid (forward compatibility). */
struct Condition
{
  template <typename Instancer>
  bool evaluate (const int *coords, unsigned int coord_len,
		 Instancer *instancer) const
  {
    switch (u.format.v) {
    case 1: hb_barrier (); return u.format1.evaluate (coords, coord_len, instancer);
    case 2: hb_barrier (); return u.format2.evaluate (coords, coord_len, instancer);
    case 3: hb_barrier (); return u.format3.evaluate (coords, coord_len, instancer);
    case 4: hb_barrier (); return u.format4.evaluate (coords, coord_len, instancer);
    case 5: hb_barrier (); return u.format5.evaluate (coords, coord_len, instancer);
    default:return false;
    }
  }

  /* Only the axis-range format participates in instancing decisions;
   * other formats keep the condition and force dynamic application. */
  Cond_with_Var_flag_t keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                             hb_map_t *condition_map /* OUT */) const
  {
    switch (u.format.v) {
    case 1: hb_barrier (); return u.format1.keep_with_variations (c, condition_map);
    // TODO(subset)
    default: c->apply = false; return KEEP_COND_WITH_VAR;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    if (unlikely (!c->may_dispatch (this, &u.format.v))) return c->no_dispatch_return_value ();
    TRACE_DISPATCH (this, u.format.v);
    switch (u.format.v) {
    case 1: hb_barrier (); return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: hb_barrier (); return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: hb_barrier (); return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
    case 4: hb_barrier (); return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
    case 5: hb_barrier (); return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* Validate the format field before trusting the union arm. */
    if (!u.format.v.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format.v) {
    case 1: hb_barrier (); return_trace (u.format1.sanitize (c));
    case 2: hb_barrier (); return_trace (u.format2.sanitize (c));
    case 3: hb_barrier (); return_trace (u.format3.sanitize (c));
    case 4: hb_barrier (); return_trace (u.format4.sanitize (c));
    case 5: hb_barrier (); return_trace (u.format5.sanitize (c));
    default:return_trace (true);
    }
  }

  protected:
  union {
  struct { HBUINT16 v; }	format;		/* Format identifier */
  ConditionAxisRange	format1;
  ConditionValue	format2;
  ConditionAnd		format3;
  ConditionOr		format4;
  ConditionNegate	format5;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format.v);
};
   4277 
/* Out-of-line trampoline: Condition is only forward-declared where the
 * composite condition formats (And/Or/Negate) are defined, so they recurse
 * through this helper instead of calling Condition::evaluate directly. */
template <typename Instancer>
bool
_hb_recurse_condition_evaluate (const struct Condition &condition,
				const int *coords,
				unsigned int coord_len,
				Instancer *instancer)
{
  return condition.evaluate (coords, coord_len, instancer);
}
   4287 
/* A shared list of conditions (32-bit count, 32-bit offsets), indexable
 * by condition index. */
struct ConditionList
{
  const Condition& operator[] (unsigned i) const
  { return this+conditions[i]; }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  Array32OfOffset32To<Condition> conditions;
  public:
  DEFINE_SIZE_ARRAY (4, conditions);
};
   4304 
/* A conjunction of Conditions guarding a feature-variation record: the
 * record applies only when every condition evaluates true (an empty set
 * is vacuously true). */
struct ConditionSet
{
  bool evaluate (const int *coords, unsigned int coord_len,
		 ItemVarStoreInstancer *instancer) const
  {
    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
      if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len, instancer))
	return false;
    return true;
  }

  /* For instancing: classify each condition against the plan, record
   * which condition indices survive, and detect duplicate or universal
   * (always-applying) condition sets. */
  void keep_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    /* condition_map collects axisIndex->range pairs for the uniqueness
     * check; owned via shared_ptr so the hashmap key keeps it alive. */
    hb_map_t *condition_map = hb_map_create ();
    if (unlikely (!condition_map)) return;
    hb::shared_ptr<hb_map_t> p {condition_map};

    /* cond_set collects indices of conditions that must be kept. */
    hb_set_t *cond_set = hb_set_create ();
    if (unlikely (!cond_set)) return;
    hb::shared_ptr<hb_set_t> s {cond_set};

    c->apply = true;
    bool should_keep = false;
    unsigned num_kept_cond = 0, cond_idx = 0;
    for (const auto& offset : conditions)
    {
      Cond_with_Var_flag_t ret = (this+offset).keep_with_variations (c, condition_map);
      // condition is not met or condition out of range, drop the entire record
      if (ret == DROP_RECORD_WITH_VAR)
        return;

      if (ret == KEEP_COND_WITH_VAR)
      {
        should_keep = true;
        cond_set->add (cond_idx);
        num_kept_cond++;
      }

      if (ret == KEEP_RECORD_WITH_VAR)
        should_keep = true;

      cond_idx++;
    }

    if (!should_keep) return;

    //check if condition_set is unique with variations
    if (c->conditionset_map->has (p))
      //duplicate found, drop the entire record
      return;

    c->conditionset_map->set (p, 1);
    c->record_cond_idx_map->set (c->cur_record_idx, s);
    /* Record kept but every condition dropped: it applies everywhere. */
    if (should_keep && num_kept_cond == 0)
      c->universal = true;
  }

  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l,
               bool insert_catch_all) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    /* Catch-all record: serialize an empty (always-true) condition set. */
    if (insert_catch_all) return_trace (true);

    hb_set_t *retained_cond_set = nullptr;
    if (l->feature_record_cond_idx_map != nullptr)
      retained_cond_set = l->feature_record_cond_idx_map->get (l->cur_feature_var_record_idx);

    unsigned int count = conditions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      /* Skip conditions the instancing pass decided to drop. */
      if (retained_cond_set != nullptr && !retained_cond_set->has (i))
        continue;
      subset_offset_array (c, out->conditions, this) (conditions[i]);
    }

    return_trace (bool (out->conditions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, this));
  }

  protected:
  Array16OfOffset32To<Condition>	conditions;
  public:
  DEFINE_SIZE_ARRAY (2, conditions);
};
   4399 
struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  /* Adds the lookup indices referenced by the substitute Feature to
   * lookup_indexes. */
  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  /* Adds featureIndex to feature_indexes if the substitute Feature uses any
   * of the given lookups. */
  void closure_features (const void *base,
		 const hb_map_t *lookup_indexes,
		 hb_set_t       *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  /* If this record's feature index is among the retained feature_indices,
   * registers the substitute Feature in the substitution map and marks the
   * index for inclusion in the catch-all record. */
  void collect_feature_substitutes_with_variations (hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
                                                    hb_set_t& catch_all_record_feature_idxes,
                                                    const hb_set_t *feature_indices,
                                                    const void *base) const
  {
    if (feature_indices->has (featureIndex))
    {
      feature_substitutes_map->set (featureIndex, &(base+feature));
      catch_all_record_feature_idxes.add (featureIndex);
    }
  }

  /* Serializes a fresh record pointing at a subsetted copy of Feature f,
   * remapping feature_index through the subset plan.  Fails if the feature
   * has no new index or the index overflows the field. */
  bool serialize (hb_subset_layout_context_t *c,
                  unsigned feature_index,
                  const Feature *f, const Tag *tag)
  {
    TRACE_SERIALIZE (this);
    hb_serialize_context_t *s = c->subset_context->serializer;
    if (unlikely (!s->extend_min (this))) return_trace (false);

    uint32_t *new_feature_idx;
    if (!c->feature_map_w_duplicates->has (feature_index, &new_feature_idx))
      return_trace (false);

    if (!s->check_assign (featureIndex, *new_feature_idx, HB_SERIALIZE_ERROR_INT_OVERFLOW))
      return_trace (false);

    // Pack the subsetted Feature into its own object and link it; discard
    // the pushed object if feature subsetting fails.
    s->push ();
    bool ret = f->subset (c->subset_context, c, tag);
    if (ret) s->add_link (feature, s->pop_pack ());
    else s->pop_discard ();

    return_trace (ret);
  }

  /* Copies this record into the subset output, remapping featureIndex and
   * subsetting the referenced Feature.  Fails if the feature was dropped. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    uint32_t *new_feature_index;
    if (!c->feature_map_w_duplicates->has (featureIndex, &new_feature_index))
      return_trace (false);

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->featureIndex = *new_feature_index;
    return_trace (out->feature.serialize_subset (c->subset_context, feature, base, c));
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16		featureIndex;	/* Feature index to substitute */
  Offset32To<Feature>	feature;	/* Replacement Feature table */
  public:
  DEFINE_SIZE_STATIC (6);
};
   4478 
struct FeatureTableSubstitution
{
  /* Linear-searches the substitution records for feature_index; returns the
   * substitute Feature, or nullptr if no record matches. */
  const Feature *find_substitute (unsigned int feature_index) const
  {
    unsigned int count = substitutions.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
      if (record.featureIndex == feature_index)
	return &(this+record.feature);
    }
    return nullptr;
  }

  /* Collects lookup indices from the substitute Features of records whose
   * feature index is in feature_indexes. */
  void collect_lookups (const hb_set_t *feature_indexes,
		hb_set_t       *lookup_indexes /* OUT */) const
  {
    + hb_iter (substitutions)
    | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
    | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
	{ r.collect_lookups (this, lookup_indexes); })
    ;
  }

  /* Adds to feature_indexes every substituted feature whose replacement uses
   * one of the given lookups. */
  void closure_features (const hb_map_t *lookup_indexes,
		 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  /* Returns true if any record substitutes a feature present in
   * feature_index_map. */
  bool intersects_features (const hb_map_t *feature_index_map) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
    {
      if (feature_index_map->has (record.featureIndex)) return true;
    }
    return false;
  }

  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    for (const FeatureTableSubstitutionRecord& record : substitutions)
      record.collect_feature_substitutes_with_variations (c->feature_substitutes_map,
                                                          c->catch_all_record_feature_idxes,
                                                          c->feature_indices, this);
  }

  /* Subsets this table.  With insert_catch_all, synthesizes one record per
   * catch-all feature index from the cached (Feature, Tag) pairs instead of
   * copying the existing records. */
  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l,
               bool insert_catch_all) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    if (insert_catch_all)
    {
      for (unsigned feature_index : *(l->catch_all_record_feature_idxes))
      {
        hb_pair_t<const void*, const void*> *p;
        if (!l->feature_idx_tag_map->has (feature_index, &p))
          return_trace (false);
        auto *o = out->substitutions.serialize_append (c->serializer);
        if (!o->serialize (l, feature_index,
                           reinterpret_cast<const Feature*> (p->first),
                           reinterpret_cast<const Tag*> (p->second)))
          return_trace (false);
      }
      return_trace (true);
    }

    + substitutions.iter ()
    | hb_apply (subset_record_array (l, &(out->substitutions), this))
    ;

    return_trace (bool (out->substitutions));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
	  hb_barrier () &&
	  likely (version.major == 1) &&
	  substitutions.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array16Of<FeatureTableSubstitutionRecord>
		substitutions;
  public:
  DEFINE_SIZE_ARRAY (6, substitutions);
};
   4577 
struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  /* Delegates to the referenced FeatureTableSubstitution; base is the start
   * of the enclosing FeatureVariations table (offsets are relative to it). */
  void collect_lookups (const void     *base,
		const hb_set_t *feature_indexes,
		hb_set_t       *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  void closure_features (const void     *base,
		 const hb_map_t *lookup_indexes,
		 hb_set_t       *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  /* Evaluates the condition set against the instancing context; if the
   * record applies and no earlier record has applied yet, collects its
   * feature substitutes. */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c,
                                                    const void *base) const
  {
    (base+conditions).keep_with_variations (c);
    if (c->apply && !c->variation_applied)
    {
      (base+substitutions).collect_feature_substitutes_with_variations (c);
      c->variation_applied = true; // set variations only once
    }
  }

  /* Copies the record, subsetting both referenced subtables.  With
   * insert_catch_all, both subtables serialize their catch-all form. */
  bool subset (hb_subset_layout_context_t *c, const void *base,
               bool insert_catch_all = false) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base, c, insert_catch_all);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c, insert_catch_all);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
	  substitutions.sanitize (c, base));
  }

  protected:
  Offset32To<ConditionSet>
		conditions;	/* Conditions gating this record */
  Offset32To<FeatureTableSubstitution>
		substitutions;	/* Substitutions applied when conditions hold */
  public:
  DEFINE_SIZE_STATIC (8);
};
   4640 
struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  /* Finds the first record whose condition set is satisfied by coords and
   * stores its index in *index.  Returns false (and NOT_FOUND_INDEX) when
   * no record applies. */
  bool find_index (const int *coords, unsigned int coord_len,
	   unsigned int *index,
	   ItemVarStoreInstancer *instancer) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len, instancer))
      {
	*index = i;
	return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  /* Returns the substitute Feature for feature_index within the record at
   * variations_index, or nullptr if that record has no substitution for it. */
  const Feature *find_substitute (unsigned int variations_index,
			  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  /* Walks records in order, collecting substitutes; stops early once a
   * record becomes universal (applies unconditionally after pinning). */
  void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      c->cur_record_idx = i;
      varRecords[i].collect_feature_substitutes_with_variations (c, this);
      if (c->universal)
        break;
    }
    // No catch-all record is needed if a universal record exists or nothing
    // was retained.
    if (c->universal || c->record_cond_idx_map->is_empty ())
      c->catch_all_record_feature_idxes.reset ();
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  /* Collects lookups from retained records only (when a retained-record
   * filter map is supplied). */
  void collect_lookups (const hb_set_t *feature_indexes,
		const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
		hb_set_t       *lookup_indexes /* OUT */) const
  {
    unsigned count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (feature_record_cond_idx_map &&
          !feature_record_cond_idx_map->has (i))
        continue;
      varRecords[i].collect_lookups (this, feature_indexes, lookup_indexes);
    }
  }

  void closure_features (const hb_map_t *lookup_indexes,
		 const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map,
		 hb_set_t       *feature_indexes /* OUT */) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (feature_record_cond_idx_map != nullptr &&
          !feature_record_cond_idx_map->has (i))
        continue;
      varRecords[i].closure_features (this, lookup_indexes, feature_indexes);
    }
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    // Trim trailing records whose substitutions touch no retained feature;
    // records before the last intersecting one are kept to preserve the
    // first-match semantics of find_index ().
    int keep_up_to = -1;
    for (int i = varRecords.len - 1; i >= 0; i--) {
      if (varRecords[i].intersects_features (this, l->feature_map_w_duplicates)) {
        keep_up_to = i;
        break;
      }
    }

    unsigned count = (unsigned) (keep_up_to + 1);
    for (unsigned i = 0; i < count; i++)
    {
      // Skip records dropped during condition evaluation.
      if (l->feature_record_cond_idx_map != nullptr &&
          !l->feature_record_cond_idx_map->has (i))
        continue;

      l->cur_feature_var_record_idx = i;
      subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
    }

    // Append a catch-all record (empty condition set) carrying the pinned
    // feature substitutes; varRecords[0] is only used as a template.
    if (out->varRecords.len && !l->catch_all_record_feature_idxes->is_empty ())
    {
      bool insert_catch_all_record = true;
      subset_record_array (l, &(out->varRecords), this, insert_catch_all_record) (varRecords[0]);
    }

    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
	  hb_barrier () &&
	  likely (version.major == 1) &&
	  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array32Of<FeatureVariationRecord>
		varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
   4772 
   4773 
   4774 /*
   4775 * Device Tables
   4776 */
   4777 
   4778 struct HintingDevice
   4779 {
   4780  friend struct Device;
   4781 
   4782  private:
   4783 
   4784  hb_position_t get_x_delta (hb_font_t *font) const
   4785  { return get_delta (font->x_ppem, font->x_scale); }
   4786 
   4787  hb_position_t get_y_delta (hb_font_t *font) const
   4788  { return get_delta (font->y_ppem, font->y_scale); }
   4789 
   4790  public:
   4791 
   4792  unsigned int get_size () const
   4793  {
   4794    unsigned int f = deltaFormat;
   4795    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
   4796    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
   4797  }
   4798 
   4799  bool sanitize (hb_sanitize_context_t *c) const
   4800  {
   4801    TRACE_SANITIZE (this);
   4802    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
   4803  }
   4804 
   4805  HintingDevice* copy (hb_serialize_context_t *c) const
   4806  {
   4807    TRACE_SERIALIZE (this);
   4808    return_trace (c->embed<HintingDevice> (this));
   4809  }
   4810 
   4811  private:
   4812 
   4813  int get_delta (unsigned int ppem, int scale) const
   4814  {
   4815    if (!ppem) return 0;
   4816 
   4817    int pixels = get_delta_pixels (ppem);
   4818 
   4819    if (!pixels) return 0;
   4820 
   4821    return (int) (pixels * (int64_t) scale / ppem);
   4822  }
   4823  int get_delta_pixels (unsigned int ppem_size) const
   4824  {
   4825    unsigned int f = deltaFormat;
   4826    if (unlikely (f < 1 || f > 3))
   4827      return 0;
   4828 
   4829    if (ppem_size < startSize || ppem_size > endSize)
   4830      return 0;
   4831 
   4832    unsigned int s = ppem_size - startSize;
   4833 
   4834    unsigned int byte = deltaValueZ[s >> (4 - f)];
   4835    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
   4836    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));
   4837 
   4838    int delta = bits & mask;
   4839 
   4840    if ((unsigned int) delta >= ((mask + 1) >> 1))
   4841      delta -= mask + 1;
   4842 
   4843    return delta;
   4844  }
   4845 
   4846  protected:
   4847  HBUINT16	startSize;		/* Smallest size to correct--in ppem */
   4848  HBUINT16	endSize;		/* Largest size to correct--in ppem */
   4849  HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
   4850 				 * 1	Signed 2-bit value, 8 values per uint16
   4851 				 * 2	Signed 4-bit value, 4 values per uint16
   4852 				 * 3	Signed 8-bit value, 2 values per uint16
   4853 				 */
   4854  UnsizedArrayOf<HBUINT16>
   4855 	deltaValueZ;		/* Array of compressed data */
   4856  public:
   4857  DEFINE_SIZE_ARRAY (6, deltaValueZ);
   4858 };
   4859 
struct VariationDevice
{
  friend struct Device;

  private:

  /* Variation-store delta scaled into x positions; zero when the font has
   * all-default coordinates. */
  hb_position_t get_x_delta (hb_font_t *font,
		     const ItemVariationStore &store,
		     hb_scalar_cache_t *store_cache = nullptr) const
  { return !font->has_nonzero_coords ? 0 : font->em_scalef_x (get_delta (font, store, store_cache)); }

  /* Variation-store delta scaled into y positions; zero when the font has
   * all-default coordinates. */
  hb_position_t get_y_delta (hb_font_t *font,
		     const ItemVariationStore &store,
		     hb_scalar_cache_t *store_cache = nullptr) const
  { return !font->has_nonzero_coords ? 0 : font->em_scalef_y (get_delta (font, store, store_cache)); }

  /* Copies this device, remapping varIdx through the subset plan's
   * variation-index map.  Returns nullptr if the index was not retained. */
  VariationDevice* copy (hb_serialize_context_t *c,
                         const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map) const
  {
    TRACE_SERIALIZE (this);
    if (!layout_variation_idx_delta_map) return_trace (nullptr);

    hb_pair_t<unsigned, int> *v;
    if (!layout_variation_idx_delta_map->has (varIdx, &v))
      return_trace (nullptr);

    c->start_zerocopy (this->static_size);
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    if (!c->check_assign (out->varIdx, hb_first (*v), HB_SERIALIZE_ERROR_INT_OVERFLOW))
      return_trace (nullptr);
    return_trace (out);
  }

  void collect_variation_index (hb_collect_variation_indices_context_t *c) const
  { c->layout_variation_indices->add (varIdx); }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font,
	   const ItemVariationStore &store,
	   hb_scalar_cache_t *store_cache = nullptr) const
  {
    return store.get_delta (varIdx, font->coords, font->num_coords, store_cache);
  }

  protected:
  VarIdx	varIdx;		/* Variation index */
  HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
   4919 
/* Common header view of a Device table: only `format` is meaningful here.
 * It overlays the first three 16-bit words of HintingDevice / VariationDevice
 * in the Device union, letting callers dispatch on format before knowing
 * which variant the data actually is. */
struct DeviceHeader
{
  protected:
  HBUINT16		reserved1;	/* Overlaps variant-specific data */
  HBUINT16		reserved2;	/* Overlaps variant-specific data */
  public:
  HBUINT16		format;		/* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};
   4930 
   4931 struct Device
   4932 {
   4933  hb_position_t get_x_delta (hb_font_t *font,
   4934 		     const ItemVariationStore &store=Null (ItemVariationStore),
   4935 		     hb_scalar_cache_t *store_cache = nullptr) const
   4936  {
   4937    switch (u.b.format)
   4938    {
   4939 #ifndef HB_NO_HINTING
   4940    case 1: case 2: case 3:
   4941      return u.hinting.get_x_delta (font);
   4942 #endif
   4943 #ifndef HB_NO_VAR
   4944    case 0x8000:
   4945      return u.variation.get_x_delta (font, store, store_cache);
   4946 #endif
   4947    default:
   4948      return 0;
   4949    }
   4950  }
   4951  hb_position_t get_y_delta (hb_font_t *font,
   4952 		     const ItemVariationStore &store=Null (ItemVariationStore),
   4953 		     hb_scalar_cache_t *store_cache = nullptr) const
   4954  {
   4955    switch (u.b.format)
   4956    {
   4957    case 1: case 2: case 3:
   4958 #ifndef HB_NO_HINTING
   4959      return u.hinting.get_y_delta (font);
   4960 #endif
   4961 #ifndef HB_NO_VAR
   4962    case 0x8000:
   4963      return u.variation.get_y_delta (font, store, store_cache);
   4964 #endif
   4965    default:
   4966      return 0;
   4967    }
   4968  }
   4969 
   4970  bool sanitize (hb_sanitize_context_t *c) const
   4971  {
   4972    TRACE_SANITIZE (this);
   4973    if (!u.b.format.sanitize (c)) return_trace (false);
   4974    switch (u.b.format) {
   4975 #ifndef HB_NO_HINTING
   4976    case 1: case 2: case 3:
   4977      return_trace (u.hinting.sanitize (c));
   4978 #endif
   4979 #ifndef HB_NO_VAR
   4980    case 0x8000:
   4981      return_trace (u.variation.sanitize (c));
   4982 #endif
   4983    default:
   4984      return_trace (true);
   4985    }
   4986  }
   4987 
   4988  Device* copy (hb_serialize_context_t *c,
   4989                const hb_hashmap_t<unsigned, hb_pair_t<unsigned, int>> *layout_variation_idx_delta_map=nullptr) const
   4990  {
   4991    TRACE_SERIALIZE (this);
   4992    switch (u.b.format) {
   4993 #ifndef HB_NO_HINTING
   4994    case 1:
   4995    case 2:
   4996    case 3:
   4997      return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
   4998 #endif
   4999 #ifndef HB_NO_VAR
   5000    case 0x8000:
   5001      return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_delta_map)));
   5002 #endif
   5003    default:
   5004      return_trace (nullptr);
   5005    }
   5006  }
   5007 
   5008  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
   5009  {
   5010    switch (u.b.format) {
   5011 #ifndef HB_NO_HINTING
   5012    case 1:
   5013    case 2:
   5014    case 3:
   5015      return;
   5016 #endif
   5017 #ifndef HB_NO_VAR
   5018    case 0x8000:
   5019      u.variation.collect_variation_index (c);
   5020      return;
   5021 #endif
   5022    default:
   5023      return;
   5024    }
   5025  }
   5026 
   5027  unsigned get_variation_index () const
   5028  {
   5029    switch (u.b.format) {
   5030 #ifndef HB_NO_VAR
   5031    case 0x8000:
   5032      return u.variation.varIdx;
   5033 #endif
   5034    default:
   5035      return HB_OT_LAYOUT_NO_VARIATIONS_INDEX;
   5036    }
   5037  }
   5038 
   5039  bool is_variation_device () const
   5040  {
   5041    switch (u.b.format) {
   5042 #ifndef HB_NO_VAR
   5043    case 0x8000:
   5044      return true;
   5045 #endif
   5046    default:
   5047      return false;
   5048    }
   5049  }
   5050 
   5051  protected:
   5052  union {
   5053  DeviceHeader		b;
   5054  HintingDevice		hinting;
   5055 #ifndef HB_NO_VAR
   5056  VariationDevice	variation;
   5057 #endif
   5058  } u;
   5059  public:
   5060  DEFINE_SIZE_UNION (6, b);
   5061 };
   5062 
   5063 
   5064 } /* namespace OT */
   5065 
   5066 
   5067 #endif /* HB_OT_LAYOUT_COMMON_HH */