tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

hb-open-type.hh (61393B)


      1 /*
      2 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
      3 * Copyright © 2012  Google, Inc.
      4 *
      5 *  This is part of HarfBuzz, a text shaping library.
      6 *
      7 * Permission is hereby granted, without written agreement and without
      8 * license or royalty fees, to use, copy, modify, and distribute this
      9 * software and its documentation for any purpose, provided that the
     10 * above copyright notice and the following two paragraphs appear in
     11 * all copies of this software.
     12 *
     13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
     14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
     15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
     16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
     17 * DAMAGE.
     18 *
     19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
     20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
     21 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
     22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
     23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
     24 *
     25 * Red Hat Author(s): Behdad Esfahbod
     26 * Google Author(s): Behdad Esfahbod
     27 */
     28 
     29 #ifndef HB_OPEN_TYPE_HH
     30 #define HB_OPEN_TYPE_HH
     31 
     32 #include "hb.hh"
     33 #include "hb-blob.hh"
     34 #include "hb-face.hh"
     35 #include "hb-machinery.hh"
     36 #include "hb-meta.hh"
     37 #include "hb-subset.hh"
     38 
     39 
     40 namespace OT {
     41 
     42 
     43 /*
     44 *
     45 * The OpenType Font File: Data Types
     46 */
     47 
     48 
     49 /* "The following data types are used in the OpenType font file.
     50 *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
     51 
     52 /*
     53 * Int types
     54 */
     55 
/* Integer types in big-endian order and no alignment requirement */
/* NumType wraps a raw integral or floating value stored with explicit byte
 * order: big-endian when BE is true, little-endian otherwise.  Size defaults
 * to sizeof (Type) but may be smaller (e.g. 3 for the 24-bit HBUINT24). */
template <bool BE,
  typename Type,
  unsigned int Size = sizeof (Type)>
struct NumType
{
 typedef Type type;
 /* For reason we define cast out operator for signed/unsigned, instead of Type, see:
  * https://github.com/harfbuzz/harfbuzz/pull/2875/commits/09836013995cab2b9f07577a179ad7b024130467 */
 /* WideType: the type the implicit read-out conversion yields — plain
  * signed/unsigned for integral Types no wider than int, Type itself
  * otherwise (floats, 64-bit integers). */
 typedef typename std::conditional<std::is_integral<Type>::value && sizeof (Type) <= sizeof(int),
			     typename std::conditional<std::is_signed<Type>::value, signed, unsigned>::type,
			     Type>::type WideType;

 NumType () = default;
 explicit constexpr NumType (Type V) : v {V} {}
 NumType& operator = (Type V) { v = V; return *this; }

 /* Read the stored value out in native byte order. */
 operator WideType () const { return v; }

 bool operator == (const NumType &o) const { return (Type) v == (Type) o.v; }
 bool operator != (const NumType &o) const { return !(*this == o); }

 /* Arithmetic helpers; all round-trip through the WideType conversion above. */
 NumType& operator += (WideType count) { *this = *this + count; return *this; }
 NumType& operator -= (WideType count) { *this = *this - count; return *this; }
 NumType& operator ++ () { *this += 1; return *this; }
 NumType& operator -- () { *this -= 1; return *this; }
 NumType operator ++ (int) { NumType c (*this); ++*this; return c; }
 NumType operator -- (int) { NumType c (*this); --*this; return c; }

 /* Hash of the raw stored bytes (not of the decoded value). */
 uint32_t hash () const { return hb_array ((const char *) &v, sizeof (v)).hash (); }
 /* qsort-style comparators; arguments are swapped so the resulting order
  * matches the three-way cmp () below. */
 HB_INTERNAL static int cmp (const NumType *a, const NumType *b)
 { return b->cmp (*a); }
 HB_INTERNAL static int cmp (const void *a, const void *b)
 {
   NumType *pa = (NumType *) a;
   NumType *pb = (NumType *) b;

   return pb->cmp (*pa);
 }
 /* Three-way compare with any value convertible to Type:
  * positive if a > *this, zero if equal, negative if a < *this. */
 template <typename Type2,
    hb_enable_if (hb_is_convertible (Type2, Type))>
 int cmp (Type2 a) const
 {
   Type b = v;
   return (a > b) - (a < b);
 }
 /* Bounds-check only; any bit pattern is a valid value. */
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this));
 }
 protected:
 /* Backing storage: byte-order-aware integer or float of Size bytes. */
 typename std::conditional<std::is_integral<Type>::value,
		    HBInt<BE, Type, Size>,
		    HBFloat<BE, Type, Size>>::type v;
 public:
 DEFINE_SIZE_STATIC (Size);
};
    114 
typedef NumType<true, uint8_t>  HBUINT8;	/* 8-bit big-endian unsigned integer. */
typedef NumType<true, int8_t>   HBINT8;		/* 8-bit big-endian signed integer. */
typedef NumType<true, uint16_t> HBUINT16;	/* 16-bit big-endian unsigned integer. */
typedef NumType<true, int16_t>  HBINT16;	/* 16-bit big-endian signed integer. */
typedef NumType<true, uint32_t> HBUINT32;	/* 32-bit big-endian unsigned integer. */
typedef NumType<true, int32_t>  HBINT32;	/* 32-bit big-endian signed integer. */
typedef NumType<true, uint64_t> HBUINT64;	/* 64-bit big-endian unsigned integer. */
typedef NumType<true, int64_t>  HBINT64;	/* 64-bit big-endian signed integer. */
/* Note: we cannot define a signed HBINT24 because there's no corresponding C type.
* Works for unsigned, but not signed, since we rely on compiler for sign-extension. */
typedef NumType<true, uint32_t, 3> HBUINT24;	/* 24-bit big-endian unsigned integer. */

typedef NumType<false, uint16_t> HBUINT16LE;	/* 16-bit little-endian unsigned integer. */
typedef NumType<false, int16_t>  HBINT16LE;	/* 16-bit little-endian signed integer. */
typedef NumType<false, uint32_t> HBUINT32LE;	/* 32-bit little-endian unsigned integer. */
typedef NumType<false, int32_t>  HBINT32LE;	/* 32-bit little-endian signed integer. */
typedef NumType<false, uint64_t> HBUINT64LE;	/* 64-bit little-endian unsigned integer. */
typedef NumType<false, int64_t>  HBINT64LE;	/* 64-bit little-endian signed integer. */

typedef NumType<true,  float>  HBFLOAT32BE;	/* 32-bit big-endian floating point number. */
typedef NumType<true,  double> HBFLOAT64BE;	/* 64-bit big-endian floating point number. */
typedef NumType<false, float>  HBFLOAT32LE;	/* 32-bit little-endian floating point number. */
typedef NumType<false, double> HBFLOAT64LE;	/* 64-bit little-endian floating point number. */
    138 
/* 15-bit unsigned number; top bit used for extension. */
struct HBUINT15 : HBUINT16
{
 /* TODO Flesh out; actually mask top bit. */
 /* NOTE(review): assignment does not currently mask bit 15; callers must
  * pass values < 0x8000 until the TODO above is addressed — confirm. */
 HBUINT15& operator = (uint16_t i ) { HBUINT16::operator= (i); return *this; }
 public:
 DEFINE_SIZE_STATIC (2);
};
    147 
    148 /* 32-bit unsigned integer with variable encoding. */
    149 struct HBUINT32VAR
    150 {
    151  unsigned get_size () const
    152  {
    153    unsigned b0 = v[0];
    154    if (b0 < 0x80)
    155      return 1;
    156    else if (b0 < 0xC0)
    157      return 2;
    158    else if (b0 < 0xE0)
    159      return 3;
    160    else if (b0 < 0xF0)
    161      return 4;
    162    else
    163      return 5;
    164  }
    165 
    166  static unsigned get_size (uint32_t v)
    167  {
    168    if (v < 0x80)
    169      return 1;
    170    else if (v < 0x4000)
    171      return 2;
    172    else if (v < 0x200000)
    173      return 3;
    174    else if (v < 0x10000000)
    175      return 4;
    176    else
    177      return 5;
    178  }
    179 
    180  bool sanitize (hb_sanitize_context_t *c) const
    181  {
    182    TRACE_SANITIZE (this);
    183    return_trace (c->check_range (v, 1) &&
    184 	  hb_barrier () &&
    185 	  c->check_range (v, get_size ()));
    186  }
    187 
    188  operator uint32_t () const
    189  {
    190    unsigned b0 = v[0];
    191    if (b0 < 0x80)
    192      return b0;
    193    else if (b0 < 0xC0)
    194      return ((b0 & 0x3F) << 8) | v[1];
    195    else if (b0 < 0xE0)
    196      return ((b0 & 0x1F) << 16) | (v[1] << 8) | v[2];
    197    else if (b0 < 0xF0)
    198      return ((b0 & 0x0F) << 24) | (v[1] << 16) | (v[2] << 8) | v[3];
    199    else
    200      return (v[1] << 24) | (v[2] << 16) | (v[3] << 8) | v[4];
    201  }
    202 
    203  static bool serialize (hb_serialize_context_t *c, uint32_t v)
    204  {
    205    unsigned len = get_size (v);
    206 
    207    unsigned char *buf = c->allocate_size<unsigned char> (len, false);
    208    if (unlikely (!buf))
    209      return false;
    210 
    211    unsigned char *p = buf + len;
    212    for (unsigned i = 0; i < len; i++)
    213    {
    214      *--p = v & 0xFF;
    215      v >>= 8;
    216    }
    217 
    218    if (len > 1)
    219      buf[0] |= ((1 << (len - 1)) - 1) << (9 - len);
    220 
    221    return true;
    222  }
    223 
    224  protected:
    225  unsigned char v[5];
    226 
    227  public:
    228  DEFINE_SIZE_MIN (1);
    229 };
    230 
/* 16-bit signed integer (HBINT16) that describes a quantity in FUnits
* (font design units). */
typedef HBINT16 FWORD;

/* 32-bit signed integer (HBINT32) that describes a quantity in FUnits. */
typedef HBINT32 FWORD32;

/* 16-bit unsigned integer (HBUINT16) that describes a quantity in FUnits. */
typedef HBUINT16 UFWORD;
    239 
/* Fixed-point number stored in Type with fraction_bits bits of fraction,
 * e.g. HBFixed<HBINT16, 14> is the OpenType F2DOT14 format. */
template <typename Type, unsigned fraction_bits>
struct HBFixed : Type
{
 static constexpr float mult = 1.f / (1 << fraction_bits);
 static_assert (Type::static_size * 8 > fraction_bits, "");

 /* Integral conversions are deleted: reading a fixed-point number as an
  * int is almost always a bug; use to_int () for raw bits or to_float (). */
 operator signed () const = delete;
 operator unsigned () const = delete;
 explicit operator float () const { return to_float (); }
 /* Raw underlying fixed-point bits, unscaled. */
 typename Type::type to_int () const { return Type::v; }
 void set_int (typename Type::type i ) { Type::v = i; }
 /* Scaled value; offset is added to the raw integer before scaling. */
 float to_float (float offset = 0) const  { return ((int32_t) Type::v + offset) * mult; }
 void set_float (float f) { Type::v = roundf (f / mult); }
 public:
 DEFINE_SIZE_STATIC (Type::static_size);
};
    256 
    257 /* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
    258 using F2DOT14 = HBFixed<HBINT16, 14>;
    259 using F4DOT12 = HBFixed<HBINT16, 12>;
    260 using F6DOT10 = HBFixed<HBINT16, 10>;
    261 
    262 /* 32-bit signed fixed-point number (16.16). */
    263 using F16DOT16 = HBFixed<HBINT32, 16>;
    264 
/* Date represented in number of seconds since 12:00 midnight, January 1,
* 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this));
 }
 protected:
 /* The spec's int64 stored as two big-endian 32-bit halves; only the
  * high half carries the sign. */
 HBINT32 major;
 HBUINT32 minor;
 public:
 DEFINE_SIZE_STATIC (8);
};
    280 
/* Array of four uint8s (length = 32 bits) used to identify a script, language
* system, feature, or baseline */
struct Tag : HBUINT32
{
 Tag& operator = (hb_tag_t i) { HBUINT32::operator= (i); return *this; }
 /* The tag is stored big-endian, so the in-memory bytes read directly as
  * the four ASCII characters. */
 /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
 operator const char* () const { return reinterpret_cast<const char *> (this); }
 operator char* ()             { return reinterpret_cast<char *> (this); }
 public:
 DEFINE_SIZE_STATIC (4);
};
    292 
/* Glyph index number, same as uint16 (length = 16 bits) */
struct HBGlyphID16 : HBUINT16
{
 HBGlyphID16& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
/* 24-bit glyph index, for fonts addressing more than 65535 glyphs. */
struct HBGlyphID24 : HBUINT24
{
 HBGlyphID24& operator = (uint32_t i) { HBUINT24::operator= (i); return *this; }
};
    302 
/* Script/language-system/feature index */
struct Index : HBUINT16 {
 /* 0xFFFF is the reserved "no index" sentinel. */
 static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu;
 Index& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, Index);

/* 'name' table identifier; shares the Index representation. */
typedef Index NameID;
    311 
/* Variation-store index; NO_VARIATION is the reserved "no delta" sentinel
 * (kept in sync with the public API constant via the static_assert). */
struct VarIdx : HBUINT32 {
 static constexpr unsigned NO_VARIATION = 0xFFFFFFFFu;
 static_assert (NO_VARIATION == HB_OT_LAYOUT_NO_VARIATIONS_INDEX, "");
 VarIdx& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, VarIdx);
    318 
/* Offset, Null offset = 0 */
template <typename Type, bool has_null=true>
struct Offset : Type
{
 Offset& operator = (typename Type::type i) { Type::operator= (i); return *this; }

 typedef Type type;

 /* A zero offset means "points nowhere" only when has_null is set;
  * otherwise zero is an ordinary, valid offset. */
 bool is_null () const { return has_null && 0 == *this; }

 public:
 DEFINE_SIZE_STATIC (sizeof (Type));
};

typedef Offset<HBUINT16> Offset16;
typedef Offset<HBUINT24> Offset24;
typedef Offset<HBUINT32> Offset32;
    336 
    337 
    338 /* CheckSum */
    339 struct CheckSum : HBUINT32
    340 {
    341  CheckSum& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
    342 
    343  /* This is reference implementation from the spec. */
    344  static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
    345  {
    346    uint32_t Sum = 0L;
    347    assert (0 == (Length & 3));
    348    const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;
    349 
    350    while (Table < EndPtr)
    351      Sum += *Table++;
    352    return Sum;
    353  }
    354 
    355  /* Note: data should be 4byte aligned and have 4byte padding at the end. */
    356  void set_for_data (const void *data, unsigned int length)
    357  { *this = CalcTableChecksum ((const HBUINT32 *) data, length); }
    358 
    359  public:
    360  DEFINE_SIZE_STATIC (4);
    361 };
    362 
    363 
    364 /*
    365 * Version Numbers
    366 */
    367 
/* Major.minor version number; each half stored as FixedType. */
template <typename FixedType=HBUINT16>
struct FixedVersion
{
 /* Packs major into the high half and minor into the low half.
  * NOTE(review): with a FixedType wider than 16 bits the shift amount
  * reaches/exceeds the width of int — confirm this is only ever
  * instantiated with 16-bit (or narrower) FixedTypes. */
 uint32_t to_int () const { return (major << (sizeof (FixedType) * 8)) + minor; }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this));
 }

 FixedType major;
 FixedType minor;
 public:
 DEFINE_SIZE_STATIC (2 * sizeof (FixedType));
};
    384 
    385 
    386 /*
    387 * Template subclasses of Offset that do the dereferencing.
    388 * Use: (base+offset)
    389 */
    390 
/* Helper for OffsetTo: what a null offset dereferences to.  The generic
 * (has_null == false) case returns nullptr for both — no fallback object. */
template <typename Type, bool has_null>
struct _hb_has_null
{
 static const Type *get_null () { return nullptr; }
 static Type *get_crap ()       { return nullptr; }
};
/* has_null == true: fall back to the shared Null (read-only) and Crap
 * (writable scratch) singletons for Type. */
template <typename Type>
struct _hb_has_null<Type, true>
{
 static const Type *get_null () { return &Null (Type); }
 static       Type *get_crap () { return &Crap (Type); }
};
    403 
/* An OffsetType-sized offset, relative to a BaseType, that dereferences
 * to a Type.  Used via the (base+offset) operators defined below. */
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct OffsetTo : Offset<OffsetType, has_null>
{
 using target_t = Type;

 // Make sure Type is not unbounded; works only for types that are fully defined at OffsetTo time.
 static_assert (has_null == false ||
	 (hb_has_null_size (Type) || !hb_has_min_size (Type)), "");

 HB_DELETE_COPY_ASSIGN (OffsetTo);
 OffsetTo () = default;

 OffsetTo& operator = (typename OffsetType::type i) { OffsetType::operator= (i); return *this; }

 /* Dereference against base.  A null offset yields the Null/Crap
  * singleton — or a nullptr dereference when has_null is false, so
  * non-nullable offsets must not be dereferenced while null. */
 const Type& operator () (const void *base) const
 {
   if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_null ();
   return StructAtOffset<const Type> (base, *this);
 }
 Type& operator () (void *base) const
 {
   if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_crap ();
   return StructAtOffset<Type> (base, *this);
 }

 /* (base+offset) and (offset+base) sugar, const and mutable variants. */
 template <typename Base,
    hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
 friend const Type& operator + (const Base &base, const OffsetTo &offset) { return offset ((const void *) base); }
 template <typename Base,
    hb_enable_if (hb_is_convertible (const Base, const BaseType *))>
 friend const Type& operator + (const OffsetTo &offset, const Base &base) { return offset ((const void *) base); }
 template <typename Base,
    hb_enable_if (hb_is_convertible (Base, BaseType *))>
 friend Type& operator + (Base &&base, OffsetTo &offset) { return offset ((void *) base); }
 template <typename Base,
    hb_enable_if (hb_is_convertible (Base, BaseType *))>
 friend Type& operator + (OffsetTo &offset, Base &&base) { return offset ((void *) base); }


 /* Subset the object at src (relative to src_base) into c's serializer
  * and link this offset to the copy.  On dispatch failure a nullable
  * offset is discarded (stays 0); a non-nullable one is linked anyway. */
 template <typename Base, typename ...Ts>
 bool serialize_subset (hb_subset_context_t *c, const OffsetTo& src,
		 const Base *src_base, Ts&&... ds)
 {
   *this = 0;
   if (src.is_null ())
     return false;

   auto *s = c->serializer;

   s->push ();

   bool ret = c->dispatch (src_base+src, std::forward<Ts> (ds)...);

   if (ret || !has_null)
     s->add_link (*this, s->pop_pack ());
   else
     s->pop_discard ();

   return ret;
 }


 /* Serialize a fresh Type on the serializer's object stack and link this
  * offset to it; the object is discarded if its serialize () fails. */
 template <typename ...Ts>
 bool serialize_serialize (hb_serialize_context_t *c, Ts&&... ds)
 {
   *this = 0;

   Type* obj = c->push<Type> ();
   bool ret = obj->serialize (c, std::forward<Ts> (ds)...);

   if (ret)
     c->add_link (*this, c->pop_pack ());
   else
     c->pop_discard ();

   return ret;
 }

 /* TODO: Somehow merge this with previous function into a serialize_dispatch(). */
 /* Workaround clang bug: https://bugs.llvm.org/show_bug.cgi?id=23029
  * Can't compile: whence = hb_serialize_context_t::Head followed by Ts&&...
  */
 /* Copy the object at src verbatim into the serializer and link this
  * offset to it; whence and dst_bias control how the final offset value
  * is resolved at pack time. */
 template <typename ...Ts>
 bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src,
	       const void *src_base, unsigned dst_bias,
	       hb_serialize_context_t::whence_t whence,
	       Ts&&... ds)
 {
   *this = 0;
   if (src.is_null ())
     return false;

   c->push ();

   bool ret = c->copy (src_base+src, std::forward<Ts> (ds)...);

   c->add_link (*this, c->pop_pack (), whence, dst_bias);

   return ret;
 }

 bool serialize_copy (hb_serialize_context_t *c, const OffsetTo& src,
	       const void *src_base, unsigned dst_bias = 0)
 { return serialize_copy (c, src, src_base, dst_bias, hb_serialize_context_t::Head); }

 /* Validate the offset value itself: readable, and base + offset does
  * not wrap around the address space. */
 bool sanitize_shallow (hb_sanitize_context_t *c, const BaseType *base) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!c->check_struct (this))) return_trace (false);
   hb_barrier ();
   //if (unlikely (this->is_null ())) return_trace (true);
   if (unlikely ((const char *) base + (unsigned) *this < (const char *) base)) return_trace (false);
   return_trace (true);
 }

 /* Validate the offset, then recurse into the pointed-to object
  * (skipped when the offset is null). */
 template <typename ...Ts>
#ifndef HB_OPTIMIZE_SIZE
 HB_ALWAYS_INLINE
#endif
 bool sanitize (hb_sanitize_context_t *c, const BaseType *base, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   return_trace (sanitize_shallow (c, base) &&
	  hb_barrier () &&
	  (this->is_null () ||
	   c->dispatch (StructAtOffset<Type> (base, *this), std::forward<Ts> (ds)...)));
 }

 DEFINE_SIZE_STATIC (sizeof (OffsetType));
};
/* Partial specializations. */
template <typename Type, typename BaseType=void, bool has_null=true> using Offset16To = OffsetTo<Type, HBUINT16, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset24To = OffsetTo<Type, HBUINT24, BaseType, has_null>;
template <typename Type, typename BaseType=void, bool has_null=true> using Offset32To = OffsetTo<Type, HBUINT32, BaseType, has_null>;

/* NN ("non-null") variants: offset value 0 is a regular offset, not a null. */
template <typename Type, typename OffsetType, typename BaseType=void> using NNOffsetTo = OffsetTo<Type, OffsetType, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset16To = Offset16To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset24To = Offset24To<Type, BaseType, false>;
template <typename Type, typename BaseType=void> using NNOffset32To = Offset32To<Type, BaseType, false>;
    543 
    544 
    545 /*
    546 * Array Types
    547 */
    548 
/* Inline array with no stored length; the element count is supplied by the
 * caller at every call site. */
template <typename Type>
struct UnsizedArrayOf
{
 typedef Type item_t;
 static constexpr unsigned item_size = hb_static_size (Type);

 HB_DELETE_CREATE_COPY_ASSIGN (UnsizedArrayOf);

 /* Unchecked indexing: with no stored length, no bounds check is possible. */
 const Type& operator [] (unsigned int i) const
 {
   return arrayZ[i];
 }
 Type& operator [] (unsigned int i)
 {
   return arrayZ[i];
 }

 static unsigned int get_size (unsigned int len)
 { return len * Type::static_size; }

 template <typename T> operator T * () { return arrayZ; }
 template <typename T> operator const T * () const { return arrayZ; }
 hb_array_t<Type> as_array (unsigned int len)
 { return hb_array (arrayZ, len); }
 hb_array_t<const Type> as_array (unsigned int len) const
 { return hb_array (arrayZ, len); }

 /* Linear search; returns not_found when x is absent. */
 template <typename T>
 Type &lsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
 { return *as_array (len).lsearch (x, &not_found); }
 template <typename T>
 const Type &lsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
 { return *as_array (len).lsearch (x, &not_found); }
 template <typename T>
 bool lfind (unsigned int len, const T &x, unsigned int *i = nullptr,
      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
      unsigned int to_store = (unsigned int) -1) const
 { return as_array (len).lfind (x, i, not_found, to_store); }

 void qsort (unsigned int len, unsigned int start = 0, unsigned int end = (unsigned int) -1)
 { as_array (len).qsort (start, end); }

 /* Extend the serializer by room for items_len elements. */
 bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
 {
   TRACE_SERIALIZE (this);
   if (unlikely (!c->extend_size (this, get_size (items_len), clear))) return_trace (false);
   return_trace (true);
 }
 /* Serialize from an iterator of items. */
 template <typename Iterator,
    hb_requires (hb_is_source_of (Iterator, Type))>
 bool serialize (hb_serialize_context_t *c, Iterator items)
 {
   TRACE_SERIALIZE (this);
   unsigned count = hb_len (items);
   if (unlikely (!serialize (c, count, false))) return_trace (false);
   /* TODO Umm. Just exhaust the iterator instead?  Being extra
    * cautious right now.. */
   for (unsigned i = 0; i < count; i++, ++items)
     arrayZ[i] = *items;
   return_trace (true);
 }

 /* Copy count elements verbatim into the serializer. */
 UnsizedArrayOf* copy (hb_serialize_context_t *c, unsigned count) const
 {
   TRACE_SERIALIZE (this);
   auto *out = c->start_embed (this);
   if (unlikely (!as_array (count).copy (c))) return_trace (nullptr);
   return_trace (out);
 }

 /* Range-check the whole array, then recurse into each element — unless
  * there are no extra dispatch args and the element type is trivially
  * copyable, in which case the range check alone suffices. */
 template <typename ...Ts>
 HB_ALWAYS_INLINE
 bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
   if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
   hb_barrier ();
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
   return_trace (true);
 }

 bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_array (arrayZ, count));
 }

 public:
 Type		arrayZ[HB_VAR_ARRAY];
 public:
 DEFINE_SIZE_UNBOUNDED (0);
};
    644 
/* Unsized array of offsets. */
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
using UnsizedArray16OfOffsetTo = UnsizedArrayOf<OffsetTo<Type, OffsetType, BaseType, has_null>>;
    648 
/* Unsized array of offsets relative to the beginning of the array itself. */
template <typename Type, typename OffsetType, typename BaseType=void, bool has_null=true>
struct UnsizedListOfOffset16To : UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
{
 /* Index and dereference in one step, using the array itself as the
  * offset base.  The pointer comparison guards against an index so large
  * that the element-address computation wrapped around. */
 const Type& operator [] (int i_) const
 {
   unsigned int i = (unsigned int) i_;
   const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
   if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Null (Type); /* Overflowed. */
   hb_barrier ();
   return this+*p;
 }
 Type& operator [] (int i_)
 {
   unsigned int i = (unsigned int) i_;
   const OffsetTo<Type, OffsetType, BaseType, has_null> *p = &this->arrayZ[i];
   if (unlikely ((const void *) p < (const void *) this->arrayZ)) return Crap (Type); /* Overflowed. */
   hb_barrier ();
   return this+*p;
 }

 /* Sanitize each offset and its target, with this array as the base. */
 template <typename ...Ts>
 bool sanitize (hb_sanitize_context_t *c, unsigned int count, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   return_trace ((UnsizedArray16OfOffsetTo<Type, OffsetType, BaseType, has_null>
	   ::sanitize (c, count, this, std::forward<Ts> (ds)...)));
 }
};
    678 
/* An array with sorted elements.  Supports binary searching. */
template <typename Type>
struct SortedUnsizedArrayOf : UnsizedArrayOf<Type>
{
 hb_sorted_array_t<Type> as_array (unsigned int len)
 { return hb_sorted_array (this->arrayZ, len); }
 hb_sorted_array_t<const Type> as_array (unsigned int len) const
 { return hb_sorted_array (this->arrayZ, len); }
 /* NOTE(review): these conversion operators call as_array () without the
  * required length argument and would fail to compile if ever
  * instantiated; presumably they are unused — confirm before relying
  * on them. */
 operator hb_sorted_array_t<Type> ()             { return as_array (); }
 operator hb_sorted_array_t<const Type> () const { return as_array (); }

 /* Binary search; the array must already be sorted in the order the
  * comparison with x assumes. */
 template <typename T>
 Type &bsearch (unsigned int len, const T &x, Type &not_found = Crap (Type))
 { return *as_array (len).bsearch (x, &not_found); }
 template <typename T>
 const Type &bsearch (unsigned int len, const T &x, const Type &not_found = Null (Type)) const
 { return *as_array (len).bsearch (x, &not_found); }
 template <typename T>
 bool bfind (unsigned int len, const T &x, unsigned int *i = nullptr,
      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
      unsigned int to_store = (unsigned int) -1) const
 { return as_array (len).bfind (x, i, not_found, to_store); }
};
    702 
    703 
    704 /* An array with a number of elements. */
    705 template <typename Type, typename LenType>
    706 struct ArrayOf
    707 {
    708  typedef Type item_t;
    709  static constexpr unsigned item_size = hb_static_size (Type);
    710 
    711  HB_DELETE_CREATE_COPY_ASSIGN (ArrayOf);
    712 
    713  const Type& operator [] (int i_) const
    714  {
    715    unsigned int i = (unsigned int) i_;
    716    if (unlikely (i >= len)) return Null (Type);
    717    hb_barrier ();
    718    return arrayZ[i];
    719  }
    720  Type& operator [] (int i_)
    721  {
    722    unsigned int i = (unsigned int) i_;
    723    if (unlikely (i >= len)) return Crap (Type);
    724    hb_barrier ();
    725    return arrayZ[i];
    726  }
    727 
    728  unsigned int get_size () const
    729  { return len.static_size + len * Type::static_size; }
    730 
    731  explicit operator bool () const { return len; }
    732 
    733  void pop () { len--; }
    734 
    735  hb_array_t<      Type> as_array ()       { return hb_array (arrayZ, len); }
    736  hb_array_t<const Type> as_array () const { return hb_array (arrayZ, len); }
    737 
    738  /* Iterator. */
    739  typedef hb_array_t<const Type>   iter_t;
    740  typedef hb_array_t<      Type> writer_t;
    741    iter_t   iter () const { return as_array (); }
    742  writer_t writer ()       { return as_array (); }
    743  operator   iter_t () const { return   iter (); }
    744  operator writer_t ()       { return writer (); }
    745 
    746  /* Faster range-based for loop. */
    747  const Type *begin () const { return arrayZ; }
    748  const Type *end () const { return arrayZ + len; }
    749 
    750  template <typename T>
    751  Type &lsearch (const T &x, Type &not_found = Crap (Type))
    752  { return *as_array ().lsearch (x, &not_found); }
    753  template <typename T>
    754  const Type &lsearch (const T &x, const Type &not_found = Null (Type)) const
    755  { return *as_array ().lsearch (x, &not_found); }
    756  template <typename T>
    757  bool lfind (const T &x, unsigned int *i = nullptr,
    758       hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
    759       unsigned int to_store = (unsigned int) -1) const
    760  { return as_array ().lfind (x, i, not_found, to_store); }
    761 
    762  void qsort ()
    763  { as_array ().qsort (); }
    764 
    765  HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned items_len, bool clear = true)
    766  {
    767    TRACE_SERIALIZE (this);
    768    if (unlikely (!c->extend_min (this))) return_trace (false);
    769    c->check_assign (len, items_len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
    770    if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
    771    return_trace (true);
    772  }
    773  template <typename Iterator,
    774     hb_requires (hb_is_source_of (Iterator, Type))>
    775  HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
    776  {
    777    TRACE_SERIALIZE (this);
    778    unsigned count = hb_len (items);
    779    if (unlikely (!serialize (c, count, false))) return_trace (false);
    780    /* TODO Umm. Just exhaust the iterator instead?  Being extra
    781     * cautious right now.. */
    782    for (unsigned i = 0; i < count; i++, ++items)
    783      arrayZ[i] = *items;
    784    return_trace (true);
    785  }
    786 
 /* Grow the array by one element in the serializer and return a pointer
  * to the new slot, or nullptr on length overflow / allocation failure
  * (len is rolled back on failure). */
 Type* serialize_append (hb_serialize_context_t *c)
 {
   TRACE_SERIALIZE (this);
   len++;
   if (unlikely (!len || !c->extend (this)))
   {
     len--;
     return_trace (nullptr);
   }
   return_trace (&arrayZ[len - 1]);
 }

 /* Copy length and elements into the serializer; nullptr on failure. */
 ArrayOf* copy (hb_serialize_context_t *c) const
 {
   TRACE_SERIALIZE (this);
   auto *out = c->start_embed (this);
   if (unlikely (!c->extend_min (out))) return_trace (nullptr);
   c->check_assign (out->len, len, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
   if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
   return_trace (out);
 }
    808 
 /* Validate length and element storage, then recurse into each element.
  * Skips per-element dispatch when Type is trivially copyable and no
  * extra sanitize arguments were passed. */
 template <typename ...Ts>
 HB_ALWAYS_INLINE
 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!sanitize_shallow (c))) return_trace (false);
   if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
   hb_barrier ();
   unsigned int count = len;
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
   return_trace (true);
 }

 /* Check only that len and the element array lie inside the blob. */
 bool sanitize_shallow (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (len.sanitize (c) &&
	  hb_barrier () &&
	  c->check_array_sized (arrayZ, len, sizeof (LenType)));
 }
    831 
 public:
 LenType	len;		/* Number of elements. */
 Type		arrayZ[HB_VAR_ARRAY];	/* The elements; count given by len. */
 public:
 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
    837 };
/* Shorthands for the common length-field widths. */
template <typename Type> using Array16Of = ArrayOf<Type, HBUINT16>;
template <typename Type> using Array24Of = ArrayOf<Type, HBUINT24>;
template <typename Type> using Array32Of = ArrayOf<Type, HBUINT32>;
using PString = ArrayOf<HBUINT8, HBUINT8>;	/* Pascal-style byte string. */

/* Array of Offset's */
template <typename Type> using Array8OfOffset24To = ArrayOf<OffsetTo<Type, HBUINT24>, HBUINT8>;
template <typename Type> using Array16OfOffset16To = ArrayOf<OffsetTo<Type, HBUINT16>, HBUINT16>;
template <typename Type> using Array16OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT16>;
template <typename Type> using Array32OfOffset32To = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
    848 
/* Array of offsets relative to the beginning of the array itself. */
template <typename Type, typename OffsetType>
struct List16OfOffsetTo : ArrayOf<OffsetTo<Type, OffsetType>, HBUINT16>
{
 /* Dereference the i'th offset, resolved relative to this list;
  * Null sentinel on out-of-bounds index. */
 const Type& operator [] (int i_) const
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= this->len)) return Null (Type);
   hb_barrier ();
   return this+this->arrayZ[i];
 }
 /* Non-const overload; Crap sentinel on out-of-bounds index. */
 const Type& operator [] (int i_)
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= this->len)) return Crap (Type);
   hb_barrier ();
   return this+this->arrayZ[i];
 }

 /* Subset: embed the list shallowly, then subset each offset target. */
 bool subset (hb_subset_context_t *c) const
 {
   TRACE_SUBSET (this);
   struct List16OfOffsetTo *out = c->serializer->embed (*this);
   if (unlikely (!out)) return_trace (false);
   unsigned int count = this->len;
   for (unsigned int i = 0; i < count; i++)
     out->arrayZ[i].serialize_subset (c, this->arrayZ[i], this, out);
   return_trace (true);
 }

 template <typename ...Ts>
 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   /* Pass `this` as the base so the offsets are followed relative to the list. */
   return_trace ((Array16Of<OffsetTo<Type, OffsetType>>::sanitize (c, this, std::forward<Ts> (ds)...)));
 }
};

template <typename Type>
using List16OfOffset16To = List16OfOffsetTo<Type, HBUINT16>;
    889 
/* An array starting at second element. */
template <typename Type, typename LenType>
struct HeadlessArrayOf
{
 static constexpr unsigned item_size = Type::static_size;

 HB_DELETE_CREATE_COPY_ASSIGN (HeadlessArrayOf);

 /* Indexing is 1-based: element 0 is stored outside this struct, so
  * index 0 and out-of-bounds both yield the Null sentinel. */
 const Type& operator [] (int i_) const
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= lenP1 || !i)) return Null (Type);
   hb_barrier ();
   return arrayZ[i-1];
 }
 Type& operator [] (int i_)
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= lenP1 || !i)) return Crap (Type);
   hb_barrier ();
   return arrayZ[i-1];
 }
 unsigned int get_size () const
 { return lenP1.static_size + get_length () * Type::static_size; }

 /* Stored count is length-plus-one; both 0 and 1 mean an empty array here. */
 unsigned get_length () const { return lenP1 ? lenP1 - 1 : 0; }

 hb_array_t<      Type> as_array ()       { return hb_array (arrayZ, get_length ()); }
 hb_array_t<const Type> as_array () const { return hb_array (arrayZ, get_length ()); }

 /* Iterator. */
 typedef hb_array_t<const Type>   iter_t;
 typedef hb_array_t<      Type> writer_t;
  iter_t   iter () const { return as_array (); }
 writer_t writer ()       { return as_array (); }
 operator   iter_t () const { return   iter (); }
 operator writer_t ()       { return writer (); }

 /* Faster range-based for loop. */
 const Type *begin () const { return arrayZ; }
 const Type *end () const { return arrayZ + get_length (); }

 /* Serialize space for items_len headless elements (stores items_len+1). */
 HB_NODISCARD bool serialize (hb_serialize_context_t *c, unsigned int items_len, bool clear = true)
 {
   TRACE_SERIALIZE (this);
   if (unlikely (!c->extend_min (this))) return_trace (false);
   c->check_assign (lenP1, items_len + 1, HB_SERIALIZE_ERROR_ARRAY_OVERFLOW);
   if (unlikely (!c->extend_size (this, get_size (), clear))) return_trace (false);
   return_trace (true);
 }
 template <typename Iterator,
    hb_requires (hb_is_source_of (Iterator, Type))>
 HB_NODISCARD bool serialize (hb_serialize_context_t *c, Iterator items)
 {
   TRACE_SERIALIZE (this);
   unsigned count = hb_len (items);
   if (unlikely (!serialize (c, count, false))) return_trace (false);
   /* TODO Umm. Just exhaust the iterator instead?  Being extra
    * cautious right now.. */
   for (unsigned i = 0; i < count; i++, ++items)
     arrayZ[i] = *items;
   return_trace (true);
 }

 template <typename ...Ts>
 HB_ALWAYS_INLINE
 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!sanitize_shallow (c))) return_trace (false);
   if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
   hb_barrier ();
   unsigned int count = get_length ();
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
   return_trace (true);
 }

 private:
 /* Range-check lenP1 and the (lenP1 - 1)-element payload. */
 bool sanitize_shallow (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (lenP1.sanitize (c) &&
	  hb_barrier () &&
	  (!lenP1 || c->check_array_sized (arrayZ, lenP1 - 1, sizeof (LenType))));
 }

 public:
 LenType	lenP1;		/* Element count plus one. */
 Type		arrayZ[HB_VAR_ARRAY];
 public:
 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
};
template <typename Type> using HeadlessArray16Of = HeadlessArrayOf<Type, HBUINT16>;
    985 
/* An array storing length-1. */
template <typename Type, typename LenType=HBUINT16>
struct ArrayOfM1
{
 HB_DELETE_CREATE_COPY_ASSIGN (ArrayOfM1);

 /* Valid indices are 0..lenM1 inclusive: the array holds lenM1+1 elements. */
 const Type& operator [] (int i_) const
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i > lenM1)) return Null (Type);
   hb_barrier ();
   return arrayZ[i];
 }
 Type& operator [] (int i_)
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i > lenM1)) return Crap (Type);
   hb_barrier ();
   return arrayZ[i];
 }
 unsigned int get_size () const
 { return lenM1.static_size + (lenM1 + 1) * Type::static_size; }

 /* Validate storage, then recurse into each of the lenM1+1 elements. */
 template <typename ...Ts>
 HB_ALWAYS_INLINE
 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!sanitize_shallow (c))) return_trace (false);
   if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
   hb_barrier ();
   unsigned int count = lenM1 + 1;
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!c->dispatch (arrayZ[i], std::forward<Ts> (ds)...)))
return_trace (false);
   return_trace (true);
 }

 private:
 bool sanitize_shallow (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (lenM1.sanitize (c) &&
	  hb_barrier () &&
	  (c->check_array_sized (arrayZ, lenM1 + 1, sizeof (LenType))));
 }

 public:
 LenType	lenM1;		/* Element count minus one. */
 Type		arrayZ[HB_VAR_ARRAY];
 public:
 DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
};
   1039 
/* An array with sorted elements.  Supports binary searching. */
template <typename Type, typename LenType>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
 hb_sorted_array_t<      Type> as_array ()       { return hb_sorted_array (this->arrayZ, this->len); }
 hb_sorted_array_t<const Type> as_array () const { return hb_sorted_array (this->arrayZ, this->len); }

 /* Iterator. */
 typedef hb_sorted_array_t<const Type>   iter_t;
 typedef hb_sorted_array_t<      Type> writer_t;
  iter_t   iter () const { return as_array (); }
 writer_t writer ()       { return as_array (); }
 operator   iter_t () const { return   iter (); }
 operator writer_t ()       { return writer (); }

 /* Faster range-based for loop. */
 const Type *begin () const { return this->arrayZ; }
 const Type *end () const { return this->arrayZ + this->len; }

 bool serialize (hb_serialize_context_t *c, unsigned int items_len)
 {
   TRACE_SERIALIZE (this);
   bool ret = ArrayOf<Type, LenType>::serialize (c, items_len);
   return_trace (ret);
 }
 /* Iterator overload: the source must already be sorted (enforced by the
  * hb_is_sorted_source_of requirement). */
 template <typename Iterator,
    hb_requires (hb_is_sorted_source_of (Iterator, Type))>
 bool serialize (hb_serialize_context_t *c, Iterator items)
 {
   TRACE_SERIALIZE (this);
   bool ret = ArrayOf<Type, LenType>::serialize (c, items);
   return_trace (ret);
 }

 SortedArrayOf* copy (hb_serialize_context_t *c) const
 {
   TRACE_SERIALIZE (this);
   SortedArrayOf* out = reinterpret_cast<SortedArrayOf *> (ArrayOf<Type, LenType>::copy (c));
   return_trace (out);
 }

 /* Binary search; returns the not_found sentinel when x is absent. */
 template <typename T>
 Type &bsearch (const T &x, Type &not_found = Crap (Type))
 { return *as_array ().bsearch (x, &not_found); }
 template <typename T>
 const Type &bsearch (const T &x, const Type &not_found = Null (Type)) const
 { return *as_array ().bsearch (x, &not_found); }
 template <typename T>
 bool bfind (const T &x, unsigned int *i = nullptr,
      hb_not_found_t not_found = HB_NOT_FOUND_DONT_STORE,
      unsigned int to_store = (unsigned int) -1) const
 { return as_array ().bfind (x, i, not_found, to_store); }
};

template <typename Type> using SortedArray16Of = SortedArrayOf<Type, HBUINT16>;
template <typename Type> using SortedArray24Of = SortedArrayOf<Type, HBUINT24>;
template <typename Type> using SortedArray32Of = SortedArrayOf<Type, HBUINT32>;
   1097 
   1098 /*
   1099 * Binary-search arrays
   1100 */
   1101 
   1102 template <typename LenType=HBUINT16>
   1103 struct BinSearchHeader
   1104 {
   1105  operator uint32_t () const { return len; }
   1106 
   1107  bool sanitize (hb_sanitize_context_t *c) const
   1108  {
   1109    TRACE_SANITIZE (this);
   1110    return_trace (c->check_struct (this));
   1111  }
   1112 
   1113  BinSearchHeader& operator = (unsigned int v)
   1114  {
   1115    len = v;
   1116    assert (len == v);
   1117    entrySelector = hb_max (1u, hb_bit_storage (v)) - 1;
   1118    searchRange = 16 * (1u << entrySelector);
   1119    rangeShift = v * 16 > searchRange
   1120 	 ? 16 * v - searchRange
   1121 	 : 0;
   1122    return *this;
   1123  }
   1124 
   1125  protected:
   1126  LenType	len;
   1127  LenType	searchRange;
   1128  LenType	entrySelector;
   1129  LenType	rangeShift;
   1130 
   1131  public:
   1132  DEFINE_SIZE_STATIC (8);
   1133 };
   1134 
/* Sorted array whose length field is a full binary-search header. */
template <typename Type, typename LenType=HBUINT16>
using BinSearchArrayOf = SortedArrayOf<Type, BinSearchHeader<LenType>>;
   1137 
   1138 
/* Binary-search header whose lookup-unit size is given at runtime
 * (unitSize), rather than fixed by the element type. */
struct VarSizedBinSearchHeader
{

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (c->check_struct (this));
 }

 HBUINT16	unitSize;	/* Size of a lookup unit for this search in bytes. */
 HBUINT16	nUnits;		/* Number of units of the preceding size to be searched. */
 HBUINT16	searchRange;	/* The value of unitSize times the largest power of 2
			 * that is less than or equal to the value of nUnits. */
 HBUINT16	entrySelector;	/* The log base 2 of the largest power of 2 less than
			 * or equal to the value of nUnits. */
 HBUINT16	rangeShift;	/* The value of unitSize times the difference of the
			 * value of nUnits minus the largest power of 2 less
			 * than or equal to the value of nUnits. */
 public:
 DEFINE_SIZE_STATIC (10);
};
   1160 
/* Binary-searchable array of variable-size units addressed via
 * byte offsets (i * unitSize) into bytesZ. */
template <typename Type>
struct VarSizedBinSearchArrayOf
{
 static constexpr unsigned item_size = Type::static_size;

 HB_DELETE_CREATE_COPY_ASSIGN (VarSizedBinSearchArrayOf);

 /* True if the last unit is an all-0xFFFF search terminator, which is
  * then excluded from get_length (). */
 bool last_is_terminator () const
 {
   if (unlikely (!header.nUnits)) return false;

   /* Gah.
    *
    * "The number of termination values that need to be included is table-specific.
    * The value that indicates binary search termination is 0xFFFF." */
   const HBUINT16 *words = &StructAtOffset<HBUINT16> (&bytesZ, (header.nUnits - 1) * header.unitSize);
   unsigned int count = Type::TerminationWordCount;
   for (unsigned int i = 0; i < count; i++)
     if (words[i] != 0xFFFFu)
return false;
   return true;
 }

 const Type& operator [] (int i_) const
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= get_length ())) return Null (Type);
   hb_barrier ();
   return StructAtOffset<Type> (&bytesZ, i * header.unitSize);
 }
 Type& operator [] (int i_)
 {
   unsigned int i = (unsigned int) i_;
   if (unlikely (i >= get_length ())) return Crap (Type);
   hb_barrier ();
   return StructAtOffset<Type> (&bytesZ, i * header.unitSize);
 }
 unsigned int get_length () const
 { return header.nUnits - last_is_terminator (); }
 unsigned int get_size () const
 { return header.static_size + header.nUnits * header.unitSize; }

 /* Validate the header/storage, then each (non-terminator) unit. */
 template <typename ...Ts>
 HB_ALWAYS_INLINE
 bool sanitize (hb_sanitize_context_t *c, Ts&&... ds) const
 {
   TRACE_SANITIZE (this);
   if (unlikely (!sanitize_shallow (c))) return_trace (false);
   if (!sizeof... (Ts) && hb_is_trivially_copyable(Type)) return_trace (true);
   hb_barrier ();
   unsigned int count = get_length ();
   for (unsigned int i = 0; i < count; i++)
     if (unlikely (!(*this)[i].sanitize (c, std::forward<Ts> (ds)...)))
return_trace (false);
   return_trace (true);
 }

 /* Binary search over the units; nullptr when key is absent. */
 template <typename T>
 const Type *bsearch (const T &key) const
 {
   unsigned pos;
   return hb_bsearch_impl (&pos,
		    key,
		    (const void *) bytesZ,
		    get_length (),
		    header.unitSize,
		    _hb_cmp_method<T, Type>)
   ? (const Type *) (((const char *) &bytesZ) + (pos * header.unitSize))
   : nullptr;
 }

 private:
 /* Also ensures unitSize is large enough to hold a Type. */
 bool sanitize_shallow (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (header.sanitize (c) &&
	  hb_barrier () &&
	  Type::static_size <= header.unitSize &&
	  c->check_range (bytesZ.arrayZ,
			  header.nUnits,
			  header.unitSize));
 }

 protected:
 VarSizedBinSearchHeader	header;
 UnsizedArrayOf<HBUINT8>	bytesZ;
 public:
 DEFINE_SIZE_ARRAY (10, bytesZ);
};
   1250 
   1251 
/* CFF INDEX */

/* CFF INDEX: count, per-object offset size, (count+1) 1-based offsets,
 * then the concatenated object data. */
template <typename COUNT>
struct CFFIndex
{
 unsigned int offset_array_size () const
 { return offSize * (count + 1); }

 /* Serialize header plus object data from an iterable of byte arrays.
  * data_size is taken from *p_data_size when given, else recomputed. */
 template <typename Iterable,
    hb_requires (hb_is_iterable (Iterable))>
 bool serialize (hb_serialize_context_t *c,
	  const Iterable &iterable,
	  const unsigned *p_data_size = nullptr,
                 unsigned min_off_size = 0)
 {
   TRACE_SERIALIZE (this);
   unsigned data_size;
   if (p_data_size)
     data_size = *p_data_size;
   else
     total_size (iterable, &data_size);

   auto it = hb_iter (iterable);
   if (unlikely (!serialize_header (c, +it, data_size, min_off_size))) return_trace (false);
   unsigned char *ret = c->allocate_size<unsigned char> (data_size, false);
   if (unlikely (!ret)) return_trace (false);
   for (const auto &_ : +it)
   {
     unsigned len = _.length;
     if (!len)
continue;
     if (len <= 1)
     {
*ret++ = *_.arrayZ;
continue;
     }
     hb_memcpy (ret, _.arrayZ, len);
     ret += len;
   }
   return_trace (true);
 }

 /* Serialize count, offSize and the offset array (data written separately).
  * Offsets are 1-based into the object data. */
 template <typename Iterator,
    hb_requires (hb_is_iterator (Iterator))>
 bool serialize_header (hb_serialize_context_t *c,
		 Iterator it,
		 unsigned data_size,
                        unsigned min_off_size = 0)
 {
   TRACE_SERIALIZE (this);

   /* Narrowest byte width that can represent data_size + 1. */
   unsigned off_size = (hb_bit_storage (data_size + 1) + 7) / 8;
   off_size = hb_max(min_off_size, off_size);

   /* serialize CFFIndex header */
   if (unlikely (!c->extend_min (this))) return_trace (false);
   this->count = hb_len (it);
   if (!this->count) return_trace (true);
   if (unlikely (!c->extend (this->offSize))) return_trace (false);
   this->offSize = off_size;
   if (unlikely (!c->allocate_size<HBUINT8> (off_size * (this->count + 1), false)))
     return_trace (false);

   /* serialize indices */
   unsigned int offset = 1;
   if (HB_OPTIMIZE_SIZE_VAL)
   {
     unsigned int i = 0;
     for (const auto &_ : +it)
     {
set_offset_at (i++, offset);
offset += hb_len_of (_);
     }
     set_offset_at (i, offset);
   }
   else
     /* Size-specialized fast paths avoid the per-call switch in set_offset_at. */
     switch (off_size)
     {
case 1:
{
  HBUINT8 *p = (HBUINT8 *) offsets;
  for (const auto &_ : +it)
  {
    *p++ = offset;
    offset += hb_len_of (_);
  }
  *p = offset;
}
break;
case 2:
{
  HBUINT16 *p = (HBUINT16 *) offsets;
  for (const auto &_ : +it)
  {
    *p++ = offset;
    offset += hb_len_of (_);
  }
  *p = offset;
}
break;
case 3:
{
  HBUINT24 *p = (HBUINT24 *) offsets;
  for (const auto &_ : +it)
  {
    *p++ = offset;
    offset += hb_len_of (_);
  }
  *p = offset;
}
break;
case 4:
{
  HBUINT32 *p = (HBUINT32 *) offsets;
  for (const auto &_ : +it)
  {
    *p++ = offset;
    offset += hb_len_of (_);
  }
  *p = offset;
}
break;
default:
break;
     }

   assert (offset == data_size + 1);
   return_trace (true);
 }

 /* Total serialized size of the INDEX; optionally also returns the raw
  * object-data size through *data_size. */
 template <typename Iterable,
    hb_requires (hb_is_iterable (Iterable))>
 static unsigned total_size (const Iterable &iterable, unsigned *data_size = nullptr, unsigned min_off_size = 0)
 {
   auto it = + hb_iter (iterable);
   if (!it)
   {
     if (data_size) *data_size = 0;
     return min_size;
   }

   unsigned total = 0;
   for (const auto &_ : +it)
     total += hb_len_of (_);

   if (data_size) *data_size = total;

   unsigned off_size = (hb_bit_storage (total + 1) + 7) / 8;
   off_size = hb_max(min_off_size, off_size);

   return min_size + HBUINT8::static_size + (hb_len (it) + 1) * off_size + total;
 }

 /* Store offset into slot `index`, dispatching on the offset byte width. */
 void set_offset_at (unsigned int index, unsigned int offset)
 {
   assert (index <= count);

   unsigned int size = offSize;
   const HBUINT8 *p = offsets;
   switch (size)
   {
     case 1: ((HBUINT8  *) p)[index] = offset; break;
     case 2: ((HBUINT16 *) p)[index] = offset; break;
     case 3: ((HBUINT24 *) p)[index] = offset; break;
     case 4: ((HBUINT32 *) p)[index] = offset; break;
     default: return;
   }
 }

 private:
 /* Read the offset stored in slot `index`; 0 for an invalid offSize. */
 unsigned int offset_at (unsigned int index) const
 {
   assert (index <= count);

   unsigned int size = offSize;
   const HBUINT8 *p = offsets;
   switch (size)
   {
     case 1: return ((HBUINT8  *) p)[index];
     case 2: return ((HBUINT16 *) p)[index];
     case 3: return ((HBUINT24 *) p)[index];
     case 4: return ((HBUINT32 *) p)[index];
     default: return 0;
   }
 }

 /* Start of object data; "- 1" because stored offsets are 1-based. */
 const unsigned char *data_base () const
 { return (const unsigned char *) this + min_size + offSize.static_size - 1 + offset_array_size (); }
 public:

 /* Byte span of the index'th object; empty on any inconsistency
  * (out-of-range index, non-monotonic or overlong offsets). */
 hb_ubytes_t operator [] (unsigned int index) const
 {
   if (unlikely (index >= count)) return hb_ubytes_t ();
   hb_barrier ();
   unsigned offset0 = offset_at (index);
   unsigned offset1 = offset_at (index + 1);
   if (unlikely (offset1 < offset0 || offset1 > offset_at (count)))
     return hb_ubytes_t ();
   return hb_ubytes_t (data_base () + offset0, offset1 - offset0);
 }

 unsigned int get_size () const
 {
   if (count)
     return min_size + offSize.static_size + offset_array_size () + (offset_at (count) - 1);
   return min_size;  /* empty CFFIndex contains count only */
 }

 bool sanitize (hb_sanitize_context_t *c) const
 {
   TRACE_SANITIZE (this);
   return_trace (likely (c->check_struct (this) &&
		  hb_barrier () &&
		  (count == 0 || /* empty INDEX */
		   (count < count + 1u &&
		    c->check_struct (&offSize) && offSize >= 1 && offSize <= 4 &&
		    c->check_array (offsets, offSize, count + 1u) &&
		    c->check_range (data_base (), offset_at (count))))));
 }

 public:
 COUNT		count;		/* Number of object data. Note there are (count+1) offsets */
 private:
 HBUINT8	offSize;	/* The byte size of each offset in the offsets array. */
 HBUINT8	offsets[HB_VAR_ARRAY];
			/* The array of (count + 1) offsets into objects array (1-base). */
 /* HBUINT8 data[HB_VAR_ARRAY];	Object data */
 public:
 DEFINE_SIZE_MIN (COUNT::static_size);
};
typedef CFFIndex<HBUINT16> CFF1Index;
typedef CFFIndex<HBUINT32> CFF2Index;
   1484 
   1485 
/* TupleValues */
struct TupleValues
{
 /* Control-byte encoding for packed runs of delta values: the top two
  * bits select the value width of the run, the low six bits store the
  * run length minus one (so a run covers 1..64 values). */
 enum packed_value_flag_t
 {
   VALUES_ARE_ZEROS     = 0x80,
   VALUES_ARE_BYTES     = 0x00,
   VALUES_ARE_WORDS     = 0x40,
   VALUES_ARE_LONGS     = 0xC0,
   VALUES_SIZE_MASK     = 0xC0,
   VALUE_RUN_COUNT_MASK = 0x3F
 };
   1498 
 /* Encode `values` into encoded_bytes, choosing the narrowest
  * representation per run (zeros / bytes / words / longs); the run
  * encoders advance i.  Returns the number of bytes written.
  * NOTE(review): "unsafe" — no bounds checking on encoded_bytes here;
  * the caller presumably guarantees sufficient capacity — confirm at
  * call sites. */
 static unsigned compile_unsafe (hb_array_t<const int> values, /* IN */
			  unsigned char *encoded_bytes /* OUT */)
 {
   unsigned num_values = values.length;
   unsigned encoded_len = 0;
   unsigned i = 0;
   while (i < num_values)
   {
     int val = values.arrayZ[i];
     if (val == 0)
       encoded_len += encode_value_run_as_zeroes (i, encoded_bytes + encoded_len, values);
     else if ((int8_t) val == val)
       encoded_len += encode_value_run_as_bytes (i, encoded_bytes + encoded_len, values);
     else if ((int16_t) val == val)
       encoded_len += encode_value_run_as_words (i, encoded_bytes + encoded_len, values);
     else
       encoded_len += encode_value_run_as_longs (i, encoded_bytes + encoded_len, values);
   }
   return encoded_len;
 }
   1519 
   1520  static unsigned encode_value_run_as_zeroes (unsigned& i,
   1521 				      unsigned char *it,
   1522 				      hb_array_t<const int> values)
   1523  {
   1524    unsigned num_values = values.length;
   1525    unsigned run_length = 0;
   1526    unsigned encoded_len = 0;
   1527    while (i < num_values && values.arrayZ[i] == 0)
   1528    {
   1529      i++;
   1530      run_length++;
   1531    }
   1532 
   1533    while (run_length >= 64)
   1534    {
   1535      *it++ = char (VALUES_ARE_ZEROS | 63);
   1536      run_length -= 64;
   1537      encoded_len++;
   1538    }
   1539 
   1540    if (run_length)
   1541    {
   1542      *it++ = char (VALUES_ARE_ZEROS | (run_length - 1));
   1543      encoded_len++;
   1544    }
   1545    return encoded_len;
   1546  }
   1547 
 /* Consume the run of byte-sized values starting at i and emit it as
  * byte-run control bytes followed by int8 payloads.  Advances i past
  * the run and returns the number of bytes written. */
 static unsigned encode_value_run_as_bytes (unsigned &i,
				     unsigned char *it,
				     hb_array_t<const int> values)
 {
   unsigned start = i;
   unsigned num_values = values.length;
   while (i < num_values)
   {
     int val = values.arrayZ[i];
     if ((int8_t) val != val)
       break;

     /* from fonttools: if there're 2 or more zeros in a sequence,
      * it is better to start a new run to save bytes. */
     if (val == 0 && i + 1 < num_values && values.arrayZ[i+1] == 0)
       break;

     i++;
   }
   unsigned run_length = i - start;

   unsigned encoded_len = 0;

   while (run_length >= 64)
   {
     /* Control byte, then the 64 payload bytes it covers. */
     *it++ = (VALUES_ARE_BYTES | 63);
     encoded_len++;

     for (unsigned j = 0; j < 64; j++)
it[j] = static_cast<char> (values.arrayZ[start + j]);
     it += 64;
     encoded_len += 64;

     start += 64;
     run_length -= 64;
   }

   if (run_length)
   {
     *it++ = (VALUES_ARE_BYTES | (run_length - 1));
     encoded_len++;

     for (unsigned j = 0; j < run_length; j++)
       it[j] = static_cast<char> (values.arrayZ[start + j]);
     encoded_len += run_length;
   }

   return encoded_len;
 }
   1597 
 /* Consume the run of word-sized values starting at i and emit it as
  * word-run control bytes followed by big-endian int16 payloads.
  * Advances i past the run and returns the number of bytes written. */
 static unsigned encode_value_run_as_words (unsigned &i,
				     unsigned char *it,
				     hb_array_t<const int> values)
 {
   unsigned start = i;
   unsigned num_values = values.length;
   while (i < num_values)
   {
     int val = values.arrayZ[i];

     if ((int16_t) val != val)
       break;

     /* start a new run for a single zero value. */
     if (val == 0) break;

     /* From fonttools: continue word-encoded run if there's only one
      * single value in the range [-128, 127] because it is more compact.
      * Only start a new run when there're 2 continuous such values. */
     if ((int8_t) val == val &&
         i + 1 < num_values &&
         (int8_t) values.arrayZ[i+1] == values.arrayZ[i+1])
       break;

     i++;
   }

   unsigned run_length = i - start;
   unsigned encoded_len = 0;
   while (run_length >= 64)
   {
     *it++ = (VALUES_ARE_WORDS | 63);
     encoded_len++;

     for (unsigned j = 0; j < 64; j++)
     {
       /* Big-endian: high byte first. */
       int16_t value_val = values.arrayZ[start + j];
       *it++ = static_cast<char> (value_val >> 8);
       *it++ = static_cast<char> (value_val & 0xFF);

       encoded_len += 2;
     }

     start += 64;
     run_length -= 64;
   }

   if (run_length)
   {
     *it++ = (VALUES_ARE_WORDS | (run_length - 1));
     encoded_len++;
     while (start < i)
     {
       int16_t value_val = values.arrayZ[start++];
       *it++ = static_cast<char> (value_val >> 8);
       *it++ = static_cast<char> (value_val & 0xFF);

       encoded_len += 2;
     }
   }
   return encoded_len;
 }
   1660 
 /* Consume the run of values too wide for int16 starting at i and emit
  * it as long-run control bytes followed by big-endian int32 payloads.
  * Advances i past the run and returns the number of bytes written. */
 static unsigned encode_value_run_as_longs (unsigned &i,
				     unsigned char *it,
				     hb_array_t<const int> values)
 {
   unsigned start = i;
   unsigned num_values = values.length;
   while (i < num_values)
   {
     int val = values.arrayZ[i];

     /* Run ends at the first value that fits back into 16 bits. */
     if ((int16_t) val == val)
       break;

     i++;
   }

   unsigned run_length = i - start;
   unsigned encoded_len = 0;
   while (run_length >= 64)
   {
     *it++ = (VALUES_ARE_LONGS | 63);
     encoded_len++;

     for (unsigned j = 0; j < 64; j++)
     {
       /* Big-endian: most significant byte first. */
       int32_t value_val = values.arrayZ[start + j];
       *it++ = static_cast<char> (value_val >> 24);
       *it++ = static_cast<char> (value_val >> 16);
       *it++ = static_cast<char> (value_val >> 8);
       *it++ = static_cast<char> (value_val & 0xFF);

       encoded_len += 4;
     }

     start += 64;
     run_length -= 64;
   }

   if (run_length)
   {
     *it++ = (VALUES_ARE_LONGS | (run_length - 1));
     encoded_len++;
     while (start < i)
     {
       int32_t value_val = values.arrayZ[start++];
       *it++ = static_cast<char> (value_val >> 24);
       *it++ = static_cast<char> (value_val >> 16);
       *it++ = static_cast<char> (value_val >> 8);
       *it++ = static_cast<char> (value_val & 0xFF);

       encoded_len += 4;
     }
   }
   return encoded_len;
 }
   1716 
 /* Decompile a run-length-encoded stream of values from `p` into `values`.
  *
  * Each run starts with a control byte: the low bits
  * (VALUE_RUN_COUNT_MASK) hold the run length minus one, and the size
  * bits select the payload kind (zeros, int8, int16, or int32 values).
  *
  * `p` is advanced past every byte consumed (IN/OUT).  With `consume_all`
  * set, `values` is grown as runs are decoded and the stream is read
  * until `end`; otherwise exactly `values.length` entries are expected
  * and a stream encoding more than that fails.  Entries with index below
  * `start` are skipped: their bytes are consumed but nothing is stored
  * for them.  Returns false on truncated or overlong data; running out
  * of input at a run boundary is success only in consume_all mode. */
 template <typename T>
#ifndef HB_OPTIMIZE_SIZE
 HB_ALWAYS_INLINE
#endif
 static bool decompile (const HBUINT8 *&p /* IN/OUT */,
		 hb_vector_t<T> &values /* IN/OUT */,
		 const HBUINT8 *end,
		 bool consume_all = false,
		 unsigned start = 0)
 {
   unsigned i = 0;
   unsigned count = consume_all ? UINT_MAX : values.length;
   if (consume_all)
     /* Capacity hint only; assumes roughly two encoded bytes per value. */
     values.alloc ((end - p) / 2);
   while (i < count)
   {
     /* Out of input at a run boundary: success only when consuming all. */
     if (unlikely (p + 1 > end)) return consume_all;
     unsigned control = *p++;
     unsigned run_count = (control & VALUE_RUN_COUNT_MASK) + 1;
     if (consume_all)
     {
       if (unlikely (!values.resize_dirty  (values.length + run_count)))
  return false;
     }
     unsigned stop = i + run_count;
     if (unlikely (stop > count)) return false;

     /* Number of this run's leading entries that fall before `start`
      * and are therefore consumed but not stored. */
     unsigned skip = i < start ? hb_min (start - i, run_count) : 0;
     i += skip;

     if ((control & VALUES_SIZE_MASK) == VALUES_ARE_ZEROS)
     {
 hb_memset (&values.arrayZ[i], 0, (stop - i) * sizeof (T));
 i = stop;
     }
     else if ((control & VALUES_SIZE_MASK) ==  VALUES_ARE_WORDS)
     {
       if (unlikely (p + run_count * HBINT16::static_size > end)) return false;
 p += skip * HBINT16::static_size;
#ifndef HB_OPTIMIZE_SIZE
       /* 4x-unrolled copy of big-endian int16 values. */
       for (; i + 3 < stop; i += 4)
 {
   values.arrayZ[i] = * (const HBINT16 *) p;
   p += HBINT16::static_size;
   values.arrayZ[i + 1] = * (const HBINT16 *) p;
   p += HBINT16::static_size;
   values.arrayZ[i + 2] = * (const HBINT16 *) p;
   p += HBINT16::static_size;
   values.arrayZ[i + 3] = * (const HBINT16 *) p;
   p += HBINT16::static_size;
 }
#endif
       for (; i < stop; i++)
       {
         values.arrayZ[i] = * (const HBINT16 *) p;
         p += HBINT16::static_size;
       }
     }
     else if ((control & VALUES_SIZE_MASK) ==  VALUES_ARE_LONGS)
     {
       if (unlikely (p + run_count * HBINT32::static_size > end)) return false;
 p += skip * HBINT32::static_size;
       for (; i < stop; i++)
       {
         values.arrayZ[i] = * (const HBINT32 *) p;
         p += HBINT32::static_size;
       }
     }
     else if ((control & VALUES_SIZE_MASK) ==  VALUES_ARE_BYTES)
     {
       if (unlikely (p + run_count > end)) return false;
 p += skip * HBINT8::static_size;
#ifndef HB_OPTIMIZE_SIZE
 /* 4x-unrolled copy of int8 values. */
 for (; i + 3 < stop; i += 4)
 {
   values.arrayZ[i] = * (const HBINT8 *) p++;
   values.arrayZ[i + 1] = * (const HBINT8 *) p++;
   values.arrayZ[i + 2] = * (const HBINT8 *) p++;
   values.arrayZ[i + 3] = * (const HBINT8 *) p++;
 }
#endif
       for (; i < stop; i++)
         values.arrayZ[i] = * (const HBINT8 *) p++;
     }
   }
   return true;
 }
   1804 
 /* Forward iterator yielding the decoded ints of a run-length-encoded
  * value stream, one run header at a time.
  *
  * Invariant between operations: `current_value` holds the value at the
  * current position, `p` points just past its encoded bytes, and
  * `run_count` counts the values left in the current run *including*
  * the current one.  On truncated data the iterator yields nothing more. */
 struct iter_t : hb_iter_with_fallback_t<iter_t, int>
 {
   iter_t (const unsigned char *p_, unsigned len_)
    : p (p_), endp (p_ + len_)
   { if (likely (ensure_run ())) read_value (); }

   private:
   const unsigned char *p;
   const unsigned char * const endp;
   int current_value = 0;
   signed run_count = 0;   /* Values remaining in current run, incl. current. */
   unsigned width = 0;     /* Bytes per value in current run; 0 for a zeros run. */

   /* Fast path: true while the current run still has values; otherwise
    * decode the next run header. */
   HB_ALWAYS_INLINE
   bool ensure_run ()
   {
     if (likely (run_count > 0)) return true;
     return _ensure_run ();
   }
   /* Slow path: parse the next control byte and bounds-check the run. */
   bool _ensure_run ()
   {
     if (unlikely (p >= endp))
     {
       run_count = 0;
       current_value = 0;
 return false;
     }

     unsigned control = *p++;
     run_count = (control & VALUE_RUN_COUNT_MASK) + 1;
     width = control & VALUES_SIZE_MASK;
     switch (width)
     {
       case VALUES_ARE_ZEROS: width = 0; break;
 case VALUES_ARE_BYTES: width = HBINT8::static_size;  break;
 case VALUES_ARE_WORDS: width = HBINT16::static_size; break;
 case VALUES_ARE_LONGS: width = HBINT32::static_size; break;
 default: assert (false);
     }

     /* Truncated run payload: stop iterating entirely. */
     if (unlikely (p + run_count * width > endp))
     {
 run_count = 0;
 current_value = 0;
 return false;
     }

     return true;
   }
   /* Decode the value at `p` into current_value and consume its bytes. */
   void read_value ()
   {
     switch (width)
     {
       case 0: current_value = 0; break;
 case 1: current_value = * (const HBINT8  *) p; break;
 case 2: current_value = * (const HBINT16 *) p; break;
 case 4: current_value = * (const HBINT32 *) p; break;
     }
     p += width;
   }

   public:

   typedef int __item_t__;
   __item_t__ __item__ () const
   { return current_value; }

   bool __more__ () const { return run_count || p < endp; }
   void __next__ ()
   {
     run_count--;
     if (unlikely (!ensure_run ()))
 return;
     read_value ();
   }
   /* Advance n positions, jumping over run payloads without decoding
    * every intermediate value. */
   void __forward__ (unsigned n)
   {
     if (unlikely (!ensure_run ()))
 return;
     while (n)
     {
 unsigned i = hb_min (n, (unsigned) run_count);
 run_count -= i;
 n -= i;
 /* The current value's bytes were already consumed by read_value,
  * hence only (i - 1) further values to skip over. */
 p += (i - 1) * width;
 if (unlikely (!ensure_run ()))
   return;
 read_value ();
     }
   }
   bool operator != (const iter_t& o) const
   { return p != o.p || run_count != o.run_count; }
   iter_t __end__ () const
   {
     iter_t it (endp, 0);
     return it;
   }
 };
   1903 
 /* Bulk decoder over a run-length-encoded value stream.  Unlike iter_t
  * it keeps no current value: runs are consumed wholesale, either
  * skipped or accumulated into a caller-provided float array. */
 struct fetcher_t
 {
   fetcher_t (const unsigned char *p_, unsigned len_)
      : p (p_), end (p_ + len_) {}

   private:
   const unsigned char *p;
   const unsigned char * const end;
   signed run_count = 0;   /* Values not yet consumed from the current run. */
   unsigned width = 0;     /* Bytes per value in current run; 0 for a zeros run. */

   /* Fast path: true while the current run still has values; otherwise
    * decode the next run header. */
   HB_ALWAYS_INLINE
   bool ensure_run ()
   {
     if (likely (run_count > 0)) return true;
     return _ensure_run ();
   }

   /* Slow path: parse the next control byte and bounds-check the run. */
   bool _ensure_run ()
   {
     if (unlikely (p >= end))
     {
       run_count = 0;
 return false;
     }

     unsigned control = *p++;
     run_count = (control & VALUE_RUN_COUNT_MASK) + 1;
     width = control & VALUES_SIZE_MASK;
     switch (width)
     {
       case VALUES_ARE_ZEROS: width = 0; break;
 case VALUES_ARE_BYTES: width = HBINT8::static_size;  break;
 case VALUES_ARE_WORDS: width = HBINT16::static_size; break;
 case VALUES_ARE_LONGS: width = HBINT32::static_size; break;
 default: assert (false);
     }

     /* Truncated run payload: stop entirely. */
     if (unlikely (p + run_count * width > end))
     {
 run_count = 0;
 return false;
     }

     return true;
   }

   /* Advance past n values without decoding them. */
   void skip (unsigned n)
   {
     while (n)
     {
 if (unlikely (!ensure_run ()))
   return;
 unsigned i = hb_min (n, (unsigned) run_count);
 run_count -= i;
 n -= i;
 p += i * width;
     }
   }

   /* Accumulate the next out.length decoded values into out, multiplied
    * by `scale` when the `scaled` template flag is set.  Stops early
    * (silently) on truncated data.  A zeros run only advances the output
    * pointer, since adding zero is a no-op.  Note only one arm of each
    * ternary runs, so pp advances exactly once per value. */
   template <bool scaled>
   void _add_to (hb_array_t<float> out, float scale = 1.0f)
   {
     unsigned n = out.length;
     float *arrayZ = out.arrayZ;

     for (unsigned i = 0; i < n;)
     {
 if (unlikely (!ensure_run ()))
   break;
 unsigned count = hb_min (n - i, (unsigned) run_count);
 switch (width)
 {
   case 0:
   {
     arrayZ += count;
     break;
   }
   case 1:
   {
     const auto *pp = (const HBINT8 *) p;
     unsigned j = 0;
#ifndef HB_OPTIMIZE_SIZE
     /* 4x-unrolled accumulation. */
     for (; j + 3 < count; j += 4)
     {
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
     }
#endif
     for (; j < count; j++)
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;

     p = (const unsigned char *) pp;
   }
   break;
   case 2:
   {
     const auto *pp = (const HBINT16 *) p;
     unsigned j = 0;
#ifndef HB_OPTIMIZE_SIZE
     /* 4x-unrolled accumulation. */
     for (; j + 3 < count; j += 4)
     {
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;
     }
#endif
     for (; j < count; j++)
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;

     p = (const unsigned char *) pp;
   }
   break;
   case 4:
   {
     const auto *pp = (const HBINT32 *) p;
     for (unsigned j = 0; j < count; j++)
       *arrayZ++ += scaled ? *pp++ * scale : *pp++;

     p = (const unsigned char *) pp;
   }
   break;
 }
 run_count -= count;
 i += count;
     }
   }

   public:
   /* Add the next out.length decoded values (times scale) into out.
    * A zero scale just skips the values without touching the output. */
   void add_to (hb_array_t<float> out, float scale = 1.0f)
   {
     unsigned n = out.length;

     if (scale == 0.0f)
     {
       skip (n);
 return;
     }

#ifndef HB_OPTIMIZE_SIZE
     // The following branch is supposed to speed things up by avoiding
     // the multiplication in _add_to<> if scale is 1.0f.
     // But in practice it seems to bloat the code and slow things down.
     if (false && scale == 1.0f)
       _add_to<false> (out);
     else
#endif
       _add_to<true> (out, scale);
   }
 };
   2057 };
   2058 
   2059 struct TupleList : CFF2Index
   2060 {
   2061  TupleValues::iter_t operator [] (unsigned i) const
   2062  {
   2063    auto bytes = CFF2Index::operator [] (i);
   2064    return TupleValues::iter_t (bytes.arrayZ, bytes.length);
   2065  }
   2066 
   2067  TupleValues::fetcher_t fetcher (unsigned i) const
   2068  {
   2069    auto bytes = CFF2Index::operator [] (i);
   2070    return TupleValues::fetcher_t (bytes.arrayZ, bytes.length);
   2071  }
   2072 };
   2073 
   2074 
   2075 // Alignment
   2076 
   2077 template <unsigned int alignment>
   2078 struct Align
   2079 {
   2080  unsigned get_size (const void *base) const
   2081  {
   2082    unsigned offset = (const char *) this - (const char *) base;
   2083    return (alignment - offset) & (alignment - 1);
   2084  }
   2085 
   2086  public:
   2087  DEFINE_SIZE_MIN (0);
   2088 };
   2089 
   2090 
   2091 
   2092 } /* namespace OT */
   2093 
   2094 
   2095 #endif /* HB_OPEN_TYPE_HH */