Coverage.hh (11736B)
1 /* 2 * Copyright © 2007,2008,2009 Red Hat, Inc. 3 * Copyright © 2010,2012 Google, Inc. 4 * 5 * This is part of HarfBuzz, a text shaping library. 6 * 7 * Permission is hereby granted, without written agreement and without 8 * license or royalty fees, to use, copy, modify, and distribute this 9 * software and its documentation for any purpose, provided that the 10 * above copyright notice and the following two paragraphs appear in 11 * all copies of this software. 12 * 13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR 14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES 15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN 16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH 17 * DAMAGE. 18 * 19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, 20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND 21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS 22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO 23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. 
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod, Garret Rieger
 */

#ifndef OT_LAYOUT_COMMON_COVERAGE_HH
#define OT_LAYOUT_COMMON_COVERAGE_HH

#include "../types.hh"
#include "CoverageFormat1.hh"
#include "CoverageFormat2.hh"

namespace OT {
namespace Layout {
namespace Common {

/* Forward declaration; defined after struct Coverage below. */
template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
				       Iterator it);

/*
 * Coverage -- the OpenType Coverage table.
 *
 * Maps a glyph ID to its coverage index, or NOT_COVERED if the glyph is
 * not in the table.  Dispatches on the format tag to one of:
 *   format 1: sorted glyph array        (CoverageFormat1_3<SmallTypes>)
 *   format 2: sorted glyph range array  (CoverageFormat2_4<SmallTypes>)
 * and, unless HB_NO_BEYOND_64K, the same two layouts with wider
 * (MediumTypes) fields as formats 3 and 4.
 */
struct Coverage
{

  protected:
  union {
  struct { HBUINT16 v; } format;		/* Format identifier */
  CoverageFormat1_3<SmallTypes>	format1;
  CoverageFormat2_4<SmallTypes>	format2;
#ifndef HB_NO_BEYOND_64K
  CoverageFormat1_3<MediumTypes>format3;
  CoverageFormat2_4<MediumTypes>format4;
#endif
  } u;
  public:
  DEFINE_SIZE_UNION (2, format.v);

#ifndef HB_OPTIMIZE_SIZE
  HB_ALWAYS_INLINE
#endif
  /* Validate the format tag, then the active union member.
   * Unknown formats sanitize as true (treated as empty coverage). */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.v.sanitize (c)) return_trace (false);
    hb_barrier ();
    switch (u.format.v)
    {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
#ifndef HB_NO_BEYOND_64K
    case 3: return_trace (u.format3.sanitize (c));
    case 4: return_trace (u.format4.sanitize (c));
#endif
    default:return_trace (true);
    }
  }

  /* Has interface. */
  unsigned operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != NOT_COVERED; }
  /* Predicate. */
  bool operator () (hb_codepoint_t k) const { return has (k); }

  unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
  /* Return the coverage index for glyph_id, or NOT_COVERED. */
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    switch (u.format.v) {
    case 1: return u.format1.get_coverage (glyph_id);
    case 2: return u.format2.get_coverage (glyph_id);
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.get_coverage (glyph_id);
    case 4: return u.format4.get_coverage (glyph_id);
#endif
    default:return NOT_COVERED;
    }
  }
  /* Cached variant: the cache stores the coverage index itself, using
   * cache->MAX_VALUE as the sentinel for NOT_COVERED.  Indices that do
   * not fit below MAX_VALUE are simply not cached. */
  unsigned int get_coverage (hb_codepoint_t glyph_id,
			     hb_ot_layout_mapping_cache_t *cache) const
  {
    unsigned coverage;
    if (cache && cache->get (glyph_id, &coverage)) return coverage < cache->MAX_VALUE ? coverage : NOT_COVERED;
    coverage = get_coverage (glyph_id);
    if (cache)
    {
      if (coverage == NOT_COVERED)
	cache->set_unchecked (glyph_id, cache->MAX_VALUE);
      else if (likely (coverage < cache->MAX_VALUE))
	cache->set_unchecked (glyph_id, coverage);
    }
    return coverage;
  }

  /* Binary-cached variant: the cache only records covered / not-covered
   * (0 vs MAX_VALUE), not the index; the actual index is still returned
   * from a full lookup on a cache miss.  Note that on a cache hit for a
   * covered glyph this returns the cached 0, not the real index -- only
   * use when the caller needs a yes/no answer. */
  unsigned int get_coverage_binary (hb_codepoint_t glyph_id,
				    hb_ot_layout_binary_cache_t *cache) const
  {
    unsigned coverage;
    if (cache && cache->get (glyph_id, &coverage)) return coverage < cache->MAX_VALUE ? coverage : NOT_COVERED;
    coverage = get_coverage (glyph_id);
    if (cache)
    {
      if (coverage == NOT_COVERED)
	cache->set_unchecked (glyph_id, cache->MAX_VALUE);
      else
	cache->set_unchecked (glyph_id, 0);
    }
    return coverage;
  }

  /* Number of glyphs covered.  Returns NOT_COVERED for an unknown
   * format (NOTE(review): an odd sentinel for a count, but callers
   * apparently rely on it -- do not change without auditing them). */
  unsigned get_population () const
  {
    switch (u.format.v) {
    case 1: return u.format1.get_population ();
    case 2: return u.format2.get_population ();
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.get_population ();
    case 4: return u.format4.get_population ();
#endif
    default:return NOT_COVERED;
    }
  }

  /* Serialize from a sorted glyph iterator, choosing the smaller
   * encoding: format 1 (one entry per glyph) unless ranges win
   * (count <= num_ranges * 3 -- a range record is ~3 units wide) or the
   * input looks unsorted, in which case format 2.  Glyphs above 0xFFFF
   * bump to the Medium (24-bit) formats 3/4 when available; glyphs that
   * do not fit the widest available format fail with INT_OVERFLOW. */
  template <typename Iterator,
	    hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (this))) return_trace (false);

    unsigned count = hb_len (glyphs);
    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;	/* -2 so that last+1 never matches the first glyph */
    hb_codepoint_t max = 0;
    bool unsorted = false;
    for (auto g: glyphs)
    {
      if (last != (hb_codepoint_t) -2 && g < last)
	unsorted = true;
      if (last + 1 != g)
	num_ranges++;
      last = g;
      if (g > max) max = g;
    }
    u.format.v = !unsorted && count <= num_ranges * 3 ? 1 : 2;

#ifndef HB_NO_BEYOND_64K
    if (max > 0xFFFFu)
      u.format.v += 2;	/* 1 -> 3, 2 -> 4: switch to MediumTypes */
    if (unlikely (max > 0xFFFFFFu))
#else
    if (unlikely (max > 0xFFFFu))
#endif
    {
      c->check_success (false, HB_SERIALIZE_ERROR_INT_OVERFLOW);
      return_trace (false);
    }

    switch (u.format.v)
    {
    case 1: return_trace (u.format1.serialize (c, glyphs));
    case 2: return_trace (u.format2.serialize (c, glyphs));
#ifndef HB_NO_BEYOND_64K
    case 3: return_trace (u.format3.serialize (c, glyphs));
    case 4: return_trace (u.format4.serialize (c, glyphs));
#endif
    default:return_trace (false);
    }
  }

  /* Subset: remap covered glyphs through the plan's GSUB glyph map,
   * drop unmapped ones, and serialize the result.  Returns whether the
   * subsetted coverage is non-empty. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto it =
    + iter ()
    | hb_take (c->plan->source->get_num_glyphs ())
    | hb_map_retains_sorting (c->plan->glyph_map_gsub)
    | hb_filter ([] (hb_codepoint_t glyph) { return glyph != HB_MAP_VALUE_INVALID; })
    ;

    // Cache the iterator result as it will be iterated multiple times
    // by the serialize code below.
    hb_sorted_vector_t<hb_codepoint_t> glyphs (it);
    Coverage_serialize (c->serializer, glyphs.iter ());
    return_trace (bool (glyphs));
  }

  /* True if any glyph in the set is covered. */
  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format.v)
    {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.intersects (glyphs);
    case 4: return u.format4.intersects (glyphs);
#endif
    default:return false;
    }
  }
  /* True if the glyph at coverage index `index` is in the set. */
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  {
    switch (u.format.v)
    {
    case 1: return u.format1.intersects_coverage (glyphs, index);
    case 2: return u.format2.intersects_coverage (glyphs, index);
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.intersects_coverage (glyphs, index);
    case 4: return u.format4.intersects_coverage (glyphs, index);
#endif
    default:return false;
    }
  }

  /* Relative lookup cost of this coverage, per the active format. */
  unsigned cost () const
  {
    switch (u.format.v) {
    case 1: hb_barrier (); return u.format1.cost ();
    case 2: hb_barrier (); return u.format2.cost ();
#ifndef HB_NO_BEYOND_64K
    case 3: hb_barrier (); return u.format3.cost ();
    case 4: hb_barrier (); return u.format4.cost ();
#endif
    default:return 0u;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format.v)
    {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.collect_coverage (glyphs);
    case 4: return u.format4.collect_coverage (glyphs);
#endif
    default:return false;
    }
  }

  /* Write into `intersect_glyphs` the covered glyphs that are also in
   * `glyphs`.  An unknown format contributes nothing. */
  template <typename IterableOut,
	    hb_requires (hb_is_sink_of (IterableOut, hb_codepoint_t))>
  void intersect_set (const hb_set_t &glyphs, IterableOut&& intersect_glyphs) const
  {
    switch (u.format.v)
    {
    case 1: return u.format1.intersect_set (glyphs, intersect_glyphs);
    case 2: return u.format2.intersect_set (glyphs, intersect_glyphs);
#ifndef HB_NO_BEYOND_64K
    case 3: return u.format3.intersect_set (glyphs, intersect_glyphs);
    case 4: return u.format4.intersect_set (glyphs, intersect_glyphs);
#endif
    default:return ;
    }
  }

  /* Sorted iterator over covered glyph IDs; wraps the per-format
   * iterators and dispatches on the captured format tag. */
  struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
  {
    static constexpr bool is_sorted_iterator = true;
    iter_t (const Coverage &c_ = Null (Coverage))
    {
      hb_memset (this, 0, sizeof (*this));	/* zero all members, incl. inactive union bytes */
      format = c_.u.format.v;
      switch (format)
      {
      case 1: u.format1.init (c_.u.format1); return;
      case 2: u.format2.init (c_.u.format2); return;
#ifndef HB_NO_BEYOND_64K
      case 3: u.format3.init (c_.u.format3); return;
      case 4: u.format4.init (c_.u.format4); return;
#endif
      default: return;	/* unknown format iterates as empty */
      }
    }
    bool __more__ () const
    {
      switch (format)
      {
      case 1: return u.format1.__more__ ();
      case 2: return u.format2.__more__ ();
#ifndef HB_NO_BEYOND_64K
      case 3: return u.format3.__more__ ();
      case 4: return u.format4.__more__ ();
#endif
      default:return false;
      }
    }
    void __next__ ()
    {
      switch (format)
      {
      case 1: u.format1.__next__ (); break;
      case 2: u.format2.__next__ (); break;
#ifndef HB_NO_BEYOND_64K
      case 3: u.format3.__next__ (); break;
      case 4: u.format4.__next__ (); break;
#endif
      default: break;
      }
    }
    typedef hb_codepoint_t __item_t__;
    __item_t__ __item__ () const { return get_glyph (); }

    hb_codepoint_t get_glyph () const
    {
      switch (format)
      {
      case 1: return u.format1.get_glyph ();
      case 2: return u.format2.get_glyph ();
#ifndef HB_NO_BEYOND_64K
      case 3: return u.format3.get_glyph ();
      case 4: return u.format4.get_glyph ();
#endif
      default:return 0;
      }
    }
    bool operator != (const iter_t& o) const
    {
      if (unlikely (format != o.format)) return true;
      switch (format)
      {
      case 1: return u.format1 != o.u.format1;
      case 2: return u.format2 != o.u.format2;
#ifndef HB_NO_BEYOND_64K
      case 3: return u.format3 != o.u.format3;
      case 4: return u.format4 != o.u.format4;
#endif
      default:return false;
      }
    }
    iter_t __end__ () const
    {
      iter_t it;
      it.format = format;
      switch (format)
      {
      case 1: it.u.format1 = u.format1.__end__ (); break;
      case 2: it.u.format2 = u.format2.__end__ (); break;
#ifndef HB_NO_BEYOND_64K
      case 3: it.u.format3 = u.format3.__end__ (); break;
      case 4: it.u.format4 = u.format4.__end__ (); break;
#endif
      default: break;
      }
      return it;
    }

    private:
    unsigned int format;
    union {
#ifndef HB_NO_BEYOND_64K
    CoverageFormat2_4<MediumTypes>::iter_t	format4; /* Put this one first since it's larger; helps shut up compiler. */
    CoverageFormat1_3<MediumTypes>::iter_t	format3;
#endif
    CoverageFormat2_4<SmallTypes>::iter_t	format2; /* Put this one first since it's larger; helps shut up compiler. */
    CoverageFormat1_3<SmallTypes>::iter_t	format1;
    } u;
  };
  iter_t iter () const { return iter_t (*this); }
};

/* Free-function helper so callers (e.g. subset above) can serialize a
 * Coverage at the serializer's current position. */
template<typename Iterator>
static inline void
Coverage_serialize (hb_serialize_context_t *c,
		    Iterator it)
{ c->start_embed<Coverage> ()->serialize (c, it); }

}
}
}

#endif  // #ifndef OT_LAYOUT_COMMON_COVERAGE_HH