1 /*
2  * Copyright © 2007,2008,2009  Red Hat, Inc.
3  * Copyright © 2010,2012  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_COMMON_HH
30 #define HB_OT_LAYOUT_COMMON_HH
31 
32 #include "hb.hh"
33 #include "hb-ot-layout.hh"
34 #include "hb-open-type.hh"
35 #include "hb-set.hh"
36 #include "hb-bimap.hh"
37 
38 
39 #ifndef HB_MAX_NESTING_LEVEL
40 #define HB_MAX_NESTING_LEVEL	6
41 #endif
42 #ifndef HB_MAX_CONTEXT_LENGTH
43 #define HB_MAX_CONTEXT_LENGTH	64
44 #endif
45 #ifndef HB_CLOSURE_MAX_STAGES
46 /*
47  * The maximum number of times a lookup can be applied during shaping.
48  * Used to limit the number of iterations of the closure algorithm.
49  * This must be larger than the number of times add_pause() is
50  * called in a collect_features call of any shaper.
51  */
52 #define HB_CLOSURE_MAX_STAGES	32
53 #endif
54 
55 #ifndef HB_MAX_SCRIPTS
56 #define HB_MAX_SCRIPTS	500
57 #endif
58 
59 #ifndef HB_MAX_LANGSYS
60 #define HB_MAX_LANGSYS	2000
61 #endif
62 
63 #ifndef HB_MAX_FEATURES
64 #define HB_MAX_FEATURES 750
65 #endif
66 
67 #ifndef HB_MAX_FEATURE_INDICES
68 #define HB_MAX_FEATURE_INDICES	1500
69 #endif
70 
71 #ifndef HB_MAX_LOOKUP_INDICES
72 #define HB_MAX_LOOKUP_INDICES	20000
73 #endif
74 
75 
76 namespace OT {
77 
78 
79 #define NOT_COVERED		((unsigned int) -1)
80 
81 
/* Forward declarations for serializer helpers defined later in this file. */

/* Serializes a Coverage table from an iterator of glyph IDs. */
template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
				       Iterator it);

/* Serializes a ClassDef table from an iterator of (glyph, class) values. */
template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
				       Iterator it);

/* Remaps the class values in gid_klass_map to a compact range (recorded in
 * klass_map) and serializes the resulting ClassDef.  See the definition
 * later in this file for the exact use_class_zero semantics. */
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID> &glyphs,
					  const hb_set_t &klasses,
					  bool use_class_zero,
					  hb_map_t *klass_map /*INOUT*/);
96 
97 
/* Context used while pruning redundant LangSys entries during subsetting.
 * Tracks which Script/LangSys tables have already been processed (keyed by
 * their byte offset from `table`) and enforces sanity limits on how many
 * scripts and langsys entries are visited. */
struct hb_prune_langsys_context_t
{
  hb_prune_langsys_context_t (const void         *table_,
                              hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
                              const hb_map_t     *duplicate_feature_map_,
                              hb_set_t           *new_collected_feature_indexes_)
      :table (table_),
      script_langsys_map (script_langsys_map_),
      duplicate_feature_map (duplicate_feature_map_),
      new_feature_indexes (new_collected_feature_indexes_),
      script_count (0),langsys_count (0) {}

  /* Returns true if this Script was already visited (or the HB_MAX_SCRIPTS
   * limit has been exceeded); callers skip the script in that case. */
  bool visitedScript (const void *s)
  {
    if (script_count++ > HB_MAX_SCRIPTS)
      return true;

    return visited (s, visited_script);
  }

  /* Returns true if this LangSys was already visited (or the HB_MAX_LANGSYS
   * limit has been exceeded); callers skip the langsys in that case. */
  bool visitedLangsys (const void *l)
  {
    if (langsys_count++ > HB_MAX_LANGSYS)
      return true;

    return visited (l, visited_langsys);
  }

  private:
  /* Records p (as its offset from `table`) in visited_set; returns true
   * if it was already present, i.e. this table was seen before. */
  template <typename T>
  bool visited (const T *p, hb_set_t &visited_set)
  {
    hb_codepoint_t delta = (hb_codepoint_t) ((uintptr_t) p - (uintptr_t) table);
     if (visited_set.has (delta))
      return true;

    visited_set.add (delta);
    return false;
  }

  public:
  const void *table;		/* Base table the offsets above are relative to */
  hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
  const hb_map_t     *duplicate_feature_map;
  hb_set_t           *new_feature_indexes;	/* OUT: feature indexes to keep */

  private:
  hb_set_t visited_script;
  hb_set_t visited_langsys;
  unsigned script_count;
  unsigned langsys_count;
};
150 
/* Dispatch context carrying the state needed to subset GSUB/GPOS layout
 * tables: the remapping tables for lookups/features/langsys plus counters
 * that enforce global sanity limits.
 *
 * Note: unlike hb_prune_langsys_context_t, the visit* methods here return
 * true when processing may CONTINUE (limit not yet reached). */
struct hb_subset_layout_context_t :
  hb_dispatch_context_t<hb_subset_layout_context_t, hb_empty_t, HB_DEBUG_SUBSET>
{
  const char *get_name () { return "SUBSET_LAYOUT"; }
  static return_t default_return_value () { return hb_empty_t (); }

  /* Returns true while we are still under the script limit. */
  bool visitScript ()
  {
    return script_count++ < HB_MAX_SCRIPTS;
  }

  /* Returns true while we are still under the langsys limit. */
  bool visitLangSys ()
  {
    return langsys_count++ < HB_MAX_LANGSYS;
  }

  /* Accounts for `count` more feature indices; returns true while we are
   * still under the feature-index limit. */
  bool visitFeatureIndex (int count)
  {
    feature_index_count += count;
    return feature_index_count < HB_MAX_FEATURE_INDICES;
  }

  /* Returns true while we are still under the lookup-index limit. */
  bool visitLookupIndex()
  {
    lookup_index_count++;
    return lookup_index_count < HB_MAX_LOOKUP_INDICES;
  }

  hb_subset_context_t *subset_context;
  const hb_tag_t table_tag;	/* HB_OT_TAG_GSUB or HB_OT_TAG_GPOS */
  const hb_map_t *lookup_index_map;	/* old lookup index -> new lookup index */
  const hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map;
  const hb_map_t *feature_index_map;	/* old feature index -> new feature index */
  unsigned cur_script_index;	/* Index of the script currently being subset;
				 * 0xFFFFu until set by RecordListOfScript. */

  hb_subset_layout_context_t (hb_subset_context_t *c_,
			      hb_tag_t tag_,
			      hb_map_t *lookup_map_,
			      hb_hashmap_t<unsigned, hb_set_t *, (unsigned)-1, nullptr> *script_langsys_map_,
			      hb_map_t *feature_index_map_) :
				subset_context (c_),
				table_tag (tag_),
				lookup_index_map (lookup_map_),
				script_langsys_map (script_langsys_map_),
				feature_index_map (feature_index_map_),
				cur_script_index (0xFFFFu),
				script_count (0),
				langsys_count (0),
				feature_index_count (0),
				lookup_index_count (0)
  {}

  private:
  unsigned script_count;
  unsigned langsys_count;
  unsigned feature_index_count;
  unsigned lookup_index_count;
};
209 
/* Dispatch context that walks GPOS subtables and gathers the layout
 * variation indices referenced by glyphs in `glyph_set`, accumulating
 * them into `layout_variation_indices`. */
struct hb_collect_variation_indices_context_t :
       hb_dispatch_context_t<hb_collect_variation_indices_context_t>
{
  /* Forwards to the object's collect_variation_indices() implementation. */
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_variation_indices (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_set_t *layout_variation_indices;	/* OUT: collected variation indices */
  const hb_set_t *glyph_set;		/* Glyphs retained in the subset */
  const hb_map_t *gpos_lookups;		/* GPOS lookups to consider */

  hb_collect_variation_indices_context_t (hb_set_t *layout_variation_indices_,
					  const hb_set_t *glyph_set_,
					  const hb_map_t *gpos_lookups_) :
					layout_variation_indices (layout_variation_indices_),
					glyph_set (glyph_set_),
					gpos_lookups (gpos_lookups_) {}
};
228 
229 template<typename OutputArray>
230 struct subset_offset_array_t
231 {
subset_offset_array_tOT::subset_offset_array_t232   subset_offset_array_t (hb_subset_context_t *subset_context_,
233 			 OutputArray& out_,
234 			 const void *base_) : subset_context (subset_context_),
235 					      out (out_), base (base_) {}
236 
237   template <typename T>
operator ()OT::subset_offset_array_t238   bool operator () (T&& offset)
239   {
240     auto *o = out.serialize_append (subset_context->serializer);
241     if (unlikely (!o)) return false;
242     auto snap = subset_context->serializer->snapshot ();
243     bool ret = o->serialize_subset (subset_context, offset, base);
244     if (!ret)
245     {
246       out.pop ();
247       subset_context->serializer->revert (snap);
248     }
249     return ret;
250   }
251 
252   private:
253   hb_subset_context_t *subset_context;
254   OutputArray &out;
255   const void *base;
256 };
257 
258 
/* Like subset_offset_array_t, but forwards one extra argument to
 * serialize_subset (e.g. a tag or a layout context). */
template<typename OutputArray, typename Arg>
struct subset_offset_array_arg_t
{
  subset_offset_array_arg_t (hb_subset_context_t *subset_context_,
			     OutputArray& out_,
			     const void *base_,
			     Arg &&arg_) : subset_context (subset_context_), out (out_),
					  base (base_), arg (arg_) {}

  /* Returns whether the offset survived subsetting; on failure the
   * appended slot is popped and the serializer reverted. */
  template <typename T>
  bool operator () (T&& offset)
  {
    auto *o = out.serialize_append (subset_context->serializer);
    if (unlikely (!o)) return false;
    auto snap = subset_context->serializer->snapshot ();
    bool ret = o->serialize_subset (subset_context, offset, base, arg);
    if (!ret)
    {
      out.pop ();
      subset_context->serializer->revert (snap);
    }
    return ret;
  }

  private:
  hb_subset_context_t *subset_context;
  OutputArray &out;
  const void *base;
  /* NOTE(review): held as a reference, not a copy — safe as long as the
   * functor is consumed within the expression that created it (the
   * subset_offset_array pattern below); confirm if usage ever changes. */
  Arg &&arg;
};
289 
290 /*
291  * Helper to subset an array of offsets. Subsets the thing pointed to by each offset
292  * and discards the offset in the array if the subset operation results in an empty
293  * thing.
294  */
295 struct
296 {
297   template<typename OutputArray>
298   subset_offset_array_t<OutputArray>
operator ()OT::__anon88b679aa0108299   operator () (hb_subset_context_t *subset_context, OutputArray& out,
300 	       const void *base) const
301   { return subset_offset_array_t<OutputArray> (subset_context, out, base); }
302 
303   /* Variant with one extra argument passed to serialize_subset */
304   template<typename OutputArray, typename Arg>
305   subset_offset_array_arg_t<OutputArray, Arg>
operator ()OT::__anon88b679aa0108306   operator () (hb_subset_context_t *subset_context, OutputArray& out,
307 	       const void *base, Arg &&arg) const
308   { return subset_offset_array_arg_t<OutputArray, Arg> (subset_context, out, base, arg); }
309 }
310 HB_FUNCOBJ (subset_offset_array);
311 
312 template<typename OutputArray>
313 struct subset_record_array_t
314 {
subset_record_array_tOT::subset_record_array_t315   subset_record_array_t (hb_subset_layout_context_t *c_, OutputArray* out_,
316 			 const void *base_) : subset_layout_context (c_),
317 					      out (out_), base (base_) {}
318 
319   template <typename T>
320   void
operator ()OT::subset_record_array_t321   operator () (T&& record)
322   {
323     auto snap = subset_layout_context->subset_context->serializer->snapshot ();
324     bool ret = record.subset (subset_layout_context, base);
325     if (!ret) subset_layout_context->subset_context->serializer->revert (snap);
326     else out->len++;
327   }
328 
329   private:
330   hb_subset_layout_context_t *subset_layout_context;
331   OutputArray *out;
332   const void *base;
333 };
334 
335 /*
336  * Helper to subset a RecordList/record array. Subsets each Record in the array and
337  * discards the record if the subset operation returns false.
338  */
339 struct
340 {
341   template<typename OutputArray>
342   subset_record_array_t<OutputArray>
operator ()OT::__anon88b679aa0208343   operator () (hb_subset_layout_context_t *c, OutputArray* out,
344 	       const void *base) const
345   { return subset_record_array_t<OutputArray> (c, out, base); }
346 }
347 HB_FUNCOBJ (subset_record_array);
348 
349 /*
350  *
351  * OpenType Layout Common Table Formats
352  *
353  */
354 
355 
356 /*
357  * Script, ScriptList, LangSys, Feature, FeatureList, Lookup, LookupList
358  */
359 
/* Closure handed to a Record's target during sanitization, carrying the
 * record's tag and the base address of the list that holds the record. */
struct Record_sanitize_closure_t {
  hb_tag_t tag;		/* Tag of the record being sanitized */
  const void *list_base;	/* Start of the enclosing record list */
};
364 
/* A (Tag, Offset16) pair pointing at a Type, as used in ScriptList,
 * FeatureList, and LangSys record arrays. */
template <typename Type>
struct Record
{
  /* Three-way tag comparison, for binary search by tag. */
  int cmp (hb_tag_t a) const { return tag.cmp (a); }

  /* Copies the record shell, then subsets the pointed-to object,
   * forwarding the layout context and this record's tag to it. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);
    bool ret = out->offset.serialize_subset (c->subset_context, offset, base, c, &tag);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    const Record_sanitize_closure_t closure = {tag, base};
    return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
  }

  Tag		tag;		/* 4-byte Tag identifier */
  Offset16To<Type>
		offset;		/* Offset from beginning of object holding
				 * the Record */
  public:
  DEFINE_SIZE_STATIC (6);
};
393 
/* Array of tagged records, kept sorted by tag so tags can be found with
 * binary search. */
template <typename Type>
struct RecordArrayOf : SortedArray16Of<Record<Type>>
{
  const Offset16To<Type>& get_offset (unsigned int i) const
  { return (*this)[i].offset; }
  Offset16To<Type>& get_offset (unsigned int i)
  { return (*this)[i].offset; }
  const Tag& get_tag (unsigned int i) const
  { return (*this)[i].tag; }
  /* Copies up to *record_count tags starting at start_offset into
   * record_tags (clamping *record_count to what was available), and
   * returns the total number of records in the array. */
  unsigned int get_tags (unsigned int start_offset,
			 unsigned int *record_count /* IN/OUT */,
			 hb_tag_t     *record_tags /* OUT */) const
  {
    if (record_count)
    {
      + this->sub_array (start_offset, record_count)
      | hb_map (&Record<Type>::tag)
      | hb_sink (hb_array (record_tags, *record_count))
      ;
    }
    return this->len;
  }
  /* Binary-searches for tag; on miss stores Index::NOT_FOUND_INDEX in
   * *index.  Returns whether the tag was found. */
  bool find_index (hb_tag_t tag, unsigned int *index) const
  {
    return this->bfind (tag, index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
  }
};
421 
/* Record array whose offsets are relative to the list itself;
 * operator [] dereferences through the i'th record's offset. */
template <typename Type>
struct RecordListOf : RecordArrayOf<Type>
{
  const Type& operator [] (unsigned int i) const
  { return this+this->get_offset (i); }

  /* Serializes a copy of the list, subsetting every record and silently
   * dropping the ones whose subset operation fails. */
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    + this->iter ()
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (RecordArrayOf<Type>::sanitize (c, this));
  }
};
447 
448 struct Feature;
449 
/* FeatureList: like RecordListOf<Feature>, but its subset() keeps only
 * the features present in the layout context's feature_index_map. */
struct RecordListOfFeature : RecordListOf<Feature>
{
  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    /* Keep only records whose (old) feature index survives the subset. */
    + hb_zip (*this, hb_range (count))
    | hb_filter (l->feature_index_map, hb_second)
    | hb_map (hb_first)
    | hb_apply (subset_record_array (l, out, this))
    ;
    return_trace (true);
  }
};
468 
struct Script;
/* ScriptList: like RecordListOf<Script>, but its subset() records each
 * script's index in the layout context (so Script::subset can look up its
 * active langsys set) and rolls back scripts that subset to nothing. */
struct RecordListOfScript : RecordListOf<Script>
{
  bool subset (hb_subset_context_t *c,
               hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    unsigned count = this->len;
    for (auto _ : + hb_zip (*this, hb_range (count)))
    {
      auto snap = c->serializer->snapshot ();
      l->cur_script_index = _.second;	/* Tell Script::subset which script this is. */
      bool ret = _.first.subset (l, this);
      if (!ret) c->serializer->revert (snap);
      else out->len++;
    }

    return_trace (true);
  }
};
492 
493 struct RangeRecord
494 {
cmpOT::RangeRecord495   int cmp (hb_codepoint_t g) const
496   { return g < first ? -1 : g <= last ? 0 : +1; }
497 
sanitizeOT::RangeRecord498   bool sanitize (hb_sanitize_context_t *c) const
499   {
500     TRACE_SANITIZE (this);
501     return_trace (c->check_struct (this));
502   }
503 
intersectsOT::RangeRecord504   bool intersects (const hb_set_t *glyphs) const
505   { return glyphs->intersects (first, last); }
506 
507   template <typename set_t>
collect_coverageOT::RangeRecord508   bool collect_coverage (set_t *glyphs) const
509   { return glyphs->add_range (first, last); }
510 
511   HBGlyphID	first;		/* First GlyphID in the range */
512   HBGlyphID	last;		/* Last GlyphID in the range */
513   HBUINT16	value;		/* Value */
514   public:
515   DEFINE_SIZE_STATIC (6);
516 };
517 DECLARE_NULL_NAMESPACE_BYTES (OT, RangeRecord);
518 
519 
/* Array of 16-bit indices (feature or lookup indices). */
struct IndexArray : Array16Of<Index>
{
  /* Whether any stored index is a key of `indexes`. */
  bool intersects (const hb_map_t *indexes) const
  { return hb_any (*this, indexes); }

  /* Serializes the indices produced by `it`, stopping early if the
   * global lookup-index limit is reached.  Serializes nothing (not even
   * the length) when the iterator is empty. */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  hb_subset_layout_context_t *l,
		  Iterator it)
  {
    if (!it) return;
    if (unlikely (!c->extend_min ((*this)))) return;

    for (const auto _ : it)
    {
      if (!l->visitLookupIndex()) break;

      Index i;
      i = _;
      c->copy (i);
      this->len++;
    }
  }

  /* Copies up to *_count indices starting at start_offset into _indexes
   * (clamping *_count), and returns the total number of indices. */
  unsigned int get_indexes (unsigned int start_offset,
			    unsigned int *_count /* IN/OUT */,
			    unsigned int *_indexes /* OUT */) const
  {
    if (_count)
    {
      + this->sub_array (start_offset, _count)
      | hb_sink (hb_array (_indexes, *_count))
      ;
    }
    return this->len;
  }

  /* Adds all stored indices to `output`. */
  void add_indexes_to (hb_set_t* output /* OUT */) const
  {
    output->add_array (as_array ());
  }
};
563 
564 
/* Language-system table: the feature indices (plus optional required
 * feature) that apply for one script/language combination. */
struct LangSys
{
  unsigned int get_feature_count () const
  { return featureIndex.len; }
  hb_tag_t get_feature_index (unsigned int i) const
  { return featureIndex[i]; }
  unsigned int get_feature_indexes (unsigned int start_offset,
				    unsigned int *feature_count /* IN/OUT */,
				    unsigned int *feature_indexes /* OUT */) const
  { return featureIndex.get_indexes (start_offset, feature_count, feature_indexes); }
  void add_feature_indexes_to (hb_set_t *feature_indexes) const
  { featureIndex.add_indexes_to (feature_indexes); }

  /* 0xFFFFu is the spec's sentinel for "no required feature". */
  bool has_required_feature () const { return reqFeatureIndex != 0xFFFFu; }
  unsigned int get_required_feature_index () const
  {
    if (reqFeatureIndex == 0xFFFFu)
      return Index::NOT_FOUND_INDEX;
   return reqFeatureIndex;
  }

  LangSys* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  /* Whether this langsys is equivalent to `o` once feature indices are
   * filtered and remapped through feature_index_map; used to detect
   * langsys entries redundant with the default langsys. */
  bool compare (const LangSys& o, const hb_map_t *feature_index_map) const
  {
    if (reqFeatureIndex != o.reqFeatureIndex)
      return false;

    auto iter =
    + hb_iter (featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    auto o_iter =
    + hb_iter (o.featureIndex)
    | hb_filter (feature_index_map)
    | hb_map (feature_index_map)
    ;

    if (iter.len () != o_iter.len ())
      return false;

    for (const auto _ : + hb_zip (iter, o_iter))
      if (_.first != _.second) return false;

    return true;
  }

  /* Adds this langsys's surviving (non-duplicate) feature indices to the
   * prune context's collected set.  Skips langsys already visited. */
  void collect_features (hb_prune_langsys_context_t *c) const
  {
    if (!has_required_feature () && !get_feature_count ()) return;
    if (c->visitedLangsys (this)) return;
    if (has_required_feature () &&
        c->duplicate_feature_map->has (reqFeatureIndex))
      c->new_feature_indexes->add (get_required_feature_index ());

    + hb_iter (featureIndex)
    | hb_filter (c->duplicate_feature_map)
    | hb_sink (c->new_feature_indexes)
    ;
  }

  /* Serializes a subset copy: remaps reqFeatureIndex (dropping it to
   * 0xFFFFu if the feature didn't survive) and the feature index array.
   * Returns whether any feature index survived. */
  bool subset (hb_subset_context_t        *c,
	       hb_subset_layout_context_t *l,
	       const Tag                  *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->reqFeatureIndex = l->feature_index_map->has (reqFeatureIndex) ? l->feature_index_map->get (reqFeatureIndex) : 0xFFFFu;

    if (!l->visitFeatureIndex (featureIndex.len))
      return_trace (false);

    auto it =
    + hb_iter (featureIndex)
    | hb_filter (l->feature_index_map)
    | hb_map (l->feature_index_map)
    ;

    bool ret = bool (it);
    out->featureIndex.serialize (c->serializer, l, it);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && featureIndex.sanitize (c));
  }

  Offset16	lookupOrderZ;	/* = Null (reserved for an offset to a
				 * reordering table) */
  HBUINT16	reqFeatureIndex;/* Index of a feature required for this
				 * language system--if no required features
				 * = 0xFFFFu */
  IndexArray	featureIndex;	/* Array of indices into the FeatureList */
  public:
  DEFINE_SIZE_ARRAY_SIZED (6, featureIndex);
};
672 DECLARE_NULL_NAMESPACE_BYTES (OT, LangSys);
673 
/* Script table: an optional default LangSys plus an array of LangSys
 * records keyed by language tag. */
struct Script
{
  unsigned int get_lang_sys_count () const
  { return langSys.len; }
  const Tag& get_lang_sys_tag (unsigned int i) const
  { return langSys.get_tag (i); }
  unsigned int get_lang_sys_tags (unsigned int start_offset,
				  unsigned int *lang_sys_count /* IN/OUT */,
				  hb_tag_t     *lang_sys_tags /* OUT */) const
  { return langSys.get_tags (start_offset, lang_sys_count, lang_sys_tags); }
  /* NOT_FOUND_INDEX falls back to the default langsys. */
  const LangSys& get_lang_sys (unsigned int i) const
  {
    if (i == Index::NOT_FOUND_INDEX) return get_default_lang_sys ();
    return this+langSys[i].offset;
  }
  bool find_lang_sys_index (hb_tag_t tag, unsigned int *index) const
  { return langSys.find_index (tag, index); }

  bool has_default_lang_sys () const           { return defaultLangSys != 0; }
  const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }

  /* Collects the features of this script's langsys entries into the prune
   * context, and records in c->script_langsys_map (keyed by script_index)
   * the langsys slots that are NOT redundant with the default langsys. */
  void prune_langsys (hb_prune_langsys_context_t *c,
                      unsigned script_index) const
  {
    if (!has_default_lang_sys () && !get_lang_sys_count ()) return;
    if (c->visitedScript (this)) return;

    if (!c->script_langsys_map->has (script_index))
    {
      hb_set_t* empty_set = hb_set_create ();
      if (unlikely (!c->script_langsys_map->set (script_index, empty_set)))
      {
	hb_set_destroy (empty_set);
	return;
      }
    }

    unsigned langsys_count = get_lang_sys_count ();
    if (has_default_lang_sys ())
    {
      //only collect features from non-redundant langsys
      const LangSys& d = get_default_lang_sys ();
      d.collect_features (c);

      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        /* A langsys identical to the default (after feature remap) is
         * redundant: skip it and leave it out of the map. */
        if (l.compare (d, c->duplicate_feature_map)) continue;

        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
    else
    {
      /* No default langsys: every langsys is kept. */
      for (auto _ : + hb_zip (langSys, hb_range (langsys_count)))
      {
        const LangSys& l = this+_.first.offset;
        l.collect_features (c);
        c->script_langsys_map->get (script_index)->add (_.second);
      }
    }
  }

  /* Serializes a subset copy of this script.  The default langsys is kept
   * unless its subset fails and the script isn't DFLT; non-default langsys
   * are kept if listed as active for l->cur_script_index.  Returns whether
   * anything was kept (GSUB scripts are kept even when empty). */
  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag) const
  {
    TRACE_SUBSET (this);
    if (!l->visitScript ()) return_trace (false);

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    bool defaultLang = false;
    if (has_default_lang_sys ())
    {
      c->serializer->push ();
      const LangSys& ls = this+defaultLangSys;
      bool ret = ls.subset (c, l);
      if (!ret && tag && *tag != HB_TAG ('D', 'F', 'L', 'T'))
      {
	c->serializer->pop_discard ();
	out->defaultLangSys = 0;
      }
      else
      {
	/* Keep the default langsys (always kept for DFLT scripts). */
	c->serializer->add_link (out->defaultLangSys, c->serializer->pop_pack ());
	defaultLang = true;
      }
    }

    const hb_set_t *active_langsys = l->script_langsys_map->get (l->cur_script_index);
    if (active_langsys)
    {
      unsigned count = langSys.len;
      + hb_zip (langSys, hb_range (count))
      | hb_filter (active_langsys, hb_second)
      | hb_map (hb_first)
      | hb_filter ([=] (const Record<LangSys>& record) {return l->visitLangSys (); })
      | hb_apply (subset_record_array (l, &(out->langSys), this))
      ;
    }

    return_trace (bool (out->langSys.len) || defaultLang || l->table_tag == HB_OT_TAG_GSUB);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t * = nullptr) const
  {
    TRACE_SANITIZE (this);
    return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
  }

  protected:
  Offset16To<LangSys>
		defaultLangSys;	/* Offset to DefaultLangSys table--from
				 * beginning of Script table--may be Null */
  RecordArrayOf<LangSys>
		langSys;	/* Array of LangSysRecords--listed
				 * alphabetically by LangSysTag */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, langSys);
};
798 
799 typedef RecordListOfScript ScriptList;
800 
801 
802 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#size */
/* Feature parameters for the 'size' feature. */
struct FeatureParamsSize
{
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);

    /* This subtable has some "history", if you will.  Some earlier versions of
     * Adobe tools calculated the offset of the FeatureParams subtable from the
     * beginning of the FeatureList table!  Now, that is dealt with in the
     * Feature implementation.  But we still need to be able to tell junk from
     * real data.  Note: We don't check that the nameID actually exists.
     *
     * Read Roberts wrote on 9/15/06 on opentype-list@indx.co.uk :
     *
     * Yes, it is correct that a new version of the AFDKO (version 2.0) will be
     * coming out soon, and that the makeotf program will build a font with a
     * 'size' feature that is correct by the specification.
     *
     * The specification for this feature tag is in the "OpenType Layout Tag
     * Registry". You can see a copy of this at:
     * https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#tag-size
     *
     * Here is one set of rules to determine if the 'size' feature is built
     * correctly, or as by the older versions of MakeOTF. You may be able to do
     * better.
     *
     * Assume that the offset to the size feature is according to specification,
     * and make the following value checks. If it fails, assume the size
     * feature is calculated as versions of MakeOTF before the AFDKO 2.0 built it.
     * If this fails, reject the 'size' feature. The older makeOTF's calculated the
     * offset from the beginning of the FeatureList table, rather than from the
     * beginning of the 'size' Feature table.
     *
     * If "design size" == 0:
     *     fails check
     *
     * Else if ("subfamily identifier" == 0 and
     *     "range start" == 0 and
     *     "range end" == 0 and
     *     "menu name ID" == 0)
     *     passes check: this is the format used when there is a design size
     * specified, but there is no recommended size range.
     *
     * Else if ("design size" <  "range start" or
     *     "design size" >   "range end" or
     *     "range end" <= "range start" or
     *     "menu name ID"  < 256 or
     *     "menu name ID"  > 32767 or
     *     menu name ID is not a name ID which is actually in the name table)
     *     fails test
     * Else
     *     passes test.
     */

    if (!designSize)
      return_trace (false);
    else if (subfamilyID == 0 &&
	     subfamilyNameID == 0 &&
	     rangeStart == 0 &&
	     rangeEnd == 0)
      return_trace (true);
    else if (designSize < rangeStart ||
	     designSize > rangeEnd ||
	     subfamilyNameID < 256 ||
	     subfamilyNameID > 32767)
      return_trace (false);
    else
      return_trace (true);
  }

  /* 'size' params carry no glyph references, so subsetting is a plain copy. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    return_trace ((bool) c->serializer->embed (*this));
  }

  HBUINT16	designSize;	/* Represents the design size in 720/inch
				 * units (decipoints).  The design size entry
				 * must be non-zero.  When there is a design
				 * size but no recommended size range, the
				 * rest of the array will consist of zeros. */
  HBUINT16	subfamilyID;	/* Has no independent meaning, but serves
				 * as an identifier that associates fonts
				 * in a subfamily. All fonts which share a
				 * Preferred or Font Family name and which
				 * differ only by size range shall have the
				 * same subfamily value, and no fonts which
				 * differ in weight or style shall have the
				 * same subfamily value. If this value is
				 * zero, the remaining fields in the array
				 * will be ignored. */
  NameID	subfamilyNameID;/* If the preceding value is non-zero, this
				 * value must be set in the range 256 - 32767
				 * (inclusive). It records the value of a
				 * field in the name table, which must
				 * contain English-language strings encoded
				 * in Windows Unicode and Macintosh Roman,
				 * and may contain additional strings
				 * localized to other scripts and languages.
				 * Each of these strings is the name an
				 * application should use, in combination
				 * with the family name, to represent the
				 * subfamily in a menu.  Applications will
				 * choose the appropriate version based on
				 * their selection criteria. */
  HBUINT16	rangeStart;	/* Small end of the recommended usage range
				 * (exclusive), stored in 720/inch units
				 * (decipoints). */
  HBUINT16	rangeEnd;	/* Large end of the recommended usage range
				 * (inclusive), stored in 720/inch units
				 * (decipoints). */
  public:
  DEFINE_SIZE_STATIC (10);
};
919 
920 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#ssxx */
921 struct FeatureParamsStylisticSet
922 {
sanitizeOT::FeatureParamsStylisticSet923   bool sanitize (hb_sanitize_context_t *c) const
924   {
925     TRACE_SANITIZE (this);
926     /* Right now minorVersion is at zero.  Which means, any table supports
927      * the uiNameID field. */
928     return_trace (c->check_struct (this));
929   }
930 
subsetOT::FeatureParamsStylisticSet931   bool subset (hb_subset_context_t *c) const
932   {
933     TRACE_SUBSET (this);
934     return_trace ((bool) c->serializer->embed (*this));
935   }
936 
937   HBUINT16	version;	/* (set to 0): This corresponds to a “minor”
938 				 * version number. Additional data may be
939 				 * added to the end of this Feature Parameters
940 				 * table in the future. */
941 
942   NameID	uiNameID;	/* The 'name' table name ID that specifies a
943 				 * string (or strings, for multiple languages)
944 				 * for a user-interface label for this
945 				 * feature.  The values of uiLabelNameId and
946 				 * sampleTextNameId are expected to be in the
947 				 * font-specific name ID range (256-32767),
948 				 * though that is not a requirement in this
949 				 * Feature Parameters specification. The
950 				 * user-interface label for the feature can
951 				 * be provided in multiple languages. An
952 				 * English string should be included as a
953 				 * fallback. The string should be kept to a
954 				 * minimal length to fit comfortably with
955 				 * different application interfaces. */
956   public:
957   DEFINE_SIZE_STATIC (4);
958 };
959 
960 /* https://docs.microsoft.com/en-us/typography/opentype/spec/features_ae#cv01-cv99 */
961 struct FeatureParamsCharacterVariants
962 {
963   unsigned
get_charactersOT::FeatureParamsCharacterVariants964   get_characters (unsigned start_offset, unsigned *char_count, hb_codepoint_t *chars) const
965   {
966     if (char_count)
967     {
968       + characters.sub_array (start_offset, char_count)
969       | hb_sink (hb_array (chars, *char_count))
970       ;
971     }
972     return characters.len;
973   }
974 
get_sizeOT::FeatureParamsCharacterVariants975   unsigned get_size () const
976   { return min_size + characters.len * HBUINT24::static_size; }
977 
subsetOT::FeatureParamsCharacterVariants978   bool subset (hb_subset_context_t *c) const
979   {
980     TRACE_SUBSET (this);
981     return_trace ((bool) c->serializer->embed (*this));
982   }
983 
sanitizeOT::FeatureParamsCharacterVariants984   bool sanitize (hb_sanitize_context_t *c) const
985   {
986     TRACE_SANITIZE (this);
987     return_trace (c->check_struct (this) &&
988 		  characters.sanitize (c));
989   }
990 
991   HBUINT16	format;			/* Format number is set to 0. */
992   NameID	featUILableNameID;	/* The ‘name’ table name ID that
993 					 * specifies a string (or strings,
994 					 * for multiple languages) for a
995 					 * user-interface label for this
996 					 * feature. (May be NULL.) */
997   NameID	featUITooltipTextNameID;/* The ‘name’ table name ID that
998 					 * specifies a string (or strings,
999 					 * for multiple languages) that an
1000 					 * application can use for tooltip
1001 					 * text for this feature. (May be
1002 					 * nullptr.) */
1003   NameID	sampleTextNameID;	/* The ‘name’ table name ID that
1004 					 * specifies sample text that
1005 					 * illustrates the effect of this
1006 					 * feature. (May be NULL.) */
1007   HBUINT16	numNamedParameters;	/* Number of named parameters. (May
1008 					 * be zero.) */
1009   NameID	firstParamUILabelNameID;/* The first ‘name’ table name ID
1010 					 * used to specify strings for
1011 					 * user-interface labels for the
1012 					 * feature parameters. (Must be zero
1013 					 * if numParameters is zero.) */
1014   Array16Of<HBUINT24>
1015 		characters;		/* Array of the Unicode Scalar Value
1016 					 * of the characters for which this
1017 					 * feature provides glyph variants.
1018 					 * (May be zero.) */
1019   public:
1020   DEFINE_SIZE_ARRAY (14, characters);
1021 };
1022 
1023 struct FeatureParams
1024 {
sanitizeOT::FeatureParams1025   bool sanitize (hb_sanitize_context_t *c, hb_tag_t tag) const
1026   {
1027 #ifdef HB_NO_LAYOUT_FEATURE_PARAMS
1028     return true;
1029 #endif
1030     TRACE_SANITIZE (this);
1031     if (tag == HB_TAG ('s','i','z','e'))
1032       return_trace (u.size.sanitize (c));
1033     if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1034       return_trace (u.stylisticSet.sanitize (c));
1035     if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1036       return_trace (u.characterVariants.sanitize (c));
1037     return_trace (true);
1038   }
1039 
subsetOT::FeatureParams1040   bool subset (hb_subset_context_t *c, const Tag* tag) const
1041   {
1042     TRACE_SUBSET (this);
1043     if (!tag) return_trace (false);
1044     if (*tag == HB_TAG ('s','i','z','e'))
1045       return_trace (u.size.subset (c));
1046     if ((*tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1047       return_trace (u.stylisticSet.subset (c));
1048     if ((*tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1049       return_trace (u.characterVariants.subset (c));
1050     return_trace (false);
1051   }
1052 
1053 #ifndef HB_NO_LAYOUT_FEATURE_PARAMS
get_size_paramsOT::FeatureParams1054   const FeatureParamsSize& get_size_params (hb_tag_t tag) const
1055   {
1056     if (tag == HB_TAG ('s','i','z','e'))
1057       return u.size;
1058     return Null (FeatureParamsSize);
1059   }
get_stylistic_set_paramsOT::FeatureParams1060   const FeatureParamsStylisticSet& get_stylistic_set_params (hb_tag_t tag) const
1061   {
1062     if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
1063       return u.stylisticSet;
1064     return Null (FeatureParamsStylisticSet);
1065   }
get_character_variants_paramsOT::FeatureParams1066   const FeatureParamsCharacterVariants& get_character_variants_params (hb_tag_t tag) const
1067   {
1068     if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
1069       return u.characterVariants;
1070     return Null (FeatureParamsCharacterVariants);
1071   }
1072 #endif
1073 
1074   private:
1075   union {
1076   FeatureParamsSize			size;
1077   FeatureParamsStylisticSet		stylisticSet;
1078   FeatureParamsCharacterVariants	characterVariants;
1079   } u;
1080   public:
1081   DEFINE_SIZE_MIN (0);
1082 };
1083 
struct Feature
{
  /* A GSUB/GPOS Feature table: an optional FeatureParams offset plus an
   * array of indices into the LookupList. */

  unsigned int get_lookup_count () const
  { return lookupIndex.len; }
  /* NOTE(review): declared return type is hb_tag_t but the value is a
   * lookup index; kept as-is for API compatibility — confirm callers. */
  hb_tag_t get_lookup_index (unsigned int i) const
  { return lookupIndex[i]; }
  /* Copies up to *lookup_count lookup indices, starting at start_index,
   * into lookup_tags; returns the total number of indices available. */
  unsigned int get_lookup_indexes (unsigned int start_index,
				   unsigned int *lookup_count /* IN/OUT */,
				   unsigned int *lookup_tags /* OUT */) const
  { return lookupIndex.get_indexes (start_index, lookup_count, lookup_tags); }
  /* Adds all of this feature's lookup indices into the given set. */
  void add_lookup_indexes_to (hb_set_t *lookup_indexes) const
  { lookupIndex.add_indexes_to (lookup_indexes); }

  const FeatureParams &get_feature_params () const
  { return this+featureParams; }

  /* True if any of this feature's lookup indices is in the map. */
  bool intersects_lookup_indexes (const hb_map_t *lookup_indexes) const
  { return lookupIndex.intersects (lookup_indexes); }

  bool subset (hb_subset_context_t         *c,
	       hb_subset_layout_context_t  *l,
	       const Tag                   *tag = nullptr) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    /* FeatureParams is subsetted per the feature tag (size/ssXX/cvXX). */
    out->featureParams.serialize_subset (c, featureParams, this, tag);

    /* Keep only lookups retained by the plan, remapped to their new indices. */
    auto it =
    + hb_iter (lookupIndex)
    | hb_filter (l->lookup_index_map)
    | hb_map (l->lookup_index_map)
    ;

    out->lookupIndex.serialize (c->serializer, l, it);
    // The decision to keep or drop this feature is already made before we get here
    // so always retain it.
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c,
		 const Record_sanitize_closure_t *closure = nullptr) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
      return_trace (false);

    /* Some earlier versions of Adobe tools calculated the offset of the
     * FeatureParams subtable from the beginning of the FeatureList table!
     *
     * If sanitizing "failed" for the FeatureParams subtable, try it with the
     * alternative location.  We would know sanitize "failed" if old value
     * of the offset was non-zero, but it's zeroed now.
     *
     * Only do this for the 'size' feature, since at the time of the faulty
     * Adobe tools, only the 'size' feature had FeatureParams defined.
     */

    if (likely (featureParams.is_null ()))
      return_trace (true);

    /* Capture the offset before sanitize, which may zero it out on failure. */
    unsigned int orig_offset = featureParams;
    if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
      return_trace (false);

    if (featureParams == 0 && closure &&
	closure->tag == HB_TAG ('s','i','z','e') &&
	closure->list_base && closure->list_base < this)
    {
      /* Re-base the offset as if it were relative to the FeatureList. */
      unsigned int new_offset_int = orig_offset -
				    (((char *) this) - ((char *) closure->list_base));

      Offset16To<FeatureParams> new_offset;
      /* Check that it would not overflow. */
      new_offset = new_offset_int;
      if (new_offset == new_offset_int &&
	  c->try_set (&featureParams, new_offset_int) &&
	  !featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
	return_trace (false);
    }

    return_trace (true);
  }

  Offset16To<FeatureParams>
		 featureParams;	/* Offset to Feature Parameters table (if one
				 * has been defined for the feature), relative
				 * to the beginning of the Feature Table; = Null
				 * if not required */
  IndexArray	 lookupIndex;	/* Array of LookupList indices */
  public:
  DEFINE_SIZE_ARRAY_SIZED (4, lookupIndex);
};
1178 
1179 typedef RecordListOf<Feature> FeatureList;
1180 
1181 
struct LookupFlag : HBUINT16
{
  /* Bit flags stored in the 16-bit lookupFlag word of a Lookup table. */
  enum Flags {
    RightToLeft		= 0x0001u,
    IgnoreBaseGlyphs	= 0x0002u,
    IgnoreLigatures	= 0x0004u,
    IgnoreMarks		= 0x0008u,
    IgnoreFlags		= 0x000Eu,	/* Mask of the three Ignore* bits above. */
    UseMarkFilteringSet	= 0x0010u,	/* If set, a markFilteringSet word trails
					 * the subtable offsets in the Lookup
					 * (see Lookup::get_size/get_props). */
    Reserved		= 0x00E0u,
    MarkAttachmentType	= 0xFF00u
  };
  public:
  DEFINE_SIZE_STATIC (2);
};
1197 
1198 } /* namespace OT */
1199 /* This has to be outside the namespace. */
1200 HB_MARK_AS_FLAG_T (OT::LookupFlag::Flags);
1201 namespace OT {
1202 
struct Lookup
{
  /* A GSUB/GPOS Lookup table: lookup type, flags, an array of subtable
   * offsets, and an optional trailing markFilteringSet word that is present
   * iff the UseMarkFilteringSet flag is set. */

  unsigned int get_subtable_count () const { return subTable.len; }

  /* Reinterpret the generic Offset16 array as offsets to a concrete
   * subtable type; the caller must pass the TSubTable matching lookupType. */
  template <typename TSubTable>
  const Array16OfOffset16To<TSubTable>& get_subtables () const
  { return reinterpret_cast<const Array16OfOffset16To<TSubTable> &> (subTable); }
  template <typename TSubTable>
  Array16OfOffset16To<TSubTable>& get_subtables ()
  { return reinterpret_cast<Array16OfOffset16To<TSubTable> &> (subTable); }

  template <typename TSubTable>
  const TSubTable& get_subtable (unsigned int i) const
  { return this+get_subtables<TSubTable> ()[i]; }
  template <typename TSubTable>
  TSubTable& get_subtable (unsigned int i)
  { return this+get_subtables<TSubTable> ()[i]; }

  /* Size in bytes of this Lookup, including the trailing markFilteringSet
   * word when the flag says it is present. */
  unsigned int get_size () const
  {
    const HBUINT16 &markFilteringSet = StructAfter<const HBUINT16> (subTable);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
      return (const char *) &StructAfter<const char> (markFilteringSet) - (const char *) this;
    return (const char *) &markFilteringSet - (const char *) this;
  }

  unsigned int get_type () const { return lookupType; }

  /* lookup_props is a 32-bit integer where the lower 16-bit is LookupFlag and
   * higher 16-bit is mark-filtering-set if the lookup uses one.
   * Not to be confused with glyph_props which is very similar. */
  uint32_t get_props () const
  {
    unsigned int flag = lookupFlag;
    if (unlikely (flag & LookupFlag::UseMarkFilteringSet))
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      flag += (markFilteringSet << 16);
    }
    return flag;
  }

  /* Dispatches the context to each subtable in turn, stopping early when
   * the context says so; otherwise returns the context's default value. */
  template <typename TSubTable, typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    unsigned int lookup_type = get_type ();
    TRACE_DISPATCH (this, lookup_type);
    unsigned int count = get_subtable_count ();
    for (unsigned int i = 0; i < count; i++) {
      typename context_t::return_t r = get_subtable<TSubTable> (i).dispatch (c, lookup_type, hb_forward<Ts> (ds)...);
      if (c->stop_sublookup_iteration (r))
	return_trace (r);
    }
    return_trace (c->default_return_value ());
  }

  /* Writes the fixed header plus num_subtables (not-yet-filled) subtable
   * offsets; appends the markFilteringSet word when lookup_props carries
   * one in its high 16 bits. */
  bool serialize (hb_serialize_context_t *c,
		  unsigned int lookup_type,
		  uint32_t lookup_props,
		  unsigned int num_subtables)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    lookupType = lookup_type;
    lookupFlag = lookup_props & 0xFFFFu;
    if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Grow the serialized object to make room for the trailing word. */
      if (unlikely (!c->extend (*this))) return_trace (false);
      HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      markFilteringSet = lookup_props >> 16;
    }
    return_trace (true);
  }

  /* Copies this lookup into the subset output, retaining only subtables
   * that intersect the plan's retained glyph set. */
  template <typename TSubTable>
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
    out->lookupType = lookupType;
    out->lookupFlag = lookupFlag;

    const hb_set_t *glyphset = c->plan->glyphset_gsub ();
    unsigned int lookup_type = get_type ();
    + hb_iter (get_subtables <TSubTable> ())
    | hb_filter ([this, glyphset, lookup_type] (const Offset16To<TSubTable> &_) { return (this+_).intersects (glyphset, lookup_type); })
    | hb_apply (subset_offset_array (c, out->get_subtables<TSubTable> (), this, lookup_type))
    ;

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      /* Copy the trailing markFilteringSet word as well. */
      if (unlikely (!c->serializer->extend (out))) return_trace (false);
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      HBUINT16 &outMarkFilteringSet = StructAfter<HBUINT16> (out->subTable);
      outMarkFilteringSet = markFilteringSet;
    }

    return_trace (true);
  }

  /* Validates the header, subtable offset array, optional markFilteringSet
   * word, and each subtable; for Extension lookups additionally checks all
   * subtables wrap the same lookup type. */
  template <typename TSubTable>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);

    unsigned subtables = get_subtable_count ();
    if (unlikely (!c->visit_subtables (subtables))) return_trace (false);

    if (lookupFlag & LookupFlag::UseMarkFilteringSet)
    {
      const HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
      if (!markFilteringSet.sanitize (c)) return_trace (false);
    }

    if (unlikely (!get_subtables<TSubTable> ().sanitize (c, this, get_type ())))
      return_trace (false);

    if (unlikely (get_type () == TSubTable::Extension && !c->get_edit_count ()))
    {
      /* The spec says all subtables of an Extension lookup should
       * have the same type, which shall not be the Extension type
       * itself (but we already checked for that).
       * This is specially important if one has a reverse type!
       *
       * We only do this if sanitizer edit_count is zero.  Otherwise,
       * some of the subtables might have become insane after they
       * were sanity-checked by the edits of subsequent subtables.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=960331
       */
      unsigned int type = get_subtable<TSubTable> (0).u.extension.get_type ();
      for (unsigned int i = 1; i < subtables; i++)
	if (get_subtable<TSubTable> (i).u.extension.get_type () != type)
	  return_trace (false);
    }
    return_trace (true);
  }

  private:
  HBUINT16	lookupType;		/* Different enumerations for GSUB and GPOS */
  HBUINT16	lookupFlag;		/* Lookup qualifiers */
  Array16Of<Offset16>
		subTable;		/* Array of SubTables */
/*HBUINT16	markFilteringSetX[HB_VAR_ARRAY];*//* Index (base 0) into GDEF mark glyph sets
					 * structure. This field is only present if bit
					 * UseMarkFilteringSet of lookup flags is set. */
  public:
  DEFINE_SIZE_ARRAY (6, subTable);
};
1354 
1355 typedef List16OfOffset16To<Lookup> LookupList;
1356 
1357 template <typename TLookup>
1358 struct LookupOffsetList : List16OfOffset16To<TLookup>
1359 {
subsetOT::LookupOffsetList1360   bool subset (hb_subset_context_t        *c,
1361 	       hb_subset_layout_context_t *l) const
1362   {
1363     TRACE_SUBSET (this);
1364     auto *out = c->serializer->start_embed (this);
1365     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
1366 
1367     unsigned count = this->len;
1368     + hb_zip (*this, hb_range (count))
1369     | hb_filter (l->lookup_index_map, hb_second)
1370     | hb_map (hb_first)
1371     | hb_apply (subset_offset_array (c, *out, this))
1372     ;
1373     return_trace (true);
1374   }
1375 
sanitizeOT::LookupOffsetList1376   bool sanitize (hb_sanitize_context_t *c) const
1377   {
1378     TRACE_SANITIZE (this);
1379     return_trace (List16OfOffset16To<TLookup>::sanitize (c, this));
1380   }
1381 };
1382 
1383 
1384 /*
1385  * Coverage Table
1386  */
1387 
1388 struct CoverageFormat1
1389 {
1390   friend struct Coverage;
1391 
1392   private:
get_coverageOT::CoverageFormat11393   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1394   {
1395     unsigned int i;
1396     glyphArray.bfind (glyph_id, &i, HB_BFIND_NOT_FOUND_STORE, NOT_COVERED);
1397     return i;
1398   }
1399 
1400   template <typename Iterator,
1401       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
serializeOT::CoverageFormat11402   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1403   {
1404     TRACE_SERIALIZE (this);
1405     return_trace (glyphArray.serialize (c, glyphs));
1406   }
1407 
sanitizeOT::CoverageFormat11408   bool sanitize (hb_sanitize_context_t *c) const
1409   {
1410     TRACE_SANITIZE (this);
1411     return_trace (glyphArray.sanitize (c));
1412   }
1413 
intersectsOT::CoverageFormat11414   bool intersects (const hb_set_t *glyphs) const
1415   {
1416     /* TODO Speed up, using hb_set_next() and bsearch()? */
1417     for (const auto& g : glyphArray.as_array ())
1418       if (glyphs->has (g))
1419 	return true;
1420     return false;
1421   }
intersects_coverageOT::CoverageFormat11422   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1423   { return glyphs->has (glyphArray[index]); }
1424 
intersected_coverage_glyphsOT::CoverageFormat11425   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1426   {
1427     unsigned count = glyphArray.len;
1428     for (unsigned i = 0; i < count; i++)
1429       if (glyphs->has (glyphArray[i]))
1430         intersect_glyphs->add (glyphArray[i]);
1431   }
1432 
1433   template <typename set_t>
collect_coverageOT::CoverageFormat11434   bool collect_coverage (set_t *glyphs) const
1435   { return glyphs->add_sorted_array (glyphArray.as_array ()); }
1436 
1437   public:
1438   /* Older compilers need this to be public. */
1439   struct iter_t
1440   {
initOT::CoverageFormat1::iter_t1441     void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
finiOT::CoverageFormat1::iter_t1442     void fini () {}
moreOT::CoverageFormat1::iter_t1443     bool more () const { return i < c->glyphArray.len; }
nextOT::CoverageFormat1::iter_t1444     void next () { i++; }
get_glyphOT::CoverageFormat1::iter_t1445     hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
operator !=OT::CoverageFormat1::iter_t1446     bool operator != (const iter_t& o) const
1447     { return i != o.i || c != o.c; }
1448 
1449     private:
1450     const struct CoverageFormat1 *c;
1451     unsigned int i;
1452   };
1453   private:
1454 
1455   protected:
1456   HBUINT16	coverageFormat;	/* Format identifier--format = 1 */
1457   SortedArray16Of<HBGlyphID>
1458 		glyphArray;	/* Array of GlyphIDs--in numerical order */
1459   public:
1460   DEFINE_SIZE_ARRAY (4, glyphArray);
1461 };
1462 
struct CoverageFormat2
{
  /* Coverage format 2: sorted ranges of glyph IDs; each range stores the
   * coverage index of its first glyph, and indices run consecutively
   * through the range. */

  friend struct Coverage;

  private:
  /* Returns the coverage index of glyph_id, or NOT_COVERED. */
  unsigned int get_coverage (hb_codepoint_t glyph_id) const
  {
    /* bsearch returns a not-found record with first > last when absent. */
    const RangeRecord &range = rangeRecord.bsearch (glyph_id);
    return likely (range.first <= range.last)
	 ? (unsigned int) range.value + (glyph_id - range.first)
	 : NOT_COVERED;
  }

  /* Two passes over the sorted glyph iterator: first count the contiguous
   * ranges to size the array, then fill the range records. */
  template <typename Iterator,
      hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
  bool serialize (hb_serialize_context_t *c, Iterator glyphs)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    if (unlikely (!glyphs))
    {
      rangeRecord.len = 0;
      return_trace (true);
    }

    /* TODO(iter) Write more efficiently? */

    unsigned num_ranges = 0;
    hb_codepoint_t last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      /* A gap in glyph IDs starts a new range. */
      if (last + 1 != g)
	num_ranges++;
      last = g;
    }

    if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);

    unsigned count = 0;			/* Running coverage index. */
    unsigned range = (unsigned) -1;	/* Current range record; -1 before first. */
    last = (hb_codepoint_t) -2;
    for (auto g: glyphs)
    {
      if (last + 1 != g)
      {
	range++;
	rangeRecord[range].first = g;
	rangeRecord[range].value = count;
      }
      rangeRecord[range].last = g;
      last = g;
      count++;
    }

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    /* TODO(iter) Rewrite as dagger. */
    for (const auto& range : rangeRecord.as_array ())
      if (range.intersects (glyphs))
	return true;
    return false;
  }
  /* True if the glyph at coverage index `index` is in `glyphs`. */
  bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
  {
    /* TODO(iter) Rewrite as dagger. */
    for (const auto& range : rangeRecord.as_array ())
    {
      if (range.value <= index &&
	  index < (unsigned int) range.value + (range.last - range.first) &&
	  range.intersects (glyphs))
	return true;
      else if (index < range.value)
	return false;
    }
    return false;
  }

  /* Adds to intersect_glyphs every covered glyph that is also in glyphs. */
  void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
  {
    for (const auto& range : rangeRecord.as_array ())
    {
      if (!range.intersects (glyphs)) continue;
      for (hb_codepoint_t g = range.first; g <= range.last; g++)
        if (glyphs->has (g)) intersect_glyphs->add (g);
    }
  }

  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	return false;
    return true;
  }

  public:
  /* Older compilers need this to be public. */
  struct iter_t
  {
    void init (const CoverageFormat2 &c_)
    {
      c = &c_;
      coverage = 0;
      i = 0;
      /* NOTE(review): rangeRecord[0] on an empty array presumably yields the
       * Null record (first == last == 0), making this safe — confirm the
       * array operator[] bounds behavior. */
      j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
      if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
      {
	/* Broken table. Skip. */
	i = c->rangeRecord.len;
      }
    }
    void fini () {}
    bool more () const { return i < c->rangeRecord.len; }
    void next ()
    {
      if (j >= c->rangeRecord[i].last)
      {
	i++;
	if (more ())
	{
	  unsigned int old = coverage;
	  j = c->rangeRecord[i].first;
	  coverage = c->rangeRecord[i].value;
	  if (unlikely (coverage != old + 1))
	  {
	    /* Broken table. Skip. Important to avoid DoS.
	     * Also, our callers depend on coverage being
	     * consecutive and monotonically increasing,
	     * ie. iota(). */
	   i = c->rangeRecord.len;
	   return;
	  }
	}
	return;
      }
      coverage++;
      j++;
    }
    hb_codepoint_t get_glyph () const { return j; }
    bool operator != (const iter_t& o) const
    { return i != o.i || j != o.j || c != o.c; }

    private:
    const struct CoverageFormat2 *c;
    unsigned int i, coverage;	/* i: current range; coverage: current index. */
    hb_codepoint_t j;		/* Current glyph ID within range i. */
  };
  private:

  protected:
  HBUINT16	coverageFormat;	/* Format identifier--format = 2 */
  SortedArray16Of<RangeRecord>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID. rangeCount entries
				 * long */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};
1634 
1635 struct Coverage
1636 {
  /* Has interface. */
  static constexpr unsigned SENTINEL = NOT_COVERED;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Predicate. */
  bool operator () (hb_codepoint_t k) const { return has (k); }

  /* Alias for get_coverage(). */
  unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
get_coverageOT::Coverage1646   unsigned int get_coverage (hb_codepoint_t glyph_id) const
1647   {
1648     switch (u.format) {
1649     case 1: return u.format1.get_coverage (glyph_id);
1650     case 2: return u.format2.get_coverage (glyph_id);
1651     default:return NOT_COVERED;
1652     }
1653   }
1654 
1655   template <typename Iterator,
1656       hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
serializeOT::Coverage1657   bool serialize (hb_serialize_context_t *c, Iterator glyphs)
1658   {
1659     TRACE_SERIALIZE (this);
1660     if (unlikely (!c->extend_min (*this))) return_trace (false);
1661 
1662     unsigned count = 0;
1663     unsigned num_ranges = 0;
1664     hb_codepoint_t last = (hb_codepoint_t) -2;
1665     for (auto g: glyphs)
1666     {
1667       if (last + 1 != g)
1668 	num_ranges++;
1669       last = g;
1670       count++;
1671     }
1672     u.format = count <= num_ranges * 3 ? 1 : 2;
1673 
1674     switch (u.format)
1675     {
1676     case 1: return_trace (u.format1.serialize (c, glyphs));
1677     case 2: return_trace (u.format2.serialize (c, glyphs));
1678     default:return_trace (false);
1679     }
1680   }
1681 
subsetOT::Coverage1682   bool subset (hb_subset_context_t *c) const
1683   {
1684     TRACE_SUBSET (this);
1685     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1686     const hb_map_t &glyph_map = *c->plan->glyph_map;
1687 
1688     auto it =
1689     + iter ()
1690     | hb_filter (glyphset)
1691     | hb_map_retains_sorting (glyph_map)
1692     ;
1693 
1694     bool ret = bool (it);
1695     Coverage_serialize (c->serializer, it);
1696     return_trace (ret);
1697   }
1698 
sanitizeOT::Coverage1699   bool sanitize (hb_sanitize_context_t *c) const
1700   {
1701     TRACE_SANITIZE (this);
1702     if (!u.format.sanitize (c)) return_trace (false);
1703     switch (u.format)
1704     {
1705     case 1: return_trace (u.format1.sanitize (c));
1706     case 2: return_trace (u.format2.sanitize (c));
1707     default:return_trace (true);
1708     }
1709   }
1710 
intersectsOT::Coverage1711   bool intersects (const hb_set_t *glyphs) const
1712   {
1713     switch (u.format)
1714     {
1715     case 1: return u.format1.intersects (glyphs);
1716     case 2: return u.format2.intersects (glyphs);
1717     default:return false;
1718     }
1719   }
intersects_coverageOT::Coverage1720   bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
1721   {
1722     switch (u.format)
1723     {
1724     case 1: return u.format1.intersects_coverage (glyphs, index);
1725     case 2: return u.format2.intersects_coverage (glyphs, index);
1726     default:return false;
1727     }
1728   }
1729 
1730   /* Might return false if array looks unsorted.
1731    * Used for faster rejection of corrupt data. */
1732   template <typename set_t>
collect_coverageOT::Coverage1733   bool collect_coverage (set_t *glyphs) const
1734   {
1735     switch (u.format)
1736     {
1737     case 1: return u.format1.collect_coverage (glyphs);
1738     case 2: return u.format2.collect_coverage (glyphs);
1739     default:return false;
1740     }
1741   }
1742 
intersected_coverage_glyphsOT::Coverage1743   void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
1744   {
1745     switch (u.format)
1746     {
1747     case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1748     case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
1749     default:return ;
1750     }
1751   }
1752 
  /* Sorted forward iterator over the glyphs covered by this table.
   * Dispatches on the table format to the matching per-format iterator
   * stored in the union below. */
  struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
  {
    static constexpr bool is_sorted_iterator = true;
    iter_t (const Coverage &c_ = Null (Coverage))
    {
      /* Zero the whole iterator (format tag + union) before
       * initializing the active member; an unknown format thus yields
       * an empty iterator. */
      memset (this, 0, sizeof (*this));
      format = c_.u.format;
      switch (format)
      {
      case 1: u.format1.init (c_.u.format1); return;
      case 2: u.format2.init (c_.u.format2); return;
      default:				     return;
      }
    }
    bool __more__ () const
    {
      switch (format)
      {
      case 1: return u.format1.more ();
      case 2: return u.format2.more ();
      default:return false;
      }
    }
    void __next__ ()
    {
      switch (format)
      {
      case 1: u.format1.next (); break;
      case 2: u.format2.next (); break;
      default:			 break;
      }
    }
    typedef hb_codepoint_t __item_t__;
    __item_t__ __item__ () const { return get_glyph (); }

    /* Current glyph ID; 0 for unknown formats. */
    hb_codepoint_t get_glyph () const
    {
      switch (format)
      {
      case 1: return u.format1.get_glyph ();
      case 2: return u.format2.get_glyph ();
      default:return 0;
      }
    }
    bool operator != (const iter_t& o) const
    {
      /* Iterators over different formats never compare equal. */
      if (format != o.format) return true;
      switch (format)
      {
      case 1: return u.format1 != o.u.format1;
      case 2: return u.format2 != o.u.format2;
      default:return false;
      }
    }

    private:
    unsigned int format;
    union {
    CoverageFormat2::iter_t	format2; /* Put this one first since it's larger; helps shut up compiler. */
    CoverageFormat1::iter_t	format1;
    } u;
  };
iterOT::Coverage1815   iter_t iter () const { return iter_t (*this); }
1816 
  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  CoverageFormat1	format1;	/* Format 1 subtable */
  CoverageFormat2	format2;	/* Format 2 subtable */
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
1826 
1827 template<typename Iterator>
1828 static inline void
Coverage_serialize(hb_serialize_context_t * c,Iterator it)1829 Coverage_serialize (hb_serialize_context_t *c,
1830 		    Iterator it)
1831 { c->start_embed<Coverage> ()->serialize (c, it); }
1832 
/* Remap class values to a dense range (recording old→new in klass_map
 * when given) and serialize the resulting glyph→class assignment as a
 * ClassDef.  glyphs and gid_klass_map describe the already-subsetted
 * glyphs; klasses is the set of original class values in use. */
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
					  const hb_map_t &gid_klass_map,
					  hb_sorted_vector_t<HBGlyphID> &glyphs,
					  const hb_set_t &klasses,
                                          bool use_class_zero,
					  hb_map_t *klass_map /*INOUT*/)
{
  if (!klass_map)
  {
    /* No remapping requested: serialize the original class values as-is. */
    ClassDef_serialize (c, hb_zip (glyphs.iter (), + glyphs.iter ()
						   | hb_map (gid_klass_map)));
    return;
  }

  /* any glyph not assigned a class value falls into Class zero (0),
   * if any glyph assigned to class 0, remapping must start with 0->0*/
  if (!use_class_zero)
    klass_map->set (0, 0);

  /* Assign consecutive new values to the remaining classes, skipping
   * slot 0 when it is already pinned above. */
  unsigned idx = klass_map->has (0) ? 1 : 0;
  for (const unsigned k: klasses.iter ())
  {
    if (klass_map->has (k)) continue;
    klass_map->set (k, idx);
    idx++;
  }

  auto it =
  + glyphs.iter ()
  | hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t<hb_codepoint_t, unsigned>
			    {
			      unsigned new_klass = klass_map->get (gid_klass_map[gid]);
			      return hb_pair ((hb_codepoint_t)gid, new_klass);
			    })
  ;

  /* Surface any allocation failure in the inputs to the serializer. */
  c->propagate_error (glyphs, klasses);
  ClassDef_serialize (c, it);
}
1872 
1873 /*
1874  * Class Definition Table
1875  */
1876 
/* Format 1 class definition: one class value per glyph for a contiguous
 * glyph range starting at startGlyph. */
struct ClassDefFormat1
{
  friend struct ClassDef;

  private:
  /* Class of a glyph.  For glyphs below startGlyph the subtraction
   * wraps to a large unsigned index; presumably Array16Of's operator[]
   * returns 0 for out-of-range indices — confirm in hb-open-type.hh. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return classValue[(unsigned int) (glyph_id - startGlyph)];
  }

  /* Serialize from an iterator of (glyph, class) pairs sorted by glyph.
   * Gaps inside the glyph range implicitly get class 0 (presumably the
   * serializer zero-fills the array — confirm classValue.serialize). */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    if (unlikely (!it))
    {
      /* Empty input: emit an empty format-1 table. */
      classFormat = 1;
      startGlyph = 0;
      classValue.len = 0;
      return_trace (true);
    }

    hb_codepoint_t glyph_min = (*it).first;
    hb_codepoint_t glyph_max = + it
			       | hb_map (hb_first)
			       | hb_reduce (hb_max, 0u);
    unsigned glyph_count = glyph_max - glyph_min + 1;

    startGlyph = glyph_min;
    if (unlikely (!classValue.serialize (c, glyph_count))) return_trace (false);
    for (const hb_pair_t<hb_codepoint_t, unsigned> gid_klass_pair : + it)
    {
      unsigned idx = gid_klass_pair.first - glyph_min;
      classValue[idx] = gid_klass_pair.second;
    }
    return_trace (true);
  }

  /* Subset this ClassDef: keep glyphs in the plan (and in glyph_filter,
   * when given) with non-zero class, remap glyph IDs, then remap class
   * values densely and serialize.  Returns false when the result is
   * empty and keep_empty_table is false. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_sorted_vector_t<HBGlyphID> glyphs;
    hb_set_t orig_klasses;
    hb_map_t gid_org_klass_map;

    hb_codepoint_t start = startGlyph;
    hb_codepoint_t end   = start + classValue.len;

    for (const hb_codepoint_t gid : + hb_range (start, end)
                                    | hb_filter (glyphset))
    {
      if (glyph_filter && !glyph_filter->has(gid)) continue;

      /* Class-0 glyphs are implicit in the output; skip them here. */
      unsigned klass = classValue[gid - start];
      if (!klass) continue;

      glyphs.push (glyph_map[gid]);
      gid_org_klass_map.set (glyph_map[gid], klass);
      orig_klasses.add (klass);
    }

    /* Only use class 0 implicitly when no retained glyph would be left
     * without an explicit class. */
    unsigned glyph_count = glyph_filter
                           ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                           : glyphset.get_population ();
    use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
    ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
				  glyphs, orig_klasses, use_class_zero, klass_map);
    return_trace (keep_empty_table || (bool) glyphs);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && classValue.sanitize (c));
  }

  /* Add runs of glyphs with non-zero class to the set, as ranges.
   * NOTE(review): the range ends (startGlyph + i / startGlyph + count)
   * appear to overshoot the run by one glyph if add_range is inclusive;
   * callers seem to treat the result as a superset — confirm hb_set
   * add_range semantics. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int start = 0;
    unsigned int count = classValue.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (classValue[i])
	continue;

      if (start != i)
	if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + i)))
	  return false;

      start = i + 1;
    }
    if (start != count)
      if (unlikely (!glyphs->add_range (startGlyph + start, startGlyph + count)))
	return false;

    return true;
  }

  /* Add all glyphs with the given class to the set. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned klass) const
  {
    unsigned int count = classValue.len;
    for (unsigned int i = 0; i < count; i++)
      if (classValue[i] == klass) glyphs->add (startGlyph + i);
    return true;
  }

  /* Whether any glyph in the set has a non-zero class here. */
  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next()? */
    hb_codepoint_t start = startGlyph;
    hb_codepoint_t end = startGlyph + classValue.len;
    /* Probe just below startGlyph; NOTE(review): when startGlyph is 0
     * this wraps, presumably to HB_SET_VALUE_INVALID which restarts the
     * set iteration from the beginning — confirm. */
    for (hb_codepoint_t iter = startGlyph - 1;
	 hb_set_next (glyphs, &iter) && iter < end;)
      if (classValue[iter - start]) return true;
    return false;
  }
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    unsigned int count = classValue.len;
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      if (!hb_set_next (glyphs, &g)) return false;
      if (g < startGlyph) return true;
      g = startGlyph + count - 1;
      if (hb_set_next (glyphs, &g)) return true;
      /* Fall through. */
    }
    /* TODO Speed up, using set overlap first? */
    /* TODO(iter) Rewrite as dagger. */
    HBUINT16 k {klass};
    const HBUINT16 *arr = classValue.arrayZ;
    for (unsigned int i = 0; i < count; i++)
      if (arr[i] == k && glyphs->has (startGlyph + i))
	return true;
    return false;
  }

  /* Collect glyphs that are both in the set and of the given class. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    unsigned count = classValue.len;
    if (klass == 0)
    {
      /* Class 0: glyphs in the set but outside our explicit range. */
      hb_codepoint_t endGlyph = startGlyph + count -1;
      for (hb_codepoint_t g : glyphs->iter ())
        if (g < startGlyph || g > endGlyph)
          intersect_glyphs->add (g);

      return;
    }

    for (unsigned i = 0; i < count; i++)
      if (classValue[i] == klass && glyphs->has (startGlyph + i))
        intersect_glyphs->add (startGlyph + i);
  }

  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 1 */
  HBGlyphID	startGlyph;	/* First GlyphID of the classValueArray */
  Array16Of<HBUINT16>
		classValue;	/* Array of Class Values--one per GlyphID */
  public:
  DEFINE_SIZE_ARRAY (6, classValue);
};
2055 
/* Format 2 class definition: class values assigned to sorted glyph
 * ranges; glyphs in no range have class 0. */
struct ClassDefFormat2
{
  friend struct ClassDef;

  private:
  /* Class of a glyph via binary search over the sorted ranges. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    return rangeRecord.bsearch (glyph_id).value;
  }

  /* Serialize from an iterator of (glyph, class) pairs sorted by glyph,
   * coalescing consecutive glyphs with equal class into one range. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    if (unlikely (!it))
    {
      /* Empty input: emit an empty format-2 table. */
      classFormat = 2;
      rangeRecord.len = 0;
      return_trace (true);
    }

    unsigned num_ranges = 1;
    hb_codepoint_t prev_gid = (*it).first;
    unsigned prev_klass = (*it).second;

    RangeRecord range_rec;
    range_rec.first = prev_gid;
    range_rec.last = prev_gid;
    range_rec.value = prev_klass;

    RangeRecord *record = c->copy (range_rec);
    if (unlikely (!record)) return_trace (false);

    for (const auto gid_klass_pair : + (++it))
    {
      hb_codepoint_t cur_gid = gid_klass_pair.first;
      unsigned cur_klass = gid_klass_pair.second;

      /* A gap in glyph IDs or a class change closes the open range and
       * starts a new one. */
      if (cur_gid != prev_gid + 1 ||
	  cur_klass != prev_klass)
      {
	if (unlikely (!record)) break;
	record->last = prev_gid;
	num_ranges++;

	range_rec.first = cur_gid;
	range_rec.last = cur_gid;
	range_rec.value = cur_klass;

	record = c->copy (range_rec);
      }

      prev_klass = cur_klass;
      prev_gid = cur_gid;
    }

    /* Close the final open range. */
    if (likely (record)) record->last = prev_gid;
    rangeRecord.len = num_ranges;
    return_trace (true);
  }

  /* Subset this ClassDef: keep glyphs in the plan (and in glyph_filter,
   * when given) with non-zero class, remap glyph IDs, then remap class
   * values densely and serialize.  Returns false when the result is
   * empty and keep_empty_table is false. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    hb_sorted_vector_t<HBGlyphID> glyphs;
    hb_set_t orig_klasses;
    hb_map_t gid_org_klass_map;

    unsigned count = rangeRecord.len;
    for (unsigned i = 0; i < count; i++)
    {
      unsigned klass = rangeRecord[i].value;
      if (!klass) continue;
      hb_codepoint_t start = rangeRecord[i].first;
      hb_codepoint_t end   = rangeRecord[i].last + 1;
      for (hb_codepoint_t g = start; g < end; g++)
      {
	if (!glyphset.has (g)) continue;
        if (glyph_filter && !glyph_filter->has (g)) continue;
	glyphs.push (glyph_map[g]);
	gid_org_klass_map.set (glyph_map[g], klass);
	orig_klasses.add (klass);
      }
    }

    /* Only use class 0 implicitly when no retained glyph would be left
     * without an explicit class. */
    unsigned glyph_count = glyph_filter
                           ? hb_len (hb_iter (glyphset) | hb_filter (glyph_filter))
                           : glyphset.get_population ();
    use_class_zero = use_class_zero && glyph_count <= gid_org_klass_map.get_population ();
    ClassDef_remap_and_serialize (c->serializer, gid_org_klass_map,
				  glyphs, orig_klasses, use_class_zero, klass_map);
    return_trace (keep_empty_table || (bool) glyphs);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rangeRecord.sanitize (c));
  }

  /* Add all glyphs with a non-zero class to the set, range by range. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
      if (rangeRecord[i].value)
	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	  return false;
    return true;
  }

  /* Add all glyphs with the given class to the set. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (rangeRecord[i].value == klass)
	if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
	  return false;
    }
    return true;
  }

  /* Whether any glyph in the set has a non-zero class here. */
  bool intersects (const hb_set_t *glyphs) const
  {
    /* TODO Speed up, using hb_set_next() and bsearch()? */
    unsigned int count = rangeRecord.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const auto& range = rangeRecord[i];
      if (range.intersects (glyphs) && range.value)
	return true;
    }
    return false;
  }
  bool intersects_class (const hb_set_t *glyphs, uint16_t klass) const
  {
    unsigned int count = rangeRecord.len;
    if (klass == 0)
    {
      /* Match if there's any glyph that is not listed! */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (unsigned int i = 0; i < count; i++)
      {
	if (!hb_set_next (glyphs, &g))
	  break;
	/* A set member falls in the gap before this range. */
	if (g < rangeRecord[i].first)
	  return true;
	g = rangeRecord[i].last;
      }
      /* A set member falls after the last range. */
      if (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
	return true;
      /* Fall through. */
    }
    /* TODO Speed up, using set overlap first? */
    /* TODO(iter) Rewrite as dagger. */
    HBUINT16 k {klass};
    const RangeRecord *arr = rangeRecord.arrayZ;
    for (unsigned int i = 0; i < count; i++)
      if (arr[i].value == k && arr[i].intersects (glyphs))
	return true;
    return false;
  }

  /* Collect glyphs that are both in the set and of the given class. */
  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    unsigned count = rangeRecord.len;
    if (klass == 0)
    {
      /* Class 0: walk the set and collect members lying in the gaps
       * between (and after) the explicit ranges. */
      hb_codepoint_t g = HB_SET_VALUE_INVALID;
      for (unsigned int i = 0; i < count; i++)
      {
        if (!hb_set_next (glyphs, &g))
          break;
        while (g != HB_SET_VALUE_INVALID && g < rangeRecord[i].first)
        {
          intersect_glyphs->add (g);
          hb_set_next (glyphs, &g);
        }
        g = rangeRecord[i].last;
      }
      while (g != HB_SET_VALUE_INVALID && hb_set_next (glyphs, &g))
        intersect_glyphs->add (g);

      return;
    }

    hb_codepoint_t g = HB_SET_VALUE_INVALID;
    for (unsigned int i = 0; i < count; i++)
    {
      if (rangeRecord[i].value != klass) continue;

      /* The cursor from the previous range may already sit inside (or
       * beyond) this one. */
      if (g != HB_SET_VALUE_INVALID)
      {
        if (g >= rangeRecord[i].first &&
            g <= rangeRecord[i].last)
          intersect_glyphs->add (g);
        if (g > rangeRecord[i].last)
          continue;
      }

      /* Scan set members within this range. */
      g = rangeRecord[i].first - 1;
      while (hb_set_next (glyphs, &g))
      {
        if (g >= rangeRecord[i].first && g <= rangeRecord[i].last)
          intersect_glyphs->add (g);
        else if (g > rangeRecord[i].last)
          break;
      }
    }
  }

  protected:
  HBUINT16	classFormat;	/* Format identifier--format = 2 */
  SortedArray16Of<RangeRecord>
		rangeRecord;	/* Array of glyph ranges--ordered by
				 * Start GlyphID */
  public:
  DEFINE_SIZE_ARRAY (4, rangeRecord);
};
2288 
/* Class Definition table: maps glyph IDs to class values, dispatching
 * to one of the two storage formats above. */
struct ClassDef
{
  /* Has interface. */
  static constexpr unsigned SENTINEL = 0;
  typedef unsigned int value_t;
  value_t operator [] (hb_codepoint_t k) const { return get (k); }
  bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
  /* Projection. */
  hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }

  unsigned int get (hb_codepoint_t k) const { return get_class (k); }
  /* Class of a glyph; 0 for unknown formats. */
  unsigned int get_class (hb_codepoint_t glyph_id) const
  {
    switch (u.format) {
    case 1: return u.format1.get_class (glyph_id);
    case 2: return u.format2.get_class (glyph_id);
    default:return 0;
    }
  }

  /* Serialize from (glyph, class) pairs sorted by glyph, after dropping
   * class-0 entries; picks whichever format encodes smaller. */
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c, Iterator it_with_class_zero)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);

    /* Class 0 is the implicit default; no need to store it. */
    auto it = + it_with_class_zero | hb_filter (hb_second);

    unsigned format = 2;
    if (likely (it))
    {
      hb_codepoint_t glyph_min = (*it).first;
      hb_codepoint_t glyph_max = glyph_min;

      /* One pass to count glyphs and the ranges format 2 would need. */
      unsigned num_glyphs = 0;
      unsigned num_ranges = 1;
      hb_codepoint_t prev_gid = glyph_min;
      unsigned prev_klass = (*it).second;

      for (const auto gid_klass_pair : it)
      {
	hb_codepoint_t cur_gid = gid_klass_pair.first;
	unsigned cur_klass = gid_klass_pair.second;
        num_glyphs++;
	if (cur_gid == glyph_min) continue;
        if (cur_gid > glyph_max) glyph_max = cur_gid;
	if (cur_gid != prev_gid + 1 ||
	    cur_klass != prev_klass)
	  num_ranges++;

	prev_gid = cur_gid;
	prev_klass = cur_klass;
      }

      /* Compare sizes in 16-bit units: format 1 stores startGlyph plus
       * one value per glyph in the span; format 2 stores three values
       * per range.  Prefer format 1 when it is no larger. */
      if (num_glyphs && 1 + (glyph_max - glyph_min + 1) <= num_ranges * 3)
	format = 1;
    }
    u.format = format;

    switch (u.format)
    {
    case 1: return_trace (u.format1.serialize (c, it));
    case 2: return_trace (u.format2.serialize (c, it));
    default:return_trace (false);
    }
  }

  /* Subset dispatcher; see the per-format subset methods. */
  bool subset (hb_subset_context_t *c,
	       hb_map_t *klass_map = nullptr /*OUT*/,
               bool keep_empty_table = true,
               bool use_class_zero = true,
               const Coverage* glyph_filter = nullptr) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: return_trace (u.format1.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    case 2: return_trace (u.format2.subset (c, klass_map, keep_empty_table, use_class_zero, glyph_filter));
    default:return_trace (false);
    }
  }

  /* Validate the format tag, then the active subtable; unknown formats
   * are tolerated (treated as all-class-0). */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    default:return_trace (true);
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_coverage (set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_coverage (glyphs);
    case 2: return u.format2.collect_coverage (glyphs);
    default:return false;
    }
  }

  /* Might return false if array looks unsorted.
   * Used for faster rejection of corrupt data. */
  template <typename set_t>
  bool collect_class (set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.collect_class (glyphs, klass);
    case 2: return u.format2.collect_class (glyphs, klass);
    default:return false;
    }
  }

  bool intersects (const hb_set_t *glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects (glyphs);
    case 2: return u.format2.intersects (glyphs);
    default:return false;
    }
  }
  bool intersects_class (const hb_set_t *glyphs, unsigned int klass) const
  {
    switch (u.format) {
    case 1: return u.format1.intersects_class (glyphs, klass);
    case 2: return u.format2.intersects_class (glyphs, klass);
    default:return false;
    }
  }

  void intersected_class_glyphs (const hb_set_t *glyphs, unsigned klass, hb_set_t *intersect_glyphs) const
  {
    switch (u.format) {
    case 1: return u.format1.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    case 2: return u.format2.intersected_class_glyphs (glyphs, klass, intersect_glyphs);
    default:return;
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ClassDefFormat1	format1;
  ClassDefFormat2	format2;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
2441 
2442 template<typename Iterator>
ClassDef_serialize(hb_serialize_context_t * c,Iterator it)2443 static inline void ClassDef_serialize (hb_serialize_context_t *c,
2444 				       Iterator it)
2445 { c->start_embed<ClassDef> ()->serialize (c, it); }
2446 
2447 
2448 /*
2449  * Item Variation Store
2450  */
2451 
2452 struct VarRegionAxis
2453 {
evaluateOT::VarRegionAxis2454   float evaluate (int coord) const
2455   {
2456     int start = startCoord, peak = peakCoord, end = endCoord;
2457 
2458     /* TODO Move these to sanitize(). */
2459     if (unlikely (start > peak || peak > end))
2460       return 1.;
2461     if (unlikely (start < 0 && end > 0 && peak != 0))
2462       return 1.;
2463 
2464     if (peak == 0 || coord == peak)
2465       return 1.;
2466 
2467     if (coord <= start || end <= coord)
2468       return 0.;
2469 
2470     /* Interpolate */
2471     if (coord < peak)
2472       return float (coord - start) / (peak - start);
2473     else
2474       return float (end - coord) / (end - peak);
2475   }
2476 
sanitizeOT::VarRegionAxis2477   bool sanitize (hb_sanitize_context_t *c) const
2478   {
2479     TRACE_SANITIZE (this);
2480     return_trace (c->check_struct (this));
2481     /* TODO Handle invalid start/peak/end configs, so we don't
2482      * have to do that at runtime. */
2483   }
2484 
2485   public:
2486   F2DOT14	startCoord;
2487   F2DOT14	peakCoord;
2488   F2DOT14	endCoord;
2489   public:
2490   DEFINE_SIZE_STATIC (6);
2491 };
2492 
/* List of variation regions: a regionCount × axisCount matrix of
 * VarRegionAxis entries, row-major by region. */
struct VarRegionList
{
  /* Scalar of one region at the given normalized coordinates: the
   * product of per-axis tent evaluations, with early exit on zero. */
  float evaluate (unsigned int region_index,
		  const int *coords, unsigned int coord_len) const
  {
    if (unlikely (region_index >= regionCount))
      return 0.;

    const VarRegionAxis *axes = axesZ.arrayZ + (region_index * axisCount);

    float v = 1.;
    unsigned int count = axisCount;
    for (unsigned int i = 0; i < count; i++)
    {
      /* Coordinates not supplied are taken as 0 (default position). */
      int coord = i < coord_len ? coords[i] : 0;
      float factor = axes[i].evaluate (coord);
      if (factor == 0.f)
	return 0.;
      v *= factor;
    }
    return v;
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
  }

  /* Serialize a copy of src containing only the regions retained by
   * region_map, in the map's new order. */
  bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    VarRegionList *out = c->allocate_min<VarRegionList> ();
    if (unlikely (!out)) return_trace (false);
    axisCount = src->axisCount;
    regionCount = region_map.get_population ();
    /* Reserve the axis matrix beyond the fixed-size header. */
    if (unlikely (!c->allocate_size<VarRegionList> (get_size () - min_size))) return_trace (false);
    unsigned int region_count = src->get_region_count ();
    for (unsigned int r = 0; r < regionCount; r++)
    {
      /* Copy each kept region's axis row from its old position. */
      unsigned int backward = region_map.backward (r);
      if (backward >= region_count) return_trace (false);
      memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * backward], VarRegionAxis::static_size * axisCount);
    }

    return_trace (true);
  }

  unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
  unsigned int get_region_count () const { return regionCount; }

  protected:
  HBUINT16	axisCount;
  HBUINT16	regionCount;
  UnsizedArrayOf<VarRegionAxis>
		axesZ;
  public:
  DEFINE_SIZE_ARRAY (4, axesZ);
};
2553 
2554 struct VarData
2555 {
  /* Number of variation regions referenced by this subtable. */
  unsigned int get_region_index_count () const
  { return regionIndices.len; }
2558 
  /* Bytes per item row: shortCount 16-bit deltas followed by
   * (len - shortCount) 8-bit ones, i.e. 2*shortCount + (len - shortCount). */
  unsigned int get_row_size () const
  { return shortCount + regionIndices.len; }
2561 
  /* Total size of the delta matrix (rows × row size), excluding header. */
  unsigned int get_size () const
  { return itemCount * get_row_size (); }
2564 
  /* Interpolated delta for item `inner` at the given coordinates:
   * sum over referenced regions of (region scalar × stored delta).
   * Each row stores shortCount 16-bit deltas first, then 8-bit ones. */
  float get_delta (unsigned int inner,
		   const int *coords, unsigned int coord_count,
		   const VarRegionList &regions) const
  {
    if (unlikely (inner >= itemCount))
      return 0.;

   unsigned int count = regionIndices.len;
   unsigned int scount = shortCount;

   const HBUINT8 *bytes = get_delta_bytes ();
   const HBUINT8 *row = bytes + inner * (scount + count);

   float delta = 0.;
   unsigned int i = 0;

   /* 16-bit deltas. */
   const HBINT16 *scursor = reinterpret_cast<const HBINT16 *> (row);
   for (; i < scount; i++)
   {
     float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
     delta += scalar * *scursor++;
   }
   /* 8-bit deltas follow immediately after the shorts. */
   const HBINT8 *bcursor = reinterpret_cast<const HBINT8 *> (scursor);
   for (; i < count; i++)
   {
     float scalar = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
     delta += scalar * *bcursor++;
   }

   return delta;
  }
2596 
get_scalarsOT::VarData2597   void get_scalars (const int *coords, unsigned int coord_count,
2598 		    const VarRegionList &regions,
2599 		    float *scalars /*OUT */,
2600 		    unsigned int num_scalars) const
2601   {
2602     unsigned count = hb_min (num_scalars, regionIndices.len);
2603     for (unsigned int i = 0; i < count; i++)
2604       scalars[i] = regions.evaluate (regionIndices.arrayZ[i], coords, coord_count);
2605     for (unsigned int i = count; i < num_scalars; i++)
2606       scalars[i] = 0.f;
2607   }
2608 
  /* Validate header, region-index array, the shortCount invariant, and
   * that the full delta matrix lies within the sanitized blob. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  regionIndices.sanitize (c) &&
		  shortCount <= regionIndices.len &&
		  c->check_range (get_delta_bytes (),
				  itemCount,
				  get_row_size ()));
  }
2619 
  /* Serialize a compacted copy of src: rows are pruned/reordered per
   * inner_map, region indices renumbered per region_map, and the
   * short/byte column split re-optimized so all-zero columns are
   * dropped and only columns needing 16 bits stay short. */
  bool serialize (hb_serialize_context_t *c,
		  const VarData *src,
		  const hb_inc_bimap_t &inner_map,
		  const hb_bimap_t &region_map)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    itemCount = inner_map.get_next_value ();

    /* Optimize short count */
    /* First pass: classify each region column over the retained rows as
     * all-zero, byte-sized, or needing 16 bits. */
    unsigned short ri_count = src->regionIndices.len;
    enum delta_size_t { kZero=0, kByte, kShort };
    hb_vector_t<delta_size_t> delta_sz;
    hb_vector_t<unsigned int> ri_map;	/* maps old index to new index */
    delta_sz.resize (ri_count);
    ri_map.resize (ri_count);
    unsigned int new_short_count = 0;
    unsigned int r;
    for (r = 0; r < ri_count; r++)
    {
      delta_sz[r] = kZero;
      for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
      {
	unsigned int old = inner_map.backward (i);
	int16_t delta = src->get_item_delta (old, r);
	if (delta < -128 || 127 < delta)
	{
	  delta_sz[r] = kShort;
	  new_short_count++;
	  break;
	}
	else if (delta != 0)
	  delta_sz[r] = kByte;
      }
    }
    /* Second pass: assign new column positions — shorts first, then
     * bytes; all-zero columns are dropped entirely. */
    unsigned int short_index = 0;
    unsigned int byte_index = new_short_count;
    unsigned int new_ri_count = 0;
    for (r = 0; r < ri_count; r++)
      if (delta_sz[r])
      {
	ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
	new_ri_count++;
      }

    shortCount = new_short_count;
    regionIndices.len = new_ri_count;

    /* Allocate the region-index array body plus the delta matrix. */
    unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount);
    if (unlikely (!c->allocate_size<HBUINT8> (size)))
      return_trace (false);

    for (r = 0; r < ri_count; r++)
      if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];

    /* Copy the retained deltas into their new row/column slots. */
    for (unsigned int i = 0; i < itemCount; i++)
    {
      unsigned int	old = inner_map.backward (i);
      for (unsigned int r = 0; r < ri_count; r++)
	if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
    }

    return_trace (true);
  }
2684 
collect_region_refsOT::VarData2685   void collect_region_refs (hb_inc_bimap_t &region_map, const hb_inc_bimap_t &inner_map) const
2686   {
2687     for (unsigned int r = 0; r < regionIndices.len; r++)
2688     {
2689       unsigned int region = regionIndices[r];
2690       if (region_map.has (region)) continue;
2691       for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
2692 	if (get_item_delta (inner_map.backward (i), r) != 0)
2693 	{
2694 	  region_map.add (region);
2695 	  break;
2696 	}
2697     }
2698   }
2699 
  protected:
  /* Returns a pointer to the raw delta matrix, which starts immediately
   * after the regionIndices array (see the commented-out bytesX member). */
  const HBUINT8 *get_delta_bytes () const
  { return &StructAfter<HBUINT8> (regionIndices); }

  /* Mutable variant, used while serializing. */
  HBUINT8 *get_delta_bytes ()
  { return &StructAfter<HBUINT8> (regionIndices); }
2706 
  /* Returns the delta for (item, region), or 0 if either index is out of
   * range.  Each row stores shortCount 16-bit deltas first, followed by
   * 8-bit deltas for the remaining regions. */
  int16_t get_item_delta (unsigned int item, unsigned int region) const
  {
    if ( item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
    const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      return ((const HBINT16 *)p)[region];	/* 16-bit column */
    else
      return (p + HBINT16::static_size * shortCount)[region - shortCount];	/* 8-bit column */
  }
2716 
  /* Stores delta at (item, region), mirroring get_item_delta's row layout.
   * NOTE(review): unlike get_item_delta there is no bounds check here —
   * callers (serialize) are expected to pass in-range indices. */
  void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
  {
    HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
    if (region < shortCount)
      ((HBINT16 *)p)[region] = delta;	/* 16-bit column */
    else
      (p + HBINT16::static_size * shortCount)[region - shortCount] = delta;	/* 8-bit column */
  }
2725 
  protected:
  HBUINT16		itemCount;	/* Number of delta rows (items). */
  HBUINT16		shortCount;	/* Leading columns stored as 16-bit deltas;
					 * remaining columns are 8-bit. */
  Array16Of<HBUINT16>	regionIndices;	/* Per-column indices into the VarRegionList. */
/*UnsizedArrayOf<HBUINT8>bytesX;*/	/* Delta matrix follows; reached via get_delta_bytes (). */
  public:
  DEFINE_SIZE_ARRAY (6, regionIndices);
};
2734 
/* VariationStore (OpenType ItemVariationStore): resolves packed variation
 * indices to interpolated deltas for a set of normalized design-space
 * coordinates. */
struct VariationStore
{
  private:
  /* Computes the delta for row `inner` of data subtable `outer`, scaled by
   * the region scalars at coords.  Out-of-range outer yields 0; under
   * HB_NO_VAR the function always returns 0 (the rest of the body is then
   * unreachable by design). */
  float get_delta (unsigned int outer, unsigned int inner,
		   const int *coords, unsigned int coord_count) const
  {
#ifdef HB_NO_VAR
    return 0.f;
#endif

    if (unlikely (outer >= dataSets.len))
      return 0.f;

    return (this+dataSets[outer]).get_delta (inner,
					     coords, coord_count,
					     this+regions);
  }

  public:
  /* Same, with outer/inner packed into one 32-bit index:
   * outer in the high 16 bits, inner in the low 16 bits. */
  float get_delta (unsigned int index,
		   const int *coords, unsigned int coord_count) const
  {
    unsigned int outer = index >> 16;
    unsigned int inner = index & 0xFFFF;
    return get_delta (outer, inner, coords, coord_count);
  }

  /* Sanitizes header, region list and data subtables; only format 1 is
   * accepted. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
#ifdef HB_NO_VAR
    return true;
#endif

    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  format == 1 &&
		  regions.sanitize (c, this) &&
		  dataSets.sanitize (c, this));
  }

  /* Serializes a subsetted copy of src.  inner_maps holds one row map per
   * source data subtable (assumes inner_maps.length matches src's subtable
   * count); subtables with an empty map are dropped, and the region list is
   * pruned to regions still referenced by some retained delta. */
  bool serialize (hb_serialize_context_t *c,
		  const VariationStore *src,
		  const hb_array_t <hb_inc_bimap_t> &inner_maps)
  {
    TRACE_SERIALIZE (this);
    unsigned int set_count = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      if (inner_maps[i].get_population () > 0) set_count++;

    unsigned int size = min_size + HBUINT32::static_size * set_count;
    if (unlikely (!c->allocate_size<HBUINT32> (size))) return_trace (false);
    format = 1;

    /* Collect, then canonically order, the regions still in use. */
    hb_inc_bimap_t region_map;
    for (unsigned int i = 0; i < inner_maps.length; i++)
      (src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]);
    region_map.sort ();

    if (unlikely (!regions.serialize (c, this)
		  .serialize (c, &(src+src->regions), region_map))) return_trace (false);

    /* TODO: The following code could be simplified when
     * List16OfOffset16To::subset () can take a custom param to be passed to VarData::serialize ()
     */
    dataSets.len = set_count;
    unsigned int set_index = 0;
    for (unsigned int i = 0; i < inner_maps.length; i++)
    {
      if (inner_maps[i].get_population () == 0) continue;
      if (unlikely (!dataSets[set_index++].serialize (c, this)
		      .serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
	return_trace (false);
    }

    return_trace (true);
  }

  /* Subsets this store to the variation indices recorded in the plan.
   * Builds one inner (row) map per data subtable from the plan's packed
   * (major << 16 | minor) indices, then serializes through serialize (). */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    VariationStore *varstore_prime = c->serializer->start_embed<VariationStore> ();
    if (unlikely (!varstore_prime)) return_trace (false);

    const hb_set_t *variation_indices = c->plan->layout_variation_indices;
    if (variation_indices->is_empty ()) return_trace (false);

    hb_vector_t<hb_inc_bimap_t> inner_maps;
    inner_maps.resize ((unsigned) dataSets.len);
    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].init ();

    for (unsigned idx : c->plan->layout_variation_indices->iter ())
    {
      uint16_t major = idx >> 16;
      uint16_t minor = idx & 0xFFFF;

      /* A major index beyond our subtable count means the plan is
       * inconsistent with this table; bail out cleanly. */
      if (major >= inner_maps.length)
      {
	for (unsigned i = 0; i < inner_maps.length; i++)
	  inner_maps[i].fini ();
	return_trace (false);
      }
      inner_maps[major].add (minor);
    }
    varstore_prime->serialize (c->serializer, this, inner_maps.as_array ());

    for (unsigned i = 0; i < inner_maps.length; i++)
      inner_maps[i].fini ();

    return_trace (
        !c->serializer->in_error()
        && varstore_prime->dataSets);
  }

  /* Number of region indices used by data subtable `ivs`. */
  unsigned int get_region_index_count (unsigned int ivs) const
  { return (this+dataSets[ivs]).get_region_index_count (); }

  /* Evaluates the region scalars of subtable `ivs` at coords into
   * scalars[0..num_scalars); all zeros under HB_NO_VAR. */
  void get_scalars (unsigned int ivs,
		    const int *coords, unsigned int coord_count,
		    float *scalars /*OUT*/,
		    unsigned int num_scalars) const
  {
#ifdef HB_NO_VAR
    for (unsigned i = 0; i < num_scalars; i++)
      scalars[i] = 0.f;
    return;
#endif

    (this+dataSets[ivs]).get_scalars (coords, coord_count, this+regions,
				      &scalars[0], num_scalars);
  }

  unsigned int get_sub_table_count () const { return dataSets.len; }

  protected:
  HBUINT16				format;		/* Set to 1. */
  Offset32To<VarRegionList>		regions;	/* Shared region list. */
  Array16OfOffset32To<VarData>		dataSets;	/* Item variation data subtables. */
  public:
  DEFINE_SIZE_ARRAY (8, dataSets);
};
2877 
2878 /*
2879  * Feature Variations
2880  */
2881 
2882 struct ConditionFormat1
2883 {
2884   friend struct Condition;
2885 
subsetOT::ConditionFormat12886   bool subset (hb_subset_context_t *c) const
2887   {
2888     TRACE_SUBSET (this);
2889     auto *out = c->serializer->embed (this);
2890     if (unlikely (!out)) return_trace (false);
2891     return_trace (true);
2892   }
2893 
2894   private:
evaluateOT::ConditionFormat12895   bool evaluate (const int *coords, unsigned int coord_len) const
2896   {
2897     int coord = axisIndex < coord_len ? coords[axisIndex] : 0;
2898     return filterRangeMinValue <= coord && coord <= filterRangeMaxValue;
2899   }
2900 
sanitizeOT::ConditionFormat12901   bool sanitize (hb_sanitize_context_t *c) const
2902   {
2903     TRACE_SANITIZE (this);
2904     return_trace (c->check_struct (this));
2905   }
2906 
2907   protected:
2908   HBUINT16	format;		/* Format identifier--format = 1 */
2909   HBUINT16	axisIndex;
2910   F2DOT14	filterRangeMinValue;
2911   F2DOT14	filterRangeMaxValue;
2912   public:
2913   DEFINE_SIZE_STATIC (8);
2914 };
2915 
/* Dispatcher over the condition formats.  Only format 1 is defined;
 * unknown formats evaluate to false but still sanitize as valid. */
struct Condition
{
  /* Evaluates this condition at the given normalized coordinates. */
  bool evaluate (const int *coords, unsigned int coord_len) const
  {
    switch (u.format) {
    case 1: return u.format1.evaluate (coords, coord_len);
    default:return false;
    }
  }

  /* Forwards a context operation (subset, etc.) to the concrete format. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    default:return_trace (true);	/* Unknown formats pass: evaluate () ignores them. */
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ConditionFormat1	format1;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
2955 
2956 struct ConditionSet
2957 {
evaluateOT::ConditionSet2958   bool evaluate (const int *coords, unsigned int coord_len) const
2959   {
2960     unsigned int count = conditions.len;
2961     for (unsigned int i = 0; i < count; i++)
2962       if (!(this+conditions.arrayZ[i]).evaluate (coords, coord_len))
2963 	return false;
2964     return true;
2965   }
2966 
subsetOT::ConditionSet2967   bool subset (hb_subset_context_t *c) const
2968   {
2969     TRACE_SUBSET (this);
2970     auto *out = c->serializer->start_embed (this);
2971     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
2972 
2973     + conditions.iter ()
2974     | hb_apply (subset_offset_array (c, out->conditions, this))
2975     ;
2976 
2977     return_trace (bool (out->conditions));
2978   }
2979 
sanitizeOT::ConditionSet2980   bool sanitize (hb_sanitize_context_t *c) const
2981   {
2982     TRACE_SANITIZE (this);
2983     return_trace (conditions.sanitize (c, this));
2984   }
2985 
2986   protected:
2987   Array16OfOffset32To<Condition>	conditions;
2988   public:
2989   DEFINE_SIZE_ARRAY (2, conditions);
2990 };
2991 
/* Pairs a feature index with the alternate Feature table that replaces it
 * when the enclosing variation record's conditions are met. */
struct FeatureTableSubstitutionRecord
{
  friend struct FeatureTableSubstitution;

  /* Adds the lookup indices of the substitute feature to lookup_indexes.
   * base is the enclosing FeatureTableSubstitution (offset base). */
  void collect_lookups (const void *base, hb_set_t *lookup_indexes /* OUT */) const
  {
    return (base+feature).add_lookup_indexes_to (lookup_indexes);
  }

  /* Marks featureIndex as live if the substitute feature uses any of the
   * retained lookups. */
  void closure_features (const void *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    if ((base+feature).intersects_lookup_indexes (lookup_indexes))
      feature_indexes->add (featureIndex);
  }

  /* Subsets this record: remaps featureIndex and serializes the substitute
   * feature.  Returns false (record dropped) if the feature being
   * substituted is not retained. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    if (!c->feature_index_map->has (featureIndex)) {
      // Feature that is being substituted is not being retained, so we don't
      // need this.
      return_trace (false);
    }

    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->featureIndex = c->feature_index_map->get (featureIndex);
    bool ret = out->feature.serialize_subset (c->subset_context, feature, base, c);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && feature.sanitize (c, base));
  }

  protected:
  HBUINT16		featureIndex;	/* Index of the feature being replaced. */
  Offset32To<Feature>	feature;	/* Replacement Feature table. */
  public:
  DEFINE_SIZE_STATIC (6);
};
3038 
3039 struct FeatureTableSubstitution
3040 {
find_substituteOT::FeatureTableSubstitution3041   const Feature *find_substitute (unsigned int feature_index) const
3042   {
3043     unsigned int count = substitutions.len;
3044     for (unsigned int i = 0; i < count; i++)
3045     {
3046       const FeatureTableSubstitutionRecord &record = substitutions.arrayZ[i];
3047       if (record.featureIndex == feature_index)
3048 	return &(this+record.feature);
3049     }
3050     return nullptr;
3051   }
3052 
collect_lookupsOT::FeatureTableSubstitution3053   void collect_lookups (const hb_set_t *feature_indexes,
3054 			hb_set_t       *lookup_indexes /* OUT */) const
3055   {
3056     + hb_iter (substitutions)
3057     | hb_filter (feature_indexes, &FeatureTableSubstitutionRecord::featureIndex)
3058     | hb_apply ([this, lookup_indexes] (const FeatureTableSubstitutionRecord& r)
3059 		{ r.collect_lookups (this, lookup_indexes); })
3060     ;
3061   }
3062 
closure_featuresOT::FeatureTableSubstitution3063   void closure_features (const hb_map_t *lookup_indexes,
3064 			 hb_set_t       *feature_indexes /* OUT */) const
3065   {
3066     for (const FeatureTableSubstitutionRecord& record : substitutions)
3067       record.closure_features (this, lookup_indexes, feature_indexes);
3068   }
3069 
intersects_featuresOT::FeatureTableSubstitution3070   bool intersects_features (const hb_map_t *feature_index_map) const
3071   {
3072     for (const FeatureTableSubstitutionRecord& record : substitutions)
3073     {
3074       if (feature_index_map->has (record.featureIndex)) return true;
3075     }
3076     return false;
3077   }
3078 
subsetOT::FeatureTableSubstitution3079   bool subset (hb_subset_context_t        *c,
3080 	       hb_subset_layout_context_t *l) const
3081   {
3082     TRACE_SUBSET (this);
3083     auto *out = c->serializer->start_embed (*this);
3084     if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
3085 
3086     out->version.major = version.major;
3087     out->version.minor = version.minor;
3088 
3089     + substitutions.iter ()
3090     | hb_apply (subset_record_array (l, &(out->substitutions), this))
3091     ;
3092 
3093     return_trace (bool (out->substitutions));
3094   }
3095 
sanitizeOT::FeatureTableSubstitution3096   bool sanitize (hb_sanitize_context_t *c) const
3097   {
3098     TRACE_SANITIZE (this);
3099     return_trace (version.sanitize (c) &&
3100 		  likely (version.major == 1) &&
3101 		  substitutions.sanitize (c, this));
3102   }
3103 
3104   protected:
3105   FixedVersion<>	version;	/* Version--0x00010000u */
3106   Array16Of<FeatureTableSubstitutionRecord>
3107 			substitutions;
3108   public:
3109   DEFINE_SIZE_ARRAY (6, substitutions);
3110 };
3111 
/* One entry of FeatureVariations: a condition set plus the substitutions to
 * apply when it holds.  base is always the enclosing FeatureVariations. */
struct FeatureVariationRecord
{
  friend struct FeatureVariations;

  /* Forwards lookup collection to the substitution table. */
  void collect_lookups (const void     *base,
			const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    return (base+substitutions).collect_lookups (feature_indexes, lookup_indexes);
  }

  /* Forwards feature closure to the substitution table. */
  void closure_features (const void     *base,
			 const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    (base+substitutions).closure_features (lookup_indexes, feature_indexes);
  }

  /* True iff the substitution table touches any retained feature. */
  bool intersects_features (const void *base, const hb_map_t *feature_index_map) const
  {
    return (base+substitutions).intersects_features (feature_index_map);
  }

  /* Subsets both referenced subtables into the output record. */
  bool subset (hb_subset_layout_context_t *c, const void *base) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (this);
    if (unlikely (!out)) return_trace (false);

    out->conditions.serialize_subset (c->subset_context, conditions, base);
    out->substitutions.serialize_subset (c->subset_context, substitutions, base, c);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (conditions.sanitize (c, base) &&
		  substitutions.sanitize (c, base));
  }

  protected:
  Offset32To<ConditionSet>
			conditions;
  Offset32To<FeatureTableSubstitution>
			substitutions;
  public:
  DEFINE_SIZE_STATIC (8);
};
3162 
/* GSUB/GPOS FeatureVariations table: an ordered list of
 * (condition set, substitutions) records; the first record whose conditions
 * hold at the current design-space position wins. */
struct FeatureVariations
{
  static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFFFFFu;

  /* Finds the first record whose condition set holds at coords, writing its
   * index to *index.  Returns false (and NOT_FOUND_INDEX) if none match. */
  bool find_index (const int *coords, unsigned int coord_len,
		   unsigned int *index) const
  {
    unsigned int count = varRecords.len;
    for (unsigned int i = 0; i < count; i++)
    {
      const FeatureVariationRecord &record = varRecords.arrayZ[i];
      if ((this+record.conditions).evaluate (coords, coord_len))
      {
	*index = i;
	return true;
      }
    }
    *index = NOT_FOUND_INDEX;
    return false;
  }

  /* Returns the substitute Feature for feature_index within the record
   * chosen by find_index (), or nullptr. */
  const Feature *find_substitute (unsigned int variations_index,
				  unsigned int feature_index) const
  {
    const FeatureVariationRecord &record = varRecords[variations_index];
    return (this+record.substitutions).find_substitute (feature_index);
  }

  FeatureVariations* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed (*this));
  }

  void collect_lookups (const hb_set_t *feature_indexes,
			hb_set_t       *lookup_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& r : varRecords)
      r.collect_lookups (this, feature_indexes, lookup_indexes);
  }

  void closure_features (const hb_map_t *lookup_indexes,
			 hb_set_t       *feature_indexes /* OUT */) const
  {
    for (const FeatureVariationRecord& record : varRecords)
      record.closure_features (this, lookup_indexes, feature_indexes);
  }

  bool subset (hb_subset_context_t *c,
	       hb_subset_layout_context_t *l) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->version.major = version.major;
    out->version.minor = version.minor;

    /* Find the last record that still substitutes a retained feature;
     * everything after it can be dropped.  Earlier non-intersecting records
     * are kept so that record order (and thus first-match semantics) is
     * preserved. */
    int keep_up_to = -1;
    for (int i = varRecords.len - 1; i >= 0; i--) {
      if (varRecords[i].intersects_features (this, l->feature_index_map)) {
        keep_up_to = i;
        break;
      }
    }

    unsigned count = (unsigned) (keep_up_to + 1);
    for (unsigned i = 0; i < count; i++) {
      subset_record_array (l, &(out->varRecords), this) (varRecords[i]);
    }
    return_trace (bool (out->varRecords));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (version.sanitize (c) &&
		  likely (version.major == 1) &&
		  varRecords.sanitize (c, this));
  }

  protected:
  FixedVersion<>	version;	/* Version--0x00010000u */
  Array32Of<FeatureVariationRecord>
			varRecords;
  public:
  DEFINE_SIZE_ARRAY_SIZED (8, varRecords);
};
3251 
3252 
3253 /*
3254  * Device Tables
3255  */
3256 
/* Hinting Device table (formats 1-3): per-ppem pixel adjustments stored as
 * packed signed values of 2, 4, or 8 bits. */
struct HintingDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font) const
  { return get_delta (font->x_ppem, font->x_scale); }

  hb_position_t get_y_delta (hb_font_t *font) const
  { return get_delta (font->y_ppem, font->y_scale); }

  public:

  /* Size in bytes of this table, derived from the ppem range and the
   * packing density implied by deltaFormat.  Falls back to the 3-word
   * header size for invalid format/range values. */
  unsigned int get_size () const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3 || startSize > endSize)) return 3 * HBUINT16::static_size;
    return HBUINT16::static_size * (4 + ((endSize - startSize) >> (4 - f)));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
  }

  HintingDevice* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<HintingDevice> (this));
  }

  private:

  /* Scales the pixel delta at `ppem` to font units via `scale`.
   * 64-bit intermediate avoids overflow in pixels * scale. */
  int get_delta (unsigned int ppem, int scale) const
  {
    if (!ppem) return 0;

    int pixels = get_delta_pixels (ppem);

    if (!pixels) return 0;

    return (int) (pixels * (int64_t) scale / ppem);
  }
  /* Extracts the signed packed delta for ppem_size.
   * With f = deltaFormat, each 16-bit word holds 2^(4-f) values of 2^f bits
   * (f=1: 8×2-bit, f=2: 4×4-bit, f=3: 2×8-bit). */
  int get_delta_pixels (unsigned int ppem_size) const
  {
    unsigned int f = deltaFormat;
    if (unlikely (f < 1 || f > 3))
      return 0;

    if (ppem_size < startSize || ppem_size > endSize)
      return 0;

    unsigned int s = ppem_size - startSize;

    /* Word holding the value, the value's bits shifted to the bottom,
     * and a mask of its width. */
    unsigned int byte = deltaValueZ[s >> (4 - f)];
    unsigned int bits = (byte >> (16 - (((s & ((1 << (4 - f)) - 1)) + 1) << f)));
    unsigned int mask = (0xFFFFu >> (16 - (1 << f)));

    int delta = bits & mask;

    /* Sign-extend: values at or above half the range are negative. */
    if ((unsigned int) delta >= ((mask + 1) >> 1))
      delta -= mask + 1;

    return delta;
  }

  protected:
  HBUINT16	startSize;		/* Smallest size to correct--in ppem */
  HBUINT16	endSize;		/* Largest size to correct--in ppem */
  HBUINT16	deltaFormat;		/* Format of DeltaValue array data: 1, 2, or 3
					 * 1	Signed 2-bit value, 8 values per uint16
					 * 2	Signed 4-bit value, 4 values per uint16
					 * 3	Signed 8-bit value, 2 values per uint16
					 */
  UnsizedArrayOf<HBUINT16>
		deltaValueZ;		/* Array of compressed data */
  public:
  DEFINE_SIZE_ARRAY (6, deltaValueZ);
};
3338 
/* VariationIndex device (format 0x8000): resolves its delta through a
 * VariationStore rather than a ppem table. */
struct VariationDevice
{
  friend struct Device;

  private:

  hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_x (get_delta (font, store)); }

  hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store) const
  { return font->em_scalef_y (get_delta (font, store)); }

  /* Copies this device, remapping varIdx through layout_variation_idx_map.
   * Returns nullptr (reverting the serializer) if the index was not
   * retained; an empty/absent map copies unchanged. */
  VariationDevice* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    auto snap = c->snapshot ();
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);
    if (!layout_variation_idx_map || layout_variation_idx_map->is_empty ()) return_trace (out);

    /* TODO Just get() and bail if NO_VARIATION. Needs to setup the map to return that. */
    if (!layout_variation_idx_map->has (varIdx))
    {
      c->revert (snap);
      return_trace (nullptr);
    }
    unsigned new_idx = layout_variation_idx_map->get (varIdx);
    out->varIdx = new_idx;
    return_trace (out);
  }

  void record_variation_index (hb_set_t *layout_variation_indices) const
  {
    layout_variation_indices->add (varIdx);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  private:

  float get_delta (hb_font_t *font, const VariationStore &store) const
  {
    return store.get_delta (varIdx, font->coords, font->num_coords);
  }

  protected:
  VarIdx	varIdx;		/* Packed (outer << 16 | inner) variation index. */
  HBUINT16	deltaFormat;	/* Format identifier for this table: 0x8000 */
  public:
  DEFINE_SIZE_STATIC (6);
};
3394 
/* Common prefix of the Device union: `format` sits at offset 4, the same
 * offset as HintingDevice's deltaFormat and VariationDevice's deltaFormat,
 * so it can be read before knowing the concrete type. */
struct DeviceHeader
{
  protected:
  HBUINT16		reserved1;
  HBUINT16		reserved2;
  public:
  HBUINT16		format;		/* Format identifier */
  public:
  DEFINE_SIZE_STATIC (6);
};
3405 
3406 struct Device
3407 {
get_x_deltaOT::Device3408   hb_position_t get_x_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3409   {
3410     switch (u.b.format)
3411     {
3412 #ifndef HB_NO_HINTING
3413     case 1: case 2: case 3:
3414       return u.hinting.get_x_delta (font);
3415 #endif
3416 #ifndef HB_NO_VAR
3417     case 0x8000:
3418       return u.variation.get_x_delta (font, store);
3419 #endif
3420     default:
3421       return 0;
3422     }
3423   }
get_y_deltaOT::Device3424   hb_position_t get_y_delta (hb_font_t *font, const VariationStore &store=Null (VariationStore)) const
3425   {
3426     switch (u.b.format)
3427     {
3428     case 1: case 2: case 3:
3429 #ifndef HB_NO_HINTING
3430       return u.hinting.get_y_delta (font);
3431 #endif
3432 #ifndef HB_NO_VAR
3433     case 0x8000:
3434       return u.variation.get_y_delta (font, store);
3435 #endif
3436     default:
3437       return 0;
3438     }
3439   }
3440 
sanitizeOT::Device3441   bool sanitize (hb_sanitize_context_t *c) const
3442   {
3443     TRACE_SANITIZE (this);
3444     if (!u.b.format.sanitize (c)) return_trace (false);
3445     switch (u.b.format) {
3446 #ifndef HB_NO_HINTING
3447     case 1: case 2: case 3:
3448       return_trace (u.hinting.sanitize (c));
3449 #endif
3450 #ifndef HB_NO_VAR
3451     case 0x8000:
3452       return_trace (u.variation.sanitize (c));
3453 #endif
3454     default:
3455       return_trace (true);
3456     }
3457   }
3458 
copyOT::Device3459   Device* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map=nullptr) const
3460   {
3461     TRACE_SERIALIZE (this);
3462     switch (u.b.format) {
3463 #ifndef HB_NO_HINTING
3464     case 1:
3465     case 2:
3466     case 3:
3467       return_trace (reinterpret_cast<Device *> (u.hinting.copy (c)));
3468 #endif
3469 #ifndef HB_NO_VAR
3470     case 0x8000:
3471       return_trace (reinterpret_cast<Device *> (u.variation.copy (c, layout_variation_idx_map)));
3472 #endif
3473     default:
3474       return_trace (nullptr);
3475     }
3476   }
3477 
collect_variation_indicesOT::Device3478   void collect_variation_indices (hb_set_t *layout_variation_indices) const
3479   {
3480     switch (u.b.format) {
3481 #ifndef HB_NO_HINTING
3482     case 1:
3483     case 2:
3484     case 3:
3485       return;
3486 #endif
3487 #ifndef HB_NO_VAR
3488     case 0x8000:
3489       u.variation.record_variation_index (layout_variation_indices);
3490       return;
3491 #endif
3492     default:
3493       return;
3494     }
3495   }
3496 
3497   protected:
3498   union {
3499   DeviceHeader		b;
3500   HintingDevice		hinting;
3501 #ifndef HB_NO_VAR
3502   VariationDevice	variation;
3503 #endif
3504   } u;
3505   public:
3506   DEFINE_SIZE_UNION (6, b);
3507 };
3508 
3509 
3510 } /* namespace OT */
3511 
3512 
3513 #endif /* HB_OT_LAYOUT_COMMON_HH */
3514