/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos.hh"


namespace OT {

struct MarkArray;
static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
                                                 const MarkArray &mark_array,
                                                 const hb_set_t  &glyphset,
                                                 hb_map_t*        klass_mapping /* INOUT */);

/* buffer **position** var allocations */
#define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for forward. */
#define attach_type() var.u8[2] /* attachment type */
/* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */

enum attach_type_t {
  ATTACH_TYPE_NONE      = 0X00,

  /* Each attachment should be either a mark or a cursive; can't be both. */
  ATTACH_TYPE_MARK      = 0X01,
  ATTACH_TYPE_CURSIVE   = 0X02,
};
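
/* Informative note (not part of the table data): a finishing pass over the
 * buffer can follow attach_chain() to accumulate attachment offsets, roughly:
 *
 *   if (pos[i].attach_chain ())
 *     add the offsets of the glyph at i + pos[i].attach_chain () to glyph i;
 *
 * attach_type() only records whether that link came from a mark or a
 * cursive attachment. */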


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef HBUINT16 Value;

typedef UnsizedArrayOf<Value> ValueRecord;
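
/* A ValueRecord is an unsized run of 16-bit fields; which fields are present,
 * and hence how long the record is, is determined entirely by the
 * accompanying ValueFormat. */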

struct ValueFormat : HBUINT16
{
  enum Flags {
    xPlacement  = 0x0001u,      /* Includes horizontal adjustment for placement */
    yPlacement  = 0x0002u,      /* Includes vertical adjustment for placement */
    xAdvance    = 0x0004u,      /* Includes horizontal adjustment for advance */
    yAdvance    = 0x0008u,      /* Includes vertical adjustment for advance */
    xPlaDevice  = 0x0010u,      /* Includes horizontal Device table for placement */
    yPlaDevice  = 0x0020u,      /* Includes vertical Device table for placement */
    xAdvDevice  = 0x0040u,      /* Includes horizontal Device table for advance */
    yAdvDevice  = 0x0080u,      /* Includes vertical Device table for advance */
    ignored     = 0x0F00u,      /* Was used in TrueType Open for MM fonts */
    reserved    = 0xF000u,      /* For future use */

    devices     = 0x00F0u       /* Mask for having any Device table */
  };
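
  /* Informative example: a value record written with format
   * (xPlacement | xAdvance) == 0x0005 packs exactly two HBINT16s, in flag
   * order {xPlacement, xAdvance}; get_len () is 2 and get_size () is 4. */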

/* All fields are optional.  Only those available advance the value pointer. */
#if 0
  HBINT16               xPlacement;     /* Horizontal adjustment for
                                         * placement--in design units */
  HBINT16               yPlacement;     /* Vertical adjustment for
                                         * placement--in design units */
  HBINT16               xAdvance;       /* Horizontal adjustment for
                                         * advance--in design units (only used
                                         * for horizontal writing) */
  HBINT16               yAdvance;       /* Vertical adjustment for advance--in
                                         * design units (only used for vertical
                                         * writing) */
  OffsetTo<Device>      xPlaDevice;     /* Offset to Device table for
                                         * horizontal placement--measured from
                                         * beginning of PosTable (may be NULL) */
  OffsetTo<Device>      yPlaDevice;     /* Offset to Device table for vertical
                                         * placement--measured from beginning
                                         * of PosTable (may be NULL) */
  OffsetTo<Device>      xAdvDevice;     /* Offset to Device table for
                                         * horizontal advance--measured from
                                         * beginning of PosTable (may be NULL) */
  OffsetTo<Device>      yAdvDevice;     /* Offset to Device table for vertical
                                         * advance--measured from beginning of
                                         * PosTable (may be NULL) */
#endif

  unsigned int get_len () const  { return hb_popcount ((unsigned int) *this); }
  unsigned int get_size () const { return get_len () * Value::static_size; }

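  /* Apply the adjustments selected by this format to one glyph position.
   * Returns true if any value actually read was non-zero, which callers use
   * to decide whether to mark the range unsafe-to-break. */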
  bool apply_value (hb_ot_apply_context_t *c,
                    const void            *base,
                    const Value           *values,
                    hb_glyph_position_t   &glyph_pos) const
  {
    bool ret = false;
    unsigned int format = *this;
    if (!format) return ret;

    hb_font_t *font = c->font;
    bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);

    if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++, &ret));
    if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++, &ret));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
      values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
      values++;
    }

    if (!has_device ()) return ret;

    bool use_x_device = font->x_ppem || font->num_coords;
    bool use_y_device = font->y_ppem || font->num_coords;

    if (!use_x_device && !use_y_device) return ret;

    const VariationStore &store = c->var_store;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (use_x_device) glyph_pos.x_offset  += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yPlaDevice) {
      if (use_y_device) glyph_pos.y_offset  += (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    return ret;
  }

  void serialize_copy (hb_serialize_context_t *c, const void *base,
                       const Value *values, const hb_map_t *layout_variation_idx_map) const
  {
    unsigned int format = *this;
    if (!format) return;

    if (format & xPlacement) c->copy (*values++);
    if (format & yPlacement) c->copy (*values++);
    if (format & xAdvance)   c->copy (*values++);
    if (format & yAdvance)   c->copy (*values++);

    if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const void *base,
                                  const hb_array_t<const Value>& values) const
  {
    unsigned format = *this;
    unsigned i = 0;
    if (format & xPlacement) i++;
    if (format & yPlacement) i++;
    if (format & xAdvance) i++;
    if (format & yAdvance) i++;
    if (format & xPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::xAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }
  }

  private:
  bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  static inline OffsetTo<Device>& get_device (Value* value)
  {
    return *static_cast<OffsetTo<Device> *> (value);
  }
  static inline const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *static_cast<const OffsetTo<Device> *> (value);
  }

  bool copy_device (hb_serialize_context_t *c, const void *base,
                    const Value *src_value, const hb_map_t *layout_variation_idx_map) const
  {
    Value       *dst_value = c->copy (*src_value);

    if (!dst_value) return false;
    if (*dst_value == 0) return true;

    *dst_value = 0;
    c->push ();
    if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
    {
      c->add_link (*dst_value, c->pop_pack ());
      return true;
    }
    else
    {
      c->pop_discard ();
      return false;
    }
  }

  static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *reinterpret_cast<const HBINT16 *> (value);
  }

  public:

  bool has_device () const
  {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
  {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_range (values, count, get_size ())) return_trace (false);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return_trace (false);
      values += len;
    }

    return_trace (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
  {
    TRACE_SANITIZE (this);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
        return_trace (false);
      values += stride;
    }

    return_trace (true);
  }
};

template<typename Iterator>
static void SinglePos_serialize (hb_serialize_context_t *c,
                                 const void *src,
                                 Iterator it,
                                 ValueFormat valFormat,
                                 const hb_map_t *layout_variation_idx_map);


struct AnchorFormat1
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
                   float *x, float *y) const
  {
    hb_font_t *font = c->font;
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  AnchorFormat1* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<AnchorFormat1> (this));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  FWORD         xCoordinate;            /* Horizontal value--in design units */
  FWORD         yCoordinate;            /* Vertical value--in design units */
  public:
  DEFINE_SIZE_STATIC (6);
};

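/* AnchorFormat2 additionally names a glyph contour point; when hinting is in
 * effect (non-zero ppem) the hinted position of that point, if available,
 * takes precedence over the design-space coordinates below. */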
struct AnchorFormat2
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
                   float *x, float *y) const
  {
    hb_font_t *font = c->font;

#ifdef HB_NO_HINTING
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);
    return;
#endif

    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx = 0, cy = 0;
    bool ret;

    ret = (x_ppem || y_ppem) &&
          font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
    *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  AnchorFormat2* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<AnchorFormat2> (this));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 2 */
  FWORD         xCoordinate;            /* Horizontal value--in design units */
  FWORD         yCoordinate;            /* Vertical value--in design units */
  HBUINT16      anchorPoint;            /* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct AnchorFormat3
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
                   float *x, float *y) const
  {
    hb_font_t *font = c->font;
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);

    if (font->x_ppem || font->num_coords)
      *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
    if (font->y_ppem || font->num_coords)
      *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  AnchorFormat3* copy (hb_serialize_context_t *c,
                       const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    if (!layout_variation_idx_map) return_trace (nullptr);

    auto *out = c->embed<AnchorFormat3> (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    (this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
    (this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 3 */
  FWORD         xCoordinate;            /* Horizontal value--in design units */
  FWORD         yCoordinate;            /* Vertical value--in design units */
  OffsetTo<Device>
                xDeviceTable;           /* Offset to Device table for X
                                         * coordinate-- from beginning of
                                         * Anchor table (may be NULL) */
  OffsetTo<Device>
                yDeviceTable;           /* Offset to Device table for Y
                                         * coordinate-- from beginning of
                                         * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

struct Anchor
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
                   float *x, float *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
    default:                                          return;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    case 3: return_trace (u.format3.sanitize (c));
    default:return_trace (true);
    }
  }

  Anchor* copy (hb_serialize_context_t *c, const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    switch (u.format) {
    case 1: return_trace (reinterpret_cast<Anchor *> (u.format1.copy (c)));
    case 2: return_trace (reinterpret_cast<Anchor *> (u.format2.copy (c)));
    case 3: return_trace (reinterpret_cast<Anchor *> (u.format3.copy (c, layout_variation_idx_map)));
    default:return_trace (nullptr);
    }
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    switch (u.format) {
    case 1: case 2:
      return;
    case 3:
      u.format3.collect_variation_indices (c);
      return;
    default: return;
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  AnchorFormat1         format1;
  AnchorFormat2         format2;
  AnchorFormat3         format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};


struct AnchorMatrix
{
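  /* Rows index the attachment target (in Coverage order) and columns index
   * the mark class; the anchor for (row, col) is matrixZ[row * cols + col].
   * The column count is not stored here and must be supplied by callers. */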
  const Anchor& get_anchor (unsigned int row, unsigned int col,
                            unsigned int cols, bool *found) const
  {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
    *found = !matrixZ[row * cols + col].is_null ();
    return this+matrixZ[row * cols + col];
  }

  template <typename Iterator,
            hb_requires (hb_is_iterator (Iterator))>
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  Iterator index_iter) const
  {
    for (unsigned i : index_iter)
      (this+matrixZ[i]).collect_variation_indices (c);
  }

  template <typename Iterator,
            hb_requires (hb_is_iterator (Iterator))>
  bool serialize (hb_serialize_context_t *c,
                  unsigned                num_rows,
                  AnchorMatrix const     *offset_matrix,
                  const hb_map_t         *layout_variation_idx_map,
                  Iterator                index_iter)
  {
    TRACE_SERIALIZE (this);
    if (!index_iter) return_trace (false);
    if (unlikely (!c->extend_min ((*this))))  return_trace (false);

    this->rows = num_rows;
    for (const unsigned i : index_iter)
    {
      auto *offset = c->embed (offset_matrix->matrixZ[i]);
      if (!offset) return_trace (false);
      offset->serialize_copy (c, offset_matrix->matrixZ[i],
                              offset_matrix, c->to_bias (this),
                              hb_serialize_context_t::Head,
                              layout_variation_idx_map);
    }

    return_trace (true);
  }

  bool subset (hb_subset_context_t *c,
               unsigned cols,
               const hb_map_t *klass_mapping) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);

    auto indexes =
    + hb_range (rows * cols)
    | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % cols); })
    ;

    out->serialize (c->serializer,
                    (unsigned) rows,
                    this,
                    c->plan->layout_variation_idx_map,
                    indexes);
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrixZ[i].sanitize (c, this)) return_trace (false);
    return_trace (true);
  }

  HBUINT16      rows;                   /* Number of rows */
  UnsizedArrayOf<OffsetTo<Anchor>>
                matrixZ;                /* Matrix of offsets to Anchor tables--
                                         * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrixZ);
};


struct MarkRecord
{
  friend struct MarkArray;

  unsigned get_class () const { return (unsigned) klass; }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  MarkRecord *copy (hb_serialize_context_t *c,
                    const void             *src_base,
                    unsigned                dst_bias,
                    const hb_map_t         *klass_mapping,
                    const hb_map_t         *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->klass = klass_mapping->get (klass);
    out->markAnchor.serialize_copy (c, markAnchor, src_base, dst_bias, hb_serialize_context_t::Head, layout_variation_idx_map);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const void *src_base) const
  {
    (src_base+markAnchor).collect_variation_indices (c);
  }

  protected:
  HBUINT16      klass;                  /* Class defined for this mark */
  OffsetTo<Anchor>
                markAnchor;             /* Offset to Anchor table--from
                                         * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : ArrayOf<MarkRecord>  /* Array of MarkRecords--in Coverage order */
{
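  /* Attach the mark at buffer->cur() to the glyph at glyph_pos by aligning
   * the mark anchor with the matching base anchor; the offset recorded on
   * the mark is (base_anchor - mark_anchor). */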
  bool apply (hb_ot_apply_context_t *c,
              unsigned int mark_index, unsigned int glyph_index,
              const AnchorMatrix &anchors, unsigned int class_count,
              unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return_trace (false);

    float mark_x, mark_y, base_x, base_y;

    buffer->unsafe_to_break (glyph_pos, buffer->idx);
    mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = roundf (base_x - mark_x);
    o.y_offset = roundf (base_y - mark_y);
    o.attach_type() = ATTACH_TYPE_MARK;
    o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
    buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;

    buffer->idx++;
    return_trace (true);
  }

  template<typename Iterator,
           hb_requires (hb_is_source_of (Iterator, MarkRecord))>
  bool serialize (hb_serialize_context_t *c,
                  const hb_map_t         *klass_mapping,
                  const hb_map_t         *layout_variation_idx_map,
                  const void             *base,
                  Iterator                it)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    if (unlikely (!c->check_assign (len, it.len ()))) return_trace (false);
    c->copy_all (it, base, c->to_bias (this), klass_mapping, layout_variation_idx_map);
    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
  }
};


/* Lookups */

struct SinglePosFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!valueFormat.has_device ()) return;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (c->glyph_set)
    ;

    if (!it) return;
    valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    valueFormat.apply_value (c, this, values, buffer->cur_pos());

    buffer->idx++;
    return_trace (true);
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  const void *src,
                  Iterator it,
                  ValueFormat valFormat,
                  const hb_map_t *layout_variation_idx_map)
  {
    auto out = c->extend_min (*this);
    if (unlikely (!out)) return;
    if (unlikely (!c->check_assign (valueFormat, valFormat))) return;

    + it
    | hb_map (hb_second)
    | hb_apply ([&] (hb_array_t<const Value> _)
                { valFormat.serialize_copy (c, src, &_, layout_variation_idx_map); })
    ;

    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize (c, this).serialize (c, glyphs);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
    ;

    bool ret = bool (it);
    SinglePos_serialize (c->serializer, this, it, valueFormat, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  coverage.sanitize (c, this) &&
                  valueFormat.sanitize_value (c, this, values));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat;            /* Defines the types of data in the
                                         * ValueRecord */
  ValueRecord   values;                 /* Defines positioning
                                         * value(s)--applied to all glyphs in
                                         * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};

struct SinglePosFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!valueFormat.has_device ()) return;

    auto it =
    + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
    | hb_filter (c->glyph_set, hb_first)
    ;

    if (!it) return;

    unsigned sub_length = valueFormat.get_len ();
    const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);

    for (unsigned i : + it
                      | hb_map (hb_second))
      valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));

  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    if (likely (index >= valueCount)) return_trace (false);

    valueFormat.apply_value (c, this,
                             &values[index * valueFormat.get_len ()],
                             buffer->cur_pos());

    buffer->idx++;
    return_trace (true);
  }

  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  const void *src,
                  Iterator it,
                  ValueFormat valFormat,
                  const hb_map_t *layout_variation_idx_map)
  {
    auto out = c->extend_min (*this);
    if (unlikely (!out)) return;
    if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
    if (unlikely (!c->check_assign (valueCount, it.len ()))) return;

    + it
    | hb_map (hb_second)
    | hb_apply ([&] (hb_array_t<const Value> _)
                { valFormat.serialize_copy (c, src, &_, layout_variation_idx_map); })
    ;

    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize (c, this).serialize (c, glyphs);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    unsigned sub_length = valueFormat.get_len ();
    auto values_array = values.as_array (valueCount * sub_length);

    auto it =
    + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
    | hb_filter (glyphset, hb_first)
    | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
                              {
                                return hb_pair (glyph_map[_.first],
                                                values_array.sub_array (_.second * sub_length,
                                                                        sub_length));
                              })
    ;

    bool ret = bool (it);
    SinglePos_serialize (c->serializer, this, it, valueFormat, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
                  coverage.sanitize (c, this) &&
                  valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 2 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat;            /* Defines the types of data in the
                                         * ValueRecord */
  HBUINT16      valueCount;             /* Number of ValueRecords */
  ValueRecord   values;                 /* Array of ValueRecords--positioning
                                         * values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

struct SinglePos
{
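  /* Pick the most compact subtable format for serialization: format 1 when
   * every glyph shares one identical value record, format 2 otherwise. */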
  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  unsigned get_format (Iterator glyph_val_iter_pairs)
  {
    hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);

    for (const auto iter : glyph_val_iter_pairs)
      for (const auto _ : hb_zip (iter.second, first_val_iter))
        if (_.first != _.second)
          return 2;

    return 1;
  }


  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  const void *src,
                  Iterator glyph_val_iter_pairs,
                  ValueFormat valFormat,
                  const hb_map_t *layout_variation_idx_map)
  {
    if (unlikely (!c->extend_min (u.format))) return;
    unsigned format = 2;

    if (glyph_val_iter_pairs) format = get_format (glyph_val_iter_pairs);

    u.format = format;
    switch (u.format) {
    case 1: u.format1.serialize (c, src, glyph_val_iter_pairs, valFormat, layout_variation_idx_map);
            return;
    case 2: u.format2.serialize (c, src, glyph_val_iter_pairs, valFormat, layout_variation_idx_map);
            return;
    default:return;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  SinglePosFormat1      format1;
  SinglePosFormat2      format2;
  } u;
};

template<typename Iterator>
static void
SinglePos_serialize (hb_serialize_context_t *c,
                     const void *src,
                     Iterator it,
                     ValueFormat valFormat,
                     const hb_map_t *layout_variation_idx_map)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, valFormat, layout_variation_idx_map); }


struct PairValueRecord
{
  friend struct PairSet;

  int cmp (hb_codepoint_t k) const
  { return secondGlyph.cmp (k); }

  struct serialize_closure_t
  {
    const void          *base;
    const ValueFormat   *valueFormats;
    unsigned            len1; /* valueFormats[0].get_len() */
    const hb_map_t      *glyph_map;
    const hb_map_t      *layout_variation_idx_map;
  };

  bool serialize (hb_serialize_context_t *c,
                  serialize_closure_t *closure) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->start_embed (*this);
    if (unlikely (!c->extend_min (out))) return_trace (false);

    out->secondGlyph = (*closure->glyph_map)[secondGlyph];

    closure->valueFormats[0].serialize_copy (c, closure->base, &values[0], closure->layout_variation_idx_map);
    closure->valueFormats[1].serialize_copy (c, closure->base, &values[closure->len1], closure->layout_variation_idx_map);

    return_trace (true);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const ValueFormat *valueFormats,
                                  const void *base) const
  {
    unsigned record1_len = valueFormats[0].get_len ();
    unsigned record2_len = valueFormats[1].get_len ();
    const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);

    if (valueFormats[0].has_device ())
      valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));

    if (valueFormats[1].has_device ())
      valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
  }

  protected:
  HBGlyphID     secondGlyph;            /* GlyphID of second glyph in the
                                         * pair--first glyph is listed in the
                                         * Coverage table */
  ValueRecord   values;                 /* Positioning data for the first glyph
                                         * followed by for second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

struct PairSet
{
  friend struct PairPosFormat1;

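  /* Each PairValueRecord is stored inline as one HBUINT16 (secondGlyph)
   * followed by len1 + len2 16-bit values, so records are walked with a
   * stride of HBUINT16::static_size * (1 + len1 + len2) bytes. */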
  bool intersects (const hb_set_t *glyphs,
                   const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (glyphs->has (record->secondGlyph))
        return true;
      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
    return false;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c,
                       const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    c->input->add_array (&record->secondGlyph, len, record_size);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const ValueFormat *valueFormats) const
  {
    unsigned len1 = valueFormats[0].get_len ();
    unsigned len2 = valueFormats[1].get_len ();
    unsigned record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned count = len;
    for (unsigned i = 0; i < count; i++)
    {
      if (c->glyph_set->has (record->secondGlyph))
      { record->collect_variation_indices (c, valueFormats, this); }

      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
  }

  bool apply (hb_ot_apply_context_t *c,
              const ValueFormat *valueFormats,
              unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
                                                &firstPairValueRecord,
                                                len,
                                                record_size);
    if (record)
    {
      /* Note the intentional use of "|" instead of short-circuit "||". */
      if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
          valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
        buffer->unsafe_to_break (buffer->idx, pos + 1);
      if (len2)
        pos++;
      buffer->idx = pos;
      return_trace (true);
    }
    return_trace (false);
  }

  bool subset (hb_subset_context_t *c,
               const ValueFormat valueFormats[2]) const
  {
    TRACE_SUBSET (this);
    auto snap = c->serializer->snapshot ();

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->len = 0;

    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    unsigned len1 = valueFormats[0].get_len ();
    unsigned len2 = valueFormats[1].get_len ();
    unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);

    PairValueRecord::serialize_closure_t closure =
    {
      this,
      valueFormats,
      len1,
      &glyph_map,
      c->plan->layout_variation_idx_map
    };

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned count = len, num = 0;
    for (unsigned i = 0; i < count; i++)
    {
      if (glyphset.has (record->secondGlyph)
         && record->serialize (c->serializer, &closure)) num++;
      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }

    out->len = num;
    if (!num) c->serializer->revert (snap);
    return_trace (num);
  }

  struct sanitize_closure_t
  {
    const ValueFormat *valueFormats;
    unsigned int len1; /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_range (&firstPairValueRecord,
                          len,
                          HBUINT16::static_size,
                          closure->stride))) return_trace (false);

    unsigned int count = len;
    const PairValueRecord *record = &firstPairValueRecord;
    return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
                  closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  HBUINT16              len;    /* Number of PairValueRecords */
  PairValueRecord       firstPairValueRecord;
                                /* Array of PairValueRecords--ordered
                                 * by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_MIN (2);
};

struct PairPosFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  {
    return
    + hb_zip (this+coverage, pairSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map ([glyphs, this] (const OffsetTo<PairSet> &_)
              { return (this+_).intersects (glyphs, valueFormat); })
    | hb_any
    ;
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;

    auto it =
    + hb_zip (this+coverage, pairSet)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    if (!it) return;
    + it
    | hb_map (hb_add (this))
    | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
    ;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, valueFormat);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    if (!skippy_iter.next ()) return_trace (false);

    return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    out->valueFormat[0] = valueFormat[0];
    out->valueFormat[1] = valueFormat[1];

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;

    + hb_zip (this+coverage, pairSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter ([this, c, out] (const OffsetTo<PairSet>& _)
                 {
                   auto *o = out->pairSet.serialize_append (c->serializer);
                   if (unlikely (!o)) return false;
                   auto snap = c->serializer->snapshot ();
                   bool ret = o->serialize_subset (c, _, this, valueFormat);
                   if (!ret)
                   {
                     out->pairSet.pop ();
                     c->serializer->revert (snap);
                   }
                   return ret;
                 },
                 hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize (c->serializer, out)
                 .serialize (c->serializer, new_coverage.iter ());

    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);

    if (!c->check_struct (this)) return_trace (false);

    unsigned int len1 = valueFormat[0].get_len ();
    unsigned int len2 = valueFormat[1].get_len ();
    PairSet::sanitize_closure_t closure =
    {
      valueFormat,
      len1,
      1 + len1 + len2
    };

    return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  OffsetTo<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat[2];         /* [0] Defines the types of data in
                                         * ValueRecord1--for the first glyph
                                         * in the pair--may be zero (0) */
                                        /* [1] Defines the types of data in
                                         * ValueRecord2--for the second glyph
                                         * in the pair--may be zero (0) */
  OffsetArrayOf<PairSet>
                pairSet;                /* Array of PairSet tables
                                         * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};

struct PairPosFormat2
{
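  /* Kerning by class pair: values form a class1Count x class2Count matrix of
   * (len1 + len2)-value records, indexed as
   * (klass1 * class2Count + klass2) * (len1 + len2). */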
intersectsOT::PairPosFormat21349   bool intersects (const hb_set_t *glyphs) const
1350   {
1351     return (this+coverage).intersects (glyphs) &&
1352            (this+classDef2).intersects (glyphs);
1353   }
1354 
closure_lookupsOT::PairPosFormat21355   void closure_lookups (hb_closure_lookups_context_t *c) const {}
collect_variation_indicesOT::PairPosFormat21356   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1357   {
1358     if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;
1359 
1360     hb_set_t class1_set, class2_set;
1361     for (const unsigned cp : c->glyph_set->iter ())
1362     {
1363       unsigned klass1 = (this+classDef1).get (cp);
1364       unsigned klass2 = (this+classDef2).get (cp);
1365       class1_set.add (klass1);
1366       class2_set.add (klass2);
1367     }
1368 
1369     if (class1_set.is_empty () || class2_set.is_empty ()) return;
1370 
1371     unsigned len1 = valueFormat1.get_len ();
1372     unsigned len2 = valueFormat2.get_len ();
1373     const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
1374     for (const unsigned class1_idx : class1_set.iter ())
1375     {
1376       for (const unsigned class2_idx : class2_set.iter ())
1377       {
1378         unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1379         if (valueFormat1.has_device ())
1380           valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));
1381 
1382         if (valueFormat2.has_device ())
1383           valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
1384       }
1385     }
1386   }
1387 
collect_glyphsOT::PairPosFormat21388   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1389   {
1390     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1391     if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
1392   }
1393 
get_coverageOT::PairPosFormat21394   const Coverage &get_coverage () const { return this+coverage; }
1395 
1396   bool apply (hb_ot_apply_context_t *c) const
1397   {
1398     TRACE_APPLY (this);
1399     hb_buffer_t *buffer = c->buffer;
1400     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1401     if (likely (index == NOT_COVERED)) return_trace (false);
1402 
1403     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1404     skippy_iter.reset (buffer->idx, 1);
1405     if (!skippy_iter.next ()) return_trace (false);
1406 
1407     unsigned int len1 = valueFormat1.get_len ();
1408     unsigned int len2 = valueFormat2.get_len ();
1409     unsigned int record_len = len1 + len2;
1410 
1411     unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
1412     unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
1413     if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
1414 
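         /* The value-record matrix is class1-major.  Illustrative example:
          * with class1Count = 4, class2Count = 3, len1 = 2, len2 = 1
          * (record_len = 3), the record for (klass1 = 2, klass2 = 1) starts at
          * values[3 * (2 * 3 + 1)] = values[21]: two Values for the first
          * glyph followed by one Value for the second. */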
1415     const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
1416     /* Note the intentional use of "|" instead of short-circuit "||". */
1417     if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
1418         valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
1419       buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
1420 
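         /* Advance to the second glyph of the pair.  If it carried a value
          * record (len2 != 0) it has already been positioned, so it is skipped
          * as well and will not serve as the first glyph of a following pair. */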
1421     buffer->idx = skippy_iter.idx;
1422     if (len2)
1423       buffer->idx++;
1424 
1425     return_trace (true);
1426   }
1427 
1428   bool subset (hb_subset_context_t *c) const
1429   {
1430     TRACE_SUBSET (this);
1431     auto *out = c->serializer->start_embed (*this);
1432     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1433     out->format = format;
1434     out->valueFormat1 = valueFormat1;
1435     out->valueFormat2 = valueFormat2;
1436 
1437     hb_map_t klass1_map;
1438     out->classDef1.serialize_subset (c, classDef1, this, &klass1_map);
1439     out->class1Count = klass1_map.get_population ();
1440 
1441     hb_map_t klass2_map;
1442     out->classDef2.serialize_subset (c, classDef2, this, &klass2_map);
1443     out->class2Count = klass2_map.get_population ();
1444 
1445     unsigned len1 = valueFormat1.get_len ();
1446     unsigned len2 = valueFormat2.get_len ();
1447 
1448     + hb_range ((unsigned) class1Count)
1449     | hb_filter (klass1_map)
1450     | hb_apply ([&] (const unsigned class1_idx)
1451                 {
1452                   + hb_range ((unsigned) class2Count)
1453                   | hb_filter (klass2_map)
1454                   | hb_apply ([&] (const unsigned class2_idx)
1455                               {
1456                                 unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1457                                 valueFormat1.serialize_copy (c->serializer, this, &values[idx], c->plan->layout_variation_idx_map);
1458                                 valueFormat2.serialize_copy (c->serializer, this, &values[idx + len1], c->plan->layout_variation_idx_map);
1459                               })
1460                   ;
1461                 })
1462     ;
1463 
1464     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1465     const hb_map_t &glyph_map = *c->plan->glyph_map;
1466 
1467     auto it =
1468     + hb_iter (this+coverage)
1469     | hb_filter (glyphset)
1470     | hb_map_retains_sorting (glyph_map)
1471     ;
1472 
1473     out->coverage.serialize (c->serializer, out).serialize (c->serializer, it);
1474     return_trace (out->class1Count && out->class2Count && bool (it));
1475   }
1476 
1477   bool sanitize (hb_sanitize_context_t *c) const
1478   {
1479     TRACE_SANITIZE (this);
1480     if (!(c->check_struct (this)
1481        && coverage.sanitize (c, this)
1482        && classDef1.sanitize (c, this)
1483        && classDef2.sanitize (c, this))) return_trace (false);
1484 
1485     unsigned int len1 = valueFormat1.get_len ();
1486     unsigned int len2 = valueFormat2.get_len ();
1487     unsigned int stride = len1 + len2;
1488     unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
1489     unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
1490     return_trace (c->check_range ((const void *) values,
1491                                   count,
1492                                   record_size) &&
1493                   valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
1494                   valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
1495   }
1496 
1497   protected:
1498   HBUINT16      format;                 /* Format identifier--format = 2 */
1499   OffsetTo<Coverage>
1500                 coverage;               /* Offset to Coverage table--from
1501                                          * beginning of subtable */
1502   ValueFormat   valueFormat1;           /* ValueRecord definition--for the
1503                                          * first glyph of the pair--may be zero
1504                                          * (0) */
1505   ValueFormat   valueFormat2;           /* ValueRecord definition--for the
1506                                          * second glyph of the pair--may be
1507                                          * zero (0) */
1508   OffsetTo<ClassDef>
1509                 classDef1;              /* Offset to ClassDef table--from
1510                                          * beginning of PairPos subtable--for
1511                                          * the first glyph of the pair */
1512   OffsetTo<ClassDef>
1513                 classDef2;              /* Offset to ClassDef table--from
1514                                          * beginning of PairPos subtable--for
1515                                          * the second glyph of the pair */
1516   HBUINT16      class1Count;            /* Number of classes in ClassDef1
1517                                          * table--includes Class0 */
1518   HBUINT16      class2Count;            /* Number of classes in ClassDef2
1519                                          * table--includes Class0 */
1520   ValueRecord   values;                 /* Matrix of value pairs:
1521                                          * class1-major, class2-minor,
1522                                          * Each entry has value1 and value2 */
1523   public:
1524   DEFINE_SIZE_ARRAY (16, values);
1525 };
1526 
1527 struct PairPos
1528 {
1529   template <typename context_t, typename ...Ts>
1530   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1531   {
1532     TRACE_DISPATCH (this, u.format);
1533     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1534     switch (u.format) {
1535     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1536     case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
1537     default:return_trace (c->default_return_value ());
1538     }
1539   }
1540 
1541   protected:
1542   union {
1543   HBUINT16              format;         /* Format identifier */
1544   PairPosFormat1        format1;
1545   PairPosFormat2        format2;
1546   } u;
1547 };
1548 
1549 
1550 struct EntryExitRecord
1551 {
1552   friend struct CursivePosFormat1;
1553 
1554   bool sanitize (hb_sanitize_context_t *c, const void *base) const
1555   {
1556     TRACE_SANITIZE (this);
1557     return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
1558   }
1559 
1560   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
1561                                   const void *src_base) const
1562   {
1563     (src_base+entryAnchor).collect_variation_indices (c);
1564     (src_base+exitAnchor).collect_variation_indices (c);
1565   }
1566 
1567   EntryExitRecord* copy (hb_serialize_context_t *c,
1568                          const void *src_base,
1569                          const void *dst_base,
1570                          const hb_map_t *layout_variation_idx_map) const
1571   {
1572     TRACE_SERIALIZE (this);
1573     auto *out = c->embed (this);
1574     if (unlikely (!out)) return_trace (nullptr);
1575 
1576     out->entryAnchor.serialize_copy (c, entryAnchor, src_base, c->to_bias (dst_base), hb_serialize_context_t::Head, layout_variation_idx_map);
1577     out->exitAnchor.serialize_copy (c, exitAnchor, src_base, c->to_bias (dst_base), hb_serialize_context_t::Head, layout_variation_idx_map);
1578     return_trace (out);
1579   }
1580 
1581   protected:
1582   OffsetTo<Anchor>
1583                 entryAnchor;            /* Offset to EntryAnchor table--from
1584                                          * beginning of CursivePos
1585                                          * subtable--may be NULL */
1586   OffsetTo<Anchor>
1587                 exitAnchor;             /* Offset to ExitAnchor table--from
1588                                          * beginning of CursivePos
1589                                          * subtable--may be NULL */
1590   public:
1591   DEFINE_SIZE_STATIC (4);
1592 };
1593 
1594 static void
1595 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
1596 
1597 struct CursivePosFormat1
1598 {
1599   bool intersects (const hb_set_t *glyphs) const
1600   { return (this+coverage).intersects (glyphs); }
1601 
1602   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1603 
1604   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1605   {
1606     + hb_zip (this+coverage, entryExitRecord)
1607     | hb_filter (c->glyph_set, hb_first)
1608     | hb_map (hb_second)
1609     | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
1610     ;
1611   }
1612 
1613   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1614   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
1615 
1616   const Coverage &get_coverage () const { return this+coverage; }
1617 
1618   bool apply (hb_ot_apply_context_t *c) const
1619   {
1620     TRACE_APPLY (this);
1621     hb_buffer_t *buffer = c->buffer;
1622 
1623     const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage  (buffer->cur().codepoint)];
1624     if (!this_record.entryAnchor) return_trace (false);
1625 
1626     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1627     skippy_iter.reset (buffer->idx, 1);
1628     if (!skippy_iter.prev ()) return_trace (false);
1629 
1630     const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint)];
1631     if (!prev_record.exitAnchor) return_trace (false);
1632 
1633     unsigned int i = skippy_iter.idx;
1634     unsigned int j = buffer->idx;
1635 
1636     buffer->unsafe_to_break (i, j);
1637     float entry_x, entry_y, exit_x, exit_y;
1638     (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
1639     (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
1640 
1641     hb_glyph_position_t *pos = buffer->pos;
1642 
1643     hb_position_t d;
1644     /* Main-direction adjustment */
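         /* In each case the exit glyph's advance is cut at its exit anchor and
          * the entry glyph's origin is shifted so that its entry anchor becomes
          * the new origin; the two anchors then coincide along the direction of
          * writing. */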
1645     switch (c->direction) {
1646       case HB_DIRECTION_LTR:
1647         pos[i].x_advance  = roundf (exit_x) + pos[i].x_offset;
1648 
1649         d = roundf (entry_x) + pos[j].x_offset;
1650         pos[j].x_advance -= d;
1651         pos[j].x_offset  -= d;
1652         break;
1653       case HB_DIRECTION_RTL:
1654         d = roundf (exit_x) + pos[i].x_offset;
1655         pos[i].x_advance -= d;
1656         pos[i].x_offset  -= d;
1657 
1658         pos[j].x_advance  = roundf (entry_x) + pos[j].x_offset;
1659         break;
1660       case HB_DIRECTION_TTB:
1661         pos[i].y_advance  = roundf (exit_y) + pos[i].y_offset;
1662 
1663         d = roundf (entry_y) + pos[j].y_offset;
1664         pos[j].y_advance -= d;
1665         pos[j].y_offset  -= d;
1666         break;
1667       case HB_DIRECTION_BTT:
1668         d = roundf (exit_y) + pos[i].y_offset;
1669         pos[i].y_advance -= d;
1670         pos[i].y_offset  -= d;
1671 
1672         pos[j].y_advance  = roundf (entry_y);
1673         break;
1674       case HB_DIRECTION_INVALID:
1675       default:
1676         break;
1677     }
1678 
1679     /* Cross-direction adjustment */
1680 
1681     /* We attach child to parent (think graph theory and rooted trees) where
1682      * the root stays on baseline and each node aligns itself against its
1683      * parent.
1684      *
1685      * Optimize things for the case of RightToLeft, as that's most common in
1686      * Arabic. */
1687     unsigned int child  = i;
1688     unsigned int parent = j;
1689     hb_position_t x_offset = entry_x - exit_x;
1690     hb_position_t y_offset = entry_y - exit_y;
1691     if  (!(c->lookup_props & LookupFlag::RightToLeft))
1692     {
1693       unsigned int k = child;
1694       child = parent;
1695       parent = k;
1696       x_offset = -x_offset;
1697       y_offset = -y_offset;
1698     }
1699 
1700     /* If the child was already connected to someone else, walk through its old
1701      * chain and reverse the link direction, so that the whole tree of its
1702      * previous connection now attaches to the new parent.  Watch out for the
1703      * case where the new parent is itself on the path of the old chain...
1704      */
1705     reverse_cursive_minor_offset (pos, child, c->direction, parent);
1706 
1707     pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
1708     pos[child].attach_chain() = (int) parent - (int) child;
1709     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
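         /* Only the offset across the writing direction is stored on the child
          * here; the in-direction placement was folded into the advances above.
          * The stored offset is resolved against the parent chain later, in
          * propagate_attachment_offsets (), at position_finish_offsets () time. */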
1710     if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
1711       pos[child].y_offset = y_offset;
1712     else
1713       pos[child].x_offset = x_offset;
1714 
1715     /* If parent was attached to child, break them free.
1716      * https://github.com/harfbuzz/harfbuzz/issues/2469
1717      */
1718     if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
1719       pos[parent].attach_chain() = 0;
1720 
1721     buffer->idx++;
1722     return_trace (true);
1723   }
1724 
1725   template <typename Iterator,
1726             hb_requires (hb_is_iterator (Iterator))>
1727   void serialize (hb_serialize_context_t *c,
1728                   Iterator it,
1729                   const void *src_base,
1730                   const hb_map_t *layout_variation_idx_map)
1731   {
1732     if (unlikely (!c->extend_min ((*this)))) return;
1733     this->format = 1;
1734     this->entryExitRecord.len = it.len ();
1735 
1736     for (const EntryExitRecord& entry_record : + it
1737                                                | hb_map (hb_second))
1738       c->copy (entry_record, src_base, this, layout_variation_idx_map);
1739 
1740     auto glyphs =
1741     + it
1742     | hb_map_retains_sorting (hb_first)
1743     ;
1744 
1745     coverage.serialize (c, this).serialize (c, glyphs);
1746   }
1747 
1748   bool subset (hb_subset_context_t *c) const
1749   {
1750     TRACE_SUBSET (this);
1751     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1752     const hb_map_t &glyph_map = *c->plan->glyph_map;
1753 
1754     auto *out = c->serializer->start_embed (*this);
1755     if (unlikely (!out)) return_trace (false);
1756 
1757     auto it =
1758     + hb_zip (this+coverage, entryExitRecord)
1759     | hb_filter (glyphset, hb_first)
1760     | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
1761                               { return hb_pair (glyph_map[p.first], p.second);})
1762     ;
1763 
1764     bool ret = bool (it);
1765     out->serialize (c->serializer, it, this, c->plan->layout_variation_idx_map);
1766     return_trace (ret);
1767   }
1768 
1769   bool sanitize (hb_sanitize_context_t *c) const
1770   {
1771     TRACE_SANITIZE (this);
1772     return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
1773   }
1774 
1775   protected:
1776   HBUINT16      format;                 /* Format identifier--format = 1 */
1777   OffsetTo<Coverage>
1778                 coverage;               /* Offset to Coverage table--from
1779                                          * beginning of subtable */
1780   ArrayOf<EntryExitRecord>
1781                 entryExitRecord;        /* Array of EntryExit records--in
1782                                          * Coverage Index order */
1783   public:
1784   DEFINE_SIZE_ARRAY (6, entryExitRecord);
1785 };
1786 
1787 struct CursivePos
1788 {
1789   template <typename context_t, typename ...Ts>
1790   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1791   {
1792     TRACE_DISPATCH (this, u.format);
1793     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1794     switch (u.format) {
1795     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1796     default:return_trace (c->default_return_value ());
1797     }
1798   }
1799 
1800   protected:
1801   union {
1802   HBUINT16              format;         /* Format identifier */
1803   CursivePosFormat1     format1;
1804   } u;
1805 };
1806 
1807 
1808 typedef AnchorMatrix BaseArray;         /* base-major--
1809                                          * in order of BaseCoverage Index--,
1810                                          * mark-minor--
1811                                          * ordered by class--zero-based. */
1812 
1813 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
1814                                                  const MarkArray &mark_array,
1815                                                  const hb_set_t  &glyphset,
1816                                                  hb_map_t*        klass_mapping /* INOUT */)
1817 {
1818   hb_set_t orig_classes;
1819 
1820   + hb_zip (mark_coverage, mark_array)
1821   | hb_filter (glyphset, hb_first)
1822   | hb_map (hb_second)
1823   | hb_map (&MarkRecord::get_class)
1824   | hb_sink (orig_classes)
1825   ;
1826 
1827   unsigned idx = 0;
1828   for (auto klass : orig_classes.iter ())
1829   {
1830     if (klass_mapping->has (klass)) continue;
1831     klass_mapping->set (klass, idx);
1832     idx++;
1833   }
1834 }
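     /* The helper above collects the mark classes still reachable through the
      * retained glyphs and maps them onto a dense range, e.g. surviving classes
      * {0, 3, 7} become {0->0, 3->1, 7->2}; the remapped class indexes are then
      * used when subsetting MarkArray entries and AnchorMatrix columns. */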
1835 
1836 struct MarkBasePosFormat1
1837 {
1838   bool intersects (const hb_set_t *glyphs) const
1839   {
1840     return (this+markCoverage).intersects (glyphs) &&
1841            (this+baseCoverage).intersects (glyphs);
1842   }
1843 
1844   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1845 
1846   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1847   {
1848     + hb_zip (this+markCoverage, this+markArray)
1849     | hb_filter (c->glyph_set, hb_first)
1850     | hb_map (hb_second)
1851     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
1852     ;
1853 
1854     hb_map_t klass_mapping;
1855     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
1856 
1857     unsigned basecount = (this+baseArray).rows;
1858     auto base_iter =
1859     + hb_zip (this+baseCoverage, hb_range (basecount))
1860     | hb_filter (c->glyph_set, hb_first)
1861     | hb_map (hb_second)
1862     ;
1863 
1864     hb_sorted_vector_t<unsigned> base_indexes;
1865     for (const unsigned row : base_iter)
1866     {
1867       + hb_range ((unsigned) classCount)
1868       | hb_filter (klass_mapping)
1869       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
1870       | hb_sink (base_indexes)
1871       ;
1872     }
1873     (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
1874   }
1875 
1876   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1877   {
1878     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
1879     if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
1880   }
1881 
1882   const Coverage &get_coverage () const { return this+markCoverage; }
1883 
1884   bool apply (hb_ot_apply_context_t *c) const
1885   {
1886     TRACE_APPLY (this);
1887     hb_buffer_t *buffer = c->buffer;
1888     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
1889     if (likely (mark_index == NOT_COVERED)) return_trace (false);
1890 
1891     /* Now we search backwards for a non-mark glyph */
1892     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1893     skippy_iter.reset (buffer->idx, 1);
1894     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1895     do {
1896       if (!skippy_iter.prev ()) return_trace (false);
1897       /* We only want to attach to the first of a MultipleSubst sequence.
1898        * https://github.com/harfbuzz/harfbuzz/issues/740
1899        * Reject others...
1900        * ...but stop if we find a mark in the MultipleSubst sequence:
1901        * https://github.com/harfbuzz/harfbuzz/issues/1020 */
1902       if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
1903           0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
1904           (skippy_iter.idx == 0 ||
1905            _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
1906            _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
1907            _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
1908            _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
1909            _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
1910            ))
1911         break;
1912       skippy_iter.reject ();
1913     } while (true);
1914 
1915     /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
1916     //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1917 
1918     unsigned int base_index = (this+baseCoverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint);
1919     if (base_index == NOT_COVERED) return_trace (false);
1920 
1921     return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
1922   }
1923 
1924   bool subset (hb_subset_context_t *c) const
1925   {
1926     TRACE_SUBSET (this);
1927     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1928     const hb_map_t &glyph_map = *c->plan->glyph_map;
1929 
1930     auto *out = c->serializer->start_embed (*this);
1931     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1932     out->format = format;
1933 
1934     hb_map_t klass_mapping;
1935     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
1936 
1937     if (!klass_mapping.get_population ()) return_trace (false);
1938     out->classCount = klass_mapping.get_population ();
1939 
1940     auto mark_iter =
1941     + hb_zip (this+markCoverage, this+markArray)
1942     | hb_filter (glyphset, hb_first)
1943     ;
1944 
1945     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1946     + mark_iter
1947     | hb_map (hb_first)
1948     | hb_map (glyph_map)
1949     | hb_sink (new_coverage)
1950     ;
1951 
1952     if (!out->markCoverage.serialize (c->serializer, out)
1953                           .serialize (c->serializer, new_coverage.iter ()))
1954       return_trace (false);
1955 
1956     out->markArray.serialize (c->serializer, out)
1957                   .serialize (c->serializer, &klass_mapping, c->plan->layout_variation_idx_map, &(this+markArray), + mark_iter
1958                                                                                                                    | hb_map (hb_second));
1959 
1960     unsigned basecount = (this+baseArray).rows;
1961     auto base_iter =
1962     + hb_zip (this+baseCoverage, hb_range (basecount))
1963     | hb_filter (glyphset, hb_first)
1964     ;
1965 
1966     new_coverage.reset ();
1967     + base_iter
1968     | hb_map (hb_first)
1969     | hb_map (glyph_map)
1970     | hb_sink (new_coverage)
1971     ;
1972 
1973     if (!out->baseCoverage.serialize (c->serializer, out)
1974                           .serialize (c->serializer, new_coverage.iter ()))
1975       return_trace (false);
1976 
1977     hb_sorted_vector_t<unsigned> base_indexes;
1978     for (const unsigned row : + base_iter
1979                               | hb_map (hb_second))
1980     {
1981       + hb_range ((unsigned) classCount)
1982       | hb_filter (klass_mapping)
1983       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
1984       | hb_sink (base_indexes)
1985       ;
1986     }
1987     out->baseArray.serialize (c->serializer, out)
1988                   .serialize (c->serializer, base_iter.len (), &(this+baseArray), c->plan->layout_variation_idx_map, base_indexes.iter ());
1989 
1990     return_trace (true);
1991   }
1992 
1993   bool sanitize (hb_sanitize_context_t *c) const
1994   {
1995     TRACE_SANITIZE (this);
1996     return_trace (c->check_struct (this) &&
1997                   markCoverage.sanitize (c, this) &&
1998                   baseCoverage.sanitize (c, this) &&
1999                   markArray.sanitize (c, this) &&
2000                   baseArray.sanitize (c, this, (unsigned int) classCount));
2001   }
2002 
2003   protected:
2004   HBUINT16      format;                 /* Format identifier--format = 1 */
2005   OffsetTo<Coverage>
2006                 markCoverage;           /* Offset to MarkCoverage table--from
2007                                          * beginning of MarkBasePos subtable */
2008   OffsetTo<Coverage>
2009                 baseCoverage;           /* Offset to BaseCoverage table--from
2010                                          * beginning of MarkBasePos subtable */
2011   HBUINT16      classCount;             /* Number of classes defined for marks */
2012   OffsetTo<MarkArray>
2013                 markArray;              /* Offset to MarkArray table--from
2014                                          * beginning of MarkBasePos subtable */
2015   OffsetTo<BaseArray>
2016                 baseArray;              /* Offset to BaseArray table--from
2017                                          * beginning of MarkBasePos subtable */
2018   public:
2019   DEFINE_SIZE_STATIC (12);
2020 };
2021 
2022 struct MarkBasePos
2023 {
2024   template <typename context_t, typename ...Ts>
2025   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2026   {
2027     TRACE_DISPATCH (this, u.format);
2028     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2029     switch (u.format) {
2030     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2031     default:return_trace (c->default_return_value ());
2032     }
2033   }
2034 
2035   protected:
2036   union {
2037   HBUINT16              format;         /* Format identifier */
2038   MarkBasePosFormat1    format1;
2039   } u;
2040 };
2041 
2042 
2043 typedef AnchorMatrix LigatureAttach;    /* component-major--
2044                                          * in order of writing direction--,
2045                                          * mark-minor--
2046                                          * ordered by class--zero-based. */
2047 
2048 /* Array of LigatureAttach tables ordered by LigatureCoverage Index */
2049 struct LigatureArray : OffsetListOf<LigatureAttach>
2050 {
2051   template <typename Iterator,
2052             hb_requires (hb_is_iterator (Iterator))>
2053   bool subset (hb_subset_context_t *c,
2054                Iterator             coverage,
2055                unsigned             class_count,
2056                const hb_map_t      *klass_mapping) const
2057   {
2058     TRACE_SUBSET (this);
2059     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2060 
2061     auto *out = c->serializer->start_embed (this);
2062     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
2063 
2064     for (const auto _ : + hb_zip (coverage, *this)
2065                   | hb_filter (glyphset, hb_first))
2066     {
2067       auto *matrix = out->serialize_append (c->serializer);
2068       if (unlikely (!matrix)) return_trace (false);
2069 
2070       matrix->serialize_subset (c,
2071                                 _.second,
2072                                 this,
2073                                 class_count,
2074                                 klass_mapping);
2075     }
2076     return_trace (this->len);
2077   }
2078 };
2079 
2080 struct MarkLigPosFormat1
2081 {
2082   bool intersects (const hb_set_t *glyphs) const
2083   {
2084     return (this+markCoverage).intersects (glyphs) &&
2085            (this+ligatureCoverage).intersects (glyphs);
2086   }
2087 
2088   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2089 
2090   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2091   {
2092     + hb_zip (this+markCoverage, this+markArray)
2093     | hb_filter (c->glyph_set, hb_first)
2094     | hb_map (hb_second)
2095     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
2096     ;
2097 
2098     hb_map_t klass_mapping;
2099     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
2100 
2101     unsigned ligcount = (this+ligatureArray).len;
2102     auto lig_iter =
2103     + hb_zip (this+ligatureCoverage, hb_range (ligcount))
2104     | hb_filter (c->glyph_set, hb_first)
2105     | hb_map (hb_second)
2106     ;
2107 
2108     const LigatureArray& lig_array = this+ligatureArray;
2109     for (const unsigned i : lig_iter)
2110     {
2111       hb_sorted_vector_t<unsigned> lig_indexes;
2112       unsigned row_count = lig_array[i].rows;
2113       for (unsigned row : + hb_range (row_count))
2114       {
2115         + hb_range ((unsigned) classCount)
2116         | hb_filter (klass_mapping)
2117         | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2118         | hb_sink (lig_indexes)
2119         ;
2120       }
2121 
2122       lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
2123     }
2124   }
2125 
2126   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2127   {
2128     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
2129     if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
2130   }
2131 
2132   const Coverage &get_coverage () const { return this+markCoverage; }
2133 
2134   bool apply (hb_ot_apply_context_t *c) const
2135   {
2136     TRACE_APPLY (this);
2137     hb_buffer_t *buffer = c->buffer;
2138     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
2139     if (likely (mark_index == NOT_COVERED)) return_trace (false);
2140 
2141     /* Now we search backwards for a non-mark glyph */
2142     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2143     skippy_iter.reset (buffer->idx, 1);
2144     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
2145     if (!skippy_iter.prev ()) return_trace (false);
2146 
2147     /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
2148     //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2149 
2150     unsigned int j = skippy_iter.idx;
2151     unsigned int lig_index = (this+ligatureCoverage).get_coverage  (buffer->info[j].codepoint);
2152     if (lig_index == NOT_COVERED) return_trace (false);
2153 
2154     const LigatureArray& lig_array = this+ligatureArray;
2155     const LigatureAttach& lig_attach = lig_array[lig_index];
2156 
2157     /* Find component to attach to */
2158     unsigned int comp_count = lig_attach.rows;
2159     if (unlikely (!comp_count)) return_trace (false);
2160 
2161     /* We must now check whether the ligature ID of the current mark glyph
2162      * is identical to the ligature ID of the found ligature.  If yes, we
2163      * can directly use the component index.  If not, we attach the mark
2164      * glyph to the last component of the ligature. */
2165     unsigned int comp_index;
2166     unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2167     unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
2168     unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
2169     if (lig_id && lig_id == mark_id && mark_comp > 0)
2170       comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
2171     else
2172       comp_index = comp_count - 1;
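         /* E.g. for a two-component lam-alef ligature (comp_count = 2), a mark
          * carrying the same ligature id and component number 2 attaches to
          * component index 1, while a mark with no matching ligature id falls
          * back to the last component (comp_count - 1). */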
2173 
2174     return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
2175   }
2176 
2177   bool subset (hb_subset_context_t *c) const
2178   {
2179     TRACE_SUBSET (this);
2180     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2181     const hb_map_t &glyph_map = *c->plan->glyph_map;
2182 
2183     auto *out = c->serializer->start_embed (*this);
2184     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2185     out->format = format;
2186 
2187     hb_map_t klass_mapping;
2188     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
2189 
2190     if (!klass_mapping.get_population ()) return_trace (false);
2191     out->classCount = klass_mapping.get_population ();
2192 
2193     auto mark_iter =
2194     + hb_zip (this+markCoverage, this+markArray)
2195     | hb_filter (glyphset, hb_first)
2196     ;
2197 
2198     auto new_mark_coverage =
2199     + mark_iter
2200     | hb_map_retains_sorting (hb_first)
2201     | hb_map_retains_sorting (glyph_map)
2202     ;
2203 
2204     if (!out->markCoverage.serialize (c->serializer, out)
2205                           .serialize (c->serializer, new_mark_coverage))
2206       return_trace (false);
2207 
2208     out->markArray.serialize (c->serializer, out)
2209                   .serialize (c->serializer,
2210                               &klass_mapping,
2211                               c->plan->layout_variation_idx_map,
2212                               &(this+markArray),
2213                               + mark_iter
2214                               | hb_map (hb_second));
2215 
2216     auto new_ligature_coverage =
2217     + hb_iter (this + ligatureCoverage)
2218     | hb_filter (glyphset)
2219     | hb_map_retains_sorting (glyph_map)
2220     ;
2221 
2222     if (!out->ligatureCoverage.serialize (c->serializer, out)
2223                               .serialize (c->serializer, new_ligature_coverage))
2224       return_trace (false);
2225 
2226     out->ligatureArray.serialize_subset (c, ligatureArray, this,
2227                                          hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
2228 
2229     return_trace (true);
2230   }
2231 
2232   bool sanitize (hb_sanitize_context_t *c) const
2233   {
2234     TRACE_SANITIZE (this);
2235     return_trace (c->check_struct (this) &&
2236                   markCoverage.sanitize (c, this) &&
2237                   ligatureCoverage.sanitize (c, this) &&
2238                   markArray.sanitize (c, this) &&
2239                   ligatureArray.sanitize (c, this, (unsigned int) classCount));
2240   }
2241 
2242   protected:
2243   HBUINT16      format;                 /* Format identifier--format = 1 */
2244   OffsetTo<Coverage>
2245                 markCoverage;           /* Offset to Mark Coverage table--from
2246                                          * beginning of MarkLigPos subtable */
2247   OffsetTo<Coverage>
2248                 ligatureCoverage;       /* Offset to Ligature Coverage
2249                                          * table--from beginning of MarkLigPos
2250                                          * subtable */
2251   HBUINT16      classCount;             /* Number of defined mark classes */
2252   OffsetTo<MarkArray>
2253                 markArray;              /* Offset to MarkArray table--from
2254                                          * beginning of MarkLigPos subtable */
2255   OffsetTo<LigatureArray>
2256                 ligatureArray;          /* Offset to LigatureArray table--from
2257                                          * beginning of MarkLigPos subtable */
2258   public:
2259   DEFINE_SIZE_STATIC (12);
2260 };
2261 
2262 
2263 struct MarkLigPos
2264 {
2265   template <typename context_t, typename ...Ts>
2266   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2267   {
2268     TRACE_DISPATCH (this, u.format);
2269     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2270     switch (u.format) {
2271     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2272     default:return_trace (c->default_return_value ());
2273     }
2274   }
2275 
2276   protected:
2277   union {
2278   HBUINT16              format;         /* Format identifier */
2279   MarkLigPosFormat1     format1;
2280   } u;
2281 };
2282 
2283 
2284 typedef AnchorMatrix Mark2Array;        /* mark2-major--
2285                                          * in order of Mark2Coverage Index--,
2286                                          * mark1-minor--
2287                                          * ordered by class--zero-based. */
2288 
2289 struct MarkMarkPosFormat1
2290 {
2291   bool intersects (const hb_set_t *glyphs) const
2292   {
2293     return (this+mark1Coverage).intersects (glyphs) &&
2294            (this+mark2Coverage).intersects (glyphs);
2295   }
2296 
2297   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2298 
2299   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2300   {
2301     + hb_zip (this+mark1Coverage, this+mark1Array)
2302     | hb_filter (c->glyph_set, hb_first)
2303     | hb_map (hb_second)
2304     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
2305     ;
2306 
2307     hb_map_t klass_mapping;
2308     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);
2309 
2310     unsigned mark2_count = (this+mark2Array).rows;
2311     auto mark2_iter =
2312     + hb_zip (this+mark2Coverage, hb_range (mark2_count))
2313     | hb_filter (c->glyph_set, hb_first)
2314     | hb_map (hb_second)
2315     ;
2316 
2317     hb_sorted_vector_t<unsigned> mark2_indexes;
2318     for (const unsigned row : mark2_iter)
2319     {
2320       + hb_range ((unsigned) classCount)
2321       | hb_filter (klass_mapping)
2322       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2323       | hb_sink (mark2_indexes)
2324       ;
2325     }
2326     (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
2327   }
2328 
2329   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2330   {
2331     if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
2332     if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
2333   }
2334 
2335   const Coverage &get_coverage () const { return this+mark1Coverage; }
2336 
2337   bool apply (hb_ot_apply_context_t *c) const
2338   {
2339     TRACE_APPLY (this);
2340     hb_buffer_t *buffer = c->buffer;
2341     unsigned int mark1_index = (this+mark1Coverage).get_coverage  (buffer->cur().codepoint);
2342     if (likely (mark1_index == NOT_COVERED)) return_trace (false);
2343 
2344     /* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph */
2345     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2346     skippy_iter.reset (buffer->idx, 1);
2347     skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
2348     if (!skippy_iter.prev ()) return_trace (false);
2349 
2350     if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2351 
2352     unsigned int j = skippy_iter.idx;
2353 
2354     unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
2355     unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2356     unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
2357     unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
2358 
2359     if (likely (id1 == id2))
2360     {
2361       if (id1 == 0) /* Marks belonging to the same base. */
2362         goto good;
2363       else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
2364         goto good;
2365     }
2366     else
2367     {
2368       /* If the ligature ids don't match, it may be that one of the marks is
2369        * itself a ligature, in which case we match. */
2370       if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
2371         goto good;
2372     }
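         /* Summary of the cases above: marks on the same plain base (both ids
          * zero) match, marks on the same component of the same ligature match,
          * and a mark that is itself the product of a ligature substitution
          * (id > 0, component 0) may match even when the ids differ. */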
2373 
2374     /* Didn't match. */
2375     return_trace (false);
2376 
2377     good:
2378     unsigned int mark2_index = (this+mark2Coverage).get_coverage  (buffer->info[j].codepoint);
2379     if (mark2_index == NOT_COVERED) return_trace (false);
2380 
2381     return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
2382   }
2383 
2384   bool subset (hb_subset_context_t *c) const
2385   {
2386     TRACE_SUBSET (this);
2387     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2388     const hb_map_t &glyph_map = *c->plan->glyph_map;
2389 
2390     auto *out = c->serializer->start_embed (*this);
2391     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2392     out->format = format;
2393 
2394     hb_map_t klass_mapping;
2395     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
2396 
2397     if (!klass_mapping.get_population ()) return_trace (false);
2398     out->classCount = klass_mapping.get_population ();
2399 
2400     auto mark1_iter =
2401     + hb_zip (this+mark1Coverage, this+mark1Array)
2402     | hb_filter (glyphset, hb_first)
2403     ;
2404 
2405     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2406     + mark1_iter
2407     | hb_map (hb_first)
2408     | hb_map (glyph_map)
2409     | hb_sink (new_coverage)
2410     ;
2411 
2412     if (!out->mark1Coverage.serialize (c->serializer, out)
2413                            .serialize (c->serializer, new_coverage.iter ()))
2414       return_trace (false);
2415 
2416     out->mark1Array.serialize (c->serializer, out)
2417                    .serialize (c->serializer, &klass_mapping, c->plan->layout_variation_idx_map, &(this+mark1Array), + mark1_iter
2418                                                                                                                      | hb_map (hb_second));
2419 
2420     unsigned mark2count = (this+mark2Array).rows;
2421     auto mark2_iter =
2422     + hb_zip (this+mark2Coverage, hb_range (mark2count))
2423     | hb_filter (glyphset, hb_first)
2424     ;
2425 
2426     new_coverage.reset ();
2427     + mark2_iter
2428     | hb_map (hb_first)
2429     | hb_map (glyph_map)
2430     | hb_sink (new_coverage)
2431     ;
2432 
2433     if (!out->mark2Coverage.serialize (c->serializer, out)
2434                            .serialize (c->serializer, new_coverage.iter ()))
2435       return_trace (false);
2436 
2437     hb_sorted_vector_t<unsigned> mark2_indexes;
2438     for (const unsigned row : + mark2_iter
2439                               | hb_map (hb_second))
2440     {
2441       + hb_range ((unsigned) classCount)
2442       | hb_filter (klass_mapping)
2443       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2444       | hb_sink (mark2_indexes)
2445       ;
2446     }
2447     out->mark2Array.serialize (c->serializer, out)
2448                    .serialize (c->serializer, mark2_iter.len (), &(this+mark2Array), c->plan->layout_variation_idx_map, mark2_indexes.iter ());
2449 
2450     return_trace (true);
2451   }
2452 
2453   bool sanitize (hb_sanitize_context_t *c) const
2454   {
2455     TRACE_SANITIZE (this);
2456     return_trace (c->check_struct (this) &&
2457                   mark1Coverage.sanitize (c, this) &&
2458                   mark2Coverage.sanitize (c, this) &&
2459                   mark1Array.sanitize (c, this) &&
2460                   mark2Array.sanitize (c, this, (unsigned int) classCount));
2461   }
2462 
2463   protected:
2464   HBUINT16      format;                 /* Format identifier--format = 1 */
2465   OffsetTo<Coverage>
2466                 mark1Coverage;          /* Offset to Combining Mark1 Coverage
2467                                          * table--from beginning of MarkMarkPos
2468                                          * subtable */
2469   OffsetTo<Coverage>
2470                 mark2Coverage;          /* Offset to Combining Mark2 Coverage
2471                                          * table--from beginning of MarkMarkPos
2472                                          * subtable */
2473   HBUINT16      classCount;             /* Number of defined mark classes */
2474   OffsetTo<MarkArray>
2475                 mark1Array;             /* Offset to Mark1Array table--from
2476                                          * beginning of MarkMarkPos subtable */
2477   OffsetTo<Mark2Array>
2478                 mark2Array;             /* Offset to Mark2Array table--from
2479                                          * beginning of MarkMarkPos subtable */
2480   public:
2481   DEFINE_SIZE_STATIC (12);
2482 };
2483 
2484 struct MarkMarkPos
2485 {
2486   template <typename context_t, typename ...Ts>
2487   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2488   {
2489     TRACE_DISPATCH (this, u.format);
2490     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2491     switch (u.format) {
2492     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2493     default:return_trace (c->default_return_value ());
2494     }
2495   }
2496 
2497   protected:
2498   union {
2499   HBUINT16              format;         /* Format identifier */
2500   MarkMarkPosFormat1    format1;
2501   } u;
2502 };
2503 
2504 
2505 struct ContextPos : Context {};
2506 
2507 struct ChainContextPos : ChainContext {};
2508 
2509 struct ExtensionPos : Extension<ExtensionPos>
2510 {
2511   typedef struct PosLookupSubTable SubTable;
2512 };
2513 
2514 
2515 
2516 /*
2517  * PosLookup
2518  */
2519 
2520 
2521 struct PosLookupSubTable
2522 {
2523   friend struct Lookup;
2524   friend struct PosLookup;
2525 
2526   enum Type {
2527     Single              = 1,
2528     Pair                = 2,
2529     Cursive             = 3,
2530     MarkBase            = 4,
2531     MarkLig             = 5,
2532     MarkMark            = 6,
2533     Context             = 7,
2534     ChainContext        = 8,
2535     Extension           = 9
2536   };
2537 
2538   template <typename context_t, typename ...Ts>
2539   typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
2540   {
2541     TRACE_DISPATCH (this, lookup_type);
2542     switch (lookup_type) {
2543     case Single:                return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
2544     case Pair:                  return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
2545     case Cursive:               return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
2546     case MarkBase:              return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
2547     case MarkLig:               return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
2548     case MarkMark:              return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
2549     case Context:               return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
2550     case ChainContext:          return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
2551     case Extension:             return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
2552     default:                    return_trace (c->default_return_value ());
2553     }
2554   }
2555 
2556   bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
2557   {
2558     hb_intersects_context_t c (glyphs);
2559     return dispatch (&c, lookup_type);
2560   }
2561 
2562   protected:
2563   union {
2564   SinglePos             single;
2565   PairPos               pair;
2566   CursivePos            cursive;
2567   MarkBasePos           markBase;
2568   MarkLigPos            markLig;
2569   MarkMarkPos           markMark;
2570   ContextPos            context;
2571   ChainContextPos       chainContext;
2572   ExtensionPos          extension;
2573   } u;
2574   public:
2575   DEFINE_SIZE_MIN (0);
2576 };
2577 
2578 
2579 struct PosLookup : Lookup
2580 {
2581   typedef struct PosLookupSubTable SubTable;
2582 
2583   const SubTable& get_subtable (unsigned int i) const
2584   { return Lookup::get_subtable<SubTable> (i); }
2585 
2586   bool is_reverse () const
2587   {
2588     return false;
2589   }
2590 
2591   bool apply (hb_ot_apply_context_t *c) const
2592   {
2593     TRACE_APPLY (this);
2594     return_trace (dispatch (c));
2595   }
2596 
2597   bool intersects (const hb_set_t *glyphs) const
2598   {
2599     hb_intersects_context_t c (glyphs);
2600     return dispatch (&c);
2601   }
2602 
2603   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
2604   { return dispatch (c); }
2605 
2606   hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
2607   {
2608     if (c->is_lookup_visited (this_index))
2609       return hb_closure_lookups_context_t::default_return_value ();
2610 
2611     c->set_lookup_visited (this_index);
2612     if (!intersects (c->glyphs))
2613     {
2614       c->set_lookup_inactive (this_index);
2615       return hb_closure_lookups_context_t::default_return_value ();
2616     }
2617     c->set_recurse_func (dispatch_closure_lookups_recurse_func);
2618 
2619     hb_closure_lookups_context_t::return_t ret = dispatch (c);
2620     return ret;
2621   }
2622 
2623   template <typename set_t>
2624   void collect_coverage (set_t *glyphs) const
2625   {
2626     hb_collect_coverage_context_t<set_t> c (glyphs);
2627     dispatch (&c);
2628   }
2629 
2630   static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
2631 
2632   template <typename context_t>
2633   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
2634 
2635   HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
2636 
2637   template <typename context_t, typename ...Ts>
2638   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2639   { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
2640 
2641   bool subset (hb_subset_context_t *c) const
2642   { return Lookup::subset<SubTable> (c); }
2643 
2644   bool sanitize (hb_sanitize_context_t *c) const
2645   { return Lookup::sanitize<SubTable> (c); }
2646 };
2647 
2648 /*
2649  * GPOS -- Glyph Positioning
2650  * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
2651  */
2652 
2653 struct GPOS : GSUBGPOS
2654 {
2655   static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
2656 
2657   const PosLookup& get_lookup (unsigned int i) const
2658   { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
2659 
2660   static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
2661   static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
2662   static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
2663 
2664   bool subset (hb_subset_context_t *c) const
2665   {
2666     hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_features);
2667     return GSUBGPOS::subset<PosLookup> (&l);
2668   }
2669 
2670   bool sanitize (hb_sanitize_context_t *c) const
2671   { return GSUBGPOS::sanitize<PosLookup> (c); }
2672 
2673   HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
2674                                    hb_face_t *face) const;
2675 
2676   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2677   {
2678     for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
2679     {
2680       if (!c->gpos_lookups->has (i)) continue;
2681       const PosLookup &l = get_lookup (i);
2682       l.dispatch (c);
2683     }
2684   }
2685 
2686   void closure_lookups (hb_face_t      *face,
2687                         const hb_set_t *glyphs,
2688                         hb_set_t       *lookup_indexes /* IN/OUT */) const
2689   { GSUBGPOS::closure_lookups<PosLookup> (face, glyphs, lookup_indexes); }
2690 
2691   typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
2692 };
2693 
2694 
2695 static void
2696 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
2697 {
2698   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2699   if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
2700     return;
2701 
2702   pos[i].attach_chain() = 0;
2703 
2704   unsigned int j = (int) i + chain;
2705 
2706   /* Stop if we see new parent in the chain. */
2707   if (j == new_parent)
2708     return;
2709 
2710   reverse_cursive_minor_offset (pos, j, direction, new_parent);
2711 
2712   if (HB_DIRECTION_IS_HORIZONTAL (direction))
2713     pos[j].y_offset = -pos[i].y_offset;
2714   else
2715     pos[j].x_offset = -pos[i].x_offset;
2716 
2717   pos[j].attach_chain() = -chain;
2718   pos[j].attach_type() = type;
2719 }
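     /* reverse_cursive_minor_offset () re-roots an existing attachment chain:
      * starting from the glyph about to become a child, it walks the old chain,
      * flips each link's direction and negates the stored minor offsets so that
      * the old subtree now hangs off the new parent, stopping early if the new
      * parent itself shows up on the path (which would otherwise form a cycle). */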
2720 static void
2721 propagate_attachment_offsets (hb_glyph_position_t *pos,
2722                               unsigned int len,
2723                               unsigned int i,
2724                               hb_direction_t direction)
2725 {
2726   /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
2727    * the offset of the glyph they are attached to. */
2728   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2729   if (likely (!chain))
2730     return;
2731 
2732   pos[i].attach_chain() = 0;
2733 
2734   unsigned int j = (int) i + chain;
2735 
2736   if (unlikely (j >= len))
2737     return;
2738 
2739   propagate_attachment_offsets (pos, len, j, direction);
2740 
2741   assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));
2742 
2743   if (type & ATTACH_TYPE_CURSIVE)
2744   {
2745     if (HB_DIRECTION_IS_HORIZONTAL (direction))
2746       pos[i].y_offset += pos[j].y_offset;
2747     else
2748       pos[i].x_offset += pos[j].x_offset;
2749   }
2750   else /*if (type & ATTACH_TYPE_MARK)*/
2751   {
2752     pos[i].x_offset += pos[j].x_offset;
2753     pos[i].y_offset += pos[j].y_offset;
2754 
2755     assert (j < i);
2756     if (HB_DIRECTION_IS_FORWARD (direction))
2757       for (unsigned int k = j; k < i; k++) {
2758         pos[i].x_offset -= pos[k].x_advance;
2759         pos[i].y_offset -= pos[k].y_advance;
2760       }
2761     else
2762       for (unsigned int k = j + 1; k < i + 1; k++) {
2763         pos[i].x_offset += pos[k].x_advance;
2764         pos[i].y_offset += pos[k].y_advance;
2765       }
2766   }
2767 }
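/* Worked example (illustration only, numbers assumed): in a forward
 * horizontal run, let a mark at index 2 attach to a base at index 1
 * (attach_chain() == -1), with the base carrying x_offset == 10 and
 * x_advance == 600.  After the recursive call returns, the mark first picks
 * up the base's offsets (+10), then subtracts the advances of glyphs
 * k = 1..1 (-600), because the mark anchor was computed relative to the
 * base's origin rather than to the pen position after the base's advance. */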

void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
}

void
GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
  //_hb_buffer_assert_gsubgpos_vars (buffer);
}

void
GPOS::position_finish_offsets (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  _hb_buffer_assert_gsubgpos_vars (buffer);

  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle attachments */
  if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
    for (unsigned int i = 0; i < len; i++)
      propagate_attachment_offsets (pos, len, i, direction);
}
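
/* From the public-API side, all of the above runs inside hb_shape().
 * A minimal sketch (illustration only; assumes a valid "font.ttf" on disk,
 * followed by the usual hb_*_destroy() cleanup):
 *
 *   hb_blob_t   *blob   = hb_blob_create_from_file ("font.ttf");
 *   hb_face_t   *face   = hb_face_create (blob, 0);
 *   hb_font_t   *font   = hb_font_create (face);
 *   hb_buffer_t *buffer = hb_buffer_create ();
 *   hb_buffer_add_utf8 (buffer, "mark + base text", -1, 0, -1);
 *   hb_buffer_guess_segment_properties (buffer);
 *   hb_shape (font, buffer, nullptr, 0);   // GPOS positioning happens here
 *   unsigned int len;
 *   hb_glyph_position_t *positions = hb_buffer_get_glyph_positions (buffer, &len);
 */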


struct GPOS_accelerator_t : GPOS::accelerator_t {};


/* Out-of-class implementation for methods recursing */

#ifndef HB_NO_OT_LAYOUT
template <typename context_t>
/*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
  return l.dispatch (c);
}

/*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
  return l.closure_lookups (c, this_index);
}

/*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  unsigned int saved_lookup_index = c->lookup_index;
  c->set_lookup_index (lookup_index);
  c->set_lookup_props (l.get_props ());
  bool ret = l.dispatch (c);
  c->set_lookup_index (saved_lookup_index);
  c->set_lookup_props (saved_lookup_props);
  return ret;
}
#endif
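/* Note: these entry points let contextual and chaining-contextual positioning
 * lookups recurse into other lookups by index.  apply_recurse_func saves and
 * restores both the lookup index and the lookup props, since the nested
 * lookup may use different flags (e.g. a different mark-filtering set) than
 * its caller. */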


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */