1 /*
2  * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3  * Copyright © 2010,2012,2013  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
30 #define HB_OT_LAYOUT_GPOS_TABLE_HH
31 
32 #include "hb-ot-layout-gsubgpos.hh"
33 
34 
35 namespace OT {
36 
37 struct MarkArray;
38 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
39 						 const MarkArray &mark_array,
40 						 const hb_set_t  &glyphset,
41 						 hb_map_t*        klass_mapping /* INOUT */);
42 
43 /* buffer **position** var allocations */
44 #define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for forward. */
45 #define attach_type() var.u8[2] /* attachment type */
46 /* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
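/* For example, MarkArray::apply() below stores ATTACH_TYPE_MARK in attach_type()
 * and the (negative) distance from the mark back to its base in attach_chain();
 * once all lookups have run, these chains are walked to turn the relative
 * attachments into absolute glyph offsets. */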
47 
48 enum attach_type_t {
49   ATTACH_TYPE_NONE	= 0x00,
50 
51   /* Each attachment should be either a mark or a cursive; can't be both. */
52   ATTACH_TYPE_MARK	= 0x01,
53   ATTACH_TYPE_CURSIVE	= 0x02,
54 };
55 
56 
57 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */
58 
59 typedef HBUINT16 Value;
60 
61 typedef UnsizedArrayOf<Value> ValueRecord;
62 
63 struct ValueFormat : HBUINT16
64 {
65   enum Flags {
66     xPlacement	= 0x0001u,	/* Includes horizontal adjustment for placement */
67     yPlacement	= 0x0002u,	/* Includes vertical adjustment for placement */
68     xAdvance	= 0x0004u,	/* Includes horizontal adjustment for advance */
69     yAdvance	= 0x0008u,	/* Includes vertical adjustment for advance */
70     xPlaDevice	= 0x0010u,	/* Includes horizontal Device table for placement */
71     yPlaDevice	= 0x0020u,	/* Includes vertical Device table for placement */
72     xAdvDevice	= 0x0040u,	/* Includes horizontal Device table for advance */
73     yAdvDevice	= 0x0080u,	/* Includes vertical Device table for advance */
74     ignored	= 0x0F00u,	/* Was used in TrueType Open for MM fonts */
75     reserved	= 0xF000u,	/* For future use */
76 
77     devices	= 0x00F0u	/* Mask for having any Device table */
78   };
79 
80 /* All fields are optional.  Only those available advance the value pointer. */
81 #if 0
82   HBINT16		xPlacement;	/* Horizontal adjustment for
83 					 * placement--in design units */
84   HBINT16		yPlacement;	/* Vertical adjustment for
85 					 * placement--in design units */
86   HBINT16		xAdvance;	/* Horizontal adjustment for
87 					 * advance--in design units (only used
88 					 * for horizontal writing) */
89   HBINT16		yAdvance;	/* Vertical adjustment for advance--in
90 					 * design units (only used for vertical
91 					 * writing) */
92   Offset16To<Device>	xPlaDevice;	/* Offset to Device table for
93 					 * horizontal placement--measured from
94 					 * beginning of PosTable (may be NULL) */
95   Offset16To<Device>	yPlaDevice;	/* Offset to Device table for vertical
96 					 * placement--measured from beginning
97 					 * of PosTable (may be NULL) */
98   Offset16To<Device>	xAdvDevice;	/* Offset to Device table for
99 					 * horizontal advance--measured from
100 					 * beginning of PosTable (may be NULL) */
101   Offset16To<Device>	yAdvDevice;	/* Offset to Device table for vertical
102 					 * advance--measured from beginning of
103 					 * PosTable (may be NULL) */
104 #endif
105 
106   IntType& operator = (uint16_t i) { v = i; return *this; }
107 
108   unsigned int get_len () const  { return hb_popcount ((unsigned int) *this); }
109   unsigned int get_size () const { return get_len () * Value::static_size; }
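  /* Example: a ValueFormat of (xPlacement | xAdvance) == 0x0005 has two bits
   * set, so get_len () == 2 and get_size () == 4 bytes -- each set flag
   * contributes exactly one 16-bit Value to the ValueRecord. */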
110 
111   bool apply_value (hb_ot_apply_context_t *c,
112 		    const void            *base,
113 		    const Value           *values,
114 		    hb_glyph_position_t   &glyph_pos) const
115   {
116     bool ret = false;
117     unsigned int format = *this;
118     if (!format) return ret;
119 
120     hb_font_t *font = c->font;
121     bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
122 
123     if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++, &ret));
124     if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++, &ret));
125     if (format & xAdvance) {
126       if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
127       values++;
128     }
129     /* y_advance values grow downward but font-space grows upward, hence negation */
130     if (format & yAdvance) {
131       if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
132       values++;
133     }
134 
135     if (!has_device ()) return ret;
136 
137     bool use_x_device = font->x_ppem || font->num_coords;
138     bool use_y_device = font->y_ppem || font->num_coords;
139 
140     if (!use_x_device && !use_y_device) return ret;
141 
142     const VariationStore &store = c->var_store;
143 
144     /* pixel -> fractional pixel */
145     if (format & xPlaDevice) {
146       if (use_x_device) glyph_pos.x_offset  += (base + get_device (values, &ret)).get_x_delta (font, store);
147       values++;
148     }
149     if (format & yPlaDevice) {
150       if (use_y_device) glyph_pos.y_offset  += (base + get_device (values, &ret)).get_y_delta (font, store);
151       values++;
152     }
153     if (format & xAdvDevice) {
154       if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
155       values++;
156     }
157     if (format & yAdvDevice) {
158       /* y_advance values grow downward but font-space grows upward, hence negation */
159       if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
160       values++;
161     }
162     return ret;
163   }
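  /* Note: the return value ends up true iff at least one of the values that was
   * actually consulted above was non-zero (get_short / get_device report this
   * through `worked`); PairPos uses it to decide whether the pair needs an
   * unsafe_to_break() mark. */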
164 
165   unsigned int get_effective_format (const Value *values) const
166   {
167     unsigned int format = *this;
168     for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
169       if (format & flag) should_drop (*values++, (Flags) flag, &format);
170     }
171 
172     return format;
173   }
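  /* Example: with a nominal format of (xPlacement | yPlacement) and a record
   * whose values are { 0, -20 }, the effective format collapses to yPlacement
   * only, since zero fields carry no information. */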
174 
175   template<typename Iterator,
176       hb_requires (hb_is_iterator (Iterator))>
177   unsigned int get_effective_format (Iterator it) const {
178     unsigned int new_format = 0;
179 
180     for (const hb_array_t<const Value>& values : it)
181       new_format = new_format | get_effective_format (&values);
182 
183     return new_format;
184   }
185 
186   void copy_values (hb_serialize_context_t *c,
187                     unsigned int new_format,
188                     const void *base,
189                     const Value *values,
190                     const hb_map_t *layout_variation_idx_map) const
191   {
192     unsigned int format = *this;
193     if (!format) return;
194 
195     if (format & xPlacement) copy_value (c, new_format, xPlacement, *values++);
196     if (format & yPlacement) copy_value (c, new_format, yPlacement, *values++);
197     if (format & xAdvance)   copy_value (c, new_format, xAdvance, *values++);
198     if (format & yAdvance)   copy_value (c, new_format, yAdvance, *values++);
199 
200     if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
201     if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
202     if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
203     if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
204   }
205 
206   void copy_value (hb_serialize_context_t *c,
207                    unsigned int new_format,
208                    Flags flag,
209                    Value value) const
210   {
211     // Filter by new format.
212     if (!(new_format & flag)) return;
213     c->copy (value);
214   }
215 
216   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
217 				  const void *base,
218 				  const hb_array_t<const Value>& values) const
219   {
220     unsigned format = *this;
221     unsigned i = 0;
222     if (format & xPlacement) i++;
223     if (format & yPlacement) i++;
224     if (format & xAdvance) i++;
225     if (format & yAdvance) i++;
226     if (format & xPlaDevice)
227     {
228       (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
229       i++;
230     }
231 
232     if (format & ValueFormat::yPlaDevice)
233     {
234       (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
235       i++;
236     }
237 
238     if (format & ValueFormat::xAdvDevice)
239     {
240 
241       (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
242       i++;
243     }
244 
245     if (format & ValueFormat::yAdvDevice)
246     {
247 
248       (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
249       i++;
250     }
251   }
252 
253   private:
254   bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
255   {
256     unsigned int format = *this;
257 
258     if (format & xPlacement) values++;
259     if (format & yPlacement) values++;
260     if (format & xAdvance)   values++;
261     if (format & yAdvance)   values++;
262 
263     if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
264     if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
265     if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
266     if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
267 
268     return true;
269   }
270 
271   static inline Offset16To<Device>& get_device (Value* value)
272   {
273     return *static_cast<Offset16To<Device> *> (value);
274   }
275   static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
276   {
277     if (worked) *worked |= bool (*value);
278     return *static_cast<const Offset16To<Device> *> (value);
279   }
280 
281   bool copy_device (hb_serialize_context_t *c, const void *base,
282 		    const Value *src_value, const hb_map_t *layout_variation_idx_map) const
283   {
284     Value	*dst_value = c->copy (*src_value);
285 
286     if (!dst_value) return false;
287     if (*dst_value == 0) return true;
288 
289     *dst_value = 0;
290     c->push ();
291     if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
292     {
293       c->add_link (*dst_value, c->pop_pack ());
294       return true;
295     }
296     else
297     {
298       c->pop_discard ();
299       return false;
300     }
301   }
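  /* copy_device() copies the 16-bit offset as a placeholder, zeroes it, then
   * serializes the Device table into its own object and records a link, so the
   * real offset is patched in when the serializer packs everything. */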
302 
303   static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
304   {
305     if (worked) *worked |= bool (*value);
306     return *reinterpret_cast<const HBINT16 *> (value);
307   }
308 
309   public:
310 
311   bool has_device () const
312   {
313     unsigned int format = *this;
314     return (format & devices) != 0;
315   }
316 
317   bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
318   {
319     TRACE_SANITIZE (this);
320     return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
321   }
322 
323   bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
324   {
325     TRACE_SANITIZE (this);
326     unsigned int len = get_len ();
327 
328     if (!c->check_range (values, count, get_size ())) return_trace (false);
329 
330     if (!has_device ()) return_trace (true);
331 
332     for (unsigned int i = 0; i < count; i++) {
333       if (!sanitize_value_devices (c, base, values))
334 	return_trace (false);
335       values += len;
336     }
337 
338     return_trace (true);
339   }
340 
341   /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
342   bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
343   {
344     TRACE_SANITIZE (this);
345 
346     if (!has_device ()) return_trace (true);
347 
348     for (unsigned int i = 0; i < count; i++) {
349       if (!sanitize_value_devices (c, base, values))
350 	return_trace (false);
351       values += stride;
352     }
353 
354     return_trace (true);
355   }
356 
357  private:
358 
359   void should_drop (Value value, Flags flag, unsigned int* format) const
360   {
361     if (value) return;
362     *format = *format & ~flag;
363   }
364 
365 };
366 
367 template<typename Iterator, typename SrcLookup>
368 static void SinglePos_serialize (hb_serialize_context_t *c,
369 				 const SrcLookup *src,
370 				 Iterator it,
371 				 const hb_map_t *layout_variation_idx_map);
372 
373 
374 struct AnchorFormat1
375 {
376   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
377 		   float *x, float *y) const
378   {
379     hb_font_t *font = c->font;
380     *x = font->em_fscale_x (xCoordinate);
381     *y = font->em_fscale_y (yCoordinate);
382   }
383 
384   bool sanitize (hb_sanitize_context_t *c) const
385   {
386     TRACE_SANITIZE (this);
387     return_trace (c->check_struct (this));
388   }
389 
390   AnchorFormat1* copy (hb_serialize_context_t *c) const
391   {
392     TRACE_SERIALIZE (this);
393     AnchorFormat1* out = c->embed<AnchorFormat1> (this);
394     if (!out) return_trace (out);
395     out->format = 1;
396     return_trace (out);
397   }
398 
399   protected:
400   HBUINT16	format;			/* Format identifier--format = 1 */
401   FWORD		xCoordinate;		/* Horizontal value--in design units */
402   FWORD		yCoordinate;		/* Vertical value--in design units */
403   public:
404   DEFINE_SIZE_STATIC (6);
405 };
406 
407 struct AnchorFormat2
408 {
409   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
410 		   float *x, float *y) const
411   {
412     hb_font_t *font = c->font;
413 
414 #ifdef HB_NO_HINTING
415     *x = font->em_fscale_x (xCoordinate);
416     *y = font->em_fscale_y (yCoordinate);
417     return;
418 #endif
419 
420     unsigned int x_ppem = font->x_ppem;
421     unsigned int y_ppem = font->y_ppem;
422     hb_position_t cx = 0, cy = 0;
423     bool ret;
424 
425     ret = (x_ppem || y_ppem) &&
426 	  font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
427     *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
428     *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
429   }
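  /* With hinting enabled, the anchor is snapped to contour point `anchorPoint`
   * of the glyph (when the font can supply it for the ppem in use); otherwise
   * the design-space coordinates are scaled exactly as in format 1. */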
430 
431   bool sanitize (hb_sanitize_context_t *c) const
432   {
433     TRACE_SANITIZE (this);
434     return_trace (c->check_struct (this));
435   }
436 
437   AnchorFormat2* copy (hb_serialize_context_t *c) const
438   {
439     TRACE_SERIALIZE (this);
440     return_trace (c->embed<AnchorFormat2> (this));
441   }
442 
443   protected:
444   HBUINT16	format;			/* Format identifier--format = 2 */
445   FWORD		xCoordinate;		/* Horizontal value--in design units */
446   FWORD		yCoordinate;		/* Vertical value--in design units */
447   HBUINT16	anchorPoint;		/* Index to glyph contour point */
448   public:
449   DEFINE_SIZE_STATIC (8);
450 };
451 
452 struct AnchorFormat3
453 {
454   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
455 		   float *x, float *y) const
456   {
457     hb_font_t *font = c->font;
458     *x = font->em_fscale_x (xCoordinate);
459     *y = font->em_fscale_y (yCoordinate);
460 
461     if (font->x_ppem || font->num_coords)
462       *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
463     if (font->y_ppem || font->num_coords)
464       *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
465   }
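  /* The optional Device tables either tweak the anchor per ppem (hinting) or,
   * for variable fonts, act as VariationIndex entries resolved through
   * `c->var_store`. */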
466 
467   bool sanitize (hb_sanitize_context_t *c) const
468   {
469     TRACE_SANITIZE (this);
470     return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
471   }
472 
473   AnchorFormat3* copy (hb_serialize_context_t *c,
474 		       const hb_map_t *layout_variation_idx_map) const
475   {
476     TRACE_SERIALIZE (this);
477     if (!layout_variation_idx_map) return_trace (nullptr);
478 
479     auto *out = c->embed<AnchorFormat3> (this);
480     if (unlikely (!out)) return_trace (nullptr);
481 
482     out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
483     out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
484     return_trace (out);
485   }
486 
487   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
488   {
489     (this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
490     (this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
491   }
492 
493   protected:
494   HBUINT16	format;			/* Format identifier--format = 3 */
495   FWORD		xCoordinate;		/* Horizontal value--in design units */
496   FWORD		yCoordinate;		/* Vertical value--in design units */
497   Offset16To<Device>
498 		xDeviceTable;		/* Offset to Device table for X
499 					 * coordinate-- from beginning of
500 					 * Anchor table (may be NULL) */
501   Offset16To<Device>
502 		yDeviceTable;		/* Offset to Device table for Y
503 					 * coordinate-- from beginning of
504 					 * Anchor table (may be NULL) */
505   public:
506   DEFINE_SIZE_STATIC (10);
507 };
508 
509 struct Anchor
510 {
511   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
512 		   float *x, float *y) const
513   {
514     *x = *y = 0;
515     switch (u.format) {
516     case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
517     case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
518     case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
519     default:					      return;
520     }
521   }
522 
523   bool sanitize (hb_sanitize_context_t *c) const
524   {
525     TRACE_SANITIZE (this);
526     if (!u.format.sanitize (c)) return_trace (false);
527     switch (u.format) {
528     case 1: return_trace (u.format1.sanitize (c));
529     case 2: return_trace (u.format2.sanitize (c));
530     case 3: return_trace (u.format3.sanitize (c));
531     default:return_trace (true);
532     }
533   }
534 
535   bool subset (hb_subset_context_t *c) const
536   {
537     TRACE_SUBSET (this);
538     switch (u.format) {
539     case 1: return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
540     case 2:
541       if (c->plan->drop_hints)
542       {
543         // AnchorFormat2 just contains extra hinting information, so
544         // if hints are being dropped, convert to format 1.
545         return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
546       }
547       return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
548     case 3: return_trace (bool (reinterpret_cast<Anchor *> (u.format3.copy (c->serializer,
549                                                                             c->plan->layout_variation_idx_map))));
550     default:return_trace (false);
551     }
552   }
553 
554   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
555   {
556     switch (u.format) {
557     case 1: case 2:
558       return;
559     case 3:
560       u.format3.collect_variation_indices (c);
561       return;
562     default: return;
563     }
564   }
565 
566   protected:
567   union {
568   HBUINT16		format;		/* Format identifier */
569   AnchorFormat1		format1;
570   AnchorFormat2		format2;
571   AnchorFormat3		format3;
572   } u;
573   public:
574   DEFINE_SIZE_UNION (2, format);
575 };
576 
577 
578 struct AnchorMatrix
579 {
580   const Anchor& get_anchor (unsigned int row, unsigned int col,
581 			    unsigned int cols, bool *found) const
582   {
583     *found = false;
584     if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
585     *found = !matrixZ[row * cols + col].is_null ();
586     return this+matrixZ[row * cols + col];
587   }
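  /* `row` is the coverage index of the base/ligature/second-mark glyph and
   * `col` is the mark class; a null offset means "no anchor here", which is
   * reported through *found so the caller can let later subtables try. */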
588 
589   template <typename Iterator,
590 	    hb_requires (hb_is_iterator (Iterator))>
591   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
592 				  Iterator index_iter) const
593   {
594     for (unsigned i : index_iter)
595       (this+matrixZ[i]).collect_variation_indices (c);
596   }
597 
598   template <typename Iterator,
599       hb_requires (hb_is_iterator (Iterator))>
600   bool subset (hb_subset_context_t *c,
601                unsigned             num_rows,
602                Iterator             index_iter) const
603   {
604     TRACE_SUBSET (this);
605 
606     auto *out = c->serializer->start_embed (this);
607 
608     if (!index_iter) return_trace (false);
609     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
610 
611     out->rows = num_rows;
612     for (const unsigned i : index_iter)
613     {
614       auto *offset = c->serializer->embed (matrixZ[i]);
615       if (!offset) return_trace (false);
616       offset->serialize_subset (c, matrixZ[i], this);
617     }
618 
619     return_trace (true);
620   }
621 
622   bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
623   {
624     TRACE_SANITIZE (this);
625     if (!c->check_struct (this)) return_trace (false);
626     if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
627     unsigned int count = rows * cols;
628     if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
629     for (unsigned int i = 0; i < count; i++)
630       if (!matrixZ[i].sanitize (c, this)) return_trace (false);
631     return_trace (true);
632   }
633 
634   HBUINT16	rows;			/* Number of rows */
635   UnsizedArrayOf<Offset16To<Anchor>>
636 		matrixZ;		/* Matrix of offsets to Anchor tables--
637 					 * from beginning of AnchorMatrix table */
638   public:
639   DEFINE_SIZE_ARRAY (2, matrixZ);
640 };
641 
642 
643 struct MarkRecord
644 {
645   friend struct MarkArray;
646 
647   unsigned get_class () const { return (unsigned) klass; }
648   bool sanitize (hb_sanitize_context_t *c, const void *base) const
649   {
650     TRACE_SANITIZE (this);
651     return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
652   }
653 
654   MarkRecord *subset (hb_subset_context_t    *c,
655                       const void             *src_base,
656                       const hb_map_t         *klass_mapping) const
657   {
658     TRACE_SUBSET (this);
659     auto *out = c->serializer->embed (this);
660     if (unlikely (!out)) return_trace (nullptr);
661 
662     out->klass = klass_mapping->get (klass);
663     out->markAnchor.serialize_subset (c, markAnchor, src_base);
664     return_trace (out);
665   }
666 
667   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
668 				  const void *src_base) const
669   {
670     (src_base+markAnchor).collect_variation_indices (c);
671   }
672 
673   protected:
674   HBUINT16	klass;			/* Class defined for this mark */
675   Offset16To<Anchor>
676 		markAnchor;		/* Offset to Anchor table--from
677 					 * beginning of MarkArray table */
678   public:
679   DEFINE_SIZE_STATIC (4);
680 };
681 
682 struct MarkArray : Array16Of<MarkRecord>	/* Array of MarkRecords--in Coverage order */
683 {
684   bool apply (hb_ot_apply_context_t *c,
685 	      unsigned int mark_index, unsigned int glyph_index,
686 	      const AnchorMatrix &anchors, unsigned int class_count,
687 	      unsigned int glyph_pos) const
688   {
689     TRACE_APPLY (this);
690     hb_buffer_t *buffer = c->buffer;
691     const MarkRecord &record = Array16Of<MarkRecord>::operator[](mark_index);
692     unsigned int mark_class = record.klass;
693 
694     const Anchor& mark_anchor = this + record.markAnchor;
695     bool found;
696     const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
697     /* If this subtable doesn't have an anchor for this base and this class,
698      * return false such that the subsequent subtables have a chance at it. */
699     if (unlikely (!found)) return_trace (false);
700 
701     float mark_x, mark_y, base_x, base_y;
702 
703     buffer->unsafe_to_break (glyph_pos, buffer->idx);
704     mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
705     glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
706 
707     hb_glyph_position_t &o = buffer->cur_pos();
708     o.x_offset = roundf (base_x - mark_x);
709     o.y_offset = roundf (base_y - mark_y);
710     o.attach_type() = ATTACH_TYPE_MARK;
711     o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
712     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
713 
714     buffer->idx++;
715     return_trace (true);
716   }
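  /* The mark is positioned so its anchor coincides with the base anchor: only
   * the anchor difference is stored here; the base's own offset and the advance
   * span between base and mark are folded in later, when attachment chains are
   * resolved at the end of positioning. */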
717 
718   template <typename Iterator,
719       hb_requires (hb_is_iterator (Iterator))>
720   bool subset (hb_subset_context_t *c,
721                Iterator		    coverage,
722                const hb_map_t      *klass_mapping) const
723   {
724     TRACE_SUBSET (this);
725     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
726 
727     auto* out = c->serializer->start_embed (this);
728     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
729 
730     auto mark_iter =
731     + hb_zip (coverage, this->iter ())
732     | hb_filter (glyphset, hb_first)
733     | hb_map (hb_second)
734     ;
735 
736     unsigned new_length = 0;
737     for (const auto& mark_record : mark_iter) {
738       if (unlikely (!mark_record.subset (c, this, klass_mapping)))
739         return_trace (false);
740       new_length++;
741     }
742 
743     if (unlikely (!c->serializer->check_assign (out->len, new_length,
744                                                 HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
745       return_trace (false);
746 
747     return_trace (true);
748   }
749 
750   bool sanitize (hb_sanitize_context_t *c) const
751   {
752     TRACE_SANITIZE (this);
753     return_trace (Array16Of<MarkRecord>::sanitize (c, this));
754   }
755 };
756 
757 
758 /* Lookups */
759 
760 struct SinglePosFormat1
761 {
762   bool intersects (const hb_set_t *glyphs) const
763   { return (this+coverage).intersects (glyphs); }
764 
765   void closure_lookups (hb_closure_lookups_context_t *c) const {}
766   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
767   {
768     if (!valueFormat.has_device ()) return;
769 
770     auto it =
771     + hb_iter (this+coverage)
772     | hb_filter (c->glyph_set)
773     ;
774 
775     if (!it) return;
776     valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
777   }
778 
779   void collect_glyphs (hb_collect_glyphs_context_t *c) const
780   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
781 
782   const Coverage &get_coverage () const { return this+coverage; }
783 
784   ValueFormat get_value_format () const { return valueFormat; }
785 
786   bool apply (hb_ot_apply_context_t *c) const
787   {
788     TRACE_APPLY (this);
789     hb_buffer_t *buffer = c->buffer;
790     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
791     if (likely (index == NOT_COVERED)) return_trace (false);
792 
793     valueFormat.apply_value (c, this, values, buffer->cur_pos());
794 
795     buffer->idx++;
796     return_trace (true);
797   }
798 
799   template<typename Iterator,
800       typename SrcLookup,
801       hb_requires (hb_is_iterator (Iterator))>
802   void serialize (hb_serialize_context_t *c,
803 		  const SrcLookup *src,
804 		  Iterator it,
805 		  ValueFormat newFormat,
806 		  const hb_map_t *layout_variation_idx_map)
807   {
808     if (unlikely (!c->extend_min (*this))) return;
809     if (unlikely (!c->check_assign (valueFormat,
810                                     newFormat,
811                                     HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
812 
813     for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
814     {
815       src->get_value_format ().copy_values (c, newFormat, src,  &_, layout_variation_idx_map);
816       // Only serialize the first entry in the iterator, the rest are assumed to
817       // be the same.
818       break;
819     }
820 
821     auto glyphs =
822     + it
823     | hb_map_retains_sorting (hb_first)
824     ;
825 
826     // TODO(garretrieger): serialize_subset this.
827     coverage.serialize (c, this).serialize (c, glyphs);
828   }
829 
830   bool subset (hb_subset_context_t *c) const
831   {
832     TRACE_SUBSET (this);
833     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
834     const hb_map_t &glyph_map = *c->plan->glyph_map;
835 
836     auto it =
837     + hb_iter (this+coverage)
838     | hb_filter (glyphset)
839     | hb_map_retains_sorting (glyph_map)
840     | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
841     ;
842 
843     bool ret = bool (it);
844     SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
845     return_trace (ret);
846   }
847 
848   bool sanitize (hb_sanitize_context_t *c) const
849   {
850     TRACE_SANITIZE (this);
851     return_trace (c->check_struct (this) &&
852 		  coverage.sanitize (c, this) &&
853 		  valueFormat.sanitize_value (c, this, values));
854   }
855 
856   protected:
857   HBUINT16	format;			/* Format identifier--format = 1 */
858   Offset16To<Coverage>
859 		coverage;		/* Offset to Coverage table--from
860 					 * beginning of subtable */
861   ValueFormat	valueFormat;		/* Defines the types of data in the
862 					 * ValueRecord */
863   ValueRecord	values;			/* Defines positioning
864 					 * value(s)--applied to all glyphs in
865 					 * the Coverage table */
866   public:
867   DEFINE_SIZE_ARRAY (6, values);
868 };
869 
870 struct SinglePosFormat2
871 {
872   bool intersects (const hb_set_t *glyphs) const
873   { return (this+coverage).intersects (glyphs); }
874 
875   void closure_lookups (hb_closure_lookups_context_t *c) const {}
876   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
877   {
878     if (!valueFormat.has_device ()) return;
879 
880     auto it =
881     + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
882     | hb_filter (c->glyph_set, hb_first)
883     ;
884 
885     if (!it) return;
886 
887     unsigned sub_length = valueFormat.get_len ();
888     const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);
889 
890     for (unsigned i : + it
891 		      | hb_map (hb_second))
892       valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));
893 
894   }
895 
896   void collect_glyphs (hb_collect_glyphs_context_t *c) const
897   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
898 
899   const Coverage &get_coverage () const { return this+coverage; }
900 
901   ValueFormat get_value_format () const { return valueFormat; }
902 
903   bool apply (hb_ot_apply_context_t *c) const
904   {
905     TRACE_APPLY (this);
906     hb_buffer_t *buffer = c->buffer;
907     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
908     if (likely (index == NOT_COVERED)) return_trace (false);
909 
910     if (likely (index >= valueCount)) return_trace (false);
911 
912     valueFormat.apply_value (c, this,
913 			     &values[index * valueFormat.get_len ()],
914 			     buffer->cur_pos());
915 
916     buffer->idx++;
917     return_trace (true);
918   }
919 
920   template<typename Iterator,
921       typename SrcLookup,
922       hb_requires (hb_is_iterator (Iterator))>
923   void serialize (hb_serialize_context_t *c,
924 		  const SrcLookup *src,
925 		  Iterator it,
926 		  ValueFormat newFormat,
927 		  const hb_map_t *layout_variation_idx_map)
928   {
929     auto out = c->extend_min (*this);
930     if (unlikely (!out)) return;
931     if (unlikely (!c->check_assign (valueFormat, newFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
932     if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;
933 
934     + it
935     | hb_map (hb_second)
936     | hb_apply ([&] (hb_array_t<const Value> _)
937     { src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map); })
938     ;
939 
940     auto glyphs =
941     + it
942     | hb_map_retains_sorting (hb_first)
943     ;
944 
945     coverage.serialize (c, this).serialize (c, glyphs);
946   }
947 
948   bool subset (hb_subset_context_t *c) const
949   {
950     TRACE_SUBSET (this);
951     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
952     const hb_map_t &glyph_map = *c->plan->glyph_map;
953 
954     unsigned sub_length = valueFormat.get_len ();
955     auto values_array = values.as_array (valueCount * sub_length);
956 
957     auto it =
958     + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
959     | hb_filter (glyphset, hb_first)
960     | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
961 			      {
962 				return hb_pair (glyph_map[_.first],
963 						values_array.sub_array (_.second * sub_length,
964 									sub_length));
965 			      })
966     ;
967 
968     bool ret = bool (it);
969     SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
970     return_trace (ret);
971   }
972 
973   bool sanitize (hb_sanitize_context_t *c) const
974   {
975     TRACE_SANITIZE (this);
976     return_trace (c->check_struct (this) &&
977 		  coverage.sanitize (c, this) &&
978 		  valueFormat.sanitize_values (c, this, values, valueCount));
979   }
980 
981   protected:
982   HBUINT16	format;			/* Format identifier--format = 2 */
983   Offset16To<Coverage>
984 		coverage;		/* Offset to Coverage table--from
985 					 * beginning of subtable */
986   ValueFormat	valueFormat;		/* Defines the types of data in the
987 					 * ValueRecord */
988   HBUINT16	valueCount;		/* Number of ValueRecords */
989   ValueRecord	values;			/* Array of ValueRecords--positioning
990 					 * values applied to glyphs */
991   public:
992   DEFINE_SIZE_ARRAY (8, values);
993 };
994 
995 struct SinglePos
996 {
997   template<typename Iterator,
998 	   hb_requires (hb_is_iterator (Iterator))>
999   unsigned get_format (Iterator glyph_val_iter_pairs)
1000   {
1001     hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);
1002 
1003     for (const auto iter : glyph_val_iter_pairs)
1004       for (const auto _ : hb_zip (iter.second, first_val_iter))
1005 	if (_.first != _.second)
1006 	  return 2;
1007 
1008     return 1;
1009   }
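  /* Example: { A:(+10,0), B:(+10,0) } serializes as format 1 (one shared
   * ValueRecord), while { A:(+10,0), B:(+20,0) } needs format 2 (one
   * ValueRecord per covered glyph). */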
1010 
1011 
1012   template<typename Iterator,
1013       typename SrcLookup,
1014       hb_requires (hb_is_iterator (Iterator))>
1015   void serialize (hb_serialize_context_t *c,
1016 		  const SrcLookup* src,
1017 		  Iterator glyph_val_iter_pairs,
1018 		  const hb_map_t *layout_variation_idx_map)
1019   {
1020     if (unlikely (!c->extend_min (u.format))) return;
1021     unsigned format = 2;
1022     ValueFormat new_format = src->get_value_format ();
1023 
1024     if (glyph_val_iter_pairs)
1025     {
1026       format = get_format (glyph_val_iter_pairs);
1027       new_format = src->get_value_format ().get_effective_format (+ glyph_val_iter_pairs | hb_map (hb_second));
1028     }
1029 
1030     u.format = format;
1031     switch (u.format) {
1032     case 1: u.format1.serialize (c,
1033                                  src,
1034                                  glyph_val_iter_pairs,
1035                                  new_format,
1036                                  layout_variation_idx_map);
1037       return;
1038     case 2: u.format2.serialize (c,
1039                                  src,
1040                                  glyph_val_iter_pairs,
1041                                  new_format,
1042                                  layout_variation_idx_map);
1043       return;
1044     default:return;
1045     }
1046   }
1047 
1048   template <typename context_t, typename ...Ts>
1049   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1050   {
1051     TRACE_DISPATCH (this, u.format);
1052     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1053     switch (u.format) {
1054     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1055     case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
1056     default:return_trace (c->default_return_value ());
1057     }
1058   }
1059 
1060   protected:
1061   union {
1062   HBUINT16		format;		/* Format identifier */
1063   SinglePosFormat1	format1;
1064   SinglePosFormat2	format2;
1065   } u;
1066 };
1067 
1068 template<typename Iterator, typename SrcLookup>
1069 static void
1070 SinglePos_serialize (hb_serialize_context_t *c,
1071 		     const SrcLookup *src,
1072 		     Iterator it,
1073 		     const hb_map_t *layout_variation_idx_map)
1074 { c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_map); }
1075 
1076 
1077 struct PairValueRecord
1078 {
1079   friend struct PairSet;
1080 
1081   int cmp (hb_codepoint_t k) const
1082   { return secondGlyph.cmp (k); }
1083 
1084   struct context_t
1085   {
1086     const void 		*base;
1087     const ValueFormat	*valueFormats;
1088     const ValueFormat	*newFormats;
1089     unsigned		len1; /* valueFormats[0].get_len() */
1090     const hb_map_t 	*glyph_map;
1091     const hb_map_t      *layout_variation_idx_map;
1092   };
1093 
1094   bool subset (hb_subset_context_t *c,
1095                context_t *closure) const
1096   {
1097     TRACE_SERIALIZE (this);
1098     auto *s = c->serializer;
1099     auto *out = s->start_embed (*this);
1100     if (unlikely (!s->extend_min (out))) return_trace (false);
1101 
1102     out->secondGlyph = (*closure->glyph_map)[secondGlyph];
1103 
1104     closure->valueFormats[0].copy_values (s,
1105                                           closure->newFormats[0],
1106                                           closure->base, &values[0],
1107                                           closure->layout_variation_idx_map);
1108     closure->valueFormats[1].copy_values (s,
1109                                           closure->newFormats[1],
1110                                           closure->base,
1111                                           &values[closure->len1],
1112                                           closure->layout_variation_idx_map);
1113 
1114     return_trace (true);
1115   }
1116 
1117   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
1118 				  const ValueFormat *valueFormats,
1119 				  const void *base) const
1120   {
1121     unsigned record1_len = valueFormats[0].get_len ();
1122     unsigned record2_len = valueFormats[1].get_len ();
1123     const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);
1124 
1125     if (valueFormats[0].has_device ())
1126       valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));
1127 
1128     if (valueFormats[1].has_device ())
1129       valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
1130   }
1131 
1132   bool intersects (const hb_set_t& glyphset) const
1133   {
1134     return glyphset.has(secondGlyph);
1135   }
1136 
1137   const Value* get_values_1 () const
1138   {
1139     return &values[0];
1140   }
1141 
1142   const Value* get_values_2 (ValueFormat format1) const
1143   {
1144     return &values[format1.get_len ()];
1145   }
1146 
1147   protected:
1148   HBGlyphID	secondGlyph;		/* GlyphID of second glyph in the
1149 					 * pair--first glyph is listed in the
1150 					 * Coverage table */
1151   ValueRecord	values;			/* Positioning data for the first glyph
1152 					 * followed by for second glyph */
1153   public:
1154   DEFINE_SIZE_ARRAY (2, values);
1155 };
1156 
1157 struct PairSet
1158 {
1159   friend struct PairPosFormat1;
1160 
1161   bool intersects (const hb_set_t *glyphs,
1162 		   const ValueFormat *valueFormats) const
1163   {
1164     unsigned int len1 = valueFormats[0].get_len ();
1165     unsigned int len2 = valueFormats[1].get_len ();
1166     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
1167 
1168     const PairValueRecord *record = &firstPairValueRecord;
1169     unsigned int count = len;
1170     for (unsigned int i = 0; i < count; i++)
1171     {
1172       if (glyphs->has (record->secondGlyph))
1173 	return true;
1174       record = &StructAtOffset<const PairValueRecord> (record, record_size);
1175     }
1176     return false;
1177   }
1178 
1179   void collect_glyphs (hb_collect_glyphs_context_t *c,
1180 		       const ValueFormat *valueFormats) const
1181   {
1182     unsigned int len1 = valueFormats[0].get_len ();
1183     unsigned int len2 = valueFormats[1].get_len ();
1184     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
1185 
1186     const PairValueRecord *record = &firstPairValueRecord;
1187     c->input->add_array (&record->secondGlyph, len, record_size);
1188   }
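  /* add_array() with a stride of record_size picks out just the secondGlyph
   * field of each variable-sized PairValueRecord. */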
1189 
1190   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
1191 				  const ValueFormat *valueFormats) const
1192   {
1193     unsigned len1 = valueFormats[0].get_len ();
1194     unsigned len2 = valueFormats[1].get_len ();
1195     unsigned record_size = HBUINT16::static_size * (1 + len1 + len2);
1196 
1197     const PairValueRecord *record = &firstPairValueRecord;
1198     unsigned count = len;
1199     for (unsigned i = 0; i < count; i++)
1200     {
1201       if (c->glyph_set->has (record->secondGlyph))
1202       { record->collect_variation_indices (c, valueFormats, this); }
1203 
1204       record = &StructAtOffset<const PairValueRecord> (record, record_size);
1205     }
1206   }
1207 
1208   bool apply (hb_ot_apply_context_t *c,
1209 	      const ValueFormat *valueFormats,
1210 	      unsigned int pos) const
1211   {
1212     TRACE_APPLY (this);
1213     hb_buffer_t *buffer = c->buffer;
1214     unsigned int len1 = valueFormats[0].get_len ();
1215     unsigned int len2 = valueFormats[1].get_len ();
1216     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
1217 
1218     const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
1219 						&firstPairValueRecord,
1220 						len,
1221 						record_size);
1222     if (record)
1223     {
1224       /* Note the intentional use of "|" instead of short-circuit "||". */
1225       if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
1226 	  valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
1227 	buffer->unsafe_to_break (buffer->idx, pos + 1);
1228       if (len2)
1229 	pos++;
1230       buffer->idx = pos;
1231       return_trace (true);
1232     }
1233     return_trace (false);
1234   }
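  /* Records are sorted by secondGlyph, so hb_bsearch over the variable-sized
   * records (record_size bytes apart) finds the pair.  When the second value
   * record is non-empty (len2), the second glyph is consumed too; otherwise it
   * stays current so it can start another pair. */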
1235 
1236   bool subset (hb_subset_context_t *c,
1237 	       const ValueFormat valueFormats[2],
1238                const ValueFormat newFormats[2]) const
1239   {
1240     TRACE_SUBSET (this);
1241     auto snap = c->serializer->snapshot ();
1242 
1243     auto *out = c->serializer->start_embed (*this);
1244     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1245     out->len = 0;
1246 
1247     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1248     const hb_map_t &glyph_map = *c->plan->glyph_map;
1249 
1250     unsigned len1 = valueFormats[0].get_len ();
1251     unsigned len2 = valueFormats[1].get_len ();
1252     unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
1253 
1254     PairValueRecord::context_t context =
1255     {
1256       this,
1257       valueFormats,
1258       newFormats,
1259       len1,
1260       &glyph_map,
1261       c->plan->layout_variation_idx_map
1262     };
1263 
1264     const PairValueRecord *record = &firstPairValueRecord;
1265     unsigned count = len, num = 0;
1266     for (unsigned i = 0; i < count; i++)
1267     {
1268       if (glyphset.has (record->secondGlyph)
1269 	 && record->subset (c, &context)) num++;
1270       record = &StructAtOffset<const PairValueRecord> (record, record_size);
1271     }
1272 
1273     out->len = num;
1274     if (!num) c->serializer->revert (snap);
1275     return_trace (num);
1276   }
1277 
1278   struct sanitize_closure_t
1279   {
1280     const ValueFormat *valueFormats;
1281     unsigned int len1; /* valueFormats[0].get_len() */
1282     unsigned int stride; /* 1 + len1 + len2 */
1283   };
1284 
1285   bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
1286   {
1287     TRACE_SANITIZE (this);
1288     if (!(c->check_struct (this)
1289        && c->check_range (&firstPairValueRecord,
1290 			  len,
1291 			  HBUINT16::static_size,
1292 			  closure->stride))) return_trace (false);
1293 
1294     unsigned int count = len;
1295     const PairValueRecord *record = &firstPairValueRecord;
1296     return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
1297 		  closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
1298   }
1299 
1300   protected:
1301   HBUINT16		len;	/* Number of PairValueRecords */
1302   PairValueRecord	firstPairValueRecord;
1303 				/* Array of PairValueRecords--ordered
1304 				 * by GlyphID of the second glyph */
1305   public:
1306   DEFINE_SIZE_MIN (2);
1307 };
1308 
1309 struct PairPosFormat1
1310 {
1311   bool intersects (const hb_set_t *glyphs) const
1312   {
1313     return
1314     + hb_zip (this+coverage, pairSet)
1315     | hb_filter (*glyphs, hb_first)
1316     | hb_map (hb_second)
1317     | hb_map ([glyphs, this] (const Offset16To<PairSet> &_)
1318 	      { return (this+_).intersects (glyphs, valueFormat); })
1319     | hb_any
1320     ;
1321   }
1322 
1323   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1324   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1325   {
1326     if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;
1327 
1328     auto it =
1329     + hb_zip (this+coverage, pairSet)
1330     | hb_filter (c->glyph_set, hb_first)
1331     | hb_map (hb_second)
1332     ;
1333 
1334     if (!it) return;
1335     + it
1336     | hb_map (hb_add (this))
1337     | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
1338     ;
1339   }
1340 
1341   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1342   {
1343     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1344     unsigned int count = pairSet.len;
1345     for (unsigned int i = 0; i < count; i++)
1346       (this+pairSet[i]).collect_glyphs (c, valueFormat);
1347   }
1348 
1349   const Coverage &get_coverage () const { return this+coverage; }
1350 
1351   bool apply (hb_ot_apply_context_t *c) const
1352   {
1353     TRACE_APPLY (this);
1354     hb_buffer_t *buffer = c->buffer;
1355     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1356     if (likely (index == NOT_COVERED)) return_trace (false);
1357 
1358     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1359     skippy_iter.reset (buffer->idx, 1);
1360     if (!skippy_iter.next ()) return_trace (false);
1361 
1362     return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
1363   }
1364 
1365   bool subset (hb_subset_context_t *c) const
1366   {
1367     TRACE_SUBSET (this);
1368 
1369     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1370     const hb_map_t &glyph_map = *c->plan->glyph_map;
1371 
1372     auto *out = c->serializer->start_embed (*this);
1373     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1374     out->format = format;
1375     out->valueFormat[0] = valueFormat[0];
1376     out->valueFormat[1] = valueFormat[1];
1377     if (c->plan->drop_hints)
1378     {
1379       hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
1380       out->valueFormat[0] = newFormats.first;
1381       out->valueFormat[1] = newFormats.second;
1382     }
1383 
1384     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1385 
1386     + hb_zip (this+coverage, pairSet)
1387     | hb_filter (glyphset, hb_first)
1388     | hb_filter ([this, c, out] (const Offset16To<PairSet>& _)
1389 		 {
1390 		   auto *o = out->pairSet.serialize_append (c->serializer);
1391 		   if (unlikely (!o)) return false;
1392 		   auto snap = c->serializer->snapshot ();
1393 		   bool ret = o->serialize_subset (c, _, this, valueFormat, out->valueFormat);
1394 		   if (!ret)
1395 		   {
1396 		     out->pairSet.pop ();
1397 		     c->serializer->revert (snap);
1398 		   }
1399 		   return ret;
1400 		 },
1401 		 hb_second)
1402     | hb_map (hb_first)
1403     | hb_map (glyph_map)
1404     | hb_sink (new_coverage)
1405     ;
1406 
1407     out->coverage.serialize (c->serializer, out)
1408 		 .serialize (c->serializer, new_coverage.iter ());
1409 
1410     return_trace (bool (new_coverage));
1411   }
1412 
1413 
1414   hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
1415   {
1416     unsigned len1 = valueFormat[0].get_len ();
1417     unsigned len2 = valueFormat[1].get_len ();
1418     unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
1419 
1420     unsigned format1 = 0;
1421     unsigned format2 = 0;
1422     for (const Offset16To<PairSet>& _ :
1423              + hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
1424     {
1425       const PairSet& set = (this + _);
1426       const PairValueRecord *record = &set.firstPairValueRecord;
1427 
1428       for (unsigned i = 0; i < set.len; i++)
1429       {
1430         if (record->intersects (glyphset))
1431         {
1432           format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
1433           format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
1434         }
1435         record = &StructAtOffset<const PairValueRecord> (record, record_size);
1436       }
1437     }
1438 
1439     return hb_pair (format1, format2);
1440   }
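  /* Used when hints are dropped during subsetting: the surviving pairs are
   * scanned and any value field that is zero in every retained record is
   * dropped from the ValueFormats of the subsetted table. */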
1441 
1442 
1443   bool sanitize (hb_sanitize_context_t *c) const
1444   {
1445     TRACE_SANITIZE (this);
1446 
1447     if (!c->check_struct (this)) return_trace (false);
1448 
1449     unsigned int len1 = valueFormat[0].get_len ();
1450     unsigned int len2 = valueFormat[1].get_len ();
1451     PairSet::sanitize_closure_t closure =
1452     {
1453       valueFormat,
1454       len1,
1455       1 + len1 + len2
1456     };
1457 
1458     return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
1459   }
1460 
1461   protected:
1462   HBUINT16	format;			/* Format identifier--format = 1 */
1463   Offset16To<Coverage>
1464 		coverage;		/* Offset to Coverage table--from
1465 					 * beginning of subtable */
1466   ValueFormat	valueFormat[2];		/* [0] Defines the types of data in
1467 					 * ValueRecord1--for the first glyph
1468 					 * in the pair--may be zero (0) */
1469 					/* [1] Defines the types of data in
1470 					 * ValueRecord2--for the second glyph
1471 					 * in the pair--may be zero (0) */
1472   Array16OfOffset16To<PairSet>
1473 		pairSet;		/* Array of PairSet tables
1474 					 * ordered by Coverage Index */
1475   public:
1476   DEFINE_SIZE_ARRAY (10, pairSet);
1477 };
1478 
1479 struct PairPosFormat2
1480 {
1481   bool intersects (const hb_set_t *glyphs) const
1482   {
1483     return (this+coverage).intersects (glyphs) &&
1484 	   (this+classDef2).intersects (glyphs);
1485   }
1486 
1487   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1488   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1489   {
1490     if (!intersects (c->glyph_set)) return;
1491     if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;
1492 
1493     hb_set_t klass1_glyphs, klass2_glyphs;
1494     if (!(this+classDef1).collect_coverage (&klass1_glyphs)) return;
1495     if (!(this+classDef2).collect_coverage (&klass2_glyphs)) return;
1496 
1497     hb_set_t class1_set, class2_set;
1498     for (const unsigned cp : + c->glyph_set->iter () | hb_filter (this + coverage))
1499     {
1500       if (!klass1_glyphs.has (cp)) class1_set.add (0);
1501       else
1502       {
1503         unsigned klass1 = (this+classDef1).get (cp);
1504         class1_set.add (klass1);
1505       }
1506     }
1507 
1508     class2_set.add (0);
1509     for (const unsigned cp : + c->glyph_set->iter () | hb_filter (klass2_glyphs))
1510     {
1511       unsigned klass2 = (this+classDef2).get (cp);
1512       class2_set.add (klass2);
1513     }
1514 
1515     if (class1_set.is_empty ()
1516         || class2_set.is_empty ()
1517         || (class2_set.get_population() == 1 && class2_set.has(0)))
1518       return;
1519 
1520     unsigned len1 = valueFormat1.get_len ();
1521     unsigned len2 = valueFormat2.get_len ();
1522     const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
1523     for (const unsigned class1_idx : class1_set.iter ())
1524     {
1525       for (const unsigned class2_idx : class2_set.iter ())
1526       {
1527 	unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1528 	if (valueFormat1.has_device ())
1529 	  valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));
1530 
1531 	if (valueFormat2.has_device ())
1532 	  valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
1533       }
1534     }
1535   }
1536 
1537   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1538   {
1539     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1540     if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
1541   }
1542 
1543   const Coverage &get_coverage () const { return this+coverage; }
1544 
1545   bool apply (hb_ot_apply_context_t *c) const
1546   {
1547     TRACE_APPLY (this);
1548     hb_buffer_t *buffer = c->buffer;
1549     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1550     if (likely (index == NOT_COVERED)) return_trace (false);
1551 
1552     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1553     skippy_iter.reset (buffer->idx, 1);
1554     if (!skippy_iter.next ()) return_trace (false);
1555 
1556     unsigned int len1 = valueFormat1.get_len ();
1557     unsigned int len2 = valueFormat2.get_len ();
1558     unsigned int record_len = len1 + len2;
1559 
1560     unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
1561     unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
1562     if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
1563 
1564     const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
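    /* Illustrative note: values is a class1-major matrix, so the record for
     * (klass1, klass2) starts at (klass1 * class2Count + klass2) * record_len.
     * E.g. with class2Count = 3 and record_len = 2, the pair (2, 1) starts at
     * Value index (2 * 3 + 1) * 2 = 14. */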
1565     /* Note the intentional use of "|" instead of short-circuit "||". */
1566     if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
1567 	valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
1568       buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
1569 
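    /* A sketch of the advance logic below: the first glyph of the pair is done,
     * so move to the second one; if the second glyph received its own value
     * record (len2 != 0) it has been positioned here as well and is skipped,
     * otherwise it stays current so it can still act as the first glyph of a
     * following pair. */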
1570     buffer->idx = skippy_iter.idx;
1571     if (len2)
1572       buffer->idx++;
1573 
1574     return_trace (true);
1575   }
1576 
1577   bool subset (hb_subset_context_t *c) const
1578   {
1579     TRACE_SUBSET (this);
1580     auto *out = c->serializer->start_embed (*this);
1581     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1582     out->format = format;
1583 
1584     hb_map_t klass1_map;
1585     out->classDef1.serialize_subset (c, classDef1, this, &klass1_map, true, true, &(this + coverage));
1586     out->class1Count = klass1_map.get_population ();
1587 
1588     hb_map_t klass2_map;
1589     out->classDef2.serialize_subset (c, classDef2, this, &klass2_map, true, false);
1590     out->class2Count = klass2_map.get_population ();
1591 
1592     unsigned len1 = valueFormat1.get_len ();
1593     unsigned len2 = valueFormat2.get_len ();
1594 
1595     hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
1596     if (c->plan->drop_hints)
1597       newFormats = compute_effective_value_formats (klass1_map, klass2_map);
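    /* Illustrative note: when hints are dropped, compute_effective_value_formats
     * ORs together the format bits actually carrying data in the retained class
     * pairs, so value-format bits that no surviving record uses are omitted from
     * the subset output. */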
1598 
1599     out->valueFormat1 = newFormats.first;
1600     out->valueFormat2 = newFormats.second;
1601 
1602     for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
1603     {
1604       for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
1605       {
1606         unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1607         valueFormat1.copy_values (c->serializer, newFormats.first, this, &values[idx], c->plan->layout_variation_idx_map);
1608         valueFormat2.copy_values (c->serializer, newFormats.second, this, &values[idx + len1], c->plan->layout_variation_idx_map);
1609       }
1610     }
1611 
1612     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1613     const hb_map_t &glyph_map = *c->plan->glyph_map;
1614 
1615     auto it =
1616     + hb_iter (this+coverage)
1617     | hb_filter (glyphset)
1618     | hb_map_retains_sorting (glyph_map)
1619     ;
1620 
1621     out->coverage.serialize (c->serializer, out).serialize (c->serializer, it);
1622     return_trace (out->class1Count && out->class2Count && bool (it));
1623   }
1624 
1625 
1626   hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
1627                                                                  const hb_map_t& klass2_map) const
1628   {
1629     unsigned len1 = valueFormat1.get_len ();
1630     unsigned len2 = valueFormat2.get_len ();
1631 
1632     unsigned format1 = 0;
1633     unsigned format2 = 0;
1634 
1635     for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
1636     {
1637       for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
1638       {
1639         unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1640         format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
1641         format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
1642       }
1643     }
1644 
1645     return hb_pair (format1, format2);
1646   }
1647 
1648 
1649   bool sanitize (hb_sanitize_context_t *c) const
1650   {
1651     TRACE_SANITIZE (this);
1652     if (!(c->check_struct (this)
1653        && coverage.sanitize (c, this)
1654        && classDef1.sanitize (c, this)
1655        && classDef2.sanitize (c, this))) return_trace (false);
1656 
1657     unsigned int len1 = valueFormat1.get_len ();
1658     unsigned int len2 = valueFormat2.get_len ();
1659     unsigned int stride = len1 + len2;
1660     unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
1661     unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
1662     return_trace (c->check_range ((const void *) values,
1663 				  count,
1664 				  record_size) &&
1665 		  valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
1666 		  valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
1667   }
1668 
1669   protected:
1670   HBUINT16	format;			/* Format identifier--format = 2 */
1671   Offset16To<Coverage>
1672 		coverage;		/* Offset to Coverage table--from
1673 					 * beginning of subtable */
1674   ValueFormat	valueFormat1;		/* ValueRecord definition--for the
1675 					 * first glyph of the pair--may be zero
1676 					 * (0) */
1677   ValueFormat	valueFormat2;		/* ValueRecord definition--for the
1678 					 * second glyph of the pair--may be
1679 					 * zero (0) */
1680   Offset16To<ClassDef>
1681 		classDef1;		/* Offset to ClassDef table--from
1682 					 * beginning of PairPos subtable--for
1683 					 * the first glyph of the pair */
1684   Offset16To<ClassDef>
1685 		classDef2;		/* Offset to ClassDef table--from
1686 					 * beginning of PairPos subtable--for
1687 					 * the second glyph of the pair */
1688   HBUINT16	class1Count;		/* Number of classes in ClassDef1
1689 					 * table--includes Class0 */
1690   HBUINT16	class2Count;		/* Number of classes in ClassDef2
1691 					 * table--includes Class0 */
1692   ValueRecord	values;			/* Matrix of value pairs:
1693 					 * class1-major, class2-minor,
1694 					 * Each entry has value1 and value2 */
1695   public:
1696   DEFINE_SIZE_ARRAY (16, values);
1697 };
1698 
1699 struct PairPos
1700 {
1701   template <typename context_t, typename ...Ts>
1702   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1703   {
1704     TRACE_DISPATCH (this, u.format);
1705     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1706     switch (u.format) {
1707     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1708     case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
1709     default:return_trace (c->default_return_value ());
1710     }
1711   }
1712 
1713   protected:
1714   union {
1715   HBUINT16		format;		/* Format identifier */
1716   PairPosFormat1	format1;
1717   PairPosFormat2	format2;
1718   } u;
1719 };
1720 
1721 
1722 struct EntryExitRecord
1723 {
1724   friend struct CursivePosFormat1;
1725 
1726   bool sanitize (hb_sanitize_context_t *c, const void *base) const
1727   {
1728     TRACE_SANITIZE (this);
1729     return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
1730   }
1731 
1732   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
1733 				  const void *src_base) const
1734   {
1735     (src_base+entryAnchor).collect_variation_indices (c);
1736     (src_base+exitAnchor).collect_variation_indices (c);
1737   }
1738 
1739   EntryExitRecord* subset (hb_subset_context_t *c,
1740                            const void *src_base) const
1741   {
1742     TRACE_SERIALIZE (this);
1743     auto *out = c->serializer->embed (this);
1744     if (unlikely (!out)) return_trace (nullptr);
1745 
1746     out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
1747     out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
1748     return_trace (out);
1749   }
1750 
1751   protected:
1752   Offset16To<Anchor>
1753 		entryAnchor;		/* Offset to EntryAnchor table--from
1754 					 * beginning of CursivePos
1755 					 * subtable--may be NULL */
1756   Offset16To<Anchor>
1757 		exitAnchor;		/* Offset to ExitAnchor table--from
1758 					 * beginning of CursivePos
1759 					 * subtable--may be NULL */
1760   public:
1761   DEFINE_SIZE_STATIC (4);
1762 };
1763 
1764 static void
1765 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
1766 
1767 struct CursivePosFormat1
1768 {
1769   bool intersects (const hb_set_t *glyphs) const
1770   { return (this+coverage).intersects (glyphs); }
1771 
1772   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1773 
1774   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1775   {
1776     + hb_zip (this+coverage, entryExitRecord)
1777     | hb_filter (c->glyph_set, hb_first)
1778     | hb_map (hb_second)
1779     | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
1780     ;
1781   }
1782 
1783   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1784   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
1785 
1786   const Coverage &get_coverage () const { return this+coverage; }
1787 
1788   bool apply (hb_ot_apply_context_t *c) const
1789   {
1790     TRACE_APPLY (this);
1791     hb_buffer_t *buffer = c->buffer;
1792 
1793     const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage  (buffer->cur().codepoint)];
1794     if (!this_record.entryAnchor) return_trace (false);
1795 
1796     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1797     skippy_iter.reset (buffer->idx, 1);
1798     if (!skippy_iter.prev ()) return_trace (false);
1799 
1800     const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint)];
1801     if (!prev_record.exitAnchor) return_trace (false);
1802 
1803     unsigned int i = skippy_iter.idx;
1804     unsigned int j = buffer->idx;
1805 
1806     buffer->unsafe_to_break (i, j);
1807     float entry_x, entry_y, exit_x, exit_y;
1808     (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
1809     (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
1810 
1811     hb_glyph_position_t *pos = buffer->pos;
1812 
1813     hb_position_t d;
1814     /* Main-direction adjustment */
1815     switch (c->direction) {
1816       case HB_DIRECTION_LTR:
1817 	pos[i].x_advance  = roundf (exit_x) + pos[i].x_offset;
1818 
1819 	d = roundf (entry_x) + pos[j].x_offset;
1820 	pos[j].x_advance -= d;
1821 	pos[j].x_offset  -= d;
1822 	break;
1823       case HB_DIRECTION_RTL:
1824 	d = roundf (exit_x) + pos[i].x_offset;
1825 	pos[i].x_advance -= d;
1826 	pos[i].x_offset  -= d;
1827 
1828 	pos[j].x_advance  = roundf (entry_x) + pos[j].x_offset;
1829 	break;
1830       case HB_DIRECTION_TTB:
1831 	pos[i].y_advance  = roundf (exit_y) + pos[i].y_offset;
1832 
1833 	d = roundf (entry_y) + pos[j].y_offset;
1834 	pos[j].y_advance -= d;
1835 	pos[j].y_offset  -= d;
1836 	break;
1837       case HB_DIRECTION_BTT:
1838 	d = roundf (exit_y) + pos[i].y_offset;
1839 	pos[i].y_advance -= d;
1840 	pos[i].y_offset  -= d;
1841 
1842 	pos[j].y_advance  = roundf (entry_y);
1843 	break;
1844       case HB_DIRECTION_INVALID:
1845       default:
1846 	break;
1847     }
1848 
1849     /* Cross-direction adjustment */
1850 
1851     /* We attach child to parent (think graph theory and rooted trees), where
1852      * the root stays on the baseline and each node aligns itself against its
1853      * parent.
1854      *
1855      * Optimize things for the case of RightToLeft, as that's most common in
1856      * Arabic. */
1857     unsigned int child  = i;
1858     unsigned int parent = j;
1859     hb_position_t x_offset = entry_x - exit_x;
1860     hb_position_t y_offset = entry_y - exit_y;
1861     if  (!(c->lookup_props & LookupFlag::RightToLeft))
1862     {
1863       unsigned int k = child;
1864       child = parent;
1865       parent = k;
1866       x_offset = -x_offset;
1867       y_offset = -y_offset;
1868     }
1869 
1870     /* If child was already connected to someone else, walk through its old
1871      * chain and reverse the link direction, such that the whole tree of its
1872      * previous connection now attaches to the new parent.  Watch out for the
1873      * case where the new parent is already on the path of the old chain...
1874      */
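    /* Illustrative sketch: if the child C was previously attached to B, and B to
     * A, the call below flips those links so that B attaches to C and A attaches
     * to B; C is then attached to its new parent, so the whole old chain follows
     * the new parent. */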
1875     reverse_cursive_minor_offset (pos, child, c->direction, parent);
1876 
1877     pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
1878     pos[child].attach_chain() = (int) parent - (int) child;
1879     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
1880     if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
1881       pos[child].y_offset = y_offset;
1882     else
1883       pos[child].x_offset = x_offset;
1884 
1885     /* If parent was attached to child, break them free.
1886      * https://github.com/harfbuzz/harfbuzz/issues/2469
1887      */
1888     if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
1889       pos[parent].attach_chain() = 0;
1890 
1891     buffer->idx++;
1892     return_trace (true);
1893   }
1894 
1895   template <typename Iterator,
1896 	    hb_requires (hb_is_iterator (Iterator))>
1897   void serialize (hb_subset_context_t *c,
1898 		  Iterator it,
1899 		  const void *src_base)
1900   {
1901     if (unlikely (!c->serializer->extend_min ((*this)))) return;
1902     this->format = 1;
1903     this->entryExitRecord.len = it.len ();
1904 
1905     for (const EntryExitRecord& entry_record : + it
1906 					       | hb_map (hb_second))
1907       entry_record.subset (c, src_base);
1908 
1909     auto glyphs =
1910     + it
1911     | hb_map_retains_sorting (hb_first)
1912     ;
1913 
1914     coverage.serialize (c->serializer, this).serialize (c->serializer, glyphs);
1915   }
1916 
1917   bool subset (hb_subset_context_t *c) const
1918   {
1919     TRACE_SUBSET (this);
1920     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1921     const hb_map_t &glyph_map = *c->plan->glyph_map;
1922 
1923     auto *out = c->serializer->start_embed (*this);
1924     if (unlikely (!out)) return_trace (false);
1925 
1926     auto it =
1927     + hb_zip (this+coverage, entryExitRecord)
1928     | hb_filter (glyphset, hb_first)
1929     | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
1930 			      { return hb_pair (glyph_map[p.first], p.second);})
1931     ;
1932 
1933     bool ret = bool (it);
1934     out->serialize (c, it, this);
1935     return_trace (ret);
1936   }
1937 
1938   bool sanitize (hb_sanitize_context_t *c) const
1939   {
1940     TRACE_SANITIZE (this);
1941     return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
1942   }
1943 
1944   protected:
1945   HBUINT16	format;			/* Format identifier--format = 1 */
1946   Offset16To<Coverage>
1947 		coverage;		/* Offset to Coverage table--from
1948 					 * beginning of subtable */
1949   Array16Of<EntryExitRecord>
1950 		entryExitRecord;	/* Array of EntryExit records--in
1951 					 * Coverage Index order */
1952   public:
1953   DEFINE_SIZE_ARRAY (6, entryExitRecord);
1954 };
1955 
1956 struct CursivePos
1957 {
1958   template <typename context_t, typename ...Ts>
1959   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1960   {
1961     TRACE_DISPATCH (this, u.format);
1962     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1963     switch (u.format) {
1964     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1965     default:return_trace (c->default_return_value ());
1966     }
1967   }
1968 
1969   protected:
1970   union {
1971   HBUINT16		format;		/* Format identifier */
1972   CursivePosFormat1	format1;
1973   } u;
1974 };
1975 
1976 
1977 typedef AnchorMatrix BaseArray;		/* base-major--
1978 					 * in order of BaseCoverage Index--,
1979 					 * mark-minor--
1980 					 * ordered by class--zero-based. */
1981 
1982 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
1983 						 const MarkArray &mark_array,
1984 						 const hb_set_t  &glyphset,
1985 						 hb_map_t*        klass_mapping /* INOUT */)
1986 {
1987   hb_set_t orig_classes;
1988 
1989   + hb_zip (mark_coverage, mark_array)
1990   | hb_filter (glyphset, hb_first)
1991   | hb_map (hb_second)
1992   | hb_map (&MarkRecord::get_class)
1993   | hb_sink (orig_classes)
1994   ;
1995 
1996   unsigned idx = 0;
1997   for (auto klass : orig_classes.iter ())
1998   {
1999     if (klass_mapping->has (klass)) continue;
2000     klass_mapping->set (klass, idx);
2001     idx++;
2002   }
2003 }
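/* Illustrative note: if the retained marks only use classes {0, 3, 7}, the
 * mapping built above is {0 -> 0, 3 -> 1, 7 -> 2}; surviving classes are
 * renumbered densely in ascending order. */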
2004 
2005 struct MarkBasePosFormat1
2006 {
2007   bool intersects (const hb_set_t *glyphs) const
2008   {
2009     return (this+markCoverage).intersects (glyphs) &&
2010 	   (this+baseCoverage).intersects (glyphs);
2011   }
2012 
2013   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2014 
2015   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2016   {
2017     + hb_zip (this+markCoverage, this+markArray)
2018     | hb_filter (c->glyph_set, hb_first)
2019     | hb_map (hb_second)
2020     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
2021     ;
2022 
2023     hb_map_t klass_mapping;
2024     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
2025 
2026     unsigned basecount = (this+baseArray).rows;
2027     auto base_iter =
2028     + hb_zip (this+baseCoverage, hb_range (basecount))
2029     | hb_filter (c->glyph_set, hb_first)
2030     | hb_map (hb_second)
2031     ;
2032 
2033     hb_sorted_vector_t<unsigned> base_indexes;
2034     for (const unsigned row : base_iter)
2035     {
2036       + hb_range ((unsigned) classCount)
2037       | hb_filter (klass_mapping)
2038       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2039       | hb_sink (base_indexes)
2040       ;
2041     }
2042     (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
2043   }
2044 
2045   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2046   {
2047     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
2048     if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
2049   }
2050 
2051   const Coverage &get_coverage () const { return this+markCoverage; }
2052 
2053   bool apply (hb_ot_apply_context_t *c) const
2054   {
2055     TRACE_APPLY (this);
2056     hb_buffer_t *buffer = c->buffer;
2057     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
2058     if (likely (mark_index == NOT_COVERED)) return_trace (false);
2059 
2060     /* Now we search backwards for a non-mark glyph */
2061     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2062     skippy_iter.reset (buffer->idx, 1);
2063     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
2064     do {
2065       if (!skippy_iter.prev ()) return_trace (false);
2066       /* We only want to attach to the first of a MultipleSubst sequence.
2067        * https://github.com/harfbuzz/harfbuzz/issues/740
2068        * Reject others...
2069        * ...but stop if we find a mark in the MultipleSubst sequence:
2070        * https://github.com/harfbuzz/harfbuzz/issues/1020 */
2071       if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
2072 	  0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
2073 	  (skippy_iter.idx == 0 ||
2074 	   _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
2075 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
2076 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
2077 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
2078 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
2079 	   ))
2080 	break;
2081       skippy_iter.reject ();
2082     } while (true);
2083 
2084     /* Checking that the matched glyph is actually a base glyph by GDEF is too strong; disabled */
2085     //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2086 
2087     unsigned int base_index = (this+baseCoverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint);
2088     if (base_index == NOT_COVERED) return_trace (false);
2089 
2090     return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
2091   }
2092 
2093   bool subset (hb_subset_context_t *c) const
2094   {
2095     TRACE_SUBSET (this);
2096     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2097     const hb_map_t &glyph_map = *c->plan->glyph_map;
2098 
2099     auto *out = c->serializer->start_embed (*this);
2100     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2101     out->format = format;
2102 
2103     hb_map_t klass_mapping;
2104     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
2105 
2106     if (!klass_mapping.get_population ()) return_trace (false);
2107     out->classCount = klass_mapping.get_population ();
2108 
2109     auto mark_iter =
2110     + hb_zip (this+markCoverage, this+markArray)
2111     | hb_filter (glyphset, hb_first)
2112     ;
2113 
2114     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2115     + mark_iter
2116     | hb_map (hb_first)
2117     | hb_map (glyph_map)
2118     | hb_sink (new_coverage)
2119     ;
2120 
2121     if (!out->markCoverage.serialize (c->serializer, out)
2122 			  .serialize (c->serializer, new_coverage.iter ()))
2123       return_trace (false);
2124 
2125     out->markArray.serialize_subset (c, markArray, this,
2126                                      (this+markCoverage).iter (),
2127                                      &klass_mapping);
2128 
2129     unsigned basecount = (this+baseArray).rows;
2130     auto base_iter =
2131     + hb_zip (this+baseCoverage, hb_range (basecount))
2132     | hb_filter (glyphset, hb_first)
2133     ;
2134 
2135     new_coverage.reset ();
2136     + base_iter
2137     | hb_map (hb_first)
2138     | hb_map (glyph_map)
2139     | hb_sink (new_coverage)
2140     ;
2141 
2142     if (!out->baseCoverage.serialize (c->serializer, out)
2143 			  .serialize (c->serializer, new_coverage.iter ()))
2144       return_trace (false);
2145 
2146     hb_sorted_vector_t<unsigned> base_indexes;
2147     for (const unsigned row : + base_iter
2148 			      | hb_map (hb_second))
2149     {
2150       + hb_range ((unsigned) classCount)
2151       | hb_filter (klass_mapping)
2152       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2153       | hb_sink (base_indexes)
2154       ;
2155     }
2156 
2157     out->baseArray.serialize_subset (c, baseArray, this,
2158                                      base_iter.len (),
2159                                      base_indexes.iter ());
2160 
2161     return_trace (true);
2162   }
2163 
2164   bool sanitize (hb_sanitize_context_t *c) const
2165   {
2166     TRACE_SANITIZE (this);
2167     return_trace (c->check_struct (this) &&
2168 		  markCoverage.sanitize (c, this) &&
2169 		  baseCoverage.sanitize (c, this) &&
2170 		  markArray.sanitize (c, this) &&
2171 		  baseArray.sanitize (c, this, (unsigned int) classCount));
2172   }
2173 
2174   protected:
2175   HBUINT16	format;			/* Format identifier--format = 1 */
2176   Offset16To<Coverage>
2177 		markCoverage;		/* Offset to MarkCoverage table--from
2178 					 * beginning of MarkBasePos subtable */
2179   Offset16To<Coverage>
2180 		baseCoverage;		/* Offset to BaseCoverage table--from
2181 					 * beginning of MarkBasePos subtable */
2182   HBUINT16	classCount;		/* Number of classes defined for marks */
2183   Offset16To<MarkArray>
2184 		markArray;		/* Offset to MarkArray table--from
2185 					 * beginning of MarkBasePos subtable */
2186   Offset16To<BaseArray>
2187 		baseArray;		/* Offset to BaseArray table--from
2188 					 * beginning of MarkBasePos subtable */
2189   public:
2190   DEFINE_SIZE_STATIC (12);
2191 };
2192 
2193 struct MarkBasePos
2194 {
2195   template <typename context_t, typename ...Ts>
2196   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2197   {
2198     TRACE_DISPATCH (this, u.format);
2199     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2200     switch (u.format) {
2201     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2202     default:return_trace (c->default_return_value ());
2203     }
2204   }
2205 
2206   protected:
2207   union {
2208   HBUINT16		format;		/* Format identifier */
2209   MarkBasePosFormat1	format1;
2210   } u;
2211 };
2212 
2213 
2214 typedef AnchorMatrix LigatureAttach;	/* component-major--
2215 					 * in order of writing direction--,
2216 					 * mark-minor--
2217 					 * ordered by class--zero-based. */
2218 
2219 /* Array of LigatureAttach tables ordered by LigatureCoverage Index */
2220 struct LigatureArray : List16OfOffset16To<LigatureAttach>
2221 {
2222   template <typename Iterator,
2223 	    hb_requires (hb_is_iterator (Iterator))>
2224   bool subset (hb_subset_context_t *c,
2225                Iterator		    coverage,
2226 	       unsigned		    class_count,
2227 	       const hb_map_t	   *klass_mapping) const
2228   {
2229     TRACE_SUBSET (this);
2230     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2231 
2232     auto *out = c->serializer->start_embed (this);
2233     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
2234 
2235     for (const auto _ : + hb_zip (coverage, *this)
2236 		  | hb_filter (glyphset, hb_first))
2237     {
2238       auto *matrix = out->serialize_append (c->serializer);
2239       if (unlikely (!matrix)) return_trace (false);
2240 
2241       const LigatureAttach& src = (this + _.second);
2242       auto indexes =
2243           + hb_range (src.rows * class_count)
2244           | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
2245           ;
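      /* Illustrative note: this keeps only the matrix cells whose mark-class
       * column survives subsetting; e.g. with class_count = 4 and retained
       * classes {0, 2}, the kept flattened indexes are 0, 2, 4, 6, ... */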
2246       matrix->serialize_subset (c,
2247 				_.second,
2248 				this,
2249                                 src.rows,
2250                                 indexes);
2251     }
2252     return_trace (this->len);
2253   }
2254 };
2255 
2256 struct MarkLigPosFormat1
2257 {
2258   bool intersects (const hb_set_t *glyphs) const
2259   {
2260     return (this+markCoverage).intersects (glyphs) &&
2261 	   (this+ligatureCoverage).intersects (glyphs);
2262   }
2263 
2264   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2265 
2266   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2267   {
2268     + hb_zip (this+markCoverage, this+markArray)
2269     | hb_filter (c->glyph_set, hb_first)
2270     | hb_map (hb_second)
2271     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
2272     ;
2273 
2274     hb_map_t klass_mapping;
2275     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
2276 
2277     unsigned ligcount = (this+ligatureArray).len;
2278     auto lig_iter =
2279     + hb_zip (this+ligatureCoverage, hb_range (ligcount))
2280     | hb_filter (c->glyph_set, hb_first)
2281     | hb_map (hb_second)
2282     ;
2283 
2284     const LigatureArray& lig_array = this+ligatureArray;
2285     for (const unsigned i : lig_iter)
2286     {
2287       hb_sorted_vector_t<unsigned> lig_indexes;
2288       unsigned row_count = lig_array[i].rows;
2289       for (unsigned row : + hb_range (row_count))
2290       {
2291 	+ hb_range ((unsigned) classCount)
2292 	| hb_filter (klass_mapping)
2293 	| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2294 	| hb_sink (lig_indexes)
2295 	;
2296       }
2297 
2298       lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
2299     }
2300   }
2301 
2302   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2303   {
2304     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
2305     if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
2306   }
2307 
2308   const Coverage &get_coverage () const { return this+markCoverage; }
2309 
2310   bool apply (hb_ot_apply_context_t *c) const
2311   {
2312     TRACE_APPLY (this);
2313     hb_buffer_t *buffer = c->buffer;
2314     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
2315     if (likely (mark_index == NOT_COVERED)) return_trace (false);
2316 
2317     /* Now we search backwards for a non-mark glyph */
2318     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2319     skippy_iter.reset (buffer->idx, 1);
2320     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
2321     if (!skippy_iter.prev ()) return_trace (false);
2322 
2323     /* Checking that the matched glyph is actually a ligature by GDEF is too strong; disabled */
2324     //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2325 
2326     unsigned int j = skippy_iter.idx;
2327     unsigned int lig_index = (this+ligatureCoverage).get_coverage  (buffer->info[j].codepoint);
2328     if (lig_index == NOT_COVERED) return_trace (false);
2329 
2330     const LigatureArray& lig_array = this+ligatureArray;
2331     const LigatureAttach& lig_attach = lig_array[lig_index];
2332 
2333     /* Find component to attach to */
2334     unsigned int comp_count = lig_attach.rows;
2335     if (unlikely (!comp_count)) return_trace (false);
2336 
2337     /* We must now check whether the ligature ID of the current mark glyph
2338      * is identical to the ligature ID of the found ligature.  If yes, we
2339      * can directly use the component index.  If not, we attach the mark
2340      * glyph to the last component of the ligature. */
2341     unsigned int comp_index;
2342     unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2343     unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
2344     unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
2345     if (lig_id && lig_id == mark_id && mark_comp > 0)
2346       comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
2347     else
2348       comp_index = comp_count - 1;
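    /* Illustrative note: for a ligature with comp_count = 3, a mark carrying the
     * same ligature id with lig_comp = 2 attaches to component index 1 (0-based,
     * the second component); a mark with no matching id falls back to the last
     * component, index comp_count - 1 = 2. */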
2349 
2350     return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
2351   }
2352 
2353   bool subset (hb_subset_context_t *c) const
2354   {
2355     TRACE_SUBSET (this);
2356     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2357     const hb_map_t &glyph_map = *c->plan->glyph_map;
2358 
2359     auto *out = c->serializer->start_embed (*this);
2360     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2361     out->format = format;
2362 
2363     hb_map_t klass_mapping;
2364     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
2365 
2366     if (!klass_mapping.get_population ()) return_trace (false);
2367     out->classCount = klass_mapping.get_population ();
2368 
2369     auto mark_iter =
2370     + hb_zip (this+markCoverage, this+markArray)
2371     | hb_filter (glyphset, hb_first)
2372     ;
2373 
2374     auto new_mark_coverage =
2375     + mark_iter
2376     | hb_map_retains_sorting (hb_first)
2377     | hb_map_retains_sorting (glyph_map)
2378     ;
2379 
2380     if (!out->markCoverage.serialize (c->serializer, out)
2381 			  .serialize (c->serializer, new_mark_coverage))
2382       return_trace (false);
2383 
2384     out->markArray.serialize_subset (c, markArray, this,
2385                                      (this+markCoverage).iter (),
2386                                      &klass_mapping);
2387 
2388     auto new_ligature_coverage =
2389     + hb_iter (this + ligatureCoverage)
2390     | hb_filter (glyphset)
2391     | hb_map_retains_sorting (glyph_map)
2392     ;
2393 
2394     if (!out->ligatureCoverage.serialize (c->serializer, out)
2395 			      .serialize (c->serializer, new_ligature_coverage))
2396       return_trace (false);
2397 
2398     out->ligatureArray.serialize_subset (c, ligatureArray, this,
2399                                          hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
2400 
2401     return_trace (true);
2402   }
2403 
2404   bool sanitize (hb_sanitize_context_t *c) const
2405   {
2406     TRACE_SANITIZE (this);
2407     return_trace (c->check_struct (this) &&
2408 		  markCoverage.sanitize (c, this) &&
2409 		  ligatureCoverage.sanitize (c, this) &&
2410 		  markArray.sanitize (c, this) &&
2411 		  ligatureArray.sanitize (c, this, (unsigned int) classCount));
2412   }
2413 
2414   protected:
2415   HBUINT16	format;			/* Format identifier--format = 1 */
2416   Offset16To<Coverage>
2417 		markCoverage;		/* Offset to Mark Coverage table--from
2418 					 * beginning of MarkLigPos subtable */
2419   Offset16To<Coverage>
2420 		ligatureCoverage;	/* Offset to Ligature Coverage
2421 					 * table--from beginning of MarkLigPos
2422 					 * subtable */
2423   HBUINT16	classCount;		/* Number of defined mark classes */
2424   Offset16To<MarkArray>
2425 		markArray;		/* Offset to MarkArray table--from
2426 					 * beginning of MarkLigPos subtable */
2427   Offset16To<LigatureArray>
2428 		ligatureArray;		/* Offset to LigatureArray table--from
2429 					 * beginning of MarkLigPos subtable */
2430   public:
2431   DEFINE_SIZE_STATIC (12);
2432 };
2433 
2434 
2435 struct MarkLigPos
2436 {
2437   template <typename context_t, typename ...Ts>
2438   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2439   {
2440     TRACE_DISPATCH (this, u.format);
2441     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2442     switch (u.format) {
2443     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2444     default:return_trace (c->default_return_value ());
2445     }
2446   }
2447 
2448   protected:
2449   union {
2450   HBUINT16		format;		/* Format identifier */
2451   MarkLigPosFormat1	format1;
2452   } u;
2453 };
2454 
2455 
2456 typedef AnchorMatrix Mark2Array;	/* mark2-major--
2457 					 * in order of Mark2Coverage Index--,
2458 					 * mark1-minor--
2459 					 * ordered by class--zero-based. */
2460 
2461 struct MarkMarkPosFormat1
2462 {
2463   bool intersects (const hb_set_t *glyphs) const
2464   {
2465     return (this+mark1Coverage).intersects (glyphs) &&
2466 	   (this+mark2Coverage).intersects (glyphs);
2467   }
2468 
2469   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2470 
2471   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2472   {
2473     + hb_zip (this+mark1Coverage, this+mark1Array)
2474     | hb_filter (c->glyph_set, hb_first)
2475     | hb_map (hb_second)
2476     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
2477     ;
2478 
2479     hb_map_t klass_mapping;
2480     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);
2481 
2482     unsigned mark2_count = (this+mark2Array).rows;
2483     auto mark2_iter =
2484     + hb_zip (this+mark2Coverage, hb_range (mark2_count))
2485     | hb_filter (c->glyph_set, hb_first)
2486     | hb_map (hb_second)
2487     ;
2488 
2489     hb_sorted_vector_t<unsigned> mark2_indexes;
2490     for (const unsigned row : mark2_iter)
2491     {
2492       + hb_range ((unsigned) classCount)
2493       | hb_filter (klass_mapping)
2494       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2495       | hb_sink (mark2_indexes)
2496       ;
2497     }
2498     (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
2499   }
2500 
2501   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2502   {
2503     if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
2504     if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
2505   }
2506 
2507   const Coverage &get_coverage () const { return this+mark1Coverage; }
2508 
2509   bool apply (hb_ot_apply_context_t *c) const
2510   {
2511     TRACE_APPLY (this);
2512     hb_buffer_t *buffer = c->buffer;
2513     unsigned int mark1_index = (this+mark1Coverage).get_coverage  (buffer->cur().codepoint);
2514     if (likely (mark1_index == NOT_COVERED)) return_trace (false);
2515 
2516     /* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph */
2517     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2518     skippy_iter.reset (buffer->idx, 1);
2519     skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
2520     if (!skippy_iter.prev ()) return_trace (false);
2521 
2522     if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2523 
2524     unsigned int j = skippy_iter.idx;
2525 
2526     unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
2527     unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2528     unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
2529     unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
2530 
2531     if (likely (id1 == id2))
2532     {
2533       if (id1 == 0) /* Marks belonging to the same base. */
2534 	goto good;
2535       else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
2536 	goto good;
2537     }
2538     else
2539     {
2540       /* If the ligature ids don't match, it may be that one of the marks is
2541        * itself a ligature, in which case we match. */
2542       if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
2543 	goto good;
2544     }
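    /* Illustrative note: two marks on the same base (id1 == id2 == 0) match, as
     * do marks on the same component of the same ligature; marks on different
     * components of one ligature do not, and if the ids differ we only match
     * when one of the marks is itself a ligature (lig id set, lig comp 0). */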
2545 
2546     /* Didn't match. */
2547     return_trace (false);
2548 
2549     good:
2550     unsigned int mark2_index = (this+mark2Coverage).get_coverage  (buffer->info[j].codepoint);
2551     if (mark2_index == NOT_COVERED) return_trace (false);
2552 
2553     return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
2554   }
2555 
2556   bool subset (hb_subset_context_t *c) const
2557   {
2558     TRACE_SUBSET (this);
2559     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2560     const hb_map_t &glyph_map = *c->plan->glyph_map;
2561 
2562     auto *out = c->serializer->start_embed (*this);
2563     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2564     out->format = format;
2565 
2566     hb_map_t klass_mapping;
2567     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
2568 
2569     if (!klass_mapping.get_population ()) return_trace (false);
2570     out->classCount = klass_mapping.get_population ();
2571 
2572     auto mark1_iter =
2573     + hb_zip (this+mark1Coverage, this+mark1Array)
2574     | hb_filter (glyphset, hb_first)
2575     ;
2576 
2577     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2578     + mark1_iter
2579     | hb_map (hb_first)
2580     | hb_map (glyph_map)
2581     | hb_sink (new_coverage)
2582     ;
2583 
2584     if (!out->mark1Coverage.serialize (c->serializer, out)
2585 			   .serialize (c->serializer, new_coverage.iter ()))
2586       return_trace (false);
2587 
2588     out->mark1Array.serialize_subset (c, mark1Array, this,
2589                                       (this+mark1Coverage).iter (),
2590                                       &klass_mapping);
2591 
2592     unsigned mark2count = (this+mark2Array).rows;
2593     auto mark2_iter =
2594     + hb_zip (this+mark2Coverage, hb_range (mark2count))
2595     | hb_filter (glyphset, hb_first)
2596     ;
2597 
2598     new_coverage.reset ();
2599     + mark2_iter
2600     | hb_map (hb_first)
2601     | hb_map (glyph_map)
2602     | hb_sink (new_coverage)
2603     ;
2604 
2605     if (!out->mark2Coverage.serialize (c->serializer, out)
2606 			   .serialize (c->serializer, new_coverage.iter ()))
2607       return_trace (false);
2608 
2609     hb_sorted_vector_t<unsigned> mark2_indexes;
2610     for (const unsigned row : + mark2_iter
2611 			      | hb_map (hb_second))
2612     {
2613       + hb_range ((unsigned) classCount)
2614       | hb_filter (klass_mapping)
2615       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2616       | hb_sink (mark2_indexes)
2617       ;
2618     }
2619 
2620     out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());
2621 
2622     return_trace (true);
2623   }
2624 
2625   bool sanitize (hb_sanitize_context_t *c) const
2626   {
2627     TRACE_SANITIZE (this);
2628     return_trace (c->check_struct (this) &&
2629 		  mark1Coverage.sanitize (c, this) &&
2630 		  mark2Coverage.sanitize (c, this) &&
2631 		  mark1Array.sanitize (c, this) &&
2632 		  mark2Array.sanitize (c, this, (unsigned int) classCount));
2633   }
2634 
2635   protected:
2636   HBUINT16	format;			/* Format identifier--format = 1 */
2637   Offset16To<Coverage>
2638 		mark1Coverage;		/* Offset to Combining Mark1 Coverage
2639 					 * table--from beginning of MarkMarkPos
2640 					 * subtable */
2641   Offset16To<Coverage>
2642 		mark2Coverage;		/* Offset to Combining Mark2 Coverage
2643 					 * table--from beginning of MarkMarkPos
2644 					 * subtable */
2645   HBUINT16	classCount;		/* Number of defined mark classes */
2646   Offset16To<MarkArray>
2647 		mark1Array;		/* Offset to Mark1Array table--from
2648 					 * beginning of MarkMarkPos subtable */
2649   Offset16To<Mark2Array>
2650 		mark2Array;		/* Offset to Mark2Array table--from
2651 					 * beginning of MarkMarkPos subtable */
2652   public:
2653   DEFINE_SIZE_STATIC (12);
2654 };
2655 
2656 struct MarkMarkPos
2657 {
2658   template <typename context_t, typename ...Ts>
2659   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2660   {
2661     TRACE_DISPATCH (this, u.format);
2662     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2663     switch (u.format) {
2664     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2665     default:return_trace (c->default_return_value ());
2666     }
2667   }
2668 
2669   protected:
2670   union {
2671   HBUINT16		format;		/* Format identifier */
2672   MarkMarkPosFormat1	format1;
2673   } u;
2674 };
2675 
2676 
2677 struct ContextPos : Context {};
2678 
2679 struct ChainContextPos : ChainContext {};
2680 
2681 struct ExtensionPos : Extension<ExtensionPos>
2682 {
2683   typedef struct PosLookupSubTable SubTable;
2684 };
2685 
2686 
2687 
2688 /*
2689  * PosLookup
2690  */
2691 
2692 
2693 struct PosLookupSubTable
2694 {
2695   friend struct Lookup;
2696   friend struct PosLookup;
2697 
2698   enum Type {
2699     Single		= 1,
2700     Pair		= 2,
2701     Cursive		= 3,
2702     MarkBase		= 4,
2703     MarkLig		= 5,
2704     MarkMark		= 6,
2705     Context		= 7,
2706     ChainContext	= 8,
2707     Extension		= 9
2708   };
2709 
2710   template <typename context_t, typename ...Ts>
2711   typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
2712   {
2713     TRACE_DISPATCH (this, lookup_type);
2714     switch (lookup_type) {
2715     case Single:		return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
2716     case Pair:			return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
2717     case Cursive:		return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
2718     case MarkBase:		return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
2719     case MarkLig:		return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
2720     case MarkMark:		return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
2721     case Context:		return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
2722     case ChainContext:		return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
2723     case Extension:		return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
2724     default:			return_trace (c->default_return_value ());
2725     }
2726   }
2727 
2728   bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
2729   {
2730     hb_intersects_context_t c (glyphs);
2731     return dispatch (&c, lookup_type);
2732   }
2733 
2734   protected:
2735   union {
2736   SinglePos		single;
2737   PairPos		pair;
2738   CursivePos		cursive;
2739   MarkBasePos		markBase;
2740   MarkLigPos		markLig;
2741   MarkMarkPos		markMark;
2742   ContextPos		context;
2743   ChainContextPos	chainContext;
2744   ExtensionPos		extension;
2745   } u;
2746   public:
2747   DEFINE_SIZE_MIN (0);
2748 };
2749 
2750 
2751 struct PosLookup : Lookup
2752 {
2753   typedef struct PosLookupSubTable SubTable;
2754 
2755   const SubTable& get_subtable (unsigned int i) const
2756   { return Lookup::get_subtable<SubTable> (i); }
2757 
2758   bool is_reverse () const
2759   {
2760     return false;
2761   }
2762 
2763   bool apply (hb_ot_apply_context_t *c) const
2764   {
2765     TRACE_APPLY (this);
2766     return_trace (dispatch (c));
2767   }
2768 
2769   bool intersects (const hb_set_t *glyphs) const
2770   {
2771     hb_intersects_context_t c (glyphs);
2772     return dispatch (&c);
2773   }
2774 
2775   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
2776   { return dispatch (c); }
2777 
2778   hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
2779   {
2780     if (c->is_lookup_visited (this_index))
2781       return hb_closure_lookups_context_t::default_return_value ();
2782 
2783     c->set_lookup_visited (this_index);
2784     if (!intersects (c->glyphs))
2785     {
2786       c->set_lookup_inactive (this_index);
2787       return hb_closure_lookups_context_t::default_return_value ();
2788     }
2789     c->set_recurse_func (dispatch_closure_lookups_recurse_func);
2790 
2791     hb_closure_lookups_context_t::return_t ret = dispatch (c);
2792     return ret;
2793   }
2794 
2795   template <typename set_t>
2796   void collect_coverage (set_t *glyphs) const
2797   {
2798     hb_collect_coverage_context_t<set_t> c (glyphs);
2799     dispatch (&c);
2800   }
2801 
2802   static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
2803 
2804   template <typename context_t>
2805   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
2806 
2807   HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
2808 
2809   template <typename context_t, typename ...Ts>
2810   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2811   { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
2812 
2813   bool subset (hb_subset_context_t *c) const
2814   { return Lookup::subset<SubTable> (c); }
2815 
2816   bool sanitize (hb_sanitize_context_t *c) const
2817   { return Lookup::sanitize<SubTable> (c); }
2818 };
2819 
2820 /*
2821  * GPOS -- Glyph Positioning
2822  * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
2823  */
2824 
2825 struct GPOS : GSUBGPOS
2826 {
2827   static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
2828 
2829   const PosLookup& get_lookup (unsigned int i) const
2830   { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
2831 
2832   static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
2833   static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
2834   static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
2835 
2836   bool subset (hb_subset_context_t *c) const
2837   {
2838     hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_langsys, c->plan->gpos_features);
2839     return GSUBGPOS::subset<PosLookup> (&l);
2840   }
2841 
2842   bool sanitize (hb_sanitize_context_t *c) const
2843   { return GSUBGPOS::sanitize<PosLookup> (c); }
2844 
2845   HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
2846 				   hb_face_t *face) const;
2847 
2848   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2849   {
2850     for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
2851     {
2852       if (!c->gpos_lookups->has (i)) continue;
2853       const PosLookup &l = get_lookup (i);
2854       l.dispatch (c);
2855     }
2856   }
2857 
2858   void closure_lookups (hb_face_t      *face,
2859 			const hb_set_t *glyphs,
2860 			hb_set_t       *lookup_indexes /* IN/OUT */) const
2861   { GSUBGPOS::closure_lookups<PosLookup> (face, glyphs, lookup_indexes); }
2862 
2863   typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
2864 };
2865 
2866 
2867 static void
2868 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
2869 {
2870   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2871   if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
2872     return;
2873 
2874   pos[i].attach_chain() = 0;
2875 
2876   unsigned int j = (int) i + chain;
2877 
2878   /* Stop if we see the new parent in the chain. */
2879   if (j == new_parent)
2880     return;
2881 
2882   reverse_cursive_minor_offset (pos, j, direction, new_parent);
2883 
2884   if (HB_DIRECTION_IS_HORIZONTAL (direction))
2885     pos[j].y_offset = -pos[i].y_offset;
2886   else
2887     pos[j].x_offset = -pos[i].x_offset;
2888 
2889   pos[j].attach_chain() = -chain;
2890   pos[j].attach_type() = type;
2891 }
2892 static void
2893 propagate_attachment_offsets (hb_glyph_position_t *pos,
2894 			      unsigned int len,
2895 			      unsigned int i,
2896 			      hb_direction_t direction)
2897 {
2898   /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
2899    * the offset of the glyph they are attached to. */
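  /* Illustrative note: for a mark at index i attached to a base at j < i in a
   * forward run, the base's offsets are added and the advances of glyphs
   * j .. i-1 are subtracted, turning the anchor-relative offset into an offset
   * from the mark's own position. */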
2900   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2901   if (likely (!chain))
2902     return;
2903 
2904   pos[i].attach_chain() = 0;
2905 
2906   unsigned int j = (int) i + chain;
2907 
2908   if (unlikely (j >= len))
2909     return;
2910 
2911   propagate_attachment_offsets (pos, len, j, direction);
2912 
2913   assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));
2914 
2915   if (type & ATTACH_TYPE_CURSIVE)
2916   {
2917     if (HB_DIRECTION_IS_HORIZONTAL (direction))
2918       pos[i].y_offset += pos[j].y_offset;
2919     else
2920       pos[i].x_offset += pos[j].x_offset;
2921   }
2922   else /*if (type & ATTACH_TYPE_MARK)*/
2923   {
2924     pos[i].x_offset += pos[j].x_offset;
2925     pos[i].y_offset += pos[j].y_offset;
2926 
2927     assert (j < i);
2928     if (HB_DIRECTION_IS_FORWARD (direction))
2929       for (unsigned int k = j; k < i; k++) {
2930 	pos[i].x_offset -= pos[k].x_advance;
2931 	pos[i].y_offset -= pos[k].y_advance;
2932       }
2933     else
2934       for (unsigned int k = j + 1; k < i + 1; k++) {
2935 	pos[i].x_offset += pos[k].x_advance;
2936 	pos[i].y_offset += pos[k].y_advance;
2937       }
2938   }
2939 }
2940 
2941 void
2942 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2943 {
2944   unsigned int count = buffer->len;
2945   for (unsigned int i = 0; i < count; i++)
2946     buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
2947 }
2948 
2949 void
2950 GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
2951 {
2952   //_hb_buffer_assert_gsubgpos_vars (buffer);
2953 }
2954 
2955 void
2956 GPOS::position_finish_offsets (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2957 {
2958   _hb_buffer_assert_gsubgpos_vars (buffer);
2959 
2960   unsigned int len;
2961   hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
2962   hb_direction_t direction = buffer->props.direction;
2963 
2964   /* Handle attachments */
2965   if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
2966     for (unsigned int i = 0; i < len; i++)
2967       propagate_attachment_offsets (pos, len, i, direction);
2968 }
2969 
2970 
2971 struct GPOS_accelerator_t : GPOS::accelerator_t {};
2972 
2973 
2974 /* Out-of-class implementation for methods recursing */
2975 
2976 #ifndef HB_NO_OT_LAYOUT
2977 template <typename context_t>
2978 /*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
2979 {
2980   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2981   return l.dispatch (c);
2982 }
2983 
2984 /*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
2985 {
2986   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
2987   return l.closure_lookups (c, this_index);
2988 }
2989 
2990 /*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
2991 {
2992   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2993   unsigned int saved_lookup_props = c->lookup_props;
2994   unsigned int saved_lookup_index = c->lookup_index;
2995   c->set_lookup_index (lookup_index);
2996   c->set_lookup_props (l.get_props ());
2997   bool ret = l.dispatch (c);
2998   c->set_lookup_index (saved_lookup_index);
2999   c->set_lookup_props (saved_lookup_props);
3000   return ret;
3001 }
3002 #endif
3003 
3004 
3005 } /* namespace OT */
3006 
3007 
3008 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */
3009