/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2010,2012,2013  Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 */

#ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
#define HB_OT_LAYOUT_GPOS_TABLE_HH

#include "hb-ot-layout-gsubgpos.hh"


namespace OT {

struct MarkArray;
static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
						 const MarkArray &mark_array,
						 const hb_set_t  &glyphset,
						 hb_map_t*        klass_mapping /* INOUT */);

/* buffer **position** var allocations */
#define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for forward. */
#define attach_type() var.u8[2] /* attachment type */
/* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
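/* These two scratch slots are only written here in GPOS; the recorded chain and
 * type are read back after all lookups have been applied, when attachment
 * offsets are propagated along the attach_chain() links. */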

enum attach_type_t {
  ATTACH_TYPE_NONE	= 0X00,

  /* Each attachment should be either a mark or a cursive; can't be both. */
  ATTACH_TYPE_MARK	= 0X01,
  ATTACH_TYPE_CURSIVE	= 0X02,
};


/* Shared Tables: ValueRecord, Anchor Table, and MarkArray */

typedef HBUINT16 Value;

typedef UnsizedArrayOf<Value> ValueRecord;

struct ValueFormat : HBUINT16
{
  enum Flags {
    xPlacement	= 0x0001u,	/* Includes horizontal adjustment for placement */
    yPlacement	= 0x0002u,	/* Includes vertical adjustment for placement */
    xAdvance	= 0x0004u,	/* Includes horizontal adjustment for advance */
    yAdvance	= 0x0008u,	/* Includes vertical adjustment for advance */
    xPlaDevice	= 0x0010u,	/* Includes horizontal Device table for placement */
    yPlaDevice	= 0x0020u,	/* Includes vertical Device table for placement */
    xAdvDevice	= 0x0040u,	/* Includes horizontal Device table for advance */
    yAdvDevice	= 0x0080u,	/* Includes vertical Device table for advance */
    ignored	= 0x0F00u,	/* Was used in TrueType Open for MM fonts */
    reserved	= 0xF000u,	/* For future use */

    devices	= 0x00F0u	/* Mask for having any Device table */
  };
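  /* For example, a value format of 0x0005u (xPlacement | xAdvance) describes a
   * ValueRecord of exactly two 16-bit words, an x placement followed by an
   * x advance, so get_len () is 2 and get_size () is 4 bytes. */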
/* All fields are optional.  Only the fields selected by the format flags are present and advance the value pointer. */
#if 0
  HBINT16		xPlacement;	/* Horizontal adjustment for
					 * placement--in design units */
  HBINT16		yPlacement;	/* Vertical adjustment for
					 * placement--in design units */
  HBINT16		xAdvance;	/* Horizontal adjustment for
					 * advance--in design units (only used
					 * for horizontal writing) */
  HBINT16		yAdvance;	/* Vertical adjustment for advance--in
					 * design units (only used for vertical
					 * writing) */
  Offset16To<Device>	xPlaDevice;	/* Offset to Device table for
					 * horizontal placement--measured from
					 * beginning of PosTable (may be NULL) */
  Offset16To<Device>	yPlaDevice;	/* Offset to Device table for vertical
					 * placement--measured from beginning
					 * of PosTable (may be NULL) */
  Offset16To<Device>	xAdvDevice;	/* Offset to Device table for
					 * horizontal advance--measured from
					 * beginning of PosTable (may be NULL) */
  Offset16To<Device>	yAdvDevice;	/* Offset to Device table for vertical
					 * advance--measured from beginning of
					 * PosTable (may be NULL) */
#endif

  IntType& operator = (uint16_t i) { v = i; return *this; }

  unsigned int get_len () const  { return hb_popcount ((unsigned int) *this); }
  unsigned int get_size () const { return get_len () * Value::static_size; }

  bool apply_value (hb_ot_apply_context_t *c,
		    const void            *base,
		    const Value           *values,
		    hb_glyph_position_t   &glyph_pos) const
  {
    bool ret = false;
    unsigned int format = *this;
    if (!format) return ret;

    hb_font_t *font = c->font;
    bool horizontal =
#ifndef HB_NO_VERTICAL
      HB_DIRECTION_IS_HORIZONTAL (c->direction)
#else
      true
#endif
      ;

    if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++, &ret));
    if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++, &ret));
    if (format & xAdvance) {
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
      values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
      values++;
    }

    if (!has_device ()) return ret;

    bool use_x_device = font->x_ppem || font->num_coords;
    bool use_y_device = font->y_ppem || font->num_coords;

    if (!use_x_device && !use_y_device) return ret;

    const VariationStore &store = c->var_store;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (use_x_device) glyph_pos.x_offset  += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yPlaDevice) {
      if (use_y_device) glyph_pos.y_offset  += (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    return ret;
  }
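  /* The boolean returned above is true iff some non-zero value or device offset
   * was actually read; PairSet::apply () uses it to decide whether the pair
   * adjusted anything and hence whether to flag the span unsafe-to-break. */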

  unsigned int get_effective_format (const Value *values) const
  {
    unsigned int format = *this;
    for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
      if (format & flag) should_drop (*values++, (Flags) flag, &format);
    }

    return format;
  }

  template<typename Iterator,
      hb_requires (hb_is_iterator (Iterator))>
  unsigned int get_effective_format (Iterator it) const {
    unsigned int new_format = 0;

    for (const hb_array_t<const Value>& values : it)
      new_format = new_format | get_effective_format (&values);

    return new_format;
  }
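  /* Both overloads above are used at serialization time to compute a reduced
   * format: value components that are zero in every retained record are
   * dropped from the output (e.g. when subsetting without hinting). */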

  void copy_values (hb_serialize_context_t *c,
                    unsigned int new_format,
                    const void *base,
                    const Value *values,
                    const hb_map_t *layout_variation_idx_map) const
  {
    unsigned int format = *this;
    if (!format) return;

    if (format & xPlacement) copy_value (c, new_format, xPlacement, *values++);
    if (format & yPlacement) copy_value (c, new_format, yPlacement, *values++);
    if (format & xAdvance)   copy_value (c, new_format, xAdvance, *values++);
    if (format & yAdvance)   copy_value (c, new_format, yAdvance, *values++);

    if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
  }

  void copy_value (hb_serialize_context_t *c,
                   unsigned int new_format,
                   Flags flag,
                   Value value) const
  {
    // Filter by new format.
    if (!(new_format & flag)) return;
    c->copy (value);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const void *base,
				  const hb_array_t<const Value>& values) const
  {
    unsigned format = *this;
    unsigned i = 0;
    if (format & xPlacement) i++;
    if (format & yPlacement) i++;
    if (format & xAdvance) i++;
    if (format & yAdvance) i++;
    if (format & xPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::xAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }
  }

  private:
  bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  static inline Offset16To<Device>& get_device (Value* value)
  {
    return *static_cast<Offset16To<Device> *> (value);
  }
  static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *static_cast<const Offset16To<Device> *> (value);
  }

  bool copy_device (hb_serialize_context_t *c, const void *base,
		    const Value *src_value, const hb_map_t *layout_variation_idx_map) const
  {
    Value	*dst_value = c->copy (*src_value);

    if (!dst_value) return false;
    if (*dst_value == 0) return true;

    *dst_value = 0;
    c->push ();
    if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
    {
      c->add_link (*dst_value, c->pop_pack ());
      return true;
    }
    else
    {
      c->pop_discard ();
      return false;
    }
  }

  static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *reinterpret_cast<const HBINT16 *> (value);
  }

  public:

  bool has_device () const
  {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
  {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_range (values, count, get_size ())) return_trace (false);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
	return_trace (false);
      values += len;
    }

    return_trace (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
  {
    TRACE_SANITIZE (this);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
	return_trace (false);
      values += stride;
    }

    return_trace (true);
  }

 private:

  void should_drop (Value value, Flags flag, unsigned int* format) const
  {
    if (value) return;
    *format = *format & ~flag;
  }

};

template<typename Iterator, typename SrcLookup>
static void SinglePos_serialize (hb_serialize_context_t *c,
				 const SrcLookup *src,
				 Iterator it,
				 const hb_map_t *layout_variation_idx_map);


struct AnchorFormat1
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
		   float *x, float *y) const
  {
    hb_font_t *font = c->font;
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  AnchorFormat1* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    AnchorFormat1* out = c->embed<AnchorFormat1> (this);
    if (!out) return_trace (out);
    out->format = 1;
    return_trace (out);
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  FWORD		xCoordinate;		/* Horizontal value--in design units */
  FWORD		yCoordinate;		/* Vertical value--in design units */
  public:
  DEFINE_SIZE_STATIC (6);
};

struct AnchorFormat2
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
		   float *x, float *y) const
  {
    hb_font_t *font = c->font;

#ifdef HB_NO_HINTING
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);
    return;
#endif

    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx = 0, cy = 0;
    bool ret;

    ret = (x_ppem || y_ppem) &&
	  font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
    *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  AnchorFormat2* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<AnchorFormat2> (this));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 2 */
  FWORD		xCoordinate;		/* Horizontal value--in design units */
  FWORD		yCoordinate;		/* Vertical value--in design units */
  HBUINT16	anchorPoint;		/* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};

struct AnchorFormat3
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
		   float *x, float *y) const
  {
    hb_font_t *font = c->font;
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);

    if (font->x_ppem || font->num_coords)
      *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
    if (font->y_ppem || font->num_coords)
      *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  AnchorFormat3* copy (hb_serialize_context_t *c,
		       const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    if (!layout_variation_idx_map) return_trace (nullptr);

    auto *out = c->embed<AnchorFormat3> (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    (this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
    (this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 3 */
  FWORD		xCoordinate;		/* Horizontal value--in design units */
  FWORD		yCoordinate;		/* Vertical value--in design units */
  Offset16To<Device>
		xDeviceTable;		/* Offset to Device table for X
					 * coordinate-- from beginning of
					 * Anchor table (may be NULL) */
  Offset16To<Device>
		yDeviceTable;		/* Offset to Device table for Y
					 * coordinate-- from beginning of
					 * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};

struct Anchor
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
		   float *x, float *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
    default:					      return;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    case 3: return_trace (u.format3.sanitize (c));
    default:return_trace (true);
    }
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
    case 2:
      if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
      {
        // AnchorFormat2 just contains extra hinting information, so
        // if hints are being dropped, convert it to format 1.
        return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
      }
      return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
    case 3: return_trace (bool (reinterpret_cast<Anchor *> (u.format3.copy (c->serializer,
                                                                            c->plan->layout_variation_idx_map))));
    default:return_trace (false);
    }
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    switch (u.format) {
    case 1: case 2:
      return;
    case 3:
      u.format3.collect_variation_indices (c);
      return;
    default: return;
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  AnchorFormat1		format1;
  AnchorFormat2		format2;
  AnchorFormat3		format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};


struct AnchorMatrix
{
  const Anchor& get_anchor (unsigned int row, unsigned int col,
			    unsigned int cols, bool *found) const
  {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
    *found = !matrixZ[row * cols + col].is_null ();
    return this+matrixZ[row * cols + col];
  }
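  /* Here `row` is typically the coverage index of the glyph being attached to
   * (base, ligature component, or mark) and `col` is the mark class, so the
   * matrix holds one Anchor offset per (attachment glyph, mark class) pair. */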

  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  Iterator index_iter) const
  {
    for (unsigned i : index_iter)
      (this+matrixZ[i]).collect_variation_indices (c);
  }

  template <typename Iterator,
      hb_requires (hb_is_iterator (Iterator))>
  bool subset (hb_subset_context_t *c,
               unsigned             num_rows,
               Iterator             index_iter) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);

    if (!index_iter) return_trace (false);
    if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);

    out->rows = num_rows;
    for (const unsigned i : index_iter)
    {
      auto *offset = c->serializer->embed (matrixZ[i]);
      if (!offset) return_trace (false);
      offset->serialize_subset (c, matrixZ[i], this);
    }

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrixZ[i].sanitize (c, this)) return_trace (false);
    return_trace (true);
  }

  HBUINT16	rows;			/* Number of rows */
  UnsizedArrayOf<Offset16To<Anchor>>
		matrixZ;		/* Matrix of offsets to Anchor tables--
					 * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrixZ);
};


struct MarkRecord
{
  friend struct MarkArray;

  unsigned get_class () const { return (unsigned) klass; }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  MarkRecord *subset (hb_subset_context_t    *c,
                      const void             *src_base,
                      const hb_map_t         *klass_mapping) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->klass = klass_mapping->get (klass);
    out->markAnchor.serialize_subset (c, markAnchor, src_base);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const void *src_base) const
  {
    (src_base+markAnchor).collect_variation_indices (c);
  }

  protected:
  HBUINT16	klass;			/* Class defined for this mark */
  Offset16To<Anchor>
		markAnchor;		/* Offset to Anchor table--from
					 * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};

struct MarkArray : Array16Of<MarkRecord>	/* Array of MarkRecords--in Coverage order */
{
  bool apply (hb_ot_apply_context_t *c,
	      unsigned int mark_index, unsigned int glyph_index,
	      const AnchorMatrix &anchors, unsigned int class_count,
	      unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = Array16Of<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return_trace (false);

    float mark_x, mark_y, base_x, base_y;

    buffer->unsafe_to_break (glyph_pos, buffer->idx + 1);
    mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = roundf (base_x - mark_x);
    o.y_offset = roundf (base_y - mark_y);
    o.attach_type() = ATTACH_TYPE_MARK;
    o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
    buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;

    buffer->idx++;
    return_trace (true);
  }
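  /* The x_offset/y_offset stored above are relative to the attached glyph's
   * position; they become absolute only later, when attachment offsets are
   * propagated along the attach_chain() links after all lookups have run. */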

  template <typename Iterator,
      hb_requires (hb_is_iterator (Iterator))>
  bool subset (hb_subset_context_t *c,
               Iterator		    coverage,
               const hb_map_t      *klass_mapping) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();

    auto* out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    auto mark_iter =
    + hb_zip (coverage, this->iter ())
    | hb_filter (glyphset, hb_first)
    | hb_map (hb_second)
    ;

    unsigned new_length = 0;
    for (const auto& mark_record : mark_iter) {
      if (unlikely (!mark_record.subset (c, this, klass_mapping)))
        return_trace (false);
      new_length++;
    }

    if (unlikely (!c->serializer->check_assign (out->len, new_length,
                                                HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
      return_trace (false);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (Array16Of<MarkRecord>::sanitize (c, this));
  }
};


/* Lookups */

struct SinglePosFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!valueFormat.has_device ()) return;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (c->glyph_set)
    ;

    if (!it) return;
    valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }

  ValueFormat get_value_format () const { return valueFormat; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    valueFormat.apply_value (c, this, values, buffer->cur_pos());

    buffer->idx++;
    return_trace (true);
  }

  template<typename Iterator,
      typename SrcLookup,
      hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  const SrcLookup *src,
		  Iterator it,
		  ValueFormat newFormat,
		  const hb_map_t *layout_variation_idx_map)
  {
    if (unlikely (!c->extend_min (this))) return;
    if (unlikely (!c->check_assign (valueFormat,
                                    newFormat,
                                    HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;

    for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
    {
      src->get_value_format ().copy_values (c, newFormat, src,  &_, layout_variation_idx_map);
      // Only serialize the first entry in the iterator, the rest are assumed to
      // be the same.
      break;
    }

    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize_serialize (c, glyphs);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
    ;

    bool ret = bool (it);
    SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  coverage.sanitize (c, this) &&
		  valueFormat.sanitize_value (c, this, values));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of subtable */
  ValueFormat	valueFormat;		/* Defines the types of data in the
					 * ValueRecord */
  ValueRecord	values;			/* Defines positioning
					 * value(s)--applied to all glyphs in
					 * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};

struct SinglePosFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!valueFormat.has_device ()) return;

    auto it =
    + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
    | hb_filter (c->glyph_set, hb_first)
    ;

    if (!it) return;

    unsigned sub_length = valueFormat.get_len ();
    const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);

    for (unsigned i : + it
		      | hb_map (hb_second))
      valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));

  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }

  ValueFormat get_value_format () const { return valueFormat; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    if (likely (index >= valueCount)) return_trace (false);

    valueFormat.apply_value (c, this,
			     &values[index * valueFormat.get_len ()],
			     buffer->cur_pos());

    buffer->idx++;
    return_trace (true);
  }

  template<typename Iterator,
      typename SrcLookup,
      hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  const SrcLookup *src,
		  Iterator it,
		  ValueFormat newFormat,
		  const hb_map_t *layout_variation_idx_map)
  {
    auto out = c->extend_min (this);
    if (unlikely (!out)) return;
    if (unlikely (!c->check_assign (valueFormat, newFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
    if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;

    + it
    | hb_map (hb_second)
    | hb_apply ([&] (hb_array_t<const Value> _)
    { src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map); })
    ;

    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize_serialize (c, glyphs);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    unsigned sub_length = valueFormat.get_len ();
    auto values_array = values.as_array (valueCount * sub_length);

    auto it =
    + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
    | hb_filter (glyphset, hb_first)
    | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
			      {
				return hb_pair (glyph_map[_.first],
						values_array.sub_array (_.second * sub_length,
									sub_length));
			      })
    ;

    bool ret = bool (it);
    SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  coverage.sanitize (c, this) &&
		  valueFormat.sanitize_values (c, this, values, valueCount));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 2 */
  Offset16To<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of subtable */
  ValueFormat	valueFormat;		/* Defines the types of data in the
					 * ValueRecord */
  HBUINT16	valueCount;		/* Number of ValueRecords */
  ValueRecord	values;			/* Array of ValueRecords--positioning
					 * values applied to glyphs */
  public:
  DEFINE_SIZE_ARRAY (8, values);
};

struct SinglePos
{
  template<typename Iterator,
	   hb_requires (hb_is_iterator (Iterator))>
  unsigned get_format (Iterator glyph_val_iter_pairs)
  {
    hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);

    for (const auto iter : glyph_val_iter_pairs)
      for (const auto _ : hb_zip (iter.second, first_val_iter))
	if (_.first != _.second)
	  return 2;

    return 1;
  }
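  /* Format 1 is chosen only when every glyph gets an identical value record;
   * otherwise format 2, which stores one record per covered glyph, is used. */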


  template<typename Iterator,
      typename SrcLookup,
      hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  const SrcLookup* src,
		  Iterator glyph_val_iter_pairs,
		  const hb_map_t *layout_variation_idx_map)
  {
    if (unlikely (!c->extend_min (u.format))) return;
    unsigned format = 2;
    ValueFormat new_format = src->get_value_format ();

    if (glyph_val_iter_pairs)
    {
      format = get_format (glyph_val_iter_pairs);
      new_format = src->get_value_format ().get_effective_format (+ glyph_val_iter_pairs | hb_map (hb_second));
    }

    u.format = format;
    switch (u.format) {
    case 1: u.format1.serialize (c,
                                 src,
                                 glyph_val_iter_pairs,
                                 new_format,
                                 layout_variation_idx_map);
      return;
    case 2: u.format2.serialize (c,
                                 src,
                                 glyph_val_iter_pairs,
                                 new_format,
                                 layout_variation_idx_map);
      return;
    default:return;
    }
  }

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  SinglePosFormat1	format1;
  SinglePosFormat2	format2;
  } u;
};

template<typename Iterator, typename SrcLookup>
static void
SinglePos_serialize (hb_serialize_context_t *c,
		     const SrcLookup *src,
		     Iterator it,
		     const hb_map_t *layout_variation_idx_map)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_map); }


struct PairValueRecord
{
  friend struct PairSet;

  int cmp (hb_codepoint_t k) const
  { return secondGlyph.cmp (k); }

  struct context_t
  {
    const void 		*base;
    const ValueFormat	*valueFormats;
    const ValueFormat	*newFormats;
    unsigned		len1; /* valueFormats[0].get_len() */
    const hb_map_t 	*glyph_map;
    const hb_map_t      *layout_variation_idx_map;
  };

  bool subset (hb_subset_context_t *c,
               context_t *closure) const
  {
    TRACE_SERIALIZE (this);
    auto *s = c->serializer;
    auto *out = s->start_embed (*this);
    if (unlikely (!s->extend_min (out))) return_trace (false);

    out->secondGlyph = (*closure->glyph_map)[secondGlyph];

    closure->valueFormats[0].copy_values (s,
                                          closure->newFormats[0],
                                          closure->base, &values[0],
                                          closure->layout_variation_idx_map);
    closure->valueFormats[1].copy_values (s,
                                          closure->newFormats[1],
                                          closure->base,
                                          &values[closure->len1],
                                          closure->layout_variation_idx_map);

    return_trace (true);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const ValueFormat *valueFormats,
				  const void *base) const
  {
    unsigned record1_len = valueFormats[0].get_len ();
    unsigned record2_len = valueFormats[1].get_len ();
    const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);

    if (valueFormats[0].has_device ())
      valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));

    if (valueFormats[1].has_device ())
      valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
  }

  bool intersects (const hb_set_t& glyphset) const
  {
    return glyphset.has(secondGlyph);
  }

  const Value* get_values_1 () const
  {
    return &values[0];
  }

  const Value* get_values_2 (ValueFormat format1) const
  {
    return &values[format1.get_len ()];
  }

  protected:
  HBGlyphID16	secondGlyph;		/* GlyphID of second glyph in the
					 * pair--first glyph is listed in the
					 * Coverage table */
  ValueRecord	values;			/* Positioning data for the first glyph,
					 * followed by that for the second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};

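/* A PairSet is a length-prefixed inline array of PairValueRecords.  Each record
 * is variable-sized: one 16-bit secondGlyph plus len1+len2 16-bit value words,
 * which is why the code below steps through it with StructAtOffset instead of
 * ordinary array indexing. */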
struct PairSet
{
  friend struct PairPosFormat1;

  bool intersects (const hb_set_t *glyphs,
		   const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (glyphs->has (record->secondGlyph))
	return true;
      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
    return false;
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c,
		       const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    c->input->add_array (&record->secondGlyph, len, record_size);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const ValueFormat *valueFormats) const
  {
    unsigned len1 = valueFormats[0].get_len ();
    unsigned len2 = valueFormats[1].get_len ();
    unsigned record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned count = len;
    for (unsigned i = 0; i < count; i++)
    {
      if (c->glyph_set->has (record->secondGlyph))
      { record->collect_variation_indices (c, valueFormats, this); }

      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
  }

  bool apply (hb_ot_apply_context_t *c,
	      const ValueFormat *valueFormats,
	      unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
						&firstPairValueRecord,
						len,
						record_size);
    if (record)
    {
      bool applied_first = valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos());
      bool applied_second = valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]);
      if (applied_first || applied_second)
	buffer->unsafe_to_break (buffer->idx, pos + 1);
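      /* If the second glyph carries its own value record (len2), it is consumed
       * here; otherwise it stays current so it can act as the first glyph of
       * the next pair. */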
1233       if (len2)
1234 	pos++;
1235       buffer->idx = pos;
1236       return_trace (true);
1237     }
1238     buffer->unsafe_to_concat (buffer->idx, pos + 1);
1239     return_trace (false);
1240   }
1241 
subsetOT::PairSet1242   bool subset (hb_subset_context_t *c,
1243 	       const ValueFormat valueFormats[2],
1244                const ValueFormat newFormats[2]) const
1245   {
1246     TRACE_SUBSET (this);
1247     auto snap = c->serializer->snapshot ();
1248 
1249     auto *out = c->serializer->start_embed (*this);
1250     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1251     out->len = 0;
1252 
1253     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1254     const hb_map_t &glyph_map = *c->plan->glyph_map;
1255 
1256     unsigned len1 = valueFormats[0].get_len ();
1257     unsigned len2 = valueFormats[1].get_len ();
1258     unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
1259 
1260     PairValueRecord::context_t context =
1261     {
1262       this,
1263       valueFormats,
1264       newFormats,
1265       len1,
1266       &glyph_map,
1267       c->plan->layout_variation_idx_map
1268     };
1269 
1270     const PairValueRecord *record = &firstPairValueRecord;
1271     unsigned count = len, num = 0;
1272     for (unsigned i = 0; i < count; i++)
1273     {
1274       if (glyphset.has (record->secondGlyph)
1275 	 && record->subset (c, &context)) num++;
1276       record = &StructAtOffset<const PairValueRecord> (record, record_size);
1277     }
1278 
1279     out->len = num;
1280     if (!num) c->serializer->revert (snap);
1281     return_trace (num);
1282   }
1283 
1284   struct sanitize_closure_t
1285   {
1286     const ValueFormat *valueFormats;
1287     unsigned int len1; /* valueFormats[0].get_len() */
1288     unsigned int stride; /* 1 + len1 + len2 */
1289   };
1290 
sanitizeOT::PairSet1291   bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
1292   {
1293     TRACE_SANITIZE (this);
1294     if (!(c->check_struct (this)
1295        && c->check_range (&firstPairValueRecord,
1296 			  len,
1297 			  HBUINT16::static_size,
1298 			  closure->stride))) return_trace (false);
1299 
1300     unsigned int count = len;
1301     const PairValueRecord *record = &firstPairValueRecord;
1302     return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
1303 		  closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
1304   }
1305 
1306   protected:
1307   HBUINT16		len;	/* Number of PairValueRecords */
1308   PairValueRecord	firstPairValueRecord;
1309 				/* Array of PairValueRecords--ordered
1310 				 * by GlyphID of the second glyph */
1311   public:
1312   DEFINE_SIZE_MIN (2);
1313 };
1314 
1315 struct PairPosFormat1
1316 {
intersectsOT::PairPosFormat11317   bool intersects (const hb_set_t *glyphs) const
1318   {
1319     return
1320     + hb_zip (this+coverage, pairSet)
1321     | hb_filter (*glyphs, hb_first)
1322     | hb_map (hb_second)
1323     | hb_map ([glyphs, this] (const Offset16To<PairSet> &_)
1324 	      { return (this+_).intersects (glyphs, valueFormat); })
1325     | hb_any
1326     ;
1327   }
1328 
closure_lookupsOT::PairPosFormat11329   void closure_lookups (hb_closure_lookups_context_t *c) const {}
collect_variation_indicesOT::PairPosFormat11330   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1331   {
1332     if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;
1333 
1334     auto it =
1335     + hb_zip (this+coverage, pairSet)
1336     | hb_filter (c->glyph_set, hb_first)
1337     | hb_map (hb_second)
1338     ;
1339 
1340     if (!it) return;
1341     + it
1342     | hb_map (hb_add (this))
1343     | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
1344     ;
1345   }
1346 
collect_glyphsOT::PairPosFormat11347   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1348   {
1349     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1350     unsigned int count = pairSet.len;
1351     for (unsigned int i = 0; i < count; i++)
1352       (this+pairSet[i]).collect_glyphs (c, valueFormat);
1353   }
1354 
get_coverageOT::PairPosFormat11355   const Coverage &get_coverage () const { return this+coverage; }
1356 
applyOT::PairPosFormat11357   bool apply (hb_ot_apply_context_t *c) const
1358   {
1359     TRACE_APPLY (this);
1360     hb_buffer_t *buffer = c->buffer;
1361     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1362     if (likely (index == NOT_COVERED)) return_trace (false);
1363 
1364     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1365     skippy_iter.reset (buffer->idx, 1);
1366     unsigned unsafe_to;
1367     if (!skippy_iter.next (&unsafe_to))
1368     {
1369       buffer->unsafe_to_concat (buffer->idx, unsafe_to);
1370       return_trace (false);
1371     }
1372 
1373     return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
1374   }
1375 
subsetOT::PairPosFormat11376   bool subset (hb_subset_context_t *c) const
1377   {
1378     TRACE_SUBSET (this);
1379 
1380     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1381     const hb_map_t &glyph_map = *c->plan->glyph_map;
1382 
1383     auto *out = c->serializer->start_embed (*this);
1384     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1385     out->format = format;
1386     out->valueFormat[0] = valueFormat[0];
1387     out->valueFormat[1] = valueFormat[1];
1388     if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
1389     {
1390       hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
1391       out->valueFormat[0] = newFormats.first;
1392       out->valueFormat[1] = newFormats.second;
1393     }
1394 
1395     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1396 
1397     + hb_zip (this+coverage, pairSet)
1398     | hb_filter (glyphset, hb_first)
1399     | hb_filter ([this, c, out] (const Offset16To<PairSet>& _)
1400 		 {
1401                    auto snap = c->serializer->snapshot ();
1402 		   auto *o = out->pairSet.serialize_append (c->serializer);
1403 		   if (unlikely (!o)) return false;
1404 		   bool ret = o->serialize_subset (c, _, this, valueFormat, out->valueFormat);
1405 		   if (!ret)
1406 		   {
1407 		     out->pairSet.pop ();
1408 		     c->serializer->revert (snap);
1409 		   }
1410 		   return ret;
1411 		 },
1412 		 hb_second)
1413     | hb_map (hb_first)
1414     | hb_map (glyph_map)
1415     | hb_sink (new_coverage)
1416     ;
1417 
1418     out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
1419 
1420     return_trace (bool (new_coverage));
1421   }
1422 
1423 
compute_effective_value_formatsOT::PairPosFormat11424   hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
1425   {
1426     unsigned len1 = valueFormat[0].get_len ();
1427     unsigned len2 = valueFormat[1].get_len ();
1428     unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
1429 
1430     unsigned format1 = 0;
1431     unsigned format2 = 0;
1432     for (const Offset16To<PairSet>& _ :
1433              + hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
1434     {
1435       const PairSet& set = (this + _);
1436       const PairValueRecord *record = &set.firstPairValueRecord;
1437 
1438       for (unsigned i = 0; i < set.len; i++)
1439       {
1440         if (record->intersects (glyphset))
1441         {
1442           format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
1443           format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
1444         }
1445         record = &StructAtOffset<const PairValueRecord> (record, record_size);
1446       }
1447     }
1448 
1449     return hb_pair (format1, format2);
1450   }
1451 
1452 
sanitizeOT::PairPosFormat11453   bool sanitize (hb_sanitize_context_t *c) const
1454   {
1455     TRACE_SANITIZE (this);
1456 
1457     if (!c->check_struct (this)) return_trace (false);
1458 
1459     unsigned int len1 = valueFormat[0].get_len ();
1460     unsigned int len2 = valueFormat[1].get_len ();
1461     PairSet::sanitize_closure_t closure =
1462     {
1463       valueFormat,
1464       len1,
1465       1 + len1 + len2
1466     };
1467 
1468     return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
1469   }
1470 
1471   protected:
1472   HBUINT16	format;			/* Format identifier--format = 1 */
1473   Offset16To<Coverage>
1474 		coverage;		/* Offset to Coverage table--from
1475 					 * beginning of subtable */
1476   ValueFormat	valueFormat[2];		/* [0] Defines the types of data in
1477 					 * ValueRecord1--for the first glyph
1478 					 * in the pair--may be zero (0) */
1479 					/* [1] Defines the types of data in
1480 					 * ValueRecord2--for the second glyph
1481 					 * in the pair--may be zero (0) */
1482   Array16OfOffset16To<PairSet>
1483 		pairSet;		/* Array of PairSet tables
1484 					 * ordered by Coverage Index */
1485   public:
1486   DEFINE_SIZE_ARRAY (10, pairSet);
1487 };
1488 
1489 struct PairPosFormat2
1490 {
intersectsOT::PairPosFormat21491   bool intersects (const hb_set_t *glyphs) const
1492   {
1493     return (this+coverage).intersects (glyphs) &&
1494 	   (this+classDef2).intersects (glyphs);
1495   }
1496 
closure_lookupsOT::PairPosFormat21497   void closure_lookups (hb_closure_lookups_context_t *c) const {}
collect_variation_indicesOT::PairPosFormat21498   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1499   {
1500     if (!intersects (c->glyph_set)) return;
1501     if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;
1502 
1503     hb_set_t klass1_glyphs, klass2_glyphs;
1504     if (!(this+classDef1).collect_coverage (&klass1_glyphs)) return;
1505     if (!(this+classDef2).collect_coverage (&klass2_glyphs)) return;
1506 
1507     hb_set_t class1_set, class2_set;
1508     for (const unsigned cp : + c->glyph_set->iter () | hb_filter (this + coverage))
1509     {
1510       if (!klass1_glyphs.has (cp)) class1_set.add (0);
1511       else
1512       {
1513         unsigned klass1 = (this+classDef1).get (cp);
1514         class1_set.add (klass1);
1515       }
1516     }
1517 
1518     class2_set.add (0);
1519     for (const unsigned cp : + c->glyph_set->iter () | hb_filter (klass2_glyphs))
1520     {
1521       unsigned klass2 = (this+classDef2).get (cp);
1522       class2_set.add (klass2);
1523     }
1524 
1525     if (class1_set.is_empty ()
1526         || class2_set.is_empty ()
1527         || (class2_set.get_population() == 1 && class2_set.has(0)))
1528       return;
1529 
1530     unsigned len1 = valueFormat1.get_len ();
1531     unsigned len2 = valueFormat2.get_len ();
1532     const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
1533     for (const unsigned class1_idx : class1_set.iter ())
1534     {
1535       for (const unsigned class2_idx : class2_set.iter ())
1536       {
1537 	unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1538 	if (valueFormat1.has_device ())
1539 	  valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));
1540 
1541 	if (valueFormat2.has_device ())
1542 	  valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
1543       }
1544     }
1545   }
1546 
1547   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1548   {
1549     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1550     if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
1551   }
1552 
1553   const Coverage &get_coverage () const { return this+coverage; }
1554 
1555   bool apply (hb_ot_apply_context_t *c) const
1556   {
1557     TRACE_APPLY (this);
1558     hb_buffer_t *buffer = c->buffer;
1559     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1560     if (likely (index == NOT_COVERED)) return_trace (false);
1561 
1562     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1563     skippy_iter.reset (buffer->idx, 1);
1564     unsigned unsafe_to;
1565     if (!skippy_iter.next (&unsafe_to))
1566     {
1567       buffer->unsafe_to_concat (buffer->idx, unsafe_to);
1568       return_trace (false);
1569     }
1570 
1571     unsigned int len1 = valueFormat1.get_len ();
1572     unsigned int len2 = valueFormat2.get_len ();
1573     unsigned int record_len = len1 + len2;
1574 
1575     unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
1576     unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
1577     if (unlikely (klass1 >= class1Count || klass2 >= class2Count))
1578     {
1579       buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
1580       return_trace (false);
1581     }
1582 
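    /* values is a class1-major matrix of (len1 + len2)-long records.  E.g. with
     * class2Count = 4 and len1 = len2 = 1, the record for (klass1 = 2, klass2 = 1)
     * starts at Value index 2 * (2 * 4 + 1) = 18. */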
1583     const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
1584 
1585     bool applied_first = false, applied_second = false;
1586 
1587 
1588     /* Isolate simple kerning and apply it half to each side.
1589      * This results in better cursor positioning / underline drawing.
1590      *
1591      * Disabled, because it causes issues... :-(
1592      * https://github.com/harfbuzz/harfbuzz/issues/3408
1593      * https://github.com/harfbuzz/harfbuzz/pull/3235#issuecomment-1029814978
1594      */
1595 #ifndef HB_SPLIT_KERN
1596     if (0)
1597 #endif
1598     {
1599       if (!len2)
1600       {
1601 	const hb_direction_t dir = buffer->props.direction;
1602 	const bool horizontal = HB_DIRECTION_IS_HORIZONTAL (dir);
1603 	const bool backward = HB_DIRECTION_IS_BACKWARD (dir);
1604 	unsigned mask = horizontal ? ValueFormat::xAdvance : ValueFormat::yAdvance;
1605 	if (backward)
1606 	  mask |= mask >> 2; /* Add e.g. xPlacement in RTL. */
1607 	/* Add Devices. */
1608 	mask |= mask << 4;
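	/* E.g. in horizontal text mask starts as xAdvance (0x0004); going backward
	 * the >>2 adds xPlacement (0x0001), and the <<4 then adds the matching Device
	 * flags (xAdvDevice 0x0040, plus xPlaDevice 0x0010 in the backward case). */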
1609 
1610 	if (valueFormat1 & ~mask)
1611 	  goto bail;
1612 
1613 	/* This is simple kerning.  Apply the value on an empty position slot,
1614 	 * then split it between the two sides. */
1615 
1616 	hb_glyph_position_t pos{};
1617 	if (valueFormat1.apply_value (c, this, v, pos))
1618 	{
1619 	  hb_position_t *src  = &pos.x_advance;
1620 	  hb_position_t *dst1 = &buffer->cur_pos().x_advance;
1621 	  hb_position_t *dst2 = &buffer->pos[skippy_iter.idx].x_advance;
1622 	  unsigned i = horizontal ? 0 : 1;
1623 
1624 	  hb_position_t kern  = src[i];
1625 	  hb_position_t kern1 = kern >> 1;
1626 	  hb_position_t kern2 = kern - kern1;
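	  /* The position is addressed as an array of hb_position_t starting at
	   * x_advance: index 0/1 pick the x/y advance and index 2/3 the matching
	   * offset, so dst[i] is the advance on the chosen axis and dst[i + 2] its
	   * offset.  E.g. kern = 5 splits into kern1 = 2 and kern2 = 3. */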
1627 
1628 	  if (!backward)
1629 	  {
1630 	    dst1[i] += kern1;
1631 	    dst2[i] += kern2;
1632 	    dst2[i + 2] += kern2;
1633 	  }
1634 	  else
1635 	  {
1636 	    dst1[i] += kern1;
1637 	    dst1[i + 2] += src[i + 2] - kern2;
1638 	    dst2[i] += kern2;
1639 	  }
1640 
1641 	  applied_first = applied_second = kern != 0;
1642 	  goto success;
1643 	}
1644 	goto boring;
1645       }
1646     }
1647     bail:
1648 
1649 
1650     applied_first = valueFormat1.apply_value (c, this, v, buffer->cur_pos());
1651     applied_second = valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]);
1652 
1653     success:
1654     if (applied_first || applied_second)
1655       buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
1656     else
1657     boring:
1658       buffer->unsafe_to_concat (buffer->idx, skippy_iter.idx + 1);
1659 
1660 
1661     buffer->idx = skippy_iter.idx;
1662     if (len2)
1663       buffer->idx++;
1664 
1665     return_trace (true);
1666   }
1667 
1668   bool subset (hb_subset_context_t *c) const
1669   {
1670     TRACE_SUBSET (this);
1671     auto *out = c->serializer->start_embed (*this);
1672     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1673     out->format = format;
1674 
1675     hb_map_t klass1_map;
1676     out->classDef1.serialize_subset (c, classDef1, this, &klass1_map, true, true, &(this + coverage));
1677     out->class1Count = klass1_map.get_population ();
1678 
1679     hb_map_t klass2_map;
1680     out->classDef2.serialize_subset (c, classDef2, this, &klass2_map, true, false);
1681     out->class2Count = klass2_map.get_population ();
1682 
1683     unsigned len1 = valueFormat1.get_len ();
1684     unsigned len2 = valueFormat2.get_len ();
1685 
1686     hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
1687     if (c->plan->flags & HB_SUBSET_FLAGS_NO_HINTING)
1688       newFormats = compute_effective_value_formats (klass1_map, klass2_map);
1689 
1690     out->valueFormat1 = newFormats.first;
1691     out->valueFormat2 = newFormats.second;
1692 
1693     for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
1694     {
1695       for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
1696       {
1697         unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1698         valueFormat1.copy_values (c->serializer, newFormats.first, this, &values[idx], c->plan->layout_variation_idx_map);
1699         valueFormat2.copy_values (c->serializer, newFormats.second, this, &values[idx + len1], c->plan->layout_variation_idx_map);
1700       }
1701     }
1702 
1703     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1704     const hb_map_t &glyph_map = *c->plan->glyph_map;
1705 
1706     auto it =
1707     + hb_iter (this+coverage)
1708     | hb_filter (glyphset)
1709     | hb_map_retains_sorting (glyph_map)
1710     ;
1711 
1712     out->coverage.serialize_serialize (c->serializer, it);
1713     return_trace (out->class1Count && out->class2Count && bool (it));
1714   }
1715 
1716 
1717   hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
1718                                                                  const hb_map_t& klass2_map) const
1719   {
1720     unsigned len1 = valueFormat1.get_len ();
1721     unsigned len2 = valueFormat2.get_len ();
1722 
1723     unsigned format1 = 0;
1724     unsigned format2 = 0;
1725 
1726     for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
1727     {
1728       for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
1729       {
1730         unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1731         format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
1732         format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
1733       }
1734     }
1735 
1736     return hb_pair (format1, format2);
1737   }
1738 
1739 
1740   bool sanitize (hb_sanitize_context_t *c) const
1741   {
1742     TRACE_SANITIZE (this);
1743     if (!(c->check_struct (this)
1744        && coverage.sanitize (c, this)
1745        && classDef1.sanitize (c, this)
1746        && classDef2.sanitize (c, this))) return_trace (false);
1747 
1748     unsigned int len1 = valueFormat1.get_len ();
1749     unsigned int len2 = valueFormat2.get_len ();
1750     unsigned int stride = len1 + len2;
1751     unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
1752     unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
1753     return_trace (c->check_range ((const void *) values,
1754 				  count,
1755 				  record_size) &&
1756 		  valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
1757 		  valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
1758   }
1759 
1760   protected:
1761   HBUINT16	format;			/* Format identifier--format = 2 */
1762   Offset16To<Coverage>
1763 		coverage;		/* Offset to Coverage table--from
1764 					 * beginning of subtable */
1765   ValueFormat	valueFormat1;		/* ValueRecord definition--for the
1766 					 * first glyph of the pair--may be zero
1767 					 * (0) */
1768   ValueFormat	valueFormat2;		/* ValueRecord definition--for the
1769 					 * second glyph of the pair--may be
1770 					 * zero (0) */
1771   Offset16To<ClassDef>
1772 		classDef1;		/* Offset to ClassDef table--from
1773 					 * beginning of PairPos subtable--for
1774 					 * the first glyph of the pair */
1775   Offset16To<ClassDef>
1776 		classDef2;		/* Offset to ClassDef table--from
1777 					 * beginning of PairPos subtable--for
1778 					 * the second glyph of the pair */
1779   HBUINT16	class1Count;		/* Number of classes in ClassDef1
1780 					 * table--includes Class0 */
1781   HBUINT16	class2Count;		/* Number of classes in ClassDef2
1782 					 * table--includes Class0 */
1783   ValueRecord	values;			/* Matrix of value pairs:
1784 					 * class1-major, class2-minor,
1785 					 * Each entry has value1 and value2 */
1786   public:
1787   DEFINE_SIZE_ARRAY (16, values);
1788 };
1789 
1790 struct PairPos
1791 {
1792   template <typename context_t, typename ...Ts>
1793   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1794   {
1795     TRACE_DISPATCH (this, u.format);
1796     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1797     switch (u.format) {
1798     case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
1799     case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
1800     default:return_trace (c->default_return_value ());
1801     }
1802   }
1803 
1804   protected:
1805   union {
1806   HBUINT16		format;		/* Format identifier */
1807   PairPosFormat1	format1;
1808   PairPosFormat2	format2;
1809   } u;
1810 };
1811 
1812 
1813 struct EntryExitRecord
1814 {
1815   friend struct CursivePosFormat1;
1816 
1817   bool sanitize (hb_sanitize_context_t *c, const void *base) const
1818   {
1819     TRACE_SANITIZE (this);
1820     return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
1821   }
1822 
1823   void collect_variation_indices (hb_collect_variation_indices_context_t *c,
1824 				  const void *src_base) const
1825   {
1826     (src_base+entryAnchor).collect_variation_indices (c);
1827     (src_base+exitAnchor).collect_variation_indices (c);
1828   }
1829 
1830   EntryExitRecord* subset (hb_subset_context_t *c,
1831                            const void *src_base) const
1832   {
1833     TRACE_SERIALIZE (this);
1834     auto *out = c->serializer->embed (this);
1835     if (unlikely (!out)) return_trace (nullptr);
1836 
1837     out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
1838     out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
1839     return_trace (out);
1840   }
1841 
1842   protected:
1843   Offset16To<Anchor>
1844 		entryAnchor;		/* Offset to EntryAnchor table--from
1845 					 * beginning of CursivePos
1846 					 * subtable--may be NULL */
1847   Offset16To<Anchor>
1848 		exitAnchor;		/* Offset to ExitAnchor table--from
1849 					 * beginning of CursivePos
1850 					 * subtable--may be NULL */
1851   public:
1852   DEFINE_SIZE_STATIC (4);
1853 };
1854 
1855 static void
1856 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
1857 
1858 struct CursivePosFormat1
1859 {
1860   bool intersects (const hb_set_t *glyphs) const
1861   { return (this+coverage).intersects (glyphs); }
1862 
1863   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1864 
1865   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
1866   {
1867     + hb_zip (this+coverage, entryExitRecord)
1868     | hb_filter (c->glyph_set, hb_first)
1869     | hb_map (hb_second)
1870     | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
1871     ;
1872   }
1873 
1874   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1875   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
1876 
1877   const Coverage &get_coverage () const { return this+coverage; }
1878 
1879   bool apply (hb_ot_apply_context_t *c) const
1880   {
1881     TRACE_APPLY (this);
1882     hb_buffer_t *buffer = c->buffer;
1883 
1884     const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage  (buffer->cur().codepoint)];
1885     if (!this_record.entryAnchor) return_trace (false);
1886 
1887     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1888     skippy_iter.reset (buffer->idx, 1);
1889     unsigned unsafe_from;
1890     if (!skippy_iter.prev (&unsafe_from))
1891     {
1892       buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
1893       return_trace (false);
1894     }
1895 
1896     const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint)];
1897     if (!prev_record.exitAnchor)
1898     {
1899       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
1900       return_trace (false);
1901     }
1902 
1903     unsigned int i = skippy_iter.idx;
1904     unsigned int j = buffer->idx;
1905 
1906     buffer->unsafe_to_break (i, j);
1907     float entry_x, entry_y, exit_x, exit_y;
1908     (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
1909     (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
1910 
1911     hb_glyph_position_t *pos = buffer->pos;
1912 
1913     hb_position_t d;
1914     /* Main-direction adjustment */
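    /* Roughly: in LTR the exit glyph's advance is cut at its exit anchor, and the
     * entering glyph's entry-anchor coordinate is removed from both its offset and
     * advance, so the pen travels exactly from exit point to entry point.  The
     * RTL/TTB/BTT cases mirror this on the other glyph or axis. */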
1915     switch (c->direction) {
1916       case HB_DIRECTION_LTR:
1917 	pos[i].x_advance  = roundf (exit_x) + pos[i].x_offset;
1918 
1919 	d = roundf (entry_x) + pos[j].x_offset;
1920 	pos[j].x_advance -= d;
1921 	pos[j].x_offset  -= d;
1922 	break;
1923       case HB_DIRECTION_RTL:
1924 	d = roundf (exit_x) + pos[i].x_offset;
1925 	pos[i].x_advance -= d;
1926 	pos[i].x_offset  -= d;
1927 
1928 	pos[j].x_advance  = roundf (entry_x) + pos[j].x_offset;
1929 	break;
1930       case HB_DIRECTION_TTB:
1931 	pos[i].y_advance  = roundf (exit_y) + pos[i].y_offset;
1932 
1933 	d = roundf (entry_y) + pos[j].y_offset;
1934 	pos[j].y_advance -= d;
1935 	pos[j].y_offset  -= d;
1936 	break;
1937       case HB_DIRECTION_BTT:
1938 	d = roundf (exit_y) + pos[i].y_offset;
1939 	pos[i].y_advance -= d;
1940 	pos[i].y_offset  -= d;
1941 
1942 	pos[j].y_advance  = roundf (entry_y);
1943 	break;
1944       case HB_DIRECTION_INVALID:
1945       default:
1946 	break;
1947     }
1948 
1949     /* Cross-direction adjustment */
1950 
1951     /* We attach child to parent (think graph theory and rooted trees, where
1952      * the root stays on the baseline and each node aligns itself against its
1953      * parent).
1954      *
1955      * Optimize things for the case of RightToLeft, as that's most common in
1956      * Arabic. */
1957     unsigned int child  = i;
1958     unsigned int parent = j;
1959     hb_position_t x_offset = entry_x - exit_x;
1960     hb_position_t y_offset = entry_y - exit_y;
1961     if  (!(c->lookup_props & LookupFlag::RightToLeft))
1962     {
1963       unsigned int k = child;
1964       child = parent;
1965       parent = k;
1966       x_offset = -x_offset;
1967       y_offset = -y_offset;
1968     }
1969 
1970     /* If the child was already connected to someone else, walk through its old
1971      * chain and reverse the link direction, so that the whole tree of its
1972      * previous connection now attaches to the new parent.  Watch out for the
1973      * case where the new parent is already on the path of the old chain...
1974      */
1975     reverse_cursive_minor_offset (pos, child, c->direction, parent);
1976 
1977     pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
1978     pos[child].attach_chain() = (int) parent - (int) child;
1979     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
1980     if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
1981       pos[child].y_offset = y_offset;
1982     else
1983       pos[child].x_offset = x_offset;
1984 
1985     /* If parent was attached to child, separate them.
1986      * https://github.com/harfbuzz/harfbuzz/issues/2469
1987      */
1988     if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
1989       pos[parent].attach_chain() = 0;
1990 
1991     buffer->idx++;
1992     return_trace (true);
1993   }
1994 
1995   template <typename Iterator,
1996 	    hb_requires (hb_is_iterator (Iterator))>
1997   void serialize (hb_subset_context_t *c,
1998 		  Iterator it,
1999 		  const void *src_base)
2000   {
2001     if (unlikely (!c->serializer->extend_min ((*this)))) return;
2002     this->format = 1;
2003     this->entryExitRecord.len = it.len ();
2004 
2005     for (const EntryExitRecord& entry_record : + it
2006 					       | hb_map (hb_second))
2007       entry_record.subset (c, src_base);
2008 
2009     auto glyphs =
2010     + it
2011     | hb_map_retains_sorting (hb_first)
2012     ;
2013 
2014     coverage.serialize_serialize (c->serializer, glyphs);
2015   }
2016 
2017   bool subset (hb_subset_context_t *c) const
2018   {
2019     TRACE_SUBSET (this);
2020     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2021     const hb_map_t &glyph_map = *c->plan->glyph_map;
2022 
2023     auto *out = c->serializer->start_embed (*this);
2024     if (unlikely (!out)) return_trace (false);
2025 
2026     auto it =
2027     + hb_zip (this+coverage, entryExitRecord)
2028     | hb_filter (glyphset, hb_first)
2029     | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
2030 			      { return hb_pair (glyph_map[p.first], p.second);})
2031     ;
2032 
2033     bool ret = bool (it);
2034     out->serialize (c, it, this);
2035     return_trace (ret);
2036   }
2037 
2038   bool sanitize (hb_sanitize_context_t *c) const
2039   {
2040     TRACE_SANITIZE (this);
2041     return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
2042   }
2043 
2044   protected:
2045   HBUINT16	format;			/* Format identifier--format = 1 */
2046   Offset16To<Coverage>
2047 		coverage;		/* Offset to Coverage table--from
2048 					 * beginning of subtable */
2049   Array16Of<EntryExitRecord>
2050 		entryExitRecord;	/* Array of EntryExit records--in
2051 					 * Coverage Index order */
2052   public:
2053   DEFINE_SIZE_ARRAY (6, entryExitRecord);
2054 };
2055 
2056 struct CursivePos
2057 {
2058   template <typename context_t, typename ...Ts>
2059   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2060   {
2061     TRACE_DISPATCH (this, u.format);
2062     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2063     switch (u.format) {
2064     case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2065     default:return_trace (c->default_return_value ());
2066     }
2067   }
2068 
2069   protected:
2070   union {
2071   HBUINT16		format;		/* Format identifier */
2072   CursivePosFormat1	format1;
2073   } u;
2074 };
2075 
2076 
2077 typedef AnchorMatrix BaseArray;		/* base-major--
2078 					 * in order of BaseCoverage Index--,
2079 					 * mark-minor--
2080 					 * ordered by class--zero-based. */
2081 
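/* Collect the mark classes referenced by marks that survive in glyphset and
 * renumber them densely, in increasing order.  E.g. if only classes {0, 3, 7}
 * remain, they are remapped to {0:0, 3:1, 7:2}. */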
2082 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
2083 						 const MarkArray &mark_array,
2084 						 const hb_set_t  &glyphset,
2085 						 hb_map_t*        klass_mapping /* INOUT */)
2086 {
2087   hb_set_t orig_classes;
2088 
2089   + hb_zip (mark_coverage, mark_array)
2090   | hb_filter (glyphset, hb_first)
2091   | hb_map (hb_second)
2092   | hb_map (&MarkRecord::get_class)
2093   | hb_sink (orig_classes)
2094   ;
2095 
2096   unsigned idx = 0;
2097   for (auto klass : orig_classes.iter ())
2098   {
2099     if (klass_mapping->has (klass)) continue;
2100     klass_mapping->set (klass, idx);
2101     idx++;
2102   }
2103 }
2104 
2105 struct MarkBasePosFormat1
2106 {
2107   bool intersects (const hb_set_t *glyphs) const
2108   {
2109     return (this+markCoverage).intersects (glyphs) &&
2110 	   (this+baseCoverage).intersects (glyphs);
2111   }
2112 
2113   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2114 
2115   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2116   {
2117     + hb_zip (this+markCoverage, this+markArray)
2118     | hb_filter (c->glyph_set, hb_first)
2119     | hb_map (hb_second)
2120     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
2121     ;
2122 
2123     hb_map_t klass_mapping;
2124     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
2125 
2126     unsigned basecount = (this+baseArray).rows;
2127     auto base_iter =
2128     + hb_zip (this+baseCoverage, hb_range (basecount))
2129     | hb_filter (c->glyph_set, hb_first)
2130     | hb_map (hb_second)
2131     ;
2132 
2133     hb_sorted_vector_t<unsigned> base_indexes;
2134     for (const unsigned row : base_iter)
2135     {
2136       + hb_range ((unsigned) classCount)
2137       | hb_filter (klass_mapping)
2138       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2139       | hb_sink (base_indexes)
2140       ;
2141     }
2142     (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
2143   }
2144 
2145   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2146   {
2147     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
2148     if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
2149   }
2150 
2151   const Coverage &get_coverage () const { return this+markCoverage; }
2152 
2153   bool apply (hb_ot_apply_context_t *c) const
2154   {
2155     TRACE_APPLY (this);
2156     hb_buffer_t *buffer = c->buffer;
2157     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
2158     if (likely (mark_index == NOT_COVERED)) return_trace (false);
2159 
2160     /* Now we search backwards for a non-mark glyph */
2161     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2162     skippy_iter.reset (buffer->idx, 1);
2163     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
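    /* Roughly: keep rejecting the candidate while it is a non-initial component of
     * a MultipleSubst sequence whose immediately preceding glyph is the previous
     * component of that same sequence and is not a mark; anything else is accepted
     * as the attachment target. */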
2164     do {
2165       unsigned unsafe_from;
2166       if (!skippy_iter.prev (&unsafe_from))
2167       {
2168 	buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
2169 	return_trace (false);
2170       }
2171 
2172       /* We only want to attach to the first of a MultipleSubst sequence.
2173        * https://github.com/harfbuzz/harfbuzz/issues/740
2174        * Reject others...
2175        * ...but stop if we find a mark in the MultipleSubst sequence:
2176        * https://github.com/harfbuzz/harfbuzz/issues/1020 */
2177       if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
2178 	  0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
2179 	  (skippy_iter.idx == 0 ||
2180 	   _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
2181 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
2182 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
2183 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
2184 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
2185 	   ))
2186 	break;
2187       skippy_iter.reject ();
2188     } while (true);
2189 
2190     /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
2191     //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2192 
2193     unsigned int base_index = (this+baseCoverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint);
2194     if (base_index == NOT_COVERED)
2195     {
2196       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2197       return_trace (false);
2198     }
2199 
2200     return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
2201   }
2202 
2203   bool subset (hb_subset_context_t *c) const
2204   {
2205     TRACE_SUBSET (this);
2206     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2207     const hb_map_t &glyph_map = *c->plan->glyph_map;
2208 
2209     auto *out = c->serializer->start_embed (*this);
2210     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2211     out->format = format;
2212 
2213     hb_map_t klass_mapping;
2214     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
2215 
2216     if (!klass_mapping.get_population ()) return_trace (false);
2217     out->classCount = klass_mapping.get_population ();
2218 
2219     auto mark_iter =
2220     + hb_zip (this+markCoverage, this+markArray)
2221     | hb_filter (glyphset, hb_first)
2222     ;
2223 
2224     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2225     + mark_iter
2226     | hb_map (hb_first)
2227     | hb_map (glyph_map)
2228     | hb_sink (new_coverage)
2229     ;
2230 
2231     if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
2232       return_trace (false);
2233 
2234     out->markArray.serialize_subset (c, markArray, this,
2235                                      (this+markCoverage).iter (),
2236                                      &klass_mapping);
2237 
2238     unsigned basecount = (this+baseArray).rows;
2239     auto base_iter =
2240     + hb_zip (this+baseCoverage, hb_range (basecount))
2241     | hb_filter (glyphset, hb_first)
2242     ;
2243 
2244     new_coverage.reset ();
2245     + base_iter
2246     | hb_map (hb_first)
2247     | hb_map (glyph_map)
2248     | hb_sink (new_coverage)
2249     ;
2250 
2251     if (!out->baseCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
2252       return_trace (false);
2253 
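    /* For each retained base row, keep only the AnchorMatrix cells of retained
     * mark classes: cell index = row * classCount + col, with col drawn from
     * klass_mapping. */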
2254     hb_sorted_vector_t<unsigned> base_indexes;
2255     for (const unsigned row : + base_iter
2256 			      | hb_map (hb_second))
2257     {
2258       + hb_range ((unsigned) classCount)
2259       | hb_filter (klass_mapping)
2260       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2261       | hb_sink (base_indexes)
2262       ;
2263     }
2264 
2265     out->baseArray.serialize_subset (c, baseArray, this,
2266                                      base_iter.len (),
2267                                      base_indexes.iter ());
2268 
2269     return_trace (true);
2270   }
2271 
2272   bool sanitize (hb_sanitize_context_t *c) const
2273   {
2274     TRACE_SANITIZE (this);
2275     return_trace (c->check_struct (this) &&
2276 		  markCoverage.sanitize (c, this) &&
2277 		  baseCoverage.sanitize (c, this) &&
2278 		  markArray.sanitize (c, this) &&
2279 		  baseArray.sanitize (c, this, (unsigned int) classCount));
2280   }
2281 
2282   protected:
2283   HBUINT16	format;			/* Format identifier--format = 1 */
2284   Offset16To<Coverage>
2285 		markCoverage;		/* Offset to MarkCoverage table--from
2286 					 * beginning of MarkBasePos subtable */
2287   Offset16To<Coverage>
2288 		baseCoverage;		/* Offset to BaseCoverage table--from
2289 					 * beginning of MarkBasePos subtable */
2290   HBUINT16	classCount;		/* Number of classes defined for marks */
2291   Offset16To<MarkArray>
2292 		markArray;		/* Offset to MarkArray table--from
2293 					 * beginning of MarkBasePos subtable */
2294   Offset16To<BaseArray>
2295 		baseArray;		/* Offset to BaseArray table--from
2296 					 * beginning of MarkBasePos subtable */
2297   public:
2298   DEFINE_SIZE_STATIC (12);
2299 };
2300 
2301 struct MarkBasePos
2302 {
2303   template <typename context_t, typename ...Ts>
2304   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2305   {
2306     TRACE_DISPATCH (this, u.format);
2307     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2308     switch (u.format) {
2309     case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2310     default:return_trace (c->default_return_value ());
2311     }
2312   }
2313 
2314   protected:
2315   union {
2316   HBUINT16		format;		/* Format identifier */
2317   MarkBasePosFormat1	format1;
2318   } u;
2319 };
2320 
2321 
2322 typedef AnchorMatrix LigatureAttach;	/* component-major--
2323 					 * in order of writing direction--,
2324 					 * mark-minor--
2325 					 * ordered by class--zero-based. */
2326 
2327 /* Array of LigatureAttach tables ordered by LigatureCoverage Index */
2328 struct LigatureArray : List16OfOffset16To<LigatureAttach>
2329 {
2330   template <typename Iterator,
2331 	    hb_requires (hb_is_iterator (Iterator))>
2332   bool subset (hb_subset_context_t *c,
2333                Iterator		    coverage,
2334 	       unsigned		    class_count,
2335 	       const hb_map_t	   *klass_mapping) const
2336   {
2337     TRACE_SUBSET (this);
2338     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2339 
2340     auto *out = c->serializer->start_embed (this);
2341     if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);
2342 
2343     for (const auto _ : + hb_zip (coverage, *this)
2344 		  | hb_filter (glyphset, hb_first))
2345     {
2346       auto *matrix = out->serialize_append (c->serializer);
2347       if (unlikely (!matrix)) return_trace (false);
2348 
2349       const LigatureAttach& src = (this + _.second);
2350       auto indexes =
2351           + hb_range (src.rows * class_count)
2352           | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
2353           ;
2354       matrix->serialize_subset (c,
2355 				_.second,
2356 				this,
2357                                 src.rows,
2358                                 indexes);
2359     }
2360     return_trace (this->len);
2361   }
2362 };
2363 
2364 struct MarkLigPosFormat1
2365 {
2366   bool intersects (const hb_set_t *glyphs) const
2367   {
2368     return (this+markCoverage).intersects (glyphs) &&
2369 	   (this+ligatureCoverage).intersects (glyphs);
2370   }
2371 
2372   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2373 
2374   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2375   {
2376     + hb_zip (this+markCoverage, this+markArray)
2377     | hb_filter (c->glyph_set, hb_first)
2378     | hb_map (hb_second)
2379     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
2380     ;
2381 
2382     hb_map_t klass_mapping;
2383     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);
2384 
2385     unsigned ligcount = (this+ligatureArray).len;
2386     auto lig_iter =
2387     + hb_zip (this+ligatureCoverage, hb_range (ligcount))
2388     | hb_filter (c->glyph_set, hb_first)
2389     | hb_map (hb_second)
2390     ;
2391 
2392     const LigatureArray& lig_array = this+ligatureArray;
2393     for (const unsigned i : lig_iter)
2394     {
2395       hb_sorted_vector_t<unsigned> lig_indexes;
2396       unsigned row_count = lig_array[i].rows;
2397       for (unsigned row : + hb_range (row_count))
2398       {
2399 	+ hb_range ((unsigned) classCount)
2400 	| hb_filter (klass_mapping)
2401 	| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2402 	| hb_sink (lig_indexes)
2403 	;
2404       }
2405 
2406       lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
2407     }
2408   }
2409 
2410   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2411   {
2412     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
2413     if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
2414   }
2415 
2416   const Coverage &get_coverage () const { return this+markCoverage; }
2417 
2418   bool apply (hb_ot_apply_context_t *c) const
2419   {
2420     TRACE_APPLY (this);
2421     hb_buffer_t *buffer = c->buffer;
2422     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
2423     if (likely (mark_index == NOT_COVERED)) return_trace (false);
2424 
2425     /* Now we search backwards for a non-mark glyph */
2426     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2427     skippy_iter.reset (buffer->idx, 1);
2428     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
2429     unsigned unsafe_from;
2430     if (!skippy_iter.prev (&unsafe_from))
2431     {
2432       buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
2433       return_trace (false);
2434     }
2435 
2436     /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
2437     //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
2438 
2439     unsigned int j = skippy_iter.idx;
2440     unsigned int lig_index = (this+ligatureCoverage).get_coverage  (buffer->info[j].codepoint);
2441     if (lig_index == NOT_COVERED)
2442     {
2443       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2444       return_trace (false);
2445     }
2446 
2447     const LigatureArray& lig_array = this+ligatureArray;
2448     const LigatureAttach& lig_attach = lig_array[lig_index];
2449 
2450     /* Find component to attach to */
2451     unsigned int comp_count = lig_attach.rows;
2452     if (unlikely (!comp_count))
2453     {
2454       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2455       return_trace (false);
2456     }
2457 
2458     /* We must now check whether the ligature ID of the current mark glyph
2459      * is identical to the ligature ID of the found ligature.  If yes, we
2460      * can directly use the component index.  If not, we attach the mark
2461      * glyph to the last component of the ligature. */
2462     unsigned int comp_index;
2463     unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2464     unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
2465     unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
2466     if (lig_id && lig_id == mark_id && mark_comp > 0)
2467       comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
2468     else
2469       comp_index = comp_count - 1;
2470 
2471     return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
2472   }
2473 
2474   bool subset (hb_subset_context_t *c) const
2475   {
2476     TRACE_SUBSET (this);
2477     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2478     const hb_map_t &glyph_map = *c->plan->glyph_map;
2479 
2480     auto *out = c->serializer->start_embed (*this);
2481     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2482     out->format = format;
2483 
2484     hb_map_t klass_mapping;
2485     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
2486 
2487     if (!klass_mapping.get_population ()) return_trace (false);
2488     out->classCount = klass_mapping.get_population ();
2489 
2490     auto mark_iter =
2491     + hb_zip (this+markCoverage, this+markArray)
2492     | hb_filter (glyphset, hb_first)
2493     ;
2494 
2495     auto new_mark_coverage =
2496     + mark_iter
2497     | hb_map_retains_sorting (hb_first)
2498     | hb_map_retains_sorting (glyph_map)
2499     ;
2500 
2501     if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage))
2502       return_trace (false);
2503 
2504     out->markArray.serialize_subset (c, markArray, this,
2505                                      (this+markCoverage).iter (),
2506                                      &klass_mapping);
2507 
2508     auto new_ligature_coverage =
2509     + hb_iter (this + ligatureCoverage)
2510     | hb_filter (glyphset)
2511     | hb_map_retains_sorting (glyph_map)
2512     ;
2513 
2514     if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage))
2515       return_trace (false);
2516 
2517     out->ligatureArray.serialize_subset (c, ligatureArray, this,
2518                                          hb_iter (this+ligatureCoverage), classCount, &klass_mapping);
2519 
2520     return_trace (true);
2521   }
2522 
2523   bool sanitize (hb_sanitize_context_t *c) const
2524   {
2525     TRACE_SANITIZE (this);
2526     return_trace (c->check_struct (this) &&
2527 		  markCoverage.sanitize (c, this) &&
2528 		  ligatureCoverage.sanitize (c, this) &&
2529 		  markArray.sanitize (c, this) &&
2530 		  ligatureArray.sanitize (c, this, (unsigned int) classCount));
2531   }
2532 
2533   protected:
2534   HBUINT16	format;			/* Format identifier--format = 1 */
2535   Offset16To<Coverage>
2536 		markCoverage;		/* Offset to Mark Coverage table--from
2537 					 * beginning of MarkLigPos subtable */
2538   Offset16To<Coverage>
2539 		ligatureCoverage;	/* Offset to Ligature Coverage
2540 					 * table--from beginning of MarkLigPos
2541 					 * subtable */
2542   HBUINT16	classCount;		/* Number of defined mark classes */
2543   Offset16To<MarkArray>
2544 		markArray;		/* Offset to MarkArray table--from
2545 					 * beginning of MarkLigPos subtable */
2546   Offset16To<LigatureArray>
2547 		ligatureArray;		/* Offset to LigatureArray table--from
2548 					 * beginning of MarkLigPos subtable */
2549   public:
2550   DEFINE_SIZE_STATIC (12);
2551 };
2552 
2553 
2554 struct MarkLigPos
2555 {
2556   template <typename context_t, typename ...Ts>
2557   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2558   {
2559     TRACE_DISPATCH (this, u.format);
2560     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2561     switch (u.format) {
2562     case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2563     default:return_trace (c->default_return_value ());
2564     }
2565   }
2566 
2567   protected:
2568   union {
2569   HBUINT16		format;		/* Format identifier */
2570   MarkLigPosFormat1	format1;
2571   } u;
2572 };
2573 
2574 
2575 typedef AnchorMatrix Mark2Array;	/* mark2-major--
2576 					 * in order of Mark2Coverage Index--,
2577 					 * mark1-minor--
2578 					 * ordered by class--zero-based. */
2579 
2580 struct MarkMarkPosFormat1
2581 {
2582   bool intersects (const hb_set_t *glyphs) const
2583   {
2584     return (this+mark1Coverage).intersects (glyphs) &&
2585 	   (this+mark2Coverage).intersects (glyphs);
2586   }
2587 
2588   void closure_lookups (hb_closure_lookups_context_t *c) const {}
2589 
2590   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2591   {
2592     + hb_zip (this+mark1Coverage, this+mark1Array)
2593     | hb_filter (c->glyph_set, hb_first)
2594     | hb_map (hb_second)
2595     | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
2596     ;
2597 
2598     hb_map_t klass_mapping;
2599     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);
2600 
2601     unsigned mark2_count = (this+mark2Array).rows;
2602     auto mark2_iter =
2603     + hb_zip (this+mark2Coverage, hb_range (mark2_count))
2604     | hb_filter (c->glyph_set, hb_first)
2605     | hb_map (hb_second)
2606     ;
2607 
2608     hb_sorted_vector_t<unsigned> mark2_indexes;
2609     for (const unsigned row : mark2_iter)
2610     {
2611       + hb_range ((unsigned) classCount)
2612       | hb_filter (klass_mapping)
2613       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2614       | hb_sink (mark2_indexes)
2615       ;
2616     }
2617     (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
2618   }
2619 
2620   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2621   {
2622     if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
2623     if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
2624   }
2625 
2626   const Coverage &get_coverage () const { return this+mark1Coverage; }
2627 
2628   bool apply (hb_ot_apply_context_t *c) const
2629   {
2630     TRACE_APPLY (this);
2631     hb_buffer_t *buffer = c->buffer;
2632     unsigned int mark1_index = (this+mark1Coverage).get_coverage  (buffer->cur().codepoint);
2633     if (likely (mark1_index == NOT_COVERED)) return_trace (false);
2634 
2635     /* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph */
2636     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
2637     skippy_iter.reset (buffer->idx, 1);
2638     skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
2639     unsigned unsafe_from;
2640     if (!skippy_iter.prev (&unsafe_from))
2641     {
2642       buffer->unsafe_to_concat_from_outbuffer (unsafe_from, buffer->idx + 1);
2643       return_trace (false);
2644     }
2645 
2646     if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]))
2647     {
2648       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2649       return_trace (false);
2650     }
2651 
2652     unsigned int j = skippy_iter.idx;
2653 
2654     unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
2655     unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
2656     unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
2657     unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
2658 
2659     if (likely (id1 == id2))
2660     {
2661       if (id1 == 0) /* Marks belonging to the same base. */
2662 	goto good;
2663       else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
2664 	goto good;
2665     }
2666     else
2667     {
2668       /* If the ligature ids don't match, it may be that one of the marks is
2669        * itself a ligature, in which case we match. */
2670       if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
2671 	goto good;
2672     }
2673 
2674     /* Didn't match. */
2675     buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2676     return_trace (false);
2677 
2678     good:
2679     unsigned int mark2_index = (this+mark2Coverage).get_coverage  (buffer->info[j].codepoint);
2680     if (mark2_index == NOT_COVERED)
2681     {
2682       buffer->unsafe_to_concat_from_outbuffer (skippy_iter.idx, buffer->idx + 1);
2683       return_trace (false);
2684     }
2685 
2686     return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
2687   }
2688 
2689   bool subset (hb_subset_context_t *c) const
2690   {
2691     TRACE_SUBSET (this);
2692     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2693     const hb_map_t &glyph_map = *c->plan->glyph_map;
2694 
2695     auto *out = c->serializer->start_embed (*this);
2696     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2697     out->format = format;
2698 
2699     hb_map_t klass_mapping;
2700     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
2701 
2702     if (!klass_mapping.get_population ()) return_trace (false);
2703     out->classCount = klass_mapping.get_population ();
2704 
2705     auto mark1_iter =
2706     + hb_zip (this+mark1Coverage, this+mark1Array)
2707     | hb_filter (glyphset, hb_first)
2708     ;
2709 
2710     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2711     + mark1_iter
2712     | hb_map (hb_first)
2713     | hb_map (glyph_map)
2714     | hb_sink (new_coverage)
2715     ;
2716 
2717     if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
2718       return_trace (false);
2719 
2720     out->mark1Array.serialize_subset (c, mark1Array, this,
2721                                       (this+mark1Coverage).iter (),
2722                                       &klass_mapping);
2723 
2724     unsigned mark2count = (this+mark2Array).rows;
2725     auto mark2_iter =
2726     + hb_zip (this+mark2Coverage, hb_range (mark2count))
2727     | hb_filter (glyphset, hb_first)
2728     ;
2729 
2730     new_coverage.reset ();
2731     + mark2_iter
2732     | hb_map (hb_first)
2733     | hb_map (glyph_map)
2734     | hb_sink (new_coverage)
2735     ;
2736 
2737     if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
2738       return_trace (false);
2739 
2740     hb_sorted_vector_t<unsigned> mark2_indexes;
2741     for (const unsigned row : + mark2_iter
2742 			      | hb_map (hb_second))
2743     {
2744       + hb_range ((unsigned) classCount)
2745       | hb_filter (klass_mapping)
2746       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2747       | hb_sink (mark2_indexes)
2748       ;
2749     }
2750 
2751     out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());
2752 
2753     return_trace (true);
2754   }
2755 
2756   bool sanitize (hb_sanitize_context_t *c) const
2757   {
2758     TRACE_SANITIZE (this);
2759     return_trace (c->check_struct (this) &&
2760 		  mark1Coverage.sanitize (c, this) &&
2761 		  mark2Coverage.sanitize (c, this) &&
2762 		  mark1Array.sanitize (c, this) &&
2763 		  mark2Array.sanitize (c, this, (unsigned int) classCount));
2764   }
2765 
2766   protected:
2767   HBUINT16	format;			/* Format identifier--format = 1 */
2768   Offset16To<Coverage>
2769 		mark1Coverage;		/* Offset to Combining Mark1 Coverage
2770 					 * table--from beginning of MarkMarkPos
2771 					 * subtable */
2772   Offset16To<Coverage>
2773 		mark2Coverage;		/* Offset to Combining Mark2 Coverage
2774 					 * table--from beginning of MarkMarkPos
2775 					 * subtable */
2776   HBUINT16	classCount;		/* Number of defined mark classes */
2777   Offset16To<MarkArray>
2778 		mark1Array;		/* Offset to Mark1Array table--from
2779 					 * beginning of MarkMarkPos subtable */
2780   Offset16To<Mark2Array>
2781 		mark2Array;		/* Offset to Mark2Array table--from
2782 					 * beginning of MarkMarkPos subtable */
2783   public:
2784   DEFINE_SIZE_STATIC (12);
2785 };
2786 
2787 struct MarkMarkPos
2788 {
2789   template <typename context_t, typename ...Ts>
2790   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2791   {
2792     TRACE_DISPATCH (this, u.format);
2793     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2794     switch (u.format) {
2795     case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
2796     default:return_trace (c->default_return_value ());
2797     }
2798   }
2799 
2800   protected:
2801   union {
2802   HBUINT16		format;		/* Format identifier */
2803   MarkMarkPosFormat1	format1;
2804   } u;
2805 };
2806 
2807 
2808 struct ContextPos : Context {};
2809 
2810 struct ChainContextPos : ChainContext {};
2811 
2812 struct ExtensionPos : Extension<ExtensionPos>
2813 {
2814   typedef struct PosLookupSubTable SubTable;
2815 };
2816 
2817 
2818 
2819 /*
2820  * PosLookup
2821  */
2822 
2823 
2824 struct PosLookupSubTable
2825 {
2826   friend struct Lookup;
2827   friend struct PosLookup;
2828 
2829   enum Type {
2830     Single		= 1,
2831     Pair		= 2,
2832     Cursive		= 3,
2833     MarkBase		= 4,
2834     MarkLig		= 5,
2835     MarkMark		= 6,
2836     Context		= 7,
2837     ChainContext	= 8,
2838     Extension		= 9
2839   };
2840 
2841   template <typename context_t, typename ...Ts>
2842   typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
2843   {
2844     TRACE_DISPATCH (this, lookup_type);
2845     switch (lookup_type) {
2846     case Single:		return_trace (u.single.dispatch (c, std::forward<Ts> (ds)...));
2847     case Pair:			return_trace (u.pair.dispatch (c, std::forward<Ts> (ds)...));
2848     case Cursive:		return_trace (u.cursive.dispatch (c, std::forward<Ts> (ds)...));
2849     case MarkBase:		return_trace (u.markBase.dispatch (c, std::forward<Ts> (ds)...));
2850     case MarkLig:		return_trace (u.markLig.dispatch (c, std::forward<Ts> (ds)...));
2851     case MarkMark:		return_trace (u.markMark.dispatch (c, std::forward<Ts> (ds)...));
2852     case Context:		return_trace (u.context.dispatch (c, std::forward<Ts> (ds)...));
2853     case ChainContext:		return_trace (u.chainContext.dispatch (c, std::forward<Ts> (ds)...));
2854     case Extension:		return_trace (u.extension.dispatch (c, std::forward<Ts> (ds)...));
2855     default:			return_trace (c->default_return_value ());
2856     }
2857   }
2858 
2859   bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
2860   {
2861     hb_intersects_context_t c (glyphs);
2862     return dispatch (&c, lookup_type);
2863   }
2864 
2865   protected:
2866   union {
2867   SinglePos		single;
2868   PairPos		pair;
2869   CursivePos		cursive;
2870   MarkBasePos		markBase;
2871   MarkLigPos		markLig;
2872   MarkMarkPos		markMark;
2873   ContextPos		context;
2874   ChainContextPos	chainContext;
2875   ExtensionPos		extension;
2876   } u;
2877   public:
2878   DEFINE_SIZE_MIN (0);
2879 };
2880 
2881 
2882 struct PosLookup : Lookup
2883 {
2884   typedef struct PosLookupSubTable SubTable;
2885 
2886   const SubTable& get_subtable (unsigned int i) const
2887   { return Lookup::get_subtable<SubTable> (i); }
2888 
2889   bool is_reverse () const
2890   {
2891     return false;
2892   }
2893 
2894   bool apply (hb_ot_apply_context_t *c) const
2895   {
2896     TRACE_APPLY (this);
2897     return_trace (dispatch (c));
2898   }
2899 
2900   bool intersects (const hb_set_t *glyphs) const
2901   {
2902     hb_intersects_context_t c (glyphs);
2903     return dispatch (&c);
2904   }
2905 
2906   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
2907   { return dispatch (c); }
2908 
2909   hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
2910   {
2911     if (c->is_lookup_visited (this_index))
2912       return hb_closure_lookups_context_t::default_return_value ();
2913 
2914     c->set_lookup_visited (this_index);
2915     if (!intersects (c->glyphs))
2916     {
2917       c->set_lookup_inactive (this_index);
2918       return hb_closure_lookups_context_t::default_return_value ();
2919     }
2920     c->set_recurse_func (dispatch_closure_lookups_recurse_func);
2921 
2922     hb_closure_lookups_context_t::return_t ret = dispatch (c);
2923     return ret;
2924   }
2925 
2926   template <typename set_t>
2927   void collect_coverage (set_t *glyphs) const
2928   {
2929     hb_collect_coverage_context_t<set_t> c (glyphs);
2930     dispatch (&c);
2931   }
2932 
2933   static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
2934 
2935   template <typename context_t>
2936   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
2937 
2938   HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
2939 
2940   template <typename context_t, typename ...Ts>
2941   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2942   { return Lookup::dispatch<SubTable> (c, std::forward<Ts> (ds)...); }
2943 
2944   bool subset (hb_subset_context_t *c) const
2945   { return Lookup::subset<SubTable> (c); }
2946 
2947   bool sanitize (hb_sanitize_context_t *c) const
2948   { return Lookup::sanitize<SubTable> (c); }
2949 };
2950 
2951 /*
2952  * GPOS -- Glyph Positioning
2953  * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
2954  */
2955 
2956 struct GPOS : GSUBGPOS
2957 {
2958   static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
2959 
2960   const PosLookup& get_lookup (unsigned int i) const
2961   { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
2962 
2963   static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
2964   static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
2965   static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
2966 
2967   bool subset (hb_subset_context_t *c) const
2968   {
2969     hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_langsys, c->plan->gpos_features);
2970     return GSUBGPOS::subset<PosLookup> (&l);
2971   }
2972 
2973   bool sanitize (hb_sanitize_context_t *c) const
2974   { return GSUBGPOS::sanitize<PosLookup> (c); }
2975 
2976   HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
2977 				   hb_face_t *face) const;
2978 
2979   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
2980   {
2981     for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
2982     {
2983       if (!c->gpos_lookups->has (i)) continue;
2984       const PosLookup &l = get_lookup (i);
2985       l.dispatch (c);
2986     }
2987   }
2988 
closure_lookupsOT::GPOS2989   void closure_lookups (hb_face_t      *face,
2990 			const hb_set_t *glyphs,
2991 			hb_set_t       *lookup_indexes /* IN/OUT */) const
2992   { GSUBGPOS::closure_lookups<PosLookup> (face, glyphs, lookup_indexes); }
2993 
2994   typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
2995 };


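/* Reverses the cursive attachment chain starting at pos[i] and stopping at
 * new_parent: each link is flipped (child becomes parent) and the offset in
 * the minor direction is negated, so the chain stays geometrically consistent
 * when a glyph is re-attached to a new parent. */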
static void
reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
{
  int chain = pos[i].attach_chain(), type = pos[i].attach_type();
  if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
    return;

  pos[i].attach_chain() = 0;

  unsigned int j = (int) i + chain;

  /* Stop if we see the new parent in the chain. */
  if (j == new_parent)
    return;

  reverse_cursive_minor_offset (pos, j, direction, new_parent);

  if (HB_DIRECTION_IS_HORIZONTAL (direction))
    pos[j].y_offset = -pos[i].y_offset;
  else
    pos[j].x_offset = -pos[i].x_offset;

  pos[j].attach_chain() = -chain;
  pos[j].attach_type() = type;
}
static void
propagate_attachment_offsets (hb_glyph_position_t *pos,
			      unsigned int len,
			      unsigned int i,
			      hb_direction_t direction)
{
  /* Adjusts the offsets of attached glyphs (both cursive and mark) so that
   * they accumulate the offset of the glyph they are attached to. */
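  /* Example (forward direction): a mark at position i attached to a base at
   * j = i - 2 first inherits the base's accumulated offset, then subtracts
   * the advances of glyphs j..i-1, so the mark ends up positioned relative to
   * the base's origin rather than to the current pen position. */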
  int chain = pos[i].attach_chain(), type = pos[i].attach_type();
  if (likely (!chain))
    return;

  pos[i].attach_chain() = 0;

  unsigned int j = (int) i + chain;

  if (unlikely (j >= len))
    return;

  propagate_attachment_offsets (pos, len, j, direction);

  assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));

  if (type & ATTACH_TYPE_CURSIVE)
  {
    if (HB_DIRECTION_IS_HORIZONTAL (direction))
      pos[i].y_offset += pos[j].y_offset;
    else
      pos[i].x_offset += pos[j].x_offset;
  }
  else /*if (type & ATTACH_TYPE_MARK)*/
  {
    pos[i].x_offset += pos[j].x_offset;
    pos[i].y_offset += pos[j].y_offset;

    assert (j < i);
    if (HB_DIRECTION_IS_FORWARD (direction))
      for (unsigned int k = j; k < i; k++) {
	pos[i].x_offset -= pos[k].x_advance;
	pos[i].y_offset -= pos[k].y_advance;
      }
    else
      for (unsigned int k = j + 1; k < i + 1; k++) {
	pos[i].x_offset += pos[k].x_advance;
	pos[i].y_offset += pos[k].y_advance;
      }
  }
}

void
GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
{
  unsigned int count = buffer->len;
  for (unsigned int i = 0; i < count; i++)
    buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
}

void
GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
  //_hb_buffer_assert_gsubgpos_vars (buffer);
}

void
GPOS::position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer)
{
  _hb_buffer_assert_gsubgpos_vars (buffer);

  unsigned int len;
  hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
  hb_direction_t direction = buffer->props.direction;

  /* Handle attachments */
  if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
    for (unsigned i = 0; i < len; i++)
      propagate_attachment_offsets (pos, len, i, direction);

  if (unlikely (font->slant))
  {
    for (unsigned i = 0; i < len; i++)
      if (unlikely (pos[i].y_offset))
        pos[i].x_offset += _hb_roundf (font->slant_xy * pos[i].y_offset);
  }
}
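
/* Illustrative call order only; the actual driver is the OT shaper, which
 * applies the GPOS lookups selected by the shape plan between the phases:
 *
 *   GPOS::position_start (font, buffer);           // zero attach_chain()/attach_type()
 *   ...apply GPOS lookups...
 *   GPOS::position_finish_advances (font, buffer);
 *   GPOS::position_finish_offsets (font, buffer);  // resolve attachment chains, apply slant
 */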


struct GPOS_accelerator_t : GPOS::accelerator_t {
  GPOS_accelerator_t (hb_face_t *face) : GPOS::accelerator_t (face) {}
};
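
/* Minimal usage sketch, not part of the library proper: constructing the
 * accelerator sanitizes and caches the GPOS table, after which lookups can be
 * fetched by index.  The `face` argument is assumed to be owned by the caller. */
#if 0
static unsigned
example_gpos_lookup_count (hb_face_t *face)
{
  GPOS_accelerator_t gpos (face);
  return gpos.table->get_lookup_count ();
}
#endif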


/* Out-of-class implementations of the recursing methods */

#ifndef HB_NO_OT_LAYOUT
template <typename context_t>
/*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
  return l.dispatch (c);
}

/*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
  return l.closure_lookups (c, this_index);
}

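/* Applies a nested lookup invoked from a contextual positioning rule: the
 * current lookup index and lookup props are saved, swapped for those of the
 * recursed-into lookup, and restored afterwards so the outer lookup resumes
 * with its own glyph filtering. */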
/*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
{
  const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
  unsigned int saved_lookup_props = c->lookup_props;
  unsigned int saved_lookup_index = c->lookup_index;
  c->set_lookup_index (lookup_index);
  c->set_lookup_props (l.get_props ());
  bool ret = l.dispatch (c);
  c->set_lookup_index (saved_lookup_index);
  c->set_lookup_props (saved_lookup_props);
  return ret;
}
#endif


} /* namespace OT */


#endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */