1 /*
2  * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3  * Copyright © 2010,2012,2013  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
30 #define HB_OT_LAYOUT_GPOS_TABLE_HH
31 
32 #include "hb-ot-layout-gsubgpos.hh"
33 
34 
35 namespace OT {
36 
37 struct MarkArray;
38 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
39 						 const MarkArray &mark_array,
40 						 const hb_set_t  &glyphset,
41 						 hb_map_t*        klass_mapping /* INOUT */);
42 
43 /* buffer **position** var allocations */
44 #define attach_chain() var.i16[0] /* glyph to which this attaches, relative to the current glyph; negative for going back, positive for going forward. */
45 #define attach_type() var.u8[2] /* attachment type */
46 /* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
47 
48 enum attach_type_t {
49   ATTACH_TYPE_NONE	= 0x00,
50 
51   /* Each attachment should be either a mark or a cursive; can't be both. */
52   ATTACH_TYPE_MARK	= 0x01,
53   ATTACH_TYPE_CURSIVE	= 0x02,
54 };
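
/* A minimal sketch (not part of the table code; helper name is hypothetical)
 * of how these per-position variables are read back: attach_chain() is a
 * relative link to the glyph this one attaches to, and zero means
 * "not attached".  Cycle protection that real code would need is omitted. */
#if 0
static inline unsigned
attachment_root (const hb_glyph_position_t *pos, unsigned i)
{
  /* Follow relative links until an unattached glyph is reached. */
  while (pos[i].attach_chain ())
    i = (unsigned) ((int) i + pos[i].attach_chain ());
  return i;
}
#endif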
55 
56 
57 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */
58 
59 typedef HBUINT16 Value;
60 
61 typedef UnsizedArrayOf<Value> ValueRecord;
62 
63 struct ValueFormat : HBUINT16
64 {
65   enum Flags {
66     xPlacement	= 0x0001u,	/* Includes horizontal adjustment for placement */
67     yPlacement	= 0x0002u,	/* Includes vertical adjustment for placement */
68     xAdvance	= 0x0004u,	/* Includes horizontal adjustment for advance */
69     yAdvance	= 0x0008u,	/* Includes vertical adjustment for advance */
70     xPlaDevice	= 0x0010u,	/* Includes horizontal Device table for placement */
71     yPlaDevice	= 0x0020u,	/* Includes vertical Device table for placement */
72     xAdvDevice	= 0x0040u,	/* Includes horizontal Device table for advance */
73     yAdvDevice	= 0x0080u,	/* Includes vertical Device table for advance */
74     ignored	= 0x0F00u,	/* Was used in TrueType Open for MM fonts */
75     reserved	= 0xF000u,	/* For future use */
76 
77     devices	= 0x00F0u	/* Mask for having any Device table */
78   };
79 
80 /* All fields are optional.  Only those present (per ValueFormat) advance the value pointer. */
81 #if 0
82   HBINT16		xPlacement;		/* Horizontal adjustment for
83 					 * placement--in design units */
84   HBINT16		yPlacement;		/* Vertical adjustment for
85 					 * placement--in design units */
86   HBINT16		xAdvance;		/* Horizontal adjustment for
87 					 * advance--in design units (only used
88 					 * for horizontal writing) */
89   HBINT16		yAdvance;		/* Vertical adjustment for advance--in
90 					 * design units (only used for vertical
91 					 * writing) */
92   OffsetTo<Device>	xPlaDevice;	/* Offset to Device table for
93 					 * horizontal placement--measured from
94 					 * beginning of PosTable (may be NULL) */
95   OffsetTo<Device>	yPlaDevice;	/* Offset to Device table for vertical
96 					 * placement--measured from beginning
97 					 * of PosTable (may be NULL) */
98   OffsetTo<Device>	xAdvDevice;	/* Offset to Device table for
99 					 * horizontal advance--measured from
100 					 * beginning of PosTable (may be NULL) */
101   OffsetTo<Device>	yAdvDevice;	/* Offset to Device table for vertical
102 					 * advance--measured from beginning of
103 					 * PosTable (may be NULL) */
104 #endif
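
/* A worked sketch under assumed values: with format = xPlacement | xAdvance
 * (0x0005) the packed ValueRecord holds exactly two 16-bit fields, so
 * get_len () == hb_popcount (0x0005u) == 2 and get_size () == 4 bytes.
 * The illustrative helper below is not part of the real parsing path. */
#if 0
  static int example_read_x_advance (unsigned int format, const Value *values)
  {
    if (format & xPlacement) values++;	/* skip xPlacement if present */
    if (format & yPlacement) values++;	/* skip yPlacement if present */
    if (!(format & xAdvance)) return 0;	/* field not present */
    return *reinterpret_cast<const HBINT16 *> (values);
  }
#endif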
105 
106   unsigned int get_len () const  { return hb_popcount ((unsigned int) *this); }
107   unsigned int get_size () const { return get_len () * Value::static_size; }
108 
109   bool apply_value (hb_ot_apply_context_t *c,
110 		    const void            *base,
111 		    const Value           *values,
112 		    hb_glyph_position_t   &glyph_pos) const
113   {
114     bool ret = false;
115     unsigned int format = *this;
116     if (!format) return ret;
117 
118     hb_font_t *font = c->font;
119     bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);
120 
121     if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++, &ret));
122     if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++, &ret));
123     if (format & xAdvance) {
124       if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
125       values++;
126     }
127     /* y_advance values grow downward but font-space grows upward, hence negation */
128     if (format & yAdvance) {
129       if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
130       values++;
131     }
132 
133     if (!has_device ()) return ret;
134 
135     bool use_x_device = font->x_ppem || font->num_coords;
136     bool use_y_device = font->y_ppem || font->num_coords;
137 
138     if (!use_x_device && !use_y_device) return ret;
139 
140     const VariationStore &store = c->var_store;
141 
142     /* pixel -> fractional pixel */
143     if (format & xPlaDevice) {
144       if (use_x_device) glyph_pos.x_offset  += (base + get_device (values, &ret)).get_x_delta (font, store);
145       values++;
146     }
147     if (format & yPlaDevice) {
148       if (use_y_device) glyph_pos.y_offset  += (base + get_device (values, &ret)).get_y_delta (font, store);
149       values++;
150     }
151     if (format & xAdvDevice) {
152       if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
153       values++;
154     }
155     if (format & yAdvDevice) {
156       /* y_advance values grow downward but font-space grows upward, hence negation */
157       if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
158       values++;
159     }
160     return ret;
161   }
162 
163   void serialize_copy (hb_serialize_context_t *c, const void *base, const Value *values) const
164   {
165     unsigned int format = *this;
166     if (!format) return;
167 
168     if (format & xPlacement) c->copy (*values++);
169     if (format & yPlacement) c->copy (*values++);
170     if (format & xAdvance)   c->copy (*values++);
171     if (format & yAdvance)   c->copy (*values++);
172 
173     if (format & xPlaDevice) copy_device (c, base, values++);
174     if (format & yPlaDevice) copy_device (c, base, values++);
175     if (format & xAdvDevice) copy_device (c, base, values++);
176     if (format & yAdvDevice) copy_device (c, base, values++);
177   }
178 
179   private:
180   bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
181   {
182     unsigned int format = *this;
183 
184     if (format & xPlacement) values++;
185     if (format & yPlacement) values++;
186     if (format & xAdvance)   values++;
187     if (format & yAdvance)   values++;
188 
189     if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
190     if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
191     if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
192     if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
193 
194     return true;
195   }
196 
197   bool copy_device (hb_serialize_context_t *c, const void *base, const Value *src_value) const
198   {
199     Value	*dst_value = c->copy (*src_value);
200 
201     if (!dst_value) return false;
202     if (*dst_value == 0) return true;
203 
204     *dst_value = 0;
205     c->push ();
206     if ((base + get_device (src_value)).copy (c))
207     {
208       c->add_link (*dst_value, c->pop_pack ());
209       return true;
210     }
211     else
212     {
213       c->pop_discard ();
214       return false;
215     }
216   }
217 
218   static inline OffsetTo<Device>& get_device (Value* value)
219   {
220     return *static_cast<OffsetTo<Device> *> (value);
221   }
222   static inline const OffsetTo<Device>& get_device (const Value* value, bool *worked=nullptr)
223   {
224     if (worked) *worked |= bool (*value);
225     return *static_cast<const OffsetTo<Device> *> (value);
226   }
227 
228   static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
229   {
230     if (worked) *worked |= bool (*value);
231     return *reinterpret_cast<const HBINT16 *> (value);
232   }
233 
234   public:
235 
236   bool has_device () const
237   {
238     unsigned int format = *this;
239     return (format & devices) != 0;
240   }
241 
242   bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
243   {
244     TRACE_SANITIZE (this);
245     return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
246   }
247 
248   bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
249   {
250     TRACE_SANITIZE (this);
251     unsigned int len = get_len ();
252 
253     if (!c->check_range (values, count, get_size ())) return_trace (false);
254 
255     if (!has_device ()) return_trace (true);
256 
257     for (unsigned int i = 0; i < count; i++) {
258       if (!sanitize_value_devices (c, base, values))
259 	return_trace (false);
260       values += len;
261     }
262 
263     return_trace (true);
264   }
265 
266   /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
267   bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
268   {
269     TRACE_SANITIZE (this);
270 
271     if (!has_device ()) return_trace (true);
272 
273     for (unsigned int i = 0; i < count; i++) {
274       if (!sanitize_value_devices (c, base, values))
275 	return_trace (false);
276       values += stride;
277     }
278 
279     return_trace (true);
280   }
281 };
282 
283 template<typename Iterator>
284 static void SinglePos_serialize (hb_serialize_context_t *c,
285 				 const void *src,
286 				 Iterator it,
287 				 ValueFormat valFormat);
288 
289 
290 struct AnchorFormat1
291 {
292   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
293 		   float *x, float *y) const
294   {
295     hb_font_t *font = c->font;
296     *x = font->em_fscale_x (xCoordinate);
297     *y = font->em_fscale_y (yCoordinate);
298   }
299 
300   bool sanitize (hb_sanitize_context_t *c) const
301   {
302     TRACE_SANITIZE (this);
303     return_trace (c->check_struct (this));
304   }
305 
306   AnchorFormat1* copy (hb_serialize_context_t *c) const
307   {
308     TRACE_SERIALIZE (this);
309     return_trace (c->embed<AnchorFormat1> (this));
310   }
311 
312   protected:
313   HBUINT16	format;			/* Format identifier--format = 1 */
314   FWORD		xCoordinate;		/* Horizontal value--in design units */
315   FWORD		yCoordinate;		/* Vertical value--in design units */
316   public:
317   DEFINE_SIZE_STATIC (6);
318 };
319 
320 struct AnchorFormat2
321 {
322   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
323 		   float *x, float *y) const
324   {
325     hb_font_t *font = c->font;
326 
327 #ifdef HB_NO_HINTING
328     *x = font->em_fscale_x (xCoordinate);
329     *y = font->em_fscale_y (yCoordinate);
330     return;
331 #endif
332 
333     unsigned int x_ppem = font->x_ppem;
334     unsigned int y_ppem = font->y_ppem;
335     hb_position_t cx = 0, cy = 0;
336     bool ret;
337 
338     ret = (x_ppem || y_ppem) &&
339 	  font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
340     *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
341     *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
342   }
343 
344   bool sanitize (hb_sanitize_context_t *c) const
345   {
346     TRACE_SANITIZE (this);
347     return_trace (c->check_struct (this));
348   }
349 
350   AnchorFormat2* copy (hb_serialize_context_t *c) const
351   {
352     TRACE_SERIALIZE (this);
353     return_trace (c->embed<AnchorFormat2> (this));
354   }
355 
356   protected:
357   HBUINT16	format;			/* Format identifier--format = 2 */
358   FWORD		xCoordinate;		/* Horizontal value--in design units */
359   FWORD		yCoordinate;		/* Vertical value--in design units */
360   HBUINT16	anchorPoint;		/* Index to glyph contour point */
361   public:
362   DEFINE_SIZE_STATIC (8);
363 };
364 
365 struct AnchorFormat3
366 {
367   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
368 		   float *x, float *y) const
369   {
370     hb_font_t *font = c->font;
371     *x = font->em_fscale_x (xCoordinate);
372     *y = font->em_fscale_y (yCoordinate);
373 
374     if (font->x_ppem || font->num_coords)
375       *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
376     if (font->y_ppem || font->num_coords)
377       *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
378   }
379 
380   bool sanitize (hb_sanitize_context_t *c) const
381   {
382     TRACE_SANITIZE (this);
383     return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
384   }
385 
386   AnchorFormat3* copy (hb_serialize_context_t *c) const
387   {
388     TRACE_SERIALIZE (this);
389     auto *out = c->embed<AnchorFormat3> (this);
390     if (unlikely (!out)) return_trace (nullptr);
391 
392     out->xDeviceTable.serialize_copy (c, xDeviceTable, this);
393     out->yDeviceTable.serialize_copy (c, yDeviceTable, this);
394     return_trace (out);
395   }
396 
397   protected:
398   HBUINT16	format;			/* Format identifier--format = 3 */
399   FWORD		xCoordinate;		/* Horizontal value--in design units */
400   FWORD		yCoordinate;		/* Vertical value--in design units */
401   OffsetTo<Device>
402 		xDeviceTable;		/* Offset to Device table for X
403 					 * coordinate-- from beginning of
404 					 * Anchor table (may be NULL) */
405   OffsetTo<Device>
406 		yDeviceTable;		/* Offset to Device table for Y
407 					 * coordinate-- from beginning of
408 					 * Anchor table (may be NULL) */
409   public:
410   DEFINE_SIZE_STATIC (10);
411 };
412 
413 struct Anchor
414 {
415   void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
416 		   float *x, float *y) const
417   {
418     *x = *y = 0;
419     switch (u.format) {
420     case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
421     case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
422     case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
423     default:					      return;
424     }
425   }
426 
427   bool sanitize (hb_sanitize_context_t *c) const
428   {
429     TRACE_SANITIZE (this);
430     if (!u.format.sanitize (c)) return_trace (false);
431     switch (u.format) {
432     case 1: return_trace (u.format1.sanitize (c));
433     case 2: return_trace (u.format2.sanitize (c));
434     case 3: return_trace (u.format3.sanitize (c));
435     default:return_trace (true);
436     }
437   }
438 
439   Anchor* copy (hb_serialize_context_t *c) const
440   {
441     TRACE_SERIALIZE (this);
442     switch (u.format) {
443     case 1: return_trace (reinterpret_cast<Anchor *> (u.format1.copy (c)));
444     case 2: return_trace (reinterpret_cast<Anchor *> (u.format2.copy (c)));
445     case 3: return_trace (reinterpret_cast<Anchor *> (u.format3.copy (c)));
446     default:return_trace (nullptr);
447     }
448   }
449 
450   protected:
451   union {
452   HBUINT16		format;		/* Format identifier */
453   AnchorFormat1		format1;
454   AnchorFormat2		format2;
455   AnchorFormat3		format3;
456   } u;
457   public:
458   DEFINE_SIZE_UNION (2, format);
459 };
460 
461 
462 struct AnchorMatrix
463 {
464   const Anchor& get_anchor (unsigned int row, unsigned int col,
465 			    unsigned int cols, bool *found) const
466   {
467     *found = false;
468     if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
469     *found = !matrixZ[row * cols + col].is_null ();
470     return this+matrixZ[row * cols + col];
471   }
472 
473   template <typename Iterator,
474 	    hb_requires (hb_is_iterator (Iterator))>
475   bool serialize (hb_serialize_context_t *c,
476 		  unsigned                num_rows,
477 		  AnchorMatrix const     *offset_matrix,
478 		  Iterator                index_iter)
479   {
480     TRACE_SERIALIZE (this);
481     if (!index_iter.len ()) return_trace (false);
482     if (unlikely (!c->extend_min ((*this))))  return_trace (false);
483 
484     this->rows = num_rows;
485     for (const unsigned i : index_iter)
486     {
487       auto *offset = c->embed (offset_matrix->matrixZ[i]);
488       if (!offset) return_trace (false);
489       offset->serialize_copy (c, offset_matrix->matrixZ[i], offset_matrix, c->to_bias (this));
490     }
491 
492     return_trace (true);
493   }
494 
495   bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
496   {
497     TRACE_SANITIZE (this);
498     if (!c->check_struct (this)) return_trace (false);
499     if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
500     unsigned int count = rows * cols;
501     if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
502     for (unsigned int i = 0; i < count; i++)
503       if (!matrixZ[i].sanitize (c, this)) return_trace (false);
504     return_trace (true);
505   }
506 
507   HBUINT16	rows;			/* Number of rows */
508   UnsizedArrayOf<OffsetTo<Anchor>>
509 		matrixZ;		/* Matrix of offsets to Anchor tables--
510 					 * from beginning of AnchorMatrix table */
511   public:
512   DEFINE_SIZE_ARRAY (2, matrixZ);
513 };
514 
515 
516 struct MarkRecord
517 {
518   friend struct MarkArray;
519 
520   unsigned get_class () const { return (unsigned) klass; }
521   bool sanitize (hb_sanitize_context_t *c, const void *base) const
522   {
523     TRACE_SANITIZE (this);
524     return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
525   }
526 
527   MarkRecord *copy (hb_serialize_context_t *c, const void *base,
528 		    unsigned dst_bias, const hb_map_t *klass_mapping) const
529   {
530     TRACE_SERIALIZE (this);
531     auto *out = c->embed (this);
532     if (unlikely (!out)) return_trace (nullptr);
533 
534     out->klass = klass_mapping->get (klass);
535     out->markAnchor.serialize_copy (c, markAnchor, base, dst_bias);
536     return_trace (out);
537   }
538 
539   protected:
540   HBUINT16	klass;			/* Class defined for this mark */
541   OffsetTo<Anchor>
542 		markAnchor;		/* Offset to Anchor table--from
543 					 * beginning of MarkArray table */
544   public:
545   DEFINE_SIZE_STATIC (4);
546 };
547 
548 struct MarkArray : ArrayOf<MarkRecord>	/* Array of MarkRecords--in Coverage order */
549 {
550   bool apply (hb_ot_apply_context_t *c,
551 	      unsigned int mark_index, unsigned int glyph_index,
552 	      const AnchorMatrix &anchors, unsigned int class_count,
553 	      unsigned int glyph_pos) const
554   {
555     TRACE_APPLY (this);
556     hb_buffer_t *buffer = c->buffer;
557     const MarkRecord &record = ArrayOf<MarkRecord>::operator[](mark_index);
558     unsigned int mark_class = record.klass;
559 
560     const Anchor& mark_anchor = this + record.markAnchor;
561     bool found;
562     const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
563     /* If this subtable doesn't have an anchor for this base and this class,
564      * return false such that the subsequent subtables have a chance at it. */
565     if (unlikely (!found)) return_trace (false);
566 
567     float mark_x, mark_y, base_x, base_y;
568 
569     buffer->unsafe_to_break (glyph_pos, buffer->idx);
570     mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
571     glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);
572 
573     hb_glyph_position_t &o = buffer->cur_pos();
574     o.x_offset = roundf (base_x - mark_x);
575     o.y_offset = roundf (base_y - mark_y);
576     o.attach_type() = ATTACH_TYPE_MARK;
577     o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
578     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
579 
580     buffer->idx++;
581     return_trace (true);
582   }
583 
584   template<typename Iterator,
585 	   hb_requires (hb_is_source_of (Iterator, MarkRecord))>
586   bool serialize (hb_serialize_context_t *c,
587 		  const hb_map_t         *klass_mapping,
588 		  const void             *base,
589 		  Iterator                it)
590   {
591     TRACE_SERIALIZE (this);
592     if (unlikely (!c->extend_min (*this))) return_trace (false);
593     if (unlikely (!c->check_assign (len, it.len ()))) return_trace (false);
594     c->copy_all (it, base, c->to_bias (this), klass_mapping);
595     return_trace (true);
596   }
597 
598   bool sanitize (hb_sanitize_context_t *c) const
599   {
600     TRACE_SANITIZE (this);
601     return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
602   }
603 };
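
/* A rough numeric sketch (assumed anchor values, outside the real lookup
 * machinery) of what MarkArray::apply computes once both anchors resolve:
 * the mark is offset so its anchor lands on the base glyph's anchor, and the
 * attachment is recorded relative to the current buffer index. */
#if 0
  /* mark anchor at (120, 640); base anchor at (410, 655); base at position 3, mark at index 4 */
  o.x_offset	   = roundf (410.f - 120.f);	/* = 290 */
  o.y_offset	   = roundf (655.f - 640.f);	/* = 15 */
  o.attach_type () = ATTACH_TYPE_MARK;
  o.attach_chain () = 3 - 4;			/* -1: attaches to the preceding glyph */
#endif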
604 
605 
606 /* Lookups */
607 
608 struct SinglePosFormat1
609 {
610   bool intersects (const hb_set_t *glyphs) const
611   { return (this+coverage).intersects (glyphs); }
612 
613   void closure_lookups (hb_closure_lookups_context_t *c) const {}
614 
615   void collect_glyphs (hb_collect_glyphs_context_t *c) const
616   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
617 
618   const Coverage &get_coverage () const { return this+coverage; }
619 
620   bool apply (hb_ot_apply_context_t *c) const
621   {
622     TRACE_APPLY (this);
623     hb_buffer_t *buffer = c->buffer;
624     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
625     if (likely (index == NOT_COVERED)) return_trace (false);
626 
627     valueFormat.apply_value (c, this, values, buffer->cur_pos());
628 
629     buffer->idx++;
630     return_trace (true);
631   }
632 
633   template<typename Iterator,
634 	   hb_requires (hb_is_iterator (Iterator))>
635   void serialize (hb_serialize_context_t *c,
636 		  const void *src,
637 		  Iterator it,
638 		  ValueFormat valFormat)
639   {
640     auto out = c->extend_min (*this);
641     if (unlikely (!out)) return;
642     if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
643 
644     + it
645     | hb_map (hb_second)
646     | hb_apply ([&] (hb_array_t<const Value> _)
647 		{ valFormat.serialize_copy (c, src, &_); })
648     ;
649 
650     auto glyphs =
651     + it
652     | hb_map_retains_sorting (hb_first)
653     ;
654 
655     coverage.serialize (c, this).serialize (c, glyphs);
656   }
657 
658   bool subset (hb_subset_context_t *c) const
659   {
660     TRACE_SUBSET (this);
661     const hb_set_t &glyphset = *c->plan->glyphset ();
662     const hb_map_t &glyph_map = *c->plan->glyph_map;
663 
664     auto it =
665     + hb_iter (this+coverage)
666     | hb_filter (glyphset)
667     | hb_map_retains_sorting (glyph_map)
668     | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
669     ;
670 
671     bool ret = bool (it);
672     SinglePos_serialize (c->serializer, this, it, valueFormat);
673     return_trace (ret);
674   }
675 
676   bool sanitize (hb_sanitize_context_t *c) const
677   {
678     TRACE_SANITIZE (this);
679     return_trace (c->check_struct (this) &&
680 		  coverage.sanitize (c, this) &&
681 		  valueFormat.sanitize_value (c, this, values));
682   }
683 
684   protected:
685   HBUINT16	format;			/* Format identifier--format = 1 */
686   OffsetTo<Coverage>
687 		coverage;		/* Offset to Coverage table--from
688 					 * beginning of subtable */
689   ValueFormat	valueFormat;		/* Defines the types of data in the
690 					 * ValueRecord */
691   ValueRecord	values;			/* Defines positioning
692 					 * value(s)--applied to all glyphs in
693 					 * the Coverage table */
694   public:
695   DEFINE_SIZE_ARRAY (6, values);
696 };
697 
698 struct SinglePosFormat2
699 {
700   bool intersects (const hb_set_t *glyphs) const
701   { return (this+coverage).intersects (glyphs); }
702 
703   void closure_lookups (hb_closure_lookups_context_t *c) const {}
704 
705   void collect_glyphs (hb_collect_glyphs_context_t *c) const
706   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
707 
708   const Coverage &get_coverage () const { return this+coverage; }
709 
710   bool apply (hb_ot_apply_context_t *c) const
711   {
712     TRACE_APPLY (this);
713     hb_buffer_t *buffer = c->buffer;
714     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
715     if (likely (index == NOT_COVERED)) return_trace (false);
716 
717     if (unlikely (index >= valueCount)) return_trace (false);
718 
719     valueFormat.apply_value (c, this,
720 			     &values[index * valueFormat.get_len ()],
721 			     buffer->cur_pos());
722 
723     buffer->idx++;
724     return_trace (true);
725   }
726 
727   template<typename Iterator,
728 	   hb_requires (hb_is_iterator (Iterator))>
729   void serialize (hb_serialize_context_t *c,
730 		  const void *src,
731 		  Iterator it,
732 		  ValueFormat valFormat)
733   {
734     auto out = c->extend_min (*this);
735     if (unlikely (!out)) return;
736     if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
737     if (unlikely (!c->check_assign (valueCount, it.len ()))) return;
738 
739     + it
740     | hb_map (hb_second)
741     | hb_apply ([&] (hb_array_t<const Value> _)
742 		{ valFormat.serialize_copy (c, src, &_); })
743     ;
744 
745     auto glyphs =
746     + it
747     | hb_map_retains_sorting (hb_first)
748     ;
749 
750     coverage.serialize (c, this).serialize (c, glyphs);
751   }
752 
753   bool subset (hb_subset_context_t *c) const
754   {
755     TRACE_SUBSET (this);
756     const hb_set_t &glyphset = *c->plan->glyphset ();
757     const hb_map_t &glyph_map = *c->plan->glyph_map;
758 
759     unsigned sub_length = valueFormat.get_len ();
760     auto values_array = values.as_array (valueCount * sub_length);
761 
762     auto it =
763     + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
764     | hb_filter (glyphset, hb_first)
765     | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
766 			      {
767 				return hb_pair (glyph_map[_.first],
768 						values_array.sub_array (_.second * sub_length,
769 									sub_length));
770 			      })
771     ;
772 
773     bool ret = bool (it);
774     SinglePos_serialize (c->serializer, this, it, valueFormat);
775     return_trace (ret);
776   }
777 
778   bool sanitize (hb_sanitize_context_t *c) const
779   {
780     TRACE_SANITIZE (this);
781     return_trace (c->check_struct (this) &&
782 		  coverage.sanitize (c, this) &&
783 		  valueFormat.sanitize_values (c, this, values, valueCount));
784   }
785 
786   protected:
787   HBUINT16	format;			/* Format identifier--format = 2 */
788   OffsetTo<Coverage>
789 		coverage;		/* Offset to Coverage table--from
790 					 * beginning of subtable */
791   ValueFormat	valueFormat;		/* Defines the types of data in the
792 					 * ValueRecord */
793   HBUINT16	valueCount;		/* Number of ValueRecords */
794   ValueRecord	values;			/* Array of ValueRecords--positioning
795 					 * values applied to glyphs */
796   public:
797   DEFINE_SIZE_ARRAY (8, values);
798 };
799 
800 struct SinglePos
801 {
802   template<typename Iterator,
803 	   hb_requires (hb_is_iterator (Iterator))>
804   unsigned get_format (Iterator glyph_val_iter_pairs)
805   {
806     hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);
807 
808     for (const auto iter : glyph_val_iter_pairs)
809       for (const auto _ : hb_zip (iter.second, first_val_iter))
810 	if (_.first != _.second)
811 	  return 2;
812 
813     return 1;
814   }
815 
816 
817   template<typename Iterator,
818 	   hb_requires (hb_is_iterator (Iterator))>
819   void serialize (hb_serialize_context_t *c,
820 		  const void *src,
821 		  Iterator glyph_val_iter_pairs,
822 		  ValueFormat valFormat)
823   {
824     if (unlikely (!c->extend_min (u.format))) return;
825     unsigned format = 2;
826 
827     if (glyph_val_iter_pairs) format = get_format (glyph_val_iter_pairs);
828 
829     u.format = format;
830     switch (u.format) {
831     case 1: u.format1.serialize (c, src, glyph_val_iter_pairs, valFormat);
832 	    return;
833     case 2: u.format2.serialize (c, src, glyph_val_iter_pairs, valFormat);
834 	    return;
835     default:return;
836     }
837   }
838 
839   template <typename context_t, typename ...Ts>
840   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
841   {
842     TRACE_DISPATCH (this, u.format);
843     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
844     switch (u.format) {
845     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
846     case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
847     default:return_trace (c->default_return_value ());
848     }
849   }
850 
851   protected:
852   union {
853   HBUINT16		format;		/* Format identifier */
854   SinglePosFormat1	format1;
855   SinglePosFormat2	format2;
856   } u;
857 };
858 
859 template<typename Iterator>
860 static void
861 SinglePos_serialize (hb_serialize_context_t *c,
862 		     const void *src,
863 		     Iterator it,
864 		     ValueFormat valFormat)
865 { c->start_embed<SinglePos> ()->serialize (c, src, it, valFormat); }
866 
867 
868 struct PairValueRecord
869 {
870   friend struct PairSet;
871 
872   int cmp (hb_codepoint_t k) const
873   { return secondGlyph.cmp (k); }
874 
875   struct serialize_closure_t
876   {
877     const void 		*base;
878     const ValueFormat	*valueFormats;
879     unsigned		len1; /* valueFormats[0].get_len() */
880     const hb_map_t 	*glyph_map;
881   };
882 
883   bool serialize (hb_serialize_context_t *c,
884 		  serialize_closure_t *closure) const
885   {
886     TRACE_SERIALIZE (this);
887     auto *out = c->start_embed (*this);
888     if (unlikely (!c->extend_min (out))) return_trace (false);
889 
890     out->secondGlyph = (*closure->glyph_map)[secondGlyph];
891 
892     closure->valueFormats[0].serialize_copy (c, closure->base, &values[0]);
893     closure->valueFormats[1].serialize_copy (c, closure->base, &values[closure->len1]);
894 
895     return_trace (true);
896   }
897 
898   protected:
899   HBGlyphID	secondGlyph;		/* GlyphID of second glyph in the
900 					 * pair--first glyph is listed in the
901 					 * Coverage table */
902   ValueRecord	values;			/* Positioning data for the first glyph
903 					 * followed by for second glyph */
904   public:
905   DEFINE_SIZE_ARRAY (2, values);
906 };
907 
908 struct PairSet
909 {
910   friend struct PairPosFormat1;
911 
912   bool intersects (const hb_set_t *glyphs,
913 		   const ValueFormat *valueFormats) const
914   {
915     unsigned int len1 = valueFormats[0].get_len ();
916     unsigned int len2 = valueFormats[1].get_len ();
917     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
918 
919     const PairValueRecord *record = &firstPairValueRecord;
920     unsigned int count = len;
921     for (unsigned int i = 0; i < count; i++)
922     {
923       if (glyphs->has (record->secondGlyph))
924 	return true;
925       record = &StructAtOffset<const PairValueRecord> (record, record_size);
926     }
927     return false;
928   }
929 
930   void collect_glyphs (hb_collect_glyphs_context_t *c,
931 			      const ValueFormat *valueFormats) const
932   {
933     unsigned int len1 = valueFormats[0].get_len ();
934     unsigned int len2 = valueFormats[1].get_len ();
935     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
936 
937     const PairValueRecord *record = &firstPairValueRecord;
938     c->input->add_array (&record->secondGlyph, len, record_size);
939   }
940 
941   bool apply (hb_ot_apply_context_t *c,
942 	      const ValueFormat *valueFormats,
943 	      unsigned int pos) const
944   {
945     TRACE_APPLY (this);
946     hb_buffer_t *buffer = c->buffer;
947     unsigned int len1 = valueFormats[0].get_len ();
948     unsigned int len2 = valueFormats[1].get_len ();
949     unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);
950 
951     const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
952 						&firstPairValueRecord,
953 						len,
954 						record_size);
955     if (record)
956     {
957       /* Note the intentional use of "|" instead of short-circuit "||". */
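      /* Both halves must run for their side effects: with "||" the second
       * glyph's value record would be skipped whenever the first one matched. */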
958       if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
959 	  valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
960 	buffer->unsafe_to_break (buffer->idx, pos + 1);
961       if (len2)
962 	pos++;
963       buffer->idx = pos;
964       return_trace (true);
965     }
966     return_trace (false);
967   }
968 
969   bool subset (hb_subset_context_t *c,
970 	       const ValueFormat valueFormats[2]) const
971   {
972     TRACE_SUBSET (this);
973     auto snap = c->serializer->snapshot ();
974 
975     auto *out = c->serializer->start_embed (*this);
976     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
977     out->len = 0;
978 
979     const hb_set_t &glyphset = *c->plan->glyphset ();
980     const hb_map_t &glyph_map = *c->plan->glyph_map;
981 
982     unsigned len1 = valueFormats[0].get_len ();
983     unsigned len2 = valueFormats[1].get_len ();
984     unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);
985 
986     PairValueRecord::serialize_closure_t closure =
987     {
988       this,
989       valueFormats,
990       len1,
991       &glyph_map
992     };
993 
994     const PairValueRecord *record = &firstPairValueRecord;
995     unsigned count = len, num = 0;
996     for (unsigned i = 0; i < count; i++)
997     {
998       if (glyphset.has (record->secondGlyph)
999 	 && record->serialize (c->serializer, &closure)) num++;
1000       record = &StructAtOffset<const PairValueRecord> (record, record_size);
1001     }
1002 
1003     out->len = num;
1004     if (!num) c->serializer->revert (snap);
1005     return_trace (num);
1006   }
1007 
1008   struct sanitize_closure_t
1009   {
1010     const ValueFormat *valueFormats;
1011     unsigned int len1; /* valueFormats[0].get_len() */
1012     unsigned int stride; /* 1 + len1 + len2 */
1013   };
1014 
1015   bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
1016   {
1017     TRACE_SANITIZE (this);
1018     if (!(c->check_struct (this)
1019        && c->check_range (&firstPairValueRecord,
1020 			  len,
1021 			  HBUINT16::static_size,
1022 			  closure->stride))) return_trace (false);
1023 
1024     unsigned int count = len;
1025     const PairValueRecord *record = &firstPairValueRecord;
1026     return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
1027 		  closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
1028   }
1029 
1030   protected:
1031   HBUINT16		len;	/* Number of PairValueRecords */
1032   PairValueRecord	firstPairValueRecord;
1033 				/* Array of PairValueRecords--ordered
1034 				 * by GlyphID of the second glyph */
1035   public:
1036   DEFINE_SIZE_MIN (2);
1037 };
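
/* PairValueRecords are variable-sized, so PairSet walks them by byte stride
 * rather than by array indexing.  A worked example under assumed formats: if
 * both value formats cover xAdvance only (len1 = len2 = 1), each record is
 * HBUINT16::static_size * (1 + 1 + 1) = 6 bytes: one secondGlyph id plus one
 * 16-bit value per glyph. */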
1038 
1039 struct PairPosFormat1
1040 {
1041   bool intersects (const hb_set_t *glyphs) const
1042   {
1043     return
1044     + hb_zip (this+coverage, pairSet)
1045     | hb_filter (*glyphs, hb_first)
1046     | hb_map (hb_second)
1047     | hb_map ([glyphs, this] (const OffsetTo<PairSet> &_)
1048 	      { return (this+_).intersects (glyphs, valueFormat); })
1049     | hb_any
1050     ;
1051   }
1052 
1053   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1054 
1055   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1056   {
1057     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1058     unsigned int count = pairSet.len;
1059     for (unsigned int i = 0; i < count; i++)
1060       (this+pairSet[i]).collect_glyphs (c, valueFormat);
1061   }
1062 
1063   const Coverage &get_coverage () const { return this+coverage; }
1064 
1065   bool apply (hb_ot_apply_context_t *c) const
1066   {
1067     TRACE_APPLY (this);
1068     hb_buffer_t *buffer = c->buffer;
1069     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1070     if (likely (index == NOT_COVERED)) return_trace (false);
1071 
1072     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1073     skippy_iter.reset (buffer->idx, 1);
1074     if (!skippy_iter.next ()) return_trace (false);
1075 
1076     return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
1077   }
1078 
1079   bool subset (hb_subset_context_t *c) const
1080   {
1081     TRACE_SUBSET (this);
1082 
1083     const hb_set_t &glyphset = *c->plan->glyphset ();
1084     const hb_map_t &glyph_map = *c->plan->glyph_map;
1085 
1086     auto *out = c->serializer->start_embed (*this);
1087     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1088     out->format = format;
1089     out->valueFormat[0] = valueFormat[0];
1090     out->valueFormat[1] = valueFormat[1];
1091 
1092     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1093 
1094     + hb_zip (this+coverage, pairSet)
1095     | hb_filter (glyphset, hb_first)
1096     | hb_filter ([this, c, out] (const OffsetTo<PairSet>& _)
1097 		 {
1098 		   auto *o = out->pairSet.serialize_append (c->serializer);
1099 		   if (unlikely (!o)) return false;
1100 		   auto snap = c->serializer->snapshot ();
1101 		   bool ret = o->serialize_subset (c, _, this, valueFormat);
1102 		   if (!ret)
1103 		   {
1104 		     out->pairSet.pop ();
1105 		     c->serializer->revert (snap);
1106 		   }
1107 		   return ret;
1108 		 },
1109 		 hb_second)
1110     | hb_map (hb_first)
1111     | hb_map (glyph_map)
1112     | hb_sink (new_coverage)
1113     ;
1114 
1115     out->coverage.serialize (c->serializer, out)
1116 		 .serialize (c->serializer, new_coverage.iter ());
1117 
1118     return_trace (bool (new_coverage));
1119   }
1120 
1121   bool sanitize (hb_sanitize_context_t *c) const
1122   {
1123     TRACE_SANITIZE (this);
1124 
1125     if (!c->check_struct (this)) return_trace (false);
1126 
1127     unsigned int len1 = valueFormat[0].get_len ();
1128     unsigned int len2 = valueFormat[1].get_len ();
1129     PairSet::sanitize_closure_t closure =
1130     {
1131       valueFormat,
1132       len1,
1133       1 + len1 + len2
1134     };
1135 
1136     return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
1137   }
1138 
1139   protected:
1140   HBUINT16	format;			/* Format identifier--format = 1 */
1141   OffsetTo<Coverage>
1142 		coverage;		/* Offset to Coverage table--from
1143 					 * beginning of subtable */
1144   ValueFormat	valueFormat[2];		/* [0] Defines the types of data in
1145 					 * ValueRecord1--for the first glyph
1146 					 * in the pair--may be zero (0) */
1147 					/* [1] Defines the types of data in
1148 					 * ValueRecord2--for the second glyph
1149 					 * in the pair--may be zero (0) */
1150   OffsetArrayOf<PairSet>
1151 		pairSet;		/* Array of PairSet tables
1152 					 * ordered by Coverage Index */
1153   public:
1154   DEFINE_SIZE_ARRAY (10, pairSet);
1155 };
1156 
1157 struct PairPosFormat2
1158 {
1159   bool intersects (const hb_set_t *glyphs) const
1160   {
1161     return (this+coverage).intersects (glyphs) &&
1162 	   (this+classDef2).intersects (glyphs);
1163   }
1164 
1165   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1166 
1167   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1168   {
1169     if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
1170     if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
1171   }
1172 
1173   const Coverage &get_coverage () const { return this+coverage; }
1174 
1175   bool apply (hb_ot_apply_context_t *c) const
1176   {
1177     TRACE_APPLY (this);
1178     hb_buffer_t *buffer = c->buffer;
1179     unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
1180     if (likely (index == NOT_COVERED)) return_trace (false);
1181 
1182     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1183     skippy_iter.reset (buffer->idx, 1);
1184     if (!skippy_iter.next ()) return_trace (false);
1185 
1186     unsigned int len1 = valueFormat1.get_len ();
1187     unsigned int len2 = valueFormat2.get_len ();
1188     unsigned int record_len = len1 + len2;
1189 
1190     unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
1191     unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
1192     if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
1193 
1194     const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
1195     /* Note the intentional use of "|" instead of short-circuit "||". */
1196     if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
1197 	valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
1198       buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);
1199 
1200     buffer->idx = skippy_iter.idx;
1201     if (len2)
1202       buffer->idx++;
1203 
1204     return_trace (true);
1205   }
1206 
1207   bool subset (hb_subset_context_t *c) const
1208   {
1209     TRACE_SUBSET (this);
1210     auto *out = c->serializer->start_embed (*this);
1211     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1212     out->format = format;
1213     out->valueFormat1 = valueFormat1;
1214     out->valueFormat2 = valueFormat2;
1215 
1216     hb_map_t klass1_map;
1217     out->classDef1.serialize_subset (c, classDef1, this, &klass1_map);
1218     out->class1Count = klass1_map.get_population ();
1219 
1220     hb_map_t klass2_map;
1221     out->classDef2.serialize_subset (c, classDef2, this, &klass2_map);
1222     out->class2Count = klass2_map.get_population ();
1223 
1224     unsigned len1 = valueFormat1.get_len ();
1225     unsigned len2 = valueFormat2.get_len ();
1226 
1227     + hb_range ((unsigned) class1Count)
1228     | hb_filter (klass1_map)
1229     | hb_apply ([&] (const unsigned class1_idx)
1230 		{
1231 		  + hb_range ((unsigned) class2Count)
1232 		  | hb_filter (klass2_map)
1233 		  | hb_apply ([&] (const unsigned class2_idx)
1234 			      {
1235 				unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
1236 				valueFormat1.serialize_copy (c->serializer, this, &values[idx]);
1237 				valueFormat2.serialize_copy (c->serializer, this, &values[idx + len1]);
1238 			      })
1239 		  ;
1240 		})
1241     ;
1242 
1243     const hb_set_t &glyphset = *c->plan->_glyphset_gsub;
1244     const hb_map_t &glyph_map = *c->plan->glyph_map;
1245 
1246     auto it =
1247     + hb_iter (this+coverage)
1248     | hb_filter (glyphset)
1249     | hb_map_retains_sorting (glyph_map)
1250     ;
1251 
1252     out->coverage.serialize (c->serializer, out).serialize (c->serializer, it);
1253     return_trace (out->class1Count && out->class2Count && bool (it));
1254   }
1255 
1256   bool sanitize (hb_sanitize_context_t *c) const
1257   {
1258     TRACE_SANITIZE (this);
1259     if (!(c->check_struct (this)
1260        && coverage.sanitize (c, this)
1261        && classDef1.sanitize (c, this)
1262        && classDef2.sanitize (c, this))) return_trace (false);
1263 
1264     unsigned int len1 = valueFormat1.get_len ();
1265     unsigned int len2 = valueFormat2.get_len ();
1266     unsigned int stride = len1 + len2;
1267     unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
1268     unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
1269     return_trace (c->check_range ((const void *) values,
1270 				  count,
1271 				  record_size) &&
1272 		  valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
1273 		  valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
1274   }
1275 
1276   protected:
1277   HBUINT16	format;			/* Format identifier--format = 2 */
1278   OffsetTo<Coverage>
1279 		coverage;		/* Offset to Coverage table--from
1280 					 * beginning of subtable */
1281   ValueFormat	valueFormat1;		/* ValueRecord definition--for the
1282 					 * first glyph of the pair--may be zero
1283 					 * (0) */
1284   ValueFormat	valueFormat2;		/* ValueRecord definition--for the
1285 					 * second glyph of the pair--may be
1286 					 * zero (0) */
1287   OffsetTo<ClassDef>
1288 		classDef1;		/* Offset to ClassDef table--from
1289 					 * beginning of PairPos subtable--for
1290 					 * the first glyph of the pair */
1291   OffsetTo<ClassDef>
1292 		classDef2;		/* Offset to ClassDef table--from
1293 					 * beginning of PairPos subtable--for
1294 					 * the second glyph of the pair */
1295   HBUINT16	class1Count;		/* Number of classes in ClassDef1
1296 					 * table--includes Class0 */
1297   HBUINT16	class2Count;		/* Number of classes in ClassDef2
1298 					 * table--includes Class0 */
1299   ValueRecord	values;			/* Matrix of value pairs:
1300 					 * class1-major, class2-minor,
1301 					 * Each entry has value1 and value2 */
1302   public:
1303   DEFINE_SIZE_ARRAY (16, values);
1304 };
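
/* A worked example of the class-matrix indexing in PairPosFormat2::apply,
 * under assumed counts: with class2Count = 4 and record_len = len1 + len2 = 3,
 * the record for (klass1 = 2, klass2 = 1) starts at
 * values[3 * (2 * 4 + 1)] = values[27]; its first len1 entries position the
 * first glyph and the remaining len2 entries the second. */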
1305 
1306 struct PairPos
1307 {
1308   template <typename context_t, typename ...Ts>
1309   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1310   {
1311     TRACE_DISPATCH (this, u.format);
1312     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1313     switch (u.format) {
1314     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1315     case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
1316     default:return_trace (c->default_return_value ());
1317     }
1318   }
1319 
1320   protected:
1321   union {
1322   HBUINT16		format;		/* Format identifier */
1323   PairPosFormat1	format1;
1324   PairPosFormat2	format2;
1325   } u;
1326 };
1327 
1328 
1329 struct EntryExitRecord
1330 {
1331   friend struct CursivePosFormat1;
1332 
1333   bool sanitize (hb_sanitize_context_t *c, const void *base) const
1334   {
1335     TRACE_SANITIZE (this);
1336     return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
1337   }
1338 
1339   EntryExitRecord* copy (hb_serialize_context_t *c, const void *base) const
1340   {
1341     TRACE_SERIALIZE (this);
1342     auto *out = c->embed (this);
1343     if (unlikely (!out)) return_trace (nullptr);
1344 
1345     out->entryAnchor.serialize_copy (c, entryAnchor, base);
1346     out->exitAnchor.serialize_copy (c, exitAnchor, base);
1347     return_trace (out);
1348   }
1349 
1350   protected:
1351   OffsetTo<Anchor>
1352 		entryAnchor;		/* Offset to EntryAnchor table--from
1353 					 * beginning of CursivePos
1354 					 * subtable--may be NULL */
1355   OffsetTo<Anchor>
1356 		exitAnchor;		/* Offset to ExitAnchor table--from
1357 					 * beginning of CursivePos
1358 					 * subtable--may be NULL */
1359   public:
1360   DEFINE_SIZE_STATIC (4);
1361 };
1362 
1363 static void
1364 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
1365 
1366 struct CursivePosFormat1
1367 {
1368   bool intersects (const hb_set_t *glyphs) const
1369   { return (this+coverage).intersects (glyphs); }
1370 
1371   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1372 
1373   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1374   { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
1375 
1376   const Coverage &get_coverage () const { return this+coverage; }
1377 
1378   bool apply (hb_ot_apply_context_t *c) const
1379   {
1380     TRACE_APPLY (this);
1381     hb_buffer_t *buffer = c->buffer;
1382 
1383     const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage  (buffer->cur().codepoint)];
1384     if (!this_record.entryAnchor) return_trace (false);
1385 
1386     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1387     skippy_iter.reset (buffer->idx, 1);
1388     if (!skippy_iter.prev ()) return_trace (false);
1389 
1390     const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint)];
1391     if (!prev_record.exitAnchor) return_trace (false);
1392 
1393     unsigned int i = skippy_iter.idx;
1394     unsigned int j = buffer->idx;
1395 
1396     buffer->unsafe_to_break (i, j);
1397     float entry_x, entry_y, exit_x, exit_y;
1398     (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
1399     (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);
1400 
1401     hb_glyph_position_t *pos = buffer->pos;
1402 
1403     hb_position_t d;
1404     /* Main-direction adjustment */
1405     switch (c->direction) {
1406       case HB_DIRECTION_LTR:
1407 	pos[i].x_advance  = roundf (exit_x) + pos[i].x_offset;
1408 
1409 	d = roundf (entry_x) + pos[j].x_offset;
1410 	pos[j].x_advance -= d;
1411 	pos[j].x_offset  -= d;
1412 	break;
1413       case HB_DIRECTION_RTL:
1414 	d = roundf (exit_x) + pos[i].x_offset;
1415 	pos[i].x_advance -= d;
1416 	pos[i].x_offset  -= d;
1417 
1418 	pos[j].x_advance  = roundf (entry_x) + pos[j].x_offset;
1419 	break;
1420       case HB_DIRECTION_TTB:
1421 	pos[i].y_advance  = roundf (exit_y) + pos[i].y_offset;
1422 
1423 	d = roundf (entry_y) + pos[j].y_offset;
1424 	pos[j].y_advance -= d;
1425 	pos[j].y_offset  -= d;
1426 	break;
1427       case HB_DIRECTION_BTT:
1428 	d = roundf (exit_y) + pos[i].y_offset;
1429 	pos[i].y_advance -= d;
1430 	pos[i].y_offset  -= d;
1431 
1432 	pos[j].y_advance  = roundf (entry_y);
1433 	break;
1434       case HB_DIRECTION_INVALID:
1435       default:
1436 	break;
1437     }
1438 
1439     /* Cross-direction adjustment */
1440 
1441     /* We attach child to parent (think graph theory and rooted trees), where
1442      * the root stays on the baseline and each node aligns itself against its
1443      * parent.
1444      *
1445      * Optimize things for the case of RightToLeft, as that's most common in
1446      * Arabic. */
1447     unsigned int child  = i;
1448     unsigned int parent = j;
1449     hb_position_t x_offset = entry_x - exit_x;
1450     hb_position_t y_offset = entry_y - exit_y;
1451     if  (!(c->lookup_props & LookupFlag::RightToLeft))
1452     {
1453       unsigned int k = child;
1454       child = parent;
1455       parent = k;
1456       x_offset = -x_offset;
1457       y_offset = -y_offset;
1458     }
1459 
1460     /* If child was already connected to someone else, walk through its old
1461      * chain and reverse the link direction, such that the whole tree of its
1462      * previous connection now attaches to new parent.  Watch out for case
1463      * where new parent is on the path from old chain...
1464      */
1465     reverse_cursive_minor_offset (pos, child, c->direction, parent);
1466 
1467     pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
1468     pos[child].attach_chain() = (int) parent - (int) child;
1469     buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
1470     if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
1471       pos[child].y_offset = y_offset;
1472     else
1473       pos[child].x_offset = x_offset;
1474 
1475     buffer->idx++;
1476     return_trace (true);
1477   }
1478 
1479   template <typename Iterator,
1480 	    hb_requires (hb_is_iterator (Iterator))>
1481   void serialize (hb_serialize_context_t *c, Iterator it, const void *base)
1482   {
1483     if (unlikely (!c->extend_min ((*this)))) return;
1484     this->format = 1;
1485     this->entryExitRecord.len = it.len ();
1486 
1487     for (const EntryExitRecord& entry_record : + it
1488 					       | hb_map (hb_second))
1489       c->copy (entry_record, base);
1490 
1491     auto glyphs =
1492     + it
1493     | hb_map_retains_sorting (hb_first)
1494     ;
1495 
1496     coverage.serialize (c, this).serialize (c, glyphs);
1497   }
1498 
1499   bool subset (hb_subset_context_t *c) const
1500   {
1501     TRACE_SUBSET (this);
1502     const hb_set_t &glyphset = *c->plan->glyphset ();
1503     const hb_map_t &glyph_map = *c->plan->glyph_map;
1504 
1505     auto *out = c->serializer->start_embed (*this);
1506     if (unlikely (!out)) return_trace (false);
1507 
1508     auto it =
1509     + hb_zip (this+coverage, entryExitRecord)
1510     | hb_filter (glyphset, hb_first)
1511     | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
1512 			      { return hb_pair (glyph_map[p.first], p.second);})
1513     ;
1514 
1515     bool ret = bool (it);
1516     out->serialize (c->serializer, it, this);
1517     return_trace (ret);
1518   }
1519 
1520   bool sanitize (hb_sanitize_context_t *c) const
1521   {
1522     TRACE_SANITIZE (this);
1523     return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
1524   }
1525 
1526   protected:
1527   HBUINT16	format;			/* Format identifier--format = 1 */
1528   OffsetTo<Coverage>
1529 		coverage;		/* Offset to Coverage table--from
1530 					 * beginning of subtable */
1531   ArrayOf<EntryExitRecord>
1532 		entryExitRecord;	/* Array of EntryExit records--in
1533 					 * Coverage Index order */
1534   public:
1535   DEFINE_SIZE_ARRAY (6, entryExitRecord);
1536 };
1537 
1538 struct CursivePos
1539 {
1540   template <typename context_t, typename ...Ts>
1541   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1542   {
1543     TRACE_DISPATCH (this, u.format);
1544     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1545     switch (u.format) {
1546     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1547     default:return_trace (c->default_return_value ());
1548     }
1549   }
1550 
1551   protected:
1552   union {
1553   HBUINT16		format;		/* Format identifier */
1554   CursivePosFormat1	format1;
1555   } u;
1556 };
1557 
1558 
1559 typedef AnchorMatrix BaseArray;		/* base-major--
1560 					 * in order of BaseCoverage Index--,
1561 					 * mark-minor--
1562 					 * ordered by class--zero-based. */
1563 
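/* Collect the mark classes used by the marks retained in `glyphset' and assign
 * them new consecutive indices in `klass_mapping', so that subsetted mark
 * arrays and anchor matrices only carry columns for classes still in use. */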
1564 static void Markclass_closure_and_remap_indexes (const Coverage  &mark_coverage,
1565 						 const MarkArray &mark_array,
1566 						 const hb_set_t  &glyphset,
1567 						 hb_map_t*        klass_mapping /* INOUT */)
1568 {
1569   hb_set_t orig_classes;
1570 
1571   + hb_zip (mark_coverage, mark_array)
1572   | hb_filter (glyphset, hb_first)
1573   | hb_map (hb_second)
1574   | hb_map (&MarkRecord::get_class)
1575   | hb_sink (orig_classes)
1576   ;
1577 
1578   unsigned idx = 0;
1579   for (auto klass : orig_classes.iter ())
1580   {
1581     if (klass_mapping->has (klass)) continue;
1582     klass_mapping->set (klass, idx);
1583     idx++;
1584   }
1585 }
1586 
1587 struct MarkBasePosFormat1
1588 {
1589   bool intersects (const hb_set_t *glyphs) const
1590   { return (this+markCoverage).intersects (glyphs) &&
1591 	   (this+baseCoverage).intersects (glyphs); }
1592 
1593   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1594 
1595   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1596   {
1597     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
1598     if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
1599   }
1600 
1601   const Coverage &get_coverage () const { return this+markCoverage; }
1602 
1603   bool apply (hb_ot_apply_context_t *c) const
1604   {
1605     TRACE_APPLY (this);
1606     hb_buffer_t *buffer = c->buffer;
1607     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
1608     if (likely (mark_index == NOT_COVERED)) return_trace (false);
1609 
1610     /* Now we search backwards for a non-mark glyph */
1611     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1612     skippy_iter.reset (buffer->idx, 1);
1613     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1614     do {
1615       if (!skippy_iter.prev ()) return_trace (false);
1616       /* We only want to attach to the first of a MultipleSubst sequence.
1617        * https://github.com/harfbuzz/harfbuzz/issues/740
1618        * Reject others...
1619        * ...but stop if we find a mark in the MultipleSubst sequence:
1620        * https://github.com/harfbuzz/harfbuzz/issues/1020 */
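      /* I.e., accept this glyph unless it is a non-initial component of a
       * MultipleSubst sequence whose immediately preceding glyph is the previous
       * component of that same sequence (and is not a mark); in that case reject
       * it and keep walking backwards. */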
1621       if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
1622 	  0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
1623 	  (skippy_iter.idx == 0 ||
1624 	   _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
1625 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
1626 	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
1627 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
1628 	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
1629 	   ))
1630 	break;
1631       skippy_iter.reject ();
1632     } while (true);
1633 
1634     /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
1635     //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1636 
1637     unsigned int base_index = (this+baseCoverage).get_coverage  (buffer->info[skippy_iter.idx].codepoint);
1638     if (base_index == NOT_COVERED) return_trace (false);
1639 
1640     return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
1641   }
1642 
1643   bool subset (hb_subset_context_t *c) const
1644   {
1645     TRACE_SUBSET (this);
1646     const hb_set_t &glyphset = *c->plan->glyphset ();
1647     const hb_map_t &glyph_map = *c->plan->glyph_map;
1648 
1649     auto *out = c->serializer->start_embed (*this);
1650     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1651     out->format = format;
1652 
1653     hb_map_t klass_mapping;
1654     Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);
1655 
1656     if (!klass_mapping.get_population ()) return_trace (false);
1657     out->classCount = klass_mapping.get_population ();
1658 
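    /* Rebuild the mark side: pair each retained mark glyph with its MarkRecord,
     * remap glyph ids for the new coverage, and re-serialize the MarkArray using
     * the remapped class indices. */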
1659     auto mark_iter =
1660     + hb_zip (this+markCoverage, this+markArray)
1661     | hb_filter (glyphset, hb_first)
1662     ;
1663 
1664     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1665     + mark_iter
1666     | hb_map (hb_first)
1667     | hb_map (glyph_map)
1668     | hb_sink (new_coverage)
1669     ;
1670 
1671     if (!out->markCoverage.serialize (c->serializer, out)
1672 			  .serialize (c->serializer, new_coverage.iter ()))
1673       return_trace (false);
1674 
1675     out->markArray.serialize (c->serializer, out)
1676 		  .serialize (c->serializer, &klass_mapping, &(this+markArray), + mark_iter
1677 										| hb_map (hb_second));
1678 
1679     unsigned basecount = (this+baseArray).rows;
1680     auto base_iter =
1681     + hb_zip (this+baseCoverage, hb_range (basecount))
1682     | hb_filter (glyphset, hb_first)
1683     ;
1684 
1685     new_coverage.reset ();
1686     + base_iter
1687     | hb_map (hb_first)
1688     | hb_map (glyph_map)
1689     | hb_sink (new_coverage)
1690     ;
1691 
1692     if (!out->baseCoverage.serialize (c->serializer, out)
1693 			  .serialize (c->serializer, new_coverage.iter ()))
1694       return_trace (false);
1695 
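    /* For each retained base row, keep only the anchor-matrix cells whose mark
     * class survived the remap; indices are row-major into the old BaseArray. */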
1696     hb_sorted_vector_t<unsigned> base_indexes;
1697     for (const unsigned row : + base_iter
1698 			      | hb_map (hb_second))
1699     {
1700       + hb_range ((unsigned) classCount)
1701       | hb_filter (klass_mapping)
1702       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
1703       | hb_sink (base_indexes)
1704       ;
1705     }
1706     out->baseArray.serialize (c->serializer, out)
1707 		  .serialize (c->serializer, base_iter.len (), &(this+baseArray), base_indexes.iter ());
1708 
1709     return_trace (true);
1710   }
1711 
1712   bool sanitize (hb_sanitize_context_t *c) const
1713   {
1714     TRACE_SANITIZE (this);
1715     return_trace (c->check_struct (this) &&
1716 		  markCoverage.sanitize (c, this) &&
1717 		  baseCoverage.sanitize (c, this) &&
1718 		  markArray.sanitize (c, this) &&
1719 		  baseArray.sanitize (c, this, (unsigned int) classCount));
1720   }
1721 
1722   protected:
1723   HBUINT16	format;			/* Format identifier--format = 1 */
1724   OffsetTo<Coverage>
1725 		markCoverage;		/* Offset to MarkCoverage table--from
1726 					 * beginning of MarkBasePos subtable */
1727   OffsetTo<Coverage>
1728 		baseCoverage;		/* Offset to BaseCoverage table--from
1729 					 * beginning of MarkBasePos subtable */
1730   HBUINT16	classCount;		/* Number of classes defined for marks */
1731   OffsetTo<MarkArray>
1732 		markArray;		/* Offset to MarkArray table--from
1733 					 * beginning of MarkBasePos subtable */
1734   OffsetTo<BaseArray>
1735 		baseArray;		/* Offset to BaseArray table--from
1736 					 * beginning of MarkBasePos subtable */
1737   public:
1738   DEFINE_SIZE_STATIC (12);
1739 };
1740 
1741 struct MarkBasePos
1742 {
1743   template <typename context_t, typename ...Ts>
1744   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1745   {
1746     TRACE_DISPATCH (this, u.format);
1747     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1748     switch (u.format) {
1749     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1750     default:return_trace (c->default_return_value ());
1751     }
1752   }
1753 
1754   protected:
1755   union {
1756   HBUINT16		format;		/* Format identifier */
1757   MarkBasePosFormat1	format1;
1758   } u;
1759 };
1760 
1761 
1762 typedef AnchorMatrix LigatureAttach;	/* component-major--
1763 					 * in order of writing direction--,
1764 					 * mark-minor--
1765 					 * ordered by class--zero-based. */
1766 
1767 typedef OffsetListOf<LigatureAttach> LigatureArray;
1768 					/* Array of LigatureAttach
1769 					 * tables ordered by
1770 					 * LigatureCoverage Index */
1771 
1772 struct MarkLigPosFormat1
1773 {
1774   bool intersects (const hb_set_t *glyphs) const
1775   { return (this+markCoverage).intersects (glyphs) &&
1776 	   (this+ligatureCoverage).intersects (glyphs); }
1777 
1778   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1779 
1780   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1781   {
1782     if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
1783     if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
1784   }
1785 
1786   const Coverage &get_coverage () const { return this+markCoverage; }
1787 
1788   bool apply (hb_ot_apply_context_t *c) const
1789   {
1790     TRACE_APPLY (this);
1791     hb_buffer_t *buffer = c->buffer;
1792     unsigned int mark_index = (this+markCoverage).get_coverage  (buffer->cur().codepoint);
1793     if (likely (mark_index == NOT_COVERED)) return_trace (false);
1794 
1795     /* Now we search backwards for a non-mark glyph */
1796     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1797     skippy_iter.reset (buffer->idx, 1);
1798     skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
1799     if (!skippy_iter.prev ()) return_trace (false);
1800 
1801     /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
1802     //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1803 
1804     unsigned int j = skippy_iter.idx;
1805     unsigned int lig_index = (this+ligatureCoverage).get_coverage  (buffer->info[j].codepoint);
1806     if (lig_index == NOT_COVERED) return_trace (false);
1807 
1808     const LigatureArray& lig_array = this+ligatureArray;
1809     const LigatureAttach& lig_attach = lig_array[lig_index];
1810 
1811     /* Find component to attach to */
1812     unsigned int comp_count = lig_attach.rows;
1813     if (unlikely (!comp_count)) return_trace (false);
1814 
1815     /* We must now check whether the ligature ID of the current mark glyph
1816      * is identical to the ligature ID of the found ligature.  If yes, we
1817      * can directly use the component index.  If not, we attach the mark
1818      * glyph to the last component of the ligature. */
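    /* For example, a mark that was reordered in from a different cluster carries
     * a different (or no) ligature ID, so it falls through to the last component
     * below. */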
1819     unsigned int comp_index;
1820     unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
1821     unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
1822     unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
1823     if (lig_id && lig_id == mark_id && mark_comp > 0)
1824       comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
1825     else
1826       comp_index = comp_count - 1;
1827 
1828     return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
1829   }
1830 
1831   bool subset (hb_subset_context_t *c) const
1832   {
1833     TRACE_SUBSET (this);
1834     // TODO(subset)
1835     return_trace (false);
1836   }
1837 
1838   bool sanitize (hb_sanitize_context_t *c) const
1839   {
1840     TRACE_SANITIZE (this);
1841     return_trace (c->check_struct (this) &&
1842 		  markCoverage.sanitize (c, this) &&
1843 		  ligatureCoverage.sanitize (c, this) &&
1844 		  markArray.sanitize (c, this) &&
1845 		  ligatureArray.sanitize (c, this, (unsigned int) classCount));
1846   }
1847 
1848   protected:
1849   HBUINT16	format;			/* Format identifier--format = 1 */
1850   OffsetTo<Coverage>
1851 		markCoverage;		/* Offset to Mark Coverage table--from
1852 					 * beginning of MarkLigPos subtable */
1853   OffsetTo<Coverage>
1854 		ligatureCoverage;	/* Offset to Ligature Coverage
1855 					 * table--from beginning of MarkLigPos
1856 					 * subtable */
1857   HBUINT16	classCount;		/* Number of defined mark classes */
1858   OffsetTo<MarkArray>
1859 		markArray;		/* Offset to MarkArray table--from
1860 					 * beginning of MarkLigPos subtable */
1861   OffsetTo<LigatureArray>
1862 		ligatureArray;		/* Offset to LigatureArray table--from
1863 					 * beginning of MarkLigPos subtable */
1864   public:
1865   DEFINE_SIZE_STATIC (12);
1866 };
1867 
1868 struct MarkLigPos
1869 {
1870   template <typename context_t, typename ...Ts>
1871   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
1872   {
1873     TRACE_DISPATCH (this, u.format);
1874     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
1875     switch (u.format) {
1876     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
1877     default:return_trace (c->default_return_value ());
1878     }
1879   }
1880 
1881   protected:
1882   union {
1883   HBUINT16		format;		/* Format identifier */
1884   MarkLigPosFormat1	format1;
1885   } u;
1886 };
1887 
1888 
1889 typedef AnchorMatrix Mark2Array;	/* mark2-major--
1890 					 * in order of Mark2Coverage Index--,
1891 					 * mark1-minor--
1892 					 * ordered by class--zero-based. */
1893 
1894 struct MarkMarkPosFormat1
1895 {
1896   bool intersects (const hb_set_t *glyphs) const
1897   { return (this+mark1Coverage).intersects (glyphs) &&
1898 	   (this+mark2Coverage).intersects (glyphs); }
1899 
1900   void closure_lookups (hb_closure_lookups_context_t *c) const {}
1901 
1902   void collect_glyphs (hb_collect_glyphs_context_t *c) const
1903   {
1904     if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
1905     if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
1906   }
1907 
1908   const Coverage &get_coverage () const { return this+mark1Coverage; }
1909 
1910   bool apply (hb_ot_apply_context_t *c) const
1911   {
1912     TRACE_APPLY (this);
1913     hb_buffer_t *buffer = c->buffer;
1914     unsigned int mark1_index = (this+mark1Coverage).get_coverage  (buffer->cur().codepoint);
1915     if (likely (mark1_index == NOT_COVERED)) return_trace (false);
1916 
1917     /* Now we search backwards for a suitable mark glyph, stopping at the first non-mark glyph */
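    /* The Ignore* lookup flags are cleared below so the backwards search is not
     * allowed to skip over base glyphs, ligatures, or marks. */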
1918     hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
1919     skippy_iter.reset (buffer->idx, 1);
1920     skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
1921     if (!skippy_iter.prev ()) return_trace (false);
1922 
1923     if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
1924 
1925     unsigned int j = skippy_iter.idx;
1926 
1927     unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
1928     unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
1929     unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
1930     unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);
1931 
1932     if (likely (id1 == id2)) {
1933       if (id1 == 0) /* Marks belonging to the same base. */
1934 	goto good;
1935       else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
1936 	goto good;
1937     } else {
1938       /* If the ligature ids don't match, it may be that one of the marks
1939        * is itself a ligature, in which case match. */
1940       if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
1941 	goto good;
1942     }
1943 
1944     /* Didn't match. */
1945     return_trace (false);
1946 
1947     good:
1948     unsigned int mark2_index = (this+mark2Coverage).get_coverage  (buffer->info[j].codepoint);
1949     if (mark2_index == NOT_COVERED) return_trace (false);
1950 
1951     return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
1952   }
1953 
1954   bool subset (hb_subset_context_t *c) const
1955   {
1956     TRACE_SUBSET (this);
1957     const hb_set_t &glyphset = *c->plan->glyphset ();
1958     const hb_map_t &glyph_map = *c->plan->glyph_map;
1959 
1960     auto *out = c->serializer->start_embed (*this);
1961     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
1962     out->format = format;
1963 
1964     hb_map_t klass_mapping;
1965     Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);
1966 
1967     if (!klass_mapping.get_population ()) return_trace (false);
1968     out->classCount = klass_mapping.get_population ();
1969 
1970     auto mark1_iter =
1971     + hb_zip (this+mark1Coverage, this+mark1Array)
1972     | hb_filter (glyphset, hb_first)
1973     ;
1974 
1975     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
1976     + mark1_iter
1977     | hb_map (hb_first)
1978     | hb_map (glyph_map)
1979     | hb_sink (new_coverage)
1980     ;
1981 
1982     if (!out->mark1Coverage.serialize (c->serializer, out)
1983 			   .serialize (c->serializer, new_coverage.iter ()))
1984       return_trace (false);
1985 
1986     out->mark1Array.serialize (c->serializer, out)
1987 		   .serialize (c->serializer, &klass_mapping, &(this+mark1Array), + mark1_iter
1988 										  | hb_map (hb_second));
1989     /* Now do the same for the mark2 (attachment target) side. */
1990     unsigned mark2count = (this+mark2Array).rows;
1991     auto mark2_iter =
1992     + hb_zip (this+mark2Coverage, hb_range (mark2count))
1993     | hb_filter (glyphset, hb_first)
1994     ;
1995 
1996     new_coverage.reset ();
1997     + mark2_iter
1998     | hb_map (hb_first)
1999     | hb_map (glyph_map)
2000     | hb_sink (new_coverage)
2001     ;
2002 
2003     if (!out->mark2Coverage.serialize (c->serializer, out)
2004 			   .serialize (c->serializer, new_coverage.iter ()))
2005       return_trace (false);
2006 
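    /* As on the mark1 side: keep only the Mark2Array cells whose mark class
     * survived the remap, indexed row-major into the old Mark2Array. */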
2007     hb_sorted_vector_t<unsigned> mark2_indexes;
2008     for (const unsigned row : + mark2_iter
2009 			      | hb_map (hb_second))
2010     {
2011       + hb_range ((unsigned) classCount)
2012       | hb_filter (klass_mapping)
2013       | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
2014       | hb_sink (mark2_indexes)
2015       ;
2016     }
2017     out->mark2Array.serialize (c->serializer, out)
2018 		   .serialize (c->serializer, mark2_iter.len (), &(this+mark2Array), mark2_indexes.iter ());
2019 
2020     return_trace (true);
2021   }
2022 
2023   bool sanitize (hb_sanitize_context_t *c) const
2024   {
2025     TRACE_SANITIZE (this);
2026     return_trace (c->check_struct (this) &&
2027 		  mark1Coverage.sanitize (c, this) &&
2028 		  mark2Coverage.sanitize (c, this) &&
2029 		  mark1Array.sanitize (c, this) &&
2030 		  mark2Array.sanitize (c, this, (unsigned int) classCount));
2031   }
2032 
2033   protected:
2034   HBUINT16	format;			/* Format identifier--format = 1 */
2035   OffsetTo<Coverage>
2036 		mark1Coverage;		/* Offset to Combining Mark1 Coverage
2037 					 * table--from beginning of MarkMarkPos
2038 					 * subtable */
2039   OffsetTo<Coverage>
2040 		mark2Coverage;		/* Offset to Combining Mark2 Coverage
2041 					 * table--from beginning of MarkMarkPos
2042 					 * subtable */
2043   HBUINT16	classCount;		/* Number of defined mark classes */
2044   OffsetTo<MarkArray>
2045 		mark1Array;		/* Offset to Mark1Array table--from
2046 					 * beginning of MarkMarkPos subtable */
2047   OffsetTo<Mark2Array>
2048 		mark2Array;		/* Offset to Mark2Array table--from
2049 					 * beginning of MarkMarkPos subtable */
2050   public:
2051   DEFINE_SIZE_STATIC (12);
2052 };
2053 
2054 struct MarkMarkPos
2055 {
2056   template <typename context_t, typename ...Ts>
2057   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2058   {
2059     TRACE_DISPATCH (this, u.format);
2060     if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
2061     switch (u.format) {
2062     case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
2063     default:return_trace (c->default_return_value ());
2064     }
2065   }
2066 
2067   protected:
2068   union {
2069   HBUINT16		format;		/* Format identifier */
2070   MarkMarkPosFormat1	format1;
2071   } u;
2072 };
2073 
2074 
2075 struct ContextPos : Context {};
2076 
2077 struct ChainContextPos : ChainContext {};
2078 
2079 struct ExtensionPos : Extension<ExtensionPos>
2080 {
2081   typedef struct PosLookupSubTable SubTable;
2082 };
2083 
2084 
2085 
2086 /*
2087  * PosLookup
2088  */
2089 
2090 
2091 struct PosLookupSubTable
2092 {
2093   friend struct Lookup;
2094   friend struct PosLookup;
2095 
2096   enum Type {
2097     Single		= 1,
2098     Pair		= 2,
2099     Cursive		= 3,
2100     MarkBase		= 4,
2101     MarkLig		= 5,
2102     MarkMark		= 6,
2103     Context		= 7,
2104     ChainContext	= 8,
2105     Extension		= 9
2106   };
2107 
2108   template <typename context_t, typename ...Ts>
2109   typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
2110   {
2111     TRACE_DISPATCH (this, lookup_type);
2112     switch (lookup_type) {
2113     case Single:		return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
2114     case Pair:			return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
2115     case Cursive:		return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
2116     case MarkBase:		return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
2117     case MarkLig:		return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
2118     case MarkMark:		return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
2119     case Context:		return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
2120     case ChainContext:		return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
2121     case Extension:		return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
2122     default:			return_trace (c->default_return_value ());
2123     }
2124   }
2125 
2126   bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
2127   {
2128     hb_intersects_context_t c (glyphs);
2129     return dispatch (&c, lookup_type);
2130   }
2131 
2132   protected:
2133   union {
2134   SinglePos		single;
2135   PairPos		pair;
2136   CursivePos		cursive;
2137   MarkBasePos		markBase;
2138   MarkLigPos		markLig;
2139   MarkMarkPos		markMark;
2140   ContextPos		context;
2141   ChainContextPos	chainContext;
2142   ExtensionPos		extension;
2143   } u;
2144   public:
2145   DEFINE_SIZE_MIN (0);
2146 };
2147 
2148 
2149 struct PosLookup : Lookup
2150 {
2151   typedef struct PosLookupSubTable SubTable;
2152 
2153   const SubTable& get_subtable (unsigned int i) const
2154   { return Lookup::get_subtable<SubTable> (i); }
2155 
2156   bool is_reverse () const
2157   {
2158     return false;
2159   }
2160 
2161   bool apply (hb_ot_apply_context_t *c) const
2162   {
2163     TRACE_APPLY (this);
2164     return_trace (dispatch (c));
2165   }
2166 
2167   bool intersects (const hb_set_t *glyphs) const
2168   {
2169     hb_intersects_context_t c (glyphs);
2170     return dispatch (&c);
2171   }
2172 
2173   hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
2174   { return dispatch (c); }
2175 
2176   hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
2177   {
2178     if (c->is_lookup_visited (this_index))
2179       return hb_closure_lookups_context_t::default_return_value ();
2180 
2181     c->set_lookup_visited (this_index);
2182     if (!intersects (c->glyphs))
2183     {
2184       c->set_lookup_inactive (this_index);
2185       return hb_closure_lookups_context_t::default_return_value ();
2186     }
2187     c->set_recurse_func (dispatch_closure_lookups_recurse_func);
2188 
2189     hb_closure_lookups_context_t::return_t ret = dispatch (c);
2190     return ret;
2191   }
2192 
2193   template <typename set_t>
2194   void collect_coverage (set_t *glyphs) const
2195   {
2196     hb_collect_coverage_context_t<set_t> c (glyphs);
2197     dispatch (&c);
2198   }
2199 
2200   static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
2201 
2202   template <typename context_t>
2203   static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);
2204 
2205   HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);
2206 
2207   template <typename context_t, typename ...Ts>
2208   typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
2209   { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }
2210 
2211   bool subset (hb_subset_context_t *c) const
2212   { return Lookup::subset<SubTable> (c); }
2213 
2214   bool sanitize (hb_sanitize_context_t *c) const
2215   { return Lookup::sanitize<SubTable> (c); }
2216 };
2217 
2218 /*
2219  * GPOS -- Glyph Positioning
2220  * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
2221  */
2222 
2223 struct GPOS : GSUBGPOS
2224 {
2225   static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;
2226 
2227   const PosLookup& get_lookup (unsigned int i) const
2228   { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }
2229 
2230   static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
2231   static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
2232   static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);
2233 
2234   bool subset (hb_subset_context_t *c) const
2235   {
2236     hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_features);
2237     return GSUBGPOS::subset<PosLookup> (&l);
2238   }
2239 
2240   bool sanitize (hb_sanitize_context_t *c) const
2241   { return GSUBGPOS::sanitize<PosLookup> (c); }
2242 
2243   HB_INTERNAL bool is_blacklisted (hb_blob_t *blob,
2244 				   hb_face_t *face) const;
2245 
2246   typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
2247 };
2248 
2249 
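/* Walk the existing cursive attachment chain starting at pos[i], flipping each
 * link so that the chain ends up hanging off `new_parent' instead; stop early
 * if `new_parent' is encountered on the chain, to avoid creating a cycle. */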
2250 static void
2251 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
2252 {
2253   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2254   if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
2255     return;
2256 
2257   pos[i].attach_chain() = 0;
2258 
2259   unsigned int j = (int) i + chain;
2260 
2261   /* Stop if we see new parent in the chain. */
2262   if (j == new_parent)
2263     return;
2264 
2265   reverse_cursive_minor_offset (pos, j, direction, new_parent);
2266 
2267   if (HB_DIRECTION_IS_HORIZONTAL (direction))
2268     pos[j].y_offset = -pos[i].y_offset;
2269   else
2270     pos[j].x_offset = -pos[i].x_offset;
2271 
2272   pos[j].attach_chain() = -chain;
2273   pos[j].attach_type() = type;
2274 }
2275 static void
2276 propagate_attachment_offsets (hb_glyph_position_t *pos,
2277 			      unsigned int len,
2278 			      unsigned int i,
2279 			      hb_direction_t direction)
2280 {
2281   /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
2282    * the offset of the glyph they are attached to. */
2283   int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2284   if (likely (!chain))
2285     return;
2286 
2287   pos[i].attach_chain() = 0;
2288 
2289   unsigned int j = (int) i + chain;
2290 
2291   if (unlikely (j >= len))
2292     return;
2293 
2294   propagate_attachment_offsets (pos, len, j, direction);
2295 
2296   assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));
2297 
2298   if (type & ATTACH_TYPE_CURSIVE)
2299   {
2300     if (HB_DIRECTION_IS_HORIZONTAL (direction))
2301       pos[i].y_offset += pos[j].y_offset;
2302     else
2303       pos[i].x_offset += pos[j].x_offset;
2304   }
2305   else /*if (type & ATTACH_TYPE_MARK)*/
2306   {
2307     pos[i].x_offset += pos[j].x_offset;
2308     pos[i].y_offset += pos[j].y_offset;
2309 
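    /* A glyph's offset applies on top of its own position after advances, so the
     * loops below fold the advances of the glyphs between the attachment target
     * and the mark into the mark's offset. */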
2310     assert (j < i);
2311     if (HB_DIRECTION_IS_FORWARD (direction))
2312       for (unsigned int k = j; k < i; k++) {
2313 	pos[i].x_offset -= pos[k].x_advance;
2314 	pos[i].y_offset -= pos[k].y_advance;
2315       }
2316     else
2317       for (unsigned int k = j + 1; k < i + 1; k++) {
2318 	pos[i].x_offset += pos[k].x_advance;
2319 	pos[i].y_offset += pos[k].y_advance;
2320       }
2321   }
2322 }
2323 
2324 void
2325 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2326 {
2327   unsigned int count = buffer->len;
2328   for (unsigned int i = 0; i < count; i++)
2329     buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
2330 }
2331 
2332 void
2333 GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
2334 {
2335   //_hb_buffer_assert_gsubgpos_vars (buffer);
2336 }
2337 
2338 void
2339 GPOS::position_finish_offsets (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2340 {
2341   _hb_buffer_assert_gsubgpos_vars (buffer);
2342 
2343   unsigned int len;
2344   hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
2345   hb_direction_t direction = buffer->props.direction;
2346 
2347   /* Handle attachments */
2348   if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
2349     for (unsigned int i = 0; i < len; i++)
2350       propagate_attachment_offsets (pos, len, i, direction);
2351 }
2352 
2353 
2354 struct GPOS_accelerator_t : GPOS::accelerator_t {};
2355 
2356 
2357 /* Out-of-class implementation for methods recursing */
2358 
2359 #ifndef HB_NO_OT_LAYOUT
2360 template <typename context_t>
2361 /*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
2362 {
2363   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2364   return l.dispatch (c);
2365 }
2366 
2367 /*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
2368 {
2369   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
2370   return l.closure_lookups (c, this_index);
2371 }
2372 
2373 /*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
2374 {
2375   const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2376   unsigned int saved_lookup_props = c->lookup_props;
2377   unsigned int saved_lookup_index = c->lookup_index;
2378   c->set_lookup_index (lookup_index);
2379   c->set_lookup_props (l.get_props ());
2380   bool ret = l.dispatch (c);
2381   c->set_lookup_index (saved_lookup_index);
2382   c->set_lookup_props (saved_lookup_props);
2383   return ret;
2384 }
2385 #endif
2386 
2387 
2388 } /* namespace OT */
2389 
2390 
2391 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */
2392