1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012,2013 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GPOS_TABLE_HH
30 #define HB_OT_LAYOUT_GPOS_TABLE_HH
31
32 #include "hb-ot-layout-gsubgpos.hh"
33
34
35 namespace OT {
36
37 struct MarkArray;
38 static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
39 const MarkArray &mark_array,
40 const hb_set_t &glyphset,
41 hb_map_t* klass_mapping /* INOUT */);
42
43 /* buffer **position** var allocations */
44 #define attach_chain() var.i16[0] /* glyph to which this attaches to, relative to current glyphs; negative for going back, positive for forward. */
45 #define attach_type() var.u8[2] /* attachment type */
46 /* Note! if attach_chain() is zero, the value of attach_type() is irrelevant. */
47
48 enum attach_type_t {
49 ATTACH_TYPE_NONE = 0X00,
50
51 /* Each attachment should be either a mark or a cursive; can't be both. */
52 ATTACH_TYPE_MARK = 0X01,
53 ATTACH_TYPE_CURSIVE = 0X02,
54 };
55
56
57 /* Shared Tables: ValueRecord, Anchor Table, and MarkArray */
58
59 typedef HBUINT16 Value;
60
61 typedef UnsizedArrayOf<Value> ValueRecord;
62
/* ValueFormat is the 16-bit flag word that describes which fields are present
 * in the ValueRecords that follow it.  A ValueRecord is a packed run of
 * HBUINT16 slots: one slot per set flag, in the fixed order of the Flags
 * enum below.  Every helper here therefore walks the record by advancing a
 * `values` pointer one slot per *set* flag — that stepping order is
 * load-bearing; do not reorder the flag checks. */
struct ValueFormat : HBUINT16
{
  enum Flags {
    xPlacement	= 0x0001u,	/* Includes horizontal adjustment for placement */
    yPlacement	= 0x0002u,	/* Includes vertical adjustment for placement */
    xAdvance	= 0x0004u,	/* Includes horizontal adjustment for advance */
    yAdvance	= 0x0008u,	/* Includes vertical adjustment for advance */
    xPlaDevice	= 0x0010u,	/* Includes horizontal Device table for placement */
    yPlaDevice	= 0x0020u,	/* Includes vertical Device table for placement */
    xAdvDevice	= 0x0040u,	/* Includes horizontal Device table for advance */
    yAdvDevice	= 0x0080u,	/* Includes vertical Device table for advance */
    ignored	= 0x0F00u,	/* Was used in TrueType Open for MM fonts */
    reserved	= 0xF000u,	/* For future use */

    devices	= 0x00F0u	/* Mask for having any Device table */
  };

/* All fields are optional.  Only those present (per the flags) advance the value pointer. */
#if 0
  HBINT16		xPlacement;	/* Horizontal adjustment for
					 * placement--in design units */
  HBINT16		yPlacement;	/* Vertical adjustment for
					 * placement--in design units */
  HBINT16		xAdvance;	/* Horizontal adjustment for
					 * advance--in design units (only used
					 * for horizontal writing) */
  HBINT16		yAdvance;	/* Vertical adjustment for advance--in
					 * design units (only used for vertical
					 * writing) */
  Offset16To<Device>	xPlaDevice;	/* Offset to Device table for
					 * horizontal placement--measured from
					 * beginning of PosTable (may be NULL) */
  Offset16To<Device>	yPlaDevice;	/* Offset to Device table for vertical
					 * placement--measured from beginning
					 * of PosTable (may be NULL) */
  Offset16To<Device>	xAdvDevice;	/* Offset to Device table for
					 * horizontal advance--measured from
					 * beginning of PosTable (may be NULL) */
  Offset16To<Device>	yAdvDevice;	/* Offset to Device table for vertical
					 * advance--measured from beginning of
					 * PosTable (may be NULL) */
#endif

  IntType& operator = (uint16_t i) { v = i; return *this; }

  /* Number of 16-bit slots in one ValueRecord = number of set flag bits. */
  unsigned int get_len () const { return hb_popcount ((unsigned int) *this); }
  /* Byte size of one ValueRecord. */
  unsigned int get_size () const { return get_len () * Value::static_size; }

  /* Apply one ValueRecord at `values` to `glyph_pos`.  `base` is the start of
   * the enclosing subtable, used to resolve Device-table offsets.  Returns
   * true iff any non-zero value was consumed (tracked via the `worked`
   * out-parameter of get_short/get_device). */
  bool apply_value (hb_ot_apply_context_t *c,
		    const void            *base,
		    const Value           *values,
		    hb_glyph_position_t   &glyph_pos) const
  {
    bool ret = false;
    unsigned int format = *this;
    if (!format) return ret;

    hb_font_t *font = c->font;
    bool horizontal = HB_DIRECTION_IS_HORIZONTAL (c->direction);

    if (format & xPlacement) glyph_pos.x_offset  += font->em_scale_x (get_short (values++, &ret));
    if (format & yPlacement) glyph_pos.y_offset  += font->em_scale_y (get_short (values++, &ret));
    if (format & xAdvance) {
      /* Only applied horizontally, but the slot is consumed regardless. */
      if (likely (horizontal)) glyph_pos.x_advance += font->em_scale_x (get_short (values, &ret));
      values++;
    }
    /* y_advance values grow downward but font-space grows upward, hence negation */
    if (format & yAdvance) {
      if (unlikely (!horizontal)) glyph_pos.y_advance -= font->em_scale_y (get_short (values, &ret));
      values++;
    }

    if (!has_device ()) return ret;

    /* Device tables only matter when hinting at a pixel size or when
     * variation coordinates are set. */
    bool use_x_device = font->x_ppem || font->num_coords;
    bool use_y_device = font->y_ppem || font->num_coords;

    if (!use_x_device && !use_y_device) return ret;

    const VariationStore &store = c->var_store;

    /* pixel -> fractional pixel */
    if (format & xPlaDevice) {
      if (use_x_device) glyph_pos.x_offset  += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yPlaDevice) {
      if (use_y_device) glyph_pos.y_offset  += (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    if (format & xAdvDevice) {
      if (horizontal && use_x_device) glyph_pos.x_advance += (base + get_device (values, &ret)).get_x_delta (font, store);
      values++;
    }
    if (format & yAdvDevice) {
      /* y_advance values grow downward but font-space grows upward, hence negation */
      if (!horizontal && use_y_device) glyph_pos.y_advance -= (base + get_device (values, &ret)).get_y_delta (font, store);
      values++;
    }
    return ret;
  }

  /* Returns this format with flags whose stored value is zero dropped —
   * used by the subsetter to shrink records. */
  unsigned int get_effective_format (const Value *values) const
  {
    unsigned int format = *this;
    for (unsigned flag = xPlacement; flag <= yAdvDevice; flag = flag << 1) {
      if (format & flag) should_drop (*values++, (Flags) flag, &format);
    }

    return format;
  }

  /* Union of effective formats over an iterator of ValueRecord arrays. */
  template<typename Iterator,
      hb_requires (hb_is_iterator (Iterator))>
  unsigned int get_effective_format (Iterator it) const {
    unsigned int new_format = 0;

    for (const hb_array_t<const Value>& values : it)
      new_format = new_format | get_effective_format (&values);

    return new_format;
  }

  /* Serialize one ValueRecord, keeping only the fields present in
   * `new_format`.  Device offsets are re-serialized as linked objects. */
  void copy_values (hb_serialize_context_t *c,
		    unsigned int new_format,
		    const void *base,
		    const Value *values,
		    const hb_map_t *layout_variation_idx_map) const
  {
    unsigned int format = *this;
    if (!format) return;

    if (format & xPlacement) copy_value (c, new_format, xPlacement, *values++);
    if (format & yPlacement) copy_value (c, new_format, yPlacement, *values++);
    if (format & xAdvance)   copy_value (c, new_format, xAdvance, *values++);
    if (format & yAdvance)   copy_value (c, new_format, yAdvance, *values++);

    if (format & xPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yPlaDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & xAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
    if (format & yAdvDevice) copy_device (c, base, values++, layout_variation_idx_map);
  }

  /* Copy a single scalar slot, but only if `flag` survives in new_format. */
  void copy_value (hb_serialize_context_t *c,
		   unsigned int new_format,
		   Flags flag,
		   Value value) const
  {
    // Filter by new format.
    if (!(new_format & flag)) return;
    c->copy (value);
  }

  /* Walk past the scalar slots, then collect variation indices from every
   * Device table present in this record. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const void *base,
				  const hb_array_t<const Value>& values) const
  {
    unsigned format = *this;
    unsigned i = 0;
    /* Skip the four possible scalar slots to reach the device slots. */
    if (format & xPlacement) i++;
    if (format & yPlacement) i++;
    if (format & xAdvance) i++;
    if (format & yAdvance) i++;
    if (format & xPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yPlaDevice)
    {
      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::xAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }

    if (format & ValueFormat::yAdvDevice)
    {

      (base + get_device (&(values[i]))).collect_variation_indices (c->layout_variation_indices);
      i++;
    }
  }

  private:
  /* Sanitize every Device table referenced by one ValueRecord. */
  bool sanitize_value_devices (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    unsigned int format = *this;

    if (format & xPlacement) values++;
    if (format & yPlacement) values++;
    if (format & xAdvance)   values++;
    if (format & yAdvance)   values++;

    if ((format & xPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yPlaDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & xAdvDevice) && !get_device (values++).sanitize (c, base)) return false;
    if ((format & yAdvDevice) && !get_device (values++).sanitize (c, base)) return false;

    return true;
  }

  /* Reinterpret a value slot as a Device-table offset. */
  static inline Offset16To<Device>& get_device (Value* value)
  {
    return *static_cast<Offset16To<Device> *> (value);
  }
  /* Const variant; also ORs the slot's non-zero-ness into *worked. */
  static inline const Offset16To<Device>& get_device (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *static_cast<const Offset16To<Device> *> (value);
  }

  /* Copy a device slot: writes a placeholder offset, serializes the Device
   * table as a sub-object, and links it back.  Zero slots are copied as-is. */
  bool copy_device (hb_serialize_context_t *c, const void *base,
		    const Value *src_value, const hb_map_t *layout_variation_idx_map) const
  {
    Value *dst_value = c->copy (*src_value);

    if (!dst_value) return false;
    if (*dst_value == 0) return true;

    *dst_value = 0;
    c->push ();
    if ((base + get_device (src_value)).copy (c, layout_variation_idx_map))
    {
      c->add_link (*dst_value, c->pop_pack ());
      return true;
    }
    else
    {
      c->pop_discard ();
      return false;
    }
  }

  /* Reinterpret a value slot as a signed 16-bit scalar; ORs non-zero-ness
   * into *worked. */
  static inline const HBINT16& get_short (const Value* value, bool *worked=nullptr)
  {
    if (worked) *worked |= bool (*value);
    return *reinterpret_cast<const HBINT16 *> (value);
  }

  public:

  /* True iff any of the four Device-table flags is set. */
  bool has_device () const
  {
    unsigned int format = *this;
    return (format & devices) != 0;
  }

  /* Sanitize a single ValueRecord at `values`. */
  bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
  }

  /* Sanitize `count` consecutive ValueRecords. */
  bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
  {
    TRACE_SANITIZE (this);
    unsigned int len = get_len ();

    if (!c->check_range (values, count, get_size ())) return_trace (false);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
	return_trace (false);
      values += len;
    }

    return_trace (true);
  }

  /* Just sanitize referenced Device tables.  Doesn't check the values themselves. */
  bool sanitize_values_stride_unsafe (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count, unsigned int stride) const
  {
    TRACE_SANITIZE (this);

    if (!has_device ()) return_trace (true);

    for (unsigned int i = 0; i < count; i++) {
      if (!sanitize_value_devices (c, base, values))
	return_trace (false);
      values += stride;
    }

    return_trace (true);
  }

  private:

  /* Clear `flag` from *format when the stored value is zero (field carries
   * no information and can be dropped by the subsetter). */
  void should_drop (Value value, Flags flag, unsigned int* format) const
  {
    if (value) return;
    *format = *format & ~flag;
  }

};
366
/* Forward declaration: dispatches to SinglePosFormat1/2 serialization,
 * defined after the SinglePos union below. */
template<typename Iterator, typename SrcLookup>
static void SinglePos_serialize (hb_serialize_context_t *c,
				 const SrcLookup *src,
				 Iterator it,
				 const hb_map_t *layout_variation_idx_map);
372
373
374 struct AnchorFormat1
375 {
get_anchorOT::AnchorFormat1376 void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
377 float *x, float *y) const
378 {
379 hb_font_t *font = c->font;
380 *x = font->em_fscale_x (xCoordinate);
381 *y = font->em_fscale_y (yCoordinate);
382 }
383
sanitizeOT::AnchorFormat1384 bool sanitize (hb_sanitize_context_t *c) const
385 {
386 TRACE_SANITIZE (this);
387 return_trace (c->check_struct (this));
388 }
389
copyOT::AnchorFormat1390 AnchorFormat1* copy (hb_serialize_context_t *c) const
391 {
392 TRACE_SERIALIZE (this);
393 AnchorFormat1* out = c->embed<AnchorFormat1> (this);
394 if (!out) return_trace (out);
395 out->format = 1;
396 return_trace (out);
397 }
398
399 protected:
400 HBUINT16 format; /* Format identifier--format = 1 */
401 FWORD xCoordinate; /* Horizontal value--in design units */
402 FWORD yCoordinate; /* Vertical value--in design units */
403 public:
404 DEFINE_SIZE_STATIC (6);
405 };
406
/* Anchor, format 2: design-space point plus a glyph contour-point index
 * used to refine the anchor when hinting is active. */
struct AnchorFormat2
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
		   float *x, float *y) const
  {
    hb_font_t *font = c->font;

#ifdef HB_NO_HINTING
    /* Hinting disabled at build time: behave exactly like format 1.
     * The early return makes the contour-point code below dead. */
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);
    return;
#endif

    unsigned int x_ppem = font->x_ppem;
    unsigned int y_ppem = font->y_ppem;
    hb_position_t cx = 0, cy = 0;
    bool ret;

    /* Try the hinted contour point only when a pixel size is set; fall back
     * to the design-unit coordinates per axis when lookup fails. */
    ret = (x_ppem || y_ppem) &&
	  font->get_glyph_contour_point_for_origin (glyph_id, anchorPoint, HB_DIRECTION_LTR, &cx, &cy);
    *x = ret && x_ppem ? cx : font->em_fscale_x (xCoordinate);
    *y = ret && y_ppem ? cy : font->em_fscale_y (yCoordinate);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  /* Byte-copy into the serializer (format tag already reads 2). */
  AnchorFormat2* copy (hb_serialize_context_t *c) const
  {
    TRACE_SERIALIZE (this);
    return_trace (c->embed<AnchorFormat2> (this));
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 2 */
  FWORD		xCoordinate;	/* Horizontal value--in design units */
  FWORD		yCoordinate;	/* Vertical value--in design units */
  HBUINT16	anchorPoint;	/* Index to glyph contour point */
  public:
  DEFINE_SIZE_STATIC (8);
};
451
/* Anchor, format 3: design-space point plus optional X/Y Device tables for
 * pixel-size adjustments and variation deltas. */
struct AnchorFormat3
{
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id HB_UNUSED,
		   float *x, float *y) const
  {
    hb_font_t *font = c->font;
    *x = font->em_fscale_x (xCoordinate);
    *y = font->em_fscale_y (yCoordinate);

    /* Device deltas apply only when hinting at a pixel size or when
     * variation coordinates are set. */
    if (font->x_ppem || font->num_coords)
      *x += (this+xDeviceTable).get_x_delta (font, c->var_store);
    if (font->y_ppem || font->num_coords)
      *y += (this+yDeviceTable).get_y_delta (font, c->var_store);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
  }

  /* Copy for subsetting, remapping variation indices via
   * layout_variation_idx_map; bails out when no map is provided. */
  AnchorFormat3* copy (hb_serialize_context_t *c,
		       const hb_map_t *layout_variation_idx_map) const
  {
    TRACE_SERIALIZE (this);
    if (!layout_variation_idx_map) return_trace (nullptr);

    auto *out = c->embed<AnchorFormat3> (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->xDeviceTable.serialize_copy (c, xDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    out->yDeviceTable.serialize_copy (c, yDeviceTable, this, 0, hb_serialize_context_t::Head, layout_variation_idx_map);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    (this+xDeviceTable).collect_variation_indices (c->layout_variation_indices);
    (this+yDeviceTable).collect_variation_indices (c->layout_variation_indices);
  }

  protected:
  HBUINT16	format;		/* Format identifier--format = 3 */
  FWORD		xCoordinate;	/* Horizontal value--in design units */
  FWORD		yCoordinate;	/* Vertical value--in design units */
  Offset16To<Device>
		xDeviceTable;	/* Offset to Device table for X
				 * coordinate-- from beginning of
				 * Anchor table (may be NULL) */
  Offset16To<Device>
		yDeviceTable;	/* Offset to Device table for Y
				 * coordinate-- from beginning of
				 * Anchor table (may be NULL) */
  public:
  DEFINE_SIZE_STATIC (10);
};
508
/* Format-dispatching union over the three Anchor formats. */
struct Anchor
{
  /* Resolve the anchor position; unknown formats yield (0, 0). */
  void get_anchor (hb_ot_apply_context_t *c, hb_codepoint_t glyph_id,
		   float *x, float *y) const
  {
    *x = *y = 0;
    switch (u.format) {
    case 1: u.format1.get_anchor (c, glyph_id, x, y); return;
    case 2: u.format2.get_anchor (c, glyph_id, x, y); return;
    case 3: u.format3.get_anchor (c, glyph_id, x, y); return;
    default:					      return;
    }
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!u.format.sanitize (c)) return_trace (false);
    switch (u.format) {
    case 1: return_trace (u.format1.sanitize (c));
    case 2: return_trace (u.format2.sanitize (c));
    case 3: return_trace (u.format3.sanitize (c));
    default:return_trace (true);
    }
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    switch (u.format) {
    case 1: return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
    case 2:
      if (c->plan->drop_hints)
      {
        // AnchorFormat 2 just contains extra hinting information, so
        // if hints are being dropped convert to format 1.
        // (format1 and format2 share their leading layout, so copying the
        // format1 prefix and re-stamping the tag is sufficient.)
        return_trace (bool (reinterpret_cast<Anchor *> (u.format1.copy (c->serializer))));
      }
      return_trace (bool (reinterpret_cast<Anchor *> (u.format2.copy (c->serializer))));
    case 3: return_trace (bool (reinterpret_cast<Anchor *> (u.format3.copy (c->serializer,
                                                                            c->plan->layout_variation_idx_map))));
    default:return_trace (false);
    }
  }

  /* Only format 3 carries Device tables, hence variation indices. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    switch (u.format) {
    case 1: case 2:
      return;
    case 3:
      u.format3.collect_variation_indices (c);
      return;
    default: return;
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  AnchorFormat1		format1;
  AnchorFormat2		format2;
  AnchorFormat3		format3;
  } u;
  public:
  DEFINE_SIZE_UNION (2, format);
};
576
577
/* A rows × cols matrix of offsets to Anchor tables, stored row-major.
 * `cols` is not stored here; callers supply it (it comes from the owning
 * subtable's class count). */
struct AnchorMatrix
{
  /* Fetch the anchor at (row, col).  *found reports whether the cell holds a
   * real (non-null) offset; out-of-range access returns the Null anchor. */
  const Anchor& get_anchor (unsigned int row, unsigned int col,
			    unsigned int cols, bool *found) const
  {
    *found = false;
    if (unlikely (row >= rows || col >= cols)) return Null (Anchor);
    *found = !matrixZ[row * cols + col].is_null ();
    return this+matrixZ[row * cols + col];
  }

  /* Collect variation indices from the anchors at the given flat indices. */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  Iterator index_iter) const
  {
    for (unsigned i : index_iter)
      (this+matrixZ[i]).collect_variation_indices (c);
  }

  /* Subset: emit `num_rows` rows, keeping only the cells whose flat indices
   * index_iter yields (callers pass indices already filtered/remapped). */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  bool subset (hb_subset_context_t *c,
	       unsigned num_rows,
	       Iterator index_iter) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);

    if (!index_iter) return_trace (false);
    if (unlikely (!c->serializer->extend_min (out)))  return_trace (false);

    out->rows = num_rows;
    for (const unsigned i : index_iter)
    {
      auto *offset = c->serializer->embed (matrixZ[i]);
      if (!offset) return_trace (false);
      offset->serialize_subset (c, matrixZ[i], this);
    }

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    /* rows * cols could overflow for hostile fonts; check first. */
    if (unlikely (hb_unsigned_mul_overflows (rows, cols))) return_trace (false);
    unsigned int count = rows * cols;
    if (!c->check_array (matrixZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!matrixZ[i].sanitize (c, this)) return_trace (false);
    return_trace (true);
  }

  HBUINT16	rows;		/* Number of rows */
  UnsizedArrayOf<Offset16To<Anchor>>
		matrixZ;	/* Matrix of offsets to Anchor tables--
				 * from beginning of AnchorMatrix table */
  public:
  DEFINE_SIZE_ARRAY (2, matrixZ);
};
641
642
/* One entry of a MarkArray: a mark class plus an offset to the mark's
 * attachment anchor. */
struct MarkRecord
{
  friend struct MarkArray;

  unsigned get_class () const { return (unsigned) klass; }
  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
  }

  /* Subset: embed a copy with the class remapped through klass_mapping and
   * the anchor re-serialized relative to src_base (the MarkArray). */
  MarkRecord *subset (hb_subset_context_t    *c,
		      const void             *src_base,
		      const hb_map_t         *klass_mapping) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->klass = klass_mapping->get (klass);
    out->markAnchor.serialize_subset (c, markAnchor, src_base);
    return_trace (out);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
				  const void *src_base) const
  {
    (src_base+markAnchor).collect_variation_indices (c);
  }

  protected:
  HBUINT16	klass;			/* Class defined for this mark */
  Offset16To<Anchor>
		markAnchor;		/* Offset to Anchor table--from
					 * beginning of MarkArray table */
  public:
  DEFINE_SIZE_STATIC (4);
};
681
struct MarkArray : Array16Of<MarkRecord>	/* Array of MarkRecords--in Coverage order */
{
  /* Attach the mark at buffer->idx (record `mark_index`) to the glyph at
   * buffer position `glyph_pos`, using the base-side `anchors` matrix.
   * Records the attachment in the cur position's attach_type/attach_chain
   * vars and advances the buffer. */
  bool apply (hb_ot_apply_context_t *c,
	      unsigned int mark_index, unsigned int glyph_index,
	      const AnchorMatrix &anchors, unsigned int class_count,
	      unsigned int glyph_pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    const MarkRecord &record = Array16Of<MarkRecord>::operator[](mark_index);
    unsigned int mark_class = record.klass;

    const Anchor& mark_anchor = this + record.markAnchor;
    bool found;
    const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
    /* If this subtable doesn't have an anchor for this base and this class,
     * return false such that the subsequent subtables have a chance at it. */
    if (unlikely (!found)) return_trace (false);

    float mark_x, mark_y, base_x, base_y;

    buffer->unsafe_to_break (glyph_pos, buffer->idx);
    mark_anchor.get_anchor (c, buffer->cur().codepoint, &mark_x, &mark_y);
    glyph_anchor.get_anchor (c, buffer->info[glyph_pos].codepoint, &base_x, &base_y);

    /* Offset the mark so its anchor lands on the base glyph's anchor, and
     * record the (signed, backward-negative) chain to the attach target. */
    hb_glyph_position_t &o = buffer->cur_pos();
    o.x_offset = roundf (base_x - mark_x);
    o.y_offset = roundf (base_y - mark_y);
    o.attach_type() = ATTACH_TYPE_MARK;
    o.attach_chain() = (int) glyph_pos - (int) buffer->idx;
    buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;

    buffer->idx++;
    return_trace (true);
  }

  /* Subset: keep only records whose covered glyph survives in glyphset,
   * remapping mark classes through klass_mapping. */
  template <typename Iterator,
      hb_requires (hb_is_iterator (Iterator))>
  bool subset (hb_subset_context_t *c,
	       Iterator		    coverage,
	       const hb_map_t	   *klass_mapping) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();

    auto* out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    auto mark_iter =
      + hb_zip (coverage, this->iter ())
      | hb_filter (glyphset, hb_first)
      | hb_map (hb_second)
      ;

    unsigned new_length = 0;
    for (const auto& mark_record : mark_iter) {
      if (unlikely (!mark_record.subset (c, this, klass_mapping)))
        return_trace (false);
      new_length++;
    }

    if (unlikely (!c->serializer->check_assign (out->len, new_length,
                                                HB_SERIALIZE_ERROR_ARRAY_OVERFLOW)))
      return_trace (false);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (Array16Of<MarkRecord>::sanitize (c, this));
  }
};
756
757
758 /* Lookups */
759
/* Single-adjustment positioning, format 1: one shared ValueRecord applied to
 * every glyph in the coverage. */
struct SinglePosFormat1
{
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  void closure_lookups (hb_closure_lookups_context_t *c) const {}
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!valueFormat.has_device ()) return;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (c->glyph_set)
    ;

    /* Any surviving covered glyph means the shared record's device tables
     * are reachable. */
    if (!it) return;
    valueFormat.collect_variation_indices (c, this, values.as_array (valueFormat.get_len ()));
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }

  ValueFormat get_value_format () const { return valueFormat; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage  (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    valueFormat.apply_value (c, this, values, buffer->cur_pos());

    buffer->idx++;
    return_trace (true);
  }

  /* Serialize from (glyph, value-array) pairs.  Format 1 stores a single
   * record, so only the first pair's values are written; all entries are
   * assumed identical (the caller chose format 1 for that reason). */
  template<typename Iterator,
	   typename SrcLookup,
	   hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
		  const SrcLookup *src,
		  Iterator it,
		  ValueFormat newFormat,
		  const hb_map_t *layout_variation_idx_map)
  {
    if (unlikely (!c->extend_min (*this))) return;
    if (unlikely (!c->check_assign (valueFormat,
                                    newFormat,
                                    HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;

    for (const hb_array_t<const Value>& _ : + it | hb_map (hb_second))
    {
      src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map);
      // Only serialize the first entry in the iterator, the rest are assumed to
      // be the same.
      break;
    }

    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize_serialize (c, glyphs);
  }

  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    /* Pair each surviving glyph (remapped) with the shared value array. */
    auto it =
    + hb_iter (this+coverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    | hb_zip (hb_repeat (values.as_array (valueFormat.get_len ())))
    ;

    bool ret = bool (it);
    SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  coverage.sanitize (c, this) &&
		  valueFormat.sanitize_value (c, this, values));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		coverage;		/* Offset to Coverage table--from
					 * beginning of subtable */
  ValueFormat	valueFormat;		/* Defines the types of data in the
					 * ValueRecord */
  ValueRecord	values;			/* Defines positioning
					 * value(s)--applied to all glyphs in
					 * the Coverage table */
  public:
  DEFINE_SIZE_ARRAY (6, values);
};
868
869 struct SinglePosFormat2
870 {
intersectsOT::SinglePosFormat2871 bool intersects (const hb_set_t *glyphs) const
872 { return (this+coverage).intersects (glyphs); }
873
closure_lookupsOT::SinglePosFormat2874 void closure_lookups (hb_closure_lookups_context_t *c) const {}
collect_variation_indicesOT::SinglePosFormat2875 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
876 {
877 if (!valueFormat.has_device ()) return;
878
879 auto it =
880 + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
881 | hb_filter (c->glyph_set, hb_first)
882 ;
883
884 if (!it) return;
885
886 unsigned sub_length = valueFormat.get_len ();
887 const hb_array_t<const Value> values_array = values.as_array (valueCount * sub_length);
888
889 for (unsigned i : + it
890 | hb_map (hb_second))
891 valueFormat.collect_variation_indices (c, this, values_array.sub_array (i * sub_length, sub_length));
892
893 }
894
collect_glyphsOT::SinglePosFormat2895 void collect_glyphs (hb_collect_glyphs_context_t *c) const
896 { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }
897
get_coverageOT::SinglePosFormat2898 const Coverage &get_coverage () const { return this+coverage; }
899
get_value_formatOT::SinglePosFormat2900 ValueFormat get_value_format () const { return valueFormat; }
901
applyOT::SinglePosFormat2902 bool apply (hb_ot_apply_context_t *c) const
903 {
904 TRACE_APPLY (this);
905 hb_buffer_t *buffer = c->buffer;
906 unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
907 if (likely (index == NOT_COVERED)) return_trace (false);
908
909 if (likely (index >= valueCount)) return_trace (false);
910
911 valueFormat.apply_value (c, this,
912 &values[index * valueFormat.get_len ()],
913 buffer->cur_pos());
914
915 buffer->idx++;
916 return_trace (true);
917 }
918
  /* Serialize a format-2 SinglePos from (glyph, values) pairs: write the
   * header, copy one ValueRecord per pair (converted to newFormat), then
   * serialize the Coverage table from the glyph ids. */
  template<typename Iterator,
           typename SrcLookup,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  const SrcLookup *src,
                  Iterator it,
                  ValueFormat newFormat,
                  const hb_map_t *layout_variation_idx_map)
  {
    auto out = c->extend_min (*this);
    if (unlikely (!out)) return;
    /* check_assign flags overflow if the new format/count don't fit. */
    if (unlikely (!c->check_assign (valueFormat, newFormat, HB_SERIALIZE_ERROR_INT_OVERFLOW))) return;
    if (unlikely (!c->check_assign (valueCount, it.len (), HB_SERIALIZE_ERROR_ARRAY_OVERFLOW))) return;

    /* Emit each pair's values, re-encoded from src's format to newFormat. */
    + it
    | hb_map (hb_second)
    | hb_apply ([&] (hb_array_t<const Value> _)
                { src->get_value_format ().copy_values (c, newFormat, src, &_, layout_variation_idx_map); })
    ;

    /* Coverage keeps the (sorted) order of the incoming glyph ids. */
    auto glyphs =
    + it
    | hb_map_retains_sorting (hb_first)
    ;

    coverage.serialize_serialize (c, glyphs);
  }
946
  /* Subset this subtable: keep only glyphs retained by the plan, remapping
   * glyph ids and slicing out each glyph's ValueRecord. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    unsigned sub_length = valueFormat.get_len ();
    auto values_array = values.as_array (valueCount * sub_length);

    /* Pair each retained covered glyph (remapped to its new id) with its
     * slice of the values array. */
    auto it =
    + hb_zip (this+coverage, hb_range ((unsigned) valueCount))
    | hb_filter (glyphset, hb_first)
    | hb_map_retains_sorting ([&] (const hb_pair_t<hb_codepoint_t, unsigned>& _)
                              {
                                return hb_pair (glyph_map[_.first],
                                                values_array.sub_array (_.second * sub_length,
                                                                        sub_length));
                              })
    ;

    bool ret = bool (it);
    /* SinglePos_serialize picks the most compact output format (1 or 2). */
    SinglePos_serialize (c->serializer, this, it, c->plan->layout_variation_idx_map);
    return_trace (ret);
  }
971
sanitizeOT::SinglePosFormat2972 bool sanitize (hb_sanitize_context_t *c) const
973 {
974 TRACE_SANITIZE (this);
975 return_trace (c->check_struct (this) &&
976 coverage.sanitize (c, this) &&
977 valueFormat.sanitize_values (c, this, values, valueCount));
978 }
979
980 protected:
981 HBUINT16 format; /* Format identifier--format = 2 */
982 Offset16To<Coverage>
983 coverage; /* Offset to Coverage table--from
984 * beginning of subtable */
985 ValueFormat valueFormat; /* Defines the types of data in the
986 * ValueRecord */
987 HBUINT16 valueCount; /* Number of ValueRecords */
988 ValueRecord values; /* Array of ValueRecords--positioning
989 * values applied to glyphs */
990 public:
991 DEFINE_SIZE_ARRAY (8, values);
992 };
993
/* Single Adjustment Positioning: dispatcher over format 1 (one shared
 * ValueRecord) and format 2 (one ValueRecord per covered glyph). */
struct SinglePos
{
  /* Pick the most compact subtable format for the given (glyph, values)
   * pairs: 1 if every glyph shares the same values, else 2. */
  template<typename Iterator,
           hb_requires (hb_is_iterator (Iterator))>
  unsigned get_format (Iterator glyph_val_iter_pairs)
  {
    hb_array_t<const Value> first_val_iter = hb_second (*glyph_val_iter_pairs);

    for (const auto iter : glyph_val_iter_pairs)
      for (const auto _ : hb_zip (iter.second, first_val_iter))
        if (_.first != _.second)
          return 2;

    return 1;
  }


  /* Serialize the (glyph, values) pairs into whichever format is most
   * compact, also minimizing the output ValueFormat when non-empty. */
  template<typename Iterator,
           typename SrcLookup,
           hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_serialize_context_t *c,
                  const SrcLookup* src,
                  Iterator glyph_val_iter_pairs,
                  const hb_map_t *layout_variation_idx_map)
  {
    if (unlikely (!c->extend_min (u.format))) return;
    unsigned format = 2;
    ValueFormat new_format = src->get_value_format ();

    if (glyph_val_iter_pairs)
    {
      format = get_format (glyph_val_iter_pairs);
      /* Drop value components that are zero across all records. */
      new_format = src->get_value_format ().get_effective_format (+ glyph_val_iter_pairs | hb_map (hb_second));
    }

    u.format = format;
    switch (u.format) {
    case 1: u.format1.serialize (c,
                                 src,
                                 glyph_val_iter_pairs,
                                 new_format,
                                 layout_variation_idx_map);
      return;
    case 2: u.format2.serialize (c,
                                 src,
                                 glyph_val_iter_pairs,
                                 new_format,
                                 layout_variation_idx_map);
      return;
    default:return;
    }
  }

  /* Forward the context operation to the concrete format subtable. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  SinglePosFormat1      format1;
  SinglePosFormat2      format2;
  } u;
};
1066
/* Helper: serialize a SinglePos (choosing format 1 or 2) at the serializer's
 * current position from (glyph, values) pairs. */
template<typename Iterator, typename SrcLookup>
static void
SinglePos_serialize (hb_serialize_context_t *c,
                     const SrcLookup *src,
                     Iterator it,
                     const hb_map_t *layout_variation_idx_map)
{ c->start_embed<SinglePos> ()->serialize (c, src, it, layout_variation_idx_map); }
1074
1075
/* One second-glyph entry inside a PairSet: the second glyph id followed by
 * the concatenated ValueRecords for the first and second glyph.  Records
 * are sorted by secondGlyph so PairSet can bsearch them. */
struct PairValueRecord
{
  friend struct PairSet;

  /* Ordering key for hb_bsearch: compare by second glyph id. */
  int cmp (hb_codepoint_t k) const
  { return secondGlyph.cmp (k); }

  /* Bundle of data needed to subset one record (see subset below). */
  struct context_t
  {
    const void          *base;
    const ValueFormat   *valueFormats;
    const ValueFormat   *newFormats;
    unsigned            len1; /* valueFormats[0].get_len() */
    const hb_map_t      *glyph_map;
    const hb_map_t      *layout_variation_idx_map;
  };

  /* Copy this record into the serializer, remapping secondGlyph and
   * re-encoding both halves of the values to the (possibly narrower)
   * new formats. */
  bool subset (hb_subset_context_t *c,
               context_t *closure) const
  {
    TRACE_SERIALIZE (this);
    auto *s = c->serializer;
    auto *out = s->start_embed (*this);
    if (unlikely (!s->extend_min (out))) return_trace (false);

    out->secondGlyph = (*closure->glyph_map)[secondGlyph];

    /* First-glyph values occupy the first len1 Values; second-glyph values
     * follow immediately after. */
    closure->valueFormats[0].copy_values (s,
                                          closure->newFormats[0],
                                          closure->base, &values[0],
                                          closure->layout_variation_idx_map);
    closure->valueFormats[1].copy_values (s,
                                          closure->newFormats[1],
                                          closure->base,
                                          &values[closure->len1],
                                          closure->layout_variation_idx_map);

    return_trace (true);
  }

  /* Collect variation-store indices referenced by either half of the
   * values, but only for halves whose format actually carries devices. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const ValueFormat *valueFormats,
                                  const void *base) const
  {
    unsigned record1_len = valueFormats[0].get_len ();
    unsigned record2_len = valueFormats[1].get_len ();
    const hb_array_t<const Value> values_array = values.as_array (record1_len + record2_len);

    if (valueFormats[0].has_device ())
      valueFormats[0].collect_variation_indices (c, base, values_array.sub_array (0, record1_len));

    if (valueFormats[1].has_device ())
      valueFormats[1].collect_variation_indices (c, base, values_array.sub_array (record1_len, record2_len));
  }

  /* True iff the second glyph of this record survives in the glyph set. */
  bool intersects (const hb_set_t& glyphset) const
  {
    return glyphset.has(secondGlyph);
  }

  /* First-glyph values start at the head of the values array. */
  const Value* get_values_1 () const
  {
    return &values[0];
  }

  /* Second-glyph values start after the first glyph's format1-sized run. */
  const Value* get_values_2 (ValueFormat format1) const
  {
    return &values[format1.get_len ()];
  }

  protected:
  HBGlyphID     secondGlyph;            /* GlyphID of second glyph in the
                                         * pair--first glyph is listed in the
                                         * Coverage table */
  ValueRecord   values;                 /* Positioning data for the first glyph
                                         * followed by for second glyph */
  public:
  DEFINE_SIZE_ARRAY (2, values);
};
1155
/* An array of PairValueRecords for one first glyph.  Records have no
 * compile-time size (their ValueRecords vary with the value formats), so
 * all iteration below walks by a runtime-computed record_size stride. */
struct PairSet
{
  friend struct PairPosFormat1;

  /* True iff any record's second glyph is in `glyphs`. */
  bool intersects (const hb_set_t *glyphs,
                   const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    /* 1 is for secondGlyph; each Value is one HBUINT16. */
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned int count = len;
    for (unsigned int i = 0; i < count; i++)
    {
      if (glyphs->has (record->secondGlyph))
        return true;
      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
    return false;
  }

  /* Add every record's second glyph to the collector's input set.
   * add_array uses record_size as its stride over the secondGlyph field. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
                       const ValueFormat *valueFormats) const
  {
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    c->input->add_array (&record->secondGlyph, len, record_size);
  }

  /* Collect variation indices from records whose second glyph is retained. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const ValueFormat *valueFormats) const
  {
    unsigned len1 = valueFormats[0].get_len ();
    unsigned len2 = valueFormats[1].get_len ();
    unsigned record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned count = len;
    for (unsigned i = 0; i < count; i++)
    {
      if (c->glyph_set->has (record->secondGlyph))
      { record->collect_variation_indices (c, valueFormats, this); }

      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }
  }

  /* Apply pair kerning: bsearch the record for the glyph at `pos` (the
   * second glyph) and apply both halves of its values.  Advances buffer->idx
   * past the second glyph only when it has values of its own (len2). */
  bool apply (hb_ot_apply_context_t *c,
              const ValueFormat *valueFormats,
              unsigned int pos) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int len1 = valueFormats[0].get_len ();
    unsigned int len2 = valueFormats[1].get_len ();
    unsigned int record_size = HBUINT16::static_size * (1 + len1 + len2);

    const PairValueRecord *record = hb_bsearch (buffer->info[pos].codepoint,
                                                &firstPairValueRecord,
                                                len,
                                                record_size);
    if (record)
    {
      /* Note the intentional use of "|" instead of short-circuit "||". */
      if (valueFormats[0].apply_value (c, this, &record->values[0], buffer->cur_pos()) |
          valueFormats[1].apply_value (c, this, &record->values[len1], buffer->pos[pos]))
        buffer->unsafe_to_break (buffer->idx, pos + 1);
      if (len2)
        pos++;
      buffer->idx = pos;
      return_trace (true);
    }
    return_trace (false);
  }

  /* Subset: keep records whose second glyph is retained; if none survive,
   * revert the serializer so no empty PairSet is emitted. */
  bool subset (hb_subset_context_t *c,
               const ValueFormat valueFormats[2],
               const ValueFormat newFormats[2]) const
  {
    TRACE_SUBSET (this);
    auto snap = c->serializer->snapshot ();

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->len = 0;

    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    unsigned len1 = valueFormats[0].get_len ();
    unsigned len2 = valueFormats[1].get_len ();
    /* Same stride as above: Value::static_size == HBUINT16::static_size. */
    unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);

    PairValueRecord::context_t context =
    {
      this,
      valueFormats,
      newFormats,
      len1,
      &glyph_map,
      c->plan->layout_variation_idx_map
    };

    const PairValueRecord *record = &firstPairValueRecord;
    unsigned count = len, num = 0;
    for (unsigned i = 0; i < count; i++)
    {
      if (glyphset.has (record->secondGlyph)
         && record->subset (c, &context)) num++;
      record = &StructAtOffset<const PairValueRecord> (record, record_size);
    }

    out->len = num;
    if (!num) c->serializer->revert (snap);
    return_trace (num);
  }

  /* Precomputed stride info so sanitize need not re-derive it per set. */
  struct sanitize_closure_t
  {
    const ValueFormat *valueFormats;
    unsigned int len1; /* valueFormats[0].get_len() */
    unsigned int stride; /* 1 + len1 + len2 */
  };

  /* Validate the record array bounds, then both value halves of every
   * record via the strided value sanitizers. */
  bool sanitize (hb_sanitize_context_t *c, const sanitize_closure_t *closure) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && c->check_range (&firstPairValueRecord,
                          len,
                          HBUINT16::static_size,
                          closure->stride))) return_trace (false);

    unsigned int count = len;
    const PairValueRecord *record = &firstPairValueRecord;
    return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, this, &record->values[0], count, closure->stride) &&
                  closure->valueFormats[1].sanitize_values_stride_unsafe (c, this, &record->values[closure->len1], count, closure->stride));
  }

  protected:
  HBUINT16              len;    /* Number of PairValueRecords */
  PairValueRecord       firstPairValueRecord;
                                /* Array of PairValueRecords--ordered
                                 * by GlyphID of the second glyph */
  public:
  DEFINE_SIZE_MIN (2);
};
1307
/* Pair Adjustment Positioning, format 1: one PairSet per covered first
 * glyph, each listing explicit second glyphs. */
struct PairPosFormat1
{
  /* True iff some retained first glyph has a PairSet that mentions a
   * retained second glyph. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return
    + hb_zip (this+coverage, pairSet)
    | hb_filter (*glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map ([glyphs, this] (const Offset16To<PairSet> &_)
              { return (this+_).intersects (glyphs, valueFormat); })
    | hb_any
    ;
  }

  /* Pair positioning references no other lookups. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation indices from PairSets of retained first glyphs;
   * skip entirely when neither value format carries device tables. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if ((!valueFormat[0].has_device ()) && (!valueFormat[1].has_device ())) return;

    auto it =
    + hb_zip (this+coverage, pairSet)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    if (!it) return;
    + it
    | hb_map (hb_add (this))
    | hb_apply ([&] (const PairSet& _) { _.collect_variation_indices (c, valueFormat); })
    ;
  }

  /* Add first glyphs (coverage) and all second glyphs (PairSets) to the
   * collector's input set. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
    unsigned int count = pairSet.len;
    for (unsigned int i = 0; i < count; i++)
      (this+pairSet[i]).collect_glyphs (c, valueFormat);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  /* Apply: current glyph must be covered; the next non-skipped glyph is the
   * pair's second glyph, looked up inside the matching PairSet. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    if (!skippy_iter.next ()) return_trace (false);

    return_trace ((this+pairSet[index]).apply (c, valueFormat, skippy_iter.idx));
  }

  /* Subset: serialize surviving PairSets (reverting empty ones) and build
   * the new coverage from the remapped first glyphs that kept a set. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    out->valueFormat[0] = valueFormat[0];
    out->valueFormat[1] = valueFormat[1];
    if (c->plan->drop_hints)
    {
      /* Narrow the value formats to components actually used by kept pairs. */
      hb_pair_t<unsigned, unsigned> newFormats = compute_effective_value_formats (glyphset);
      out->valueFormat[0] = newFormats.first;
      out->valueFormat[1] = newFormats.second;
    }

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;

    + hb_zip (this+coverage, pairSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter ([this, c, out] (const Offset16To<PairSet>& _)
                 {
                   /* Append a PairSet offset, then drop it again if the
                    * subsetted set came out empty. */
                   auto *o = out->pairSet.serialize_append (c->serializer);
                   if (unlikely (!o)) return false;
                   auto snap = c->serializer->snapshot ();
                   bool ret = o->serialize_subset (c, _, this, valueFormat, out->valueFormat);
                   if (!ret)
                   {
                     out->pairSet.pop ();
                     c->serializer->revert (snap);
                   }
                   return ret;
                 },
                 hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());

    return_trace (bool (new_coverage));
  }


  /* OR together the effective (non-zero) value-format bits of every record
   * that survives subsetting, for both pair halves. */
  hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_set_t& glyphset) const
  {
    unsigned len1 = valueFormat[0].get_len ();
    unsigned len2 = valueFormat[1].get_len ();
    unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);

    unsigned format1 = 0;
    unsigned format2 = 0;
    for (const Offset16To<PairSet>& _ :
             + hb_zip (this+coverage, pairSet) | hb_filter (glyphset, hb_first) | hb_map (hb_second))
    {
      const PairSet& set = (this + _);
      const PairValueRecord *record = &set.firstPairValueRecord;

      for (unsigned i = 0; i < set.len; i++)
      {
        if (record->intersects (glyphset))
        {
          format1 = format1 | valueFormat[0].get_effective_format (record->get_values_1 ());
          format2 = format2 | valueFormat[1].get_effective_format (record->get_values_2 (valueFormat[0]));
        }
        record = &StructAtOffset<const PairValueRecord> (record, record_size);
      }
    }

    return hb_pair (format1, format2);
  }


  /* Validate header and coverage, then every PairSet with the stride
   * closure derived from the two value formats. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);

    if (!c->check_struct (this)) return_trace (false);

    unsigned int len1 = valueFormat[0].get_len ();
    unsigned int len2 = valueFormat[1].get_len ();
    PairSet::sanitize_closure_t closure =
    {
      valueFormat,
      len1,
      1 + len1 + len2
    };

    return_trace (coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 1 */
  Offset16To<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat[2];         /* [0] Defines the types of data in
                                         * ValueRecord1--for the first glyph
                                         * in the pair--may be zero (0) */
                                        /* [1] Defines the types of data in
                                         * ValueRecord2--for the second glyph
                                         * in the pair--may be zero (0) */
  Array16OfOffset16To<PairSet>
                pairSet;                /* Array of PairSet tables
                                         * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (10, pairSet);
};
1476
/* Pair Adjustment Positioning, format 2: a class1Count x class2Count matrix
 * of value pairs, indexed by the class of each glyph in the pair. */
struct PairPosFormat2
{
  /* Both the first glyph (coverage) and the second glyph (classDef2) must
   * have surviving glyphs for any pair to be reachable. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+coverage).intersects (glyphs) &&
           (this+classDef2).intersects (glyphs);
  }

  /* Pair positioning references no other lookups. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation indices from the matrix cells reachable through the
   * retained glyph set: compute the reachable class sets on both axes,
   * then walk only those rows/columns. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    if (!intersects (c->glyph_set)) return;
    if ((!valueFormat1.has_device ()) && (!valueFormat2.has_device ())) return;

    hb_set_t klass1_glyphs, klass2_glyphs;
    if (!(this+classDef1).collect_coverage (&klass1_glyphs)) return;
    if (!(this+classDef2).collect_coverage (&klass2_glyphs)) return;

    hb_set_t class1_set, class2_set;
    for (const unsigned cp : + c->glyph_set->iter () | hb_filter (this + coverage))
    {
      /* Glyphs absent from classDef1 fall in class 0. */
      if (!klass1_glyphs.has (cp)) class1_set.add (0);
      else
      {
        unsigned klass1 = (this+classDef1).get (cp);
        class1_set.add (klass1);
      }
    }

    class2_set.add (0);
    for (const unsigned cp : + c->glyph_set->iter () | hb_filter (klass2_glyphs))
    {
      unsigned klass2 = (this+classDef2).get (cp);
      class2_set.add (klass2);
    }

    if (class1_set.is_empty ()
        || class2_set.is_empty ()
        || (class2_set.get_population() == 1 && class2_set.has(0)))
      return;

    unsigned len1 = valueFormat1.get_len ();
    unsigned len2 = valueFormat2.get_len ();
    const hb_array_t<const Value> values_array = values.as_array ((unsigned)class1Count * (unsigned) class2Count * (len1 + len2));
    for (const unsigned class1_idx : class1_set.iter ())
    {
      for (const unsigned class2_idx : class2_set.iter ())
      {
        /* Row-major: class1 selects the row, class2 the cell. */
        unsigned start_offset = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
        if (valueFormat1.has_device ())
          valueFormat1.collect_variation_indices (c, this, values_array.sub_array (start_offset, len1));

        if (valueFormat2.has_device ())
          valueFormat2.collect_variation_indices (c, this, values_array.sub_array (start_offset+len1, len2));
      }
    }
  }

  /* First glyphs come from coverage; possible second glyphs are everything
   * classDef2 classifies. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+coverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+classDef2).collect_coverage (c->input))) return;
  }

  const Coverage &get_coverage () const { return this+coverage; }

  /* Apply: classify the current and next (non-skipped) glyph and apply the
   * matrix cell for that (class1, class2) pair.  Advances past the second
   * glyph only when valueFormat2 is non-empty. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
    if (likely (index == NOT_COVERED)) return_trace (false);

    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    if (!skippy_iter.next ()) return_trace (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int record_len = len1 + len2;

    unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
    unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
    if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);

    const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
    /* Note the intentional use of "|" instead of short-circuit "||". */
    if (valueFormat1.apply_value (c, this, v, buffer->cur_pos()) |
        valueFormat2.apply_value (c, this, v + len1, buffer->pos[skippy_iter.idx]))
      buffer->unsafe_to_break (buffer->idx, skippy_iter.idx + 1);

    buffer->idx = skippy_iter.idx;
    if (len2)
      buffer->idx++;

    return_trace (true);
  }

  /* Subset: remap both class defs (class 0 retained on axis 2 only where
   * needed), copy the surviving matrix cells in remapped class order, and
   * rebuild coverage from retained first glyphs. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass1_map;
    out->classDef1.serialize_subset (c, classDef1, this, &klass1_map, true, true, &(this + coverage));
    out->class1Count = klass1_map.get_population ();

    hb_map_t klass2_map;
    out->classDef2.serialize_subset (c, classDef2, this, &klass2_map, true, false);
    out->class2Count = klass2_map.get_population ();

    unsigned len1 = valueFormat1.get_len ();
    unsigned len2 = valueFormat2.get_len ();

    hb_pair_t<unsigned, unsigned> newFormats = hb_pair (valueFormat1, valueFormat2);
    if (c->plan->drop_hints)
      newFormats = compute_effective_value_formats (klass1_map, klass2_map);

    out->valueFormat1 = newFormats.first;
    out->valueFormat2 = newFormats.second;

    for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
    {
      for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
      {
        unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
        valueFormat1.copy_values (c->serializer, newFormats.first, this, &values[idx], c->plan->layout_variation_idx_map);
        valueFormat2.copy_values (c->serializer, newFormats.second, this, &values[idx + len1], c->plan->layout_variation_idx_map);
      }
    }

    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto it =
    + hb_iter (this+coverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    ;

    out->coverage.serialize_serialize (c->serializer, it);
    return_trace (out->class1Count && out->class2Count && bool (it));
  }


  /* OR together the effective value-format bits of every matrix cell whose
   * row and column classes survive subsetting. */
  hb_pair_t<unsigned, unsigned> compute_effective_value_formats (const hb_map_t& klass1_map,
                                                                 const hb_map_t& klass2_map) const
  {
    unsigned len1 = valueFormat1.get_len ();
    unsigned len2 = valueFormat2.get_len ();

    unsigned format1 = 0;
    unsigned format2 = 0;

    for (unsigned class1_idx : + hb_range ((unsigned) class1Count) | hb_filter (klass1_map))
    {
      for (unsigned class2_idx : + hb_range ((unsigned) class2Count) | hb_filter (klass2_map))
      {
        unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * (len1 + len2);
        format1 = format1 | valueFormat1.get_effective_format (&values[idx]);
        format2 = format2 | valueFormat2.get_effective_format (&values[idx + len1]);
      }
    }

    return hb_pair (format1, format2);
  }


  /* Validate header, coverage and both class defs, then bound-check and
   * sanitize the whole class1Count x class2Count value matrix. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!(c->check_struct (this)
       && coverage.sanitize (c, this)
       && classDef1.sanitize (c, this)
       && classDef2.sanitize (c, this))) return_trace (false);

    unsigned int len1 = valueFormat1.get_len ();
    unsigned int len2 = valueFormat2.get_len ();
    unsigned int stride = len1 + len2;
    unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
    unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
    return_trace (c->check_range ((const void *) values,
                                  count,
                                  record_size) &&
                  valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
                  valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
  }

  protected:
  HBUINT16      format;                 /* Format identifier--format = 2 */
  Offset16To<Coverage>
                coverage;               /* Offset to Coverage table--from
                                         * beginning of subtable */
  ValueFormat   valueFormat1;           /* ValueRecord definition--for the
                                         * first glyph of the pair--may be zero
                                         * (0) */
  ValueFormat   valueFormat2;           /* ValueRecord definition--for the
                                         * second glyph of the pair--may be
                                         * zero (0) */
  Offset16To<ClassDef>
                classDef1;              /* Offset to ClassDef table--from
                                         * beginning of PairPos subtable--for
                                         * the first glyph of the pair */
  Offset16To<ClassDef>
                classDef2;              /* Offset to ClassDef table--from
                                         * beginning of PairPos subtable--for
                                         * the second glyph of the pair */
  HBUINT16      class1Count;            /* Number of classes in ClassDef1
                                         * table--includes Class0 */
  HBUINT16      class2Count;            /* Number of classes in ClassDef2
                                         * table--includes Class0 */
  ValueRecord   values;                 /* Matrix of value pairs:
                                         * class1-major, class2-minor,
                                         * Each entry has value1 and value2 */
  public:
  DEFINE_SIZE_ARRAY (16, values);
};
1696
/* Pair Adjustment Positioning: dispatcher over format 1 (per-glyph PairSets)
 * and format 2 (class-based matrix). */
struct PairPos
{
  /* Forward the context operation to the concrete format subtable. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  PairPosFormat1        format1;
  PairPosFormat2        format2;
  } u;
};
1718
1719
/* One cursive-attachment record: optional entry and exit anchor offsets for
 * a covered glyph (either offset may be NULL). */
struct EntryExitRecord
{
  friend struct CursivePosFormat1;

  bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
  }

  /* Collect variation indices from both anchors (null offsets resolve to
   * the Null object and contribute nothing). */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c,
                                  const void *src_base) const
  {
    (src_base+entryAnchor).collect_variation_indices (c);
    (src_base+exitAnchor).collect_variation_indices (c);
  }

  /* Copy this record into the serializer, subsetting both anchor tables
   * relative to src_base.  Returns the embedded copy, or nullptr on OOM. */
  EntryExitRecord* subset (hb_subset_context_t *c,
                           const void *src_base) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->serializer->embed (this);
    if (unlikely (!out)) return_trace (nullptr);

    out->entryAnchor.serialize_subset (c, entryAnchor, src_base);
    out->exitAnchor.serialize_subset (c, exitAnchor, src_base);
    return_trace (out);
  }

  protected:
  Offset16To<Anchor>
                entryAnchor;            /* Offset to EntryAnchor table--from
                                         * beginning of CursivePos
                                         * subtable--may be NULL */
  Offset16To<Anchor>
                exitAnchor;             /* Offset to ExitAnchor table--from
                                         * beginning of CursivePos
                                         * subtable--may be NULL */
  public:
  DEFINE_SIZE_STATIC (4);
};
1761
1762 static void
1763 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent);
1764
1765 struct CursivePosFormat1
1766 {
  /* True iff any covered glyph survives in `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  { return (this+coverage).intersects (glyphs); }

  /* Cursive attachment references no other lookups; nothing to close over. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation indices from the entry/exit anchors of every record
   * whose glyph is in the subset glyph set. */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+coverage, entryExitRecord)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const EntryExitRecord& record) { record.collect_variation_indices (c, this); })
    ;
  }

  /* Add every covered glyph to the collector's input set. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  { if (unlikely (!(this+coverage).collect_coverage (c->input))) return; }

  const Coverage &get_coverage () const { return this+coverage; }
1785
  /* Cursive attachment: connect the current glyph's entry anchor to the
   * previous (non-skipped) glyph's exit anchor.  Adjusts advances along the
   * main direction, then records a cross-direction attachment offset to be
   * resolved in the GPOS finish pass. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;

    /* An out-of-coverage index yields the Null record (null entryAnchor),
     * so the next check also rejects uncovered glyphs. */
    const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
    if (!this_record.entryAnchor) return_trace (false);

    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    if (!skippy_iter.prev ()) return_trace (false);

    const EntryExitRecord &prev_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
    if (!prev_record.exitAnchor) return_trace (false);

    unsigned int i = skippy_iter.idx;
    unsigned int j = buffer->idx;

    buffer->unsafe_to_break (i, j);
    float entry_x, entry_y, exit_x, exit_y;
    (this+prev_record.exitAnchor).get_anchor (c, buffer->info[i].codepoint, &exit_x, &exit_y);
    (this+this_record.entryAnchor).get_anchor (c, buffer->info[j].codepoint, &entry_x, &entry_y);

    hb_glyph_position_t *pos = buffer->pos;

    hb_position_t d;
    /* Main-direction adjustment: trim the first glyph's advance at its exit
     * anchor and start the second glyph at its entry anchor. */
    switch (c->direction) {
      case HB_DIRECTION_LTR:
        pos[i].x_advance  = roundf (exit_x) + pos[i].x_offset;

        d = roundf (entry_x) + pos[j].x_offset;
        pos[j].x_advance -= d;
        pos[j].x_offset  -= d;
        break;
      case HB_DIRECTION_RTL:
        d = roundf (exit_x) + pos[i].x_offset;
        pos[i].x_advance -= d;
        pos[i].x_offset  -= d;

        pos[j].x_advance  = roundf (entry_x) + pos[j].x_offset;
        break;
      case HB_DIRECTION_TTB:
        pos[i].y_advance  = roundf (exit_y) + pos[i].y_offset;

        d = roundf (entry_y) + pos[j].y_offset;
        pos[j].y_advance -= d;
        pos[j].y_offset  -= d;
        break;
      case HB_DIRECTION_BTT:
        d = roundf (exit_y) + pos[i].y_offset;
        pos[i].y_advance -= d;
        pos[i].y_offset  -= d;

        /* NOTE(review): unlike the RTL case, entry_y here is not adjusted
         * by pos[j].y_offset — confirm this asymmetry is intentional. */
        pos[j].y_advance  = roundf (entry_y);
        break;
      case HB_DIRECTION_INVALID:
      default:
        break;
    }

    /* Cross-direction adjustment */

    /* We attach child to parent (think graph theory and rooted trees whereas
     * the root stays on baseline and each node aligns itself against its
     * parent.
     *
     * Optimize things for the case of RightToLeft, as that's most common in
     * Arabic. */
    unsigned int child = i;
    unsigned int parent = j;
    hb_position_t x_offset = entry_x - exit_x;
    hb_position_t y_offset = entry_y - exit_y;
    if (!(c->lookup_props & LookupFlag::RightToLeft))
    {
      /* Swap roles (and negate the offset) when attaching forward. */
      unsigned int k = child;
      child = parent;
      parent = k;
      x_offset = -x_offset;
      y_offset = -y_offset;
    }

    /* If child was already connected to someone else, walk through its old
     * chain and reverse the link direction, such that the whole tree of its
     * previous connection now attaches to new parent.  Watch out for case
     * where new parent is on the path from old chain...
     */
    reverse_cursive_minor_offset (pos, child, c->direction, parent);

    pos[child].attach_type() = ATTACH_TYPE_CURSIVE;
    pos[child].attach_chain() = (int) parent - (int) child;
    buffer->scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT;
    /* Only the cross-direction offset is stored; the main-direction part
     * was already folded into advances above. */
    if (likely (HB_DIRECTION_IS_HORIZONTAL (c->direction)))
      pos[child].y_offset = y_offset;
    else
      pos[child].x_offset = x_offset;

    /* If parent was attached to child, separate them.
     * https://github.com/harfbuzz/harfbuzz/issues/2469
     */
    if (unlikely (pos[parent].attach_chain() == -pos[child].attach_chain()))
      pos[parent].attach_chain() = 0;

    buffer->idx++;
    return_trace (true);
  }
1892
  /* Serialize a fresh CursivePosFormat1 subtable from an iterator of
   * (glyph-id, EntryExitRecord) pairs.  `src_base` is the base address of
   * the source subtable that the records' anchor offsets are relative to.
   * Serialization failures are recorded in the serializer's error state. */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  void serialize (hb_subset_context_t *c,
		  Iterator it,
		  const void *src_base)
  {
    if (unlikely (!c->serializer->extend_min ((*this)))) return;
    this->format = 1;
    this->entryExitRecord.len = it.len ();

    /* Copy each surviving record; EntryExitRecord::subset re-bases the
     * entry/exit anchor offsets into the new layout. */
    for (const EntryExitRecord& entry_record : + it
					       | hb_map (hb_second))
      entry_record.subset (c, src_base);

    /* Coverage glyphs, in the same order as the records written above. */
    auto glyphs =
      + it
      | hb_map_retains_sorting (hb_first)
      ;

    coverage.serialize_serialize (c->serializer, glyphs);
  }
1914
subsetOT::CursivePosFormat11915 bool subset (hb_subset_context_t *c) const
1916 {
1917 TRACE_SUBSET (this);
1918 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
1919 const hb_map_t &glyph_map = *c->plan->glyph_map;
1920
1921 auto *out = c->serializer->start_embed (*this);
1922 if (unlikely (!out)) return_trace (false);
1923
1924 auto it =
1925 + hb_zip (this+coverage, entryExitRecord)
1926 | hb_filter (glyphset, hb_first)
1927 | hb_map_retains_sorting ([&] (hb_pair_t<hb_codepoint_t, const EntryExitRecord&> p) -> hb_pair_t<hb_codepoint_t, const EntryExitRecord&>
1928 { return hb_pair (glyph_map[p.first], p.second);})
1929 ;
1930
1931 bool ret = bool (it);
1932 out->serialize (c, it, this);
1933 return_trace (ret);
1934 }
1935
sanitizeOT::CursivePosFormat11936 bool sanitize (hb_sanitize_context_t *c) const
1937 {
1938 TRACE_SANITIZE (this);
1939 return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
1940 }
1941
1942 protected:
1943 HBUINT16 format; /* Format identifier--format = 1 */
1944 Offset16To<Coverage>
1945 coverage; /* Offset to Coverage table--from
1946 * beginning of subtable */
1947 Array16Of<EntryExitRecord>
1948 entryExitRecord; /* Array of EntryExit records--in
1949 * Coverage Index order */
1950 public:
1951 DEFINE_SIZE_ARRAY (6, entryExitRecord);
1952 };
1953
/* GPOS Lookup Type 3: cursive attachment.  Thin format dispatcher; only
 * format 1 is defined by OpenType. */
struct CursivePos
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch verifies the format field itself is safely readable. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  CursivePosFormat1	format1;
  } u;
};
1973
1974
1975 typedef AnchorMatrix BaseArray; /* base-major--
1976 * in order of BaseCoverage Index--,
1977 * mark-minor--
1978 * ordered by class--zero-based. */
1979
Markclass_closure_and_remap_indexes(const Coverage & mark_coverage,const MarkArray & mark_array,const hb_set_t & glyphset,hb_map_t * klass_mapping)1980 static void Markclass_closure_and_remap_indexes (const Coverage &mark_coverage,
1981 const MarkArray &mark_array,
1982 const hb_set_t &glyphset,
1983 hb_map_t* klass_mapping /* INOUT */)
1984 {
1985 hb_set_t orig_classes;
1986
1987 + hb_zip (mark_coverage, mark_array)
1988 | hb_filter (glyphset, hb_first)
1989 | hb_map (hb_second)
1990 | hb_map (&MarkRecord::get_class)
1991 | hb_sink (orig_classes)
1992 ;
1993
1994 unsigned idx = 0;
1995 for (auto klass : orig_classes.iter ())
1996 {
1997 if (klass_mapping->has (klass)) continue;
1998 klass_mapping->set (klass, idx);
1999 idx++;
2000 }
2001 }
2002
/* GPOS Lookup Type 4, format 1: attach a mark glyph to a preceding base
 * glyph, using per-class anchor points from MarkArray/BaseArray. */
struct MarkBasePosFormat1
{
  /* The lookup can only apply if both a covered mark and a covered base
   * survive in `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+markCoverage).intersects (glyphs) &&
	   (this+baseCoverage).intersects (glyphs);
  }

  /* GPOS subtables do not reference other lookups; nothing to close over. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation-store indices used by the anchors of retained marks
   * and bases (variable-font subsetting support). */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
    ;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);

    unsigned basecount = (this+baseArray).rows;
    auto base_iter =
    + hb_zip (this+baseCoverage, hb_range (basecount))
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    hb_sorted_vector_t<unsigned> base_indexes;
    for (const unsigned row : base_iter)
    {
      /* Only the (row, class) anchor-matrix cells for surviving classes. */
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (base_indexes)
      ;
    }
    (this+baseArray).collect_variation_indices (c, base_indexes.iter ());
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+baseCoverage).collect_coverage (c->input))) return;
  }

  const Coverage &get_coverage () const { return this+markCoverage; }

  /* Position the current glyph (a mark) against the nearest suitable
   * preceding base glyph; advances buffer->idx on success via
   * MarkArray::apply. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return_trace (false);

    /* Now we search backwards for a non-mark glyph */
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    do {
      if (!skippy_iter.prev ()) return_trace (false);
      /* We only want to attach to the first of a MultipleSubst sequence.
       * https://github.com/harfbuzz/harfbuzz/issues/740
       * Reject others...
       * ...but stop if we find a mark in the MultipleSubst sequence:
       * https://github.com/harfbuzz/harfbuzz/issues/1020 */
      if (!_hb_glyph_info_multiplied (&buffer->info[skippy_iter.idx]) ||
	  0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) ||
	  (skippy_iter.idx == 0 ||
	   _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx - 1]) ||
	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]) !=
	   _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx - 1]) ||
	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]) !=
	   _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx - 1]) + 1
	   ))
	break;
      skippy_iter.reject ();
    } while (true);

    /* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
    //if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { return_trace (false); }

    unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
    if (base_index == NOT_COVERED) return_trace (false);

    return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
  }

  /* Subset: keep surviving marks/bases, renumber mark classes densely,
   * and rewrite both coverage tables and both anchor arrays. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);

    /* No surviving mark class means the subtable is empty; drop it. */
    if (!klass_mapping.get_population ()) return_trace (false);
    out->classCount = klass_mapping.get_population ();

    auto mark_iter =
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (glyphset, hb_first)
    ;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + mark_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->markCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    out->markArray.serialize_subset (c, markArray, this,
				     (this+markCoverage).iter (),
				     &klass_mapping);

    unsigned basecount = (this+baseArray).rows;
    auto base_iter =
    + hb_zip (this+baseCoverage, hb_range (basecount))
    | hb_filter (glyphset, hb_first)
    ;

    new_coverage.reset ();
    + base_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->baseCoverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    /* Flattened anchor-matrix cell indices to keep: one per surviving
     * (base row, surviving class) pair. */
    hb_sorted_vector_t<unsigned> base_indexes;
    for (const unsigned row : + base_iter
			      | hb_map (hb_second))
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (base_indexes)
      ;
    }

    out->baseArray.serialize_subset (c, baseArray, this,
				     base_iter.len (),
				     base_indexes.iter ());

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  markCoverage.sanitize (c, this) &&
		  baseCoverage.sanitize (c, this) &&
		  markArray.sanitize (c, this) &&
		  baseArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		markCoverage;		/* Offset to MarkCoverage table--from
					 * beginning of MarkBasePos subtable */
  Offset16To<Coverage>
		baseCoverage;		/* Offset to BaseCoverage table--from
					 * beginning of MarkBasePos subtable */
  HBUINT16	classCount;		/* Number of classes defined for marks */
  Offset16To<MarkArray>
		markArray;		/* Offset to MarkArray table--from
					 * beginning of MarkBasePos subtable */
  Offset16To<BaseArray>
		baseArray;		/* Offset to BaseArray table--from
					 * beginning of MarkBasePos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};
2188
/* GPOS Lookup Type 4: mark-to-base attachment.  Format dispatcher; only
 * format 1 is defined by OpenType. */
struct MarkBasePos
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch verifies the format field itself is safely readable. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  MarkBasePosFormat1	format1;
  } u;
};
2208
2209
2210 typedef AnchorMatrix LigatureAttach; /* component-major--
2211 * in order of writing direction--,
2212 * mark-minor--
2213 * ordered by class--zero-based. */
2214
2215 /* Array of LigatureAttach tables ordered by LigatureCoverage Index */
/* Array of LigatureAttach tables ordered by LigatureCoverage Index */
struct LigatureArray : List16OfOffset16To<LigatureAttach>
{
  /* Subset the array: for each ligature whose coverage glyph survives,
   * copy its LigatureAttach matrix keeping only the anchor cells whose
   * mark class survives (per klass_mapping).  `coverage` iterates the
   * old LigatureCoverage glyphs in array order. */
  template <typename Iterator,
	    hb_requires (hb_is_iterator (Iterator))>
  bool subset (hb_subset_context_t *c,
	       Iterator coverage,
	       unsigned class_count,
	       const hb_map_t *klass_mapping) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const auto _ : + hb_zip (coverage, *this)
			| hb_filter (glyphset, hb_first))
    {
      auto *matrix = out->serialize_append (c->serializer);
      if (unlikely (!matrix)) return_trace (false);

      const LigatureAttach& src = (this + _.second);
      /* Flattened (component, class) cell indices to retain: keep a cell
       * iff its mark class (index % class_count) survives. */
      auto indexes =
	  + hb_range (src.rows * class_count)
	  | hb_filter ([=] (unsigned index) { return klass_mapping->has (index % class_count); })
	  ;
      matrix->serialize_subset (c,
				_.second,
				this,
				src.rows,
				indexes);
    }
    return_trace (this->len);
  }
};
2251
/* GPOS Lookup Type 5, format 1: attach a mark glyph to a component of a
 * preceding ligature glyph, using per-class anchors from
 * MarkArray/LigatureArray. */
struct MarkLigPosFormat1
{
  /* The lookup can only apply if both a covered mark and a covered
   * ligature survive in `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+markCoverage).intersects (glyphs) &&
	   (this+ligatureCoverage).intersects (glyphs);
  }

  /* GPOS subtables do not reference other lookups; nothing to close over. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation-store indices used by the anchors of retained marks
   * and ligature components (variable-font subsetting support). */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+markArray)); })
    ;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, *c->glyph_set, &klass_mapping);

    unsigned ligcount = (this+ligatureArray).len;
    auto lig_iter =
    + hb_zip (this+ligatureCoverage, hb_range (ligcount))
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    const LigatureArray& lig_array = this+ligatureArray;
    for (const unsigned i : lig_iter)
    {
      hb_sorted_vector_t<unsigned> lig_indexes;
      unsigned row_count = lig_array[i].rows;
      for (unsigned row : + hb_range (row_count))
      {
	/* Only the (component row, class) cells for surviving classes. */
	+ hb_range ((unsigned) classCount)
	| hb_filter (klass_mapping)
	| hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
	| hb_sink (lig_indexes)
	;
      }

      lig_array[i].collect_variation_indices (c, lig_indexes.iter ());
    }
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+markCoverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+ligatureCoverage).collect_coverage (c->input))) return;
  }

  const Coverage &get_coverage () const { return this+markCoverage; }

  /* Position the current glyph (a mark) against a component of the
   * nearest preceding ligature glyph. */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark_index == NOT_COVERED)) return_trace (false);

    /* Now we search backwards for a non-mark glyph */
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
    if (!skippy_iter.prev ()) return_trace (false);

    /* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
    //if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { return_trace (false); }

    unsigned int j = skippy_iter.idx;
    unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
    if (lig_index == NOT_COVERED) return_trace (false);

    const LigatureArray& lig_array = this+ligatureArray;
    const LigatureAttach& lig_attach = lig_array[lig_index];

    /* Find component to attach to */
    unsigned int comp_count = lig_attach.rows;
    if (unlikely (!comp_count)) return_trace (false);

    /* We must now check whether the ligature ID of the current mark glyph
     * is identical to the ligature ID of the found ligature.  If yes, we
     * can directly use the component index.  If not, we attach the mark
     * glyph to the last component of the ligature. */
    unsigned int comp_index;
    unsigned int lig_id = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int mark_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int mark_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
    if (lig_id && lig_id == mark_id && mark_comp > 0)
      comp_index = hb_min (comp_count, _hb_glyph_info_get_lig_comp (&buffer->cur())) - 1;
    else
      comp_index = comp_count - 1;

    return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
  }

  /* Subset: keep surviving marks/ligatures, renumber mark classes
   * densely, and rewrite both coverages, the MarkArray, and the
   * LigatureArray. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+markCoverage, this+markArray, glyphset, &klass_mapping);

    /* No surviving mark class means the subtable is empty; drop it. */
    if (!klass_mapping.get_population ()) return_trace (false);
    out->classCount = klass_mapping.get_population ();

    auto mark_iter =
    + hb_zip (this+markCoverage, this+markArray)
    | hb_filter (glyphset, hb_first)
    ;

    auto new_mark_coverage =
    + mark_iter
    | hb_map_retains_sorting (hb_first)
    | hb_map_retains_sorting (glyph_map)
    ;

    if (!out->markCoverage.serialize_serialize (c->serializer, new_mark_coverage))
      return_trace (false);

    out->markArray.serialize_subset (c, markArray, this,
				     (this+markCoverage).iter (),
				     &klass_mapping);

    auto new_ligature_coverage =
    + hb_iter (this + ligatureCoverage)
    | hb_filter (glyphset)
    | hb_map_retains_sorting (glyph_map)
    ;

    if (!out->ligatureCoverage.serialize_serialize (c->serializer, new_ligature_coverage))
      return_trace (false);

    out->ligatureArray.serialize_subset (c, ligatureArray, this,
					 hb_iter (this+ligatureCoverage), classCount, &klass_mapping);

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  markCoverage.sanitize (c, this) &&
		  ligatureCoverage.sanitize (c, this) &&
		  markArray.sanitize (c, this) &&
		  ligatureArray.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		markCoverage;		/* Offset to Mark Coverage table--from
					 * beginning of MarkLigPos subtable */
  Offset16To<Coverage>
		ligatureCoverage;	/* Offset to Ligature Coverage
					 * table--from beginning of MarkLigPos
					 * subtable */
  HBUINT16	classCount;		/* Number of defined mark classes */
  Offset16To<MarkArray>
		markArray;		/* Offset to MarkArray table--from
					 * beginning of MarkLigPos subtable */
  Offset16To<LigatureArray>
		ligatureArray;		/* Offset to LigatureArray table--from
					 * beginning of MarkLigPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};
2427
2428
/* GPOS Lookup Type 5: mark-to-ligature attachment.  Format dispatcher;
 * only format 1 is defined by OpenType. */
struct MarkLigPos
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch verifies the format field itself is safely readable. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  MarkLigPosFormat1	format1;
  } u;
};
2448
2449
2450 typedef AnchorMatrix Mark2Array; /* mark2-major--
2451 * in order of Mark2Coverage Index--,
2452 * mark1-minor--
2453 * ordered by class--zero-based. */
2454
/* GPOS Lookup Type 6, format 1: attach a mark glyph (mark1) to a
 * preceding mark glyph (mark2), using per-class anchors from
 * Mark1Array/Mark2Array. */
struct MarkMarkPosFormat1
{
  /* The lookup can only apply if covered glyphs from both coverages
   * survive in `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    return (this+mark1Coverage).intersects (glyphs) &&
	   (this+mark2Coverage).intersects (glyphs);
  }

  /* GPOS subtables do not reference other lookups; nothing to close over. */
  void closure_lookups (hb_closure_lookups_context_t *c) const {}

  /* Collect variation-store indices used by the anchors of retained
   * mark1/mark2 glyphs (variable-font subsetting support). */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    + hb_zip (this+mark1Coverage, this+mark1Array)
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    | hb_apply ([&] (const MarkRecord& record) { record.collect_variation_indices (c, &(this+mark1Array)); })
    ;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, *c->glyph_set, &klass_mapping);

    unsigned mark2_count = (this+mark2Array).rows;
    auto mark2_iter =
    + hb_zip (this+mark2Coverage, hb_range (mark2_count))
    | hb_filter (c->glyph_set, hb_first)
    | hb_map (hb_second)
    ;

    hb_sorted_vector_t<unsigned> mark2_indexes;
    for (const unsigned row : mark2_iter)
    {
      /* Only the (row, class) anchor-matrix cells for surviving classes. */
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (mark2_indexes)
      ;
    }
    (this+mark2Array).collect_variation_indices (c, mark2_indexes.iter ());
  }

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    if (unlikely (!(this+mark1Coverage).collect_coverage (c->input))) return;
    if (unlikely (!(this+mark2Coverage).collect_coverage (c->input))) return;
  }

  const Coverage &get_coverage () const { return this+mark1Coverage; }

  /* Position the current glyph (mark1) against a preceding mark glyph
   * (mark2), but only if they belong to the same base or the same
   * ligature component (ligature id/component checks below). */
  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    hb_buffer_t *buffer = c->buffer;
    unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
    if (likely (mark1_index == NOT_COVERED)) return_trace (false);

    /* now we search backwards for a suitable mark glyph until a non-mark glyph */
    hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
    skippy_iter.reset (buffer->idx, 1);
    /* Drop the Ignore* bits so preceding marks are not skipped over. */
    skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
    if (!skippy_iter.prev ()) return_trace (false);

    if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }

    unsigned int j = skippy_iter.idx;

    unsigned int id1 = _hb_glyph_info_get_lig_id (&buffer->cur());
    unsigned int id2 = _hb_glyph_info_get_lig_id (&buffer->info[j]);
    unsigned int comp1 = _hb_glyph_info_get_lig_comp (&buffer->cur());
    unsigned int comp2 = _hb_glyph_info_get_lig_comp (&buffer->info[j]);

    if (likely (id1 == id2))
    {
      if (id1 == 0) /* Marks belonging to the same base. */
	goto good;
      else if (comp1 == comp2) /* Marks belonging to the same ligature component. */
	goto good;
    }
    else
    {
      /* If ligature ids don't match, it may be the case that one of the marks
       * itself is a ligature.  In which case match. */
      if ((id1 > 0 && !comp1) || (id2 > 0 && !comp2))
	goto good;
    }

    /* Didn't match. */
    return_trace (false);

    good:
    unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
    if (mark2_index == NOT_COVERED) return_trace (false);

    return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
  }

  /* Subset: keep surviving mark1/mark2 glyphs, renumber mark classes
   * densely, and rewrite both coverages and both anchor arrays. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    hb_map_t klass_mapping;
    Markclass_closure_and_remap_indexes (this+mark1Coverage, this+mark1Array, glyphset, &klass_mapping);

    /* No surviving mark class means the subtable is empty; drop it. */
    if (!klass_mapping.get_population ()) return_trace (false);
    out->classCount = klass_mapping.get_population ();

    auto mark1_iter =
    + hb_zip (this+mark1Coverage, this+mark1Array)
    | hb_filter (glyphset, hb_first)
    ;

    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + mark1_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->mark1Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    out->mark1Array.serialize_subset (c, mark1Array, this,
				      (this+mark1Coverage).iter (),
				      &klass_mapping);

    unsigned mark2count = (this+mark2Array).rows;
    auto mark2_iter =
    + hb_zip (this+mark2Coverage, hb_range (mark2count))
    | hb_filter (glyphset, hb_first)
    ;

    new_coverage.reset ();
    + mark2_iter
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    if (!out->mark2Coverage.serialize_serialize (c->serializer, new_coverage.iter ()))
      return_trace (false);

    /* Flattened anchor-matrix cell indices to keep: one per surviving
     * (mark2 row, surviving class) pair. */
    hb_sorted_vector_t<unsigned> mark2_indexes;
    for (const unsigned row : + mark2_iter
			      | hb_map (hb_second))
    {
      + hb_range ((unsigned) classCount)
      | hb_filter (klass_mapping)
      | hb_map ([&] (const unsigned col) { return row * (unsigned) classCount + col; })
      | hb_sink (mark2_indexes)
      ;
    }

    out->mark2Array.serialize_subset (c, mark2Array, this, mark2_iter.len (), mark2_indexes.iter ());

    return_trace (true);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this) &&
		  mark1Coverage.sanitize (c, this) &&
		  mark2Coverage.sanitize (c, this) &&
		  mark1Array.sanitize (c, this) &&
		  mark2Array.sanitize (c, this, (unsigned int) classCount));
  }

  protected:
  HBUINT16	format;			/* Format identifier--format = 1 */
  Offset16To<Coverage>
		mark1Coverage;		/* Offset to Combining Mark1 Coverage
					 * table--from beginning of MarkMarkPos
					 * subtable */
  Offset16To<Coverage>
		mark2Coverage;		/* Offset to Combining Mark2 Coverage
					 * table--from beginning of MarkMarkPos
					 * subtable */
  HBUINT16	classCount;		/* Number of defined mark classes */
  Offset16To<MarkArray>
		mark1Array;		/* Offset to Mark1Array table--from
					 * beginning of MarkMarkPos subtable */
  Offset16To<Mark2Array>
		mark2Array;		/* Offset to Mark2Array table--from
					 * beginning of MarkMarkPos subtable */
  public:
  DEFINE_SIZE_STATIC (12);
};
2647
/* GPOS Lookup Type 6: mark-to-mark attachment.  Format dispatcher; only
 * format 1 is defined by OpenType. */
struct MarkMarkPos
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch verifies the format field itself is safely readable. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, hb_forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  MarkMarkPosFormat1	format1;
  } u;
};
2667
2668
/* GPOS Lookup Type 7: contextual positioning — structurally identical to
 * the shared Context machinery in hb-ot-layout-gsubgpos.hh. */
struct ContextPos : Context {};
2670
/* GPOS Lookup Type 8: chained contextual positioning — shares the
 * ChainContext machinery in hb-ot-layout-gsubgpos.hh. */
struct ChainContextPos : ChainContext {};
2672
/* GPOS Lookup Type 9: extension positioning — a 32-bit-offset wrapper
 * around any other positioning subtable type. */
struct ExtensionPos : Extension<ExtensionPos>
{
  typedef struct PosLookupSubTable SubTable;
};
2677
2678
2679
2680 /*
2681 * PosLookup
2682 */
2683
2684
/* Union of all GPOS subtable types.  The actual type is not stored in the
 * data itself; it is the lookup's lookupType, passed in to dispatch(). */
struct PosLookupSubTable
{
  friend struct Lookup;
  friend struct PosLookup;

  /* GPOS lookup-type values, per the OpenType GPOS specification. */
  enum Type {
    Single		= 1,
    Pair		= 2,
    Cursive		= 3,
    MarkBase		= 4,
    MarkLig		= 5,
    MarkMark		= 6,
    Context		= 7,
    ChainContext	= 8,
    Extension		= 9
  };

  /* Route the context to the union member selected by lookup_type. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, unsigned int lookup_type, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, lookup_type);
    switch (lookup_type) {
    case Single:		return_trace (u.single.dispatch (c, hb_forward<Ts> (ds)...));
    case Pair:			return_trace (u.pair.dispatch (c, hb_forward<Ts> (ds)...));
    case Cursive:		return_trace (u.cursive.dispatch (c, hb_forward<Ts> (ds)...));
    case MarkBase:		return_trace (u.markBase.dispatch (c, hb_forward<Ts> (ds)...));
    case MarkLig:		return_trace (u.markLig.dispatch (c, hb_forward<Ts> (ds)...));
    case MarkMark:		return_trace (u.markMark.dispatch (c, hb_forward<Ts> (ds)...));
    case Context:		return_trace (u.context.dispatch (c, hb_forward<Ts> (ds)...));
    case ChainContext:		return_trace (u.chainContext.dispatch (c, hb_forward<Ts> (ds)...));
    case Extension:		return_trace (u.extension.dispatch (c, hb_forward<Ts> (ds)...));
    default:			return_trace (c->default_return_value ());
    }
  }

  /* True iff the subtable could apply to some glyph in `glyphs`. */
  bool intersects (const hb_set_t *glyphs, unsigned int lookup_type) const
  {
    hb_intersects_context_t c (glyphs);
    return dispatch (&c, lookup_type);
  }

  protected:
  union {
  SinglePos		single;
  PairPos		pair;
  CursivePos		cursive;
  MarkBasePos		markBase;
  MarkLigPos		markLig;
  MarkMarkPos		markMark;
  ContextPos		context;
  ChainContextPos	chainContext;
  ExtensionPos		extension;
  } u;
  public:
  DEFINE_SIZE_MIN (0);
};
2741
2742
/* A GPOS lookup: generic Lookup specialized to PosLookupSubTable. */
struct PosLookup : Lookup
{
  typedef struct PosLookupSubTable SubTable;

  const SubTable& get_subtable (unsigned int i) const
  { return Lookup::get_subtable<SubTable> (i); }

  /* Only GSUB has reverse lookups (type 8); GPOS never applies in reverse. */
  bool is_reverse () const
  {
    return false;
  }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    return_trace (dispatch (c));
  }

  /* True iff any subtable of this lookup could apply to `glyphs`. */
  bool intersects (const hb_set_t *glyphs) const
  {
    hb_intersects_context_t c (glyphs);
    return dispatch (&c);
  }

  hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
  { return dispatch (c); }

  /* Mark this lookup visited and, if it cannot apply to the closure's
   * glyph set, inactive; otherwise recurse into lookups referenced by
   * contextual subtables. */
  hb_closure_lookups_context_t::return_t closure_lookups (hb_closure_lookups_context_t *c, unsigned this_index) const
  {
    if (c->is_lookup_visited (this_index))
      return hb_closure_lookups_context_t::default_return_value ();

    c->set_lookup_visited (this_index);
    if (!intersects (c->glyphs))
    {
      c->set_lookup_inactive (this_index);
      return hb_closure_lookups_context_t::default_return_value ();
    }
    c->set_recurse_func (dispatch_closure_lookups_recurse_func);

    hb_closure_lookups_context_t::return_t ret = dispatch (c);
    return ret;
  }

  /* Add every glyph this lookup's coverages can match to `glyphs`. */
  template <typename set_t>
  void collect_coverage (set_t *glyphs) const
  {
    hb_collect_coverage_context_t<set_t> c (glyphs);
    dispatch (&c);
  }

  static inline bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);

  template <typename context_t>
  static typename context_t::return_t dispatch_recurse_func (context_t *c, unsigned int lookup_index);

  HB_INTERNAL static hb_closure_lookups_context_t::return_t dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index);

  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  { return Lookup::dispatch<SubTable> (c, hb_forward<Ts> (ds)...); }

  bool subset (hb_subset_context_t *c) const
  { return Lookup::subset<SubTable> (c); }

  bool sanitize (hb_sanitize_context_t *c) const
  { return Lookup::sanitize<SubTable> (c); }
};
2811
2812 /*
2813 * GPOS -- Glyph Positioning
2814 * https://docs.microsoft.com/en-us/typography/opentype/spec/gpos
2815 */
2816
/* The GPOS table proper: a GSUBGPOS header whose lookup list holds
 * PosLookup entries.  Positioning entry points (position_start /
 * position_finish_*) are static and defined below. */
struct GPOS : GSUBGPOS
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_GPOS;

  const PosLookup& get_lookup (unsigned int i) const
  { return static_cast<const PosLookup &> (GSUBGPOS::get_lookup (i)); }

  /* Buffer lifecycle hooks used by the shaper; implementations below. */
  static inline void position_start (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish_advances (hb_font_t *font, hb_buffer_t *buffer);
  static inline void position_finish_offsets (hb_font_t *font, hb_buffer_t *buffer);

  bool subset (hb_subset_context_t *c) const
  {
    hb_subset_layout_context_t l (c, tableTag, c->plan->gpos_lookups, c->plan->gpos_langsys, c->plan->gpos_features);
    return GSUBGPOS::subset<PosLookup> (&l);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  { return GSUBGPOS::sanitize<PosLookup> (c); }

  HB_INTERNAL bool is_blocklisted (hb_blob_t *blob,
				   hb_face_t *face) const;

  /* Visit variation indices of only the lookups retained by the subset
   * plan (c->gpos_lookups). */
  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  {
    for (unsigned i = 0; i < GSUBGPOS::get_lookup_count (); i++)
    {
      if (!c->gpos_lookups->has (i)) continue;
      const PosLookup &l = get_lookup (i);
      l.dispatch (c);
    }
  }

  void closure_lookups (hb_face_t *face,
			const hb_set_t *glyphs,
			hb_set_t *lookup_indexes /* IN/OUT */) const
  { GSUBGPOS::closure_lookups<PosLookup> (face, glyphs, lookup_indexes); }

  typedef GSUBGPOS::accelerator_t<GPOS> accelerator_t;
};
2857
2858
2859 static void
reverse_cursive_minor_offset(hb_glyph_position_t * pos,unsigned int i,hb_direction_t direction,unsigned int new_parent)2860 reverse_cursive_minor_offset (hb_glyph_position_t *pos, unsigned int i, hb_direction_t direction, unsigned int new_parent)
2861 {
2862 int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2863 if (likely (!chain || 0 == (type & ATTACH_TYPE_CURSIVE)))
2864 return;
2865
2866 pos[i].attach_chain() = 0;
2867
2868 unsigned int j = (int) i + chain;
2869
2870 /* Stop if we see new parent in the chain. */
2871 if (j == new_parent)
2872 return;
2873
2874 reverse_cursive_minor_offset (pos, j, direction, new_parent);
2875
2876 if (HB_DIRECTION_IS_HORIZONTAL (direction))
2877 pos[j].y_offset = -pos[i].y_offset;
2878 else
2879 pos[j].x_offset = -pos[i].x_offset;
2880
2881 pos[j].attach_chain() = -chain;
2882 pos[j].attach_type() = type;
2883 }
2884 static void
propagate_attachment_offsets(hb_glyph_position_t * pos,unsigned int len,unsigned int i,hb_direction_t direction)2885 propagate_attachment_offsets (hb_glyph_position_t *pos,
2886 unsigned int len,
2887 unsigned int i,
2888 hb_direction_t direction)
2889 {
2890 /* Adjusts offsets of attached glyphs (both cursive and mark) to accumulate
2891 * offset of glyph they are attached to. */
2892 int chain = pos[i].attach_chain(), type = pos[i].attach_type();
2893 if (likely (!chain))
2894 return;
2895
2896 pos[i].attach_chain() = 0;
2897
2898 unsigned int j = (int) i + chain;
2899
2900 if (unlikely (j >= len))
2901 return;
2902
2903 propagate_attachment_offsets (pos, len, j, direction);
2904
2905 assert (!!(type & ATTACH_TYPE_MARK) ^ !!(type & ATTACH_TYPE_CURSIVE));
2906
2907 if (type & ATTACH_TYPE_CURSIVE)
2908 {
2909 if (HB_DIRECTION_IS_HORIZONTAL (direction))
2910 pos[i].y_offset += pos[j].y_offset;
2911 else
2912 pos[i].x_offset += pos[j].x_offset;
2913 }
2914 else /*if (type & ATTACH_TYPE_MARK)*/
2915 {
2916 pos[i].x_offset += pos[j].x_offset;
2917 pos[i].y_offset += pos[j].y_offset;
2918
2919 assert (j < i);
2920 if (HB_DIRECTION_IS_FORWARD (direction))
2921 for (unsigned int k = j; k < i; k++) {
2922 pos[i].x_offset -= pos[k].x_advance;
2923 pos[i].y_offset -= pos[k].y_advance;
2924 }
2925 else
2926 for (unsigned int k = j + 1; k < i + 1; k++) {
2927 pos[i].x_offset += pos[k].x_advance;
2928 pos[i].y_offset += pos[k].y_advance;
2929 }
2930 }
2931 }
2932
2933 void
position_start(hb_font_t * font HB_UNUSED,hb_buffer_t * buffer)2934 GPOS::position_start (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2935 {
2936 unsigned int count = buffer->len;
2937 for (unsigned int i = 0; i < count; i++)
2938 buffer->pos[i].attach_chain() = buffer->pos[i].attach_type() = 0;
2939 }
2940
void
GPOS::position_finish_advances (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer HB_UNUSED)
{
  /* Intentionally a no-op: advances need no fixup pass here; kept for
   * API symmetry with position_start / position_finish_offsets. */
  //_hb_buffer_assert_gsubgpos_vars (buffer);
}
2946
2947 void
position_finish_offsets(hb_font_t * font HB_UNUSED,hb_buffer_t * buffer)2948 GPOS::position_finish_offsets (hb_font_t *font HB_UNUSED, hb_buffer_t *buffer)
2949 {
2950 _hb_buffer_assert_gsubgpos_vars (buffer);
2951
2952 unsigned int len;
2953 hb_glyph_position_t *pos = hb_buffer_get_glyph_positions (buffer, &len);
2954 hb_direction_t direction = buffer->props.direction;
2955
2956 /* Handle attachments */
2957 if (buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT)
2958 for (unsigned int i = 0; i < len; i++)
2959 propagate_attachment_offsets (pos, len, i, direction);
2960 }
2961
2962
/* Concrete accelerator type so hb_face_t can lazily load/cache GPOS. */
struct GPOS_accelerator_t : GPOS::accelerator_t {};
2964
2965
2966 /* Out-of-class implementation for methods recursing */
2967
2968 #ifndef HB_NO_OT_LAYOUT
2969 template <typename context_t>
dispatch_recurse_func(context_t * c,unsigned int lookup_index)2970 /*static*/ typename context_t::return_t PosLookup::dispatch_recurse_func (context_t *c, unsigned int lookup_index)
2971 {
2972 const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2973 return l.dispatch (c);
2974 }
2975
dispatch_closure_lookups_recurse_func(hb_closure_lookups_context_t * c,unsigned this_index)2976 /*static*/ inline hb_closure_lookups_context_t::return_t PosLookup::dispatch_closure_lookups_recurse_func (hb_closure_lookups_context_t *c, unsigned this_index)
2977 {
2978 const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (this_index);
2979 return l.closure_lookups (c, this_index);
2980 }
2981
apply_recurse_func(hb_ot_apply_context_t * c,unsigned int lookup_index)2982 /*static*/ bool PosLookup::apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index)
2983 {
2984 const PosLookup &l = c->face->table.GPOS.get_relaxed ()->table->get_lookup (lookup_index);
2985 unsigned int saved_lookup_props = c->lookup_props;
2986 unsigned int saved_lookup_index = c->lookup_index;
2987 c->set_lookup_index (lookup_index);
2988 c->set_lookup_props (l.get_props ());
2989 bool ret = l.dispatch (c);
2990 c->set_lookup_index (saved_lookup_index);
2991 c->set_lookup_props (saved_lookup_props);
2992 return ret;
2993 }
2994 #endif
2995
2996
2997 } /* namespace OT */
2998
2999
3000 #endif /* HB_OT_LAYOUT_GPOS_TABLE_HH */
3001