1 /*
2 * Copyright © 2007,2008,2009,2010 Red Hat, Inc.
3 * Copyright © 2010,2012 Google, Inc.
4 *
5 * This is part of HarfBuzz, a text shaping library.
6 *
7 * Permission is hereby granted, without written agreement and without
8 * license or royalty fees, to use, copy, modify, and distribute this
9 * software and its documentation for any purpose, provided that the
10 * above copyright notice and the following two paragraphs appear in
11 * all copies of this software.
12 *
13 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17 * DAMAGE.
18 *
19 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21 * FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
22 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24 *
25 * Red Hat Author(s): Behdad Esfahbod
26 * Google Author(s): Behdad Esfahbod
27 */
28
29 #ifndef HB_OT_LAYOUT_GSUBGPOS_HH
30 #define HB_OT_LAYOUT_GSUBGPOS_HH
31
32 #include "hb.hh"
33 #include "hb-buffer.hh"
34 #include "hb-map.hh"
35 #include "hb-set.hh"
36 #include "hb-ot-map.hh"
37 #include "hb-ot-layout-common.hh"
38 #include "hb-ot-layout-gdef-table.hh"
39
40
41 namespace OT {
42
43
/* Dispatch context answering: does a lookup/subtable intersect the given
 * glyph set?  `dispatch' forwards to each subtable's intersects(), and
 * sublookup iteration stops at the first subtable that reports true. */
struct hb_intersects_context_t :
       hb_dispatch_context_t<hb_intersects_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  /* Glyph set to test against; not owned, must outlive the context. */
  const hb_set_t *glyphs;

  hb_intersects_context_t (const hb_set_t *glyphs_) :
			     glyphs (glyphs_) {}
};
57
/* Dispatch context answering: may any subtable perform a non-1:1 glyph
 * mapping (one glyph to zero or multiple glyphs)?  Iteration stops at
 * the first subtable that says yes. */
struct hb_have_non_1to1_context_t :
       hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
};
66
/* Dispatch context for glyph-set closure: grows `glyphs' with everything
 * the GSUB lookups can produce from it.  Work is bounded both by
 * recursion depth (nesting_level_left) and by a total visited-lookup
 * budget (lookup_count vs HB_MAX_LOOKUP_VISIT_COUNT) to survive
 * malicious/cyclic fonts.  Newly produced glyphs accumulate in `output'
 * and are merged into `glyphs' by flush(). */
struct hb_closure_context_t :
       hb_dispatch_context_t<hb_closure_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  /* Recurse into a nested lookup; silently a no-op when the depth budget
   * is exhausted or no recurse function was installed. */
  void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
    nesting_level_left++;
  }

  void reset_lookup_visit_count ()
  { lookup_count = 0; }

  bool lookup_limit_exceeded ()
  { return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }

  /* Returns whether a lookup should be processed: charges one unit of
   * the visit budget, then skips lookups already done for the current
   * glyph population.  Note the budget is charged even when the lookup
   * turns out to be done. */
  bool should_visit_lookup (unsigned int lookup_index)
  {
    if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
      return false;

    if (is_lookup_done (lookup_index))
      return false;

    return true;
  }

  /* Memoization: a lookup is "done" if it was already visited with the
   * same glyph population AND the currently-active parent glyphs are a
   * subset of what it has been run against before.  On any allocation/
   * error condition we conservatively report "done" to stop work. */
  bool is_lookup_done (unsigned int lookup_index)
  {
    if (done_lookups_glyph_count->in_error () ||
	done_lookups_glyph_set->in_error ())
      return true;

    /* Have we visited this lookup with the current set of glyphs? */
    if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
    {
      done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());

      if (!done_lookups_glyph_set->get (lookup_index))
      {
	hb_set_t* empty_set = hb_set_create ();
	/* The map takes ownership of empty_set on success; destroy it
	 * ourselves if insertion failed. */
	if (unlikely (!done_lookups_glyph_set->set (lookup_index, empty_set)))
	{
	  hb_set_destroy (empty_set);
	  return true;
	}
      }

      /* Population changed: previously covered glyphs are stale. */
      hb_set_clear (done_lookups_glyph_set->get (lookup_index));
    }

    hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
    if (unlikely (covered_glyph_set->in_error ()))
      return true;
    if (parent_active_glyphs ().is_subset (*covered_glyph_set))
      return true;

    covered_glyph_set->union_ (parent_active_glyphs ());
    return false;
  }

  /* Active glyphs of the grandparent frame; falls back to the full
   * closure set when the stack has fewer than two frames. */
  const hb_set_t& previous_parent_active_glyphs () {
    if (active_glyphs_stack.length <= 1)
      return *glyphs;

    return active_glyphs_stack[active_glyphs_stack.length - 2];
  }

  /* Active glyphs of the innermost frame; falls back to the full
   * closure set when the stack is empty. */
  const hb_set_t& parent_active_glyphs ()
  {
    if (!active_glyphs_stack)
      return *glyphs;

    return active_glyphs_stack.tail ();
  }

  /* Pushes a fresh (empty) active-glyphs frame and returns it for the
   * caller to populate. */
  hb_set_t& push_cur_active_glyphs ()
  {
    return *active_glyphs_stack.push ();
  }

  bool pop_cur_done_glyphs ()
  {
    if (active_glyphs_stack.length < 1)
      return false;

    active_glyphs_stack.pop ();
    return true;
  }

  hb_face_t *face;
  hb_set_t *glyphs;			/* The closure set being grown; not owned. */
  hb_set_t output[1];			/* Staging set for newly produced glyphs. */
  hb_vector_t<hb_set_t> active_glyphs_stack;
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;	/* Remaining recursion depth budget. */

  hb_closure_context_t (hb_face_t *face_,
			hb_set_t *glyphs_,
			hb_map_t *done_lookups_glyph_count_,
			hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set_,
			unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			  face (face_),
			  glyphs (glyphs_),
			  recurse_func (nullptr),
			  nesting_level_left (nesting_level_left_),
			  done_lookups_glyph_count (done_lookups_glyph_count_),
			  done_lookups_glyph_set (done_lookups_glyph_set_),
			  lookup_count (0)
  {}

  ~hb_closure_context_t () { flush (); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  /* Merges staged output into the closure set and clears per-run state. */
  void flush ()
  {
    output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID);	/* Remove invalid glyphs. */
    glyphs->union_ (*output);
    output->clear ();
    active_glyphs_stack.pop ();
    active_glyphs_stack.reset ();
  }

  private:
  /* Per-lookup glyph population at last visit; keyed by lookup index. */
  hb_map_t *done_lookups_glyph_count;
  /* Per-lookup set of parent glyphs already processed; keyed by lookup index. */
  hb_hashmap_t<unsigned, hb_set_t *> *done_lookups_glyph_set;
  unsigned int lookup_count;		/* Visit budget consumed so far. */
};
203
204
205
/* Dispatch context computing the transitive closure over lookup indices:
 * records which lookups are reachable (visited) and which turn out to be
 * inactive for the given glyph set.  Bounded by recursion depth and a
 * total visited-lookup budget, like hb_closure_context_t. */
struct hb_closure_lookups_context_t :
       hb_dispatch_context_t<hb_closure_lookups_context_t>
{
  typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Return if new lookup was recursed to before. */
    if (lookup_limit_exceeded ()
	|| visited_lookups->in_error ()
	|| visited_lookups->has (lookup_index))
      // Don't increment lookup count here, that will be done in the call to closure_lookups()
      // made by recurse_func.
      return;

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;
  }

  void set_lookup_visited (unsigned lookup_index)
  { visited_lookups->add (lookup_index); }

  void set_lookup_inactive (unsigned lookup_index)
  { inactive_lookups->add (lookup_index); }

  /* Read-only budget check; does not charge the budget (contrast with
   * is_lookup_visited(), which increments lookup_count). */
  bool lookup_limit_exceeded ()
  {
    bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
    if (ret)
      DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
    return ret; }

  /* Charges one unit of the visit budget, then reports whether the
   * lookup was seen before.  Errors conservatively count as visited. */
  bool is_lookup_visited (unsigned lookup_index)
  {
    if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
    {
      DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
		 lookup_count, lookup_index);
      return true;
    }

    if (unlikely (visited_lookups->in_error ()))
      return true;

    return visited_lookups->has (lookup_index);
  }

  hb_face_t *face;
  const hb_set_t *glyphs;		/* Glyph set the closure is computed for; not owned. */
  recurse_func_t recurse_func;
  unsigned int nesting_level_left;	/* Remaining recursion depth budget. */

  hb_closure_lookups_context_t (hb_face_t *face_,
				const hb_set_t *glyphs_,
				hb_set_t *visited_lookups_,
				hb_set_t *inactive_lookups_,
				unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
				face (face_),
				glyphs (glyphs_),
				recurse_func (nullptr),
				nesting_level_left (nesting_level_left_),
				visited_lookups (visited_lookups_),
				inactive_lookups (inactive_lookups_),
				lookup_count (0) {}

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }

  private:
  hb_set_t *visited_lookups;		/* OUT: lookup indices reached; not owned. */
  hb_set_t *inactive_lookups;		/* OUT: lookups found inactive; not owned. */
  unsigned int lookup_count;		/* Visit budget consumed so far. */
};
284
/* Dispatch context answering: would a lookup apply to the given fixed
 * glyph sequence?  Carries the candidate glyph array (not a buffer) plus
 * the zero_context flag; iteration stops at the first subtable that
 * would apply. */
struct hb_would_apply_context_t :
       hb_dispatch_context_t<hb_would_apply_context_t, bool>
{
  template <typename T>
  return_t dispatch (const T &obj) { return obj.would_apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }

  hb_face_t *face;
  const hb_codepoint_t *glyphs;	/* Candidate glyph sequence; not owned. */
  unsigned int len;		/* Number of entries in glyphs. */
  bool zero_context;		/* If set, reject rules that require surrounding context. */

  hb_would_apply_context_t (hb_face_t *face_,
			    const hb_codepoint_t *glyphs_,
			    unsigned int len_,
			    bool zero_context_) :
			      face (face_),
			      glyphs (glyphs_),
			      len (len_),
			      zero_context (zero_context_) {}
};
307
/* Dispatch context collecting, for a lookup, the glyph sets it can see:
 * backtrack (before), input, lookahead (after), and the glyphs it can
 * produce (output).  Any of the four OUT sets may be the shared empty
 * set when the caller is not interested in it. */
struct hb_collect_glyphs_context_t :
       hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
  typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
  static return_t default_return_value () { return hb_empty_t (); }
  void recurse (unsigned int lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func))
      return;

    /* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
     * past the previous check. For GSUB, we only want to collect the output
     * glyphs in the recursion. If output is not requested, we can go home now.
     *
     * Note further, that the above is not exactly correct. A recursed lookup
     * is allowed to match input that is not matched in the context, but that's
     * not how most fonts are built. It's possible to relax that and recurse
     * with all sets here if it proves to be an issue.
     */

    if (output == hb_set_get_empty ())
      return;

    /* Return if new lookup was recursed to before. */
    if (recursed_lookups->has (lookup_index))
      return;

    /* Redirect before/input/after to the empty set for the duration of
     * the recursion: only output glyphs are collected from sub-lookups. */
    hb_set_t *old_before = before;
    hb_set_t *old_input  = input;
    hb_set_t *old_after  = after;
    before = input = after = hb_set_get_empty ();

    nesting_level_left--;
    recurse_func (this, lookup_index);
    nesting_level_left++;

    before = old_before;
    input  = old_input;
    after  = old_after;

    recursed_lookups->add (lookup_index);
  }

  hb_face_t *face;
  hb_set_t *before;			/* OUT: backtrack glyphs. */
  hb_set_t *input;			/* OUT: input glyphs. */
  hb_set_t *after;			/* OUT: lookahead glyphs. */
  hb_set_t *output;			/* OUT: producible glyphs. */
  recurse_func_t recurse_func;
  hb_set_t *recursed_lookups;		/* Owned; lookups already recursed into. */
  unsigned int nesting_level_left;	/* Remaining recursion depth budget. */

  hb_collect_glyphs_context_t (hb_face_t *face_,
			       hb_set_t  *glyphs_before, /* OUT.  May be NULL */
			       hb_set_t  *glyphs_input,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_after,  /* OUT.  May be NULL */
			       hb_set_t  *glyphs_output, /* OUT.  May be NULL */
			       unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
			      face (face_),
			      before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
			      input  (glyphs_input  ? glyphs_input  : hb_set_get_empty ()),
			      after  (glyphs_after  ? glyphs_after  : hb_set_get_empty ()),
			      output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
			      recurse_func (nullptr),
			      recursed_lookups (hb_set_create ()),
			      nesting_level_left (nesting_level_left_) {}
  ~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }

  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
380
381
382
/* Dispatch context that folds every subtable's Coverage table into a
 * caller-supplied set-like accumulator (any type with the set_t
 * interface, e.g. hb_set_t or a digest).  Iteration never stops early:
 * stop_sublookup_iteration() accumulates and returns false. */
template <typename set_t>
struct hb_collect_coverage_context_t :
       hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
{
  typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
  template <typename T>
  return_t dispatch (const T &obj) { return obj.get_coverage (); }
  static return_t default_return_value () { return Null (Coverage); }
  bool stop_sublookup_iteration (return_t r) const
  {
    r.collect_coverage (set);
    return false;
  }

  hb_collect_coverage_context_t (set_t *set_) :
				   set (set_) {}

  set_t *set;		/* Accumulator; not owned. */
};
402
403
/* The main run-time dispatch context: applies GSUB/GPOS lookup subtables
 * to a buffer.  Holds the font/face/buffer, the current lookup's
 * properties (mask, flags, index), GDEF glyph-class data, and the two
 * skipping iterators used for input-sequence and context matching. */
struct hb_ot_apply_context_t :
       hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
  /* Per-position matching policy: decides whether a glyph matches the
   * expected value (may_match) and whether it may be skipped under the
   * current lookup flags (may_skip). */
  struct matcher_t
  {
    matcher_t () :
	     lookup_props (0),
	     ignore_zwnj (false),
	     ignore_zwj (false),
	     mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
	     syllable arg1(0),
#undef arg1
	     match_func (nullptr),
	     match_data (nullptr) {}

    typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);

    void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
    void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
    void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
    void set_mask (hb_mask_t mask_) { mask = mask_; }
    void set_syllable (uint8_t syllable_) { syllable = syllable_; }
    void set_match_func (match_func_t match_func_,
			 const void *match_data_)
    { match_func = match_func_; match_data = match_data_; }

    enum may_match_t {
      MATCH_NO,
      MATCH_YES,
      MATCH_MAYBE	/* No match_func installed: defer to skip status. */
    };

    may_match_t may_match (const hb_glyph_info_t &info,
			   const HBUINT16        *glyph_data) const
    {
      /* Mask mismatch or (when syllable-restricted) wrong syllable: no match. */
      if (!(info.mask & mask) ||
	  (syllable && syllable != info.syllable ()))
	return MATCH_NO;

      if (match_func)
	return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;

      return MATCH_MAYBE;
    }

    enum may_skip_t {
      SKIP_NO,
      SKIP_YES,		/* Excluded by lookup flags: always skipped. */
      SKIP_MAYBE	/* Default-ignorable: skipped unless it matches. */
    };

    may_skip_t may_skip (const hb_ot_apply_context_t *c,
			 const hb_glyph_info_t       &info) const
    {
      if (!c->check_glyph_property (&info, lookup_props))
	return SKIP_YES;

      if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
		    (ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
		    (ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
	return SKIP_MAYBE;

      return SKIP_NO;
    }

    protected:
    unsigned int lookup_props;
    bool ignore_zwnj;
    bool ignore_zwj;
    hb_mask_t mask;
    uint8_t syllable;
    match_func_t match_func;
    const void *match_data;
  };

  /* Iterator over buffer positions that transparently skips glyphs the
   * current lookup ignores.  next() walks forward over buffer->info;
   * prev() walks backward over buffer->out_info. */
  struct skipping_iterator_t
  {
    void init (hb_ot_apply_context_t *c_, bool context_match = false)
    {
      c = c_;
      match_glyph_data = nullptr;
      matcher.set_match_func (nullptr, nullptr);
      matcher.set_lookup_props (c->lookup_props);
      /* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
      matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
      /* Ignore ZWJ if we are matching context, or asked to. */
      matcher.set_ignore_zwj (context_match || c->auto_zwj);
      matcher.set_mask (context_match ? -1 : c->lookup_mask);
    }
    void set_lookup_props (unsigned int lookup_props)
    {
      matcher.set_lookup_props (lookup_props);
    }
    /* Installs the per-position match callback and the parallel array of
     * expected values (one HBUINT16 per position to match). */
    void set_match_func (matcher_t::match_func_t match_func_,
			 const void *match_data_,
			 const HBUINT16 glyph_data[])
    {
      matcher.set_match_func (match_func_, match_data_);
      match_glyph_data = glyph_data;
    }

    void reset (unsigned int start_index_,
		unsigned int num_items_)
    {
      idx = start_index_;
      num_items = num_items_;
      end = c->buffer->len;
      /* Restrict matching to the current syllable only when starting at
       * the buffer's current position. */
      matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
    }

    /* Un-consume the last matched position (backs up the expected-value
     * pointer and the remaining-items count). */
    void reject ()
    {
      num_items++;
      if (match_glyph_data) match_glyph_data--;
    }

    matcher_t::may_skip_t
    may_skip (const hb_glyph_info_t &info) const
    { return matcher.may_skip (c, info); }

    /* Advance to the next matching position.  On failure, *unsafe_to is
     * set to the first position matching must not be resumed past. */
    bool next (unsigned *unsafe_to = nullptr)
    {
      assert (num_items > 0);
      while (idx + num_items < end)
      {
	idx++;
	const hb_glyph_info_t &info = c->buffer->info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  if (match_glyph_data) match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	{
	  if (unsafe_to)
	    *unsafe_to = idx + 1;
	  return false;
	}
      }
      if (unsafe_to)
	*unsafe_to = end;
      return false;
    }
    /* Mirror of next(), walking backward over already-output glyphs. */
    bool prev (unsigned *unsafe_from = nullptr)
    {
      assert (num_items > 0);
      while (idx > num_items - 1)
      {
	idx--;
	const hb_glyph_info_t &info = c->buffer->out_info[idx];

	matcher_t::may_skip_t skip = matcher.may_skip (c, info);
	if (unlikely (skip == matcher_t::SKIP_YES))
	  continue;

	matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
	if (match == matcher_t::MATCH_YES ||
	    (match == matcher_t::MATCH_MAYBE &&
	     skip == matcher_t::SKIP_NO))
	{
	  num_items--;
	  if (match_glyph_data) match_glyph_data++;
	  return true;
	}

	if (skip == matcher_t::SKIP_NO)
	{
	  if (unsafe_from)
	    *unsafe_from = hb_max (1u, idx) - 1u;
	  return false;
	}
      }
      if (unsafe_from)
	*unsafe_from = 0;
      return false;
    }

    unsigned int idx;			/* Current buffer position. */
    protected:
    hb_ot_apply_context_t *c;
    matcher_t matcher;
    const HBUINT16 *match_glyph_data;	/* Expected values, advanced in lock-step. */

    unsigned int num_items;		/* Positions still to be matched. */
    unsigned int end;			/* One past the last in-buffer position. */
  };


  const char *get_name () { return "APPLY"; }
  typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
  template <typename T>
  return_t dispatch (const T &obj) { return obj.apply (this); }
  static return_t default_return_value () { return false; }
  bool stop_sublookup_iteration (return_t r) const { return r; }
  /* Apply a nested lookup; also charges the buffer's operation budget
   * (max_ops) so malicious fonts cannot loop forever. */
  return_t recurse (unsigned int sub_lookup_index)
  {
    if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
      return default_return_value ();

    nesting_level_left--;
    bool ret = recurse_func (this, sub_lookup_index);
    nesting_level_left++;
    return ret;
  }

  skipping_iterator_t iter_input, iter_context;

  hb_font_t *font;
  hb_face_t *face;
  hb_buffer_t *buffer;
  recurse_func_t recurse_func;
  const GDEF &gdef;
  const VariationStore &var_store;

  hb_direction_t direction;
  hb_mask_t lookup_mask;
  unsigned int table_index;		/* GSUB/GPOS */
  unsigned int lookup_index;
  unsigned int lookup_props;
  unsigned int nesting_level_left;

  bool has_glyph_classes;
  bool auto_zwnj;
  bool auto_zwj;
  bool random;

  uint32_t random_state;


  hb_ot_apply_context_t (unsigned int table_index_,
			 hb_font_t *font_,
			 hb_buffer_t *buffer_) :
			iter_input (), iter_context (),
			font (font_), face (font->face), buffer (buffer_),
			recurse_func (nullptr),
			gdef (
#ifndef HB_NO_OT_LAYOUT
			      *face->table.GDEF->table
#else
			      Null (GDEF)
#endif
			     ),
			var_store (gdef.get_var_store ()),
			direction (buffer_->props.direction),
			lookup_mask (1),
			table_index (table_index_),
			lookup_index ((unsigned int) -1),
			lookup_props (0),
			nesting_level_left (HB_MAX_NESTING_LEVEL),
			has_glyph_classes (gdef.has_glyph_classes ()),
			auto_zwnj (true),
			auto_zwj (true),
			random (false),
			random_state (1) { init_iters (); }

  void init_iters ()
  {
    iter_input.init (this, false);
    iter_context.init (this, true);
  }

  /* Setters below re-init the iterators when they affect matching. */
  void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; init_iters (); }
  void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
  void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
  void set_random (bool random_) { random = random_; }
  void set_recurse_func (recurse_func_t func) { recurse_func = func; }
  void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
  void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }

  /* Deterministic PRNG for the OpenType 'rand' feature (minstd LCG). */
  uint32_t random_number ()
  {
    /* http://www.cplusplus.com/reference/random/minstd_rand/ */
    random_state = random_state * 48271 % 2147483647;
    return random_state;
  }

  bool match_properties_mark (hb_codepoint_t glyph,
			      unsigned int glyph_props,
			      unsigned int match_props) const
  {
    /* If using mark filtering sets, the high short of
     * match_props has the set index.
     */
    if (match_props & LookupFlag::UseMarkFilteringSet)
      return gdef.mark_set_covers (match_props >> 16, glyph);

    /* The second byte of match_props has the meaning
     * "ignore marks of attachment type different than
     * the attachment type specified."
     */
    if (match_props & LookupFlag::MarkAttachmentType)
      return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);

    return true;
  }

  /* Returns whether a glyph participates in the current lookup given its
   * GDEF glyph properties and the lookup's flags. */
  bool check_glyph_property (const hb_glyph_info_t *info,
			     unsigned int match_props) const
  {
    hb_codepoint_t glyph = info->codepoint;
    unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);

    /* Not covered, if, for example, glyph class is ligature and
     * match_props includes LookupFlags::IgnoreLigatures
     */
    if (glyph_props & match_props & LookupFlag::IgnoreFlags)
      return false;

    if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
      return match_properties_mark (glyph, glyph_props, match_props);

    return true;
  }

  /* Updates the current glyph's cached properties after a substitution:
   * marks it SUBSTITUTED, maintains the LIGATED/MULTIPLIED bits, and
   * refreshes its glyph class from GDEF (or from class_guess). */
  void _set_glyph_class (hb_codepoint_t glyph_index,
			 unsigned int class_guess = 0,
			 bool ligature = false,
			 bool component = false) const
  {
    unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
    props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
    if (ligature)
    {
      props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
      /* In the only place that the MULTIPLIED bit is used, Uniscribe
       * seems to only care about the "last" transformation between
       * Ligature and Multiple substitutions.  Ie. if you ligate, expand,
       * and ligate again, it forgives the multiplication and acts as
       * if only ligation happened.  As such, clear MULTIPLIED bit.
       */
      props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    }
    if (component)
      props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
    if (likely (has_glyph_classes))
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef.get_glyph_props (glyph_index));
    }
    else if (class_guess)
    {
      props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
    }
    else
      _hb_glyph_info_set_glyph_props (&buffer->cur(), props);
  }

  /* Replace current glyph and advance. */
  void replace_glyph (hb_codepoint_t glyph_index) const
  {
    _set_glyph_class (glyph_index);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Replace current glyph without advancing the buffer position. */
  void replace_glyph_inplace (hb_codepoint_t glyph_index) const
  {
    _set_glyph_class (glyph_index);
    buffer->cur().codepoint = glyph_index;
  }
  void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
				    unsigned int class_guess) const
  {
    _set_glyph_class (glyph_index, class_guess, true);
    (void) buffer->replace_glyph (glyph_index);
  }
  /* Emit one expansion glyph (Multiple Substitution) without consuming
   * the current input glyph. */
  void output_glyph_for_component (hb_codepoint_t glyph_index,
				   unsigned int class_guess) const
  {
    _set_glyph_class (glyph_index, class_guess, false, true);
    (void) buffer->output_glyph (glyph_index);
  }
};
786
787
/* Dispatch context that flattens a lookup's subtables into a vector of
 * type-erased hb_applicable_t entries, each pairing a subtable pointer
 * with its apply thunk and a coverage digest for fast rejection. */
struct hb_get_subtables_context_t :
       hb_dispatch_context_t<hb_get_subtables_context_t>
{
  /* Static thunk: recovers the concrete subtable type and applies it.
   * Stored as a plain function pointer in hb_applicable_t. */
  template <typename Type>
  static inline bool apply_to (const void *obj, OT::hb_ot_apply_context_t *c)
  {
    const Type *typed_obj = (const Type *) obj;
    return typed_obj->apply (c);
  }

  typedef bool (*hb_apply_func_t) (const void *obj, OT::hb_ot_apply_context_t *c);

  struct hb_applicable_t
  {
    template <typename T>
    void init (const T &obj_, hb_apply_func_t apply_func_)
    {
      obj = &obj_;
      apply_func = apply_func_;
      digest.init ();
      /* Seed the digest with this subtable's coverage for quick
       * may-apply filtering at run time. */
      obj_.get_coverage ().collect_coverage (&digest);
    }

    bool apply (OT::hb_ot_apply_context_t *c) const
    {
      /* Cheap digest test first; only then the real apply. */
      return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
    }

    private:
    const void *obj;		/* Type-erased subtable; not owned. */
    hb_apply_func_t apply_func;
    hb_set_digest_t digest;
  };

  typedef hb_vector_t<hb_applicable_t> array_t;

  /* Dispatch interface. */
  template <typename T>
  return_t dispatch (const T &obj)
  {
    hb_applicable_t *entry = array.push();
    entry->init (obj, apply_to<T>);
    return hb_empty_t ();
  }
  static return_t default_return_value () { return hb_empty_t (); }

  hb_get_subtables_context_t (array_t &array_) :
			      array (array_) {}

  array_t &array;		/* Caller-owned accumulator. */
};
839
840
841
842
/* Function-pointer types used by the (Chain)Context lookup machinery to
 * abstract over the three interpretations of a rule value: raw glyph id,
 * class value, or offset-to-Coverage.  `data' is the interpretation-
 * specific payload (e.g. a ClassDef, or the base for the offset). */
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const HBUINT16 &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data);

/* Callback bundle used at closure time. */
struct ContextClosureFuncs
{
  intersects_func_t intersects;
  intersected_glyphs_func_t intersected_glyphs;
};
/* Callback bundle used when collecting glyphs. */
struct ContextCollectGlyphsFuncs
{
  collect_glyphs_func_t collect;
};
/* Callback bundle used at apply time. */
struct ContextApplyFuncs
{
  match_func_t match;
};
861
862
intersects_glyph(const hb_set_t * glyphs,const HBUINT16 & value,const void * data HB_UNUSED)863 static inline bool intersects_glyph (const hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
864 {
865 return glyphs->has (value);
866 }
intersects_class(const hb_set_t * glyphs,const HBUINT16 & value,const void * data)867 static inline bool intersects_class (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
868 {
869 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
870 return class_def.intersects_class (glyphs, value);
871 }
intersects_coverage(const hb_set_t * glyphs,const HBUINT16 & value,const void * data)872 static inline bool intersects_coverage (const hb_set_t *glyphs, const HBUINT16 &value, const void *data)
873 {
874 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
875 return (data+coverage).intersects (glyphs);
876 }
877
878
intersected_glyph(const hb_set_t * glyphs HB_UNUSED,const void * data,unsigned value,hb_set_t * intersected_glyphs)879 static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs)
880 {
881 unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
882 intersected_glyphs->add (g);
883 }
intersected_class_glyphs(const hb_set_t * glyphs,const void * data,unsigned value,hb_set_t * intersected_glyphs)884 static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
885 {
886 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
887 class_def.intersected_class_glyphs (glyphs, value, intersected_glyphs);
888 }
/* intersected-glyphs callback for CoverageBasedContext: `value` is the byte
 * offset of a Coverage table relative to `data`; collect the covered glyphs
 * that are also in `glyphs`. */
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs)
{
  Offset16To<Coverage> coverage;
  coverage = value; /* Stuff the raw offset into an Offset16 so `data+coverage` resolves it. */
  (data+coverage).intersected_coverage_glyphs (glyphs, intersected_glyphs);
}
895
896
array_is_subset_of(const hb_set_t * glyphs,unsigned int count,const HBUINT16 values[],intersects_func_t intersects_func,const void * intersects_data)897 static inline bool array_is_subset_of (const hb_set_t *glyphs,
898 unsigned int count,
899 const HBUINT16 values[],
900 intersects_func_t intersects_func,
901 const void *intersects_data)
902 {
903 for (const HBUINT16 &_ : + hb_iter (values, count))
904 if (!intersects_func (glyphs, _, intersects_data)) return false;
905 return true;
906 }
907
908
collect_glyph(hb_set_t * glyphs,const HBUINT16 & value,const void * data HB_UNUSED)909 static inline void collect_glyph (hb_set_t *glyphs, const HBUINT16 &value, const void *data HB_UNUSED)
910 {
911 glyphs->add (value);
912 }
collect_class(hb_set_t * glyphs,const HBUINT16 & value,const void * data)913 static inline void collect_class (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
914 {
915 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
916 class_def.collect_class (glyphs, value);
917 }
collect_coverage(hb_set_t * glyphs,const HBUINT16 & value,const void * data)918 static inline void collect_coverage (hb_set_t *glyphs, const HBUINT16 &value, const void *data)
919 {
920 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
921 (data+coverage).collect_coverage (glyphs);
922 }
collect_array(hb_collect_glyphs_context_t * c HB_UNUSED,hb_set_t * glyphs,unsigned int count,const HBUINT16 values[],collect_glyphs_func_t collect_func,const void * collect_data)923 static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
924 hb_set_t *glyphs,
925 unsigned int count,
926 const HBUINT16 values[],
927 collect_glyphs_func_t collect_func,
928 const void *collect_data)
929 {
930 return
931 + hb_iter (values, count)
932 | hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
933 ;
934 }
935
936
match_glyph(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data HB_UNUSED)937 static inline bool match_glyph (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data HB_UNUSED)
938 {
939 return glyph_id == value;
940 }
match_class(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data)941 static inline bool match_class (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
942 {
943 const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
944 return class_def.get_class (glyph_id) == value;
945 }
match_coverage(hb_codepoint_t glyph_id,const HBUINT16 & value,const void * data)946 static inline bool match_coverage (hb_codepoint_t glyph_id, const HBUINT16 &value, const void *data)
947 {
948 const Offset16To<Coverage> &coverage = (const Offset16To<Coverage>&)value;
949 return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
950 }
951
would_match_input(hb_would_apply_context_t * c,unsigned int count,const HBUINT16 input[],match_func_t match_func,const void * match_data)952 static inline bool would_match_input (hb_would_apply_context_t *c,
953 unsigned int count, /* Including the first glyph (not matched) */
954 const HBUINT16 input[], /* Array of input values--start with second glyph */
955 match_func_t match_func,
956 const void *match_data)
957 {
958 if (count != c->len)
959 return false;
960
961 for (unsigned int i = 1; i < count; i++)
962 if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
963 return false;
964
965 return true;
966 }
/* Matches a (chain-)context rule's input sequence against the buffer starting
 * at the current position buffer->idx (which is the already-matched first
 * glyph; input[] holds the remaining count-1 elements).
 *
 * On success: match_positions[0..count) holds the buffer positions of the
 * matched glyphs, *end_position is one past the last matched position, and
 * *p_total_component_count (if non-null) receives the summed ligature
 * component counts of the matched glyphs (used by ligature substitution).
 * On failure: *end_position is set only when the skipping iterator ran out,
 * to the position callers should mark unsafe-to-concat.
 * Returns false when count exceeds HB_MAX_CONTEXT_LENGTH. */
static inline bool match_input (hb_ot_apply_context_t *c,
				unsigned int count, /* Including the first glyph (not matched) */
				const HBUINT16 input[], /* Array of input values--start with second glyph */
				match_func_t match_func,
				const void *match_data,
				unsigned int *end_position,
				unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
				unsigned int *p_total_component_count = nullptr)
{
  TRACE_APPLY (nullptr);

  if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);

  hb_buffer_t *buffer = c->buffer;

  /* Skipping iterator honors lookup flags (ignore marks, mark filtering, ...). */
  hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
  skippy_iter.reset (buffer->idx, count - 1);
  skippy_iter.set_match_func (match_func, match_data, input);

  /*
   * This is perhaps the trickiest part of OpenType... Remarks:
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *
   * - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
   *   it as a ligature glyph.
   *
   * - Ligatures cannot be formed across glyphs attached to different components
   *   of previous ligatures.  Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
   *   LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to eachother.
   *   However, it would be wrong to ligate that SHADDA,FATHA sequence.
   *   There are a couple of exceptions to this:
   *
   *   o If a ligature tries ligating with marks that belong to it itself, go ahead,
   *     assuming that the font designer knows what they are doing (otherwise it can
   *     break Indic stuff when a matra wants to ligate with a conjunct,
   *
   *   o If two marks want to ligate and they belong to different components of the
   *     same ligature glyph, and said ligature glyph is to be ignored according to
   *     mark-filtering rules, then allow.
   *     https://github.com/harfbuzz/harfbuzz/issues/545
   */

  unsigned int total_component_count = 0;
  total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());

  unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());

  /* Lazily-computed answer to "may the base ligature we are attached to be
   * skipped?" -- only resolved the first time it is actually needed. */
  enum {
    LIGBASE_NOT_CHECKED,
    LIGBASE_MAY_NOT_SKIP,
    LIGBASE_MAY_SKIP
  } ligbase = LIGBASE_NOT_CHECKED;

  match_positions[0] = buffer->idx;
  for (unsigned int i = 1; i < count; i++)
  {
    unsigned unsafe_to;
    if (!skippy_iter.next (&unsafe_to))
    {
      *end_position = unsafe_to;
      return_trace (false);
    }

    match_positions[i] = skippy_iter.idx;

    unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
    unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);

    if (first_lig_id && first_lig_comp)
    {
      /* If first component was attached to a previous ligature component,
       * all subsequent components should be attached to the same ligature
       * component, otherwise we shouldn't ligate them... */
      if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
      {
	/* ...unless, we are attached to a base ligature and that base
	 * ligature is ignorable. */
	if (ligbase == LIGBASE_NOT_CHECKED)
	{
	  bool found = false;
	  const auto *out = buffer->out_info;
	  unsigned int j = buffer->out_len;
	  /* Walk backwards through the output to find the base (component 0)
	   * of the ligature the first glyph is attached to. */
	  while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
	  {
	    if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
	    {
	      j--;
	      found = true;
	      break;
	    }
	    j--;
	  }

	  if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
	    ligbase = LIGBASE_MAY_SKIP;
	  else
	    ligbase = LIGBASE_MAY_NOT_SKIP;
	}

	if (ligbase == LIGBASE_MAY_NOT_SKIP)
	  return_trace (false);
      }
    }
    else
    {
      /* If first component was NOT attached to a previous ligature component,
       * all subsequent components should also NOT be attached to any ligature
       * component, unless they are attached to the first component itself! */
      if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
	return_trace (false);
    }

    total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
  }

  *end_position = skippy_iter.idx + 1;

  if (p_total_component_count)
    *p_total_component_count = total_component_count;

  return_trace (true);
}
/* Replaces the matched glyph sequence (positions from a successful
 * match_input) with the single ligature glyph `lig_glyph`, fixing up
 * cluster values, ligature ids and per-mark component numbers.
 *
 * count/match_positions/match_end and total_component_count come from
 * match_input.  Always returns true (via return_trace). */
static inline bool ligate_input (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int match_end,
				 hb_codepoint_t lig_glyph,
				 unsigned int total_component_count)
{
  TRACE_APPLY (nullptr);

  hb_buffer_t *buffer = c->buffer;

  buffer->merge_clusters (buffer->idx, match_end);

  /* - If a base and one or more marks ligate, consider that as a base, NOT
   *   ligature, such that all following marks can still attach to it.
   *   https://github.com/harfbuzz/harfbuzz/issues/1109
   *
   * - If all components of the ligature were marks, we call this a mark ligature.
   *   If it *is* a mark ligature, we don't allocate a new ligature id, and leave
   *   the ligature to keep its old ligature id.  This will allow it to attach to
   *   a base ligature in GPOS.  Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
   *   and LAM,LAM,HEH for a ligature, they will leave SHADDA and FATHA with a
   *   ligature id and component value of 2.  Then if SHADDA,FATHA form a ligature
   *   later, we don't want them to lose their ligature id/component, otherwise
   *   GPOS will fail to correctly position the mark ligature on top of the
   *   LAM,LAM,HEH ligature.  See:
   *     https://bugzilla.gnome.org/show_bug.cgi?id=676343
   *
   * - If a ligature is formed of components that some of which are also ligatures
   *   themselves, and those ligature components had marks attached to *their*
   *   components, we have to attach the marks to the new ligature component
   *   positions!  Now *that*'s tricky!  And these marks may be following the
   *   last component of the whole sequence, so we should loop forward looking
   *   for them and update them.
   *
   *   Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
   *   'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
   *   id and component == 1.  Now, during 'liga', the LAM and the LAM-HEH ligature
   *   form a LAM-LAM-HEH ligature.  We need to reassign the SHADDA and FATHA to
   *   the new ligature with a component value of 2.
   *
   *   This in fact happened to a font...  See:
   *   https://bugzilla.gnome.org/show_bug.cgi?id=437633
   */

  /* Classify the match: base+marks => base, all-marks => mark ligature,
   * otherwise a proper ligature that gets a fresh lig id. */
  bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
  bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
  for (unsigned int i = 1; i < count; i++)
    if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
    {
      is_base_ligature = false;
      is_mark_ligature = false;
      break;
    }
  bool is_ligature = !is_base_ligature && !is_mark_ligature;

  unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
  unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
  unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
  unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
  unsigned int components_so_far = last_num_components;

  if (is_ligature)
  {
    _hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
    if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
    {
      _hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
    }
  }
  c->replace_glyph_with_ligature (lig_glyph, klass);

  /* Walk over the remaining matched components; glyphs skipped in between
   * (marks) are copied through with their component numbers remapped into
   * the new ligature. */
  for (unsigned int i = 1; i < count; i++)
  {
    while (buffer->idx < match_positions[i] && buffer->successful)
    {
      if (is_ligature)
      {
	unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
	if (this_comp == 0)
	  this_comp = last_num_components;
	unsigned int new_lig_comp = components_so_far - last_num_components +
				    hb_min (this_comp, last_num_components);
	  _hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
      }
      (void) buffer->next_glyph ();
    }

    last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
    last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
    components_so_far += last_num_components;

    /* Skip the base glyph */
    buffer->idx++;
  }

  if (!is_mark_ligature && last_lig_id)
  {
    /* Re-adjust components for any marks following. */
    for (unsigned i = buffer->idx; i < buffer->len; ++i)
    {
      if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;

      unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
      if (!this_comp) break;

      unsigned new_lig_comp = components_so_far - last_num_components +
			      hb_min (this_comp, last_num_components);
      _hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
    }
  }
  return_trace (true);
}
1204
match_backtrack(hb_ot_apply_context_t * c,unsigned int count,const HBUINT16 backtrack[],match_func_t match_func,const void * match_data,unsigned int * match_start)1205 static inline bool match_backtrack (hb_ot_apply_context_t *c,
1206 unsigned int count,
1207 const HBUINT16 backtrack[],
1208 match_func_t match_func,
1209 const void *match_data,
1210 unsigned int *match_start)
1211 {
1212 TRACE_APPLY (nullptr);
1213
1214 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1215 skippy_iter.reset (c->buffer->backtrack_len (), count);
1216 skippy_iter.set_match_func (match_func, match_data, backtrack);
1217
1218 for (unsigned int i = 0; i < count; i++)
1219 {
1220 unsigned unsafe_from;
1221 if (!skippy_iter.prev (&unsafe_from))
1222 {
1223 *match_start = unsafe_from;
1224 return_trace (false);
1225 }
1226 }
1227
1228 *match_start = skippy_iter.idx;
1229 return_trace (true);
1230 }
1231
match_lookahead(hb_ot_apply_context_t * c,unsigned int count,const HBUINT16 lookahead[],match_func_t match_func,const void * match_data,unsigned int start_index,unsigned int * end_index)1232 static inline bool match_lookahead (hb_ot_apply_context_t *c,
1233 unsigned int count,
1234 const HBUINT16 lookahead[],
1235 match_func_t match_func,
1236 const void *match_data,
1237 unsigned int start_index,
1238 unsigned int *end_index)
1239 {
1240 TRACE_APPLY (nullptr);
1241
1242 hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
1243 skippy_iter.reset (start_index - 1, count);
1244 skippy_iter.set_match_func (match_func, match_data, lookahead);
1245
1246 for (unsigned int i = 0; i < count; i++)
1247 {
1248 unsigned unsafe_to;
1249 if (!skippy_iter.next (&unsafe_to))
1250 {
1251 *end_index = unsafe_to;
1252 return_trace (false);
1253 }
1254 }
1255
1256 *end_index = skippy_iter.idx + 1;
1257 return_trace (true);
1258 }
1259
1260
1261
/* OpenType SequenceLookupRecord: tells the engine to apply lookup
 * `lookupListIndex` at matched-sequence position `sequenceIndex`.
 * Wire-format struct; layout must stay exactly 4 bytes. */
struct LookupRecord
{
  /* Copies this record into the serializer, remapping lookupListIndex
   * through `lookup_map` (old index -> subset index).  Fails if the
   * remapped index overflows 16 bits. */
  bool serialize (hb_serialize_context_t *c,
		  const hb_map_t *lookup_map) const
  {
    TRACE_SERIALIZE (this);
    auto *out = c->embed (*this);
    if (unlikely (!out)) return_trace (false);

    return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  HBUINT16	sequenceIndex;		/* Index into current glyph
					 * sequence--first glyph = 0 */
  HBUINT16	lookupListIndex;	/* Lookup to apply to that
					 * position--zero--based */
  public:
  DEFINE_SIZE_STATIC (4);
};
1287
serialize_lookuprecord_array(hb_serialize_context_t * c,const hb_array_t<const LookupRecord> lookupRecords,const hb_map_t * lookup_map)1288 static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
1289 const hb_array_t<const LookupRecord> lookupRecords,
1290 const hb_map_t *lookup_map)
1291 {
1292 unsigned count = 0;
1293 for (const LookupRecord& r : lookupRecords)
1294 {
1295 if (!lookup_map->has (r.lookupListIndex))
1296 continue;
1297
1298 if (!r.serialize (c, lookup_map))
1299 return 0;
1300
1301 count++;
1302 }
1303 return count;
1304 }
1305
/* The three (Chain)SequenceContext subtable flavors: format 1 matches glyph
 * ids, format 2 matches ClassDef classes, format 3 matches Coverage tables. */
enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
1307
/* Glyph-closure helper for context rules: for each LookupRecord, computes the
 * set of glyphs that can occupy its sequence position ("active glyphs"),
 * pushes that set on the closure context's stack, and recurses into the
 * referenced lookup.
 *
 * `value` is the rule's first-position key (first glyph for format 1, class
 * for format 2, unused for format 3); `data` and `intersected_glyphs_func`
 * interpret input[] per `context_format`. */
static void context_closure_recurse_lookups (hb_closure_context_t *c,
					     unsigned inputCount, const HBUINT16 input[],
					     unsigned lookupCount,
					     const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
					     unsigned value,
					     ContextFormat context_format,
					     const void *data,
					     intersected_glyphs_func_t intersected_glyphs_func)
{
  hb_set_t *covered_seq_indicies = hb_set_create ();
  for (unsigned int i = 0; i < lookupCount; i++)
  {
    unsigned seqIndex = lookupRecord[i].sequenceIndex;
    if (seqIndex >= inputCount) continue;

    bool has_pos_glyphs = false;
    hb_set_t pos_glyphs;

    /* Only compute the position set the first time this sequence index is
     * seen; later records at the same index reuse the recursion's glyphs. */
    if (hb_set_is_empty (covered_seq_indicies) || !hb_set_has (covered_seq_indicies, seqIndex))
    {
      has_pos_glyphs = true;
      if (seqIndex == 0)
      {
        /* First position: derive from `value` per the subtable format. */
        switch (context_format) {
        case ContextFormat::SimpleContext:
          pos_glyphs.add (value);
          break;
        case ContextFormat::ClassBasedContext:
          intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs);
          break;
        case ContextFormat::CoverageBasedContext:
          pos_glyphs.set (c->parent_active_glyphs ());
          break;
        }
      }
      else
      {
        /* Non-first position: intersect input[seqIndex-1] with the closure's
         * current glyph set, per the subtable format. */
        const void *input_data = input;
        unsigned input_value = seqIndex - 1;
        if (context_format != ContextFormat::SimpleContext)
        {
          input_data = data;
          input_value = input[seqIndex - 1];
        }

        intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs);
      }
    }

    covered_seq_indicies->add (seqIndex);
    if (has_pos_glyphs) {
      c->push_cur_active_glyphs () = pos_glyphs;
    } else {
      c->push_cur_active_glyphs ().set (*c->glyphs);
    }

    /* Format 3 has one Coverage per position *including* the first, so the
     * effective sequence is one longer than inputCount. */
    unsigned endIndex = inputCount;
    if (context_format == ContextFormat::CoverageBasedContext)
      endIndex += 1;

    c->recurse (lookupRecord[i].lookupListIndex, covered_seq_indicies, seqIndex, endIndex);

    c->pop_cur_done_glyphs ();
  }

  hb_set_destroy (covered_seq_indicies);
}
1375
1376 template <typename context_t>
recurse_lookups(context_t * c,unsigned int lookupCount,const LookupRecord lookupRecord[])1377 static inline void recurse_lookups (context_t *c,
1378 unsigned int lookupCount,
1379 const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
1380 {
1381 for (unsigned int i = 0; i < lookupCount; i++)
1382 c->recurse (lookupRecord[i].lookupListIndex);
1383 }
1384
/* Applies each LookupRecord of a matched context rule, recursing into the
 * referenced lookups at the recorded sequence positions and keeping
 * match_positions/end consistent as recursed lookups insert or delete
 * glyphs.  `count` and `match_positions` come from match_input; `match_end`
 * is one past the last matched buffer position. */
static inline void apply_lookup (hb_ot_apply_context_t *c,
				 unsigned int count, /* Including the first glyph */
				 unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
				 unsigned int lookupCount,
				 const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
				 unsigned int match_end)
{
  hb_buffer_t *buffer = c->buffer;
  int end;

  /* All positions are distance from beginning of *output* buffer.
   * Adjust. */
  {
    unsigned int bl = buffer->backtrack_len ();
    end = bl + match_end - buffer->idx;

    int delta = bl - buffer->idx;
    /* Convert positions to new indexing. */
    for (unsigned int j = 0; j < count; j++)
      match_positions[j] += delta;
  }

  for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
  {
    unsigned int idx = lookupRecord[i].sequenceIndex;
    if (idx >= count)
      continue;

    /* Don't recurse to ourself at same position.
     * Note that this test is too naive, it doesn't catch longer loops. */
    if (unlikely (idx == 0 && lookupRecord[i].lookupListIndex == c->lookup_index))
      continue;

    if (unlikely (!buffer->move_to (match_positions[idx])))
      break;

    /* Bail when the work budget is exhausted (sanitization against
     * pathological fonts). */
    if (unlikely (buffer->max_ops <= 0))
      break;

    unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
    if (!c->recurse (lookupRecord[i].lookupListIndex))
      continue;

    unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
    int delta = new_len - orig_len;

    if (!delta)
      continue;

    /* Recursed lookup changed buffer len.  Adjust.
     *
     * TODO:
     *
     * Right now, if buffer length increased by n, we assume n new glyphs
     * were added right after the current position, and if buffer length
     * was decreased by n, we assume n match positions after the current
     * one where removed.  The former (buffer length increased) case is
     * fine, but the decrease case can be improved in at least two ways,
     * both of which are significant:
     *
     *   - If recursed-to lookup is MultipleSubst and buffer length
     *     decreased, then it's current match position that was deleted,
     *     NOT the one after it.
     *
     *   - If buffer length was decreased by n, it does not necessarily
     *     mean that n match positions where removed, as there might
     *     have been marks and default-ignorables in the sequence.  We
     *     should instead drop match positions between current-position
     *     and current-position + n instead.  Though, am not sure which
     *     one is better.  Both cases have valid uses.  Sigh.
     *
     * It should be possible to construct tests for both of these cases.
     */

    end += delta;
    if (end <= int (match_positions[idx]))
    {
      /* End might end up being smaller than match_positions[idx] if the recursed
       * lookup ended up removing many items, more than we have had matched.
       * Just never rewind end back and get out of here.
       * https://bugs.chromium.org/p/chromium/issues/detail?id=659496 */
      end = match_positions[idx];
      /* There can't be any further changes. */
      break;
    }

    unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */

    if (delta > 0)
    {
      if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
	break;
    }
    else
    {
      /* NOTE: delta is negative. */
      delta = hb_max (delta, (int) next - (int) count);
      next -= delta;
    }

    /* Shift! */
    memmove (match_positions + next + delta, match_positions + next,
	     (count - next) * sizeof (match_positions[0]));
    next += delta;
    count += delta;

    /* Fill in new entries. */
    for (unsigned int j = idx + 1; j < next; j++)
      match_positions[j] = match_positions[j - 1] + 1;

    /* And fixup the rest. */
    for (; next < count; next++)
      match_positions[next] += delta;
  }

  (void) buffer->move_to (end);
}
1502
1503
1504
1505 /* Contextual lookups */
1506
/* Per-operation bundles of callbacks plus the format-specific table they
 * interpret `input[]` against (glyph array / ClassDef / Coverage base). */

struct ContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;	/* Which subtable flavor `intersects_data` belongs to. */
  const void *intersects_data;
};

struct ContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data;
};

struct ContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data;
};
1525
context_intersects(const hb_set_t * glyphs,unsigned int inputCount,const HBUINT16 input[],ContextClosureLookupContext & lookup_context)1526 static inline bool context_intersects (const hb_set_t *glyphs,
1527 unsigned int inputCount, /* Including the first glyph (not matched) */
1528 const HBUINT16 input[], /* Array of input values--start with second glyph */
1529 ContextClosureLookupContext &lookup_context)
1530 {
1531 return array_is_subset_of (glyphs,
1532 inputCount ? inputCount - 1 : 0, input,
1533 lookup_context.funcs.intersects, lookup_context.intersects_data);
1534 }
1535
context_closure_lookup(hb_closure_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],unsigned value,ContextClosureLookupContext & lookup_context)1536 static inline void context_closure_lookup (hb_closure_context_t *c,
1537 unsigned int inputCount, /* Including the first glyph (not matched) */
1538 const HBUINT16 input[], /* Array of input values--start with second glyph */
1539 unsigned int lookupCount,
1540 const LookupRecord lookupRecord[],
1541 unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
1542 ContextClosureLookupContext &lookup_context)
1543 {
1544 if (context_intersects (c->glyphs,
1545 inputCount, input,
1546 lookup_context))
1547 context_closure_recurse_lookups (c,
1548 inputCount, input,
1549 lookupCount, lookupRecord,
1550 value,
1551 lookup_context.context_format,
1552 lookup_context.intersects_data,
1553 lookup_context.funcs.intersected_glyphs);
1554 }
1555
context_collect_glyphs_lookup(hb_collect_glyphs_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextCollectGlyphsLookupContext & lookup_context)1556 static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
1557 unsigned int inputCount, /* Including the first glyph (not matched) */
1558 const HBUINT16 input[], /* Array of input values--start with second glyph */
1559 unsigned int lookupCount,
1560 const LookupRecord lookupRecord[],
1561 ContextCollectGlyphsLookupContext &lookup_context)
1562 {
1563 collect_array (c, c->input,
1564 inputCount ? inputCount - 1 : 0, input,
1565 lookup_context.funcs.collect, lookup_context.collect_data);
1566 recurse_lookups (c,
1567 lookupCount, lookupRecord);
1568 }
1569
context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ContextApplyLookupContext & lookup_context)1570 static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
1571 unsigned int inputCount, /* Including the first glyph (not matched) */
1572 const HBUINT16 input[], /* Array of input values--start with second glyph */
1573 unsigned int lookupCount HB_UNUSED,
1574 const LookupRecord lookupRecord[] HB_UNUSED,
1575 ContextApplyLookupContext &lookup_context)
1576 {
1577 return would_match_input (c,
1578 inputCount, input,
1579 lookup_context.funcs.match, lookup_context.match_data);
1580 }
context_apply_lookup(hb_ot_apply_context_t * c,unsigned int inputCount,const HBUINT16 input[],unsigned int lookupCount,const LookupRecord lookupRecord[],ContextApplyLookupContext & lookup_context)1581 static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
1582 unsigned int inputCount, /* Including the first glyph (not matched) */
1583 const HBUINT16 input[], /* Array of input values--start with second glyph */
1584 unsigned int lookupCount,
1585 const LookupRecord lookupRecord[],
1586 ContextApplyLookupContext &lookup_context)
1587 {
1588 unsigned match_end = 0;
1589 unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
1590 if (match_input (c,
1591 inputCount, input,
1592 lookup_context.funcs.match, lookup_context.match_data,
1593 &match_end, match_positions))
1594 {
1595 c->buffer->unsafe_to_break (c->buffer->idx, match_end);
1596 apply_lookup (c,
1597 inputCount, match_positions,
1598 lookupCount, lookupRecord,
1599 match_end);
1600 return true;
1601 }
1602 else
1603 {
1604 c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
1605 return false;
1606 }
1607 }
1608
1609 struct Rule
1610 {
intersectsOT::Rule1611 bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
1612 {
1613 return context_intersects (glyphs,
1614 inputCount, inputZ.arrayZ,
1615 lookup_context);
1616 }
1617
closureOT::Rule1618 void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
1619 {
1620 if (unlikely (c->lookup_limit_exceeded ())) return;
1621
1622 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1623 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1624 context_closure_lookup (c,
1625 inputCount, inputZ.arrayZ,
1626 lookupCount, lookupRecord.arrayZ,
1627 value, lookup_context);
1628 }
1629
closure_lookupsOT::Rule1630 void closure_lookups (hb_closure_lookups_context_t *c,
1631 ContextClosureLookupContext &lookup_context) const
1632 {
1633 if (unlikely (c->lookup_limit_exceeded ())) return;
1634 if (!intersects (c->glyphs, lookup_context)) return;
1635
1636 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1637 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1638 recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
1639 }
1640
collect_glyphsOT::Rule1641 void collect_glyphs (hb_collect_glyphs_context_t *c,
1642 ContextCollectGlyphsLookupContext &lookup_context) const
1643 {
1644 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1645 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1646 context_collect_glyphs_lookup (c,
1647 inputCount, inputZ.arrayZ,
1648 lookupCount, lookupRecord.arrayZ,
1649 lookup_context);
1650 }
1651
would_applyOT::Rule1652 bool would_apply (hb_would_apply_context_t *c,
1653 ContextApplyLookupContext &lookup_context) const
1654 {
1655 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1656 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1657 return context_would_apply_lookup (c,
1658 inputCount, inputZ.arrayZ,
1659 lookupCount, lookupRecord.arrayZ,
1660 lookup_context);
1661 }
1662
applyOT::Rule1663 bool apply (hb_ot_apply_context_t *c,
1664 ContextApplyLookupContext &lookup_context) const
1665 {
1666 TRACE_APPLY (this);
1667 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1668 (inputZ.as_array (inputCount ? inputCount - 1 : 0));
1669 return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
1670 }
1671
serializeOT::Rule1672 bool serialize (hb_serialize_context_t *c,
1673 const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
1674 const hb_map_t *lookup_map) const
1675 {
1676 TRACE_SERIALIZE (this);
1677 auto *out = c->start_embed (this);
1678 if (unlikely (!c->extend_min (out))) return_trace (false);
1679
1680 out->inputCount = inputCount;
1681 const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);
1682 for (const auto org : input)
1683 {
1684 HBUINT16 d;
1685 d = input_mapping->get (org);
1686 c->copy (d);
1687 }
1688
1689 const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
1690 (inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
1691
1692 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
1693 return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
1694 }
1695
  /* Subset this rule.  Fails (rule dropped) if the rule has no input
   * glyphs or if any input value does not survive the mapping.
   * klass_map == nullptr means glyph-based context (Format1): map through
   * the plan's glyph map; otherwise class-based (Format2): map through
   * klass_map. */
  bool subset (hb_subset_context_t *c,
               const hb_map_t *lookup_map,
               const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);
    /* inputCount is checked here, so the unguarded `inputCount - 1` below
     * cannot wrap. */
    if (unlikely (!inputCount)) return_trace (false);
    const hb_array_t<const HBUINT16> input = inputZ.as_array (inputCount - 1);

    const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
    if (!hb_all (input, mapping)) return_trace (false);
    return_trace (serialize (c->serializer, mapping, lookup_map));
  }
1708
1709 public:
  /* Validate the two counts plus the whole variable-length tail in one
   * contiguous range check: (inputCount - 1) HBUINT16 input values
   * followed by lookupCount LookupRecords. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (inputCount.sanitize (c) &&
                  lookupCount.sanitize (c) &&
                  c->check_range (inputZ.arrayZ,
                                  inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
                                  LookupRecord::static_size * lookupCount));
  }

  protected:
  HBUINT16      inputCount;     /* Total number of glyphs in input
                                 * glyph sequence--includes the first
                                 * glyph */
  HBUINT16      lookupCount;    /* Number of LookupRecords */
  UnsizedArrayOf<HBUINT16>
                inputZ;         /* Array of match inputs--start with
                                 * second glyph */
  /*UnsizedArrayOf<LookupRecord>
                lookupRecordX;*/ /* Array of LookupRecords--in
                                  * design order */
  public:
  DEFINE_SIZE_ARRAY (4, inputZ);
};
1734
/* RuleSet: an array of (offsets to) Rule tables, tried in order of
 * preference.  Shared by ContextFormat1 (one set per first glyph) and
 * ContextFormat2 (one set per glyph class). */
struct RuleSet
{
  /* True if any rule in the set can match entirely within `glyphs`. */
  bool intersects (const hb_set_t *glyphs,
                   ContextClosureLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))    /* offset -> Rule& */
    | hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Run glyph closure on every rule; `value` identifies the matched
   * coverage index / class for the first (implicit) glyph. */
  void closure (hb_closure_context_t *c, unsigned value,
                ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
    ;
  }

  /* Accumulate the set of lookups reachable through any rule. */
  void closure_lookups (hb_closure_lookups_context_t *c,
                        ContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  /* Collect every glyph any rule can match. */
  void collect_glyphs (hb_collect_glyphs_context_t *c,
                       ContextCollectGlyphsLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* True if any rule would apply to the glyph sequence in `c`. */
  bool would_apply (hb_would_apply_context_t *c,
                    ContextApplyLookupContext &lookup_context) const
  {
    return
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
    | hb_any
    ;
  }

  /* Apply the first rule that matches (hb_any short-circuits, so later
   * rules are not attempted once one applies). */
  bool apply (hb_ot_apply_context_t *c,
              ContextApplyLookupContext &lookup_context) const
  {
    TRACE_APPLY (this);
    return_trace (
    + hb_iter (rule)
    | hb_map (hb_add (this))
    | hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
    | hb_any
    )
    ;
  }

  /* Subset each rule, dropping rules that fail to subset.  Each failed
   * rule's partial output is reverted individually (o_snap); if no rule
   * survives, the whole RuleSet is reverted (snap) and false returned. */
  bool subset (hb_subset_context_t *c,
               const hb_map_t *lookup_map,
               const hb_map_t *klass_map = nullptr) const
  {
    TRACE_SUBSET (this);

    auto snap = c->serializer->snapshot ();
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    for (const Offset16To<Rule>& _ : rule)
    {
      if (!_) continue;                 /* skip null offsets */
      auto o_snap = c->serializer->snapshot ();
      auto *o = out->rule.serialize_append (c->serializer);
      if (unlikely (!o)) continue;

      if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
      {
        /* Rule didn't survive subsetting: undo its slot and its bytes. */
        out->rule.pop ();
        c->serializer->revert (o_snap);
      }
    }

    bool ret = bool (out->rule);
    if (!ret) c->serializer->revert (snap);

    return_trace (ret);
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (rule.sanitize (c, this));
  }

  protected:
  Array16OfOffset16To<Rule>
                rule;           /* Array of Rule tables
                                 * ordered by preference */
  public:
  DEFINE_SIZE_ARRAY (2, rule);
};
1847
1848
/* SequenceContextFormat1: simple glyph contexts.  A Coverage table selects
 * the first glyph; ruleSet is indexed by coverage index, one RuleSet per
 * covered first glyph. */
struct ContextFormat1
{
  /* True if some covered glyph is in `glyphs` and its rule set intersects. */
  bool intersects (const hb_set_t *glyphs) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    return
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*glyphs, hb_first)     /* keep covered glyphs present in the set */
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can substitute sequences; never 1:1. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure: for each covered glyph that is currently active,
   * close over its rule set.  Maintains the active-glyphs stack around
   * the recursion. */
  void closure (hb_closure_context_t *c) const
  {
    hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
    get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
                                                 cur_active_glyphs);

    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
    | hb_filter ([&] (hb_codepoint_t _) {
      return c->previous_parent_active_glyphs ().has (_);
    }, hb_first)
    | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
    | hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
    ;

    c->pop_cur_done_glyphs ();
  }

  /* Collect lookups reachable from rule sets whose first glyph survives. */
  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    struct ContextClosureLookupContext lookup_context = {
      {intersects_glyph, intersected_glyph},
      ContextFormat::SimpleContext,
      nullptr
    };

    + hb_zip (this+coverage, ruleSet)
    | hb_filter (*c->glyphs, hb_first)
    | hb_map (hb_second)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
    ;
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  /* Collect input coverage plus every glyph any rule can match. */
  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_glyph},
      nullptr
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  /* NOTE(review): no NOT_COVERED check here — relies on ruleSet[] returning
   * Null on out-of-range coverage index. */
  bool would_apply (hb_would_apply_context_t *c) const
  {
    const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* `likely`: most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED))
      return_trace (false);

    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_glyph},
      nullptr
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subset: keep only (glyph, ruleSet) pairs where the glyph is retained
   * AND its rule set subsets successfully; rebuild coverage from the
   * surviving, remapped glyphs. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
    const hb_map_t &glyph_map = *c->plan->glyph_map;

    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    hb_sorted_vector_t<hb_codepoint_t> new_coverage;
    + hb_zip (this+coverage, ruleSet)
    | hb_filter (glyphset, hb_first)
    | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
    | hb_map (hb_first)
    | hb_map (glyph_map)
    | hb_sink (new_coverage)
    ;

    out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
    return_trace (bool (new_coverage));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 1 */
  Offset16To<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  Array16OfOffset16To<RuleSet>
                ruleSet;        /* Array of RuleSet tables
                                 * ordered by Coverage Index */
  public:
  DEFINE_SIZE_ARRAY (6, ruleSet);
};
1996
1997
/* SequenceContextFormat2: class-based glyph contexts.  Coverage gates the
 * first glyph; a ClassDef maps glyphs to classes; ruleSet is indexed by
 * the first glyph's class. */
struct ContextFormat2
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverage).intersects (glyphs))
      return false;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    /* Classes actually reachable as a *first* glyph: classes of glyphs
     * that are both covered and in the query set. */
    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);

    hb_set_t coverage_glyph_classes;
    class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);


    return
    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate                      /* pair rule sets with their class index */
    | hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
              { return class_def.intersects_class (glyphs, p.first) &&
                       coverage_glyph_classes.has (p.first) &&
                       p.second.intersects (glyphs, lookup_context); })
    | hb_any
    ;
  }

  /* Contextual lookups can substitute sequences; never 1:1. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure over rule sets whose class intersects the active glyphs.
   * Maintains the active-glyphs stack around the recursion. */
  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
    get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
                                                 cur_active_glyphs);

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    + hb_enumerate (ruleSet)
    | hb_filter ([&] (unsigned _)
                 { return class_def.intersects_class (&c->parent_active_glyphs (), _); },
                 hb_first)
    | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<RuleSet>&> _)
                {
                  const RuleSet& rule_set = this+_.second;
                  rule_set.closure (c, _.first, lookup_context);
                })
    ;

    c->pop_cur_done_glyphs ();
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!(this+coverage).intersects (c->glyphs))
      return;

    const ClassDef &class_def = this+classDef;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_class, intersected_class_glyphs},
      ContextFormat::ClassBasedContext,
      &class_def
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_enumerate
    | hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
                 { return class_def.intersects_class (c->glyphs, p.first); })
    | hb_map (hb_second)
    | hb_apply ([&] (const RuleSet & _)
                { _.closure_lookups (c, lookup_context); });
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverage).collect_coverage (c->input);

    const ClassDef &class_def = this+classDef;
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_class},
      &class_def
    };

    + hb_iter (ruleSet)
    | hb_map (hb_add (this))
    | hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
    ;
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const ClassDef &class_def = this+classDef;
    unsigned int index = class_def.get_class (c->glyphs[0]);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return rule_set.would_apply (c, lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverage; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
    /* `likely`: most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED)) return_trace (false);

    /* Coverage only gates entry; the rule set is chosen by class. */
    const ClassDef &class_def = this+classDef;
    index = class_def.get_class (c->buffer->cur().codepoint);
    const RuleSet &rule_set = this+ruleSet[index];
    struct ContextApplyLookupContext lookup_context = {
      {match_class},
      &class_def
    };
    return_trace (rule_set.apply (c, lookup_context));
  }

  /* Subset.  Rule-set slots must stay aligned with remapped class values,
   * so a slot is appended for every retained class; rule sets whose class
   * is unreachable as a first glyph stay empty, and empty trailing slots
   * are pruned at the end. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (*this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
    out->format = format;
    if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
      return_trace (false);

    hb_map_t klass_map;
    out->classDef.serialize_subset (c, classDef, this, &klass_map);

    /* Classes reachable as a first glyph after subsetting. */
    const hb_set_t* glyphset = c->plan->glyphset_gsub ();
    hb_set_t retained_coverage_glyphs;
    (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);

    hb_set_t coverage_glyph_classes;
    (this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);

    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
    bool ret = true;
    int non_zero_index = -1, index = 0;    /* last slot that serialized non-empty */
    for (const auto& _ : + hb_enumerate (ruleSet)
                         | hb_filter (klass_map, hb_first))
    {
      auto *o = out->ruleSet.serialize_append (c->serializer);
      if (unlikely (!o))
      {
        ret = false;
        break;
      }

      if (coverage_glyph_classes.has (_.first) &&
          o->serialize_subset (c, _.second, this, lookup_map, &klass_map))
        non_zero_index = index;

      index++;
    }

    if (!ret || non_zero_index == -1) return_trace (false);

    //prune empty trailing ruleSets
    --index;
    while (index > non_zero_index)
    {
      out->ruleSet.pop ();
      index--;
    }

    return_trace (bool (out->ruleSet));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 2 */
  Offset16To<Coverage>
                coverage;       /* Offset to Coverage table--from
                                 * beginning of table */
  Offset16To<ClassDef>
                classDef;       /* Offset to glyph ClassDef table--from
                                 * beginning of table */
  Array16OfOffset16To<RuleSet>
                ruleSet;        /* Array of RuleSet tables
                                 * ordered by class */
  public:
  DEFINE_SIZE_ARRAY (8, ruleSet);
};
2209
2210
/* SequenceContextFormat3: coverage-based contexts.  A single rule: one
 * Coverage table per input position (coverageZ[0] covers the first glyph),
 * with the LookupRecord array following the coverage-offset array.
 * The remaining offsets are passed to the shared context helpers as raw
 * HBUINT16 values, hence the casts below. */
struct ContextFormat3
{
  bool intersects (const hb_set_t *glyphs) const
  {
    if (!(this+coverageZ[0]).intersects (glyphs))
      return false;

    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      this
    };
    /* Skip coverageZ[0]: already checked above; helpers take the rest. */
    return context_intersects (glyphs,
                               glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                               lookup_context);
  }

  /* Contextual lookups can substitute sequences; never 1:1. */
  bool may_have_non_1to1 () const
  { return true; }

  /* Glyph closure; maintains the active-glyphs stack around recursion. */
  void closure (hb_closure_context_t *c) const
  {
    if (!(this+coverageZ[0]).intersects (c->glyphs))
      return;

    hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
    get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
                                                 cur_active_glyphs);


    /* LookupRecords live right after the glyphCount coverage offsets. */
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextClosureLookupContext lookup_context = {
      {intersects_coverage, intersected_coverage_glyphs},
      ContextFormat::CoverageBasedContext,
      this
    };
    context_closure_lookup (c,
                            glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                            lookupCount, lookupRecord,
                            0, lookup_context);

    c->pop_cur_done_glyphs ();
  }

  void closure_lookups (hb_closure_lookups_context_t *c) const
  {
    if (!intersects (c->glyphs))
      return;
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    recurse_lookups (c, lookupCount, lookupRecord);
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}

  void collect_glyphs (hb_collect_glyphs_context_t *c) const
  {
    (this+coverageZ[0]).collect_coverage (c->input);

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextCollectGlyphsLookupContext lookup_context = {
      {collect_coverage},
      this
    };

    context_collect_glyphs_lookup (c,
                                   glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                                   lookupCount, lookupRecord,
                                   lookup_context);
  }

  bool would_apply (hb_would_apply_context_t *c) const
  {
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return context_would_apply_lookup (c,
                                       glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
                                       lookupCount, lookupRecord,
                                       lookup_context);
  }

  const Coverage &get_coverage () const { return this+coverageZ[0]; }

  bool apply (hb_ot_apply_context_t *c) const
  {
    TRACE_APPLY (this);
    unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
    /* `likely`: most glyphs are not covered by any given lookup. */
    if (likely (index == NOT_COVERED)) return_trace (false);

    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    struct ContextApplyLookupContext lookup_context = {
      {match_coverage},
      this
    };
    return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
  }

  /* Subset: all coverage tables must survive (any failure drops the whole
   * subtable), then the lookup records are remapped. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->serializer->start_embed (this);
    if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->glyphCount = glyphCount;

    auto coverages = coverageZ.as_array (glyphCount);

    for (const Offset16To<Coverage>& offset : coverages)
    {
      /* TODO(subset) This looks like should not be necessary to write this way. */
      auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
      if (unlikely (!o)) return_trace (false);
      if (!o->serialize_subset (c, offset, this)) return_trace (false);
    }

    const UnsizedArrayOf<LookupRecord>& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;


    unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
    return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }

  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!c->check_struct (this)) return_trace (false);
    unsigned int count = glyphCount;
    if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
    if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
    for (unsigned int i = 0; i < count; i++)
      if (!coverageZ[i].sanitize (c, this)) return_trace (false);
    const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
    return_trace (c->check_array (lookupRecord, lookupCount));
  }

  protected:
  HBUINT16      format;         /* Format identifier--format = 3 */
  HBUINT16      glyphCount;     /* Number of glyphs in the input glyph
                                 * sequence */
  HBUINT16      lookupCount;    /* Number of LookupRecords */
  UnsizedArrayOf<Offset16To<Coverage>>
                coverageZ;      /* Array of offsets to Coverage
                                 * table in glyph sequence order */
  /*UnsizedArrayOf<LookupRecord>
                lookupRecordX;*/ /* Array of LookupRecords--in
                                  * design order */
  public:
  DEFINE_SIZE_ARRAY (6, coverageZ);
};
2364
/* Context: format-dispatching wrapper over the three SequenceContext
 * subtable formats.  All operations route through dispatch(), which
 * forwards to the member matching the on-disk format value. */
struct Context
{
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    /* may_dispatch also sanitizes access to u.format itself. */
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());  /* unknown format: no-op */
    }
  }

  protected:
  union {
  HBUINT16              format;         /* Format identifier */
  ContextFormat1        format1;
  ContextFormat2        format2;
  ContextFormat3        format3;
  } u;
};
2388
2389
2390 /* Chaining Contextual lookups */
2391
/* Closure context for chaining-contextual lookups.  One intersects-data
 * slot per sequence part: [0] backtrack, [1] input, [2] lookahead. */
struct ChainContextClosureLookupContext
{
  ContextClosureFuncs funcs;
  ContextFormat context_format;
  const void *intersects_data[3];
};
2398
/* Collect-glyphs context for chaining-contextual lookups.  One collect-data
 * slot per sequence part: [0] backtrack, [1] input, [2] lookahead. */
struct ChainContextCollectGlyphsLookupContext
{
  ContextCollectGlyphsFuncs funcs;
  const void *collect_data[3];
};
2404
/* Apply context for chaining-contextual lookups.  One match-data slot per
 * sequence part: [0] backtrack, [1] input, [2] lookahead. */
struct ChainContextApplyLookupContext
{
  ContextApplyFuncs funcs;
  const void *match_data[3];
};
2410
chain_context_intersects(const hb_set_t * glyphs,unsigned int backtrackCount,const HBUINT16 backtrack[],unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[],ChainContextClosureLookupContext & lookup_context)2411 static inline bool chain_context_intersects (const hb_set_t *glyphs,
2412 unsigned int backtrackCount,
2413 const HBUINT16 backtrack[],
2414 unsigned int inputCount, /* Including the first glyph (not matched) */
2415 const HBUINT16 input[], /* Array of input values--start with second glyph */
2416 unsigned int lookaheadCount,
2417 const HBUINT16 lookahead[],
2418 ChainContextClosureLookupContext &lookup_context)
2419 {
2420 return array_is_subset_of (glyphs,
2421 backtrackCount, backtrack,
2422 lookup_context.funcs.intersects, lookup_context.intersects_data[0])
2423 && array_is_subset_of (glyphs,
2424 inputCount ? inputCount - 1 : 0, input,
2425 lookup_context.funcs.intersects, lookup_context.intersects_data[1])
2426 && array_is_subset_of (glyphs,
2427 lookaheadCount, lookahead,
2428 lookup_context.funcs.intersects, lookup_context.intersects_data[2]);
2429 }
2430
/* Glyph closure for one chaining rule: if the rule can match within the
 * current glyph set, recurse into its lookup records (passing the input
 * sequence so nested closure can track per-position glyph sets). */
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
                                                 unsigned int backtrackCount,
                                                 const HBUINT16 backtrack[],
                                                 unsigned int inputCount, /* Including the first glyph (not matched) */
                                                 const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                 unsigned int lookaheadCount,
                                                 const HBUINT16 lookahead[],
                                                 unsigned int lookupCount,
                                                 const LookupRecord lookupRecord[],
                                                 unsigned value, /* coverage index or class of the first glyph */
                                                 ChainContextClosureLookupContext &lookup_context)
{
  if (chain_context_intersects (c->glyphs,
                                backtrackCount, backtrack,
                                inputCount, input,
                                lookaheadCount, lookahead,
                                lookup_context))
    context_closure_recurse_lookups (c,
                                     inputCount, input,
                                     lookupCount, lookupRecord,
                                     value,
                                     lookup_context.context_format,
                                     lookup_context.intersects_data[1], /* input-sequence data */
                                     lookup_context.funcs.intersected_glyphs);
}
2456
/* Collect every glyph a chaining rule can touch: backtrack glyphs into
 * c->before, input glyphs into c->input (first glyph implicit, hence
 * inputCount - 1), lookahead glyphs into c->after; then recurse into the
 * rule's lookups. */
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
                                                        unsigned int backtrackCount,
                                                        const HBUINT16 backtrack[],
                                                        unsigned int inputCount, /* Including the first glyph (not matched) */
                                                        const HBUINT16 input[], /* Array of input values--start with second glyph */
                                                        unsigned int lookaheadCount,
                                                        const HBUINT16 lookahead[],
                                                        unsigned int lookupCount,
                                                        const LookupRecord lookupRecord[],
                                                        ChainContextCollectGlyphsLookupContext &lookup_context)
{
  collect_array (c, c->before,
                 backtrackCount, backtrack,
                 lookup_context.funcs.collect, lookup_context.collect_data[0]);
  collect_array (c, c->input,
                 inputCount ? inputCount - 1 : 0, input,
                 lookup_context.funcs.collect, lookup_context.collect_data[1]);
  collect_array (c, c->after,
                 lookaheadCount, lookahead,
                 lookup_context.funcs.collect, lookup_context.collect_data[2]);
  recurse_lookups (c,
                   lookupCount, lookupRecord);
}
2480
chain_context_would_apply_lookup(hb_would_apply_context_t * c,unsigned int backtrackCount,const HBUINT16 backtrack[]HB_UNUSED,unsigned int inputCount,const HBUINT16 input[],unsigned int lookaheadCount,const HBUINT16 lookahead[]HB_UNUSED,unsigned int lookupCount HB_UNUSED,const LookupRecord lookupRecord[]HB_UNUSED,ChainContextApplyLookupContext & lookup_context)2481 static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
2482 unsigned int backtrackCount,
2483 const HBUINT16 backtrack[] HB_UNUSED,
2484 unsigned int inputCount, /* Including the first glyph (not matched) */
2485 const HBUINT16 input[], /* Array of input values--start with second glyph */
2486 unsigned int lookaheadCount,
2487 const HBUINT16 lookahead[] HB_UNUSED,
2488 unsigned int lookupCount HB_UNUSED,
2489 const LookupRecord lookupRecord[] HB_UNUSED,
2490 ChainContextApplyLookupContext &lookup_context)
2491 {
2492 return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
2493 && would_match_input (c,
2494 inputCount, input,
2495 lookup_context.funcs.match, lookup_context.match_data[1]);
2496 }
2497
/* Apply one chaining rule at the buffer's current position.
 *
 * Match order: input first (advancing match_end / match_positions), then
 * lookahead (from match_end forward), then backtrack (walking backwards
 * through the out-buffer).  On any failure the relevant buffer span is
 * flagged unsafe-to-concat so the shaper won't cache across it; on success
 * the span is flagged unsafe-to-break and the rule's lookups are applied
 * at the recorded input positions. */
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
                                               unsigned int backtrackCount,
                                               const HBUINT16 backtrack[],
                                               unsigned int inputCount, /* Including the first glyph (not matched) */
                                               const HBUINT16 input[], /* Array of input values--start with second glyph */
                                               unsigned int lookaheadCount,
                                               const HBUINT16 lookahead[],
                                               unsigned int lookupCount,
                                               const LookupRecord lookupRecord[],
                                               ChainContextApplyLookupContext &lookup_context)
{
  unsigned end_index = c->buffer->idx;
  unsigned match_end = 0;
  unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
  /* `(end_index = match_end)` deliberately re-seeds end_index between the
   * two match phases so lookahead extends it from the input match's end. */
  if (!(match_input (c,
                     inputCount, input,
                     lookup_context.funcs.match, lookup_context.match_data[1],
                     &match_end, match_positions) && (end_index = match_end)
        && match_lookahead (c,
                            lookaheadCount, lookahead,
                            lookup_context.funcs.match, lookup_context.match_data[2],
                            match_end, &end_index)))
  {
    c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
    return false;
  }

  /* Backtrack is matched against already-output glyphs, hence out_len. */
  unsigned start_index = c->buffer->out_len;
  if (!match_backtrack (c,
                        backtrackCount, backtrack,
                        lookup_context.funcs.match, lookup_context.match_data[0],
                        &start_index))
  {
    c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
    return false;
  }

  c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
  apply_lookup (c,
                inputCount, match_positions,
                lookupCount, lookupRecord,
                match_end);
  return true;
}
2542
2543 struct ChainRule
2544 {
  /* True if this chain rule can match entirely within `glyphs`.
   * The rule's three sequences are laid out back-to-back on disk:
   * backtrack (member), then input (headless: first glyph implicit),
   * then lookahead — recovered via StructAfter. */
  bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
  {
    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    return chain_context_intersects (glyphs,
                                     backtrack.len, backtrack.arrayZ,
                                     input.lenP1, input.arrayZ,
                                     lookahead.len, lookahead.arrayZ,
                                     lookup_context);
  }
2555
  /* Glyph closure for this rule; `value` is the coverage index / class of
   * the first (implicit) glyph.  The LookupRecord array follows the
   * lookahead sequence in the on-disk layout. */
  void closure (hb_closure_context_t *c, unsigned value,
                ChainContextClosureLookupContext &lookup_context) const
  {
    if (unlikely (c->lookup_limit_exceeded ())) return;

    const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
    const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    chain_context_closure_lookup (c,
                                  backtrack.len, backtrack.arrayZ,
                                  input.lenP1, input.arrayZ,
                                  lookahead.len, lookahead.arrayZ,
                                  lookup.len, lookup.arrayZ,
                                  value,
                                  lookup_context);
  }
2572
closure_lookupsOT::ChainRule2573 void closure_lookups (hb_closure_lookups_context_t *c,
2574 ChainContextClosureLookupContext &lookup_context) const
2575 {
2576 if (unlikely (c->lookup_limit_exceeded ())) return;
2577 if (!intersects (c->glyphs, lookup_context)) return;
2578
2579 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2580 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2581 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2582 recurse_lookups (c, lookup.len, lookup.arrayZ);
2583 }
2584
collect_glyphsOT::ChainRule2585 void collect_glyphs (hb_collect_glyphs_context_t *c,
2586 ChainContextCollectGlyphsLookupContext &lookup_context) const
2587 {
2588 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2589 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2590 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2591 chain_context_collect_glyphs_lookup (c,
2592 backtrack.len, backtrack.arrayZ,
2593 input.lenP1, input.arrayZ,
2594 lookahead.len, lookahead.arrayZ,
2595 lookup.len, lookup.arrayZ,
2596 lookup_context);
2597 }
2598
would_applyOT::ChainRule2599 bool would_apply (hb_would_apply_context_t *c,
2600 ChainContextApplyLookupContext &lookup_context) const
2601 {
2602 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2603 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2604 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2605 return chain_context_would_apply_lookup (c,
2606 backtrack.len, backtrack.arrayZ,
2607 input.lenP1, input.arrayZ,
2608 lookahead.len, lookahead.arrayZ, lookup.len,
2609 lookup.arrayZ, lookup_context);
2610 }
2611
applyOT::ChainRule2612 bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2613 {
2614 TRACE_APPLY (this);
2615 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2616 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2617 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2618 return_trace (chain_context_apply_lookup (c,
2619 backtrack.len, backtrack.arrayZ,
2620 input.lenP1, input.arrayZ,
2621 lookahead.len, lookahead.arrayZ, lookup.len,
2622 lookup.arrayZ, lookup_context));
2623 }
2624
2625 template<typename Iterator,
2626 hb_requires (hb_is_iterator (Iterator))>
serialize_arrayOT::ChainRule2627 void serialize_array (hb_serialize_context_t *c,
2628 HBUINT16 len,
2629 Iterator it) const
2630 {
2631 c->copy (len);
2632 for (const auto g : it)
2633 c->copy ((HBUINT16) g);
2634 }
2635
serializeOT::ChainRule2636 bool serialize (hb_serialize_context_t *c,
2637 const hb_map_t *lookup_map,
2638 const hb_map_t *backtrack_map,
2639 const hb_map_t *input_map = nullptr,
2640 const hb_map_t *lookahead_map = nullptr) const
2641 {
2642 TRACE_SERIALIZE (this);
2643 auto *out = c->start_embed (this);
2644 if (unlikely (!out)) return_trace (false);
2645
2646 const hb_map_t *mapping = backtrack_map;
2647 serialize_array (c, backtrack.len, + backtrack.iter ()
2648 | hb_map (mapping));
2649
2650 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2651 if (input_map) mapping = input_map;
2652 serialize_array (c, input.lenP1, + input.iter ()
2653 | hb_map (mapping));
2654
2655 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2656 if (lookahead_map) mapping = lookahead_map;
2657 serialize_array (c, lookahead.len, + lookahead.iter ()
2658 | hb_map (mapping));
2659
2660 const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
2661
2662 HBUINT16* lookupCount = c->embed (&(lookupRecord.len));
2663 if (!lookupCount) return_trace (false);
2664
2665 unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (), lookup_map);
2666 return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
2667 }
2668
subsetOT::ChainRule2669 bool subset (hb_subset_context_t *c,
2670 const hb_map_t *lookup_map,
2671 const hb_map_t *backtrack_map = nullptr,
2672 const hb_map_t *input_map = nullptr,
2673 const hb_map_t *lookahead_map = nullptr) const
2674 {
2675 TRACE_SUBSET (this);
2676
2677 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2678 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2679
2680 if (!backtrack_map)
2681 {
2682 const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2683 if (!hb_all (backtrack, glyphset) ||
2684 !hb_all (input, glyphset) ||
2685 !hb_all (lookahead, glyphset))
2686 return_trace (false);
2687
2688 serialize (c->serializer, lookup_map, c->plan->glyph_map);
2689 }
2690 else
2691 {
2692 if (!hb_all (backtrack, backtrack_map) ||
2693 !hb_all (input, input_map) ||
2694 !hb_all (lookahead, lookahead_map))
2695 return_trace (false);
2696
2697 serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
2698 }
2699
2700 return_trace (true);
2701 }
2702
sanitizeOT::ChainRule2703 bool sanitize (hb_sanitize_context_t *c) const
2704 {
2705 TRACE_SANITIZE (this);
2706 if (!backtrack.sanitize (c)) return_trace (false);
2707 const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
2708 if (!input.sanitize (c)) return_trace (false);
2709 const Array16Of<HBUINT16> &lookahead = StructAfter<Array16Of<HBUINT16>> (input);
2710 if (!lookahead.sanitize (c)) return_trace (false);
2711 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
2712 return_trace (lookup.sanitize (c));
2713 }
2714
2715 protected:
2716 Array16Of<HBUINT16>
2717 backtrack; /* Array of backtracking values
2718 * (to be matched before the input
2719 * sequence) */
2720 HeadlessArrayOf<HBUINT16>
2721 inputX; /* Array of input values (start with
2722 * second glyph) */
2723 Array16Of<HBUINT16>
2724 lookaheadX; /* Array of lookahead values's (to be
2725 * matched after the input sequence) */
2726 Array16Of<LookupRecord>
2727 lookupX; /* Array of LookupRecords--in
2728 * design order) */
2729 public:
2730 DEFINE_SIZE_MIN (8);
2731 };
2732
/* ChainRuleSet: an array of offsets to ChainRule tables sharing the same
 * first-glyph coverage (Format1) or input class (Format2), ordered by
 * preference.  Most operations simply fan out over the rules; apply()
 * stops at the first rule that applies (hb_any short-circuits). */
2733 struct ChainRuleSet
2734 {
  /* True if any rule in the set can match within `glyphs`. */
2735 intersectsOT::ChainRuleSet2735   bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
2736   {
2737     return
2738     + hb_iter (rule)
2739     | hb_map (hb_add (this))
2740     | hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
2741     | hb_any
2742     ;
2743   }
  /* Runs glyph closure on every rule, seeding each with `value`. */
2744 closureOT::ChainRuleSet2744   void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
2745   {
2746     if (unlikely (c->lookup_limit_exceeded ())) return;
2747
2748     return
2749     + hb_iter (rule)
2750     | hb_map (hb_add (this))
2751     | hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
2752     ;
2753   }
2754
  /* Runs lookup-index closure on every rule. */
2755 closure_lookupsOT::ChainRuleSet2755   void closure_lookups (hb_closure_lookups_context_t *c,
2756 			ChainContextClosureLookupContext &lookup_context) const
2757   {
2758     if (unlikely (c->lookup_limit_exceeded ())) return;
2759
2760     + hb_iter (rule)
2761     | hb_map (hb_add (this))
2762     | hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
2763     ;
2764   }
2765
  /* Collects glyphs/classes referenced by every rule. */
2766 collect_glyphsOT::ChainRuleSet2766   void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
2767   {
2768     return
2769     + hb_iter (rule)
2770     | hb_map (hb_add (this))
2771     | hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
2772     ;
2773   }
2774
  /* True if any rule would apply (no buffer modification). */
2775 would_applyOT::ChainRuleSet2775   bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2776   {
2777     return
2778     + hb_iter (rule)
2779     | hb_map (hb_add (this))
2780     | hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
2781     | hb_any
2782     ;
2783   }
2784
  /* Applies the first rule that matches; hb_any stops the scan there. */
2785 applyOT::ChainRuleSet2785   bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
2786   {
2787     TRACE_APPLY (this);
2788     return_trace (
2789     + hb_iter (rule)
2790     | hb_map (hb_add (this))
2791     | hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
2792     | hb_any
2793     )
2794     ;
2795   }
2796
  /* Subsets each rule in turn.  A rule whose serialize_subset fails is
   * popped and its serializer output reverted (o_snap); if no rule at all
   * survives, the whole set's output is reverted (snap) and false returned. */
2797 subsetOT::ChainRuleSet2797   bool subset (hb_subset_context_t *c,
2798 	       const hb_map_t *lookup_map,
2799 	       const hb_map_t *backtrack_klass_map = nullptr,
2800 	       const hb_map_t *input_klass_map = nullptr,
2801 	       const hb_map_t *lookahead_klass_map = nullptr) const
2802   {
2803     TRACE_SUBSET (this);
2804
2805     auto snap = c->serializer->snapshot ();
2806     auto *out = c->serializer->start_embed (*this);
2807     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2808
2809     for (const Offset16To<ChainRule>& _ : rule)
2810     {
2811       if (!_) continue;
2812       auto o_snap = c->serializer->snapshot ();
2813       auto *o = out->rule.serialize_append (c->serializer);
2814       if (unlikely (!o)) continue;
2815
2816       if (!o->serialize_subset (c, _, this,
2817 				lookup_map,
2818 				backtrack_klass_map,
2819 				input_klass_map,
2820 				lookahead_klass_map))
2821       {
2822 	out->rule.pop ();
2823 	c->serializer->revert (o_snap);
2824       }
2825     }
2826
2827     bool ret = bool (out->rule);
2828     if (!ret) c->serializer->revert (snap);
2829
2830     return_trace (ret);
2831   }
2832
  /* Validates the offset array and each referenced ChainRule. */
2833 sanitizeOT::ChainRuleSet2833   bool sanitize (hb_sanitize_context_t *c) const
2834   {
2835     TRACE_SANITIZE (this);
2836     return_trace (rule.sanitize (c, this));
2837   }
2838
2839   protected:
2840   Array16OfOffset16To<ChainRule>
2841 		rule;			/* Array of ChainRule tables
2842 					 * ordered by preference */
2843   public:
2844   DEFINE_SIZE_ARRAY (2, rule);
2845 };
2846
/* ChainContextFormat1: chained contextual lookup keyed by simple glyph
 * values.  A Coverage table selects the rule set for the first input glyph;
 * all sequence values in the rules are raw glyph IDs (SimpleContext). */
2847 struct ChainContextFormat1
2848 {
  /* True if any covered glyph in `glyphs` has a rule set that can match. */
2849 intersectsOT::ChainContextFormat12849   bool intersects (const hb_set_t *glyphs) const
2850   {
2851     struct ChainContextClosureLookupContext lookup_context = {
2852       {intersects_glyph, intersected_glyph},
2853       ContextFormat::SimpleContext,
2854       {nullptr, nullptr, nullptr}
2855     };
2856
2857     return
2858     + hb_zip (this+coverage, ruleSet)
2859     | hb_filter (*glyphs, hb_first)
2860     | hb_map (hb_second)
2861     | hb_map (hb_add (this))
2862     | hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
2863     | hb_any
2864     ;
2865   }
2866
  /* Chained context can substitute more than one glyph per input glyph. */
2867 may_have_non_1to1OT::ChainContextFormat12867   bool may_have_non_1to1 () const
2868   { return true; }
2869
  /* Glyph closure: pushes the active-glyph scope, then closes every rule
   * set whose covered glyph is in the parent's active set, passing the
   * glyph ID as the seed value. */
2870 closureOT::ChainContextFormat12870   void closure (hb_closure_context_t *c) const
2871   {
2872     hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
2873     get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
2874 						 cur_active_glyphs);
2875
2876     struct ChainContextClosureLookupContext lookup_context = {
2877       {intersects_glyph, intersected_glyph},
2878       ContextFormat::SimpleContext,
2879       {nullptr, nullptr, nullptr}
2880     };
2881
2882     + hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
2883     | hb_filter ([&] (hb_codepoint_t _) {
2884       return c->previous_parent_active_glyphs ().has (_);
2885     }, hb_first)
2886     | hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
2887     | hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
2888     ;
2889
2890     c->pop_cur_done_glyphs ();
2891   }
2892
  /* Lookup closure restricted to rule sets reachable from c->glyphs. */
2893 closure_lookupsOT::ChainContextFormat12893   void closure_lookups (hb_closure_lookups_context_t *c) const
2894   {
2895     struct ChainContextClosureLookupContext lookup_context = {
2896       {intersects_glyph, intersected_glyph},
2897       ContextFormat::SimpleContext,
2898       {nullptr, nullptr, nullptr}
2899     };
2900
2901     + hb_zip (this+coverage, ruleSet)
2902     | hb_filter (*c->glyphs, hb_first)
2903     | hb_map (hb_second)
2904     | hb_map (hb_add (this))
2905     | hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
2906     ;
2907   }
2908
  /* Chained context carries no device/variation data itself. */
2909 collect_variation_indicesOT::ChainContextFormat12909   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
2910
  /* Collects the covered input glyphs plus every glyph referenced by rules. */
2911 collect_glyphsOT::ChainContextFormat12911   void collect_glyphs (hb_collect_glyphs_context_t *c) const
2912   {
2913     (this+coverage).collect_coverage (c->input);
2914
2915     struct ChainContextCollectGlyphsLookupContext lookup_context = {
2916       {collect_glyph},
2917       {nullptr, nullptr, nullptr}
2918     };
2919
2920     + hb_iter (ruleSet)
2921     | hb_map (hb_add (this))
2922     | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
2923     ;
2924   }
2925
  /* Would any rule apply to c->glyphs[0]?
   * NOTE(review): relies on Offset16To's null-offset behavior when
   * get_coverage returns NOT_COVERED — confirm ruleSet[] bounds handling. */
2926 would_applyOT::ChainContextFormat12926   bool would_apply (hb_would_apply_context_t *c) const
2927   {
2928     const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
2929     struct ChainContextApplyLookupContext lookup_context = {
2930       {match_glyph},
2931       {nullptr, nullptr, nullptr}
2932     };
2933     return rule_set.would_apply (c, lookup_context);
2934   }
2935
2936 get_coverageOT::ChainContextFormat12936   const Coverage &get_coverage () const { return this+coverage; }
2937
  /* Applies the rule set selected by the current glyph's coverage index. */
2938 applyOT::ChainContextFormat12938   bool apply (hb_ot_apply_context_t *c) const
2939   {
2940     TRACE_APPLY (this);
2941     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
2942     if (likely (index == NOT_COVERED)) return_trace (false);
2943
2944     const ChainRuleSet &rule_set = this+ruleSet[index];
2945     struct ChainContextApplyLookupContext lookup_context = {
2946       {match_glyph},
2947       {nullptr, nullptr, nullptr}
2948     };
2949     return_trace (rule_set.apply (c, lookup_context));
2950   }
2951
  /* Subsets: keeps only covered glyphs retained in the plan whose rule set
   * subsets successfully, then rebuilds Coverage over the remapped glyphs. */
2952 subsetOT::ChainContextFormat12952   bool subset (hb_subset_context_t *c) const
2953   {
2954     TRACE_SUBSET (this);
2955     const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
2956     const hb_map_t &glyph_map = *c->plan->glyph_map;
2957
2958     auto *out = c->serializer->start_embed (*this);
2959     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
2960     out->format = format;
2961
2962     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
2963     hb_sorted_vector_t<hb_codepoint_t> new_coverage;
2964     + hb_zip (this+coverage, ruleSet)
2965     | hb_filter (glyphset, hb_first)
2966     | hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
2967     | hb_map (hb_first)
2968     | hb_map (glyph_map)
2969     | hb_sink (new_coverage)
2970     ;
2971
2972     out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
2973     return_trace (bool (new_coverage));
2974   }
2975
2976 sanitizeOT::ChainContextFormat12976   bool sanitize (hb_sanitize_context_t *c) const
2977   {
2978     TRACE_SANITIZE (this);
2979     return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
2980   }
2981
2982   protected:
2983   HBUINT16	format;			/* Format identifier--format = 1 */
2984   Offset16To<Coverage>
2985 		coverage;		/* Offset to Coverage table--from
2986 					 * beginning of table */
2987   Array16OfOffset16To<ChainRuleSet>
2988 		ruleSet;		/* Array of ChainRuleSet tables
2989 					 * ordered by Coverage Index */
2990   public:
2991   DEFINE_SIZE_ARRAY (6, ruleSet);
2992 };
2993
/* ChainContextFormat2: chained contextual lookup keyed by glyph classes.
 * Coverage gates the first input glyph; three separate ClassDefs map
 * glyphs to classes for the backtrack, input, and lookahead sequences,
 * and rule sets are indexed by the first input glyph's input class. */
2994 struct ChainContextFormat2
2995 {
  /* True if some covered glyph's input class leads to a rule set that can
   * match entirely within `glyphs`.  The rule-set index IS the input class,
   * hence the hb_enumerate + intersects_class filtering below. */
2996 intersectsOT::ChainContextFormat22996   bool intersects (const hb_set_t *glyphs) const
2997   {
2998     if (!(this+coverage).intersects (glyphs))
2999       return false;
3000
3001     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3002     const ClassDef &input_class_def = this+inputClassDef;
3003     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3004
3005     struct ChainContextClosureLookupContext lookup_context = {
3006       {intersects_class, intersected_class_glyphs},
3007       ContextFormat::ClassBasedContext,
3008       {&backtrack_class_def,
3009        &input_class_def,
3010        &lookahead_class_def}
3011     };
3012
3013     hb_set_t retained_coverage_glyphs;
3014     (this+coverage).intersected_coverage_glyphs (glyphs, &retained_coverage_glyphs);
3015
3016     hb_set_t coverage_glyph_classes;
3017     input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3018
3019     return
3020     + hb_iter (ruleSet)
3021     | hb_map (hb_add (this))
3022     | hb_enumerate
3023     | hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
3024 	      { return input_class_def.intersects_class (glyphs, p.first) &&
3025 		       coverage_glyph_classes.has (p.first) &&
3026 		       p.second.intersects (glyphs, lookup_context); })
3027     | hb_any
3028     ;
3029   }
3030
  /* Chained context can substitute more than one glyph per input glyph. */
3031 may_have_non_1to1OT::ChainContextFormat23031   bool may_have_non_1to1 () const
3032   { return true; }
3033
  /* Glyph closure: pushes the active-glyph scope and closes each rule set
   * whose input class intersects the parent's active glyphs, seeding the
   * rule set with its class value. */
3034 closureOT::ChainContextFormat23034   void closure (hb_closure_context_t *c) const
3035   {
3036     if (!(this+coverage).intersects (c->glyphs))
3037       return;
3038
3039     hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3040     get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3041 						 cur_active_glyphs);
3042
3043
3044     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3045     const ClassDef &input_class_def = this+inputClassDef;
3046     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3047
3048     struct ChainContextClosureLookupContext lookup_context = {
3049       {intersects_class, intersected_class_glyphs},
3050       ContextFormat::ClassBasedContext,
3051       {&backtrack_class_def,
3052        &input_class_def,
3053        &lookahead_class_def}
3054     };
3055
3056     + hb_enumerate (ruleSet)
3057     | hb_filter ([&] (unsigned _)
3058 		 { return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
3059 		 hb_first)
3060     | hb_apply ([&] (const hb_pair_t<unsigned, const Offset16To<ChainRuleSet>&> _)
3061 		{
3062 		  const ChainRuleSet& chainrule_set = this+_.second;
3063 		  chainrule_set.closure (c, _.first, lookup_context);
3064 		})
3065     ;
3066
3067     c->pop_cur_done_glyphs ();
3068   }
3069
  /* Lookup closure restricted to rule sets whose class is live in c->glyphs. */
3070 closure_lookupsOT::ChainContextFormat23070   void closure_lookups (hb_closure_lookups_context_t *c) const
3071   {
3072     if (!(this+coverage).intersects (c->glyphs))
3073       return;
3074
3075     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3076     const ClassDef &input_class_def = this+inputClassDef;
3077     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3078
3079     struct ChainContextClosureLookupContext lookup_context = {
3080       {intersects_class, intersected_class_glyphs},
3081       ContextFormat::ClassBasedContext,
3082       {&backtrack_class_def,
3083        &input_class_def,
3084        &lookahead_class_def}
3085     };
3086
3087     + hb_iter (ruleSet)
3088     | hb_map (hb_add (this))
3089     | hb_enumerate
3090     | hb_filter([&] (unsigned klass)
3091     { return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
3092     | hb_map (hb_second)
3093     | hb_apply ([&] (const ChainRuleSet &_)
3094     { _.closure_lookups (c, lookup_context); })
3095     ;
3096   }
3097
  /* Chained context carries no device/variation data itself. */
3098 collect_variation_indicesOT::ChainContextFormat23098   void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3099
  /* Collects covered input glyphs plus all class values used by rules
   * (expanded through the three ClassDefs via collect_class). */
3100 collect_glyphsOT::ChainContextFormat23100   void collect_glyphs (hb_collect_glyphs_context_t *c) const
3101   {
3102     (this+coverage).collect_coverage (c->input);
3103
3104     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3105     const ClassDef &input_class_def = this+inputClassDef;
3106     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3107
3108     struct ChainContextCollectGlyphsLookupContext lookup_context = {
3109       {collect_class},
3110       {&backtrack_class_def,
3111        &input_class_def,
3112        &lookahead_class_def}
3113     };
3114
3115     + hb_iter (ruleSet)
3116     | hb_map (hb_add (this))
3117     | hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
3118     ;
3119   }
3120
  /* Would any rule in the class-selected rule set apply to c->glyphs? */
3121 would_applyOT::ChainContextFormat23121   bool would_apply (hb_would_apply_context_t *c) const
3122   {
3123     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3124     const ClassDef &input_class_def = this+inputClassDef;
3125     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3126
3127     unsigned int index = input_class_def.get_class (c->glyphs[0]);
3128     const ChainRuleSet &rule_set = this+ruleSet[index];
3129     struct ChainContextApplyLookupContext lookup_context = {
3130       {match_class},
3131       {&backtrack_class_def,
3132        &input_class_def,
3133        &lookahead_class_def}
3134     };
3135     return rule_set.would_apply (c, lookup_context);
3136   }
3137
3138 get_coverageOT::ChainContextFormat23138   const Coverage &get_coverage () const { return this+coverage; }
3139
  /* Applies: first gate on Coverage, then re-derive `index` as the current
   * glyph's input class to select the rule set. */
3140 applyOT::ChainContextFormat23140   bool apply (hb_ot_apply_context_t *c) const
3141   {
3142     TRACE_APPLY (this);
3143     unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
3144     if (likely (index == NOT_COVERED)) return_trace (false);
3145
3146     const ClassDef &backtrack_class_def = this+backtrackClassDef;
3147     const ClassDef &input_class_def = this+inputClassDef;
3148     const ClassDef &lookahead_class_def = this+lookaheadClassDef;
3149
3150     index = input_class_def.get_class (c->buffer->cur().codepoint);
3151     const ChainRuleSet &rule_set = this+ruleSet[index];
3152     struct ChainContextApplyLookupContext lookup_context = {
3153       {match_class},
3154       {&backtrack_class_def,
3155        &input_class_def,
3156        &lookahead_class_def}
3157     };
3158     return_trace (rule_set.apply (c, lookup_context));
3159   }
3160
  /* Subsets: subsets Coverage and the three ClassDefs (building class
   * remap tables), then subsets each rule set whose class survived.  The
   * ruleSet array is indexed by class, so failed/empty sets still occupy a
   * slot; trailing empty slots are pruned at the end by reverting to the
   * last snapshot that produced a non-empty rule set. */
3161 subsetOT::ChainContextFormat23161   bool subset (hb_subset_context_t *c) const
3162   {
3163     TRACE_SUBSET (this);
3164     auto *out = c->serializer->start_embed (*this);
3165     if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
3166     out->format = format;
3167     out->coverage.serialize_subset (c, coverage, this);
3168
3169     hb_map_t backtrack_klass_map;
3170     hb_map_t input_klass_map;
3171     hb_map_t lookahead_klass_map;
3172
3173     out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
3174     // TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
3175     out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
3176     out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
3177
3178     if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
3179 						   input_klass_map,
3180 						   lookahead_klass_map)))
3181       return_trace (false);
3182
3183     const hb_set_t* glyphset = c->plan->glyphset_gsub ();
3184     hb_set_t retained_coverage_glyphs;
3185     (this+coverage).intersected_coverage_glyphs (glyphset, &retained_coverage_glyphs);
3186
3187     hb_set_t coverage_glyph_classes;
3188     (this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
3189
3190     int non_zero_index = -1, index = 0;
3191     bool ret = true;
3192     const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;
3193     auto last_non_zero = c->serializer->snapshot ();
3194     for (const auto& _ : + hb_enumerate (ruleSet)
3195 			 | hb_filter (input_klass_map, hb_first))
3196     {
3197       auto *o = out->ruleSet.serialize_append (c->serializer);
3198       if (unlikely (!o))
3199       {
3200 	ret = false;
3201 	break;
3202       }
3203       if (coverage_glyph_classes.has (_.first) &&
3204 	  o->serialize_subset (c, _.second, this,
3205 			       lookup_map,
3206 			       &backtrack_klass_map,
3207 			       &input_klass_map,
3208 			       &lookahead_klass_map))
3209       {
3210 	last_non_zero = c->serializer->snapshot ();
3211 	non_zero_index = index;
3212       }
3213
3214       index++;
3215     }
3216
3217     if (!ret || non_zero_index == -1) return_trace (false);
3218
3219     // prune empty trailing ruleSets
3220     if (index > non_zero_index) {
3221       c->serializer->revert (last_non_zero);
3222       out->ruleSet.len = non_zero_index + 1;
3223     }
3224
3225     return_trace (bool (out->ruleSet));
3226   }
3227
  /* Validates Coverage, the three ClassDefs, and the rule-set offsets. */
3228 sanitizeOT::ChainContextFormat23228   bool sanitize (hb_sanitize_context_t *c) const
3229   {
3230     TRACE_SANITIZE (this);
3231     return_trace (coverage.sanitize (c, this) &&
3232 		  backtrackClassDef.sanitize (c, this) &&
3233 		  inputClassDef.sanitize (c, this) &&
3234 		  lookaheadClassDef.sanitize (c, this) &&
3235 		  ruleSet.sanitize (c, this));
3236   }
3237
3238   protected:
3239   HBUINT16	format;			/* Format identifier--format = 2 */
3240   Offset16To<Coverage>
3241 		coverage;		/* Offset to Coverage table--from
3242 					 * beginning of table */
3243   Offset16To<ClassDef>
3244 		backtrackClassDef;	/* Offset to glyph ClassDef table
3245 					 * containing backtrack sequence
3246 					 * data--from beginning of table */
3247   Offset16To<ClassDef>
3248 		inputClassDef;		/* Offset to glyph ClassDef
3249 					 * table containing input sequence
3250 					 * data--from beginning of table */
3251   Offset16To<ClassDef>
3252 		lookaheadClassDef;	/* Offset to glyph ClassDef table
3253 					 * containing lookahead sequence
3254 					 * data--from beginning of table */
3255   Array16OfOffset16To<ChainRuleSet>
3256 		ruleSet;		/* Array of ChainRuleSet tables
3257 					 * ordered by class */
3258   public:
3259   DEFINE_SIZE_ARRAY (12, ruleSet);
3260 };
3261
3262 struct ChainContextFormat3
3263 {
intersectsOT::ChainContextFormat33264 bool intersects (const hb_set_t *glyphs) const
3265 {
3266 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3267
3268 if (!(this+input[0]).intersects (glyphs))
3269 return false;
3270
3271 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3272 struct ChainContextClosureLookupContext lookup_context = {
3273 {intersects_coverage, intersected_coverage_glyphs},
3274 ContextFormat::CoverageBasedContext,
3275 {this, this, this}
3276 };
3277 return chain_context_intersects (glyphs,
3278 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3279 input.len, (const HBUINT16 *) input.arrayZ + 1,
3280 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3281 lookup_context);
3282 }
3283
may_have_non_1to1OT::ChainContextFormat33284 bool may_have_non_1to1 () const
3285 { return true; }
3286
closureOT::ChainContextFormat33287 void closure (hb_closure_context_t *c) const
3288 {
3289 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3290
3291 if (!(this+input[0]).intersects (c->glyphs))
3292 return;
3293
3294 hb_set_t* cur_active_glyphs = &c->push_cur_active_glyphs ();
3295 get_coverage ().intersected_coverage_glyphs (&c->previous_parent_active_glyphs (),
3296 cur_active_glyphs);
3297
3298
3299 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3300 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3301 struct ChainContextClosureLookupContext lookup_context = {
3302 {intersects_coverage, intersected_coverage_glyphs},
3303 ContextFormat::CoverageBasedContext,
3304 {this, this, this}
3305 };
3306 chain_context_closure_lookup (c,
3307 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3308 input.len, (const HBUINT16 *) input.arrayZ + 1,
3309 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3310 lookup.len, lookup.arrayZ,
3311 0, lookup_context);
3312
3313 c->pop_cur_done_glyphs ();
3314 }
3315
closure_lookupsOT::ChainContextFormat33316 void closure_lookups (hb_closure_lookups_context_t *c) const
3317 {
3318 if (!intersects (c->glyphs))
3319 return;
3320
3321 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3322 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3323 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3324 recurse_lookups (c, lookup.len, lookup.arrayZ);
3325 }
3326
collect_variation_indicesOT::ChainContextFormat33327 void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
3328
collect_glyphsOT::ChainContextFormat33329 void collect_glyphs (hb_collect_glyphs_context_t *c) const
3330 {
3331 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3332
3333 (this+input[0]).collect_coverage (c->input);
3334
3335 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3336 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3337 struct ChainContextCollectGlyphsLookupContext lookup_context = {
3338 {collect_coverage},
3339 {this, this, this}
3340 };
3341 chain_context_collect_glyphs_lookup (c,
3342 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3343 input.len, (const HBUINT16 *) input.arrayZ + 1,
3344 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3345 lookup.len, lookup.arrayZ,
3346 lookup_context);
3347 }
3348
would_applyOT::ChainContextFormat33349 bool would_apply (hb_would_apply_context_t *c) const
3350 {
3351 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3352 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3353 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3354 struct ChainContextApplyLookupContext lookup_context = {
3355 {match_coverage},
3356 {this, this, this}
3357 };
3358 return chain_context_would_apply_lookup (c,
3359 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3360 input.len, (const HBUINT16 *) input.arrayZ + 1,
3361 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3362 lookup.len, lookup.arrayZ, lookup_context);
3363 }
3364
get_coverageOT::ChainContextFormat33365 const Coverage &get_coverage () const
3366 {
3367 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3368 return this+input[0];
3369 }
3370
applyOT::ChainContextFormat33371 bool apply (hb_ot_apply_context_t *c) const
3372 {
3373 TRACE_APPLY (this);
3374 const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
3375
3376 unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
3377 if (likely (index == NOT_COVERED)) return_trace (false);
3378
3379 const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
3380 const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
3381 struct ChainContextApplyLookupContext lookup_context = {
3382 {match_coverage},
3383 {this, this, this}
3384 };
3385 return_trace (chain_context_apply_lookup (c,
3386 backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
3387 input.len, (const HBUINT16 *) input.arrayZ + 1,
3388 lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
3389 lookup.len, lookup.arrayZ, lookup_context));
3390 }
3391
3392 template<typename Iterator,
3393 hb_requires (hb_is_iterator (Iterator))>
serialize_coverage_offsetsOT::ChainContextFormat33394 bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
3395 {
3396 TRACE_SERIALIZE (this);
3397 auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
3398
3399 if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
3400 return_trace (false);
3401
3402 for (auto& offset : it) {
3403 auto *o = out->serialize_append (c->serializer);
3404 if (unlikely (!o) || !o->serialize_subset (c, offset, base))
3405 return_trace (false);
3406 }
3407
3408 return_trace (true);
3409 }
3410
  /* Subsets this ChainContextFormat3 subtable: copies the format field,
   * re-serializes the three coverage arrays in layout order, then remaps
   * and rewrites the lookup records.  Returns false on serializer error. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out)) return_trace (false);
    if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

    /* The arrays must be emitted in the same order they are read
     * (backtrack, input, lookahead) since they are located by StructAfter. */
    if (!serialize_coverage_offsets (c, backtrack.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!serialize_coverage_offsets (c, input.iter (), this))
      return_trace (false);

    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!serialize_coverage_offsets (c, lookahead.iter (), this))
      return_trace (false);

    /* Remap lookup indices through the subset plan (GSUB vs GPOS map). */
    const Array16Of<LookupRecord> &lookupRecord = StructAfter<Array16Of<LookupRecord>> (lookahead);
    const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? c->plan->gsub_lookups : c->plan->gpos_lookups;

    /* Write the (provisional) record count, then overwrite it with the
     * number of records that actually survived remapping. */
    HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookupRecord.len);
    if (!lookupCount) return_trace (false);

    unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (), lookup_map);
    return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
  }
3439
  /* Validates the variable-length layout.  Each array must be sanitized
   * before StructAfter can safely compute the address of the next one. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (!backtrack.sanitize (c, this)) return_trace (false);
    const Array16OfOffset16To<Coverage> &input = StructAfter<Array16OfOffset16To<Coverage>> (backtrack);
    if (!input.sanitize (c, this)) return_trace (false);
    /* An empty input sequence is rejected, to be consistent with Context. */
    if (!input.len) return_trace (false);
    const Array16OfOffset16To<Coverage> &lookahead = StructAfter<Array16OfOffset16To<Coverage>> (input);
    if (!lookahead.sanitize (c, this)) return_trace (false);
    const Array16Of<LookupRecord> &lookup = StructAfter<Array16Of<LookupRecord>> (lookahead);
    return_trace (lookup.sanitize (c));
  }
3452
  protected:
  HBUINT16	format;			/* Format identifier--format = 3 */
  Array16OfOffset16To<Coverage>
		backtrack;		/* Array of coverage tables
					 * in backtracking sequence, in glyph
					 * sequence order */
  /* NOTE: the X-suffixed members below are placeholders only; the arrays
   * are variable-length and are always located at runtime with
   * StructAfter (see apply/sanitize/subset above). */
  Array16OfOffset16To<Coverage>
		inputX	;		/* Array of coverage
					 * tables in input sequence, in glyph
					 * sequence order */
  Array16OfOffset16To<Coverage>
		lookaheadX;		/* Array of coverage tables
					 * in lookahead sequence, in glyph
					 * sequence order */
  Array16Of<LookupRecord>
		lookupX;		/* Array of LookupRecords--in
					 * design order */
  public:
  DEFINE_SIZE_MIN (10);
};
3473
/* Chained Contextual lookup subtable: a tagged union over the three
 * ChainContext formats, dispatched on the leading format field. */
struct ChainContext
{
  /* Routes the context's operation to the subtable matching u.format;
   * unknown formats yield the context's default return value. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
    case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
    case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ChainContextFormat1	format1;
  ChainContextFormat2	format2;
  ChainContextFormat3	format3;
  } u;
};
3497
3498
/* Extension lookup wrapper, format 1: stores the real lookup type plus a
 * 32-bit offset to the actual subtable, letting lookups live beyond the
 * 16-bit offset range of the parent table. */
template <typename T>
struct ExtensionFormat1
{
  /* The lookup type of the wrapped subtable. */
  unsigned int get_type () const { return extensionLookupType; }

  /* Returns the wrapped subtable; extensionOffset is interpreted as a
   * 32-bit offset relative to this struct. */
  template <typename X>
  const X& get_subtable () const
  { return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }

  /* Forwards the operation to the wrapped subtable, passing along its
   * real lookup type. */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, format);
    if (unlikely (!c->may_dispatch (this, this))) return_trace (c->no_dispatch_return_value ());
    return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
  }

  void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
  { dispatch (c); }

  /* This is called from may_dispatch() above with hb_sanitize_context_t. */
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    /* An Extension must not wrap another Extension; rejecting that here
     * prevents unbounded indirection through get_subtable(). */
    return_trace (c->check_struct (this) &&
		  extensionLookupType != T::SubTable::Extension);
  }

  /* Subsets this wrapper: copies the header fields and recursively subsets
   * the wrapped subtable through the 32-bit offset. */
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    auto *out = c->serializer->start_embed (this);
    if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);

    out->format = format;
    out->extensionLookupType = extensionLookupType;

    const auto& src_offset =
	reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
    auto& dest_offset =
	reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);

    return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
  }

  protected:
  HBUINT16	format;			/* Format identifier. Set to 1. */
  HBUINT16	extensionLookupType;	/* Lookup type of subtable referenced
					 * by ExtensionOffset (i.e. the
					 * extension subtable). */
  Offset32	extensionOffset;	/* Offset to the extension subtable,
					 * of lookup type subtable. */
  public:
  DEFINE_SIZE_STATIC (8);
};
3555
/* Tagged union over Extension lookup formats (only format 1 exists). */
template <typename T>
struct Extension
{
  /* Real lookup type of the wrapped subtable; 0 for unknown formats. */
  unsigned int get_type () const
  {
    switch (u.format) {
    case 1: return u.format1.get_type ();
    default:return 0;
    }
  }
  template <typename X>
  const X& get_subtable () const
  {
    switch (u.format) {
    case 1: return u.format1.template get_subtable<typename T::SubTable> ();
    default:return Null (typename T::SubTable);
    }
  }

  // Specialization of dispatch for subset. dispatch() normally just
  // dispatches to the sub table this points to, but for subset
  // we need to run subset on this subtable too.
  template <typename ...Ts>
  typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
  {
    switch (u.format) {
    case 1: return u.format1.subset (c);
    default: return c->default_return_value ();
    }
  }

  /* Generic dispatch: forward to the format-1 wrapper (which in turn
   * forwards to the wrapped subtable). */
  template <typename context_t, typename ...Ts>
  typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
  {
    TRACE_DISPATCH (this, u.format);
    if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->no_dispatch_return_value ());
    switch (u.format) {
    case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
    default:return_trace (c->default_return_value ());
    }
  }

  protected:
  union {
  HBUINT16		format;		/* Format identifier */
  ExtensionFormat1<T>	format1;
  } u;
};
3604
3605
3606 /*
3607 * GSUB/GPOS Common
3608 */
3609
3610 struct hb_ot_layout_lookup_accelerator_t
3611 {
3612 template <typename TLookup>
initOT::hb_ot_layout_lookup_accelerator_t3613 void init (const TLookup &lookup)
3614 {
3615 digest.init ();
3616 lookup.collect_coverage (&digest);
3617
3618 subtables.init ();
3619 OT::hb_get_subtables_context_t c_get_subtables (subtables);
3620 lookup.dispatch (&c_get_subtables);
3621 }
finiOT::hb_ot_layout_lookup_accelerator_t3622 void fini () { subtables.fini (); }
3623
may_haveOT::hb_ot_layout_lookup_accelerator_t3624 bool may_have (hb_codepoint_t g) const
3625 { return digest.may_have (g); }
3626
applyOT::hb_ot_layout_lookup_accelerator_t3627 bool apply (hb_ot_apply_context_t *c) const
3628 {
3629 for (unsigned int i = 0; i < subtables.length; i++)
3630 if (subtables[i].apply (c))
3631 return true;
3632 return false;
3633 }
3634
3635 private:
3636 hb_set_digest_t digest;
3637 hb_get_subtables_context_t::array_t subtables;
3638 };
3639
/* Shared top-level structure of the GSUB and GPOS tables: a version number
 * followed by offsets to the ScriptList, FeatureList and LookupList, plus
 * (version >= 1.1) a 32-bit offset to FeatureVariations. */
struct GSUBGPOS
{
  bool has_data () const { return version.to_int (); }

  /* --- ScriptList accessors --- */
  unsigned int get_script_count () const
  { return (this+scriptList).len; }
  const Tag& get_script_tag (unsigned int i) const
  { return (this+scriptList).get_tag (i); }
  unsigned int get_script_tags (unsigned int start_offset,
				unsigned int *script_count /* IN/OUT */,
				hb_tag_t     *script_tags /* OUT */) const
  { return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
  const Script& get_script (unsigned int i) const
  { return (this+scriptList)[i]; }
  bool find_script_index (hb_tag_t tag, unsigned int *index) const
  { return (this+scriptList).find_index (tag, index); }

  /* --- FeatureList accessors --- */
  unsigned int get_feature_count () const
  { return (this+featureList).len; }
  /* Returns HB_TAG_NONE for the NOT_FOUND sentinel index. */
  hb_tag_t get_feature_tag (unsigned int i) const
  { return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : (this+featureList).get_tag (i); }
  unsigned int get_feature_tags (unsigned int start_offset,
				 unsigned int *feature_count /* IN/OUT */,
				 hb_tag_t     *feature_tags /* OUT */) const
  { return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
  const Feature& get_feature (unsigned int i) const
  { return (this+featureList)[i]; }
  bool find_feature_index (hb_tag_t tag, unsigned int *index) const
  { return (this+featureList).find_index (tag, index); }

  /* --- LookupList accessors --- */
  unsigned int get_lookup_count () const
  { return (this+lookupList).len; }
  const Lookup& get_lookup (unsigned int i) const
  { return (this+lookupList)[i]; }

  /* Finds the FeatureVariations record matching the given normalized
   * variation coordinates.  Always fails under HB_NO_VAR or on
   * pre-1.1 tables (which have no featureVars member). */
  bool find_variations_index (const int *coords, unsigned int num_coords,
			      unsigned int *index) const
  {
#ifdef HB_NO_VAR
    *index = FeatureVariations::NOT_FOUND_INDEX;
    return false;
#endif
    return (version.to_int () >= 0x00010001u ? this+featureVars : Null (FeatureVariations))
	   .find_index (coords, num_coords, index);
  }
  /* Returns the variation-substituted Feature for (feature_index,
   * variations_index) when one exists; otherwise the base feature. */
  const Feature& get_feature_variation (unsigned int feature_index,
					unsigned int variations_index) const
  {
#ifndef HB_NO_VAR
    if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
	version.to_int () >= 0x00010001u)
    {
      const Feature *feature = (this+featureVars).find_substitute (variations_index,
								   feature_index);
      if (feature)
	return *feature;
    }
#endif
    return get_feature (feature_index);
  }

  /* Adds to lookup_indexes the lookups referenced by variation-substitute
   * versions of the given features (no-op pre-1.1 or under HB_NO_VAR). */
  void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
					  hb_set_t       *lookup_indexes /* OUT */) const
  {
#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).collect_lookups (feature_indexes, lookup_indexes);
#endif
  }

  /* Expands lookup_indexes to the closure of lookups reachable over the
   * given glyph set, then removes lookups found to be inactive. */
  template <typename TLookup>
  void closure_lookups (hb_face_t      *face,
			const hb_set_t *glyphs,
			hb_set_t       *lookup_indexes /* IN/OUT */) const
  {
    hb_set_t visited_lookups, inactive_lookups;
    OT::hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);

    for (unsigned lookup_index : + hb_iter (lookup_indexes))
      reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);

    hb_set_union (lookup_indexes, &visited_lookups);
    hb_set_subtract (lookup_indexes, &inactive_lookups);
  }

  /* Walks every script and prunes its langsys entries for subsetting,
   * recording the surviving feature indexes. */
  void prune_langsys (const hb_map_t *duplicate_feature_map,
		      hb_hashmap_t<unsigned, hb_set_t *> *script_langsys_map,
		      hb_set_t       *new_feature_indexes /* OUT */) const
  {
    hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);

    unsigned count = get_script_count ();
    for (unsigned script_index = 0; script_index < count; script_index++)
    {
      const Script& s = get_script (script_index);
      s.prune_langsys (&c, script_index);
    }
  }

  /* Subsets the whole table: lookup list, feature list, script list, and
   * (1.1+) feature variations, in that order. */
  template <typename TLookup>
  bool subset (hb_subset_layout_context_t *c) const
  {
    TRACE_SUBSET (this);
    auto *out = c->subset_context->serializer->embed (*this);
    if (unlikely (!out)) return_trace (false);

    typedef LookupOffsetList<TLookup> TLookupList;
    reinterpret_cast<Offset16To<TLookupList> &> (out->lookupList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const Offset16To<TLookupList> &> (lookupList),
			   this,
			   c);

    reinterpret_cast<Offset16To<RecordListOfFeature> &> (out->featureList)
	.serialize_subset (c->subset_context,
			   reinterpret_cast<const Offset16To<RecordListOfFeature> &> (featureList),
			   this,
			   c);

    out->scriptList.serialize_subset (c->subset_context,
				      scriptList,
				      this,
				      c);

#ifndef HB_NO_VAR
    if (version.to_int () >= 0x00010001u)
    {
      /* If FeatureVariations cannot be retained, downgrade the output
       * table to version 1.0, which has no featureVars member. */
      bool ret = out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
      if (!ret)
      {
	out->version.major = 1;
	out->version.minor = 0;
      }
    }
#endif

    return_trace (true);
  }

  /* Maps each feature index in feature_indices to a canonical
   * representative that has the same tag and the same (post-subset)
   * lookup list, so duplicate features can be collapsed. */
  void find_duplicate_features (const hb_map_t *lookup_indices,
				const hb_set_t *feature_indices,
				hb_map_t *duplicate_feature_map /* OUT */) const
  {
    if (feature_indices->is_empty ()) return;
    /* tag -> set of distinct feature indices already seen with that tag */
    hb_hashmap_t<hb_tag_t, hb_set_t *> unique_features;
    //find out duplicate features after subset
    for (unsigned i : feature_indices->iter ())
    {
      hb_tag_t t = get_feature_tag (i);
      if (t == HB_MAP_VALUE_INVALID) continue;
      if (!unique_features.has (t))
      {
	/* First feature with this tag: it is its own representative. */
	hb_set_t* indices = hb_set_create ();
	if (unlikely (indices == hb_set_get_empty () ||
		      !unique_features.set (t, indices)))
	{
	  /* Allocation failure: release everything and give up. */
	  hb_set_destroy (indices);
	  for (auto _ : unique_features.iter ())
	    hb_set_destroy (_.second);
	  return;
	}
	if (unique_features.get (t))
	  unique_features.get (t)->add (i);
	duplicate_feature_map->set (i, i);
	continue;
      }

      bool found = false;

      hb_set_t* same_tag_features = unique_features.get (t);
      for (unsigned other_f_index : same_tag_features->iter ())
      {
	const Feature& f = get_feature (i);
	const Feature& other_f = get_feature (other_f_index);

	/* Compare only the lookups that survive subsetting. */
	auto f_iter =
	+ hb_iter (f.lookupIndex)
	| hb_filter (lookup_indices)
	;

	auto other_f_iter =
	+ hb_iter (other_f.lookupIndex)
	| hb_filter (lookup_indices)
	;

	bool is_equal = true;
	for (; f_iter && other_f_iter; f_iter++, other_f_iter++)
	{
	  unsigned a = *f_iter;
	  unsigned b = *other_f_iter;
	  if (a != b) { is_equal = false; break; }
	}

	/* Not a duplicate if the sequences differ or have unequal length
	 * (a leftover iterator means one list was longer). */
	if (is_equal == false || f_iter || other_f_iter) continue;

	found = true;
	duplicate_feature_map->set (i, other_f_index);
	break;
      }

      if (found == false)
      {
	/* A new distinct feature under this tag. */
	same_tag_features->add (i);
	duplicate_feature_map->set (i, i);
      }
    }

    for (auto _ : unique_features.iter ())
      hb_set_destroy (_.second);
  }

  /* Removes from feature_indices the features that reference none of the
   * retained lookups and have no other reason to be kept. */
  void prune_features (const hb_map_t *lookup_indices, /* IN */
		       hb_set_t       *feature_indices /* IN/OUT */) const
  {
#ifndef HB_NO_VAR
    // This is the set of feature indices which have alternate versions defined
    // in the FeatureVariations table and the alternate version(s) intersect the
    // set of lookup indices.
    hb_set_t alternate_feature_indices;
    if (version.to_int () >= 0x00010001u)
      (this+featureVars).closure_features (lookup_indices, &alternate_feature_indices);
    if (unlikely (alternate_feature_indices.in_error()))
    {
      feature_indices->err ();
      return;
    }
#endif

    for (unsigned i : feature_indices->iter())
    {
      const Feature& f = get_feature (i);
      hb_tag_t tag = get_feature_tag (i);
      if (tag == HB_TAG ('p', 'r', 'e', 'f'))
	// Note: Never ever drop feature 'pref', even if it's empty.
	// HarfBuzz chooses shaper for Khmer based on presence of this
	// feature. See thread at:
	// http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
	continue;


      /* The 'size' feature carries data in its FeatureParams even when it
       * references no lookups; keep it. */
      if (!f.featureParams.is_null () &&
	  tag == HB_TAG ('s', 'i', 'z', 'e'))
	continue;

      if (!f.intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
	  && !alternate_feature_indices.has (i)
#endif
	  )
	feature_indices->del (i);
    }
  }

  /* On-disk header size; featureVars exists only on version >= 1.1. */
  unsigned int get_size () const
  {
    return min_size +
	   (version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
  }

  /* Validates the header and each referenced sub-structure. */
  template <typename TLookup>
  bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    typedef List16OfOffset16To<TLookup> TLookupList;
    if (unlikely (!(version.sanitize (c) &&
		    likely (version.major == 1) &&
		    scriptList.sanitize (c, this) &&
		    featureList.sanitize (c, this) &&
		    reinterpret_cast<const Offset16To<TLookupList> &> (lookupList).sanitize (c, this))))
      return_trace (false);

#ifndef HB_NO_VAR
    /* featureVars is only sanitized when the version says it exists. */
    if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
      return_trace (false);
#endif

    return_trace (true);
  }

  /* Caches a sanitized reference to the table plus one lookup accelerator
   * per lookup.  T is the concrete table type (GSUB or GPOS). */
  template <typename T>
  struct accelerator_t
  {
    accelerator_t (hb_face_t *face)
    {
      this->table = hb_sanitize_context_t ().reference_table<T> (face);
      if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
      {
	/* Known-bad font table: swap in the empty blob instead. */
	hb_blob_destroy (this->table.get_blob ());
	this->table = hb_blob_get_empty ();
      }

      this->lookup_count = table->get_lookup_count ();

      this->accels = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (this->lookup_count, sizeof (hb_ot_layout_lookup_accelerator_t));
      if (unlikely (!this->accels))
      {
	/* Allocation failed: degrade to an empty table with no lookups
	 * (the loop below then runs zero times). */
	this->lookup_count = 0;
	this->table.destroy ();
	this->table = hb_blob_get_empty ();
      }

      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].init (table->get_lookup (i));
    }
    ~accelerator_t ()
    {
      for (unsigned int i = 0; i < this->lookup_count; i++)
	this->accels[i].fini ();
      hb_free (this->accels);
      this->table.destroy ();
    }

    hb_blob_ptr_t<T> table;
    unsigned int lookup_count;
    hb_ot_layout_lookup_accelerator_t *accels;
  };

  protected:
  FixedVersion<>version;	/* Version of the GSUB/GPOS table--initially set
				 * to 0x00010000u */
  Offset16To<ScriptList>
		scriptList;	/* ScriptList table */
  Offset16To<FeatureList>
		featureList;	/* FeatureList table */
  Offset16To<LookupList>
		lookupList;	/* LookupList table */
  Offset32To<FeatureVariations>
		featureVars;	/* Offset to Feature Variations
				 * table--from beginning of table
				 * (may be NULL).  Introduced
				 * in version 0x00010001. */
  public:
  DEFINE_SIZE_MIN (10);
};
3973
3974
3975 } /* namespace OT */
3976
3977
3978 #endif /* HB_OT_LAYOUT_GSUBGPOS_HH */
3979