1 /*
2  * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
3  * Copyright © 2012  Google, Inc.
4  *
5  *  This is part of HarfBuzz, a text shaping library.
6  *
7  * Permission is hereby granted, without written agreement and without
8  * license or royalty fees, to use, copy, modify, and distribute this
9  * software and its documentation for any purpose, provided that the
10  * above copyright notice and the following two paragraphs appear in
11  * all copies of this software.
12  *
13  * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
14  * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
15  * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
16  * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
17  * DAMAGE.
18  *
19  * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
20  * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
21  * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
22  * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
23  * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
24  *
25  * Red Hat Author(s): Behdad Esfahbod
26  * Google Author(s): Behdad Esfahbod
27  */
28 
29 #ifndef HB_OPEN_TYPE_PRIVATE_HH
30 #define HB_OPEN_TYPE_PRIVATE_HH
31 
32 #include "hb-private.hh"
33 #include "hb-debug.hh"
34 #include "hb-face-private.hh"
35 
36 
37 namespace OT {
38 
39 
40 
41 /*
42  * Casts
43  */
44 
/* Cast to struct T, reference to reference */
template<typename Type, typename TObject>
static inline const Type& CastR (const TObject &obj)
{
  return reinterpret_cast<const Type &> (obj);
}
template<typename Type, typename TObject>
static inline Type& CastR (TObject &obj)
{
  return reinterpret_cast<Type &> (obj);
}
52 
53 /* Cast to struct T, pointer to pointer */
/* Cast to struct T, pointer to pointer */
template<typename Type, typename TObject>
static inline const Type* CastP (const TObject *obj)
{
  return reinterpret_cast<const Type *> (obj);
}
template<typename Type, typename TObject>
static inline Type* CastP (TObject *obj)
{
  return reinterpret_cast<Type *> (obj);
}
60 
61 /* StructAtOffset<T>(P,Ofs) returns the struct T& that is placed at memory
62  * location pointed to by P plus Ofs bytes. */
template<typename Type>
static inline const Type& StructAtOffset (const void *base, unsigned int offset)
{
  const char *p = reinterpret_cast<const char *> (base) + offset;
  return *reinterpret_cast<const Type *> (p);
}
template<typename Type>
static inline Type& StructAtOffset (void *base, unsigned int offset)
{
  char *p = reinterpret_cast<char *> (base) + offset;
  return *reinterpret_cast<Type *> (p);
}
69 
70 /* StructAfter<T>(X) returns the struct T& that is placed after X.
71  * Works with X of variable size also.  X must implement get_size() */
/* StructAfter<T>(X) returns the struct T& that is placed after X.
 * Works with X of variable size also.  X must implement get_size() */
template<typename Type, typename TObject>
static inline const Type& StructAfter (const TObject &obj)
{
  const char *p = reinterpret_cast<const char *> (&obj) + obj.get_size ();
  return *reinterpret_cast<const Type *> (p);
}
template<typename Type, typename TObject>
static inline Type& StructAfter (TObject &obj)
{
  char *p = reinterpret_cast<char *> (&obj) + obj.get_size ();
  return *reinterpret_cast<Type *> (p);
}
78 
79 
80 
81 /*
82  * Size checking
83  */
84 
/* Check _assertion in a method environment */
#define _DEFINE_INSTANCE_ASSERTION1(_line, _assertion) \
  inline void _instance_assertion_on_line_##_line (void) const \
  { \
    static_assert ((_assertion), ""); \
    ASSERT_INSTANCE_POD (*this); /* Make sure it's POD. */ \
  }
/* Two-level expansion so __LINE__ is expanded *before* token-pasting,
 * giving each assertion method a unique name per use site. */
# define _DEFINE_INSTANCE_ASSERTION0(_line, _assertion) _DEFINE_INSTANCE_ASSERTION1 (_line, _assertion)
# define DEFINE_INSTANCE_ASSERTION(_assertion) _DEFINE_INSTANCE_ASSERTION0 (__LINE__, _assertion)

/* Check that _code compiles in a method environment */
#define _DEFINE_COMPILES_ASSERTION1(_line, _code) \
  inline void _compiles_assertion_on_line_##_line (void) const \
  { _code; }
# define _DEFINE_COMPILES_ASSERTION0(_line, _code) _DEFINE_COMPILES_ASSERTION1 (_line, _code)
# define DEFINE_COMPILES_ASSERTION(_code) _DEFINE_COMPILES_ASSERTION0 (__LINE__, _code)


/* Fixed-size struct: size is a compile-time constant and get_size() is trivial. */
#define DEFINE_SIZE_STATIC(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size)); \
  static const unsigned int static_size = (size); \
  static const unsigned int min_size = (size); \
  inline unsigned int get_size (void) const { return (size); }

/* Union: assert the named member fills the whole union.  The 0*sizeof term
 * only verifies the member *has* a static_size without affecting the sum. */
#define DEFINE_SIZE_UNION(size, _member) \
  DEFINE_INSTANCE_ASSERTION (0*sizeof(this->u._member.static_size) + sizeof(this->u._member) == (size)); \
  static const unsigned int min_size = (size)

/* Variable-sized struct with a known minimum (header) size. */
#define DEFINE_SIZE_MIN(size) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) >= (size)); \
  static const unsigned int min_size = (size)

/* Fixed header of `size` bytes followed by one variable-length array.
 * The compiles-assertion ensures array elements are fixed-size themselves. */
#define DEFINE_SIZE_ARRAY(size, array) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (array[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array[0].static_size) \
  static const unsigned int min_size = (size)

/* Fixed header followed by two variable-length arrays. */
#define DEFINE_SIZE_ARRAY2(size, array1, array2) \
  DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + sizeof (this->array1[0]) + sizeof (this->array2[0])); \
  DEFINE_COMPILES_ASSERTION ((void) array1[0].static_size; (void) array2[0].static_size) \
  static const unsigned int min_size = (size)
126 
127 
128 
129 /*
130  * Null objects
131  */
132 
/* Global nul-content Null pool.  Enlarge as necessary. */

#define HB_NULL_POOL_SIZE 264
static_assert (HB_NULL_POOL_SIZE % sizeof (void *) == 0, "Align HB_NULL_POOL_SIZE.");

/* Without visibility support the pool is defined (zero-filled) separately in
 * each translation unit; otherwise it is defined once inside the library. */
#ifdef HB_NO_VISIBILITY
static
#else
extern HB_INTERNAL
#endif
const void * const _hb_NullPool[HB_NULL_POOL_SIZE / sizeof (void *)]
#ifdef HB_NO_VISIBILITY
= {}
#endif
;

/* Generic nul-content Null objects. */
/* Returns a shared, all-zero-bytes instance of Type backed by the pool
 * above.  Type must fit within HB_NULL_POOL_SIZE (enforced at compile time). */
template <typename Type>
static inline const Type& Null (void) {
  static_assert (sizeof (Type) <= HB_NULL_POOL_SIZE, "Increase HB_NULL_POOL_SIZE.");
  return *CastP<Type> (_hb_NullPool);
}

/* Specialization for arbitrary-content arbitrary-sized Null objects. */
#define DEFINE_NULL_DATA(Type, data) \
static const char _Null##Type[sizeof (Type) + 1] = data; /* +1 is for nul-termination in data */ \
template <> \
/*static*/ inline const Type& Null<Type> (void) { \
  return *CastP<Type> (_Null##Type); \
} /* The following line really exists such that we end in a place needing semicolon */ \
static_assert (Type::min_size + 1 <= sizeof (_Null##Type), "Null pool too small.  Enlarge.")

/* Accessor macro. */
#define Null(Type) Null<Type>()
167 
168 
169 /*
170  * Dispatch
171  */
172 
/* Base for dispatch contexts (sanitize, serialize, ...).  `Context` is the
 * derived context type (CRTP-style), `Return` its per-dispatch result type. */
template <typename Context, typename Return, unsigned int MaxDebugDepth>
struct hb_dispatch_context_t
{
  static const unsigned int max_debug_depth = MaxDebugDepth;
  typedef Return return_t;
  /* Whether dispatch may proceed for obj given its format selector; the base
   * always says yes — derived contexts (e.g. sanitize) shadow this. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format) { return true; }
  /* Result used when dispatch is skipped; defers to the derived context. */
  static return_t no_dispatch_return_value (void) { return Context::default_return_value (); }
};
182 
183 
184 /*
185  * Sanitize
186  */
187 
/* This limits sanitizing time on really broken fonts. */
#ifndef HB_SANITIZE_MAX_EDITS
#define HB_SANITIZE_MAX_EDITS 32
#endif
/* Operation budget: blob size (bytes) multiplied by this factor... */
#ifndef HB_SANITIZE_MAX_OPS_FACTOR
#define HB_SANITIZE_MAX_OPS_FACTOR 8
#endif
/* ...but never fewer than this many operations, so tiny blobs still
 * get a workable budget. */
#ifndef HB_SANITIZE_MAX_OPS_MIN
#define HB_SANITIZE_MAX_OPS_MIN 16384
#endif
198 
/* Sanitization context: bounds-checks structures inside a font blob before
 * the rest of the library trusts them.  return_t is bool: "object is safe".
 * Lifecycle: init(blob) -> start_processing() -> sanitize traversal ->
 * end_processing(). */
struct hb_sanitize_context_t :
       hb_dispatch_context_t<hb_sanitize_context_t, bool, HB_DEBUG_SANITIZE>
{
  inline hb_sanitize_context_t (void) :
        debug_depth (0),
        start (nullptr), end (nullptr),
        writable (false), edit_count (0), max_ops (0),
        blob (nullptr),
        num_glyphs (0) {}

  inline const char *get_name (void) { return "SANITIZE"; }
  /* Only descend into a subtable after its format field itself sanitizes. */
  template <typename T, typename F>
  inline bool may_dispatch (const T *obj, const F *format)
  { return format->sanitize (this); }
  template <typename T>
  inline return_t dispatch (const T &obj) { return obj.sanitize (this); }
  static return_t default_return_value (void) { return true; }
  static return_t no_dispatch_return_value (void) { return false; }
  /* One failed sublookup fails the whole iteration. */
  bool stop_sublookup_iteration (const return_t r) const { return !r; }

  /* Take a reference on the blob; pointers are set up in start_processing(). */
  inline void init (hb_blob_t *b)
  {
    this->blob = hb_blob_reference (b);
    this->writable = false;
  }

  inline void start_processing (void)
  {
    this->start = hb_blob_get_data (this->blob, nullptr);
    this->end = this->start + hb_blob_get_length (this->blob);
    assert (this->start <= this->end); /* Must not overflow. */
    /* Scale the operation budget with blob size so a huge broken font
     * cannot make sanitization effectively unbounded. */
    this->max_ops = MAX ((unsigned int) (this->end - this->start) * HB_SANITIZE_MAX_OPS_FACTOR,
			 (unsigned) HB_SANITIZE_MAX_OPS_MIN);
    this->edit_count = 0;
    this->debug_depth = 0;

    DEBUG_MSG_LEVEL (SANITIZE, start, 0, +1,
		     "start [%p..%p] (%lu bytes)",
		     this->start, this->end,
		     (unsigned long) (this->end - this->start));
  }

  /* Release the blob reference and reset the data pointers. */
  inline void end_processing (void)
  {
    DEBUG_MSG_LEVEL (SANITIZE, this->start, 0, -1,
		     "end [%p..%p] %u edit requests",
		     this->start, this->end, this->edit_count);

    hb_blob_destroy (this->blob);
    this->blob = nullptr;
    this->start = this->end = nullptr;
  }

  /* True iff [base, base+len) lies inside the blob.  Each call consumes one
   * unit of the op budget (max_ops is mutable so this stays const); once the
   * budget is exhausted every range check fails, terminating traversal. */
  inline bool check_range (const void *base, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool ok = this->max_ops-- > 0 &&
	      this->start <= p &&
	      p <= this->end &&
	      (unsigned int) (this->end - p) >= len;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_range [%p..%p] (%d bytes) in [%p..%p] -> %s",
       p, p + len, len,
       this->start, this->end,
       ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  /* check_range for `len` records of record_size bytes, guarding the
   * len*record_size multiplication against unsigned overflow first. */
  inline bool check_array (const void *base, unsigned int record_size, unsigned int len) const
  {
    const char *p = (const char *) base;
    bool overflows = _hb_unsigned_int_mul_overflows (len, record_size);
    unsigned int array_size = record_size * len;
    bool ok = !overflows && this->check_range (base, array_size);

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "check_array [%p..%p] (%d*%d=%d bytes) in [%p..%p] -> %s",
       p, p + (record_size * len), record_size, len, (unsigned int) array_size,
       this->start, this->end,
       overflows ? "OVERFLOWS" : ok ? "OK" : "OUT-OF-RANGE");

    return likely (ok);
  }

  /* Shallow check: the object's minimum (header) size is in range. */
  template <typename Type>
  inline bool check_struct (const Type *obj) const
  {
    return likely (this->check_range (obj, obj->min_size));
  }

  /* Whether an in-place edit is permitted.  Caps total edits at
   * HB_SANITIZE_MAX_EDITS; counts the request even when writing is denied. */
  inline bool may_edit (const void *base HB_UNUSED, unsigned int len HB_UNUSED)
  {
    if (this->edit_count >= HB_SANITIZE_MAX_EDITS)
      return false;

    const char *p = (const char *) base;
    this->edit_count++;

    DEBUG_MSG_LEVEL (SANITIZE, p, this->debug_depth+1, 0,
       "may_edit(%u) [%p..%p] (%d bytes) in [%p..%p] -> %s",
       this->edit_count,
       p, p + len, len,
       this->start, this->end,
       this->writable ? "GRANTED" : "DENIED");

    return this->writable;
  }

  /* Set *obj = v if editing is allowed; the const_cast is sound only
   * because may_edit() verified the underlying blob is writable. */
  template <typename Type, typename ValueType>
  inline bool try_set (const Type *obj, const ValueType &v) {
    if (this->may_edit (obj, obj->static_size)) {
      const_cast<Type *> (obj)->set (v);
      return true;
    }
    return false;
  }

  mutable unsigned int debug_depth;
  const char *start, *end;   /* Blob data bounds; valid between start/end_processing. */
  bool writable;             /* Whether in-place edits may be applied. */
  unsigned int edit_count;   /* Edits requested in the current pass. */
  mutable int max_ops;       /* Remaining operation budget (decremented in const methods). */
  hb_blob_t *blob;           /* Referenced blob being sanitized. */
  unsigned int num_glyphs;   /* Glyph count supplied by the caller, for range checks. */
};
326 
327 
328 
329 /* Template to sanitize an object. */
/* Template to sanitize an object. */
template <typename Type>
struct Sanitizer
{
  inline Sanitizer (void) {}

  /* Sanitize `blob` as a Type.  Returns the blob when it passes (possibly
   * after in-place edits), or the empty blob (destroying the original
   * reference) on failure.  Runs up to two clean passes: a pass that still
   * requests edits on the second round is rejected as unstable. */
  inline hb_blob_t *sanitize (hb_blob_t *blob) {
    bool sane;

    /* TODO is_sane() stuff */

    c->init (blob);

  retry:
    DEBUG_MSG_FUNC (SANITIZE, c->start, "start");

    c->start_processing ();

    /* Empty/unreadable blob: nothing to check, hand it back as-is. */
    if (unlikely (!c->start)) {
      c->end_processing ();
      return blob;
    }

    Type *t = CastP<Type> (const_cast<char *> (c->start));

    sane = t->sanitize (c);
    if (sane) {
      if (c->edit_count) {
	DEBUG_MSG_FUNC (SANITIZE, c->start, "passed first round with %d edits; going for second round", c->edit_count);

        /* sanitize again to ensure no toe-stepping */
        c->edit_count = 0;
	sane = t->sanitize (c);
	if (c->edit_count) {
	  DEBUG_MSG_FUNC (SANITIZE, c->start, "requested %d edits in second round; FAILLING", c->edit_count);
	  sane = false;
	}
      }
    } else {
      unsigned int edit_count = c->edit_count;
      /* First pass failed but wanted edits on a read-only blob: obtain a
       * writable copy (may relocate the data) and run everything again. */
      if (edit_count && !c->writable) {
        c->start = hb_blob_get_data_writable (blob, nullptr);
	c->end = c->start + hb_blob_get_length (blob);

	if (c->start) {
	  c->writable = true;
	  /* ok, we made it writable by relocating.  try again */
	  DEBUG_MSG_FUNC (SANITIZE, c->start, "retry");
	  goto retry;
	}
      }
    }

    c->end_processing ();

    DEBUG_MSG_FUNC (SANITIZE, c->start, sane ? "PASSED" : "FAILED");
    if (sane)
      return blob;
    else {
      hb_blob_destroy (blob);
      return hb_blob_get_empty ();
    }
  }

  /* Make the blob immutable and return its data as a Type*, or the shared
   * Null(Type) when the blob has no data. */
  static const Type* lock_instance (hb_blob_t *blob) {
    hb_blob_make_immutable (blob);
    const char *base = hb_blob_get_data (blob, nullptr);
    return unlikely (!base) ? &Null(Type) : CastP<Type> (base);
  }

  inline void set_num_glyphs (unsigned int num_glyphs) { c->num_glyphs = num_glyphs; }

  private:
  hb_sanitize_context_t c[1]; /* Array-of-one so `c` decays to a pointer. */
};
404 
405 
406 
407 /*
408  * Serialize
409  */
410 
411 
412 struct hb_serialize_context_t
413 {
hb_serialize_context_tOT::hb_serialize_context_t414   inline hb_serialize_context_t (void *start_, unsigned int size)
415   {
416     this->start = (char *) start_;
417     this->end = this->start + size;
418 
419     this->ran_out_of_room = false;
420     this->head = this->start;
421     this->debug_depth = 0;
422   }
423 
424   template <typename Type>
start_serializeOT::hb_serialize_context_t425   inline Type *start_serialize (void)
426   {
427     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
428 		     "start [%p..%p] (%lu bytes)",
429 		     this->start, this->end,
430 		     (unsigned long) (this->end - this->start));
431 
432     return start_embed<Type> ();
433   }
434 
end_serializeOT::hb_serialize_context_t435   inline void end_serialize (void)
436   {
437     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
438 		     "end [%p..%p] serialized %d bytes; %s",
439 		     this->start, this->end,
440 		     (int) (this->head - this->start),
441 		     this->ran_out_of_room ? "RAN OUT OF ROOM" : "did not ran out of room");
442 
443   }
444 
445   template <typename Type>
copyOT::hb_serialize_context_t446   inline Type *copy (void)
447   {
448     assert (!this->ran_out_of_room);
449     unsigned int len = this->head - this->start;
450     void *p = malloc (len);
451     if (p)
452       memcpy (p, this->start, len);
453     return reinterpret_cast<Type *> (p);
454   }
455 
456   template <typename Type>
allocate_sizeOT::hb_serialize_context_t457   inline Type *allocate_size (unsigned int size)
458   {
459     if (unlikely (this->ran_out_of_room || this->end - this->head < ptrdiff_t (size))) {
460       this->ran_out_of_room = true;
461       return nullptr;
462     }
463     memset (this->head, 0, size);
464     char *ret = this->head;
465     this->head += size;
466     return reinterpret_cast<Type *> (ret);
467   }
468 
469   template <typename Type>
allocate_minOT::hb_serialize_context_t470   inline Type *allocate_min (void)
471   {
472     return this->allocate_size<Type> (Type::min_size);
473   }
474 
475   template <typename Type>
start_embedOT::hb_serialize_context_t476   inline Type *start_embed (void)
477   {
478     Type *ret = reinterpret_cast<Type *> (this->head);
479     return ret;
480   }
481 
482   template <typename Type>
embedOT::hb_serialize_context_t483   inline Type *embed (const Type &obj)
484   {
485     unsigned int size = obj.get_size ();
486     Type *ret = this->allocate_size<Type> (size);
487     if (unlikely (!ret)) return nullptr;
488     memcpy (ret, obj, size);
489     return ret;
490   }
491 
492   template <typename Type>
extend_minOT::hb_serialize_context_t493   inline Type *extend_min (Type &obj)
494   {
495     unsigned int size = obj.min_size;
496     assert (this->start <= (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
497     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
498     return reinterpret_cast<Type *> (&obj);
499   }
500 
501   template <typename Type>
extendOT::hb_serialize_context_t502   inline Type *extend (Type &obj)
503   {
504     unsigned int size = obj.get_size ();
505     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
506     if (unlikely (!this->allocate_size<Type> (((char *) &obj) + size - this->head))) return nullptr;
507     return reinterpret_cast<Type *> (&obj);
508   }
509 
truncateOT::hb_serialize_context_t510   inline void truncate (void *new_head)
511   {
512     assert (this->start < new_head && new_head <= this->head);
513     this->head = (char *) new_head;
514   }
515 
516   unsigned int debug_depth;
517   char *start, *end, *head;
518   bool ran_out_of_room;
519 };
520 
/* Read-only, strided stream of Type items consumed during serialization. */
template <typename Type>
struct Supplier
{
  /* View over `len_` items starting at `array`, one every `stride_` bytes
   * (defaults to tightly packed). */
  inline Supplier (const Type *array, unsigned int len_, unsigned int stride_=sizeof(Type)) :
    len (len_), stride (stride_), head (array) {}

  /* Item at index i; a default-constructed Type when out of range. */
  inline const Type operator [] (unsigned int i) const
  {
    if (i >= len)
      return Type ();
    const char *base = reinterpret_cast<const char *> (head);
    return *reinterpret_cast<const Type *> (base + stride * i);
  }

  /* Consume `count` items, clamped to what remains. */
  inline Supplier<Type> & operator += (unsigned int count)
  {
    unsigned int n = count <= len ? count : len;
    len -= n;
    head = reinterpret_cast<const Type *> (reinterpret_cast<const char *> (head) + stride * n);
    return *this;
  }

  private:
  inline Supplier (const Supplier<Type> &); /* Disallow copy */
  inline Supplier<Type>& operator= (const Supplier<Type> &); /* Disallow copy */

  unsigned int len;
  unsigned int stride;
  const Type *head;
};
553 
554 
555 /*
556  *
557  * The OpenType Font File: Data Types
558  */
559 
560 
561 /* "The following data types are used in the OpenType font file.
562  *  All OpenType fonts use Motorola-style byte ordering (Big Endian):" */
563 
564 /*
565  * Int types
566  */
567 
568 
/* Big-endian integer backing store: BEInt<Type, Bytes> keeps a Type value
 * as exactly Bytes bytes, most-significant byte first, with no alignment
 * requirement.  Only the 1/2/3/4-byte specializations below exist. */
template <typename Type, int Bytes> struct BEInt;

template <typename Type>
struct BEInt<Type, 1>
{
  public:
  inline void set (Type V)
  {
    v = V;
  }
  inline operator Type (void) const
  {
    return v;
  }
  private: uint8_t v;
};
template <typename Type>
struct BEInt<Type, 2>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >>  8) & 0xFF;
    v[1] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] <<  8)
         + (v[1]      );
  }
  private: uint8_t v[2];
};
template <typename Type>
struct BEInt<Type, 3>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 16) & 0xFF;
    v[1] = (V >>  8) & 0xFF;
    v[2] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    return (v[0] << 16)
         + (v[1] <<  8)
         + (v[2]      );
  }
  private: uint8_t v[3];
};
template <typename Type>
struct BEInt<Type, 4>
{
  public:
  inline void set (Type V)
  {
    v[0] = (V >> 24) & 0xFF;
    v[1] = (V >> 16) & 0xFF;
    v[2] = (V >>  8) & 0xFF;
    v[3] = (V      ) & 0xFF;
  }
  inline operator Type (void) const
  {
    /* Fix: widen each byte to uint32_t before shifting.  The previous
     * `v[0] << 24` shifted an int-promoted byte into the sign bit whenever
     * v[0] >= 0x80, which is undefined behavior for signed left shift;
     * unsigned arithmetic is well-defined and yields the same bytes. */
    return ((uint32_t) v[0] << 24)
         + ((uint32_t) v[1] << 16)
         + ((uint32_t) v[2] <<  8)
         + ((uint32_t) v[3]      );
  }
  private: uint8_t v[4];
};
639 
/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
  inline void set (Type i) { v.set (i); }
  inline operator Type(void) const { return v; }
  inline bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
  inline bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
  /* qsort-style two-pointer comparator.  Note it deliberately swaps the
   * operands and delegates to the member cmp() below. */
  static inline int cmp (const IntType<Type,Size> *a, const IntType<Type,Size> *b) { return b->cmp (*a); }
  /* Three-way compare of a against the stored value. */
  template <typename Type2>
  inline int cmp (Type2 a) const
  {
    Type b = v;
    /* When both operand types are narrower than int, plain subtraction
     * cannot overflow; otherwise fall back to explicit comparisons. */
    if (sizeof (Type) < sizeof (int) && sizeof (Type2) < sizeof (int))
      return (int) a - (int) b;
    else
      return a < b ? -1 : a == b ? 0 : +1;
  }
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  BEInt<Type, Size> v;  /* Big-endian storage; Size may be smaller than sizeof(Type) (e.g. UINT24). */
  public:
  DEFINE_SIZE_STATIC (Size);
};

typedef IntType<uint8_t,  1> HBUINT8;	/* 8-bit unsigned integer. */
typedef IntType<int8_t,   1> HBINT8;	/* 8-bit signed integer. */
typedef IntType<uint16_t, 2> HBUINT16;	/* 16-bit unsigned integer. */
typedef IntType<int16_t,  2> HBINT16;	/* 16-bit signed integer. */
typedef IntType<uint32_t, 4> HBUINT32;	/* 32-bit unsigned integer. */
typedef IntType<int32_t,  4> HBINT32;	/* 32-bit signed integer. */
typedef IntType<uint32_t, 3> UINT24;	/* 24-bit unsigned integer. */
682 
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : HBINT16
{
  /* Float conversions not implemented yet; raw 2.14 bits are accessible
   * through the HBINT16 base. */
  //inline float to_float (void) const { return ???; }
  //inline void set_float (float f) { v.set (f * ???; }
  public:
  DEFINE_SIZE_STATIC (2);
};

/* 32-bit signed fixed-point number (16.16). */
struct Fixed: HBINT32
{
  /* Value is the raw 16.16 bits divided by 65536. */
  inline float to_float (void) const { return ((int32_t) v) / 65536.0; }
  /* Rounds to the nearest representable 16.16 value. */
  inline void set_float (float f) { v.set (round (f * 65536.0)); }
  public:
  DEFINE_SIZE_STATIC (4);
};
700 
/* Date represented in number of seconds since 12:00 midnight, January 1,
 * 1904. The value is represented as a signed 64-bit integer. */
struct LONGDATETIME
{
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (likely (c->check_struct (this)));
  }
  protected:
  /* Stored as two big-endian 32-bit halves: high (signed) then low. */
  HBINT32 major;
  HBUINT32 minor;
  public:
  DEFINE_SIZE_STATIC (8);
};
716 
/* Array of four uint8s (length = 32 bits) used to identify a script, language
 * system, feature, or baseline */
struct Tag : HBUINT32
{
  /* What the char* converters return is NOT nul-terminated.  Print using "%.4s" */
  inline operator const char* (void) const { return reinterpret_cast<const char *> (&this->v); }
  inline operator char* (void) { return reinterpret_cast<char *> (&this->v); }
  public:
  DEFINE_SIZE_STATIC (4);
};
/* The Null Tag is four spaces. */
DEFINE_NULL_DATA (Tag, "    ");

/* Glyph index number, same as uint16 (length = 16 bits) */
typedef HBUINT16 GlyphID;

/* Script/language-system/feature index */
struct Index : HBUINT16 {
  static const unsigned int NOT_FOUND_INDEX = 0xFFFFu;
};
/* The Null Index is NOT_FOUND_INDEX (0xFFFF). */
DEFINE_NULL_DATA (Index, "\xff\xff");
737 
/* Offset, Null offset = 0 */
template <typename Type>
struct Offset : Type
{
  inline bool is_null (void) const { return 0 == *this; }

  /* Record the distance from `base` to the serializer's current head as this
   * offset's value, and return the head for the caller to write the target
   * object into. */
  inline void *serialize (hb_serialize_context_t *c, const void *base)
  {
    void *t = c->start_embed<void> ();
    this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
    return t;
  }

  public:
  DEFINE_SIZE_STATIC (sizeof(Type));
};

typedef Offset<HBUINT16> Offset16;
typedef Offset<HBUINT32> Offset32;
757 
758 
/* CheckSum */
struct CheckSum : HBUINT32
{
  /* This is reference implementation from the spec. */
  /* Sums Length bytes of Table as big-endian uint32s.  Length is in bytes
   * and must be a multiple of 4 (asserted); wrap-around on overflow is the
   * intended modulo-2^32 checksum arithmetic. */
  static inline uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
  {
    uint32_t Sum = 0L;
    assert (0 == (Length & 3));
    const HBUINT32 *EndPtr = Table + Length / HBUINT32::static_size;

    while (Table < EndPtr)
      Sum += *Table++;
    return Sum;
  }

  /* Note: data should be 4byte aligned and have 4byte padding at the end. */
  inline void set_for_data (const void *data, unsigned int length)
  { set (CalcTableChecksum ((const HBUINT32 *) data, length)); }

  public:
  DEFINE_SIZE_STATIC (4);
};
781 
782 
/*
 * Version Numbers
 */

/* major.minor version pair; FixedType selects the field width (16-bit by
 * default, as in most OpenType table headers). */
template <typename FixedType=HBUINT16>
struct FixedVersion
{
  /* Pack major.minor into a single integer: major in the high half. */
  inline uint32_t to_int (void) const { return (major << (sizeof(FixedType) * 8)) + minor; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  FixedType major;
  FixedType minor;
  public:
  DEFINE_SIZE_STATIC (2 * sizeof(FixedType));
};
803 
804 
805 
/*
 * Template subclasses of Offset that do the dereferencing.
 * Use: (base+offset)
 */

template <typename Type, typename OffsetType=HBUINT16>
struct OffsetTo : Offset<OffsetType>
{
  /* Dereference: the Type at base+offset, or Null(Type) for a zero offset. */
  inline const Type& operator () (const void *base) const
  {
    unsigned int offset = *this;
    if (unlikely (!offset)) return Null(Type);
    return StructAtOffset<Type> (base, offset);
  }

  /* Reserve the target at the serializer head and point this offset at it. */
  inline Type& serialize (hb_serialize_context_t *c, const void *base)
  {
    return * (Type *) Offset<OffsetType>::serialize (c, base);
  }

  /* Validate the offset and the object it points at.  A zero offset is
   * valid (Null).  If the target fails, try to neuter (zero) the offset
   * instead of failing the whole parent. */
  inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c)) || neuter (c));
  }
  /* Same, forwarding user_data to the target's sanitize(). */
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!c->check_struct (this))) return_trace (false);
    unsigned int offset = *this;
    if (unlikely (!offset)) return_trace (true);
    if (unlikely (!c->check_range (base, offset))) return_trace (false);
    const Type &obj = StructAtOffset<Type> (base, offset);
    return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
  }

  /* Set the offset to Null */
  inline bool neuter (hb_sanitize_context_t *c) const {
    return c->try_set (this, 0);
  }
  DEFINE_SIZE_STATIC (sizeof(OffsetType));
};
/* 32-bit-offset variant. */
template <typename Type> struct LOffsetTo : OffsetTo<Type, HBUINT32> {};
/* Convenience operators: (base+offset) dereferences offset against base. */
template <typename Base, typename OffsetType, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType> &offset) { return offset (base); }
template <typename Base, typename OffsetType, typename Type>
static inline Type& operator + (Base &base, OffsetTo<Type, OffsetType> &offset) { return offset (base); }
859 
860 
861 /*
862  * Array Types
863  */
864 
865 /* An array with a number of elements. */
866 template <typename Type, typename LenType=HBUINT16>
867 struct ArrayOf
868 {
sub_arrayOT::ArrayOf869   const Type *sub_array (unsigned int start_offset, unsigned int *pcount /* IN/OUT */) const
870   {
871     unsigned int count = len;
872     if (unlikely (start_offset > count))
873       count = 0;
874     else
875       count -= start_offset;
876     count = MIN (count, *pcount);
877     *pcount = count;
878     return array + start_offset;
879   }
880 
operator []OT::ArrayOf881   inline const Type& operator [] (unsigned int i) const
882   {
883     if (unlikely (i >= len)) return Null(Type);
884     return array[i];
885   }
operator []OT::ArrayOf886   inline Type& operator [] (unsigned int i)
887   {
888     return array[i];
889   }
get_sizeOT::ArrayOf890   inline unsigned int get_size (void) const
891   { return len.static_size + len * Type::static_size; }
892 
serializeOT::ArrayOf893   inline bool serialize (hb_serialize_context_t *c,
894 			 unsigned int items_len)
895   {
896     TRACE_SERIALIZE (this);
897     if (unlikely (!c->extend_min (*this))) return_trace (false);
898     len.set (items_len); /* TODO(serialize) Overflow? */
899     if (unlikely (!c->extend (*this))) return_trace (false);
900     return_trace (true);
901   }
902 
serializeOT::ArrayOf903   inline bool serialize (hb_serialize_context_t *c,
904 			 Supplier<Type> &items,
905 			 unsigned int items_len)
906   {
907     TRACE_SERIALIZE (this);
908     if (unlikely (!serialize (c, items_len))) return_trace (false);
909     for (unsigned int i = 0; i < items_len; i++)
910       array[i] = items[i];
911     items += items_len;
912     return_trace (true);
913   }
914 
sanitizeOT::ArrayOf915   inline bool sanitize (hb_sanitize_context_t *c) const
916   {
917     TRACE_SANITIZE (this);
918     if (unlikely (!sanitize_shallow (c))) return_trace (false);
919 
920     /* Note: for structs that do not reference other structs,
921      * we do not need to call their sanitize() as we already did
922      * a bound check on the aggregate array size.  We just include
923      * a small unreachable expression to make sure the structs
924      * pointed to do have a simple sanitize(), ie. they do not
925      * reference other structs via offsets.
926      */
927     (void) (false && array[0].sanitize (c));
928 
929     return_trace (true);
930   }
sanitizeOT::ArrayOf931   inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
932   {
933     TRACE_SANITIZE (this);
934     if (unlikely (!sanitize_shallow (c))) return_trace (false);
935     unsigned int count = len;
936     for (unsigned int i = 0; i < count; i++)
937       if (unlikely (!array[i].sanitize (c, base)))
938         return_trace (false);
939     return_trace (true);
940   }
941   template <typename T>
sanitizeOT::ArrayOf942   inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
943   {
944     TRACE_SANITIZE (this);
945     if (unlikely (!sanitize_shallow (c))) return_trace (false);
946     unsigned int count = len;
947     for (unsigned int i = 0; i < count; i++)
948       if (unlikely (!array[i].sanitize (c, base, user_data)))
949         return_trace (false);
950     return_trace (true);
951   }
952 
953   template <typename SearchType>
lsearchOT::ArrayOf954   inline int lsearch (const SearchType &x) const
955   {
956     unsigned int count = len;
957     for (unsigned int i = 0; i < count; i++)
958       if (!this->array[i].cmp (x))
959         return i;
960     return -1;
961   }
962 
qsortOT::ArrayOf963   inline void qsort (void)
964   {
965     ::qsort (array, len, sizeof (Type), Type::cmp);
966   }
967 
968   private:
sanitize_shallowOT::ArrayOf969   inline bool sanitize_shallow (hb_sanitize_context_t *c) const
970   {
971     TRACE_SANITIZE (this);
972     return_trace (len.sanitize (c) && c->check_array (array, Type::static_size, len));
973   }
974 
975   public:
976   LenType len;
977   Type array[VAR];
978   public:
979   DEFINE_SIZE_ARRAY (sizeof (LenType), array);
980 };
981 template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {};
982 
/* Array of Offset's: each element is an OffsetTo<Type> that still needs a
 * base (via the deep-sanitize/operator+ machinery) to be dereferenced. */
template <typename Type, typename OffsetType=HBUINT16>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, OffsetType> > {};
986 
/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
struct OffsetListOf : OffsetArrayOf<Type>
{
  /* Dereferences the i'th offset using this array as the base;
   * out-of-range returns the shared Null object. */
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= this->len)) return Null(Type);
    return this+this->array[i];
  }

  /* Deep-sanitizes each offset target with this array as the offset base. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this));
  }
  /* Same, forwarding extra user_data to each target's sanitize(). */
  template <typename T>
  inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
  {
    TRACE_SANITIZE (this);
    return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
  }
};
1009 
1010 
/* An array starting at second element: `len` counts an implicit head
 * element stored elsewhere, so only elements 1..len-1 live in array[]. */
template <typename Type, typename LenType=HBUINT16>
struct HeadlessArrayOf
{
  /* 1-based indexing: index 0 (the head, not stored here) and anything
   * >= len returns the shared Null object. */
  inline const Type& operator [] (unsigned int i) const
  {
    if (unlikely (i >= len || !i)) return Null(Type);
    return array[i-1];
  }
  /* Size in bytes: the length field plus the len-1 stored elements. */
  inline unsigned int get_size (void) const
  { return len.static_size + (len ? len - 1 : 0) * Type::static_size; }

  /* Serializes items_len - 1 elements from the supplier (the head element
   * is written by the caller elsewhere), then advances the supplier. */
  inline bool serialize (hb_serialize_context_t *c,
			 Supplier<Type> &items,
			 unsigned int items_len)
  {
    TRACE_SERIALIZE (this);
    if (unlikely (!c->extend_min (*this))) return_trace (false);
    len.set (items_len); /* TODO(serialize) Overflow? */
    if (unlikely (!items_len)) return_trace (true);
    if (unlikely (!c->extend (*this))) return_trace (false);
    for (unsigned int i = 0; i < items_len - 1; i++)
      array[i] = items[i];
    items += items_len - 1;
    return_trace (true);
  }

  /* Shallow sanitize; see ArrayOf::sanitize for the rationale below. */
  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    if (unlikely (!sanitize_shallow (c))) return_trace (false);

    /* Note: for structs that do not reference other structs,
     * we do not need to call their sanitize() as we already did
     * a bound check on the aggregate array size.  We just include
     * a small unreachable expression to make sure the structs
     * pointed to do have a simple sanitize(), ie. they do not
     * reference other structs via offsets.
     */
    (void) (false && array[0].sanitize (c));

    return_trace (true);
  }

  private:
  /* Validates the length field and the len-1 stored elements' bounds. */
  inline bool sanitize_shallow (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (len.sanitize (c) &&
		  (!len || c->check_array (array, Type::static_size, len - 1)));
  }

  public:
  LenType len;
  Type array[VAR]; /* Variable-sized tail; stores len-1 elements. */
  public:
  DEFINE_SIZE_ARRAY (sizeof (LenType), array);
};
1069 
1070 
1071 /*
1072  * An array with sorted elements.  Supports binary searching.
1073  */
1074 template <typename Type, typename LenType=HBUINT16>
1075 struct SortedArrayOf : ArrayOf<Type, LenType>
1076 {
1077   template <typename SearchType>
bsearchOT::SortedArrayOf1078   inline int bsearch (const SearchType &x) const
1079   {
1080     /* Hand-coded bsearch here since this is in the hot inner loop. */
1081     const Type *arr = this->array;
1082     int min = 0, max = (int) this->len - 1;
1083     while (min <= max)
1084     {
1085       int mid = (min + max) / 2;
1086       int c = arr[mid].cmp (x);
1087       if (c < 0)
1088         max = mid - 1;
1089       else if (c > 0)
1090         min = mid + 1;
1091       else
1092         return mid;
1093     }
1094     return -1;
1095   }
1096 };
1097 
/*
 * Binary-search arrays
 */

struct BinSearchHeader
{
  /* Acts as the "length" of a BinSearchArrayOf. */
  inline operator uint32_t (void) const { return len; }

  inline bool sanitize (hb_sanitize_context_t *c) const
  {
    TRACE_SANITIZE (this);
    return_trace (c->check_struct (this));
  }

  /* Fills in the derived binary-search fields for v entries of 16-byte
   * units, following the OpenType binary-search header derivation:
   *   entrySelector = floor(log2(v))        (0 when v is 0 or 1)
   *   searchRange   = 16 * 2^entrySelector
   *   rangeShift    = 16*v - searchRange    (clamped at 0)
   */
  inline void set (unsigned int v)
  {
    len.set (v);
    assert (len == v); /* Catch truncation of v into the 16-bit field. */
    entrySelectorZ.set (MAX (1u, _hb_bit_storage (v)) - 1);
    searchRangeZ.set (16 * (1u << entrySelectorZ));
    rangeShiftZ.set (v * 16 > searchRangeZ
                     ? 16 * v - searchRangeZ
                     : 0);
  }

  protected:
  HBUINT16	len;
  /* NOTE(review): the trailing Z presumably marks derived fields that are
   * written but never read back by this code — confirm project convention. */
  HBUINT16	searchRangeZ;
  HBUINT16	entrySelectorZ;
  HBUINT16	rangeShiftZ;

  public:
  DEFINE_SIZE_STATIC (8);
};
1132 
/* Sorted array whose "length" is a full BinSearchHeader (count plus the
 * derived searchRange/entrySelector/rangeShift fields). */
template <typename Type>
struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader> {};
1135 
1136 
1137 /* Lazy struct and blob loaders. */
1138 
1139 /* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
1140 template <typename T>
1141 struct hb_lazy_loader_t
1142 {
initOT::hb_lazy_loader_t1143   inline void init (hb_face_t *face_)
1144   {
1145     face = face_;
1146     instance = nullptr;
1147   }
1148 
finiOT::hb_lazy_loader_t1149   inline void fini (void)
1150   {
1151     if (instance && instance != &OT::Null(T))
1152     {
1153       instance->fini();
1154       free (instance);
1155     }
1156   }
1157 
getOT::hb_lazy_loader_t1158   inline const T* get (void) const
1159   {
1160   retry:
1161     T *p = (T *) hb_atomic_ptr_get (&instance);
1162     if (unlikely (!p))
1163     {
1164       p = (T *) calloc (1, sizeof (T));
1165       if (unlikely (!p))
1166         p = const_cast<T *> (&OT::Null(T));
1167       else
1168 	p->init (face);
1169       if (unlikely (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p)))
1170       {
1171 	if (p != &OT::Null(T))
1172 	  p->fini ();
1173 	goto retry;
1174       }
1175     }
1176     return p;
1177   }
1178 
operator ->OT::hb_lazy_loader_t1179   inline const T* operator-> (void) const
1180   {
1181     return get ();
1182   }
1183 
1184   private:
1185   hb_face_t *face;
1186   T *instance;
1187 };
1188 
/* Logic is shared between hb_lazy_loader_t and hb_lazy_table_loader_t */
/* Lazily loads, sanitizes, and caches one font table T (keyed by
 * T::tableTag) from the face.  The instance's lifetime is tied to `blob`. */
template <typename T>
struct hb_lazy_table_loader_t
{
  inline void init (hb_face_t *face_)
  {
    face = face_;
    blob = nullptr;
    instance = nullptr;
  }

  inline void fini (void)
  {
    /* Destroying the blob releases the table data; `instance` points into
     * the blob and needs no separate teardown. */
    hb_blob_destroy (blob);
  }

  /* Returns the sanitized table, loading it on first use.  Publication is
   * race-safe via compare-and-swap; a losing thread destroys its blob and
   * re-reads the winner's instance. */
  inline const T* get (void) const
  {
  retry:
    T *p = (T *) hb_atomic_ptr_get (&instance);
    if (unlikely (!p))
    {
      hb_blob_t *blob_ = OT::Sanitizer<T>().sanitize (face->reference_table (T::tableTag));
      p = const_cast<T *>(OT::Sanitizer<T>::lock_instance (blob_));
      if (!hb_atomic_ptr_cmpexch (const_cast<T **>(&instance), nullptr, p))
      {
	hb_blob_destroy (blob_);
	goto retry;
      }
      /* NOTE(review): `blob` is stored after `instance` is published, so a
       * concurrent fini() could run before it is set — presumably callers
       * never overlap fini() with get(); confirm. */
      blob = blob_;
    }
    return p;
  }

  inline const T* operator-> (void) const
  {
    return get();
  }

  hb_face_t *face;
  mutable hb_blob_t *blob;
  private:
  mutable T *instance;
};
1233 
1234 
1235 } /* namespace OT */
1236 
1237 
1238 #endif /* HB_OPEN_TYPE_PRIVATE_HH */
1239