/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012,2018  Google, Inc.
 * Copyright © 2019  Facebook, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 * Facebook Author(s): Behdad Esfahbod
 */

#ifndef HB_SERIALIZE_HH
#define HB_SERIALIZE_HH

#include "hb.hh"
#include "hb-blob.hh"
#include "hb-map.hh"
#include "hb-pool.hh"


/*
 * Serialize
 */

44 struct hb_serialize_context_t
45 {
46   typedef unsigned objidx_t;
47 
48   struct range_t
49   {
50     char *head, *tail;
51   };
52 
53   enum whence_t {
54      Head,	/* Relative to the current object head (default). */
55      Tail,	/* Relative to the current object tail after packed. */
56      Absolute	/* Absolute: from the start of the serialize buffer. */
57    };
58 
59   struct object_t : range_t
60   {
finihb_serialize_context_t::object_t61     void fini () { links.fini (); }
62 
operator ==hb_serialize_context_t::object_t63     bool operator == (const object_t &o) const
64     {
65       return (tail - head == o.tail - o.head)
66 	  && (links.length == o.links.length)
67 	  && 0 == hb_memcmp (head, o.head, tail - head)
68 	  && links.as_bytes () == o.links.as_bytes ();
69     }
hashhb_serialize_context_t::object_t70     uint32_t hash () const
71     {
72       return hb_bytes_t (head, tail - head).hash () ^
73 	     links.as_bytes ().hash ();
74     }
75 
76     struct link_t
77     {
78       bool is_wide: 1;
79       bool is_signed: 1;
80       unsigned whence: 2;
81       unsigned position: 28;
82       unsigned bias;
83       objidx_t objidx;
84     };
85 
86     hb_vector_t<link_t> links;
87     object_t *next;
88   };
89 
snapshothb_serialize_context_t90   range_t snapshot () { range_t s = {head, tail} ; return s; }
91 
92 
hb_serialize_context_thb_serialize_context_t93   hb_serialize_context_t (void *start_, unsigned int size) :
94     start ((char *) start_),
95     end (start + size),
96     current (nullptr)
97   { reset (); }
~hb_serialize_context_thb_serialize_context_t98   ~hb_serialize_context_t () { fini (); }
99 
finihb_serialize_context_t100   void fini ()
101   {
102     for (object_t *_ : ++hb_iter (packed)) _->fini ();
103     packed.fini ();
104     this->packed_map.fini ();
105 
106     while (current)
107     {
108       auto *_ = current;
109       current = current->next;
110       _->fini ();
111     }
112     object_pool.fini ();
113   }
114 
in_errorhb_serialize_context_t115   bool in_error () const { return !this->successful; }
116 
resethb_serialize_context_t117   void reset ()
118   {
119     this->successful = true;
120     this->ran_out_of_room = false;
121     this->head = this->start;
122     this->tail = this->end;
123     this->debug_depth = 0;
124 
125     fini ();
126     this->packed.push (nullptr);
127   }
128 
check_successhb_serialize_context_t129   bool check_success (bool success)
130   { return this->successful && (success || (err_other_error (), false)); }
131 
132   template <typename T1, typename T2>
check_equalhb_serialize_context_t133   bool check_equal (T1 &&v1, T2 &&v2)
134   { return check_success ((long long) v1 == (long long) v2); }
135 
136   template <typename T1, typename T2>
check_assignhb_serialize_context_t137   bool check_assign (T1 &v1, T2 &&v2)
138   { return check_equal (v1 = v2, v2); }
139 
propagate_errorhb_serialize_context_t140   template <typename T> bool propagate_error (T &&obj)
141   { return check_success (!hb_deref (obj).in_error ()); }
142 
propagate_errorhb_serialize_context_t143   template <typename T1, typename... Ts> bool propagate_error (T1 &&o1, Ts&&... os)
144   { return propagate_error (hb_forward<T1> (o1)) &&
145 	   propagate_error (hb_forward<Ts> (os)...); }
146 
147   /* To be called around main operation. */
148   template <typename Type>
start_serializehb_serialize_context_t149   Type *start_serialize ()
150   {
151     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
152 		     "start [%p..%p] (%lu bytes)",
153 		     this->start, this->end,
154 		     (unsigned long) (this->end - this->start));
155 
156     assert (!current);
157     return push<Type> ();
158   }
end_serializehb_serialize_context_t159   void end_serialize ()
160   {
161     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
162 		     "end [%p..%p] serialized %u bytes; %s",
163 		     this->start, this->end,
164 		     (unsigned) (this->head - this->start),
165 		     this->successful ? "successful" : "UNSUCCESSFUL");
166 
167     propagate_error (packed, packed_map);
168 
169     if (unlikely (!current)) return;
170     assert (!current->next);
171 
172     /* Only "pack" if there exist other objects... Otherwise, don't bother.
173      * Saves a move. */
174     if (packed.length <= 1)
175       return;
176 
177     pop_pack (false);
178 
179     resolve_links ();
180   }
181 
182   template <typename Type = void>
pushhb_serialize_context_t183   Type *push ()
184   {
185     object_t *obj = object_pool.alloc ();
186     if (unlikely (!obj))
187       check_success (false);
188     else
189     {
190       obj->head = head;
191       obj->tail = tail;
192       obj->next = current;
193       current = obj;
194     }
195     return start_embed<Type> ();
196   }
pop_discardhb_serialize_context_t197   void pop_discard ()
198   {
199     object_t *obj = current;
200     if (unlikely (!obj)) return;
201     current = current->next;
202     revert (*obj);
203     obj->fini ();
204     object_pool.free (obj);
205   }
206 
207   /* Set share to false when an object is unlikely sharable with others
208    * so not worth an attempt, or a contiguous table is serialized as
209    * multiple consecutive objects in the reverse order so can't be shared.
210    */
pop_packhb_serialize_context_t211   objidx_t pop_pack (bool share=true)
212   {
213     object_t *obj = current;
214     if (unlikely (!obj)) return 0;
215     current = current->next;
216     obj->tail = head;
217     obj->next = nullptr;
218     unsigned len = obj->tail - obj->head;
219     head = obj->head; /* Rewind head. */
220 
221     if (!len)
222     {
223       assert (!obj->links.length);
224       return 0;
225     }
226 
227     objidx_t objidx;
228     if (share)
229     {
230       objidx = packed_map.get (obj);
231       if (objidx)
232       {
233 	obj->fini ();
234 	return objidx;
235       }
236     }
237 
238     tail -= len;
239     memmove (tail, obj->head, len);
240 
241     obj->head = tail;
242     obj->tail = tail + len;
243 
244     packed.push (obj);
245 
246     if (unlikely (packed.in_error ()))
247       return 0;
248 
249     objidx = packed.length - 1;
250 
251     if (share) packed_map.set (obj, objidx);
252 
253     return objidx;
254   }
255 
reverthb_serialize_context_t256   void revert (range_t snap)
257   {
258     assert (snap.head <= head);
259     assert (tail <= snap.tail);
260     head = snap.head;
261     tail = snap.tail;
262     discard_stale_objects ();
263   }
264 
discard_stale_objectshb_serialize_context_t265   void discard_stale_objects ()
266   {
267     while (packed.length > 1 &&
268 	   packed.tail ()->head < tail)
269     {
270       packed_map.del (packed.tail ());
271       assert (!packed.tail ()->next);
272       packed.tail ()->fini ();
273       packed.pop ();
274     }
275     if (packed.length > 1)
276       assert (packed.tail ()->head == tail);
277   }
278 
279   template <typename T>
add_linkhb_serialize_context_t280   void add_link (T &ofs, objidx_t objidx,
281 		 whence_t whence = Head,
282 		 unsigned bias = 0)
283   {
284     static_assert (sizeof (T) == 2 || sizeof (T) == 4, "");
285 
286     if (!objidx)
287       return;
288 
289     assert (current);
290     assert (current->head <= (const char *) &ofs);
291 
292     auto& link = *current->links.push ();
293 
294     link.is_wide = sizeof (T) == 4;
295     link.is_signed = hb_is_signed (hb_unwrap_type (T));
296     link.whence = (unsigned) whence;
297     link.position = (const char *) &ofs - current->head;
298     link.bias = bias;
299     link.objidx = objidx;
300   }
301 
to_biashb_serialize_context_t302   unsigned to_bias (const void *base) const
303   {
304     if (!base) return 0;
305     assert (current);
306     assert (current->head <= (const char *) base);
307     return (const char *) base - current->head;
308   }
309 
resolve_linkshb_serialize_context_t310   void resolve_links ()
311   {
312     if (unlikely (in_error ())) return;
313 
314     assert (!current);
315     assert (packed.length > 1);
316 
317     for (const object_t* parent : ++hb_iter (packed))
318       for (const object_t::link_t &link : parent->links)
319       {
320 	const object_t* child = packed[link.objidx];
321 	if (unlikely (!child)) { err_other_error(); return; }
322 	unsigned offset;
323 	switch ((whence_t)link.whence) {
324 	case Head:     offset = child->head - parent->head; break;
325 	case Tail:     offset = child->head - parent->tail; break;
326 	case Absolute: offset = (head - start) + (child->head - tail); break;
327 	default: assert (0);
328 	}
329 
330 	assert (offset >= link.bias);
331 	offset -= link.bias;
332 	if (link.is_signed)
333 	{
334 	  if (link.is_wide)
335 	    assign_offset<int32_t> (parent, link, offset);
336 	  else
337 	    assign_offset<int16_t> (parent, link, offset);
338 	}
339 	else
340 	{
341 	  if (link.is_wide)
342 	    assign_offset<uint32_t> (parent, link, offset);
343 	  else
344 	    assign_offset<uint16_t> (parent, link, offset);
345 	}
346       }
347   }
348 
lengthhb_serialize_context_t349   unsigned int length () const { return this->head - current->head; }
350 
alignhb_serialize_context_t351   void align (unsigned int alignment)
352   {
353     unsigned int l = length () % alignment;
354     if (l)
355       allocate_size<void> (alignment - l);
356   }
357 
358   template <typename Type = void>
start_embedhb_serialize_context_t359   Type *start_embed (const Type *obj HB_UNUSED = nullptr) const
360   { return reinterpret_cast<Type *> (this->head); }
361   template <typename Type>
start_embedhb_serialize_context_t362   Type *start_embed (const Type &obj) const
363   { return start_embed (hb_addressof (obj)); }
364 
365   /* Following two functions exist to allow setting breakpoint on. */
err_ran_out_of_roomhb_serialize_context_t366   void err_ran_out_of_room () { this->ran_out_of_room = true; }
err_other_errorhb_serialize_context_t367   void err_other_error () { this->successful = false; }
368 
369   template <typename Type>
allocate_sizehb_serialize_context_t370   Type *allocate_size (unsigned int size)
371   {
372     if (unlikely (!this->successful)) return nullptr;
373 
374     if (this->tail - this->head < ptrdiff_t (size))
375     {
376       err_ran_out_of_room ();
377       this->successful = false;
378       return nullptr;
379     }
380     memset (this->head, 0, size);
381     char *ret = this->head;
382     this->head += size;
383     return reinterpret_cast<Type *> (ret);
384   }
385 
386   template <typename Type>
allocate_minhb_serialize_context_t387   Type *allocate_min ()
388   { return this->allocate_size<Type> (Type::min_size); }
389 
390   template <typename Type>
embedhb_serialize_context_t391   Type *embed (const Type *obj)
392   {
393     unsigned int size = obj->get_size ();
394     Type *ret = this->allocate_size<Type> (size);
395     if (unlikely (!ret)) return nullptr;
396     memcpy (ret, obj, size);
397     return ret;
398   }
399   template <typename Type>
embedhb_serialize_context_t400   Type *embed (const Type &obj)
401   { return embed (hb_addressof (obj)); }
402 
403   template <typename Type, typename ...Ts> auto
_copyhb_serialize_context_t404   _copy (const Type &src, hb_priority<1>, Ts&&... ds) HB_RETURN
405   (Type *, src.copy (this, hb_forward<Ts> (ds)...))
406 
407   template <typename Type> auto
408   _copy (const Type &src, hb_priority<0>) -> decltype (&(hb_declval<Type> () = src))
409   {
410     Type *ret = this->allocate_size<Type> (sizeof (Type));
411     if (unlikely (!ret)) return nullptr;
412     *ret = src;
413     return ret;
414   }
415 
416   /* Like embed, but active: calls obj.operator=() or obj.copy() to transfer data
417    * instead of memcpy(). */
418   template <typename Type, typename ...Ts>
419   Type *copy (const Type &src, Ts&&... ds)
420   { return _copy (src, hb_prioritize, hb_forward<Ts> (ds)...); }
421   template <typename Type, typename ...Ts>
422   Type *copy (const Type *src, Ts&&... ds)
423   { return copy (*src, hb_forward<Ts> (ds)...); }
424 
425   template<typename Iterator,
426 	   hb_requires (hb_is_iterator (Iterator)),
427 	   typename ...Ts>
copy_allhb_serialize_context_t428   void copy_all (Iterator it, Ts&&... ds)
429   { for (decltype (*it) _ : it) copy (_, hb_forward<Ts> (ds)...); }
430 
431   template <typename Type>
operator <<hb_serialize_context_t432   hb_serialize_context_t& operator << (const Type &obj) & { embed (obj); return *this; }
433 
434   template <typename Type>
extend_sizehb_serialize_context_t435   Type *extend_size (Type *obj, unsigned int size)
436   {
437     assert (this->start <= (char *) obj);
438     assert ((char *) obj <= this->head);
439     assert ((char *) obj + size >= this->head);
440     if (unlikely (!this->allocate_size<Type> (((char *) obj) + size - this->head))) return nullptr;
441     return reinterpret_cast<Type *> (obj);
442   }
443   template <typename Type>
extend_sizehb_serialize_context_t444   Type *extend_size (Type &obj, unsigned int size)
445   { return extend_size (hb_addressof (obj), size); }
446 
447   template <typename Type>
extend_minhb_serialize_context_t448   Type *extend_min (Type *obj) { return extend_size (obj, obj->min_size); }
449   template <typename Type>
extend_minhb_serialize_context_t450   Type *extend_min (Type &obj) { return extend_min (hb_addressof (obj)); }
451 
452   template <typename Type, typename ...Ts>
453   Type *extend (Type *obj, Ts&&... ds)
454   { return extend_size (obj, obj->get_size (hb_forward<Ts> (ds)...)); }
455   template <typename Type, typename ...Ts>
456   Type *extend (Type &obj, Ts&&... ds)
457   { return extend (hb_addressof (obj), hb_forward<Ts> (ds)...); }
458 
459   /* Output routines. */
copy_byteshb_serialize_context_t460   hb_bytes_t copy_bytes () const
461   {
462     assert (this->successful);
463     /* Copy both items from head side and tail side... */
464     unsigned int len = (this->head - this->start)
465 		     + (this->end  - this->tail);
466 
467     char *p = (char *) malloc (len);
468     if (unlikely (!p)) return hb_bytes_t ();
469 
470     memcpy (p, this->start, this->head - this->start);
471     memcpy (p + (this->head - this->start), this->tail, this->end - this->tail);
472     return hb_bytes_t (p, len);
473   }
474   template <typename Type>
copyhb_serialize_context_t475   Type *copy () const
476   { return reinterpret_cast<Type *> ((char *) copy_bytes ().arrayZ); }
copy_blobhb_serialize_context_t477   hb_blob_t *copy_blob () const
478   {
479     hb_bytes_t b = copy_bytes ();
480     return hb_blob_create (b.arrayZ, b.length,
481 			   HB_MEMORY_MODE_WRITABLE,
482 			   (char *) b.arrayZ, free);
483   }
484 
485   private:
486   template <typename T>
assign_offsethb_serialize_context_t487   void assign_offset (const object_t* parent, const object_t::link_t &link, unsigned offset)
488   {
489     auto &off = * ((BEInt<T, sizeof (T)> *) (parent->head + link.position));
490     assert (0 == off);
491     check_assign (off, offset);
492   }
493 
494   public: /* TODO Make private. */
495   char *start, *head, *tail, *end;
496   unsigned int debug_depth;
497   bool successful;
498   bool ran_out_of_room;
499 
500   private:
501 
502   /* Object memory pool. */
503   hb_pool_t<object_t> object_pool;
504 
505   /* Stack of currently under construction objects. */
506   object_t *current;
507 
508   /* Stack of packed objects.  Object 0 is always nil object. */
509   hb_vector_t<object_t *> packed;
510 
511   /* Map view of packed objects. */
512   hb_hashmap_t<const object_t *, objidx_t, nullptr, 0> packed_map;
513 };


#endif /* HB_SERIALIZE_HH */