/* xref: /aosp_15_r20/external/harfbuzz_ng/src/hb-serialize.hh (revision 2d1272b857b1f7575e6e246373e1cb218663db8a) */
/*
 * Copyright © 2007,2008,2009,2010  Red Hat, Inc.
 * Copyright © 2012,2018  Google, Inc.
 * Copyright © 2019  Facebook, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Red Hat Author(s): Behdad Esfahbod
 * Google Author(s): Behdad Esfahbod
 * Facebook Author(s): Behdad Esfahbod
 */
#ifndef HB_SERIALIZE_HH
#define HB_SERIALIZE_HH

#include "hb.hh"
#include "hb-blob.hh"
#include "hb-map.hh"
#include "hb-pool.hh"

#ifdef HB_EXPERIMENTAL_API
#include "hb-subset-repacker.h"
#endif
42 
/*
 * Serialize
 */
46 
47 enum hb_serialize_error_t {
48   HB_SERIALIZE_ERROR_NONE =            0x00000000u,
49   HB_SERIALIZE_ERROR_OTHER =           0x00000001u,
50   HB_SERIALIZE_ERROR_OFFSET_OVERFLOW = 0x00000002u,
51   HB_SERIALIZE_ERROR_OUT_OF_ROOM =     0x00000004u,
52   HB_SERIALIZE_ERROR_INT_OVERFLOW =    0x00000008u,
53   HB_SERIALIZE_ERROR_ARRAY_OVERFLOW =  0x00000010u
54 };
55 HB_MARK_AS_FLAG_T (hb_serialize_error_t);
56 
57 struct hb_serialize_context_t
58 {
59   typedef unsigned objidx_t;
60 
61   enum whence_t {
62      Head,	/* Relative to the current object head (default). */
63      Tail,	/* Relative to the current object tail after packed. */
64      Absolute	/* Absolute: from the start of the serialize buffer. */
65    };
66 
67 
68 
69   struct object_t
70   {
finihb_serialize_context_t::object_t71     void fini () {
72       real_links.fini ();
73       virtual_links.fini ();
74     }
75 
76     object_t () = default;
77 
78 #ifdef HB_EXPERIMENTAL_API
object_thb_serialize_context_t::object_t79     object_t (const hb_object_t &o)
80     {
81       head = o.head;
82       tail = o.tail;
83       next = nullptr;
84       real_links.alloc (o.num_real_links, true);
85       for (unsigned i = 0 ; i < o.num_real_links; i++)
86         real_links.push (o.real_links[i]);
87 
88       virtual_links.alloc (o.num_virtual_links, true);
89       for (unsigned i = 0; i < o.num_virtual_links; i++)
90         virtual_links.push (o.virtual_links[i]);
91     }
92 #endif
93 
add_virtual_linkhb_serialize_context_t::object_t94     bool add_virtual_link (objidx_t objidx)
95     {
96       if (!objidx)
97         return false;
98 
99       auto& link = *virtual_links.push ();
100       if (virtual_links.in_error ())
101         return false;
102 
103       link.objidx = objidx;
104       // Remaining fields were previously zero'd by push():
105       // link.width = 0;
106       // link.is_signed = 0;
107       // link.whence = 0;
108       // link.position = 0;
109       // link.bias = 0;
110 
111       return true;
112     }
113 
swaphb_serialize_context_t114     friend void swap (object_t& a, object_t& b) noexcept
115     {
116       hb_swap (a.head, b.head);
117       hb_swap (a.tail, b.tail);
118       hb_swap (a.next, b.next);
119       hb_swap (a.real_links, b.real_links);
120       hb_swap (a.virtual_links, b.virtual_links);
121     }
122 
operator ==hb_serialize_context_t::object_t123     bool operator == (const object_t &o) const
124     {
125       // Virtual links aren't considered for equality since they don't affect the functionality
126       // of the object.
127       return (tail - head == o.tail - o.head)
128 	  && (real_links.length == o.real_links.length)
129 	  && 0 == hb_memcmp (head, o.head, tail - head)
130 	  && real_links.as_bytes () == o.real_links.as_bytes ();
131     }
hashhb_serialize_context_t::object_t132     uint32_t hash () const
133     {
134       // Virtual links aren't considered for equality since they don't affect the functionality
135       // of the object.
136       return hb_bytes_t (head, hb_min (128, tail - head)).hash () ^
137           real_links.as_bytes ().hash ();
138     }
139 
140     struct link_t
141     {
142       unsigned width: 3;
143       unsigned is_signed: 1;
144       unsigned whence: 2;
145       unsigned bias : 26;
146       unsigned position;
147       objidx_t objidx;
148 
149       link_t () = default;
150 
151 #ifdef HB_EXPERIMENTAL_API
link_thb_serialize_context_t::object_t::link_t152       link_t (const hb_link_t &o)
153       {
154         width = o.width;
155         is_signed = 0;
156         whence = 0;
157         position = o.position;
158         bias = 0;
159         objidx = o.objidx;
160       }
161 #endif
162 
cmphb_serialize_context_t::object_t::link_t163       HB_INTERNAL static int cmp (const void* a, const void* b)
164       {
165         int cmp = ((const link_t*)a)->position - ((const link_t*)b)->position;
166         if (cmp) return cmp;
167 
168         return ((const link_t*)a)->objidx - ((const link_t*)b)->objidx;
169       }
170     };
171 
172     char *head;
173     char *tail;
174     hb_vector_t<link_t> real_links;
175     hb_vector_t<link_t> virtual_links;
176     object_t *next;
177 
178     auto all_links () const HB_AUTO_RETURN
179         (( hb_concat (real_links, virtual_links) ));
180     auto all_links_writer () HB_AUTO_RETURN
181         (( hb_concat (real_links.writer (), virtual_links.writer ()) ));
182   };
183 
184   struct snapshot_t
185   {
186     char *head;
187     char *tail;
188     object_t *current; // Just for sanity check
189     unsigned num_real_links;
190     unsigned num_virtual_links;
191     hb_serialize_error_t errors;
192   };
193 
snapshothb_serialize_context_t194   snapshot_t snapshot ()
195   {
196     return snapshot_t {
197       head, tail, current,
198       current ? current->real_links.length : 0,
199       current ? current->virtual_links.length : 0,
200       errors
201      };
202   }
203 
hb_serialize_context_thb_serialize_context_t204   hb_serialize_context_t (void *start_, unsigned int size) :
205     start ((char *) start_),
206     end (start + size),
207     current (nullptr)
208   { reset (); }
~hb_serialize_context_thb_serialize_context_t209   ~hb_serialize_context_t () { fini (); }
210 
finihb_serialize_context_t211   void fini ()
212   {
213     for (object_t *_ : ++hb_iter (packed)) _->fini ();
214     packed.fini ();
215     this->packed_map.fini ();
216 
217     while (current)
218     {
219       auto *_ = current;
220       current = current->next;
221       _->fini ();
222     }
223   }
224 
in_errorhb_serialize_context_t225   bool in_error () const { return bool (errors); }
226 
successfulhb_serialize_context_t227   bool successful () const { return !bool (errors); }
228 
ran_out_of_roomhb_serialize_context_t229   HB_NODISCARD bool ran_out_of_room () const { return errors & HB_SERIALIZE_ERROR_OUT_OF_ROOM; }
offset_overflowhb_serialize_context_t230   HB_NODISCARD bool offset_overflow () const { return errors & HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
only_offset_overflowhb_serialize_context_t231   HB_NODISCARD bool only_offset_overflow () const { return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW; }
only_overflowhb_serialize_context_t232   HB_NODISCARD bool only_overflow () const
233   {
234     return errors == HB_SERIALIZE_ERROR_OFFSET_OVERFLOW
235         || errors == HB_SERIALIZE_ERROR_INT_OVERFLOW
236         || errors == HB_SERIALIZE_ERROR_ARRAY_OVERFLOW;
237   }
238 
resethb_serialize_context_t239   void reset (void *start_, unsigned int size)
240   {
241     start = (char*) start_;
242     end = start + size;
243     reset ();
244     current = nullptr;
245   }
246 
resethb_serialize_context_t247   void reset ()
248   {
249     this->errors = HB_SERIALIZE_ERROR_NONE;
250     this->head = this->start;
251     this->tail = this->end;
252     this->zerocopy = nullptr;
253     this->debug_depth = 0;
254 
255     fini ();
256     this->packed.push (nullptr);
257     this->packed_map.init ();
258   }
259 
check_successhb_serialize_context_t260   bool check_success (bool success,
261                       hb_serialize_error_t err_type = HB_SERIALIZE_ERROR_OTHER)
262   {
263     return successful ()
264         && (success || err (err_type));
265   }
266 
267   template <typename T1, typename T2>
check_equalhb_serialize_context_t268   bool check_equal (T1 &&v1, T2 &&v2, hb_serialize_error_t err_type)
269   {
270     if ((long long) v1 != (long long) v2)
271     {
272       return err (err_type);
273     }
274     return true;
275   }
276 
277   template <typename T1, typename T2>
check_assignhb_serialize_context_t278   bool check_assign (T1 &v1, T2 &&v2, hb_serialize_error_t err_type)
279   { return check_equal (v1 = v2, v2, err_type); }
280 
propagate_errorhb_serialize_context_t281   template <typename T> bool propagate_error (T &&obj)
282   { return check_success (!hb_deref (obj).in_error ()); }
283 
propagate_errorhb_serialize_context_t284   template <typename T1, typename... Ts> bool propagate_error (T1 &&o1, Ts&&... os)
285   { return propagate_error (std::forward<T1> (o1)) &&
286 	   propagate_error (std::forward<Ts> (os)...); }
287 
288   /* To be called around main operation. */
289   template <typename Type=char>
290   __attribute__((returns_nonnull))
start_serializehb_serialize_context_t291   Type *start_serialize ()
292   {
293     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, +1,
294 		     "start [%p..%p] (%lu bytes)",
295 		     this->start, this->end,
296 		     (unsigned long) (this->end - this->start));
297 
298     assert (!current);
299     return push<Type> ();
300   }
end_serializehb_serialize_context_t301   void end_serialize ()
302   {
303     DEBUG_MSG_LEVEL (SERIALIZE, this->start, 0, -1,
304 		     "end [%p..%p] serialized %u bytes; %s",
305 		     this->start, this->end,
306 		     (unsigned) (this->head - this->start),
307 		     successful () ? "successful" : "UNSUCCESSFUL");
308 
309     propagate_error (packed, packed_map);
310 
311     if (unlikely (!current)) return;
312     if (unlikely (in_error()))
313     {
314       // Offset overflows that occur before link resolution cannot be handled
315       // by repacking, so set a more general error.
316       if (offset_overflow ()) err (HB_SERIALIZE_ERROR_OTHER);
317       return;
318     }
319 
320     assert (!current->next);
321 
322     /* Only "pack" if there exist other objects... Otherwise, don't bother.
323      * Saves a move. */
324     if (packed.length <= 1)
325       return;
326 
327     pop_pack (false);
328 
329     resolve_links ();
330   }
331 
332   template <typename Type = void>
333   __attribute__((returns_nonnull))
pushhb_serialize_context_t334   Type *push ()
335   {
336     if (unlikely (in_error ())) return start_embed<Type> ();
337 
338     object_t *obj = object_pool.alloc ();
339     if (unlikely (!obj))
340       check_success (false);
341     else
342     {
343       obj->head = head;
344       obj->tail = tail;
345       obj->next = current;
346       current = obj;
347     }
348     return start_embed<Type> ();
349   }
pop_discardhb_serialize_context_t350   void pop_discard ()
351   {
352     object_t *obj = current;
353     if (unlikely (!obj)) return;
354     // Allow cleanup when we've error'd out on int overflows which don't compromise
355     // the serializer state.
356     if (unlikely (in_error() && !only_overflow ())) return;
357 
358     current = current->next;
359     revert (zerocopy ? zerocopy : obj->head, obj->tail);
360     zerocopy = nullptr;
361     obj->fini ();
362     object_pool.release (obj);
363   }
364 
365   /* Set share to false when an object is unlikely shareable with others
366    * so not worth an attempt, or a contiguous table is serialized as
367    * multiple consecutive objects in the reverse order so can't be shared.
368    */
pop_packhb_serialize_context_t369   objidx_t pop_pack (bool share=true)
370   {
371     object_t *obj = current;
372     if (unlikely (!obj)) return 0;
373     // Allow cleanup when we've error'd out on int overflows which don't compromise
374     // the serializer state.
375     if (unlikely (in_error()  && !only_overflow ())) return 0;
376 
377     current = current->next;
378     obj->tail = head;
379     obj->next = nullptr;
380     assert (obj->head <= obj->tail);
381     unsigned len = obj->tail - obj->head;
382     head = zerocopy ? zerocopy : obj->head; /* Rewind head. */
383     bool was_zerocopy = zerocopy;
384     zerocopy = nullptr;
385 
386     if (!len)
387     {
388       assert (!obj->real_links.length);
389       assert (!obj->virtual_links.length);
390       return 0;
391     }
392 
393     objidx_t objidx;
394     uint32_t hash = 0;
395     if (share)
396     {
397       hash = hb_hash (obj);
398       objidx = packed_map.get_with_hash (obj, hash);
399       if (objidx)
400       {
401         merge_virtual_links (obj, objidx);
402 	obj->fini ();
403 	return objidx;
404       }
405     }
406 
407     tail -= len;
408     if (was_zerocopy)
409       assert (tail == obj->head);
410     else
411       memmove (tail, obj->head, len);
412 
413     obj->head = tail;
414     obj->tail = tail + len;
415 
416     packed.push (obj);
417 
418     if (unlikely (!propagate_error (packed)))
419     {
420       /* Obj wasn't successfully added to packed, so clean it up otherwise its
421        * links will be leaked. When we use constructor/destructors properly, we
422        * can remove these. */
423       obj->fini ();
424       return 0;
425     }
426 
427     objidx = packed.length - 1;
428 
429     if (share) packed_map.set_with_hash (obj, hash, objidx);
430     propagate_error (packed_map);
431 
432     return objidx;
433   }
434 
reverthb_serialize_context_t435   void revert (snapshot_t snap)
436   {
437     // Overflows that happened after the snapshot will be erased by the revert.
438     if (unlikely (in_error () && !only_overflow ())) return;
439     assert (snap.current == current);
440     if (current)
441     {
442       current->real_links.shrink (snap.num_real_links);
443       current->virtual_links.shrink (snap.num_virtual_links);
444     }
445     errors = snap.errors;
446     revert (snap.head, snap.tail);
447   }
448 
reverthb_serialize_context_t449   void revert (char *snap_head,
450 	       char *snap_tail)
451   {
452     if (unlikely (in_error ())) return;
453     assert (snap_head <= head);
454     assert (tail <= snap_tail);
455     head = snap_head;
456     tail = snap_tail;
457     discard_stale_objects ();
458   }
459 
discard_stale_objectshb_serialize_context_t460   void discard_stale_objects ()
461   {
462     if (unlikely (in_error ())) return;
463     while (packed.length > 1 &&
464 	   packed.tail ()->head < tail)
465     {
466       packed_map.del (packed.tail ());
467       assert (!packed.tail ()->next);
468       packed.tail ()->fini ();
469       packed.pop ();
470     }
471     if (packed.length > 1)
472       assert (packed.tail ()->head == tail);
473   }
474 
475   // Adds a virtual link from the current object to objidx. A virtual link is not associated with
476   // an actual offset field. They are solely used to enforce ordering constraints between objects.
477   // Adding a virtual link from object a to object b will ensure that object b is always packed after
478   // object a in the final serialized order.
479   //
480   // This is useful in certain situations where there needs to be a specific ordering in the
481   // final serialization. Such as when platform bugs require certain orderings, or to provide
482   //  guidance to the repacker for better offset overflow resolution.
add_virtual_linkhb_serialize_context_t483   void add_virtual_link (objidx_t objidx)
484   {
485     if (unlikely (in_error ())) return;
486 
487     if (!objidx)
488       return;
489 
490     assert (current);
491 
492     if (!current->add_virtual_link(objidx))
493       err (HB_SERIALIZE_ERROR_OTHER);
494   }
495 
last_added_child_indexhb_serialize_context_t496   objidx_t last_added_child_index() const {
497     if (unlikely (in_error ())) return (objidx_t) -1;
498 
499     assert (current);
500     if (!bool(current->real_links)) {
501       return (objidx_t) -1;
502     }
503 
504     return current->real_links[current->real_links.length - 1].objidx;
505   }
506 
507   // For the current object ensure that the sub-table bytes for child objidx are always placed
508   // after the subtable bytes for any other existing children. This only ensures that the
509   // repacker will not move the target subtable before the other children
510   // (by adding virtual links). It is up to the caller to ensure the initial serialization
511   // order is correct.
repack_lasthb_serialize_context_t512   void repack_last(objidx_t objidx) {
513     if (unlikely (in_error ())) return;
514 
515     if (!objidx)
516       return;
517 
518     assert (current);
519     for (auto& l : current->real_links) {
520       if (l.objidx == objidx) {
521         continue;
522       }
523 
524       packed[l.objidx]->add_virtual_link(objidx);
525     }
526   }
527 
528   template <typename T>
add_linkhb_serialize_context_t529   void add_link (T &ofs, objidx_t objidx,
530 		 whence_t whence = Head,
531 		 unsigned bias = 0)
532   {
533     if (unlikely (in_error ())) return;
534 
535     if (!objidx)
536       return;
537 
538     assert (current);
539     assert (current->head <= (const char *) &ofs);
540 
541     auto& link = *current->real_links.push ();
542     if (current->real_links.in_error ())
543       err (HB_SERIALIZE_ERROR_OTHER);
544 
545     link.width = sizeof (T);
546     link.objidx = objidx;
547     if (unlikely (!sizeof (T)))
548     {
549       // This link is not associated with an actual offset and exists merely to enforce
550       // an ordering constraint.
551       link.is_signed = 0;
552       link.whence = 0;
553       link.position = 0;
554       link.bias = 0;
555       return;
556     }
557 
558     link.is_signed = std::is_signed<hb_unwrap_type (T)>::value;
559     link.whence = (unsigned) whence;
560     link.position = (const char *) &ofs - current->head;
561     link.bias = bias;
562   }
563 
to_biashb_serialize_context_t564   unsigned to_bias (const void *base) const
565   {
566     if (unlikely (in_error ())) return 0;
567     if (!base) return 0;
568     assert (current);
569     assert (current->head <= (const char *) base);
570     return (const char *) base - current->head;
571   }
572 
resolve_linkshb_serialize_context_t573   void resolve_links ()
574   {
575     if (unlikely (in_error ())) return;
576 
577     assert (!current);
578     assert (packed.length > 1);
579 
580     for (const object_t* parent : ++hb_iter (packed))
581       for (const object_t::link_t &link : parent->real_links)
582       {
583 	const object_t* child = packed[link.objidx];
584 	if (unlikely (!child)) { err (HB_SERIALIZE_ERROR_OTHER); return; }
585 	unsigned offset = 0;
586 	switch ((whence_t) link.whence) {
587 	case Head:     offset = child->head - parent->head; break;
588 	case Tail:     offset = child->head - parent->tail; break;
589 	case Absolute: offset = (head - start) + (child->head - tail); break;
590 	}
591 
592 	assert (offset >= link.bias);
593 	offset -= link.bias;
594 	if (link.is_signed)
595 	{
596 	  assert (link.width == 2 || link.width == 4);
597 	  if (link.width == 4)
598 	    assign_offset<int32_t> (parent, link, offset);
599 	  else
600 	    assign_offset<int16_t> (parent, link, offset);
601 	}
602 	else
603 	{
604 	  assert (link.width == 2 || link.width == 3 || link.width == 4);
605 	  if (link.width == 4)
606 	    assign_offset<uint32_t> (parent, link, offset);
607 	  else if (link.width == 3)
608 	    assign_offset<uint32_t, 3> (parent, link, offset);
609 	  else
610 	    assign_offset<uint16_t> (parent, link, offset);
611 	}
612       }
613   }
614 
lengthhb_serialize_context_t615   unsigned int length () const
616   {
617     if (unlikely (!current)) return 0;
618     return this->head - current->head;
619   }
620 
alignhb_serialize_context_t621   void align (unsigned int alignment)
622   {
623     unsigned int l = length () % alignment;
624     if (l)
625       (void) allocate_size<void> (alignment - l);
626   }
627 
628   template <typename Type = void>
629   __attribute__((returns_nonnull))
start_embedhb_serialize_context_t630   Type *start_embed (const Type *obj HB_UNUSED = nullptr) const
631   { return reinterpret_cast<Type *> (this->head); }
632   template <typename Type>
633   __attribute__((returns_nonnull))
start_embedhb_serialize_context_t634   Type *start_embed (const Type &obj) const
635   { return start_embed (std::addressof (obj)); }
636 
errhb_serialize_context_t637   bool err (hb_serialize_error_t err_type)
638   {
639     return !bool ((errors = (errors | err_type)));
640   }
641 
start_zerocopyhb_serialize_context_t642   bool start_zerocopy (size_t size)
643   {
644     if (unlikely (in_error ())) return false;
645 
646     if (unlikely (size > INT_MAX || this->tail - this->head < ptrdiff_t (size)))
647     {
648       err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
649       return false;
650     }
651 
652     assert (!this->zerocopy);
653     this->zerocopy = this->head;
654 
655     assert (this->current->head == this->head);
656     this->current->head = this->current->tail = this->head = this->tail - size;
657     return true;
658   }
659 
660   template <typename Type>
661   HB_NODISCARD
allocate_sizehb_serialize_context_t662   Type *allocate_size (size_t size, bool clear = true)
663   {
664     if (unlikely (in_error ())) return nullptr;
665 
666     if (unlikely (size > INT_MAX || this->tail - this->head < ptrdiff_t (size)))
667     {
668       err (HB_SERIALIZE_ERROR_OUT_OF_ROOM);
669       return nullptr;
670     }
671     if (clear)
672       hb_memset (this->head, 0, size);
673     char *ret = this->head;
674     this->head += size;
675     return reinterpret_cast<Type *> (ret);
676   }
677 
678   template <typename Type>
allocate_minhb_serialize_context_t679   Type *allocate_min ()
680   { return this->allocate_size<Type> (Type::min_size); }
681 
682   template <typename Type>
683   HB_NODISCARD
embedhb_serialize_context_t684   Type *embed (const Type *obj)
685   {
686     unsigned int size = obj->get_size ();
687     Type *ret = this->allocate_size<Type> (size, false);
688     if (unlikely (!ret)) return nullptr;
689     hb_memcpy (ret, obj, size);
690     return ret;
691   }
692   template <typename Type>
693   HB_NODISCARD
embedhb_serialize_context_t694   Type *embed (const Type &obj)
695   { return embed (std::addressof (obj)); }
embedhb_serialize_context_t696   char *embed (const char *obj, unsigned size)
697   {
698     char *ret = this->allocate_size<char> (size, false);
699     if (unlikely (!ret)) return nullptr;
700     hb_memcpy (ret, obj, size);
701     return ret;
702   }
703 
704   template <typename Type, typename ...Ts> auto
_copyhb_serialize_context_t705   _copy (const Type &src, hb_priority<1>, Ts&&... ds) HB_RETURN
706   (Type *, src.copy (this, std::forward<Ts> (ds)...))
707 
708   template <typename Type> auto
709   _copy (const Type &src, hb_priority<0>) -> decltype (&(hb_declval<Type> () = src))
710   {
711     Type *ret = this->allocate_size<Type> (sizeof (Type));
712     if (unlikely (!ret)) return nullptr;
713     *ret = src;
714     return ret;
715   }
716 
717   /* Like embed, but active: calls obj.operator=() or obj.copy() to transfer data
718    * instead of hb_memcpy(). */
719   template <typename Type, typename ...Ts>
copyhb_serialize_context_t720   Type *copy (const Type &src, Ts&&... ds)
721   { return _copy (src, hb_prioritize, std::forward<Ts> (ds)...); }
722   template <typename Type, typename ...Ts>
copyhb_serialize_context_t723   Type *copy (const Type *src, Ts&&... ds)
724   { return copy (*src, std::forward<Ts> (ds)...); }
725 
726   template<typename Iterator,
727 	   hb_requires (hb_is_iterator (Iterator)),
728 	   typename ...Ts>
copy_allhb_serialize_context_t729   void copy_all (Iterator it, Ts&&... ds)
730   { for (decltype (*it) _ : it) copy (_, std::forward<Ts> (ds)...); }
731 
732   template <typename Type>
operator <<hb_serialize_context_t733   hb_serialize_context_t& operator << (const Type &obj) & { embed (obj); return *this; }
734 
735   template <typename Type>
extend_sizehb_serialize_context_t736   Type *extend_size (Type *obj, size_t size, bool clear = true)
737   {
738     if (unlikely (in_error ())) return nullptr;
739 
740     assert (this->start <= (char *) obj);
741     assert ((char *) obj <= this->head);
742     assert ((size_t) (this->head - (char *) obj) <= size);
743     if (unlikely (((char *) obj + size < (char *) obj) ||
744 		  !this->allocate_size<Type> (((char *) obj) + size - this->head, clear))) return nullptr;
745     return reinterpret_cast<Type *> (obj);
746   }
747   template <typename Type>
extend_sizehb_serialize_context_t748   Type *extend_size (Type &obj, size_t size, bool clear = true)
749   { return extend_size (std::addressof (obj), size, clear); }
750 
751   template <typename Type>
extend_minhb_serialize_context_t752   Type *extend_min (Type *obj) { return extend_size (obj, obj->min_size); }
753   template <typename Type>
extend_minhb_serialize_context_t754   Type *extend_min (Type &obj) { return extend_min (std::addressof (obj)); }
755 
756   template <typename Type, typename ...Ts>
extendhb_serialize_context_t757   Type *extend (Type *obj, Ts&&... ds)
758   { return extend_size (obj, obj->get_size (std::forward<Ts> (ds)...)); }
759   template <typename Type, typename ...Ts>
extendhb_serialize_context_t760   Type *extend (Type &obj, Ts&&... ds)
761   { return extend (std::addressof (obj), std::forward<Ts> (ds)...); }
762 
763   /* Output routines. */
copy_byteshb_serialize_context_t764   hb_bytes_t copy_bytes () const
765   {
766     assert (successful ());
767     /* Copy both items from head side and tail side... */
768     unsigned int len = (this->head - this->start)
769 		     + (this->end  - this->tail);
770 
771     // If len is zero don't hb_malloc as the memory won't get properly
772     // cleaned up later.
773     if (!len) return hb_bytes_t ();
774 
775     char *p = (char *) hb_malloc (len);
776     if (unlikely (!p)) return hb_bytes_t ();
777 
778     hb_memcpy (p, this->start, this->head - this->start);
779     hb_memcpy (p + (this->head - this->start), this->tail, this->end - this->tail);
780     return hb_bytes_t (p, len);
781   }
782   template <typename Type>
copyhb_serialize_context_t783   Type *copy () const
784   { return reinterpret_cast<Type *> ((char *) copy_bytes ().arrayZ); }
copy_blobhb_serialize_context_t785   hb_blob_t *copy_blob () const
786   {
787     hb_bytes_t b = copy_bytes ();
788     return hb_blob_create (b.arrayZ, b.length,
789 			   HB_MEMORY_MODE_WRITABLE,
790 			   (char *) b.arrayZ, hb_free);
791   }
792 
object_graphhb_serialize_context_t793   const hb_vector_t<object_t *>& object_graph() const
794   { return packed; }
795 
796   private:
797   template <typename T, unsigned Size = sizeof (T)>
assign_offsethb_serialize_context_t798   void assign_offset (const object_t* parent, const object_t::link_t &link, unsigned offset)
799   {
800     auto &off = * ((BEInt<T, Size> *) (parent->head + link.position));
801     assert (0 == off);
802     check_assign (off, offset, HB_SERIALIZE_ERROR_OFFSET_OVERFLOW);
803   }
804 
805   public:
806   char *start, *head, *tail, *end, *zerocopy;
807   unsigned int debug_depth;
808   hb_serialize_error_t errors;
809 
810   private:
811 
merge_virtual_linkshb_serialize_context_t812   void merge_virtual_links (const object_t* from, objidx_t to_idx) {
813     object_t* to = packed[to_idx];
814     for (const auto& l : from->virtual_links) {
815       to->virtual_links.push (l);
816     }
817   }
818 
819   /* Object memory pool. */
820   hb_pool_t<object_t> object_pool;
821 
822   /* Stack of currently under construction objects. */
823   object_t *current;
824 
825   /* Stack of packed objects.  Object 0 is always nil object. */
826   hb_vector_t<object_t *> packed;
827 
828   /* Map view of packed objects. */
829   hb_hashmap_t<const object_t *, objidx_t> packed_map;
830 };
831 
832 #endif /* HB_SERIALIZE_HH */
833