 src/hb-open-type-private.hh        | 59 ++++++++++++++++++++++++++++-------
 src/hb-ot-layout-common-private.hh | 61 ++++++++++++++++++-------------------
 src/hb-ot-layout-gsub-table.hh     | 51 ++++++++++++++++++++++++++++++
 3 files changed, 128 insertions(+), 43 deletions(-)
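For orientation: the commits below rework hb_serialize_context_t's in-place, bump-style allocation (allocate_size / extend and friends). The following is a minimal standalone sketch of that pattern under simplified, made-up names; it is not the actual HarfBuzz API, only an illustration of carving zero-initialized objects out of one pre-sized buffer:

/* Sketch only: simplified stand-in for the allocation path the commits below
 * touch.  Names and types here are illustrative, not HarfBuzz's. */
#include <cstdio>
#include <cstring>

struct sketch_context_t
{
  char *start, *head, *end;
  bool ran_out_of_room;

  /* Reserve `size` zero-initialized bytes at the write head, or fail for good. */
  void *allocate_size (unsigned int size)
  {
    if (ran_out_of_room || (unsigned int) (end - head) < size) {
      ran_out_of_room = true;
      return NULL;
    }
    memset (head, 0, size);
    char *ret = head;
    head += size;
    return ret;
  }
};

int
main (void)
{
  char buffer[16];
  sketch_context_t c = { buffer, buffer, buffer + sizeof (buffer), false };
  void *a = c.allocate_size (4);   /* succeeds: 4 of 16 bytes used  */
  void *b = c.allocate_size (16);  /* fails: only 12 bytes are left */
  printf ("a=%p b=%p used=%u\n", a, b, (unsigned int) (c.head - c.start));
  return 0;
}

The first commit below drops the alignment/padding parameters from this path; per its message, alignment handling may be reintroduced differently when actually needed.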
New commits:

commit 4b312fb288faa383a2c5bd3be0428f0e58e02699
Author: Behdad Esfahbod <[email protected]>
Date:   Sat Sep 1 21:56:06 2012 -0400

    [OT] Remove serialize alignment

    Will reintroduce in a different way when we actually need it.

diff --git a/src/hb-open-type-private.hh b/src/hb-open-type-private.hh
index 36d61ee..ce268b2 100644
--- a/src/hb-open-type-private.hh
+++ b/src/hb-open-type-private.hh
@@ -371,51 +371,49 @@ struct hb_serialize_context_t
   }
 
   template <typename Type>
-  inline Type *allocate_size (unsigned int size, unsigned int alignment = 1)
+  inline Type *allocate_size (unsigned int size)
   {
-    unsigned int padding = alignment < 2 ? 0 : (alignment - (this->head - this->start) % alignment) % alignment;
-    if (unlikely (this->ran_out_of_room || this->end - this->head > padding + size)) {
+    if (unlikely (this->ran_out_of_room || this->end - this->head > size)) {
       this->ran_out_of_room = true;
       return NULL;
     }
-    memset (this->head, 0, padding + size);
-    this->head += padding;
+    memset (this->head, 0, size);
     char *ret = this->head;
     this->head += size;
     return reinterpret_cast<Type *> (ret);
   }
 
   template <typename Type>
-  inline Type *allocate_min (unsigned int alignment = 2)
+  inline Type *allocate_min (void)
   {
-    return this->allocate_size<Type> (Type::min_size, alignment);
+    return this->allocate_size<Type> (Type::min_size);
   }
 
   template <typename Type>
-  inline Type *embed (const Type &obj, unsigned int alignment = 2)
+  inline Type *embed (const Type &obj)
   {
     unsigned int size = obj.get_size ();
-    Type *ret = this->allocate_size<Type> (size, alignment);
+    Type *ret = this->allocate_size<Type> (size);
     if (unlikely (!ret)) return NULL;
     memcpy (ret, obj, size);
     return ret;
   }
 
   template <typename Type>
-  inline Type *extend_min (Type &obj, unsigned int alignment = 2)
+  inline Type *extend_min (Type &obj)
   {
     unsigned int size = obj.min_size;
     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
-    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head);
     return reinterpret_cast<Type *> (&obj);
   }
 
   template <typename Type>
-  inline Type *extend (Type &obj, unsigned int alignment = 2)
+  inline Type *extend (Type &obj)
   {
     unsigned int size = obj.get_size ();
     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
-    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head);
     return reinterpret_cast<Type *> (&obj);
   }

commit c61be03d6df122f18eebda3b29e42c9e768d45b9
Author: Behdad Esfahbod <[email protected]>
Date:   Sat Sep 1 21:43:38 2012 -0400

    [OT] A bit more serialize

diff --git a/src/hb-open-type-private.hh b/src/hb-open-type-private.hh
index 6e9ed16..36d61ee 100644
--- a/src/hb-open-type-private.hh
+++ b/src/hb-open-type-private.hh
@@ -678,6 +678,20 @@ struct GenericArrayOf
   inline unsigned int get_size (void) const
   { return len.static_size + len * Type::static_size; }
 
+  inline bool serialize (hb_serialize_context_t *c,
+                         const Type *items,
+                         unsigned int items_len)
+  {
+    TRACE_SERIALIZE ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    len.set (items_len); /* TODO may overflow */
+    if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
+    unsigned int count = items_len;
+    for (unsigned int i = 0; i < count; i++)
+      array[i].set (items[i]);
+    return TRACE_RETURN (true);
+  }
+
   inline bool sanitize (hb_sanitize_context_t *c) {
     TRACE_SANITIZE ();
     if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);

diff --git a/src/hb-ot-layout-common-private.hh b/src/hb-ot-layout-common-private.hh
index d5a7883..6c5e423 100644
--- a/src/hb-ot-layout-common-private.hh
+++ b/src/hb-ot-layout-common-private.hh
@@ -530,7 +530,7 @@ struct Coverage
                          unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    if (unlikely (c->extend_min (*this))) return TRACE_RETURN (false);
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
     unsigned int num_ranges = 1;
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i])

diff --git a/src/hb-ot-layout-gsub-table.hh b/src/hb-ot-layout-gsub-table.hh
index ea28bcc..c50b206 100644
--- a/src/hb-ot-layout-gsub-table.hh
+++ b/src/hb-ot-layout-gsub-table.hh
@@ -75,12 +75,12 @@ struct SingleSubstFormat1
   inline bool serialize (hb_serialize_context_t *c,
                          const USHORT *glyphs,
                          unsigned int num_glyphs,
-                         SHORT delta)
+                         unsigned int delta)
   {
     TRACE_SERIALIZE ();
     if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
     if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
-    deltaGlyphID.set (delta);
+    deltaGlyphID.set (delta); /* TODO overflow? */
     return TRACE_RETURN (true);
   }
 
@@ -136,6 +136,18 @@ struct SingleSubstFormat2
     return TRACE_RETURN (true);
   }
 
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         const USHORT *substitutes,
+                         unsigned int num_glyphs)
+  {
+    TRACE_SERIALIZE ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
+    if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return TRACE_RETURN (false);
+    return TRACE_RETURN (true);
+  }
+
   inline bool sanitize (hb_sanitize_context_t *c) {
     TRACE_SANITIZE ();
     return TRACE_RETURN (coverage.sanitize (c, this) && substitute.sanitize (c));
@@ -188,6 +200,33 @@ struct SingleSubst
     }
   }
 
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         const USHORT *substitutes,
+                         unsigned int num_glyphs)
+  {
+    TRACE_SERIALIZE ();
+    if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
+    unsigned int format = 2;
+    unsigned int delta;
+    if (num_glyphs) {
+      format = 1;
+      /* TODO check for wrap-around */
+      delta = substitutes[0] - glyphs[0];
+      for (unsigned int i = 1; i < num_glyphs; i++)
+        if (delta != substitutes[i] - glyphs[i]) {
+          format = 2;
+          break;
+        }
+    }
+    u.format.set (format);
+    switch (u.format) {
+    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs, delta));
+    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
+    default:return TRACE_RETURN (false);
+    }
+  }
+
   inline bool sanitize (hb_sanitize_context_t *c) {
     TRACE_SANITIZE ();
     if (!u.format.sanitize (c)) return TRACE_RETURN (false);
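The SingleSubst::serialize added in the commit above picks format 1 when every substitute differs from its input glyph by one constant delta, and falls back to format 2 (an explicit substitute array) otherwise. Here is a standalone sketch of just that decision, using plain C++ stand-ins (uint16_t in place of USHORT glyph IDs) rather than the HarfBuzz types:

/* Sketch of the format choice made in SingleSubst::serialize above.
 * Plain C++ stand-ins for illustration; not the HarfBuzz API. */
#include <cstdint>
#include <cstdio>
#include <vector>

static unsigned int
choose_single_subst_format (const std::vector<uint16_t> &glyphs,
                            const std::vector<uint16_t> &substitutes)
{
  if (glyphs.empty ()) return 2;
  int delta = (int) substitutes[0] - (int) glyphs[0];
  for (size_t i = 1; i < glyphs.size (); i++)
    if ((int) substitutes[i] - (int) glyphs[i] != delta)
      return 2;   /* deltas differ: need an explicit substitute array */
  return 1;       /* one constant delta covers every pair: compact format 1 */
}

int
main (void)
{
  const uint16_t in_ids[]  = {10, 11, 12};
  const uint16_t out_ids[] = {20, 21, 22};   /* constant delta of +10 */
  std::vector<uint16_t> in (in_ids, in_ids + 3), out (out_ids, out_ids + 3);
  printf ("format %u\n", choose_single_subst_format (in, out));   /* prints "format 1" */
  out[2] = 99;                               /* break the constant delta */
  printf ("format %u\n", choose_single_subst_format (in, out));   /* prints "format 2" */
  return 0;
}

Note the sketch sidesteps the modulo-65536 wrap-around that real glyph deltas need; the commit itself flags that with its /* TODO check for wrap-around */ comment.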
commit abcc5ac1fde1c493e4055dd2f27b8aade7713156
Author: Behdad Esfahbod <[email protected]>
Date:   Sat Sep 1 21:30:17 2012 -0400

    [OT] Improve serialize syntax

    For some definition of improvement...

diff --git a/src/hb-open-type-private.hh b/src/hb-open-type-private.hh
index 70a7d9d..6e9ed16 100644
--- a/src/hb-open-type-private.hh
+++ b/src/hb-open-type-private.hh
@@ -600,9 +600,11 @@ struct GenericOffsetTo : OffsetType
     return StructAtOffset<Type> (base, offset);
   }
 
-  inline void set_offset (void *base, void *obj)
+  inline Type& serialize (hb_serialize_context_t *c, void *base)
   {
-    this->set ((char *) obj - (char *) base);
+    Type *t = (Type *) c->head;
+    this->set ((char *) t - (char *) base);
+    return *t;
   }
 
   inline bool sanitize (hb_sanitize_context_t *c, void *base) {

diff --git a/src/hb-ot-layout-gsub-table.hh b/src/hb-ot-layout-gsub-table.hh
index 714b444..ea28bcc 100644
--- a/src/hb-ot-layout-gsub-table.hh
+++ b/src/hb-ot-layout-gsub-table.hh
@@ -79,9 +79,8 @@ struct SingleSubstFormat1
   {
     TRACE_SERIALIZE ();
     if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
     deltaGlyphID.set (delta);
-    coverage.set_offset (this, c->head);
-    if (unlikely (!(this+coverage).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
     return TRACE_RETURN (true);
   }

commit bc5be24014371ceb811b9ffd37062ede63d87bb1
Author: Behdad Esfahbod <[email protected]>
Date:   Sat Sep 1 20:48:22 2012 -0400

    [OT] Restart work on serialize()

diff --git a/src/hb-open-type-private.hh b/src/hb-open-type-private.hh
index 10e777e..70a7d9d 100644
--- a/src/hb-open-type-private.hh
+++ b/src/hb-open-type-private.hh
@@ -371,13 +371,14 @@ struct hb_serialize_context_t
   }
 
   template <typename Type>
-  inline Type *allocate (unsigned int size, unsigned int alignment = 2)
+  inline Type *allocate_size (unsigned int size, unsigned int alignment = 1)
   {
-    unsigned int padding = (alignment - (this->head - this->start) % alignment) % alignment; /* TODO speedup */
+    unsigned int padding = alignment < 2 ? 0 : (alignment - (this->head - this->start) % alignment) % alignment;
     if (unlikely (this->ran_out_of_room || this->end - this->head > padding + size)) {
       this->ran_out_of_room = true;
       return NULL;
     }
+    memset (this->head, 0, padding + size);
     this->head += padding;
     char *ret = this->head;
     this->head += size;
@@ -387,27 +388,35 @@ struct hb_serialize_context_t
   template <typename Type>
   inline Type *allocate_min (unsigned int alignment = 2)
   {
-    return this->allocate<Type> (Type::min_size, alignment);
+    return this->allocate_size<Type> (Type::min_size, alignment);
   }
 
   template <typename Type>
   inline Type *embed (const Type &obj, unsigned int alignment = 2)
   {
-    return this->allocate<Type> (obj.get_size (), alignment);
+    unsigned int size = obj.get_size ();
+    Type *ret = this->allocate_size<Type> (size, alignment);
+    if (unlikely (!ret)) return NULL;
+    memcpy (ret, obj, size);
+    return ret;
   }
 
   template <typename Type>
-  inline Type *extend (Type &obj, unsigned int size, unsigned int alignment = 2)
+  inline Type *extend_min (Type &obj, unsigned int alignment = 2)
   {
+    unsigned int size = obj.min_size;
     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
-    this->allocate<Type> (((char *) &obj) + size - this->head, alignment);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
     return reinterpret_cast<Type *> (&obj);
   }
 
   template <typename Type>
-  inline Type *extend (Type &obj)
+  inline Type *extend (Type &obj, unsigned int alignment = 2)
   {
-    return this->extend<Type> (obj, obj.get_size ());
+    unsigned int size = obj.get_size ();
+    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
+    return reinterpret_cast<Type *> (&obj);
   }
 
   inline void truncate (void *head)
@@ -585,6 +594,16 @@ struct GenericOffsetTo : OffsetType
     if (unlikely (!offset)) return Null(Type);
     return StructAtOffset<Type> (base, offset);
   }
+  inline Type& operator () (void *base)
+  {
+    unsigned int offset = *this;
+    return StructAtOffset<Type> (base, offset);
+  }
+
+  inline void set_offset (void *base, void *obj)
+  {
+    this->set ((char *) obj - (char *) base);
+  }
 
   inline bool sanitize (hb_sanitize_context_t *c, void *base) {
     TRACE_SANITIZE ();
@@ -615,7 +634,9 @@ struct GenericOffsetTo : OffsetType
   }
 };
 template <typename Base, typename OffsetType, typename Type>
-inline const Type& operator + (const Base &base, GenericOffsetTo<OffsetType, Type> offset) { return offset (base); }
+inline const Type& operator + (const Base &base, const GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
+template <typename Base, typename OffsetType, typename Type>
+inline Type& operator + (Base &base, GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
 
 template <typename Type>
 struct OffsetTo : GenericOffsetTo<Offset, Type> {};

diff --git a/src/hb-ot-layout-common-private.hh b/src/hb-ot-layout-common-private.hh
index 91d5a19..d5a7883 100644
--- a/src/hb-ot-layout-common-private.hh
+++ b/src/hb-ot-layout-common-private.hh
@@ -355,18 +355,16 @@ struct CoverageFormat1
     return i;
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
-                                const USHORT *glyphs,
-                                unsigned int num_glyphs)
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    CoverageFormat1 *t = c->allocate_min<CoverageFormat1> ();
-    if (unlikely (!t)) return TRACE_RETURN (false);
-    t->coverageFormat.set (1);
-    t->glyphArray.len.set (num_glyphs);
-    if (unlikely (!c->extend (t->glyphArray))) return TRACE_RETURN (false);
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    glyphArray.len.set (num_glyphs);
+    if (unlikely (!c->extend (glyphArray))) return TRACE_RETURN (false);
     for (unsigned int i = 0; i < num_glyphs; i++)
-      t->glyphArray[i].set (glyphs[i]);
+      glyphArray[i].set (glyphs[i]);
     return TRACE_RETURN (true);
   }
 
@@ -421,31 +419,32 @@ struct CoverageFormat2
     return NOT_COVERED;
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
-                                const USHORT *glyphs,
-                                unsigned int num_glyphs)
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    CoverageFormat2 *t = c->allocate_min<CoverageFormat2> ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+
+    if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+
     unsigned int num_ranges = 1;
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i])
         num_ranges++;
-    if (unlikely (!t)) return TRACE_RETURN (false);
-    t->coverageFormat.set (2);
-    t->rangeRecord.len.set (num_ranges);
-    if (unlikely (!c->extend (t->rangeRecord))) return TRACE_RETURN (false);
-    if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+    rangeRecord.len.set (num_ranges);
+    if (unlikely (!c->extend (rangeRecord))) return TRACE_RETURN (false);
+
     unsigned int range = 0;
-    t->rangeRecord[range].start.set (glyphs[0]);
-    t->rangeRecord[range].value.set (0);
+    rangeRecord[range].start.set (glyphs[0]);
+    rangeRecord[range].value.set (0);
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i]) {
-        t->rangeRecord[range].start.set (glyphs[i]);
-        t->rangeRecord[range].value.set (i);
+        rangeRecord[range].start.set (glyphs[i]);
+        rangeRecord[range].value.set (i);
         range++;
       } else {
-        t->rangeRecord[range].end = glyphs[i];
+        rangeRecord[range].end = glyphs[i];
       }
     return TRACE_RETURN (true);
   }
 
@@ -526,20 +525,20 @@ struct Coverage
     }
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
-                                const USHORT *glyphs,
-                                unsigned int num_glyphs)
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    unsigned int format;
+    if (unlikely (c->extend_min (*this))) return TRACE_RETURN (false);
     unsigned int num_ranges = 1;
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i])
         num_ranges++;
-    format = num_glyphs * 2 < num_ranges * 3 ? 1 : 2;
-    switch (format) {
-    case 1: return TRACE_RETURN (CoverageFormat1::serialize (c, glyphs, num_glyphs));
-    case 2: return TRACE_RETURN (CoverageFormat2::serialize (c, glyphs, num_glyphs));
+    u.format.set (num_glyphs * 2 < num_ranges * 3 ? 1 : 2);
+    switch (u.format) {
+    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs));
+    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, num_glyphs));
     default:return TRACE_RETURN (false);
     }
   }

diff --git a/src/hb-ot-layout-gsub-table.hh b/src/hb-ot-layout-gsub-table.hh
index 8c68984..714b444 100644
--- a/src/hb-ot-layout-gsub-table.hh
+++ b/src/hb-ot-layout-gsub-table.hh
@@ -72,6 +72,19 @@ struct SingleSubstFormat1
     return TRACE_RETURN (true);
   }
 
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         unsigned int num_glyphs,
+                         SHORT delta)
+  {
+    TRACE_SERIALIZE ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    deltaGlyphID.set (delta);
+    coverage.set_offset (this, c->head);
+    if (unlikely (!(this+coverage).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
+    return TRACE_RETURN (true);
+  }
+
   inline bool sanitize (hb_sanitize_context_t *c) {
     TRACE_SANITIZE ();
     return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
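The set_offset() used in this oldest commit is what the later "[OT] Improve serialize syntax" commit above replaces with the chained form coverage.serialize (c, this).serialize (c, glyphs, num_glyphs): the offset member is pointed at the serializer's current write head relative to the parent, and a reference to that spot is returned so the child can be serialized in place. A minimal standalone sketch of that pattern, with simplified made-up types rather than the real GenericOffsetTo / hb_serialize_context_t:

/* Sketch of the offset-chaining idea; all types are illustrative stand-ins. */
#include <cassert>
#include <cstdio>
#include <cstring>

struct Child { unsigned short value; };

struct OffsetToChild
{
  unsigned short offset;

  /* Point `offset` at the current write head, relative to `base`, and hand
   * back the location the child is about to occupy. */
  Child &serialize (char *head, const void *base)
  {
    offset = (unsigned short) (head - (const char *) base);
    return *reinterpret_cast<Child *> (head);
  }
};

struct Parent { unsigned short format; OffsetToChild child; };

int
main (void)
{
  char buffer[64];
  memset (buffer, 0, sizeof (buffer));
  char *head = buffer;

  Parent *p = reinterpret_cast<Parent *> (head);
  head += sizeof (Parent);                       /* reserve the parent         */
  Child &child = p->child.serialize (head, p);   /* offset now points past it  */
  head += sizeof (Child);                        /* write the child in place   */
  child.value = 42;

  assert ((char *) &child == (char *) p + p->child.offset);
  printf ("offset=%u value=%u\n", (unsigned int) p->child.offset, (unsigned int) child.value);
  return 0;
}

In the real code the serializer's head advances through its own extend/allocate calls rather than manual pointer arithmetic, but the ordering is the same: record the offset first, then serialize the child at that position.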
