[OT] Restart work on serialize()

parent 6912e476dd
commit bc5be24014

@@ -371,13 +371,14 @@ struct hb_serialize_context_t
   }
 
   template <typename Type>
-  inline Type *allocate (unsigned int size, unsigned int alignment = 2)
+  inline Type *allocate_size (unsigned int size, unsigned int alignment = 1)
   {
-    unsigned int padding = (alignment - (this->head - this->start) % alignment) % alignment; /* TODO speedup */
+    unsigned int padding = alignment < 2 ? 0 : (alignment - (this->head - this->start) % alignment) % alignment;
     if (unlikely (this->ran_out_of_room || this->end - this->head > padding + size)) {
       this->ran_out_of_room = true;
       return NULL;
     }
+    memset (this->head, 0, padding + size);
     this->head += padding;
     char *ret = this->head;
     this->head += size;
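
The padding expression in allocate_size() rounds the write head up to the requested alignment, and the new "alignment < 2" fast path skips the modulo work for the now-default alignment of 1. A minimal standalone sketch of the same arithmetic (not HarfBuzz code; the helper name is made up for illustration):

    #include <assert.h>

    /* Bytes of padding needed to bring `offset` (head - start) up to a
     * multiple of `alignment`; same rounding as allocate_size(). */
    static unsigned int pad_for_alignment (unsigned int offset, unsigned int alignment)
    {
      return alignment < 2 ? 0 : (alignment - offset % alignment) % alignment;
    }

    int main (void)
    {
      assert (pad_for_alignment (5, 2) == 1); /* odd offset, 2-byte alignment: pad one byte */
      assert (pad_for_alignment (6, 2) == 0); /* already aligned: no padding */
      assert (pad_for_alignment (7, 1) == 0); /* alignment 1 (the new default): never pads */
      return 0;
    }
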
@@ -387,27 +388,35 @@ struct hb_serialize_context_t
   template <typename Type>
   inline Type *allocate_min (unsigned int alignment = 2)
   {
-    return this->allocate<Type> (Type::min_size, alignment);
+    return this->allocate_size<Type> (Type::min_size, alignment);
   }
 
   template <typename Type>
   inline Type *embed (const Type &obj, unsigned int alignment = 2)
   {
-    return this->allocate<Type> (obj.get_size (), alignment);
+    unsigned int size = obj.get_size ();
+    Type *ret = this->allocate_size<Type> (size, alignment);
+    if (unlikely (!ret)) return NULL;
+    memcpy (ret, obj, size);
+    return ret;
   }
 
   template <typename Type>
-  inline Type *extend (Type &obj, unsigned int size, unsigned int alignment = 2)
+  inline Type *extend_min (Type &obj, unsigned int alignment = 2)
   {
+    unsigned int size = obj.min_size;
     assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
-    this->allocate<Type> (((char *) &obj) + size - this->head, alignment);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
     return reinterpret_cast<Type *> (&obj);
   }
 
   template <typename Type>
-  inline Type *extend (Type &obj)
+  inline Type *extend (Type &obj, unsigned int alignment = 2)
   {
-    return this->extend<Type> (obj, obj.get_size ());
+    unsigned int size = obj.get_size ();
+    assert (this->start < (char *) &obj && (char *) &obj <= this->head && (char *) &obj + size >= this->head);
+    this->allocate_size<Type> (((char *) &obj) + size - this->head, alignment);
+    return reinterpret_cast<Type *> (&obj);
   }
 
   inline void truncate (void *head)
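
The renamed allocate_size() plus the new extend_min()/extend() pair make "grow the object at the end of the buffer in place" the primary way subtables get written, with embed() kept for copying a finished object in. A simplified, self-contained model of that pattern (hypothetical toy types, not the hb_serialize_context_t API):

    #include <string.h>
    #include <stdio.h>

    /* Toy model of the bump-allocating serializer: allocate_size() zero-fills
     * and reserves bytes at head; extend() reserves whatever lies between an
     * object's intended end and the current head, so the last object written
     * can grow in place. */
    struct toy_serializer
    {
      char *start, *head, *end;
      bool ran_out_of_room;

      void init (char *buf, unsigned int len)
      { start = head = buf; end = buf + len; ran_out_of_room = false; }

      char *allocate_size (unsigned int size)
      {
        if (ran_out_of_room || (unsigned int) (end - head) < size)
        { ran_out_of_room = true; return NULL; }
        memset (head, 0, size);
        char *ret = head;
        head += size;
        return ret;
      }

      char *extend (char *obj, unsigned int size)
      {
        allocate_size ((unsigned int) (obj + size - head));
        return obj;
      }
    };

    int main (void)
    {
      char buf[64];
      toy_serializer c;
      c.init (buf, sizeof (buf));

      char *obj = c.allocate_size (4);   /* like extend_min(): reserve the fixed header */
      c.extend (obj, 4 + 10);            /* like extend(): grow it by a 10-byte array tail */
      printf ("wrote %u bytes, overflow: %d\n",
              (unsigned int) (c.head - c.start), (int) c.ran_out_of_room);
      return 0;
    }
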
@@ -585,6 +594,16 @@ struct GenericOffsetTo : OffsetType
     if (unlikely (!offset)) return Null(Type);
     return StructAtOffset<Type> (base, offset);
   }
+  inline Type& operator () (void *base)
+  {
+    unsigned int offset = *this;
+    return StructAtOffset<Type> (base, offset);
+  }
+
+  inline void set_offset (void *base, void *obj)
+  {
+    this->set ((char *) obj - (char *) base);
+  }
 
   inline bool sanitize (hb_sanitize_context_t *c, void *base) {
     TRACE_SANITIZE ();
@@ -615,7 +634,9 @@ struct GenericOffsetTo : OffsetType
   }
 };
 template <typename Base, typename OffsetType, typename Type>
-inline const Type& operator + (const Base &base, GenericOffsetTo<OffsetType, Type> offset) { return offset (base); }
+inline const Type& operator + (const Base &base, const GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
+template <typename Base, typename OffsetType, typename Type>
+inline Type& operator + (Base &base, GenericOffsetTo<OffsetType, Type> &offset) { return offset (base); }
 
 template <typename Type>
 struct OffsetTo : GenericOffsetTo<Offset, Type> {};
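
GenericOffsetTo now gains a non-const operator(), a non-const operator+, and set_offset(), so a serializer can both record where a child subtable lives (as an offset from the parent) and then walk through that link to write the child. A stripped-down sketch of that offset-link idea (toy types, not the OpenType structs):

    #include <assert.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Toy offset link: the parent stores the child's position as a byte offset
     * from the parent's own address, and base + offset resolves back to the child. */
    struct ToyOffsetTo
    {
      uint16_t offset;

      void set_offset (const void *base, const void *obj)
      { offset = (uint16_t) ((const char *) obj - (const char *) base); }

      char *operator () (void *base) const
      { return (char *) base + offset; }
    };

    struct ToyParent
    {
      ToyOffsetTo link;   /* offset to the child, relative to ToyParent */
      char fixed[6];      /* some fixed-size fields */
      char child[8];      /* the child subtable, laid out right after */
    };

    int main (void)
    {
      ToyParent p = {};
      p.link.set_offset (&p, p.child);   /* record where the child starts */
      assert (p.link (&p) == p.child);   /* resolving the link lands on the child */
      assert (p.link.offset == offsetof (ToyParent, child));
      return 0;
    }
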
@@ -355,18 +355,16 @@ struct CoverageFormat1
     return i;
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
+  inline bool serialize (hb_serialize_context_t *c,
                          const USHORT *glyphs,
                          unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    CoverageFormat1 *t = c->allocate_min<CoverageFormat1> ();
-    if (unlikely (!t)) return TRACE_RETURN (false);
-    t->coverageFormat.set (1);
-    t->glyphArray.len.set (num_glyphs);
-    if (unlikely (!c->extend (t->glyphArray))) return TRACE_RETURN (false);
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    glyphArray.len.set (num_glyphs);
+    if (unlikely (!c->extend (glyphArray))) return TRACE_RETURN (false);
     for (unsigned int i = 0; i < num_glyphs; i++)
-      t->glyphArray[i].set (glyphs[i]);
+      glyphArray[i].set (glyphs[i]);
     return TRACE_RETURN (true);
   }
 
@@ -421,31 +419,32 @@ struct CoverageFormat2
     return NOT_COVERED;
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
+  inline bool serialize (hb_serialize_context_t *c,
                          const USHORT *glyphs,
                          unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    CoverageFormat2 *t = c->allocate_min<CoverageFormat2> ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+
+    if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+
     unsigned int num_ranges = 1;
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i])
         num_ranges++;
-    if (unlikely (!t)) return TRACE_RETURN (false);
-    t->coverageFormat.set (2);
-    t->rangeRecord.len.set (num_ranges);
-    if (unlikely (!c->extend (t->rangeRecord))) return TRACE_RETURN (false);
-    if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
+    rangeRecord.len.set (num_ranges);
+    if (unlikely (!c->extend (rangeRecord))) return TRACE_RETURN (false);
+
     unsigned int range = 0;
-    t->rangeRecord[range].start.set (glyphs[0]);
-    t->rangeRecord[range].value.set (0);
+    rangeRecord[range].start.set (glyphs[0]);
+    rangeRecord[range].value.set (0);
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i]) {
-        t->rangeRecord[range].start.set (glyphs[i]);
-        t->rangeRecord[range].value.set (i);
+        rangeRecord[range].start.set (glyphs[i]);
+        rangeRecord[range].value.set (i);
         range++;
       } else {
-        t->rangeRecord[range].end = glyphs[i];
+        rangeRecord[range].end = glyphs[i];
       }
     return TRACE_RETURN (true);
   }
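
CoverageFormat2's serializer first counts how many contiguous runs the glyph list breaks into, since each run becomes one RangeRecord. The counting rule in isolation (same loop, standalone sketch):

    #include <stdio.h>

    /* A new range starts whenever two consecutive glyph IDs are not contiguous;
     * this mirrors the num_ranges loop in CoverageFormat2::serialize(). */
    static unsigned int count_ranges (const unsigned int *glyphs, unsigned int num_glyphs)
    {
      if (!num_glyphs) return 0;
      unsigned int num_ranges = 1;
      for (unsigned int i = 1; i < num_glyphs; i++)
        if (glyphs[i - 1] + 1 != glyphs[i])
          num_ranges++;
      return num_ranges;
    }

    int main (void)
    {
      const unsigned int glyphs[] = {1, 2, 3, 7, 8, 20};
      printf ("%u ranges\n", count_ranges (glyphs, 6)); /* 3 ranges: 1-3, 7-8, 20 */
      return 0;
    }
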
@@ -526,20 +525,20 @@ struct Coverage
     }
   }
 
-  inline static bool serialize (hb_serialize_context_t *c,
+  inline bool serialize (hb_serialize_context_t *c,
                          const USHORT *glyphs,
                          unsigned int num_glyphs)
   {
     TRACE_SERIALIZE ();
-    unsigned int format;
+    if (unlikely (c->extend_min (*this))) return TRACE_RETURN (false);
     unsigned int num_ranges = 1;
     for (unsigned int i = 1; i < num_glyphs; i++)
       if (glyphs[i - 1] + 1 != glyphs[i])
         num_ranges++;
-    format = num_glyphs * 2 < num_ranges * 3 ? 1 : 2;
-    switch (format) {
-    case 1: return TRACE_RETURN (CoverageFormat1::serialize (c, glyphs, num_glyphs));
-    case 2: return TRACE_RETURN (CoverageFormat2::serialize (c, glyphs, num_glyphs));
+    u.format.set (num_glyphs * 2 < num_ranges * 3 ? 1 : 2);
+    switch (u.format) {
+    case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs));
+    case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, num_glyphs));
     default:return TRACE_RETURN (false);
     }
   }
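
Coverage::serialize() now stamps the chosen format into the union and dispatches to the matching subformat, picking between the flat glyph array and the range list with the num_glyphs * 2 < num_ranges * 3 test. Evaluating that test exactly as written for the sample set used above (6 glyphs collapsing into 3 ranges), just to see which branch it takes:

    #include <stdio.h>

    int main (void)
    {
      unsigned int num_glyphs = 6, num_ranges = 3;     /* e.g. glyphs 1,2,3,7,8,20 */
      unsigned int format = num_glyphs * 2 < num_ranges * 3 ? 1 : 2;
      printf ("chosen coverage format: %u\n", format); /* 12 < 9 is false, so format 2 */
      return 0;
    }
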
@@ -72,6 +72,19 @@ struct SingleSubstFormat1
     return TRACE_RETURN (true);
   }
 
+  inline bool serialize (hb_serialize_context_t *c,
+                         const USHORT *glyphs,
+                         unsigned int num_glyphs,
+                         SHORT delta)
+  {
+    TRACE_SERIALIZE ();
+    if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
+    deltaGlyphID.set (delta);
+    coverage.set_offset (this, c->head);
+    if (unlikely (!(this+coverage).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
+    return TRACE_RETURN (true);
+  }
+
   inline bool sanitize (hb_sanitize_context_t *c) {
     TRACE_SANITIZE ();
     return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
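
The new SingleSubstFormat1::serialize() shows how these pieces are meant to compose: extend the fixed-size header in place, fill its fields, point the coverage offset at the current head, and let the Coverage subtable serialize itself there. A self-contained toy walk-through of that layout (plain buffers and made-up sizes, not the OpenType types):

    #include <stdio.h>
    #include <string.h>
    #include <stdint.h>

    /* Toy walk-through: parent header first, child subtable written right after,
     * with the parent recording the child's start as an offset from itself. */
    int main (void)
    {
      char buf[64];
      char *head = buf;

      /* "c->extend_min (*this)": reserve the parent's fixed header (6 bytes here). */
      char *parent = head;
      memset (head, 0, 6);
      head += 6;

      /* "coverage.set_offset (this, c->head)": child starts at the current head. */
      uint16_t coverage_offset = (uint16_t) (head - parent);

      /* "(this+coverage).serialize (...)": the child now writes itself at head. */
      memset (head, 0, 10);   /* pretend the coverage table took 10 bytes */
      head += 10;

      printf ("coverage offset %u, total %u bytes\n",
              (unsigned int) coverage_offset, (unsigned int) (head - buf));
      return 0;
    }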