diff --git a/src/hb-aat-layout-common-private.hh b/src/hb-aat-layout-common-private.hh
index 32ba63267..887350bb6 100644
--- a/src/hb-aat-layout-common-private.hh
+++ b/src/hb-aat-layout-common-private.hh
@@ -151,6 +151,7 @@ struct UnsizedArrayOf
   inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
   {
     TRACE_SANITIZE (this);
+    if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
 
     /* Note: for structs that do not reference other structs,
      * we do not need to call their sanitize() as we already did
@@ -159,8 +160,34 @@
      * pointed to do have a simple sanitize(), ie. they do not
      * reference other structs via offsets. */
-    (void) (false && count && arrayZ->sanitize (c));
+    (void) (false && arrayZ[0].sanitize (c));
 
+    return_trace (true);
+  }
+  inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base) const
+  {
+    TRACE_SANITIZE (this);
+    if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
+    for (unsigned int i = 0; i < count; i++)
+      if (unlikely (!arrayZ[i].sanitize (c, base)))
+        return_trace (false);
+    return_trace (true);
+  }
+  template <typename T>
+  inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, const void *base, T user_data) const
+  {
+    TRACE_SANITIZE (this);
+    if (unlikely (!sanitize_shallow (c, count))) return_trace (false);
+    for (unsigned int i = 0; i < count; i++)
+      if (unlikely (!arrayZ[i].sanitize (c, base, user_data)))
+        return_trace (false);
+    return_trace (true);
+  }
+
+  private:
+  inline bool sanitize_shallow (hb_sanitize_context_t *c, unsigned int count) const
+  {
+    TRACE_SANITIZE (this);
     return_trace (c->check_array (arrayZ, arrayZ[0].static_size, count));
   }
 
@@ -170,6 +197,32 @@ struct UnsizedArrayOf
   DEFINE_SIZE_ARRAY (0, arrayZ);
 };
 
+/* Unsized array of offsets */
+template <typename Type, typename OffsetType>
+struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType> > {};
+
+/* Unsized array of offsets relative to the beginning of the array itself. */
+template <typename Type, typename OffsetType>
+struct UnsizedOffsetListOf : UnsizedOffsetArrayOf<Type, OffsetType>
+{
+  inline const Type& operator [] (unsigned int i) const
+  {
+    return this+this->arrayZ[i];
+  }
+
+  inline bool sanitize (hb_sanitize_context_t *c, unsigned int count) const
+  {
+    TRACE_SANITIZE (this);
+    return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this)));
+  }
+  template <typename T>
+  inline bool sanitize (hb_sanitize_context_t *c, unsigned int count, T user_data) const
+  {
+    TRACE_SANITIZE (this);
+    return_trace ((UnsizedOffsetArrayOf<Type, OffsetType>::sanitize (c, count, this, user_data)));
+  }
+};
+
 
 /*
  * Lookup Table
@@ -507,10 +560,9 @@ struct StateTable
     return &entries[entry]; /* XXX bound check. */
   }
 
-  inline bool sanitize (hb_sanitize_context_t *c, unsigned int num_glyphs) const
+  inline bool sanitize (hb_sanitize_context_t *c) const
   {
     TRACE_SANITIZE (this);
-    return_trace (true);
     return_trace (c->check_struct (this)); /* XXX */
   }
 
diff --git a/src/hb-aat-layout-morx-table.hh b/src/hb-aat-layout-morx-table.hh
index 86e78b54f..6c9c51544 100644
--- a/src/hb-aat-layout-morx-table.hh
+++ b/src/hb-aat-layout-morx-table.hh
@@ -42,11 +42,17 @@ using namespace OT;
 template <typename Types>
 struct RearrangementSubtable
 {
-  enum {
-    MarkFirst   = 0x8000,
-    DontAdvance = 0x4000,
-    MarkLast    = 0x2000,
-    Verb        = 0x000F,
+  enum Flags {
+    MarkFirst   = 0x8000,  /* If set, make the current glyph the first
+                            * glyph to be rearranged. */
+    DontAdvance = 0x4000,  /* If set, don't advance to the next glyph
+                            * before going to the new state.  This means
+                            * that the glyph index doesn't change, even
+                            * if the glyph at that index has changed. */
+    MarkLast    = 0x2000,  /* If set, make the current glyph the last
+                            * glyph to be rearranged. */
+    Reserved    = 0x1FF0,  /* These bits are reserved and should be set to 0. */
+    Verb        = 0x000F,  /* The type of rearrangement specified. */
   };
 
   inline bool apply (hb_apply_context_t *c) const
@@ -161,7 +167,7 @@ struct RearrangementSubtable
   inline bool sanitize (hb_sanitize_context_t *c) const
   {
     TRACE_SANITIZE (this);
-    return_trace (machine.sanitize (c, 0/*XXX*/));
+    return_trace (machine.sanitize (c));
   }
 
   protected:
@@ -170,8 +176,27 @@ struct RearrangementSubtable
   DEFINE_SIZE_MIN (2);
 };
 
+template <typename Types>
 struct ContextualSubtable
 {
+  typedef typename Types::HBUINT HBUINT;
+
+  enum Flags {
+    SetMark     = 0x8000,  /* If set, make the current glyph the marked glyph. */
+    DontAdvance = 0x4000,  /* If set, don't advance to the next glyph before
+                            * going to the new state. */
+    Reserved    = 0x3FFF,  /* These bits are reserved and should be set to 0. */
+  };
+
+  /* XXX The following is different in mort: there it's a direct index into the sublookups. */
+  struct EntryData
+  {
+    HBUINT16 markIndex;    /* Index of the substitution table for the
+                            * marked glyph (use 0xFFFF for none). */
+    HBUINT16 currentIndex; /* Index of the substitution table for the
+                            * current glyph (use 0xFFFF for none). */
+  };
+
   inline bool apply (hb_apply_context_t *c) const
   {
     TRACE_APPLY (this);
@@ -182,11 +207,19 @@
   inline bool sanitize (hb_sanitize_context_t *c) const
   {
     TRACE_SANITIZE (this);
-    /* TODO */
-    return_trace (false);
+    return_trace (machine.sanitize (c) &&
+                  substitutionTables.sanitize (c, this, 0U/*XXX count*/));
   }
+
+  protected:
+  StateTable<Types, EntryData> machine;
+  OffsetTo<UnsizedOffsetListOf<Lookup<GlyphID>, HBUINT>, HBUINT>
+                               substitutionTables;
+  public:
+  DEFINE_SIZE_MIN (2);
 };
 
+template <typename Types>
 struct LigatureSubtable
 {
   inline bool apply (hb_apply_context_t *c) const
@@ -204,6 +237,7 @@ struct LigatureSubtable
   }
 };
 
+template <typename Types>
 struct NoncontextualSubtable
 {
   inline bool apply (hb_apply_context_t *c) const
@@ -240,6 +274,7 @@ struct NoncontextualSubtable
   DEFINE_SIZE_MIN (2);
 };
 
+template <typename Types>
 struct InsertionSubtable
 {
   inline bool apply (hb_apply_context_t *c) const
@@ -334,12 +369,11 @@ struct ChainSubtable
   HBUINT   coverage;        /* Coverage flags and subtable type. */
   HBUINT32 subFeatureFlags; /* The 32-bit mask identifying which subtable this is. */
   union {
-    RearrangementSubtable<Types>
-                rearrangement;
-    ContextualSubtable    contextual;
-    LigatureSubtable      ligature;
-    NoncontextualSubtable noncontextual;
-    InsertionSubtable     insertion;
+    RearrangementSubtable<Types> rearrangement;
+    ContextualSubtable<Types>    contextual;
+    LigatureSubtable<Types>      ligature;
+    NoncontextualSubtable<Types> noncontextual;
+    InsertionSubtable<Types>     insertion;
   } u;
   public:
   DEFINE_SIZE_MIN (2 * sizeof (HBUINT) + 4);
@@ -455,8 +489,22 @@ struct mortmorx
   DEFINE_SIZE_MIN (8);
 };
 
+struct MortTypes
+{
+  static const bool extended = false;
+  typedef HBUINT16 HBUINT;
+  typedef HBUINT8 HBUSHORT;
+  struct ClassType : ClassTable
+  {
+    inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs HB_UNUSED) const
+    {
+      return ClassTable::get_class (glyph_id);
+    }
+  };
+};
 struct MorxTypes
 {
+  static const bool extended = true;
   typedef HBUINT32 HBUINT;
   typedef HBUINT16 HBUSHORT;
   struct ClassType : Lookup<HBUINT16>
@@ -468,18 +516,6 @@ struct MorxTypes
     }
   };
 };
-struct MortTypes
-{
-  typedef HBUINT16 HBUINT;
-  typedef HBUINT8 HBUSHORT;
-  struct ClassType : ClassTable
-  {
-    inline unsigned int get_class (hb_codepoint_t glyph_id, unsigned int num_glyphs HB_UNUSED) const
-    {
-      return ClassTable::get_class (glyph_id);
-    }
-  };
-};
 
 struct mort : mortmorx<MortTypes>
 {
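
Note on the UnsizedOffsetListOf addition: the offsets it stores are resolved against the start of the offset array itself (its operator [] returns this+this->arrayZ[i]), not against the enclosing table. Below is a minimal standalone C++ sketch of that layout, not HarfBuzz code; the names (resolve, buf) and the host-endian reads are illustrative assumptions, whereas real font data is big-endian and goes through HBUINT16.

#include <cstdint>
#include <cstring>
#include <iostream>
#include <vector>

/* Resolve element i of an array of 16-bit offsets that are measured from the
 * start of the offset array itself (the pattern UnsizedOffsetListOf models). */
static const uint8_t *
resolve (const uint8_t *array_base, unsigned int i)
{
  uint16_t off;
  std::memcpy (&off, array_base + 2 * i, sizeof (off)); /* host-endian for brevity */
  return array_base + off;
}

int
main ()
{
  /* Layout: [off0][off1][payload0][payload1]; offsets are relative to buf[0]. */
  std::vector<uint8_t> buf (6, 0);
  uint16_t off0 = 4, off1 = 5;
  std::memcpy (&buf[0], &off0, sizeof (off0));
  std::memcpy (&buf[2], &off1, sizeof (off1));
  buf[4] = 0xAA;
  buf[5] = 0xBB;
  std::cout << std::hex
            << (int) *resolve (buf.data (), 0) << " "   /* prints "aa" */
            << (int) *resolve (buf.data (), 1) << "\n"; /* prints "bb" */
  return 0;
}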
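The MortTypes/MorxTypes split at the end of the diff follows a types-policy pattern: the same subtable templates are instantiated once with the narrow mort integer types and once with the wide morx ones, and field widths (and hence DEFINE_SIZE_MIN (2 * sizeof (HBUINT) + 4)) follow from the parameter. A hedged sketch of that idea in plain C++; ShortTypes, ExtTypes, and Subtable are illustrative names, not the HarfBuzz API.

#include <cstdint>
#include <iostream>

struct ShortTypes            /* analogue of MortTypes */
{
  static const bool extended = false;
  typedef uint16_t HBUINT;
  typedef uint8_t  HBUSHORT;
};

struct ExtTypes              /* analogue of MorxTypes */
{
  static const bool extended = true;
  typedef uint32_t HBUINT;
  typedef uint16_t HBUSHORT;
};

template <typename Types>
struct Subtable
{
  typedef typename Types::HBUINT HBUINT;

  /* The field widths, and therefore the minimum size, track the Types parameter. */
  HBUINT   length;
  HBUINT   coverage;
  uint32_t subFeatureFlags;

  static unsigned int min_size () { return (unsigned int) (2 * sizeof (HBUINT) + 4); }
};

int
main ()
{
  std::cout << Subtable<ShortTypes>::min_size () << "\n"; /* 8  */
  std::cout << Subtable<ExtTypes>::min_size ()   << "\n"; /* 12 */
  std::cout << std::boolalpha << ExtTypes::extended << "\n"; /* true */
  return 0;
}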