Define return_trace()

No functional change (expected!).
Behdad Esfahbod 2015-09-29 14:57:02 +01:00
parent c917965b9e
commit b47159011c
16 changed files with 575 additions and 546 deletions
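
The change below touches call sites only: every "return TRACE_RETURN (expr);" becomes "return_trace (expr);", folding the return keyword into the macro itself. The definition of return_trace() is added in HarfBuzz's private header and is not visible in the portion of the diff captured here. As a rough, hypothetical sketch of the idea (assuming, as the existing TRACE_SANITIZE/TRACE_SERIALIZE/TRACE_APPLY/TRACE_DISPATCH macros do, that a local tracing object named "trace" is already in scope), it could look something like this:

    /* Hypothetical sketch only -- not the actual hb-private.hh definition.
     * trace_sketch_t stands in for the tracing helper that the TRACE_*
     * macros are assumed to declare under the local name `trace`. */
    struct trace_sketch_t
    {
      template <typename T>
      T ret (T v, unsigned int line)
      {
        (void) line; /* a real implementation would log v and line when tracing is enabled */
        return v;    /* the traced value is returned unchanged */
      }
    };

    #define TRACE_SANITIZE(obj) trace_sketch_t trace /* hypothetical stand-in */
    #define return_trace(RET)   return trace.ret (RET, __LINE__)

With a definition of that shape, a call site such as "return TRACE_RETURN (c->check_struct (this));" rewrites mechanically to "return_trace (c->check_struct (this));", which is why no functional change is expected.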

@ -56,7 +56,7 @@ typedef struct TableRecord
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
Tag tag; /* 4-byte identifier. */
@ -106,7 +106,7 @@ typedef struct OffsetTable
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && c->check_array (tables, TableRecord::static_size, numTables));
return_trace (c->check_struct (this) && c->check_array (tables, TableRecord::static_size, numTables));
}
protected:
@ -135,7 +135,7 @@ struct TTCHeaderVersion1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (table.sanitize (c, this));
return_trace (table.sanitize (c, this));
}
protected:
@ -175,11 +175,11 @@ struct TTCHeader
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!u.header.version.sanitize (c))) return TRACE_RETURN (false);
if (unlikely (!u.header.version.sanitize (c))) return_trace (false);
switch (u.header.version.major) {
case 2: /* version 2 is compatible with version 1 */
case 1: return TRACE_RETURN (u.version1.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.version1.sanitize (c));
default:return_trace (true);
}
}
@ -240,14 +240,14 @@ struct OpenTypeFontFile
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!u.tag.sanitize (c))) return TRACE_RETURN (false);
if (unlikely (!u.tag.sanitize (c))) return_trace (false);
switch (u.tag) {
case CFFTag: /* All the non-collection tags */
case TrueTag:
case Typ1Tag:
case TrueTypeTag: return TRACE_RETURN (u.fontFace.sanitize (c));
case TTCTag: return TRACE_RETURN (u.ttcHeader.sanitize (c));
default: return TRACE_RETURN (true);
case TrueTypeTag: return_trace (u.fontFace.sanitize (c));
case TTCTag: return_trace (u.ttcHeader.sanitize (c));
default: return_trace (true);
}
}

@ -624,7 +624,7 @@ struct IntType
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (likely (c->check_struct (this)));
return_trace (likely (c->check_struct (this)));
}
protected:
BEInt<Type, Size> v;
@ -652,7 +652,7 @@ struct LONGDATETIME
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (likely (c->check_struct (this)));
return_trace (likely (c->check_struct (this)));
}
protected:
LONG major;
@ -729,7 +729,7 @@ struct FixedVersion
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
USHORT major;
@ -765,21 +765,21 @@ struct OffsetTo : Offset<OffsetType>
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
if (unlikely (!c->check_struct (this))) return_trace (false);
unsigned int offset = *this;
if (unlikely (!offset)) return TRACE_RETURN (true);
if (unlikely (!offset)) return_trace (true);
const Type &obj = StructAtOffset<Type> (base, offset);
return TRACE_RETURN (likely (obj.sanitize (c)) || neuter (c));
return_trace (likely (obj.sanitize (c)) || neuter (c));
}
template <typename T>
inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
if (unlikely (!c->check_struct (this))) return_trace (false);
unsigned int offset = *this;
if (unlikely (!offset)) return TRACE_RETURN (true);
if (unlikely (!offset)) return_trace (true);
const Type &obj = StructAtOffset<Type> (base, offset);
return TRACE_RETURN (likely (obj.sanitize (c, user_data)) || neuter (c));
return_trace (likely (obj.sanitize (c, user_data)) || neuter (c));
}
/* Set the offset to Null */
@ -830,10 +830,10 @@ struct ArrayOf
unsigned int items_len)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
len.set (items_len); /* TODO(serialize) Overflow? */
if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!c->extend (*this))) return_trace (false);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
@ -841,17 +841,17 @@ struct ArrayOf
unsigned int items_len)
{
TRACE_SERIALIZE (this);
if (unlikely (!serialize (c, items_len))) return TRACE_RETURN (false);
if (unlikely (!serialize (c, items_len))) return_trace (false);
for (unsigned int i = 0; i < items_len; i++)
array[i] = items[i];
items.advance (items_len);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
/* Note: for structs that do not reference other structs,
* we do not need to call their sanitize() as we already did
@ -862,28 +862,28 @@ struct ArrayOf
*/
(void) (false && array[0].sanitize (c));
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!array[i].sanitize (c, base)))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
return_trace (false);
return_trace (true);
}
template <typename T>
inline bool sanitize (hb_sanitize_context_t *c, const void *base, T user_data) const
{
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
unsigned int count = len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!array[i].sanitize (c, base, user_data)))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
return_trace (false);
return_trace (true);
}
template <typename SearchType>
@ -900,7 +900,7 @@ struct ArrayOf
inline bool sanitize_shallow (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && c->check_array (this, Type::static_size, len));
return_trace (c->check_struct (this) && c->check_array (this, Type::static_size, len));
}
public:
@ -927,13 +927,13 @@ struct OffsetListOf : OffsetArrayOf<Type>
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this));
return_trace (OffsetArrayOf<Type>::sanitize (c, this));
}
template <typename T>
inline bool sanitize (hb_sanitize_context_t *c, T user_data) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (OffsetArrayOf<Type>::sanitize (c, this, user_data));
return_trace (OffsetArrayOf<Type>::sanitize (c, this, user_data));
}
};
@ -955,14 +955,14 @@ struct HeadlessArrayOf
unsigned int items_len)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
len.set (items_len); /* TODO(serialize) Overflow? */
if (unlikely (!items_len)) return TRACE_RETURN (true);
if (unlikely (!c->extend (*this))) return TRACE_RETURN (false);
if (unlikely (!items_len)) return_trace (true);
if (unlikely (!c->extend (*this))) return_trace (false);
for (unsigned int i = 0; i < items_len - 1; i++)
array[i] = items[i];
items.advance (items_len - 1);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize_shallow (hb_sanitize_context_t *c) const
@ -974,7 +974,7 @@ struct HeadlessArrayOf
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!sanitize_shallow (c))) return TRACE_RETURN (false);
if (unlikely (!sanitize_shallow (c))) return_trace (false);
/* Note: for structs that do not reference other structs,
* we do not need to call their sanitize() as we already did
@ -985,7 +985,7 @@ struct HeadlessArrayOf
*/
(void) (false && array[0].sanitize (c));
return TRACE_RETURN (true);
return_trace (true);
}
LenType len;

@ -54,7 +54,7 @@ struct CmapSubtableFormat0
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
protected:
@ -130,7 +130,7 @@ struct CmapSubtableFormat4
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this)))
return TRACE_RETURN (false);
return_trace (false);
if (unlikely (!c->check_range (this, length)))
{
@ -141,10 +141,10 @@ struct CmapSubtableFormat4
(uintptr_t) (c->end -
(char *) this));
if (!c->try_set (&length, new_length))
return TRACE_RETURN (false);
return_trace (false);
}
return TRACE_RETURN (16 + 4 * (unsigned int) segCountX2 <= length);
return_trace (16 + 4 * (unsigned int) segCountX2 <= length);
}
protected:
@ -187,7 +187,7 @@ struct CmapSubtableLongGroup
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
private:
@ -215,7 +215,7 @@ struct CmapSubtableTrimmed
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && glyphIdArray.sanitize (c));
return_trace (c->check_struct (this) && glyphIdArray.sanitize (c));
}
protected:
@ -248,7 +248,7 @@ struct CmapSubtableLongSegmented
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && groups.sanitize (c));
return_trace (c->check_struct (this) && groups.sanitize (c));
}
protected:
@ -295,7 +295,7 @@ struct UnicodeValueRange
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
UINT24 startUnicodeValue; /* First value in this range. */
@ -317,7 +317,7 @@ struct UVSMapping
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
UINT24 unicodeValue; /* Base Unicode value of the UVS */
@ -357,9 +357,9 @@ struct VariationSelectorRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
defaultUVS.sanitize (c, base) &&
nonDefaultUVS.sanitize (c, base));
return_trace (c->check_struct (this) &&
defaultUVS.sanitize (c, base) &&
nonDefaultUVS.sanitize (c, base));
}
UINT24 varSelector; /* Variation selector. */
@ -383,8 +383,8 @@ struct CmapSubtableFormat14
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
record.sanitize (c, this));
return_trace (c->check_struct (this) &&
record.sanitize (c, this));
}
protected:
@ -429,16 +429,16 @@ struct CmapSubtable
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 0: return TRACE_RETURN (u.format0 .sanitize (c));
case 4: return TRACE_RETURN (u.format4 .sanitize (c));
case 6: return TRACE_RETURN (u.format6 .sanitize (c));
case 10: return TRACE_RETURN (u.format10.sanitize (c));
case 12: return TRACE_RETURN (u.format12.sanitize (c));
case 13: return TRACE_RETURN (u.format13.sanitize (c));
case 14: return TRACE_RETURN (u.format14.sanitize (c));
default:return TRACE_RETURN (true);
case 0: return_trace (u.format0 .sanitize (c));
case 4: return_trace (u.format4 .sanitize (c));
case 6: return_trace (u.format6 .sanitize (c));
case 10: return_trace (u.format10.sanitize (c));
case 12: return_trace (u.format12.sanitize (c));
case 13: return_trace (u.format13.sanitize (c));
case 14: return_trace (u.format14.sanitize (c));
default:return_trace (true);
}
}
@ -473,8 +473,8 @@ struct EncodingRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
subtable.sanitize (c, base));
return_trace (c->check_struct (this) &&
subtable.sanitize (c, base));
}
USHORT platformID; /* Platform ID. */
@ -509,9 +509,9 @@ struct cmap
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
likely (version == 0) &&
encodingRecord.sanitize (c, this));
return_trace (c->check_struct (this) &&
likely (version == 0) &&
encodingRecord.sanitize (c, this));
}
USHORT version; /* Table version number (0). */

@ -47,7 +47,7 @@ struct loca
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (true);
return_trace (true);
}
public:
@ -75,7 +75,7 @@ struct glyf
TRACE_SANITIZE (this);
/* We don't check for anything specific here. The users of the
* struct do all the hard work... */
return TRACE_RETURN (true);
return_trace (true);
}
public:

@ -55,7 +55,7 @@ struct head
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && likely (version.major == 1));
return_trace (c->check_struct (this) && likely (version.major == 1));
}
protected:

@ -52,7 +52,7 @@ struct _hea
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && likely (version.major == 1));
return_trace (c->check_struct (this) && likely (version.major == 1));
}
public:

@ -62,7 +62,7 @@ struct _mtx
TRACE_SANITIZE (this);
/* We don't check for anything specific here. The users of the
* struct do all the hard work... */
return TRACE_RETURN (true);
return_trace (true);
}
public:

@ -75,7 +75,7 @@ struct Record
{
TRACE_SANITIZE (this);
const sanitize_closure_t closure = {tag, base};
return TRACE_RETURN (c->check_struct (this) && offset.sanitize (c, base, &closure));
return_trace (c->check_struct (this) && offset.sanitize (c, base, &closure));
}
Tag tag; /* 4-byte Tag identifier */
@ -131,7 +131,7 @@ struct RecordListOf : RecordArrayOf<Type>
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (RecordArrayOf<Type>::sanitize (c, this));
return_trace (RecordArrayOf<Type>::sanitize (c, this));
}
};
@ -145,7 +145,7 @@ struct RangeRecord
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
inline bool intersects (const hb_set_t *glyphs) const {
@ -211,7 +211,7 @@ struct LangSys
const Record<LangSys>::sanitize_closure_t * = NULL) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && featureIndex.sanitize (c));
return_trace (c->check_struct (this) && featureIndex.sanitize (c));
}
Offset<> lookupOrderZ; /* = Null (reserved for an offset to a
@ -251,7 +251,7 @@ struct Script
const Record<Script>::sanitize_closure_t * = NULL) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
return_trace (defaultLangSys.sanitize (c, this) && langSys.sanitize (c, this));
}
protected:
@ -274,7 +274,7 @@ struct FeatureParamsSize
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!c->check_struct (this))) return TRACE_RETURN (false);
if (unlikely (!c->check_struct (this))) return_trace (false);
/* This subtable has some "history", if you will. Some earlier versions of
* Adobe tools calculated the offset of the FeatureParams sutable from the
@ -326,19 +326,19 @@ struct FeatureParamsSize
*/
if (!designSize)
return TRACE_RETURN (false);
return_trace (false);
else if (subfamilyID == 0 &&
subfamilyNameID == 0 &&
rangeStart == 0 &&
rangeEnd == 0)
return TRACE_RETURN (true);
return_trace (true);
else if (designSize < rangeStart ||
designSize > rangeEnd ||
subfamilyNameID < 256 ||
subfamilyNameID > 32767)
return TRACE_RETURN (false);
return_trace (false);
else
return TRACE_RETURN (true);
return_trace (true);
}
USHORT designSize; /* Represents the design size in 720/inch
@ -388,7 +388,7 @@ struct FeatureParamsStylisticSet
TRACE_SANITIZE (this);
/* Right now minorVersion is at zero. Which means, any table supports
* the uiNameID field. */
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
USHORT version; /* (set to 0): This corresponds to a “minor”
@ -420,8 +420,8 @@ struct FeatureParamsCharacterVariants
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
characters.sanitize (c));
return_trace (c->check_struct (this) &&
characters.sanitize (c));
}
USHORT format; /* Format number is set to 0. */
@ -462,12 +462,12 @@ struct FeatureParams
{
TRACE_SANITIZE (this);
if (tag == HB_TAG ('s','i','z','e'))
return TRACE_RETURN (u.size.sanitize (c));
return_trace (u.size.sanitize (c));
if ((tag & 0xFFFF0000u) == HB_TAG ('s','s','\0','\0')) /* ssXX */
return TRACE_RETURN (u.stylisticSet.sanitize (c));
return_trace (u.stylisticSet.sanitize (c));
if ((tag & 0xFFFF0000u) == HB_TAG ('c','v','\0','\0')) /* cvXX */
return TRACE_RETURN (u.characterVariants.sanitize (c));
return TRACE_RETURN (true);
return_trace (u.characterVariants.sanitize (c));
return_trace (true);
}
inline const FeatureParamsSize& get_size_params (hb_tag_t tag) const
@ -505,7 +505,7 @@ struct Feature
{
TRACE_SANITIZE (this);
if (unlikely (!(c->check_struct (this) && lookupIndex.sanitize (c))))
return TRACE_RETURN (false);
return_trace (false);
/* Some earlier versions of Adobe tools calculated the offset of the
* FeatureParams subtable from the beginning of the FeatureList table!
@ -520,10 +520,10 @@ struct Feature
OffsetTo<FeatureParams> orig_offset = featureParams;
if (unlikely (!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE)))
return TRACE_RETURN (false);
return_trace (false);
if (likely (orig_offset.is_null ()))
return TRACE_RETURN (true);
return_trace (true);
if (featureParams == 0 && closure &&
closure->tag == HB_TAG ('s','i','z','e') &&
@ -538,10 +538,10 @@ struct Feature
if (new_offset == new_offset_int &&
c->try_set (&featureParams, new_offset) &&
!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
return TRACE_RETURN (false);
return_trace (false);
}
return TRACE_RETURN (true);
return_trace (true);
}
OffsetTo<FeatureParams>
@ -613,9 +613,9 @@ struct Lookup
for (unsigned int i = 0; i < count; i++) {
typename context_t::return_t r = get_subtable<SubTableType> (i).dispatch (c, lookup_type);
if (c->stop_sublookup_iteration (r))
return TRACE_RETURN (r);
return_trace (r);
}
return TRACE_RETURN (c->default_return_value ());
return_trace (c->default_return_value ());
}
inline bool serialize (hb_serialize_context_t *c,
@ -624,29 +624,29 @@ struct Lookup
unsigned int num_subtables)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
lookupType.set (lookup_type);
lookupFlag.set (lookup_props & 0xFFFFu);
if (unlikely (!subTable.serialize (c, num_subtables))) return TRACE_RETURN (false);
if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
if (lookupFlag & LookupFlag::UseMarkFilteringSet)
{
USHORT &markFilteringSet = StructAfter<USHORT> (subTable);
markFilteringSet.set (lookup_props >> 16);
}
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
/* Real sanitize of the subtables is done by GSUB/GPOS/... */
if (!(c->check_struct (this) && subTable.sanitize (c))) return TRACE_RETURN (false);
if (!(c->check_struct (this) && subTable.sanitize (c))) return_trace (false);
if (lookupFlag & LookupFlag::UseMarkFilteringSet)
{
const USHORT &markFilteringSet = StructAfter<USHORT> (subTable);
if (!markFilteringSet.sanitize (c)) return TRACE_RETURN (false);
if (!markFilteringSet.sanitize (c)) return_trace (false);
}
return TRACE_RETURN (true);
return_trace (true);
}
private:
@ -685,19 +685,19 @@ struct CoverageFormat1
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
glyphArray.len.set (num_glyphs);
if (unlikely (!c->extend (glyphArray))) return TRACE_RETURN (false);
if (unlikely (!c->extend (glyphArray))) return_trace (false);
for (unsigned int i = 0; i < num_glyphs; i++)
glyphArray[i] = glyphs[i];
glyphs.advance (num_glyphs);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (glyphArray.sanitize (c));
return_trace (glyphArray.sanitize (c));
}
inline bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const {
@ -754,16 +754,16 @@ struct CoverageFormat2
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!num_glyphs)) return TRACE_RETURN (true);
if (unlikely (!num_glyphs)) return_trace (true);
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < num_glyphs; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
rangeRecord.len.set (num_ranges);
if (unlikely (!c->extend (rangeRecord))) return TRACE_RETURN (false);
if (unlikely (!c->extend (rangeRecord))) return_trace (false);
unsigned int range = 0;
rangeRecord[range].start = glyphs[0];
@ -778,13 +778,13 @@ struct CoverageFormat2
rangeRecord[range].end = glyphs[i];
}
glyphs.advance (num_glyphs);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (rangeRecord.sanitize (c));
return_trace (rangeRecord.sanitize (c));
}
inline bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const {
@ -864,27 +864,27 @@ struct Coverage
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < num_glyphs; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
u.format.set (num_glyphs * 2 < num_ranges * 3 ? 1 : 2);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs));
case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, num_glyphs));
default:return TRACE_RETURN (false);
case 1: return_trace (u.format1.serialize (c, glyphs, num_glyphs));
case 2: return_trace (u.format2.serialize (c, glyphs, num_glyphs));
default:return_trace (false);
}
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
default:return_trace (true);
}
}
@ -993,7 +993,7 @@ struct ClassDefFormat1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && classValue.sanitize (c));
return_trace (c->check_struct (this) && classValue.sanitize (c));
}
template <typename set_t>
@ -1050,7 +1050,7 @@ struct ClassDefFormat2
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (rangeRecord.sanitize (c));
return_trace (rangeRecord.sanitize (c));
}
template <typename set_t>
@ -1108,11 +1108,11 @@ struct ClassDef
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
default:return_trace (true);
}
}
@ -1201,7 +1201,7 @@ struct Device
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && c->check_range (this, this->get_size ()));
return_trace (c->check_struct (this) && c->check_range (this, this->get_size ()));
}
protected:

@ -74,7 +74,7 @@ struct AttachList
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && attachPoint.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && attachPoint.sanitize (c, this));
}
protected:
@ -105,7 +105,7 @@ struct CaretValueFormat1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
protected:
@ -132,7 +132,7 @@ struct CaretValueFormat2
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
protected:
@ -156,7 +156,7 @@ struct CaretValueFormat3
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && deviceTable.sanitize (c, this));
return_trace (c->check_struct (this) && deviceTable.sanitize (c, this));
}
protected:
@ -185,12 +185,12 @@ struct CaretValue
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
case 3: return TRACE_RETURN (u.format3.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
case 3: return_trace (u.format3.sanitize (c));
default:return_trace (true);
}
}
@ -227,7 +227,7 @@ struct LigGlyph
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (carets.sanitize (c, this));
return_trace (carets.sanitize (c, this));
}
protected:
@ -262,7 +262,7 @@ struct LigCaretList
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ligGlyph.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && ligGlyph.sanitize (c, this));
}
protected:
@ -285,7 +285,7 @@ struct MarkGlyphSetsFormat1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this));
return_trace (coverage.sanitize (c, this));
}
protected:
@ -310,10 +310,10 @@ struct MarkGlyphSets
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.format1.sanitize (c));
default:return_trace (true);
}
}
@ -376,13 +376,13 @@ struct GDEF
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (version.sanitize (c) &&
likely (version.major == 1) &&
glyphClassDef.sanitize (c, this) &&
attachList.sanitize (c, this) &&
ligCaretList.sanitize (c, this) &&
markAttachClassDef.sanitize (c, this) &&
(version.to_int () < 0x00010002u || markGlyphSetsDef[0].sanitize (c, this)));
return_trace (version.sanitize (c) &&
likely (version.major == 1) &&
glyphClassDef.sanitize (c, this) &&
attachList.sanitize (c, this) &&
ligCaretList.sanitize (c, this) &&
markAttachClassDef.sanitize (c, this) &&
(version.to_int () < 0x00010002u || markGlyphSetsDef[0].sanitize (c, this)));
}

@ -181,7 +181,7 @@ struct ValueFormat : USHORT
inline bool sanitize_value (hb_sanitize_context_t *c, const void *base, const Value *values) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
return_trace (c->check_range (values, get_size ()) && (!has_device () || sanitize_value_devices (c, base, values)));
}
inline bool sanitize_values (hb_sanitize_context_t *c, const void *base, const Value *values, unsigned int count) const
@ -189,17 +189,17 @@ struct ValueFormat : USHORT
TRACE_SANITIZE (this);
unsigned int len = get_len ();
if (!c->check_array (values, get_size (), count)) return TRACE_RETURN (false);
if (!c->check_array (values, get_size (), count)) return_trace (false);
if (!has_device ()) return TRACE_RETURN (true);
if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
return TRACE_RETURN (false);
return_trace (false);
values += len;
}
return TRACE_RETURN (true);
return_trace (true);
}
/* Just sanitize referenced Device tables. Doesn't check the values themselves. */
@ -207,15 +207,15 @@ struct ValueFormat : USHORT
{
TRACE_SANITIZE (this);
if (!has_device ()) return TRACE_RETURN (true);
if (!has_device ()) return_trace (true);
for (unsigned int i = 0; i < count; i++) {
if (!sanitize_value_devices (c, base, values))
return TRACE_RETURN (false);
return_trace (false);
values += stride;
}
return TRACE_RETURN (true);
return_trace (true);
}
};
@ -232,7 +232,7 @@ struct AnchorFormat1
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
protected:
@ -262,7 +262,7 @@ struct AnchorFormat2
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
protected:
@ -291,7 +291,7 @@ struct AnchorFormat3
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
return_trace (c->check_struct (this) && xDeviceTable.sanitize (c, this) && yDeviceTable.sanitize (c, this));
}
protected:
@ -327,12 +327,12 @@ struct Anchor
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
case 3: return TRACE_RETURN (u.format3.sanitize (c));
default:return TRACE_RETURN (true);
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
case 3: return_trace (u.format3.sanitize (c));
default:return_trace (true);
}
}
@ -360,13 +360,13 @@ struct AnchorMatrix
inline bool sanitize (hb_sanitize_context_t *c, unsigned int cols) const
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return TRACE_RETURN (false);
if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return TRACE_RETURN (false);
if (!c->check_struct (this)) return_trace (false);
if (unlikely (rows > 0 && cols >= ((unsigned int) -1) / rows)) return_trace (false);
unsigned int count = rows * cols;
if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return TRACE_RETURN (false);
if (!c->check_array (matrixZ, matrixZ[0].static_size, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
if (!matrixZ[i].sanitize (c, this)) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (!matrixZ[i].sanitize (c, this)) return_trace (false);
return_trace (true);
}
USHORT rows; /* Number of rows */
@ -386,7 +386,7 @@ struct MarkRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && markAnchor.sanitize (c, base));
return_trace (c->check_struct (this) && markAnchor.sanitize (c, base));
}
protected:
@ -415,7 +415,7 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
const Anchor& glyph_anchor = anchors.get_anchor (glyph_index, mark_class, class_count, &found);
/* If this subtable doesn't have an anchor for this base and this class,
* return false such that the subsequent subtables have a chance at it. */
if (unlikely (!found)) return TRACE_RETURN (false);
if (unlikely (!found)) return_trace (false);
hb_position_t mark_x, mark_y, base_x, base_y;
@ -428,13 +428,13 @@ struct MarkArray : ArrayOf<MarkRecord> /* Array of MarkRecords--in Coverage orde
o.attach_lookback() = buffer->idx - glyph_pos;
buffer->idx++;
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (ArrayOf<MarkRecord>::sanitize (c, this));
return_trace (ArrayOf<MarkRecord>::sanitize (c, this));
}
};
@ -459,21 +459,21 @@ struct SinglePosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
valueFormat.apply_value (c->font, c->direction, this,
values, buffer->cur_pos());
buffer->idx++;
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this)
&& coverage.sanitize (c, this)
&& valueFormat.sanitize_value (c, this, values));
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
valueFormat.sanitize_value (c, this, values));
}
protected:
@ -508,24 +508,24 @@ struct SinglePosFormat2
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
if (likely (index >= valueCount)) return TRACE_RETURN (false);
if (likely (index >= valueCount)) return_trace (false);
valueFormat.apply_value (c->font, c->direction, this,
&values[index * valueFormat.get_len ()],
buffer->cur_pos());
buffer->idx++;
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this)
&& coverage.sanitize (c, this)
&& valueFormat.sanitize_values (c, this, values, valueCount));
return_trace (c->check_struct (this) &&
coverage.sanitize (c, this) &&
valueFormat.sanitize_values (c, this, values, valueCount));
}
protected:
@ -548,11 +548,11 @@ struct SinglePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
default:return_trace (c->default_return_value ());
}
}
@ -615,7 +615,7 @@ struct PairSet
/* Hand-coded bsearch. */
if (unlikely (!count))
return TRACE_RETURN (false);
return_trace (false);
hb_codepoint_t x = buffer->info[pos].codepoint;
int min = 0, max = (int) count - 1;
while (min <= max)
@ -636,11 +636,11 @@ struct PairSet
if (len2)
pos++;
buffer->idx = pos;
return TRACE_RETURN (true);
return_trace (true);
}
}
return TRACE_RETURN (false);
return_trace (false);
}
struct sanitize_closure_t {
@ -654,12 +654,12 @@ struct PairSet
{
TRACE_SANITIZE (this);
if (!(c->check_struct (this)
&& c->check_array (arrayZ, USHORT::static_size * closure->stride, len))) return TRACE_RETURN (false);
&& c->check_array (arrayZ, USHORT::static_size * closure->stride, len))) return_trace (false);
unsigned int count = len;
const PairValueRecord *record = CastP<PairValueRecord> (arrayZ);
return TRACE_RETURN (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride)
&& closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
return_trace (closure->valueFormats[0].sanitize_values_stride_unsafe (c, closure->base, &record->values[0], count, closure->stride) &&
closure->valueFormats[1].sanitize_values_stride_unsafe (c, closure->base, &record->values[closure->len1], count, closure->stride));
}
protected:
@ -691,13 +691,13 @@ struct PairPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
if (!skippy_iter.next ()) return TRACE_RETURN (false);
if (!skippy_iter.next ()) return_trace (false);
return TRACE_RETURN ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
return_trace ((this+pairSet[index]).apply (c, &valueFormat1, skippy_iter.idx));
}
inline bool sanitize (hb_sanitize_context_t *c) const
@ -713,7 +713,7 @@ struct PairPosFormat1
1 + len1 + len2
};
return TRACE_RETURN (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
return_trace (c->check_struct (this) && coverage.sanitize (c, this) && pairSet.sanitize (c, this, &closure));
}
protected:
@ -762,11 +762,11 @@ struct PairPosFormat2
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int index = (this+coverage).get_coverage (buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
if (!skippy_iter.next ()) return TRACE_RETURN (false);
if (!skippy_iter.next ()) return_trace (false);
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
@ -774,7 +774,7 @@ struct PairPosFormat2
unsigned int klass1 = (this+classDef1).get_class (buffer->cur().codepoint);
unsigned int klass2 = (this+classDef2).get_class (buffer->info[skippy_iter.idx].codepoint);
if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return TRACE_RETURN (false);
if (unlikely (klass1 >= class1Count || klass2 >= class2Count)) return_trace (false);
const Value *v = &values[record_len * (klass1 * class2Count + klass2)];
valueFormat1.apply_value (c->font, c->direction, this,
@ -786,7 +786,7 @@ struct PairPosFormat2
if (len2)
buffer->idx++;
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
@ -795,16 +795,16 @@ struct PairPosFormat2
if (!(c->check_struct (this)
&& coverage.sanitize (c, this)
&& classDef1.sanitize (c, this)
&& classDef2.sanitize (c, this))) return TRACE_RETURN (false);
&& classDef2.sanitize (c, this))) return_trace (false);
unsigned int len1 = valueFormat1.get_len ();
unsigned int len2 = valueFormat2.get_len ();
unsigned int stride = len1 + len2;
unsigned int record_size = valueFormat1.get_size () + valueFormat2.get_size ();
unsigned int count = (unsigned int) class1Count * (unsigned int) class2Count;
return TRACE_RETURN (c->check_array (values, record_size, count) &&
valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
return_trace (c->check_array (values, record_size, count) &&
valueFormat1.sanitize_values_stride_unsafe (c, this, &values[0], count, stride) &&
valueFormat2.sanitize_values_stride_unsafe (c, this, &values[len1], count, stride));
}
protected:
@ -843,11 +843,11 @@ struct PairPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
default:return_trace (c->default_return_value ());
}
}
@ -867,7 +867,7 @@ struct EntryExitRecord
inline bool sanitize (hb_sanitize_context_t *c, const void *base) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
return_trace (entryAnchor.sanitize (c, base) && exitAnchor.sanitize (c, base));
}
protected:
@ -905,17 +905,17 @@ struct CursivePosFormat1
hb_buffer_t *buffer = c->buffer;
/* We don't handle mark glyphs here. */
if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return TRACE_RETURN (false);
if (unlikely (_hb_glyph_info_is_mark (&buffer->cur()))) return_trace (false);
const EntryExitRecord &this_record = entryExitRecord[(this+coverage).get_coverage (buffer->cur().codepoint)];
if (!this_record.exitAnchor) return TRACE_RETURN (false);
if (!this_record.exitAnchor) return_trace (false);
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
if (!skippy_iter.next ()) return TRACE_RETURN (false);
if (!skippy_iter.next ()) return_trace (false);
const EntryExitRecord &next_record = entryExitRecord[(this+coverage).get_coverage (buffer->info[skippy_iter.idx].codepoint)];
if (!next_record.entryAnchor) return TRACE_RETURN (false);
if (!next_record.entryAnchor) return_trace (false);
unsigned int i = buffer->idx;
unsigned int j = skippy_iter.idx;
@ -997,13 +997,13 @@ struct CursivePosFormat1
pos[child].x_offset = x_offset;
buffer->idx = j;
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && entryExitRecord.sanitize (c, this));
}
protected:
@ -1024,10 +1024,10 @@ struct CursivePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -1063,33 +1063,36 @@ struct MarkBasePosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
do {
if (!skippy_iter.prev ()) return TRACE_RETURN (false);
if (!skippy_iter.prev ()) return_trace (false);
/* We only want to attach to the first of a MultipleSubst sequence. Reject others. */
if (0 == _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx])) break;
skippy_iter.reject ();
} while (1);
/* Checking that matched glyph is actually a base glyph by GDEF is too strong; disabled */
if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }
if (!_hb_glyph_info_is_base_glyph (&buffer->info[skippy_iter.idx])) { /*return_trace (false);*/ }
unsigned int base_index = (this+baseCoverage).get_coverage (buffer->info[skippy_iter.idx].codepoint);
if (base_index == NOT_COVERED) return TRACE_RETURN (false);
if (base_index == NOT_COVERED) return_trace (false);
return TRACE_RETURN ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
return_trace ((this+markArray).apply (c, mark_index, base_index, this+baseArray, classCount, skippy_iter.idx));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && baseCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) && baseArray.sanitize (c, this, (unsigned int) classCount));
return_trace (c->check_struct (this) &&
markCoverage.sanitize (c, this) &&
baseCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) &&
baseArray.sanitize (c, this, (unsigned int) classCount));
}
protected:
@ -1117,10 +1120,10 @@ struct MarkBasePos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -1161,27 +1164,27 @@ struct MarkLigPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark_index = (this+markCoverage).get_coverage (buffer->cur().codepoint);
if (likely (mark_index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (mark_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (LookupFlag::IgnoreMarks);
if (!skippy_iter.prev ()) return TRACE_RETURN (false);
if (!skippy_iter.prev ()) return_trace (false);
/* Checking that matched glyph is actually a ligature by GDEF is too strong; disabled */
if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return TRACE_RETURN (false);*/ }
if (!_hb_glyph_info_is_ligature (&buffer->info[skippy_iter.idx])) { /*return_trace (false);*/ }
unsigned int j = skippy_iter.idx;
unsigned int lig_index = (this+ligatureCoverage).get_coverage (buffer->info[j].codepoint);
if (lig_index == NOT_COVERED) return TRACE_RETURN (false);
if (lig_index == NOT_COVERED) return_trace (false);
const LigatureArray& lig_array = this+ligatureArray;
const LigatureAttach& lig_attach = lig_array[lig_index];
/* Find component to attach to */
unsigned int comp_count = lig_attach.rows;
if (unlikely (!comp_count)) return TRACE_RETURN (false);
if (unlikely (!comp_count)) return_trace (false);
/* We must now check whether the ligature ID of the current mark glyph
* is identical to the ligature ID of the found ligature. If yes, we
@ -1196,14 +1199,17 @@ struct MarkLigPosFormat1
else
comp_index = comp_count - 1;
return TRACE_RETURN ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
return_trace ((this+markArray).apply (c, mark_index, comp_index, lig_attach, classCount, j));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && markCoverage.sanitize (c, this) && ligatureCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) && ligatureArray.sanitize (c, this, (unsigned int) classCount));
return_trace (c->check_struct (this) &&
markCoverage.sanitize (c, this) &&
ligatureCoverage.sanitize (c, this) &&
markArray.sanitize (c, this) &&
ligatureArray.sanitize (c, this, (unsigned int) classCount));
}
protected:
@ -1232,10 +1238,10 @@ struct MarkLigPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -1271,15 +1277,15 @@ struct MarkMarkPosFormat1
TRACE_APPLY (this);
hb_buffer_t *buffer = c->buffer;
unsigned int mark1_index = (this+mark1Coverage).get_coverage (buffer->cur().codepoint);
if (likely (mark1_index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (mark1_index == NOT_COVERED)) return_trace (false);
/* now we search backwards for a suitable mark glyph until a non-mark glyph */
hb_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, 1);
skippy_iter.set_lookup_props (c->lookup_props & ~LookupFlag::IgnoreFlags);
if (!skippy_iter.prev ()) return TRACE_RETURN (false);
if (!skippy_iter.prev ()) return_trace (false);
if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return TRACE_RETURN (false); }
if (!_hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx])) { return_trace (false); }
unsigned int j = skippy_iter.idx;
@ -1301,21 +1307,23 @@ struct MarkMarkPosFormat1
}
/* Didn't match. */
return TRACE_RETURN (false);
return_trace (false);
good:
unsigned int mark2_index = (this+mark2Coverage).get_coverage (buffer->info[j].codepoint);
if (mark2_index == NOT_COVERED) return TRACE_RETURN (false);
if (mark2_index == NOT_COVERED) return_trace (false);
return TRACE_RETURN ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
return_trace ((this+mark1Array).apply (c, mark1_index, mark2_index, this+mark2Array, classCount, j));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && mark1Coverage.sanitize (c, this) &&
mark2Coverage.sanitize (c, this) && mark1Array.sanitize (c, this)
&& mark2Array.sanitize (c, this, (unsigned int) classCount));
return_trace (c->check_struct (this) &&
mark1Coverage.sanitize (c, this) &&
mark2Coverage.sanitize (c, this) &&
mark1Array.sanitize (c, this) &&
mark2Array.sanitize (c, this, (unsigned int) classCount));
}
protected:
@ -1345,10 +1353,10 @@ struct MarkMarkPos
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -1397,18 +1405,18 @@ struct PosLookupSubTable
{
TRACE_DISPATCH (this, lookup_type);
/* The sub_format passed to may_dispatch is unnecessary but harmless. */
if (unlikely (!c->may_dispatch (this, &u.sub_format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.sub_format))) return_trace (c->default_return_value ());
switch (lookup_type) {
case Single: return TRACE_RETURN (u.single.dispatch (c));
case Pair: return TRACE_RETURN (u.pair.dispatch (c));
case Cursive: return TRACE_RETURN (u.cursive.dispatch (c));
case MarkBase: return TRACE_RETURN (u.markBase.dispatch (c));
case MarkLig: return TRACE_RETURN (u.markLig.dispatch (c));
case MarkMark: return TRACE_RETURN (u.markMark.dispatch (c));
case Context: return TRACE_RETURN (u.context.dispatch (c));
case ChainContext: return TRACE_RETURN (u.chainContext.dispatch (c));
case Extension: return TRACE_RETURN (u.extension.dispatch (c));
default: return TRACE_RETURN (c->default_return_value ());
case Single: return_trace (u.single.dispatch (c));
case Pair: return_trace (u.pair.dispatch (c));
case Cursive: return_trace (u.cursive.dispatch (c));
case MarkBase: return_trace (u.markBase.dispatch (c));
case MarkLig: return_trace (u.markLig.dispatch (c));
case MarkMark: return_trace (u.markMark.dispatch (c));
case Context: return_trace (u.context.dispatch (c));
case ChainContext: return_trace (u.chainContext.dispatch (c));
case Extension: return_trace (u.extension.dispatch (c));
default: return_trace (c->default_return_value ());
}
}
@ -1443,13 +1451,13 @@ struct PosLookup : Lookup
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
return TRACE_RETURN (dispatch (c));
return_trace (dispatch (c));
}
inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
return TRACE_RETURN (dispatch (c));
return_trace (dispatch (c));
}
template <typename set_t>
@ -1471,8 +1479,8 @@ struct PosLookup : Lookup
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
return TRACE_RETURN (dispatch (c));
if (unlikely (!Lookup::sanitize (c))) return_trace (false);
return_trace (dispatch (c));
}
};
@ -1495,9 +1503,9 @@ struct GPOS : GSUBGPOS
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
if (unlikely (!GSUBGPOS::sanitize (c))) return_trace (false);
const OffsetTo<PosLookupList> &list = CastR<OffsetTo<PosLookupList> > (lookupList);
return TRACE_RETURN (list.sanitize (c, this));
return_trace (list.sanitize (c, this));
}
public:
DEFINE_SIZE_STATIC (10);

@ -67,7 +67,7 @@ struct SingleSubstFormat1
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
inline bool apply (hb_apply_context_t *c) const
@ -75,14 +75,14 @@ struct SingleSubstFormat1
TRACE_APPLY (this);
hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
unsigned int index = (this+coverage).get_coverage (glyph_id);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
/* According to the Adobe Annotated OpenType Suite, result is always
* limited to 16bit. */
glyph_id = (glyph_id + deltaGlyphID) & 0xFFFFu;
c->replace_glyph (glyph_id);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
@ -91,16 +91,16 @@ struct SingleSubstFormat1
int delta)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
deltaGlyphID.set (delta); /* TODO(serilaize) overflow? */
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
return_trace (coverage.sanitize (c, this) && deltaGlyphID.sanitize (c));
}
protected:
@ -144,7 +144,7 @@ struct SingleSubstFormat2
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
inline bool apply (hb_apply_context_t *c) const
@ -152,14 +152,14 @@ struct SingleSubstFormat2
TRACE_APPLY (this);
hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
unsigned int index = (this+coverage).get_coverage (glyph_id);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
if (unlikely (index >= substitute.len)) return TRACE_RETURN (false);
if (unlikely (index >= substitute.len)) return_trace (false);
glyph_id = substitute[index];
c->replace_glyph (glyph_id);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
@ -168,16 +168,16 @@ struct SingleSubstFormat2
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return TRACE_RETURN (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!substitute.serialize (c, substitutes, num_glyphs))) return_trace (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && substitute.sanitize (c));
return_trace (coverage.sanitize (c, this) && substitute.sanitize (c));
}
protected:
@ -200,7 +200,7 @@ struct SingleSubst
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 2;
int delta = 0;
if (num_glyphs) {
@ -215,9 +215,9 @@ struct SingleSubst
}
u.format.set (format);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, num_glyphs, delta));
case 2: return TRACE_RETURN (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
default:return TRACE_RETURN (false);
case 1: return_trace (u.format1.serialize (c, glyphs, num_glyphs, delta));
case 2: return_trace (u.format2.serialize (c, glyphs, substitutes, num_glyphs));
default:return_trace (false);
}
}
@ -225,11 +225,11 @@ struct SingleSubst
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
default:return_trace (c->default_return_value ());
}
}
@ -273,14 +273,14 @@ struct Sequence
* buffer->move_to() makes assumptions about this too. Perhaps fix
* in the future after figuring out what to do with the clusters.
*/
if (unlikely (!count)) return TRACE_RETURN (false);
if (unlikely (!count)) return_trace (false);
/* Special-case to make it in-place and not consider this
* as a "multiplied" substitution. */
if (unlikely (count == 1))
{
c->replace_glyph (substitute.array[0]);
return TRACE_RETURN (true);
return_trace (true);
}
unsigned int klass = _hb_glyph_info_is_ligature (&c->buffer->cur()) ?
@ -292,7 +292,7 @@ struct Sequence
}
c->buffer->skip_glyph ();
return TRACE_RETURN (true);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
@ -300,15 +300,15 @@ struct Sequence
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!substitute.serialize (c, glyphs, num_glyphs))) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (substitute.sanitize (c));
return_trace (substitute.sanitize (c));
}
protected:
@ -347,7 +347,7 @@ struct MultipleSubstFormat1
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
inline bool apply (hb_apply_context_t *c) const
@ -355,9 +355,9 @@ struct MultipleSubstFormat1
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
return TRACE_RETURN ((this+sequence[index]).apply (c));
return_trace ((this+sequence[index]).apply (c));
}
inline bool serialize (hb_serialize_context_t *c,
@ -367,21 +367,21 @@ struct MultipleSubstFormat1
Supplier<GlyphID> &substitute_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!sequence.serialize (c, num_glyphs))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!sequence.serialize (c, num_glyphs))) return_trace (false);
for (unsigned int i = 0; i < num_glyphs; i++)
if (unlikely (!sequence[i].serialize (c, this).serialize (c,
substitute_glyphs_list,
substitute_len_list[i]))) return TRACE_RETURN (false);
substitute_len_list[i]))) return_trace (false);
substitute_len_list.advance (num_glyphs);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && sequence.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && sequence.sanitize (c, this));
}
protected:
@ -405,12 +405,12 @@ struct MultipleSubst
Supplier<GlyphID> &substitute_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
default:return TRACE_RETURN (false);
case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, num_glyphs, substitute_glyphs_list));
default:return_trace (false);
}
}
@ -418,10 +418,10 @@ struct MultipleSubst
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -473,7 +473,7 @@ struct AlternateSubstFormat1
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
inline bool apply (hb_apply_context_t *c) const
@ -482,11 +482,11 @@ struct AlternateSubstFormat1
hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
unsigned int index = (this+coverage).get_coverage (glyph_id);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const AlternateSet &alt_set = this+alternateSet[index];
if (unlikely (!alt_set.len)) return TRACE_RETURN (false);
if (unlikely (!alt_set.len)) return_trace (false);
hb_mask_t glyph_mask = c->buffer->cur().mask;
hb_mask_t lookup_mask = c->lookup_mask;
@ -495,13 +495,13 @@ struct AlternateSubstFormat1
unsigned int shift = _hb_ctz (lookup_mask);
unsigned int alt_index = ((lookup_mask & glyph_mask) >> shift);
if (unlikely (alt_index > alt_set.len || alt_index == 0)) return TRACE_RETURN (false);
if (unlikely (alt_index > alt_set.len || alt_index == 0)) return_trace (false);
glyph_id = alt_set[alt_index - 1];
c->replace_glyph (glyph_id);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
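
A minimal standalone sketch of how the mask arithmetic in the hunk above picks an alternate; the bit assignments are hypothetical, and ctz() is a portable stand-in for _hb_ctz():

#include <cassert>
#include <cstdint>

/* Stand-in for _hb_ctz(): count trailing zero bits (assumes v != 0). */
static unsigned int ctz (uint32_t v)
{
  unsigned int n = 0;
  while (!(v & 1u)) { v >>= 1; n++; }
  return n;
}

int main ()
{
  /* Hypothetical masks: the lookup owns bits 4..7 of the glyph mask,
   * and the glyph requests alternate #3 in that bit range. */
  uint32_t lookup_mask = 0x00F0u;
  uint32_t glyph_mask = 0x0030u | 0x0001u;  /* the unrelated low bit is filtered out */
  unsigned int shift = ctz (lookup_mask);
  unsigned int alt_index = (lookup_mask & glyph_mask) >> shift;
  assert (alt_index == 3);  /* apply() above would then take alt_set[alt_index - 1] */
  return 0;
}
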
@ -511,21 +511,21 @@ struct AlternateSubstFormat1
Supplier<GlyphID> &alternate_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!alternateSet.serialize (c, num_glyphs))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!alternateSet.serialize (c, num_glyphs))) return_trace (false);
for (unsigned int i = 0; i < num_glyphs; i++)
if (unlikely (!alternateSet[i].serialize (c, this).serialize (c,
alternate_glyphs_list,
alternate_len_list[i]))) return TRACE_RETURN (false);
alternate_len_list[i]))) return_trace (false);
alternate_len_list.advance (num_glyphs);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs, num_glyphs))) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && alternateSet.sanitize (c, this));
}
protected:
@ -549,12 +549,12 @@ struct AlternateSubst
Supplier<GlyphID> &alternate_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
default:return TRACE_RETURN (false);
case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, num_glyphs, alternate_glyphs_list));
default:return_trace (false);
}
}
@ -562,10 +562,10 @@ struct AlternateSubst
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -602,13 +602,13 @@ struct Ligature
{
TRACE_WOULD_APPLY (this);
if (c->len != component.len)
return TRACE_RETURN (false);
return_trace (false);
for (unsigned int i = 1; i < c->len; i++)
if (likely (c->glyphs[i] != component[i]))
return TRACE_RETURN (false);
return_trace (false);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool apply (hb_apply_context_t *c) const
@ -616,14 +616,14 @@ struct Ligature
TRACE_APPLY (this);
unsigned int count = component.len;
if (unlikely (!count)) return TRACE_RETURN (false);
if (unlikely (!count)) return_trace (false);
/* Special-case to make it in-place and not consider this
* as a "ligated" substitution. */
if (unlikely (count == 1))
{
c->replace_glyph (ligGlyph);
return TRACE_RETURN (true);
return_trace (true);
}
bool is_mark_ligature = false;
@ -640,7 +640,7 @@ struct Ligature
match_positions,
&is_mark_ligature,
&total_component_count)))
return TRACE_RETURN (false);
return_trace (false);
ligate_input (c,
count,
@ -650,7 +650,7 @@ struct Ligature
is_mark_ligature,
total_component_count);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool serialize (hb_serialize_context_t *c,
@ -659,17 +659,17 @@ struct Ligature
unsigned int num_components /* Including first component */)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
ligGlyph = ligature;
if (unlikely (!component.serialize (c, components, num_components))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!component.serialize (c, components, num_components))) return_trace (false);
return_trace (true);
}
public:
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (ligGlyph.sanitize (c) && component.sanitize (c));
return_trace (ligGlyph.sanitize (c) && component.sanitize (c));
}
protected:
@ -708,9 +708,9 @@ struct LigatureSet
{
const Ligature &lig = this+ligature[i];
if (lig.would_apply (c))
return TRACE_RETURN (true);
return_trace (true);
}
return TRACE_RETURN (false);
return_trace (false);
}
inline bool apply (hb_apply_context_t *c) const
@ -720,10 +720,10 @@ struct LigatureSet
for (unsigned int i = 0; i < num_ligs; i++)
{
const Ligature &lig = this+ligature[i];
if (lig.apply (c)) return TRACE_RETURN (true);
if (lig.apply (c)) return_trace (true);
}
return TRACE_RETURN (false);
return_trace (false);
}
inline bool serialize (hb_serialize_context_t *c,
@ -733,22 +733,22 @@ struct LigatureSet
Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!ligature.serialize (c, num_ligatures))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!ligature.serialize (c, num_ligatures))) return_trace (false);
for (unsigned int i = 0; i < num_ligatures; i++)
if (unlikely (!ligature[i].serialize (c, this).serialize (c,
ligatures[i],
component_list,
component_count_list[i]))) return TRACE_RETURN (false);
component_count_list[i]))) return_trace (false);
ligatures.advance (num_ligatures);
component_count_list.advance (num_ligatures);
return TRACE_RETURN (true);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (ligature.sanitize (c, this));
return_trace (ligature.sanitize (c, this));
}
protected:
@ -790,10 +790,10 @@ struct LigatureSubstFormat1
{
TRACE_WOULD_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const LigatureSet &lig_set = this+ligatureSet[index];
return TRACE_RETURN (lig_set.would_apply (c));
return_trace (lig_set.would_apply (c));
}
inline bool apply (hb_apply_context_t *c) const
@ -802,10 +802,10 @@ struct LigatureSubstFormat1
hb_codepoint_t glyph_id = c->buffer->cur().codepoint;
unsigned int index = (this+coverage).get_coverage (glyph_id);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const LigatureSet &lig_set = this+ligatureSet[index];
return TRACE_RETURN (lig_set.apply (c));
return_trace (lig_set.apply (c));
}
inline bool serialize (hb_serialize_context_t *c,
@ -817,23 +817,23 @@ struct LigatureSubstFormat1
Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return TRACE_RETURN (false);
if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!ligatureSet.serialize (c, num_first_glyphs))) return_trace (false);
for (unsigned int i = 0; i < num_first_glyphs; i++)
if (unlikely (!ligatureSet[i].serialize (c, this).serialize (c,
ligatures_list,
component_count_list,
ligature_per_first_glyph_count_list[i],
component_list))) return TRACE_RETURN (false);
component_list))) return_trace (false);
ligature_per_first_glyph_count_list.advance (num_first_glyphs);
if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (unlikely (!coverage.serialize (c, this).serialize (c, first_glyphs, num_first_glyphs))) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && ligatureSet.sanitize (c, this));
}
protected:
@ -859,13 +859,18 @@ struct LigatureSubst
Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return TRACE_RETURN (false);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
ligatures_list, component_count_list, component_list));
default:return TRACE_RETURN (false);
case 1: return_trace (u.format1.serialize (c,
first_glyphs,
ligature_per_first_glyph_count_list,
num_first_glyphs,
ligatures_list,
component_count_list,
component_list));
default:return_trace (false);
}
}
@ -873,10 +878,10 @@ struct LigatureSubst
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -959,17 +964,17 @@ struct ReverseChainSingleSubstFormat1
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return TRACE_RETURN (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
if (unlikely (c->nesting_level_left != MAX_NESTING_LEVEL))
return TRACE_RETURN (false); /* No chaining to this type */
return_trace (false); /* No chaining to this type */
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
@ -986,22 +991,22 @@ struct ReverseChainSingleSubstFormat1
/* Note: We DON'T decrease buffer->idx. The main loop does it
* for us. This is useful for preventing surprises if someone
* calls us through a Context lookup. */
return TRACE_RETURN (true);
return_trace (true);
}
return TRACE_RETURN (false);
return_trace (false);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!(coverage.sanitize (c, this) && backtrack.sanitize (c, this)))
return TRACE_RETURN (false);
return_trace (false);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
if (!lookahead.sanitize (c, this))
return TRACE_RETURN (false);
return_trace (false);
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
return TRACE_RETURN (substitute.sanitize (c));
return_trace (substitute.sanitize (c));
}
protected:
@ -1030,10 +1035,10 @@ struct ReverseChainSingleSubst
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
default:return_trace (c->default_return_value ());
}
}
@ -1070,17 +1075,17 @@ struct SubstLookupSubTable
{
TRACE_DISPATCH (this, lookup_type);
/* The sub_format passed to may_dispatch is unnecessary but harmless. */
if (unlikely (!c->may_dispatch (this, &u.sub_format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.sub_format))) return_trace (c->default_return_value ());
switch (lookup_type) {
case Single: return TRACE_RETURN (u.single.dispatch (c));
case Multiple: return TRACE_RETURN (u.multiple.dispatch (c));
case Alternate: return TRACE_RETURN (u.alternate.dispatch (c));
case Ligature: return TRACE_RETURN (u.ligature.dispatch (c));
case Context: return TRACE_RETURN (u.context.dispatch (c));
case ChainContext: return TRACE_RETURN (u.chainContext.dispatch (c));
case Extension: return TRACE_RETURN (u.extension.dispatch (c));
case ReverseChainSingle: return TRACE_RETURN (u.reverseChainContextSingle.dispatch (c));
default: return TRACE_RETURN (c->default_return_value ());
case Single: return_trace (u.single.dispatch (c));
case Multiple: return_trace (u.multiple.dispatch (c));
case Alternate: return_trace (u.alternate.dispatch (c));
case Ligature: return_trace (u.ligature.dispatch (c));
case Context: return_trace (u.context.dispatch (c));
case ChainContext: return_trace (u.chainContext.dispatch (c));
case Extension: return_trace (u.extension.dispatch (c));
case ReverseChainSingle: return_trace (u.reverseChainContextSingle.dispatch (c));
default: return_trace (c->default_return_value ());
}
}
@ -1120,21 +1125,21 @@ struct SubstLookup : Lookup
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
return TRACE_RETURN (dispatch (c));
return_trace (dispatch (c));
}
inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
c->set_recurse_func (dispatch_recurse_func<hb_closure_context_t>);
return TRACE_RETURN (dispatch (c));
return_trace (dispatch (c));
}
inline hb_collect_glyphs_context_t::return_t collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
c->set_recurse_func (dispatch_recurse_func<hb_collect_glyphs_context_t>);
return TRACE_RETURN (dispatch (c));
return_trace (dispatch (c));
}
template <typename set_t>
@ -1148,9 +1153,9 @@ struct SubstLookup : Lookup
const hb_ot_layout_lookup_accelerator_t *accel) const
{
TRACE_WOULD_APPLY (this);
if (unlikely (!c->len)) return TRACE_RETURN (false);
if (!accel->may_have (c->glyphs[0])) return TRACE_RETURN (false);
return TRACE_RETURN (dispatch (c));
if (unlikely (!c->len)) return_trace (false);
if (!accel->may_have (c->glyphs[0])) return_trace (false);
return_trace (dispatch (c));
}
static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);
@ -1166,8 +1171,8 @@ struct SubstLookup : Lookup
unsigned int num_glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return TRACE_RETURN (false);
return TRACE_RETURN (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Single, lookup_props, 1))) return_trace (false);
return_trace (serialize_subtable (c, 0).u.single.serialize (c, glyphs, substitutes, num_glyphs));
}
inline bool serialize_multiple (hb_serialize_context_t *c,
@ -1178,9 +1183,12 @@ struct SubstLookup : Lookup
Supplier<GlyphID> &substitute_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return TRACE_RETURN (false);
return TRACE_RETURN (serialize_subtable (c, 0).u.multiple.serialize (c, glyphs, substitute_len_list, num_glyphs,
substitute_glyphs_list));
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Multiple, lookup_props, 1))) return_trace (false);
return_trace (serialize_subtable (c, 0).u.multiple.serialize (c,
glyphs,
substitute_len_list,
num_glyphs,
substitute_glyphs_list));
}
inline bool serialize_alternate (hb_serialize_context_t *c,
@ -1191,9 +1199,12 @@ struct SubstLookup : Lookup
Supplier<GlyphID> &alternate_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return TRACE_RETURN (false);
return TRACE_RETURN (serialize_subtable (c, 0).u.alternate.serialize (c, glyphs, alternate_len_list, num_glyphs,
alternate_glyphs_list));
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Alternate, lookup_props, 1))) return_trace (false);
return_trace (serialize_subtable (c, 0).u.alternate.serialize (c,
glyphs,
alternate_len_list,
num_glyphs,
alternate_glyphs_list));
}
inline bool serialize_ligature (hb_serialize_context_t *c,
@ -1206,9 +1217,14 @@ struct SubstLookup : Lookup
Supplier<GlyphID> &component_list /* Starting from second for each ligature */)
{
TRACE_SERIALIZE (this);
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return TRACE_RETURN (false);
return TRACE_RETURN (serialize_subtable (c, 0).u.ligature.serialize (c, first_glyphs, ligature_per_first_glyph_count_list, num_first_glyphs,
ligatures_list, component_count_list, component_list));
if (unlikely (!Lookup::serialize (c, SubstLookupSubTable::Ligature, lookup_props, 1))) return_trace (false);
return_trace (serialize_subtable (c, 0).u.ligature.serialize (c,
first_glyphs,
ligature_per_first_glyph_count_list,
num_first_glyphs,
ligatures_list,
component_count_list,
component_list));
}
template <typename context_t>
@ -1221,8 +1237,8 @@ struct SubstLookup : Lookup
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!Lookup::sanitize (c))) return TRACE_RETURN (false);
if (unlikely (!dispatch (c))) return TRACE_RETURN (false);
if (unlikely (!Lookup::sanitize (c))) return_trace (false);
if (unlikely (!dispatch (c))) return_trace (false);
if (unlikely (get_type () == SubstLookupSubTable::Extension))
{
@ -1233,9 +1249,9 @@ struct SubstLookup : Lookup
unsigned int count = get_subtable_count ();
for (unsigned int i = 1; i < count; i++)
if (get_subtable (i).u.extension.get_type () != type)
return TRACE_RETURN (false);
return_trace (false);
}
return TRACE_RETURN (true);
return_trace (true);
}
};
@ -1258,9 +1274,9 @@ struct GSUB : GSUBGPOS
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!GSUBGPOS::sanitize (c))) return TRACE_RETURN (false);
if (unlikely (!GSUBGPOS::sanitize (c))) return_trace (false);
const OffsetTo<SubstLookupList> &list = CastR<OffsetTo<SubstLookupList> > (lookupList);
return TRACE_RETURN (list.sanitize (c, this));
return_trace (list.sanitize (c, this));
}
public:
DEFINE_SIZE_STATIC (10);

View File

@ -722,7 +722,7 @@ static inline bool match_input (hb_apply_context_t *c,
{
TRACE_APPLY (NULL);
if (unlikely (count > MAX_CONTEXT_LENGTH)) return TRACE_RETURN (false);
if (unlikely (count > MAX_CONTEXT_LENGTH)) return_trace (false);
hb_buffer_t *buffer = c->buffer;
@ -759,7 +759,7 @@ static inline bool match_input (hb_apply_context_t *c,
match_positions[0] = buffer->idx;
for (unsigned int i = 1; i < count; i++)
{
if (!skippy_iter.next ()) return TRACE_RETURN (false);
if (!skippy_iter.next ()) return_trace (false);
match_positions[i] = skippy_iter.idx;
@ -771,13 +771,13 @@ static inline bool match_input (hb_apply_context_t *c,
* all subsequent components should be attached to the same ligature
* component, otherwise we shouldn't ligate them. */
if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
return TRACE_RETURN (false);
return_trace (false);
} else {
/* If first component was NOT attached to a previous ligature component,
* all subsequent components should also NOT be attached to any ligature
* component, unless they are attached to the first component itself! */
if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
return TRACE_RETURN (false);
return_trace (false);
}
is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
@ -792,9 +792,9 @@ static inline bool match_input (hb_apply_context_t *c,
if (p_total_component_count)
*p_total_component_count = total_component_count;
return TRACE_RETURN (true);
return_trace (true);
}
static inline void ligate_input (hb_apply_context_t *c,
static inline bool ligate_input (hb_apply_context_t *c,
unsigned int count, /* Including the first glyph */
unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
unsigned int match_length,
@ -884,7 +884,7 @@ static inline void ligate_input (hb_apply_context_t *c,
break;
}
}
TRACE_RETURN (true);
return_trace (true);
}
static inline bool match_backtrack (hb_apply_context_t *c,
@ -901,9 +901,9 @@ static inline bool match_backtrack (hb_apply_context_t *c,
for (unsigned int i = 0; i < count; i++)
if (!skippy_iter.prev ())
return TRACE_RETURN (false);
return_trace (false);
return TRACE_RETURN (true);
return_trace (true);
}
static inline bool match_lookahead (hb_apply_context_t *c,
@ -921,9 +921,9 @@ static inline bool match_lookahead (hb_apply_context_t *c,
for (unsigned int i = 0; i < count; i++)
if (!skippy_iter.next ())
return TRACE_RETURN (false);
return_trace (false);
return TRACE_RETURN (true);
return_trace (true);
}
@ -933,7 +933,7 @@ struct LookupRecord
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
return_trace (c->check_struct (this));
}
USHORT sequenceIndex; /* Index into current glyph
@ -1034,7 +1034,7 @@ static inline bool apply_lookup (hb_apply_context_t *c,
buffer->move_to (end);
return TRACE_RETURN (true);
return_trace (true);
}
@ -1143,14 +1143,14 @@ struct Rule
{
TRACE_WOULD_APPLY (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
return TRACE_RETURN (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
return_trace (context_would_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
}
inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (inputZ, inputZ[0].static_size * (inputCount ? inputCount - 1 : 0));
return TRACE_RETURN (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
return_trace (context_apply_lookup (c, inputCount, inputZ, lookupCount, lookupRecord, lookup_context));
}
public:
@ -1202,9 +1202,9 @@ struct RuleSet
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).would_apply (c, lookup_context))
return TRACE_RETURN (true);
return_trace (true);
}
return TRACE_RETURN (false);
return_trace (false);
}
inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
@ -1214,15 +1214,15 @@ struct RuleSet
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).apply (c, lookup_context))
return TRACE_RETURN (true);
return_trace (true);
}
return TRACE_RETURN (false);
return_trace (false);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (rule.sanitize (c, this));
return_trace (rule.sanitize (c, this));
}
protected:
@ -1279,7 +1279,7 @@ struct ContextFormat1
{match_glyph},
NULL
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
return_trace (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -1292,20 +1292,20 @@ struct ContextFormat1
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED))
return TRACE_RETURN (false);
return_trace (false);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
NULL
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
return_trace (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
@ -1371,7 +1371,7 @@ struct ContextFormat2
{match_class},
&class_def
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
return_trace (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -1383,7 +1383,7 @@ struct ContextFormat2
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const ClassDef &class_def = this+classDef;
index = class_def.get_class (c->buffer->cur().codepoint);
@ -1392,13 +1392,13 @@ struct ContextFormat2
{match_class},
&class_def
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
return_trace (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
@ -1462,7 +1462,7 @@ struct ContextFormat3
{match_coverage},
this
};
return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
return_trace (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -1474,27 +1474,27 @@ struct ContextFormat3
{
TRACE_APPLY (this);
unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * glyphCount);
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
return_trace (context_apply_lookup (c, glyphCount, (const USHORT *) (coverageZ + 1), lookupCount, lookupRecord, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return TRACE_RETURN (false);
if (!c->check_struct (this)) return_trace (false);
unsigned int count = glyphCount;
if (!count) return TRACE_RETURN (false); /* We want to access coverageZ[0] freely. */
if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return TRACE_RETURN (false);
if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
if (!c->check_array (coverageZ, coverageZ[0].static_size, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
if (!coverageZ[i].sanitize (c, this)) return TRACE_RETURN (false);
if (!coverageZ[i].sanitize (c, this)) return_trace (false);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverageZ, coverageZ[0].static_size * count);
return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
return_trace (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
}
protected:
@ -1517,12 +1517,12 @@ struct Context
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
case 3: return TRACE_RETURN (c->dispatch (u.format3));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
case 3: return_trace (c->dispatch (u.format3));
default:return_trace (c->default_return_value ());
}
}
@ -1687,11 +1687,11 @@ struct ChainRule
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (chain_context_would_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
return_trace (chain_context_would_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
}
inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
@ -1700,23 +1700,23 @@ struct ChainRule
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (chain_context_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
return_trace (chain_context_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
if (!backtrack.sanitize (c)) return_trace (false);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
if (!input.sanitize (c)) return TRACE_RETURN (false);
if (!input.sanitize (c)) return_trace (false);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
if (!lookahead.sanitize (c)) return_trace (false);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (lookup.sanitize (c));
return_trace (lookup.sanitize (c));
}
protected:
@ -1761,9 +1761,9 @@ struct ChainRuleSet
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).would_apply (c, lookup_context))
return TRACE_RETURN (true);
return_trace (true);
return TRACE_RETURN (false);
return_trace (false);
}
inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
@ -1772,15 +1772,15 @@ struct ChainRuleSet
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).apply (c, lookup_context))
return TRACE_RETURN (true);
return_trace (true);
return TRACE_RETURN (false);
return_trace (false);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (rule.sanitize (c, this));
return_trace (rule.sanitize (c, this));
}
protected:
@ -1835,7 +1835,7 @@ struct ChainContextFormat1
{match_glyph},
{NULL, NULL, NULL}
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
return_trace (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -1847,20 +1847,20 @@ struct ChainContextFormat1
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{NULL, NULL, NULL}
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
return_trace (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
@ -1939,7 +1939,7 @@ struct ChainContextFormat2
&input_class_def,
&lookahead_class_def}
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
return_trace (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -1951,7 +1951,7 @@ struct ChainContextFormat2
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
@ -1965,15 +1965,17 @@ struct ChainContextFormat2
&input_class_def,
&lookahead_class_def}
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
return_trace (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
ruleSet.sanitize (c, this));
return_trace (coverage.sanitize (c, this) &&
backtrackClassDef.sanitize (c, this) &&
inputClassDef.sanitize (c, this) &&
lookaheadClassDef.sanitize (c, this) &&
ruleSet.sanitize (c, this));
}
protected:
@ -2056,11 +2058,11 @@ struct ChainContextFormat3
{match_coverage},
{this, this, this}
};
return TRACE_RETURN (chain_context_would_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
return_trace (chain_context_would_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
}
inline const Coverage &get_coverage (void) const
@ -2075,7 +2077,7 @@ struct ChainContextFormat3
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
if (likely (index == NOT_COVERED)) return_trace (false);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
@ -2083,24 +2085,24 @@ struct ChainContextFormat3
{match_coverage},
{this, this, this}
};
return TRACE_RETURN (chain_context_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
return_trace (chain_context_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
if (!backtrack.sanitize (c, this)) return_trace (false);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
if (!input.sanitize (c, this)) return TRACE_RETURN (false);
if (!input.len) return TRACE_RETURN (false); /* To be consistent with Context. */
if (!input.sanitize (c, this)) return_trace (false);
if (!input.len) return_trace (false); /* To be consistent with Context. */
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
if (!lookahead.sanitize (c, this)) return_trace (false);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (lookup.sanitize (c));
return_trace (lookup.sanitize (c));
}
protected:
@ -2130,12 +2132,12 @@ struct ChainContext
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
case 3: return TRACE_RETURN (c->dispatch (u.format3));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (c->dispatch (u.format1));
case 2: return_trace (c->dispatch (u.format2));
case 3: return_trace (c->dispatch (u.format3));
default:return_trace (c->default_return_value ());
}
}
@ -2166,15 +2168,15 @@ struct ExtensionFormat1
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, format);
if (unlikely (!c->may_dispatch (this, this))) return TRACE_RETURN (c->default_return_value ());
return TRACE_RETURN (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
if (unlikely (!c->may_dispatch (this, this))) return_trace (c->default_return_value ());
return_trace (get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ()));
}
/* This is called from may_dispatch() above with hb_sanitize_context_t. */
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) && extensionOffset != 0);
return_trace (c->check_struct (this) && extensionOffset != 0);
}
protected:
@ -2211,10 +2213,10 @@ struct Extension
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this, u.format);
if (unlikely (!c->may_dispatch (this, &u.format))) return TRACE_RETURN (c->default_return_value ());
if (unlikely (!c->may_dispatch (this, &u.format))) return_trace (c->default_return_value ());
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.dispatch (c));
default:return TRACE_RETURN (c->default_return_value ());
case 1: return_trace (u.format1.dispatch (c));
default:return_trace (c->default_return_value ());
}
}
@ -2269,10 +2271,11 @@ struct GSUBGPOS
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
scriptList.sanitize (c, this) &&
featureList.sanitize (c, this) &&
lookupList.sanitize (c, this));
return_trace (version.sanitize (c) &&
likely (version.major == 1) &&
scriptList.sanitize (c, this) &&
featureList.sanitize (c, this) &&
lookupList.sanitize (c, this));
}
protected:

View File

@ -57,17 +57,17 @@ struct JstfPriority
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
shrinkageEnableGSUB.sanitize (c, this) &&
shrinkageDisableGSUB.sanitize (c, this) &&
shrinkageEnableGPOS.sanitize (c, this) &&
shrinkageDisableGPOS.sanitize (c, this) &&
shrinkageJstfMax.sanitize (c, this) &&
extensionEnableGSUB.sanitize (c, this) &&
extensionDisableGSUB.sanitize (c, this) &&
extensionEnableGPOS.sanitize (c, this) &&
extensionDisableGPOS.sanitize (c, this) &&
extensionJstfMax.sanitize (c, this));
return_trace (c->check_struct (this) &&
shrinkageEnableGSUB.sanitize (c, this) &&
shrinkageDisableGSUB.sanitize (c, this) &&
shrinkageEnableGPOS.sanitize (c, this) &&
shrinkageDisableGPOS.sanitize (c, this) &&
shrinkageJstfMax.sanitize (c, this) &&
extensionEnableGSUB.sanitize (c, this) &&
extensionDisableGSUB.sanitize (c, this) &&
extensionEnableGPOS.sanitize (c, this) &&
extensionDisableGPOS.sanitize (c, this) &&
extensionJstfMax.sanitize (c, this));
}
protected:
@ -127,7 +127,7 @@ struct JstfLangSys : OffsetListOf<JstfPriority>
const Record<JstfLangSys>::sanitize_closure_t * = NULL) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (OffsetListOf<JstfPriority>::sanitize (c));
return_trace (OffsetListOf<JstfPriority>::sanitize (c));
}
};
@ -168,9 +168,9 @@ struct JstfScript
const Record<JstfScript>::sanitize_closure_t * = NULL) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (extenderGlyphs.sanitize (c, this) &&
defaultLangSys.sanitize (c, this) &&
langSys.sanitize (c, this));
return_trace (extenderGlyphs.sanitize (c, this) &&
defaultLangSys.sanitize (c, this) &&
langSys.sanitize (c, this));
}
protected:
@ -212,8 +212,9 @@ struct JSTF
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
scriptList.sanitize (c, this));
return_trace (version.sanitize (c) &&
likely (version.major == 1) &&
scriptList.sanitize (c, this));
}
protected:

View File

@ -51,8 +51,9 @@ struct maxp
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
likely (version.major == 1 || (version.major == 0 && version.minor == 0x5000u)));
return_trace (c->check_struct (this) &&
likely (version.major == 1 ||
(version.major == 0 && version.minor == 0x5000u)));
}
/* We only implement version 0.5 as none of the extra fields in version 1.0 are useful. */

View File

@ -60,7 +60,7 @@ struct NameRecord
{
TRACE_SANITIZE (this);
/* We can check from base all the way up to the end of string... */
return TRACE_RETURN (c->check_struct (this) && c->check_range ((char *) base, (unsigned int) length + offset));
return_trace (c->check_struct (this) && c->check_range ((char *) base, (unsigned int) length + offset));
}
USHORT platformID; /* Platform ID. */
@ -107,17 +107,17 @@ struct name
char *string_pool = (char *) this + stringOffset;
unsigned int _count = count;
for (unsigned int i = 0; i < _count; i++)
if (!nameRecord[i].sanitize (c, string_pool)) return TRACE_RETURN (false);
return TRACE_RETURN (true);
if (!nameRecord[i].sanitize (c, string_pool)) return_trace (false);
return_trace (true);
}
inline bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this) &&
likely (format == 0 || format == 1) &&
c->check_array (nameRecord, nameRecord[0].static_size, count) &&
sanitize_records (c));
return_trace (c->check_struct (this) &&
likely (format == 0 || format == 1) &&
c->check_array (nameRecord, nameRecord[0].static_size, count) &&
sanitize_records (c));
}
/* We only implement format 0 for now. */

View File

@ -740,7 +740,7 @@ template <typename T>
static inline void _hb_warn_no_return (bool returned)
{
if (unlikely (!returned)) {
fprintf (stderr, "OUCH, returned with no call to TRACE_RETURN. This is a bug, please report.\n");
fprintf (stderr, "OUCH, returned with no call to return_trace(). This is a bug, please report.\n");
}
}
template <>
@ -775,7 +775,7 @@ struct hb_auto_trace_t {
inline ret_t ret (ret_t v, unsigned int line = 0)
{
if (unlikely (returned)) {
fprintf (stderr, "OUCH, double calls to TRACE_RETURN. This is a bug, please report.\n");
fprintf (stderr, "OUCH, double calls to return_trace(). This is a bug, please report.\n");
return v;
}
@ -806,7 +806,7 @@ struct hb_auto_trace_t<0, ret_t> {
inline ret_t ret (ret_t v, unsigned int line HB_UNUSED = 0) { return v; }
};
#define TRACE_RETURN(RET) trace.ret (RET, __LINE__)
#define return_trace(RET) return trace.ret (RET, __LINE__)
/* Misc */
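
To make the difference between the two macros concrete, a minimal self-contained sketch built around the two definitions above; toy_trace_t and Dummy are stand-ins (not HarfBuzz types) for hb_auto_trace_t and a sanitizable struct:

#include <cstdio>

/* Stand-in for hb_auto_trace_t: just reports and forwards the value. */
struct toy_trace_t
{
  template <typename T>
  T ret (T v, unsigned int line)
  {
    printf ("returning %d (line %u)\n", (int) v, line);
    return v;
  }
};

/* Same shape as the two definitions in the hunk above. */
#define TRACE_RETURN(RET) trace.ret (RET, __LINE__)
#define return_trace(RET) return trace.ret (RET, __LINE__)

struct Dummy
{
  /* Old style: the call site spells out "return" itself. */
  bool sanitize_old () const
  {
    toy_trace_t trace;
    return TRACE_RETURN (true);
  }

  /* New style: the "return" is part of the macro, which is why call
   * sites throughout this commit drop the leading keyword, and why a
   * void function such as ligate_input() had to grow a bool return
   * type before its trailing trace call could become return_trace(). */
  bool sanitize_new () const
  {
    toy_trace_t trace;
    return_trace (true);
  }
};

int main ()
{
  Dummy d;
  return (d.sanitize_old () && d.sanitize_new ()) ? 0 : 1;
}
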