Narrow down cast operators on IntType
Say for USHORT, we were previously implementing casts from and to unsigned. With this change, we cast from and to uint16_t only. This gives the compiler more opportunities to catch possible narrowing issues in the code.

It needed a couple of fixes in the codebase though, because previously, if a USHORT participated in arithmetic with signed numbers, e.g. "u + 1", the result would have been unsigned. With this change, it would be signed. The correct fix is to update the code to read "u + 1u".

That said, I thought about conditionally adding back the cast out to signed/unsigned, to facilitate better type deduction. But I couldn't think of a real situation where that would help with anything, so I didn't add it. Here's what it would have been:

  template <typename Type2 = hb_conditional<hb_is_signed (Type), signed, unsigned>,
            hb_enable_if (sizeof (Type) < sizeof (Type2))>
  operator hb_type_identity_t<Type2> () const { return v; }

https://github.com/harfbuzz/harfbuzz/pull/2875
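A minimal standalone sketch of the behavior change described above (MiniUSHORT and this little test harness are illustrative assumptions, not HarfBuzz code): once the implicit cast yields uint16_t instead of a widened unsigned, integer promotion makes "u + 1" a signed expression, while "u + 1u" keeps the result unsigned, which is why the hb_max call sites in this commit gained the "u" suffix.

  #include <cassert>
  #include <cstdint>
  #include <type_traits>

  /* Stand-in for the narrowed IntType<uint16_t>: the cast operator now
   * yields the storage type itself rather than a widened unsigned. */
  struct MiniUSHORT
  {
    uint16_t v;
    operator uint16_t () const { return v; }
  };

  int main ()
  {
    MiniUSHORT u {65535};

    /* uint16_t undergoes integer promotion to (signed) int, so the old
     * "u + 1" spelling is now a signed expression... */
    static_assert (std::is_same<decltype (u + 1), int>::value, "");

    /* ...while "u + 1u" stays unsigned. */
    static_assert (std::is_same<decltype (u + 1u), unsigned>::value, "");

    assert (u + 1u == 65536u);
    return 0;
  }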
parent f4f35a4d5f
commit 567cedcc5f
@@ -576,7 +576,7 @@ struct StateTable
       if (unlikely (stop > states))
         return_trace (false);
       for (const HBUSHORT *p = states; stop < p; p--)
-        num_entries = hb_max (num_entries, *(p - 1) + 1);
+        num_entries = hb_max (num_entries, *(p - 1) + 1u);
       state_neg = min_state;
     }
   }
@@ -597,7 +597,7 @@ struct StateTable
       if (unlikely (stop < states))
         return_trace (false);
       for (const HBUSHORT *p = &states[state_pos * num_classes]; p < stop; p++)
-        num_entries = hb_max (num_entries, *p + 1);
+        num_entries = hb_max (num_entries, *p + 1u);
       state_pos = max_state + 1;
     }
   }
@@ -337,9 +337,9 @@ struct ContextualSubtable
       const EntryData &data = entries[i].data;

       if (data.markIndex != 0xFFFF)
-        num_lookups = hb_max (num_lookups, 1 + data.markIndex);
+        num_lookups = hb_max (num_lookups, 1u + data.markIndex);
       if (data.currentIndex != 0xFFFF)
-        num_lookups = hb_max (num_lookups, 1 + data.currentIndex);
+        num_lookups = hb_max (num_lookups, 1u + data.currentIndex);
     }

     return_trace (substitutionTables.sanitize (c, this, num_lookups));
@@ -54,16 +54,15 @@ namespace OT {

 /* Integer types in big-endian order and no alignment requirement */
 template <typename Type,
-          unsigned int Size = sizeof (Type),
-          typename Wide = hb_conditional<hb_is_signed (Type), signed, unsigned>>
+          unsigned int Size = sizeof (Type)>
 struct IntType
 {
   typedef Type type;

   IntType () = default;
-  explicit constexpr IntType (Wide V) : v {V} {}
-  IntType& operator = (Wide i) { v = i; return *this; }
-  operator Wide () const { return v; }
+  explicit constexpr IntType (Type V) : v {V} {}
+  IntType& operator = (Type i) { v = i; return *this; }
+  operator Type () const { return v; }

   bool operator == (const IntType &o) const { return (Type) v == (Type) o.v; }
   bool operator != (const IntType &o) const { return !(*this == o); }