[array/vector] Rename len to length

Behdad Esfahbod 2018-12-21 18:46:51 -05:00
parent 2fc1860a5b
commit 474a12058d
41 changed files with 325 additions and 323 deletions
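
The rename is mechanical: the public len member of hb_array_t / hb_vector_t (and of helpers built on them) becomes length, and every call site follows. Below is a minimal call-site sketch, not part of the commit, using only the hb_array_t pieces visible in the hb-array.hh hunks further down; the include path is assumed:

#include "hb-array.hh"            /* assumed include; provides hb_array_t and the hb_array () helper */

static unsigned int
sum (hb_array_t<const int> arr)
{
  unsigned int total = 0;
  for (unsigned int i = 0; i < arr.length; i++)   /* before this commit: arr.len */
    total += arr[i];
  return total;
}

static unsigned int
sum_three (void)
{
  static int values[] = {1, 2, 3};
  return sum (hb_array (values));   /* hb_array () deduces the length from the C array */
}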

View File

@ -118,11 +118,10 @@ struct FeatureName
if (selectors_count)
{
hb_array_t<const SettingName> arr = settings_table.sub_array (start_offset, selectors_count);
unsigned int count = arr.len;
for (unsigned int i = 0; i < count; i++)
for (unsigned int i = 0; i < arr.length; i++)
settings_table[start_offset + i].get_info (&selectors[i], default_selector);
}
return settings_table.len;
return settings_table.length;
}
hb_aat_layout_feature_type_t get_feature_type () const

View File

@ -55,7 +55,7 @@ struct lcar
if (caret_count)
{
hb_array_t<const HBINT16> arr = array.sub_array (start_offset, caret_count);
unsigned int count = arr.len;
unsigned int count = arr.length;
for (unsigned int i = 0; i < count; ++i)
switch (format)
{

View File

@ -54,11 +54,11 @@ void
hb_aat_map_builder_t::compile (hb_aat_map_t &m)
{
/* Sort features and merge duplicates */
if (features.len)
if (features.length)
{
features.qsort ();
unsigned int j = 0;
for (unsigned int i = 1; i < features.len; i++)
for (unsigned int i = 1; i < features.length; i++)
if (features[i].type != features[j].type)
features[++j] = features[i];
features.shrink (j + 1);

View File

@ -45,9 +45,9 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
/*
* Constructors.
*/
hb_array_t () : arrayZ (nullptr), len (0) {}
hb_array_t (Type *array_, unsigned int len_) : arrayZ (array_), len (len_) {}
template <unsigned int len_> hb_array_t (Type (&array_)[len_]) : arrayZ (array_), len (len_) {}
hb_array_t () : arrayZ (nullptr), length (0) {}
hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
template <unsigned int length_> hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_) {}
/*
* Operators.
@ -56,29 +56,29 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
Type& operator [] (int i_) const
{
unsigned int i = (unsigned int) i_;
if (unlikely (i >= len)) return CrapOrNull(Type);
if (unlikely (i >= length)) return CrapOrNull(Type);
return arrayZ[i];
}
explicit_operator bool () const { return len; }
explicit_operator bool () const { return length; }
Type * operator & () const { return arrayZ; }
Type & operator * () { return (this->operator [])[0]; }
operator hb_array_t<const Type> () { return hb_array_t<const Type> (arrayZ, len); }
operator hb_array_t<const Type> () { return hb_array_t<const Type> (arrayZ, length); }
template <typename T> operator T * () const { return arrayZ; }
hb_array_t<Type> & operator += (unsigned int count)
{
if (unlikely (count > len))
count = len;
len -= count;
if (unlikely (count > length))
count = length;
length -= count;
arrayZ += count;
return *this;
}
hb_array_t<Type> & operator -= (unsigned int count)
{
if (unlikely (count > len))
count = len;
len -= count;
if (unlikely (count > length))
count = length;
length -= count;
return *this;
}
hb_array_t<Type> & operator ++ () { *this += 1; }
@ -99,8 +99,8 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
/* Note: our compare is NOT lexicographic; it also does NOT call Type::cmp. */
int cmp (const hb_array_t<Type> &a) const
{
if (len != a.len)
return (int) a.len - (int) len;
if (length != a.length)
return (int) a.length - (int) length;
return hb_memcmp (a.arrayZ, arrayZ, get_size ());
}
static int cmp (const void *pa, const void *pb)
@ -113,7 +113,7 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
template <typename T>
Type *lsearch (const T &x, Type *not_found = nullptr)
{
unsigned int count = len;
unsigned int count = length;
for (unsigned int i = 0; i < count; i++)
if (!this->arrayZ[i].cmp (x))
return &this->arrayZ[i];
@ -122,7 +122,7 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
template <typename T>
const Type *lsearch (const T &x, const Type *not_found = nullptr) const
{
unsigned int count = len;
unsigned int count = length;
for (unsigned int i = 0; i < count; i++)
if (!this->arrayZ[i].cmp (x))
return &this->arrayZ[i];
@ -131,17 +131,17 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
hb_sorted_array_t<Type> qsort (int (*cmp_)(const void*, const void*))
{
::qsort (arrayZ, len, item_size, cmp_);
::qsort (arrayZ, length, item_size, cmp_);
return hb_sorted_array_t<Type> (*this);
}
hb_sorted_array_t<Type> qsort ()
{
::qsort (arrayZ, len, item_size, Type::cmp);
::qsort (arrayZ, length, item_size, Type::cmp);
return hb_sorted_array_t<Type> (*this);
}
void qsort (unsigned int start, unsigned int end)
{
end = MIN (end, len);
end = MIN (end, length);
assert (start <= end);
::qsort (arrayZ + start, end - start, item_size, Type::cmp);
}
@ -150,14 +150,14 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
* Other methods.
*/
unsigned int get_size () const { return len * item_size; }
unsigned int get_size () const { return length * item_size; }
hb_array_t<Type> sub_array (unsigned int start_offset = 0, unsigned int *seg_count = nullptr /* IN/OUT */) const
{
if (!start_offset && !seg_count)
return *this;
unsigned int count = len;
unsigned int count = length;
if (unlikely (start_offset > count))
count = 0;
else
@ -171,11 +171,11 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
/* Only call if you allocated the underlying array using malloc() or similar. */
void free ()
{ ::free ((void *) arrayZ); arrayZ = nullptr; len = 0; }
{ ::free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
template <typename hb_sanitize_context_t>
bool sanitize (hb_sanitize_context_t *c) const
{ return c->check_array (arrayZ, len); }
{ return c->check_array (arrayZ, length); }
/*
* Members
@ -183,13 +183,13 @@ struct hb_array_t// : hb_iter_t<hb_array_t<Type>, Type>
public:
Type *arrayZ;
unsigned int len;
unsigned int length;
};
template <typename T> inline hb_array_t<T>
hb_array (T *array, unsigned int len)
{ return hb_array_t<T> (array, len); }
template <typename T, unsigned int len_> inline hb_array_t<T>
hb_array (T (&array_)[len_])
hb_array (T *array, unsigned int length)
{ return hb_array_t<T> (array, length); }
template <typename T, unsigned int length_> inline hb_array_t<T>
hb_array (T (&array_)[length_])
{ return hb_array_t<T> (array_); }
@ -205,8 +205,8 @@ struct hb_sorted_array_t : hb_array_t<Type>
{
hb_sorted_array_t () : hb_array_t<Type> () {}
hb_sorted_array_t (const hb_array_t<Type> &o) : hb_array_t<Type> (o) {}
hb_sorted_array_t (Type *array_, unsigned int len_) : hb_array_t<Type> (array_, len_) {}
template <unsigned int len_> hb_sorted_array_t (Type (&array_)[len_]) : hb_array_t<Type> (array_) {}
hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
template <unsigned int length_> hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *seg_count /* IN/OUT */) const
{ return hb_sorted_array_t<Type> (((const hb_array_t<Type> *) (this))->sub_array (start_offset, seg_count)); }
@ -230,7 +230,7 @@ struct hb_sorted_array_t : hb_array_t<Type>
hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
unsigned int to_store = (unsigned int) -1) const
{
int min = 0, max = (int) this->len - 1;
int min = 0, max = (int) this->length - 1;
const Type *array = this->arrayZ;
while (min <= max)
{
@ -259,7 +259,7 @@ struct hb_sorted_array_t : hb_array_t<Type>
break;
case HB_BFIND_NOT_FOUND_STORE_CLOSEST:
if (max < 0 || (max < (int) this->len && array[max].cmp (x) > 0))
if (max < 0 || (max < (int) this->length && array[max].cmp (x) > 0))
max++;
*i = max;
break;
@ -269,10 +269,10 @@ struct hb_sorted_array_t : hb_array_t<Type>
}
};
template <typename T> inline hb_sorted_array_t<T>
hb_sorted_array (T *array, unsigned int len)
{ return hb_sorted_array_t<T> (array, len); }
template <typename T, unsigned int len_> inline hb_sorted_array_t<T>
hb_sorted_array (T (&array_)[len_])
hb_sorted_array (T *array, unsigned int length)
{ return hb_sorted_array_t<T> (array, length); }
template <typename T, unsigned int length_> inline hb_sorted_array_t<T>
hb_sorted_array (T (&array_)[length_])
{ return hb_sorted_array_t<T> (array_); }
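
Aside from the rename, the hunks above show how the arithmetic operators treat the array as a shrinking window: += clamps the step to length, shrinks length and advances arrayZ (dropping elements from the front), while -= only shrinks length (dropping from the back). A small usage sketch, assumed rather than taken from the commit:

#include <assert.h>
#include "hb-array.hh"                    /* assumed include, as above */

static void
window_demo (void)
{
  int data[] = {10, 20, 30, 40};
  hb_array_t<int> view (data, 4);         /* length == 4, arrayZ points at data[0] */

  view += 1;                              /* drop the front element: arrayZ advances, length == 3 */
  assert (view.length == 3 && view[0] == 20);

  view -= 1;                              /* drop the back element: only length shrinks, now 2 */
  assert (view.length == 2 && view[1] == 30);
}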

View File

@ -416,7 +416,7 @@ struct Stack
count = 0;
elements.init ();
elements.resize (kSizeLimit);
for (unsigned int i = 0; i < elements.len; i++)
for (unsigned int i = 0; i < elements.length; i++)
elements[i].init ();
}
@ -433,7 +433,7 @@ struct Stack
void push (const ELEM &v)
{
if (likely (count < elements.len))
if (likely (count < elements.length))
elements[count++] = v;
else
set_error ();
@ -441,7 +441,7 @@ struct Stack
ELEM &push ()
{
if (likely (count < elements.len))
if (likely (count < elements.length))
return elements[count++];
else
{
@ -482,7 +482,7 @@ struct Stack
void unpop ()
{
if (likely (count < elements.len))
if (likely (count < elements.length))
count++;
else
set_error ();
@ -623,7 +623,7 @@ struct ParsedValues
return false;
}
unsigned get_count () const { return values.len; }
unsigned get_count () const { return values.length; }
const VAL &get_value (unsigned int i) const { return values[i]; }
const VAL &operator [] (unsigned int i) const { return get_value (i); }

View File

@ -61,7 +61,7 @@ struct BlendArg : Number
deltas[i] = blends_[i];
}
bool blending () const { return deltas.len > 0; }
bool blending () const { return deltas.length > 0; }
void reset_blends ()
{
numValues = valueIndex = 0;
@ -167,10 +167,10 @@ struct CFF2CSInterpEnv : CSInterpEnv<BlendArg, CFF2Subrs>
{
if (do_blend && arg.blending ())
{
if (likely (scalars.len == arg.deltas.len))
if (likely (scalars.length == arg.deltas.length))
{
double v = arg.to_real ();
for (unsigned int i = 0; i < scalars.len; i++)
for (unsigned int i = 0; i < scalars.length; i++)
{
v += (double)scalars[i] * arg.deltas[i].to_real ();
}

View File

@ -527,7 +527,7 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
/* Scan events and save features for each range. */
hb_vector_t<active_feature_t> active_features;
unsigned int last_index = 0;
for (unsigned int i = 0; i < feature_events.len; i++)
for (unsigned int i = 0; i < feature_events.length; i++)
{
feature_event_t *event = &feature_events[i];
@ -536,13 +536,13 @@ _hb_coretext_shape (hb_shape_plan_t *shape_plan,
/* Save a snapshot of active features and the range. */
range_record_t *range = range_records.push ();
if (active_features.len)
if (active_features.length)
{
CFMutableArrayRef features_array = CFArrayCreateMutable(kCFAllocatorDefault, 0, &kCFTypeArrayCallBacks);
/* TODO sort and resolve conflicting features? */
/* active_features.qsort (); */
for (unsigned int j = 0; j < active_features.len; j++)
for (unsigned int j = 0; j < active_features.length; j++)
{
CFStringRef keys[] = {
kCTFontFeatureTypeIdentifierKey,
@ -730,7 +730,7 @@ resize_and_retry:
CFAttributedStringSetAttribute (attr_string, CFRangeMake (0, chars_len),
kCTFontAttributeName, ct_font);
if (num_features && range_records.len)
if (num_features && range_records.length)
{
unsigned int start = 0;
range_record_t *last_range = &range_records[0];
@ -879,7 +879,7 @@ resize_and_retry:
* Also see: https://bugs.chromium.org/p/chromium/issues/detail?id=597098
*/
bool matched = false;
for (unsigned int i = 0; i < range_records.len; i++)
for (unsigned int i = 0; i < range_records.length; i++)
if (range_records[i].font && CFEqual (run_ct_font, range_records[i].font))
{
matched = true;
@ -1142,7 +1142,7 @@ fail:
if (line)
CFRelease (line);
for (unsigned int i = 0; i < range_records.len; i++)
for (unsigned int i = 0; i < range_records.length; i++)
if (range_records[i].font)
CFRelease (range_records[i].font);

View File

@ -619,7 +619,7 @@ _hb_face_builder_data_destroy (void *user_data)
{
hb_face_builder_data_t *data = (hb_face_builder_data_t *) user_data;
for (unsigned int i = 0; i < data->tables.len; i++)
for (unsigned int i = 0; i < data->tables.length; i++)
hb_blob_destroy (data->tables[i].blob);
data->tables.fini ();
@ -631,7 +631,7 @@ static hb_blob_t *
_hb_face_builder_data_reference_blob (hb_face_builder_data_t *data)
{
unsigned int table_count = data->tables.len;
unsigned int table_count = data->tables.length;
unsigned int face_length = table_count * 16 + 12;
for (unsigned int i = 0; i < table_count; i++)

View File

@ -77,9 +77,10 @@ struct hb_lockable_set_t
{
l.lock ();
item_t *item = items.find (v);
if (item) {
if (item)
{
item_t old = *item;
*item = items[items.len - 1];
*item = items[items.length - 1];
items.pop ();
l.unlock ();
old.fini ();
@ -113,18 +114,20 @@ struct hb_lockable_set_t
void fini (lock_t &l)
{
if (!items.len) {
/* No need for locking. */
if (!items.length)
{
/* No need to lock. */
items.fini ();
return;
}
l.lock ();
while (items.len) {
item_t old = items[items.len - 1];
items.pop ();
l.unlock ();
old.fini ();
l.lock ();
while (items.length)
{
item_t old = items[items.length - 1];
items.pop ();
l.unlock ();
old.fini ();
l.lock ();
}
items.fini ();
l.unlock ();

View File

@ -130,7 +130,7 @@ typedef struct OffsetTable
sfnt_version.set (sfnt_tag);
/* Take space for numTables, searchRange, entrySelector, RangeShift
* and the TableRecords themselves. */
if (unlikely (!tables.serialize (c, items.len))) return_trace (false);
if (unlikely (!tables.serialize (c, items.length))) return_trace (false);
const char *dir_end = (const char *) c->head;
HBUINT32 *checksum_adjustment = nullptr;
@ -173,7 +173,7 @@ typedef struct OffsetTable
/* The following line is a slower version of the following block. */
//checksum.set_for_data (this, (const char *) c->head - (const char *) this);
checksum.set_for_data (this, dir_end - (const char *) this);
for (unsigned int i = 0; i < items.len; i++)
for (unsigned int i = 0; i < items.length; i++)
{
TableRecord &rec = tables.arrayZ[i];
checksum.set (checksum + rec.checkSum);

View File

@ -557,8 +557,8 @@ struct ArrayOf
bool serialize (hb_serialize_context_t *c, hb_array_t<const T> items)
{
TRACE_SERIALIZE (this);
if (unlikely (!serialize (c, items.len))) return_trace (false);
for (unsigned int i = 0; i < items.len; i++)
if (unlikely (!serialize (c, items.length))) return_trace (false);
for (unsigned int i = 0; i < items.length; i++)
hb_assign (arrayZ[i], items[i]);
return_trace (true);
}
@ -703,9 +703,9 @@ struct HeadlessArrayOf
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
lenP1.set (items.len + 1); /* TODO(serialize) Overflow? */
lenP1.set (items.length + 1); /* TODO(serialize) Overflow? */
if (unlikely (!c->extend (*this))) return_trace (false);
for (unsigned int i = 0; i < items.len; i++)
for (unsigned int i = 0; i < items.length; i++)
arrayZ[i] = items[i];
return_trace (true);
}

View File

@ -69,8 +69,8 @@ struct StrBuffArray : hb_vector_t<StrBuff>
unsigned int total_size () const
{
unsigned int size = 0;
for (unsigned int i = 0; i < len; i++)
size += (*this)[i].len;
for (unsigned int i = 0; i < length; i++)
size += (*this)[i].length;
return size;
}
@ -120,7 +120,7 @@ struct CFFIndex
const ByteStrArray &byteArray)
{
TRACE_SERIALIZE (this);
if (byteArray.len == 0)
if (byteArray.length == 0)
{
COUNT *dest = c->allocate_min<COUNT> ();
if (unlikely (dest == nullptr)) return_trace (false);
@ -130,15 +130,15 @@ struct CFFIndex
{
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (byteArray.len);
this->count.set (byteArray.length);
this->offSize.set (offSize_);
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (byteArray.len + 1))))
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
return_trace (false);
/* serialize indices */
unsigned int offset = 1;
unsigned int i = 0;
for (; i < byteArray.len; i++)
for (; i < byteArray.length; i++)
{
set_offset_at (i, offset);
offset += byteArray[i].get_size ();
@ -146,7 +146,7 @@ struct CFFIndex
set_offset_at (i, offset);
/* serialize data */
for (unsigned int i = 0; i < byteArray.len; i++)
for (unsigned int i = 0; i < byteArray.length; i++)
{
ByteStr *dest = c->start_embed<ByteStr> ();
if (unlikely (dest == nullptr ||
@ -163,10 +163,10 @@ struct CFFIndex
{
ByteStrArray byteArray;
byteArray.init ();
byteArray.resize (buffArray.len);
for (unsigned int i = 0; i < byteArray.len; i++)
byteArray.resize (buffArray.length);
for (unsigned int i = 0; i < byteArray.length; i++)
{
byteArray[i] = ByteStr (buffArray[i].arrayZ (), buffArray[i].len);
byteArray[i] = ByteStr (buffArray[i].arrayZ (), buffArray[i].length);
}
bool result = this->serialize (c, offSize_, byteArray);
byteArray.fini ();
@ -425,7 +425,7 @@ struct Remap : hb_vector_t<hb_codepoint_t>
{
if (unlikely (!SUPER::resize (size)))
return false;
for (unsigned int i = 0; i < len; i++)
for (unsigned int i = 0; i < length; i++)
(*this)[i] = CFF_UNDEF_CODE;
count = 0;
return true;
@ -436,14 +436,14 @@ struct Remap : hb_vector_t<hb_codepoint_t>
if (unlikely (!SUPER::resize (size)))
return false;
unsigned int i;
for (i = 0; i < len; i++)
for (i = 0; i < length; i++)
(*this)[i] = i;
count = i;
return true;
}
bool excludes (hb_codepoint_t id) const
{ return (id < len) && ((*this)[id] == CFF_UNDEF_CODE); }
{ return (id < length) && ((*this)[id] == CFF_UNDEF_CODE); }
bool includes (hb_codepoint_t id) const
{ return !excludes (id); }
@ -476,15 +476,15 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (fontDicts.len);
this->count.set (fontDicts.length);
this->offSize.set (offSize_);
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (fontDicts.len + 1))))
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (fontDicts.length + 1))))
return_trace (false);
/* serialize font dict offsets */
unsigned int offset = 1;
unsigned int fid = 0;
for (; fid < fontDicts.len; fid++)
for (; fid < fontDicts.length; fid++)
{
CFFIndexOf<COUNT, FontDict>::set_offset_at (fid, offset);
offset += FontDict::calculate_serialized_size (fontDicts[fid], opszr);
@ -492,7 +492,7 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
CFFIndexOf<COUNT, FontDict>::set_offset_at (fid, offset);
/* serialize font dicts */
for (unsigned int i = 0; i < fontDicts.len; i++)
for (unsigned int i = 0; i < fontDicts.length; i++)
{
FontDict *dict = c->start_embed<FontDict> ();
if (unlikely (!dict->serialize (c, fontDicts[i], opszr, fontDicts[i])))
@ -521,7 +521,7 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
/* serialize font dict offsets */
unsigned int offset = 1;
unsigned int fid = 0;
for (unsigned i = 0; i < fontDicts.len; i++)
for (unsigned i = 0; i < fontDicts.length; i++)
if (fdmap.includes (i))
{
CFFIndexOf<COUNT, FontDict>::set_offset_at (fid++, offset);
@ -530,7 +530,7 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
CFFIndexOf<COUNT, FontDict>::set_offset_at (fid, offset);
/* serialize font dicts */
for (unsigned int i = 0; i < fontDicts.len; i++)
for (unsigned int i = 0; i < fontDicts.length; i++)
if (fdmap.includes (i))
{
FontDict *dict = c->start_embed<FontDict> ();

View File

@ -199,14 +199,14 @@ struct Encoding {
TRACE_SERIALIZE (this);
Encoding *dest = c->extend_min (*this);
if (unlikely (dest == nullptr)) return_trace (false);
dest->format.set (format | ((supp_codes.len > 0)? 0x80: 0));
dest->format.set (format | ((supp_codes.length > 0)? 0x80: 0));
if (format == 0)
{
Encoding0 *fmt0 = c->allocate_size<Encoding0> (Encoding0::min_size + HBUINT8::static_size * enc_count);
if (unlikely (fmt0 == nullptr)) return_trace (false);
fmt0->nCodes.set (enc_count);
unsigned int glyph = 0;
for (unsigned int i = 0; i < code_ranges.len; i++)
for (unsigned int i = 0; i < code_ranges.length; i++)
{
hb_codepoint_t code = code_ranges[i].code;
for (int left = (int)code_ranges[i].glyph; left >= 0; left--)
@ -217,10 +217,10 @@ struct Encoding {
}
else
{
Encoding1 *fmt1 = c->allocate_size<Encoding1> (Encoding1::min_size + Encoding1_Range::static_size * code_ranges.len);
Encoding1 *fmt1 = c->allocate_size<Encoding1> (Encoding1::min_size + Encoding1_Range::static_size * code_ranges.length);
if (unlikely (fmt1 == nullptr)) return_trace (false);
fmt1->nRanges.set (code_ranges.len);
for (unsigned int i = 0; i < code_ranges.len; i++)
fmt1->nRanges.set (code_ranges.length);
for (unsigned int i = 0; i < code_ranges.length; i++)
{
if (unlikely (!((code_ranges[i].code <= 0xFF) && (code_ranges[i].glyph <= 0xFF))))
return_trace (false);
@ -228,12 +228,12 @@ struct Encoding {
fmt1->ranges[i].nLeft.set (code_ranges[i].glyph);
}
}
if (supp_codes.len > 0)
if (supp_codes.length > 0)
{
CFF1SuppEncData *suppData = c->allocate_size<CFF1SuppEncData> (CFF1SuppEncData::min_size + SuppEncoding::static_size * supp_codes.len);
CFF1SuppEncData *suppData = c->allocate_size<CFF1SuppEncData> (CFF1SuppEncData::min_size + SuppEncoding::static_size * supp_codes.length);
if (unlikely (suppData == nullptr)) return_trace (false);
suppData->nSups.set (supp_codes.len);
for (unsigned int i = 0; i < supp_codes.len; i++)
suppData->nSups.set (supp_codes.length);
for (unsigned int i = 0; i < supp_codes.length; i++)
{
suppData->supps[i].code.set (supp_codes[i].code);
suppData->supps[i].glyph.set (supp_codes[i].glyph); /* actually SID */
@ -478,7 +478,7 @@ struct Charset {
Charset0 *fmt0 = c->allocate_size<Charset0> (Charset0::min_size + HBUINT16::static_size * (num_glyphs - 1));
if (unlikely (fmt0 == nullptr)) return_trace (false);
unsigned int glyph = 0;
for (unsigned int i = 0; i < sid_ranges.len; i++)
for (unsigned int i = 0; i < sid_ranges.length; i++)
{
hb_codepoint_t sid = sid_ranges[i].code;
for (int left = (int)sid_ranges[i].glyph; left >= 0; left--)
@ -487,9 +487,9 @@ struct Charset {
}
else if (format == 1)
{
Charset1 *fmt1 = c->allocate_size<Charset1> (Charset1::min_size + Charset1_Range::static_size * sid_ranges.len);
Charset1 *fmt1 = c->allocate_size<Charset1> (Charset1::min_size + Charset1_Range::static_size * sid_ranges.length);
if (unlikely (fmt1 == nullptr)) return_trace (false);
for (unsigned int i = 0; i < sid_ranges.len; i++)
for (unsigned int i = 0; i < sid_ranges.length; i++)
{
if (unlikely (!(sid_ranges[i].glyph <= 0xFF)))
return_trace (false);
@ -499,9 +499,9 @@ struct Charset {
}
else /* format 2 */
{
Charset2 *fmt2 = c->allocate_size<Charset2> (Charset2::min_size + Charset2_Range::static_size * sid_ranges.len);
Charset2 *fmt2 = c->allocate_size<Charset2> (Charset2::min_size + Charset2_Range::static_size * sid_ranges.length);
if (unlikely (fmt2 == nullptr)) return_trace (false);
for (unsigned int i = 0; i < sid_ranges.len; i++)
for (unsigned int i = 0; i < sid_ranges.length; i++)
{
if (unlikely (!(sid_ranges[i].glyph <= 0xFFFF)))
return_trace (false);

View File

@ -92,23 +92,23 @@ struct CmapSubtableFormat4
this->format.set (4);
this->length.set (get_sub_table_size (segments));
this->segCountX2.set (segments.len * 2);
this->entrySelector.set (MAX (1u, hb_bit_storage (segments.len)) - 1);
this->segCountX2.set (segments.length * 2);
this->entrySelector.set (MAX (1u, hb_bit_storage (segments.length)) - 1);
this->searchRange.set (2 * (1u << this->entrySelector));
this->rangeShift.set (segments.len * 2 > this->searchRange
? 2 * segments.len - this->searchRange
this->rangeShift.set (segments.length * 2 > this->searchRange
? 2 * segments.length - this->searchRange
: 0);
HBUINT16 *end_count = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.len);
HBUINT16 *end_count = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.length);
c->allocate_size<HBUINT16> (HBUINT16::static_size); // 2 bytes of padding.
HBUINT16 *start_count = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.len);
HBINT16 *id_delta = c->allocate_size<HBINT16> (HBUINT16::static_size * segments.len);
HBUINT16 *id_range_offset = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.len);
HBUINT16 *start_count = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.length);
HBINT16 *id_delta = c->allocate_size<HBINT16> (HBUINT16::static_size * segments.length);
HBUINT16 *id_range_offset = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.length);
if (id_range_offset == nullptr)
return_trace (false);
for (unsigned int i = 0; i < segments.len; i++)
for (unsigned int i = 0; i < segments.length; i++)
{
end_count[i].set (segments[i].end_code);
start_count[i].set (segments[i].start_code);
@ -157,7 +157,7 @@ struct CmapSubtableFormat4
static size_t get_sub_table_size (const hb_vector_t<segment_plan> &segments)
{
size_t segment_size = 0;
for (unsigned int i = 0; i < segments.len; i++)
for (unsigned int i = 0; i < segments.length; i++)
{
// Parallel array entries
segment_size +=
@ -532,7 +532,7 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
static size_t get_sub_table_size (const hb_vector_t<CmapSubtableLongGroup> &groups)
{
return 16 + 12 * groups.len;
return 16 + 12 * groups.length;
}
static bool create_sub_table_plan (const hb_subset_plan_t *plan,
@ -560,7 +560,7 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
}
DEBUG_MSG(SUBSET, nullptr, "cmap");
for (unsigned int i = 0; i < groups->len; i++) {
for (unsigned int i = 0; i < groups->length; i++) {
CmapSubtableLongGroup& group = (*groups)[i];
DEBUG_MSG(SUBSET, nullptr, " %d: U+%04X-U+%04X, gid %d-%d", i, (uint32_t) group.startCharCode, (uint32_t) group.endCharCode, (uint32_t) group.glyphID, (uint32_t) group.glyphID + ((uint32_t) group.endCharCode - (uint32_t) group.startCharCode));
}
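
For reference, the four fields set in the format 4 hunk of this file (segCountX2, entrySelector, searchRange, rangeShift) are the standard cmap binary-search parameters. A worked sketch in plain C++ with a hypothetical segment count of 5, mirroring the expressions in the diff (hb_bit_storage is open-coded here):

static unsigned int
format4_search_params (void)
{
  unsigned int seg_count = 5;             /* hypothetical stand-in for segments.length */

  unsigned int bit_storage = 0;           /* open-coded hb_bit_storage (seg_count): 5 needs 3 bits */
  for (unsigned int v = seg_count; v; v >>= 1)
    bit_storage++;

  unsigned int seg_count_x2   = seg_count * 2;                           /* segCountX2    == 10 */
  unsigned int entry_selector = (bit_storage > 1 ? bit_storage : 1) - 1; /* entrySelector ==  2 */
  unsigned int search_range   = 2 * (1u << entry_selector);              /* searchRange   ==  8 */
  return seg_count_x2 > search_range
       ? seg_count_x2 - search_range                                     /* rangeShift    ==  2 */
       : 0;
}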

View File

@ -104,14 +104,14 @@ struct COLR
if (count)
{
hb_array_t<const LayerRecord> segment_layers = glyph_layers.sub_array (start_offset, *count);
*count = segment_layers.len;
for (unsigned int i = 0; i < segment_layers.len; i++)
*count = segment_layers.length;
for (unsigned int i = 0; i < segment_layers.length; i++)
{
layers[i].glyph = segment_layers.arrayZ[i].glyphId;
layers[i].color_index = segment_layers.arrayZ[i].colorIdx;
}
}
return glyph_layers.len;
return glyph_layers.length;
}
bool sanitize (hb_sanitize_context_t *c) const

View File

@ -57,7 +57,7 @@ struct DeviceRecord
}
unsigned int len () const
{ return this->subset_plan->glyphs.len; }
{ return this->subset_plan->glyphs.length; }
const HBUINT8* operator [] (unsigned int i) const
{
@ -140,7 +140,7 @@ struct hdmx
this->version.set (source_hdmx->version);
this->numRecords.set (source_hdmx->numRecords);
this->sizeDeviceRecord.set (DeviceRecord::get_size (plan->glyphs.len));
this->sizeDeviceRecord.set (DeviceRecord::get_size (plan->glyphs.length));
for (unsigned int i = 0; i < source_hdmx->numRecords; i++)
{
@ -156,7 +156,7 @@ struct hdmx
static size_t get_subsetted_size (const hdmx *source_hdmx, hb_subset_plan_t *plan)
{
return min_size + source_hdmx->numRecords * DeviceRecord::get_size (plan->glyphs.len);
return min_size + source_hdmx->numRecords * DeviceRecord::get_size (plan->glyphs.length);
}
bool subset (hb_subset_plan_t *plan) const

View File

@ -94,7 +94,7 @@ struct hmtxvmtx
/* All the trailing glyphs with the same advance can use one LongMetric
* and just keep LSB */
hb_vector_t<hb_codepoint_t> &gids = plan->glyphs;
unsigned int num_advances = gids.len;
unsigned int num_advances = gids.length;
unsigned int last_advance = _mtx.get_advance (gids[num_advances - 1]);
while (num_advances > 1 &&
last_advance == _mtx.get_advance (gids[num_advances - 2]))
@ -104,14 +104,14 @@ struct hmtxvmtx
/* alloc the new table */
size_t dest_sz = num_advances * 4
+ (gids.len - num_advances) * 2;
+ (gids.length - num_advances) * 2;
void *dest = (void *) malloc (dest_sz);
if (unlikely (!dest))
{
return false;
}
DEBUG_MSG(SUBSET, nullptr, "%c%c%c%c in src has %d advances, %d lsbs", HB_UNTAG(T::tableTag), _mtx.num_advances, _mtx.num_metrics - _mtx.num_advances);
DEBUG_MSG(SUBSET, nullptr, "%c%c%c%c in dest has %d advances, %d lsbs, %u bytes", HB_UNTAG(T::tableTag), num_advances, gids.len - num_advances, (unsigned int) dest_sz);
DEBUG_MSG(SUBSET, nullptr, "%c%c%c%c in dest has %d advances, %d lsbs, %u bytes", HB_UNTAG(T::tableTag), num_advances, gids.length - num_advances, (unsigned int) dest_sz);
const char *source_table = hb_blob_get_data (_mtx.table.get_blob (), nullptr);
// Copy everything over
@ -120,7 +120,7 @@ struct hmtxvmtx
char * dest_pos = (char *) dest;
bool failed = false;
for (unsigned int i = 0; i < gids.len; i++)
for (unsigned int i = 0; i < gids.length; i++)
{
/* the last metric or the one for gids[i] */
LongMetric *src_metric = old_metrics + MIN ((hb_codepoint_t) _mtx.num_advances - 1, gids[i]);

View File

@ -900,14 +900,14 @@ struct CoverageFormat2
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.len))
if (unlikely (!glyphs.length))
{
rangeRecord.len.set (0);
return_trace (true);
}
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
rangeRecord.len.set (num_ranges);
@ -916,7 +916,7 @@ struct CoverageFormat2
unsigned int range = 0;
rangeRecord[range].start = glyphs[0];
rangeRecord[range].value.set (0);
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
{
if (glyphs[i - 1] + 1 != glyphs[i])
{
@ -1048,10 +1048,10 @@ struct Coverage
if (unlikely (!c->extend_min (*this))) return_trace (false);
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
u.format.set (glyphs.len * 2 < num_ranges * 3 ? 1 : 2);
u.format.set (glyphs.length * 2 < num_ranges * 3 ? 1 : 2);
switch (u.format)
{
@ -1199,7 +1199,7 @@ struct ClassDefFormat1
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.len))
if (unlikely (!glyphs.length))
{
startGlyph.set (0);
classValue.len.set (0);
@ -1207,13 +1207,13 @@ struct ClassDefFormat1
}
hb_codepoint_t glyph_min = glyphs[0];
hb_codepoint_t glyph_max = glyphs[glyphs.len - 1];
hb_codepoint_t glyph_max = glyphs[glyphs.length - 1];
startGlyph.set (glyph_min);
classValue.len.set (glyph_max - glyph_min + 1);
if (unlikely (!c->extend (classValue))) return_trace (false);
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
classValue[glyphs[i] - glyph_min] = klasses[i];
return_trace (true);
@ -1239,7 +1239,7 @@ struct ClassDefFormat1
}
c->serializer->propagate_error (glyphs, klasses);
ClassDef_serialize (c->serializer, glyphs, klasses);
return_trace (glyphs.len);
return_trace (glyphs.length);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -1335,14 +1335,14 @@ struct ClassDefFormat2
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.len))
if (unlikely (!glyphs.length))
{
rangeRecord.len.set (0);
return_trace (true);
}
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
if (glyphs[i - 1] + 1 != glyphs[i] ||
klasses[i - 1] != klasses[i])
num_ranges++;
@ -1352,7 +1352,7 @@ struct ClassDefFormat2
unsigned int range = 0;
rangeRecord[range].start = glyphs[0];
rangeRecord[range].value.set (klasses[0]);
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
{
if (glyphs[i - 1] + 1 != glyphs[i] ||
klasses[i - 1] != klasses[i])
@ -1390,7 +1390,7 @@ struct ClassDefFormat2
}
c->serializer->propagate_error (glyphs, klasses);
ClassDef_serialize (c->serializer, glyphs, klasses);
return_trace (glyphs.len);
return_trace (glyphs.length);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -1485,13 +1485,13 @@ struct ClassDef
if (unlikely (!c->extend_min (*this))) return_trace (false);
unsigned int format = 2;
if (glyphs.len)
if (glyphs.length)
{
hb_codepoint_t glyph_min = glyphs[0];
hb_codepoint_t glyph_max = glyphs[glyphs.len - 1];
hb_codepoint_t glyph_max = glyphs[glyphs.length - 1];
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
if (glyphs[i - 1] + 1 != glyphs[i] ||
klasses[i - 1] != klasses[i])
num_ranges++;

View File

@ -64,7 +64,7 @@ struct AttachList
if (point_count)
{
hb_array_t<const HBUINT16> array = points.sub_array (start_offset, point_count);
unsigned int count = array.len;
unsigned int count = array.length;
for (unsigned int i = 0; i < count; i++)
point_array[i] = array[i];
}
@ -221,7 +221,7 @@ struct LigGlyph
if (caret_count)
{
hb_array_t <const OffsetTo<CaretValue> > array = carets.sub_array (start_offset, caret_count);
unsigned int count = array.len;
unsigned int count = array.length;
for (unsigned int i = 0; i < count; i++)
caret_array[i] = (this+array[i]).get_caret_value (font, direction, glyph_id, var_store);
}

View File

@ -120,7 +120,7 @@ struct SingleSubstFormat1
}
c->serializer->propagate_error (from, to);
SingleSubst_serialize (c->serializer, from, to);
return_trace (from.len);
return_trace (from.length);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -218,7 +218,7 @@ struct SingleSubstFormat2
}
c->serializer->propagate_error (from, to);
SingleSubst_serialize (c->serializer, from, to);
return_trace (from.len);
return_trace (from.length);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -249,12 +249,12 @@ struct SingleSubst
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 2;
int delta = 0;
if (glyphs.len)
if (glyphs.length)
{
format = 1;
/* TODO(serialize) check for wrap-around */
delta = substitutes[0] - glyphs[0];
for (unsigned int i = 1; i < glyphs.len; i++)
for (unsigned int i = 1; i < glyphs.length; i++)
if (delta != (int) (substitutes[i] - glyphs[i])) {
format = 2;
break;
@ -414,8 +414,8 @@ struct MultipleSubstFormat1
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!sequence.serialize (c, glyphs.len))) return_trace (false);
for (unsigned int i = 0; i < glyphs.len; i++)
if (unlikely (!sequence.serialize (c, glyphs.length))) return_trace (false);
for (unsigned int i = 0; i < glyphs.length; i++)
{
unsigned int substitute_len = substitute_len_list[i];
if (unlikely (!sequence[i].serialize (c, this)
@ -604,8 +604,8 @@ struct AlternateSubstFormat1
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!alternateSet.serialize (c, glyphs.len))) return_trace (false);
for (unsigned int i = 0; i < glyphs.len; i++)
if (unlikely (!alternateSet.serialize (c, glyphs.length))) return_trace (false);
for (unsigned int i = 0; i < glyphs.length; i++)
{
unsigned int alternate_len = alternate_len_list[i];
if (unlikely (!alternateSet[i].serialize (c, this)
@ -845,8 +845,8 @@ struct LigatureSet
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!ligature.serialize (c, ligatures.len))) return_trace (false);
for (unsigned int i = 0; i < ligatures.len; i++)
if (unlikely (!ligature.serialize (c, ligatures.length))) return_trace (false);
for (unsigned int i = 0; i < ligatures.length; i++)
{
unsigned int component_count = MAX<int> (component_count_list[i] - 1, 0);
if (unlikely (!ligature[i].serialize (c, this)
@ -947,8 +947,8 @@ struct LigatureSubstFormat1
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!ligatureSet.serialize (c, first_glyphs.len))) return_trace (false);
for (unsigned int i = 0; i < first_glyphs.len; i++)
if (unlikely (!ligatureSet.serialize (c, first_glyphs.length))) return_trace (false);
for (unsigned int i = 0; i < first_glyphs.length; i++)
{
unsigned int ligature_count = ligature_per_first_glyph_count_list[i];
if (unlikely (!ligatureSet[i].serialize (c, this)

View File

@ -2636,7 +2636,7 @@ struct hb_ot_layout_lookup_accelerator_t
bool apply (hb_ot_apply_context_t *c) const
{
for (unsigned int i = 0; i < subtables.len; i++)
for (unsigned int i = 0; i < subtables.length; i++)
if (subtables[i].apply (c))
return true;
return false;

View File

@ -1423,7 +1423,7 @@ inline void hb_ot_map_t::apply (const Proxy &proxy,
OT::hb_ot_apply_context_t c (table_index, font, buffer);
c.set_recurse_func (Proxy::Lookup::apply_recurse_func);
for (unsigned int stage_index = 0; stage_index < stages[table_index].len; stage_index++) {
for (unsigned int stage_index = 0; stage_index < stages[table_index].length; stage_index++) {
const stage_map_t *stage = &stages[table_index][stage_index];
for (; i < stage->last_lookup; i++)
{

View File

@ -33,7 +33,7 @@
void hb_ot_map_t::collect_lookups (unsigned int table_index, hb_set_t *lookups_out) const
{
for (unsigned int i = 0; i < lookups[table_index].len; i++)
for (unsigned int i = 0; i < lookups[table_index].length; i++)
hb_set_add (lookups_out, lookups[table_index][i].index);
}
@ -82,7 +82,7 @@ void hb_ot_map_builder_t::add_feature (hb_tag_t tag,
if (unlikely (!tag)) return;
feature_info_t *info = feature_infos.push();
info->tag = tag;
info->seq = feature_infos.len;
info->seq = feature_infos.length;
info->max_value = value;
info->flags = flags;
info->default_value = (flags & F_GLOBAL) ? value : 0;
@ -174,11 +174,11 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
}
/* Sort features and merge duplicates */
if (feature_infos.len)
if (feature_infos.length)
{
feature_infos.qsort ();
unsigned int j = 0;
for (unsigned int i = 1; i < feature_infos.len; i++)
for (unsigned int i = 1; i < feature_infos.length; i++)
if (feature_infos[i].tag != feature_infos[j].tag)
feature_infos[++j] = feature_infos[i];
else {
@ -202,7 +202,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
/* Allocate bits now */
unsigned int next_bit = global_bit_shift + 1;
for (unsigned int i = 0; i < feature_infos.len; i++)
for (unsigned int i = 0; i < feature_infos.length; i++)
{
const feature_info_t *info = &feature_infos[i];
@ -292,7 +292,7 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
key.variations_index[table_index],
global_bit_mask);
for (unsigned i = 0; i < m.features.len; i++)
for (unsigned i = 0; i < m.features.length; i++)
if (m.features[i].stage[table_index] == stage)
add_lookups (m, table_index,
m.features[i].index[table_index],
@ -303,12 +303,12 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
m.features[i].random);
/* Sort lookups and merge duplicates */
if (last_num_lookups < m.lookups[table_index].len)
if (last_num_lookups < m.lookups[table_index].length)
{
m.lookups[table_index].qsort (last_num_lookups, m.lookups[table_index].len);
m.lookups[table_index].qsort (last_num_lookups, m.lookups[table_index].length);
unsigned int j = last_num_lookups;
for (unsigned int i = j + 1; i < m.lookups[table_index].len; i++)
for (unsigned int i = j + 1; i < m.lookups[table_index].length; i++)
if (m.lookups[table_index][i].index != m.lookups[table_index][j].index)
m.lookups[table_index][++j] = m.lookups[table_index][i];
else
@ -320,9 +320,9 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m,
m.lookups[table_index].shrink (j + 1);
}
last_num_lookups = m.lookups[table_index].len;
last_num_lookups = m.lookups[table_index].length;
if (stage_index < stages[table_index].len && stages[table_index][stage_index].index == stage) {
if (stage_index < stages[table_index].length && stages[table_index][stage_index].index == stage) {
hb_ot_map_t::stage_map_t *stage_map = m.stages[table_index].push ();
stage_map->last_lookup = last_num_lookups;
stage_map->pause_func = stages[table_index][stage_index].pause_func;

View File

@ -145,9 +145,9 @@ struct hb_ot_map_t
*lookup_count = 0;
return;
}
assert (stage <= stages[table_index].len);
assert (stage <= stages[table_index].length);
unsigned int start = stage ? stages[table_index][stage - 1].last_lookup : 0;
unsigned int end = stage < stages[table_index].len ? stages[table_index][stage].last_lookup : lookups[table_index].len;
unsigned int end = stage < stages[table_index].length ? stages[table_index][stage].last_lookup : lookups[table_index].length;
*plookups = end == start ? nullptr : &lookups[table_index][start];
*lookup_count = end - start;
}

View File

@ -510,7 +510,7 @@ struct MathGlyphAssembly
{
int scale = font->dir_scale (direction);
hb_array_t<const MathGlyphPartRecord> arr = partRecords.sub_array (start_offset, parts_count);
unsigned int count = arr.len;
unsigned int count = arr.length;
for (unsigned int i = 0; i < count; i++)
arr[i].extract (parts[i], scale, font);
}
@ -555,7 +555,7 @@ struct MathGlyphConstruction
{
int scale = font->dir_scale (direction);
hb_array_t<const MathGlyphVariantRecord> arr = mathGlyphVariantRecord.sub_array (start_offset, variants_count);
unsigned int count = arr.len;
unsigned int count = arr.length;
for (unsigned int i = 0; i < count; i++)
{
variants[i].glyph = arr[i].variantGlyph;

View File

@ -105,7 +105,7 @@ struct maxp
}
maxp *maxp_prime = (maxp *) hb_blob_get_data (maxp_prime_blob, nullptr);
maxp_prime->set_num_glyphs (plan->glyphs.len);
maxp_prime->set_num_glyphs (plan->glyphs.length);
if (plan->drop_hints)
drop_hint_fields (plan, maxp_prime);

View File

@ -188,9 +188,9 @@ struct name
this->table->count);
this->names.init ();
this->names.alloc (all_names.len);
this->names.alloc (all_names.length);
for (unsigned int i = 0; i < all_names.len; i++)
for (unsigned int i = 0; i < all_names.length; i++)
{
hb_ot_name_entry_t *entry = this->names.push ();
@ -204,7 +204,7 @@ struct name
/* Walk and pick best only for each name_id,language pair,
* while dropping unsupported encodings. */
unsigned int j = 0;
for (unsigned int i = 0; i < this->names.len; i++)
for (unsigned int i = 0; i < this->names.length; i++)
{
if (this->names[i].entry_score == UNSUPPORTED ||
this->names[i].language == HB_LANGUAGE_INVALID)
@ -232,7 +232,7 @@ struct name
const hb_ot_name_entry_t *entry = (const hb_ot_name_entry_t *)
hb_bsearch (&key,
this->names.arrayZ(),
this->names.len,
this->names.length,
sizeof (key),
_hb_ot_name_entry_cmp_key);
if (!entry)

View File

@ -59,7 +59,7 @@ hb_ot_name_list_names (hb_face_t *face,
unsigned int *num_entries /* OUT */)
{
const OT::name_accelerator_t &name = *face->table.name;
if (num_entries) *num_entries = name.names.len;
if (num_entries) *num_entries = name.names.length;
return name.names.arrayZ();
}
@ -70,7 +70,7 @@ hb_ot_name_convert_utf (hb_bytes_t bytes,
unsigned int *text_size /* IN/OUT */,
typename out_utf_t::codepoint_t *text /* OUT */)
{
unsigned int src_len = bytes.len / sizeof (typename in_utf_t::codepoint_t);
unsigned int src_len = bytes.length / sizeof (typename in_utf_t::codepoint_t);
const typename in_utf_t::codepoint_t *src = (const typename in_utf_t::codepoint_t *) bytes.arrayZ;
const typename in_utf_t::codepoint_t *src_end = src + src_len;

View File

@ -114,7 +114,7 @@ struct post
const uint8_t *end = (const uint8_t *) (const void *) table + table_length;
for (const uint8_t *data = pool;
index_to_offset.len < 65535 && data < end && data + *data < end;
index_to_offset.length < 65535 && data < end && data + *data < end;
data += 1 + *data)
index_to_offset.push (data - pool);
}
@ -129,9 +129,9 @@ struct post
char *buf, unsigned int buf_len) const
{
hb_bytes_t s = find_glyph_name (glyph);
if (!s.len) return false;
if (!s.length) return false;
if (!buf_len) return true;
unsigned int len = MIN (buf_len - 1, s.len);
unsigned int len = MIN (buf_len - 1, s.length);
strncpy (buf, s.arrayZ, len);
buf[len] = '\0';
return true;
@ -226,7 +226,7 @@ struct post
return format1_names (index);
index -= NUM_FORMAT1_NAMES;
if (index >= index_to_offset.len)
if (index >= index_to_offset.length)
return hb_bytes_t ();
unsigned int offset = index_to_offset[index];

View File

@ -268,7 +268,7 @@ struct fvar
{
hb_array_t<const Fixed> instanceCoords = instance->get_coordinates (axisCount)
.sub_array (0, *coords_length);
for (unsigned int i = 0; i < instanceCoords.len; i++)
for (unsigned int i = 0; i < instanceCoords.length; i++)
coords[i] = instanceCoords.arrayZ[i].to_float ();
}
return axisCount;

View File

@ -85,12 +85,12 @@ struct VORG
subset_table->version.minor.set (0);
subset_table->defaultVertOriginY.set (vorg_table->defaultVertOriginY);
subset_table->vertYOrigins.len.set (subset_metrics.len);
subset_table->vertYOrigins.len.set (subset_metrics.length);
bool success = true;
if (subset_metrics.len > 0)
if (subset_metrics.length > 0)
{
unsigned int size = VertOriginMetric::static_size * subset_metrics.len;
unsigned int size = VertOriginMetric::static_size * subset_metrics.length;
VertOriginMetric *metrics = c.allocate_size<VertOriginMetric> (size);
if (likely (metrics != nullptr))
memcpy (metrics, &subset_metrics[0], size);
@ -112,7 +112,7 @@ struct VORG
subset_metrics.init ();
unsigned int glyph = 0;
unsigned int i = 0;
while ((glyph < plan->glyphs.len) && (i < vertYOrigins.len))
while ((glyph < plan->glyphs.length) && (i < vertYOrigins.len))
{
if (plan->glyphs[glyph] > vertYOrigins[i].glyph)
i++;
@ -129,7 +129,7 @@ struct VORG
}
/* alloc the new table */
unsigned int dest_sz = VORG::min_size + VertOriginMetric::static_size * subset_metrics.len;
unsigned int dest_sz = VORG::min_size + VertOriginMetric::static_size * subset_metrics.length;
void *dest = (void *) malloc (dest_sz);
if (unlikely (!dest))
{

View File

@ -220,7 +220,7 @@ struct hb_set_t
if (unlikely (!successful)) return false;
if (!pages.resize (count) || !page_map.resize (count))
{
pages.resize (page_map.len);
pages.resize (page_map.length);
successful = false;
return false;
}
@ -238,7 +238,7 @@ struct hb_set_t
}
bool is_empty () const
{
unsigned int count = pages.len;
unsigned int count = pages.length;
for (unsigned int i = 0; i < count; i++)
if (!pages[i].is_empty ())
return false;
@ -373,7 +373,7 @@ struct hb_set_t
void set (const hb_set_t *other)
{
if (unlikely (!successful)) return;
unsigned int count = other->pages.len;
unsigned int count = other->pages.length;
if (!resize (count))
return;
population = other->population;
@ -386,8 +386,8 @@ struct hb_set_t
if (get_population () != other->get_population ())
return false;
unsigned int na = pages.len;
unsigned int nb = other->pages.len;
unsigned int na = pages.length;
unsigned int nb = other->pages.length;
unsigned int a = 0, b = 0;
for (; a < na && b < nb; )
@ -429,8 +429,8 @@ struct hb_set_t
dirty ();
unsigned int na = pages.len;
unsigned int nb = other->pages.len;
unsigned int na = pages.length;
unsigned int nb = other->pages.length;
unsigned int next_page = na;
unsigned int count = 0, newCount = 0;
@ -461,7 +461,7 @@ struct hb_set_t
if (Op::passthru_right)
count += nb - b;
if (count > pages.len)
if (count > pages.length)
if (!resize (count))
return;
newCount = count;
@ -517,7 +517,7 @@ struct hb_set_t
page_at (count).v = other->page_at (b).v;
}
assert (!count);
if (pages.len > newCount)
if (pages.length > newCount)
resize (newCount);
}
@ -547,7 +547,7 @@ struct hb_set_t
page_map_t map = {get_major (*codepoint), 0};
unsigned int i;
page_map.bfind (map, &i, HB_BFIND_NOT_FOUND_STORE_CLOSEST);
if (i < page_map.len && page_map[i].major == map.major)
if (i < page_map.length && page_map[i].major == map.major)
{
if (pages[page_map[i].index].next (codepoint))
{
@ -556,7 +556,7 @@ struct hb_set_t
}
i++;
}
for (; i < page_map.len; i++)
for (; i < page_map.length; i++)
{
hb_codepoint_t m = pages[page_map[i].index].get_min ();
if (m != INVALID)
@ -578,7 +578,7 @@ struct hb_set_t
page_map_t map = {get_major (*codepoint), 0};
unsigned int i;
page_map.bfind (map, &i, HB_BFIND_NOT_FOUND_STORE_CLOSEST);
if (i < page_map.len && page_map[i].major == map.major)
if (i < page_map.length && page_map[i].major == map.major)
{
if (pages[page_map[i].index].previous (codepoint))
{
@ -642,7 +642,7 @@ struct hb_set_t
return population;
unsigned int pop = 0;
unsigned int count = pages.len;
unsigned int count = pages.length;
for (unsigned int i = 0; i < count; i++)
pop += pages[i].get_population ();
@ -651,7 +651,7 @@ struct hb_set_t
}
hb_codepoint_t get_min () const
{
unsigned int count = pages.len;
unsigned int count = pages.length;
for (unsigned int i = 0; i < count; i++)
if (!page_at (i).is_empty ())
return page_map[i].major * page_t::PAGE_BITS + page_at (i).get_min ();
@ -659,7 +659,7 @@ struct hb_set_t
}
hb_codepoint_t get_max () const
{
unsigned int count = pages.len;
unsigned int count = pages.length;
for (int i = count - 1; i >= 0; i++)
if (!page_at (i).is_empty ())
return page_map[(unsigned) i].major * page_t::PAGE_BITS + page_at (i).get_max ();
@ -670,17 +670,17 @@ struct hb_set_t
page_t *page_for_insert (hb_codepoint_t g)
{
page_map_t map = {get_major (g), pages.len};
page_map_t map = {get_major (g), pages.length};
unsigned int i;
if (!page_map.bfind (map, &i, HB_BFIND_NOT_FOUND_STORE_CLOSEST))
{
if (!resize (pages.len + 1))
if (!resize (pages.length + 1))
return nullptr;
pages[map.index].init0 ();
memmove (page_map + i + 1,
page_map + i,
(page_map.len - 1 - i) * page_map.item_size);
(page_map.length - 1 - i) * page_map.item_size);
page_map[i] = map;
}
return &pages[page_map[i].index];

View File

@ -57,7 +57,7 @@ hb_plan_subset_cff_fdselect (const hb_vector_t<hb_codepoint_t> &glyphs,
subset_fdselect_format = 0;
unsigned int num_ranges = 0;
unsigned int subset_num_glyphs = glyphs.len;
unsigned int subset_num_glyphs = glyphs.length;
if (subset_num_glyphs == 0)
return true;
@ -106,7 +106,7 @@ hb_plan_subset_cff_fdselect (const hb_vector_t<hb_codepoint_t> &glyphs,
}
/* update each font dict index stored as "code" in fdselect_ranges */
for (unsigned int i = 0; i < fdselect_ranges.len; i++)
for (unsigned int i = 0; i < fdselect_ranges.length; i++)
fdselect_ranges[i].code = fdmap[fdselect_ranges[i].code];
}
@ -153,8 +153,8 @@ serialize_fdselect_3_4 (hb_serialize_context_t *c,
TRACE_SERIALIZE (this);
FDSELECT3_4 *p = c->allocate_size<FDSELECT3_4> (size);
if (unlikely (p == nullptr)) return_trace (false);
p->nRanges.set (fdselect_ranges.len);
for (unsigned int i = 0; i < fdselect_ranges.len; i++)
p->nRanges.set (fdselect_ranges.length);
for (unsigned int i = 0; i < fdselect_ranges.length; i++)
{
p->ranges[i].first.set (fdselect_ranges[i].glyph);
p->ranges[i].fd.set (fdselect_ranges[i].code);

View File

@ -109,9 +109,9 @@ struct StrEncoder
void copy_str (const ByteStr &str)
{
unsigned int offset = buff.len;
unsigned int offset = buff.length;
buff.resize (offset + str.len);
if (unlikely (buff.len < offset + str.len))
if (unlikely (buff.length < offset + str.len))
{
set_error ();
return;
@ -295,11 +295,11 @@ struct SubrFlattener
bool flatten (StrBuffArray &flat_charstrings)
{
if (!flat_charstrings.resize (glyphs.len))
if (!flat_charstrings.resize (glyphs.length))
return false;
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
flat_charstrings[i].init ();
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
{
hb_codepoint_t glyph = glyphs[i];
const ByteStr str = (*acc.charStrings)[glyph];
@ -334,7 +334,7 @@ struct SubrClosures
if (!local_closures.resize (fd_count))
valid = false;
for (unsigned int i = 0; i < local_closures.len; i++)
for (unsigned int i = 0; i < local_closures.length; i++)
{
local_closures[i] = hb_set_create ();
if (local_closures[i] == hb_set_get_empty ())
@ -345,7 +345,7 @@ struct SubrClosures
void fini ()
{
hb_set_destroy (global_closure);
for (unsigned int i = 0; i < local_closures.len; i++)
for (unsigned int i = 0; i < local_closures.length; i++)
hb_set_destroy (local_closures[i]);
local_closures.fini ();
}
@ -353,7 +353,7 @@ struct SubrClosures
void reset ()
{
hb_set_clear (global_closure);
for (unsigned int i = 0; i < local_closures.len; i++)
for (unsigned int i = 0; i < local_closures.length; i++)
hb_set_clear (local_closures[i]);
}
@ -432,7 +432,7 @@ struct ParsedCStr : ParsedValues<ParsedCSOp>
bool at_end (unsigned int pos) const
{
return ((pos + 1 >= values.len) /* CFF2 */
return ((pos + 1 >= values.length) /* CFF2 */
|| (values[pos + 1].op == OpCode_return));
}
@ -467,7 +467,7 @@ struct ParsedCStrs : hb_vector_t<ParsedCStr>
{
SUPER::init ();
resize (len_);
for (unsigned int i = 0; i < len; i++)
for (unsigned int i = 0; i < length; i++)
(*this)[i].init ();
}
void fini () { SUPER::fini_deep (); }
@ -500,12 +500,12 @@ struct SubrSubsetParam
return parsed_charstring;
case CSType_LocalSubr:
if (likely (context.subr_num < parsed_local_subrs->len))
if (likely (context.subr_num < parsed_local_subrs->length))
return &(*parsed_local_subrs)[context.subr_num];
break;
case CSType_GlobalSubr:
if (likely (context.subr_num < parsed_global_subrs->len))
if (likely (context.subr_num < parsed_global_subrs->length))
return &(*parsed_global_subrs)[context.subr_num];
break;
}
@ -521,7 +521,7 @@ struct SubrSubsetParam
/* If the called subroutine is parsed partially but not completely yet,
* it must be because we are calling it recursively.
* Handle it as an error. */
if (unlikely (calling && !parsed_str->is_parsed () && (parsed_str->values.len > 0)))
if (unlikely (calling && !parsed_str->is_parsed () && (parsed_str->values.length > 0)))
env.set_error ();
else
current_parsed_str = parsed_str;
@ -548,7 +548,7 @@ struct SubrRemap : Remap
* no optimization based on usage counts. fonttools doesn't appear doing that either.
*/
reset (closure->get_max () + 1);
for (hb_codepoint_t old_num = 0; old_num < len; old_num++)
for (hb_codepoint_t old_num = 0; old_num < length; old_num++)
{
if (hb_set_has (closure, old_num))
add (old_num);
@ -564,7 +564,7 @@ struct SubrRemap : Remap
hb_codepoint_t operator[] (unsigned int old_num) const
{
if (old_num >= len)
if (old_num >= length)
return CFF_UNDEF_CODE;
else
return Remap::operator[] (old_num);
@ -600,7 +600,7 @@ struct SubrRemaps
void create (SubrClosures& closures)
{
global_remap.create (closures.global_closure);
for (unsigned int i = 0; i < local_remaps.len; i++)
for (unsigned int i = 0; i < local_remaps.length; i++)
local_remaps[i].create (closures.local_closures[i]);
}
@ -652,7 +652,7 @@ struct SubrSubsetter
closures.init (acc.fdCount);
remaps.init (acc.fdCount);
parsed_charstrings.init (glyphs.len);
parsed_charstrings.init (glyphs.length);
parsed_global_subrs.init (acc.globalSubrs->count);
parsed_local_subrs.resize (acc.fdCount);
for (unsigned int i = 0; i < acc.fdCount; i++)
@ -663,7 +663,7 @@ struct SubrSubsetter
return false;
/* phase 1 & 2 */
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
{
hb_codepoint_t glyph = glyphs[i];
const ByteStr str = (*acc.charStrings)[glyph];
@ -690,7 +690,7 @@ struct SubrSubsetter
if (drop_hints)
{
/* mark hint ops and arguments for drop */
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
{
unsigned int fd = acc.fdSelect->get_fd (glyphs[i]);
if (unlikely (fd >= acc.fdCount))
@ -712,7 +712,7 @@ struct SubrSubsetter
/* after dropping hints recreate closures of actually used subrs */
closures.reset ();
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
{
unsigned int fd = acc.fdSelect->get_fd (glyphs[i]);
if (unlikely (fd >= acc.fdCount))
@ -733,9 +733,9 @@ struct SubrSubsetter
bool encode_charstrings (ACC &acc, const hb_vector_t<hb_codepoint_t> &glyphs, StrBuffArray &buffArray) const
{
if (unlikely (!buffArray.resize (glyphs.len)))
if (unlikely (!buffArray.resize (glyphs.length)))
return false;
for (unsigned int i = 0; i < glyphs.len; i++)
for (unsigned int i = 0; i < glyphs.length; i++)
{
unsigned int fd = acc.fdSelect->get_fd (glyphs[i]);
if (unlikely (fd >= acc.fdCount))
@ -752,7 +752,7 @@ struct SubrSubsetter
if (unlikely (!buffArray.resize (count)))
return false;
for (unsigned int old_num = 0; old_num < subrs.len; old_num++)
for (unsigned int old_num = 0; old_num < subrs.length; old_num++)
{
hb_codepoint_t new_num = remap[old_num];
if (new_num != CFF_UNDEF_CODE)
@ -813,7 +813,7 @@ struct SubrSubsetter
{
bool seen_hint = false;
for (unsigned int pos = 0; pos < str.values.len; pos++)
for (unsigned int pos = 0; pos < str.values.length; pos++)
{
bool has_hint = false;
switch (str.values[pos].op)
@ -893,7 +893,7 @@ struct SubrSubsetter
void collect_subr_refs_in_str (ParsedCStr &str, const SubrSubsetParam &param)
{
for (unsigned int pos = 0; pos < str.values.len; pos++)
for (unsigned int pos = 0; pos < str.values.length; pos++)
{
if (!str.values[pos].for_drop ())
{

View File

@ -330,7 +330,7 @@ struct RangeList : hb_vector_t<code_pair>
bool finalize (unsigned int last_glyph)
{
bool two_byte = false;
for (unsigned int i = (*this).len; i > 0; i--)
for (unsigned int i = (*this).length; i > 0; i--)
{
code_pair &pair = (*this)[i - 1];
unsigned int nLeft = last_glyph - pair.glyph - 1;
@ -471,9 +471,9 @@ struct cff_subset_plan {
supp_size = 0;
supp_codes.init ();
subset_enc_num_codes = plan->glyphs.len - 1;
subset_enc_num_codes = plan->glyphs.length - 1;
unsigned int glyph;
for (glyph = 1; glyph < plan->glyphs.len; glyph++)
for (glyph = 1; glyph < plan->glyphs.length; glyph++)
{
hb_codepoint_t orig_glyph = plan->glyphs[glyph];
code = acc.glyph_to_code (orig_glyph);
@ -494,12 +494,12 @@ struct cff_subset_plan {
{
hb_codepoint_t sid = acc.glyph_to_sid (orig_glyph);
encoding->get_supplement_codes (sid, supp_codes);
for (unsigned int i = 0; i < supp_codes.len; i++)
for (unsigned int i = 0; i < supp_codes.length; i++)
{
code_pair pair = { supp_codes[i], sid };
subset_enc_supp_codes.push (pair);
}
supp_size += SuppEncoding::static_size * supp_codes.len;
supp_size += SuppEncoding::static_size * supp_codes.length;
}
}
supp_codes.fini ();
@@ -508,7 +508,7 @@ struct cff_subset_plan {
assert (subset_enc_num_codes <= 0xFF);
size0 = Encoding0::min_size + HBUINT8::static_size * subset_enc_num_codes;
size1 = Encoding1::min_size + Encoding1_Range::static_size * subset_enc_code_ranges.len;
size1 = Encoding1::min_size + Encoding1_Range::static_size * subset_enc_code_ranges.length;
if (size0 < size1)
subset_enc_format = 0;
@@ -517,8 +517,8 @@ struct cff_subset_plan {
return Encoding::calculate_serialized_size (
subset_enc_format,
subset_enc_format? subset_enc_code_ranges.len: subset_enc_num_codes,
subset_enc_supp_codes.len);
subset_enc_format? subset_enc_code_ranges.length: subset_enc_num_codes,
subset_enc_supp_codes.length);
}
unsigned int plan_subset_charset (const OT::cff1::accelerator_subset_t &acc, hb_subset_plan_t *plan)
@@ -528,7 +528,7 @@ struct cff_subset_plan {
subset_charset_ranges.resize (0);
unsigned int glyph;
for (glyph = 1; glyph < plan->glyphs.len; glyph++)
for (glyph = 1; glyph < plan->glyphs.length; glyph++)
{
hb_codepoint_t orig_glyph = plan->glyphs[glyph];
sid = acc.glyph_to_sid (orig_glyph);
@@ -546,11 +546,11 @@ struct cff_subset_plan {
bool two_byte = subset_charset_ranges.finalize (glyph);
size0 = Charset0::min_size + HBUINT16::static_size * (plan->glyphs.len - 1);
size0 = Charset0::min_size + HBUINT16::static_size * (plan->glyphs.length - 1);
if (!two_byte)
size_ranges = Charset1::min_size + Charset1_Range::static_size * subset_charset_ranges.len;
size_ranges = Charset1::min_size + Charset1_Range::static_size * subset_charset_ranges.length;
else
size_ranges = Charset2::min_size + Charset2_Range::static_size * subset_charset_ranges.len;
size_ranges = Charset2::min_size + Charset2_Range::static_size * subset_charset_ranges.length;
if (size0 < size_ranges)
subset_charset_format = 0;
@@ -561,7 +561,7 @@ struct cff_subset_plan {
return Charset::calculate_serialized_size (
subset_charset_format,
subset_charset_format? subset_charset_ranges.len: plan->glyphs.len);
subset_charset_format? subset_charset_ranges.length: plan->glyphs.length);
}
bool collect_sids_in_dicts (const OT::cff1::accelerator_subset_t &acc)
@@ -591,17 +591,17 @@ struct cff_subset_plan {
hb_subset_plan_t *plan)
{
/* make sure notdef is first */
if ((plan->glyphs.len == 0) || (plan->glyphs[0] != 0)) return false;
if ((plan->glyphs.length == 0) || (plan->glyphs[0] != 0)) return false;
final_size = 0;
num_glyphs = plan->glyphs.len;
num_glyphs = plan->glyphs.length;
orig_fdcount = acc.fdCount;
drop_hints = plan->drop_hints;
desubroutinize = plan->desubroutinize;
/* check whether the subset renumbers any glyph IDs */
gid_renum = false;
for (unsigned int glyph = 0; glyph < plan->glyphs.len; glyph++)
for (unsigned int glyph = 0; glyph < plan->glyphs.length; glyph++)
{
if (plan->glyphs[glyph] != glyph) {
gid_renum = true;
@@ -708,7 +708,7 @@ struct cff_subset_plan {
offsets.globalSubrsInfo.offSize = calcOffSize (dataSize);
if (unlikely (offsets.globalSubrsInfo.offSize > 4))
return false;
offsets.globalSubrsInfo.size = CFF1Subrs::calculate_serialized_size (offsets.globalSubrsInfo.offSize, subset_globalsubrs.len, dataSize);
offsets.globalSubrsInfo.size = CFF1Subrs::calculate_serialized_size (offsets.globalSubrsInfo.offSize, subset_globalsubrs.length, dataSize);
/* local subrs */
if (!offsets.localSubrsInfos.resize (orig_fdcount))
@@ -731,7 +731,7 @@ struct cff_subset_plan {
offsets.localSubrsInfos[fd].offSize = calcOffSize (dataSize);
if (unlikely (offsets.localSubrsInfos[fd].offSize > 4))
return false;
offsets.localSubrsInfos[fd].size = CFF1Subrs::calculate_serialized_size (offsets.localSubrsInfos[fd].offSize, subset_localsubrs[fd].len, dataSize);
offsets.localSubrsInfos[fd].size = CFF1Subrs::calculate_serialized_size (offsets.localSubrsInfos[fd].offSize, subset_localsubrs[fd].length, dataSize);
}
}
}
@@ -769,7 +769,7 @@ struct cff_subset_plan {
offsets.FDArrayInfo.offset = final_size;
CFF1FontDict_OpSerializer fontSzr;
unsigned int dictsSize = 0;
for (unsigned int i = 0; i < acc.fontDicts.len; i++)
for (unsigned int i = 0; i < acc.fontDicts.length; i++)
if (fdmap.includes (i))
dictsSize += FontDict::calculate_serialized_size (acc.fontDicts[i], fontSzr);
@@ -786,7 +786,7 @@ struct cff_subset_plan {
offsets.charStringsInfo.offSize = calcOffSize (dataSize);
if (unlikely (offsets.charStringsInfo.offSize > 4))
return false;
final_size += CFF1CharStrings::calculate_serialized_size (offsets.charStringsInfo.offSize, plan->glyphs.len, dataSize);
final_size += CFF1CharStrings::calculate_serialized_size (offsets.charStringsInfo.offSize, plan->glyphs.length, dataSize);
}
/* private dicts & local subrs */
@@ -818,8 +818,8 @@ struct cff_subset_plan {
if (!acc.is_CID ())
offsets.privateDictInfo = fontdicts_mod[0].privateDictInfo;
return ((subset_charstrings.len == plan->glyphs.len)
&& (fontdicts_mod.len == subset_fdcount));
return ((subset_charstrings.length == plan->glyphs.length)
&& (fontdicts_mod.length == subset_fdcount));
}
unsigned int get_final_size () const { return final_size; }
@@ -977,7 +977,7 @@ static inline bool _write_cff1 (const cff_subset_plan &plan,
{
assert (plan.offsets.FDSelectInfo.offset == c.head - c.start);
if (unlikely (!hb_serialize_cff_fdselect (&c, glyphs.len, *acc.fdSelect, acc.fdCount,
if (unlikely (!hb_serialize_cff_fdselect (&c, glyphs.length, *acc.fdSelect, acc.fdCount,
plan.subset_fdselect_format, plan.offsets.FDSelectInfo.size,
plan.subset_fdselect_ranges)))
{
@@ -1016,7 +1016,7 @@ static inline bool _write_cff1 (const cff_subset_plan &plan,
/* private dicts & local subrs */
assert (plan.offsets.privateDictInfo.offset == c.head - c.start);
for (unsigned int i = 0; i < acc.privateDicts.len; i++)
for (unsigned int i = 0; i < acc.privateDicts.length; i++)
{
if (plan.fdmap.includes (i))
{

View File

@@ -138,7 +138,7 @@ struct CFF2CSOpSet_Flatten : CFF2CSOpSet<CFF2CSOpSet_Flatten, FlattenParam>
{
const BlendArg &arg1 = env.argStack[i + j];
if (unlikely (!((arg1.blending () && (arg.numValues == arg1.numValues) && (arg1.valueIndex == j) &&
(arg1.deltas.len == env.get_region_count ())))))
(arg1.deltas.length == env.get_region_count ())))))
{
env.set_error ();
return;
@@ -149,7 +149,7 @@ struct CFF2CSOpSet_Flatten : CFF2CSOpSet<CFF2CSOpSet_Flatten, FlattenParam>
for (unsigned int j = 0; j < arg.numValues; j++)
{
const BlendArg &arg1 = env.argStack[i + j];
for (unsigned int k = 0; k < arg1.deltas.len; k++)
for (unsigned int k = 0; k < arg1.deltas.length; k++)
encoder.encode_num (arg1.deltas[k]);
}
/* flatten the number of values followed by blend operator */
@@ -310,7 +310,7 @@ struct cff2_subset_plan {
/* global subrs */
unsigned int dataSize = subset_globalsubrs.total_size ();
offsets.globalSubrsInfo.offSize = calcOffSize (dataSize);
offsets.globalSubrsInfo.size = CFF2Subrs::calculate_serialized_size (offsets.globalSubrsInfo.offSize, subset_globalsubrs.len, dataSize);
offsets.globalSubrsInfo.size = CFF2Subrs::calculate_serialized_size (offsets.globalSubrsInfo.offSize, subset_globalsubrs.length, dataSize);
/* local subrs */
if (!offsets.localSubrsInfos.resize (orig_fdcount))
@@ -331,7 +331,7 @@ struct cff2_subset_plan {
{
offsets.localSubrsInfos[fd].offset = final_size;
offsets.localSubrsInfos[fd].offSize = calcOffSize (dataSize);
offsets.localSubrsInfos[fd].size = CFF2Subrs::calculate_serialized_size (offsets.localSubrsInfos[fd].offSize, subset_localsubrs[fd].len, dataSize);
offsets.localSubrsInfos[fd].size = CFF2Subrs::calculate_serialized_size (offsets.localSubrsInfos[fd].offSize, subset_localsubrs[fd].length, dataSize);
}
}
}
@@ -372,7 +372,7 @@ struct cff2_subset_plan {
offsets.FDArrayInfo.offset = final_size;
CFFFontDict_OpSerializer fontSzr;
unsigned int dictsSize = 0;
for (unsigned int i = 0; i < acc.fontDicts.len; i++)
for (unsigned int i = 0; i < acc.fontDicts.length; i++)
if (fdmap.includes (i))
dictsSize += FontDict::calculate_serialized_size (acc.fontDicts[i], fontSzr);
@@ -385,7 +385,7 @@ struct cff2_subset_plan {
offsets.charStringsInfo.offset = final_size;
unsigned int dataSize = subset_charstrings.total_size ();
offsets.charStringsInfo.offSize = calcOffSize (dataSize);
final_size += CFF2CharStrings::calculate_serialized_size (offsets.charStringsInfo.offSize, plan->glyphs.len, dataSize);
final_size += CFF2CharStrings::calculate_serialized_size (offsets.charStringsInfo.offSize, plan->glyphs.length, dataSize);
}
/* private dicts & local subrs */
@@ -493,7 +493,7 @@ static inline bool _write_cff2 (const cff2_subset_plan &plan,
{
assert (plan.offsets.FDSelectInfo.offset == c.head - c.start);
if (unlikely (!hb_serialize_cff_fdselect (&c, glyphs.len, *(const FDSelect *)acc.fdSelect, acc.fdArray->count,
if (unlikely (!hb_serialize_cff_fdselect (&c, glyphs.length, *(const FDSelect *)acc.fdSelect, acc.fdArray->count,
plan.subset_fdselect_format, plan.offsets.FDSelectInfo.size,
plan.subset_fdselect_ranges)))
{
@@ -531,7 +531,7 @@ static inline bool _write_cff2 (const cff2_subset_plan &plan,
/* private dicts & local subrs */
assert (plan.offsets.privateDictsOffset == c.head - c.start);
for (unsigned int i = 0; i < acc.privateDicts.len; i++)
for (unsigned int i = 0; i < acc.privateDicts.length; i++)
{
if (plan.fdmap.includes (i))
{

View File

@@ -39,17 +39,17 @@ _calculate_glyf_and_loca_prime_size (const OT::glyf::accelerator_t &glyf,
hb_vector_t<unsigned int> *instruction_ranges /* OUT */)
{
unsigned int total = 0;
for (unsigned int i = 0; i < glyph_ids.len; i++)
for (unsigned int i = 0; i < glyph_ids.length; i++)
{
hb_codepoint_t next_glyph = glyph_ids[i];
if (!instruction_ranges->resize (instruction_ranges->len + 2))
if (!instruction_ranges->resize (instruction_ranges->length + 2))
{
DEBUG_MSG(SUBSET, nullptr, "Failed to resize instruction_ranges.");
return false;
}
unsigned int *instruction_start = &(*instruction_ranges)[instruction_ranges->len - 2];
unsigned int *instruction_start = &(*instruction_ranges)[instruction_ranges->length - 2];
*instruction_start = 0;
unsigned int *instruction_end = &(*instruction_ranges)[instruction_ranges->len - 1];
unsigned int *instruction_end = &(*instruction_ranges)[instruction_ranges->length - 1];
*instruction_end = 0;
unsigned int start_offset, end_offset;
@@ -79,7 +79,7 @@ _calculate_glyf_and_loca_prime_size (const OT::glyf::accelerator_t &glyf,
*glyf_size = total;
*use_short_loca = (total <= 131070);
*loca_size = (glyph_ids.len + 1)
*loca_size = (glyph_ids.length + 1)
* (*use_short_loca ? sizeof (OT::HBUINT16) : sizeof (OT::HBUINT32));
DEBUG_MSG(SUBSET, nullptr, "preparing to subset glyf: final size %d, loca size %d, using %s loca",
@@ -167,7 +167,7 @@ _write_glyf_and_loca_prime (hb_subset_plan_t *plan,
char *glyf_prime_data_next = glyf_prime_data;
bool success = true;
for (unsigned int i = 0; i < glyph_ids.len; i++)
for (unsigned int i = 0; i < glyph_ids.length; i++)
{
unsigned int start_offset, end_offset;
if (unlikely (!(glyf.get_offsets (glyph_ids[i], &start_offset, &end_offset) &&
@@ -215,7 +215,7 @@ _write_glyf_and_loca_prime (hb_subset_plan_t *plan,
glyf_prime_data_next += length + (length % 2); // Align to 2 bytes for short loca.
}
success = success && _write_loca_entry (glyph_ids.len,
success = success && _write_loca_entry (glyph_ids.length,
glyf_prime_data_next - glyf_prime_data,
use_short_loca,
loca_prime_data,

View File

@@ -159,7 +159,7 @@ static void
_create_old_gid_to_new_gid_map (const hb_vector_t<hb_codepoint_t> &glyphs,
hb_map_t *glyph_map)
{
for (unsigned int i = 0; i < glyphs.len; i++) {
for (unsigned int i = 0; i < glyphs.length; i++) {
glyph_map->set (glyphs[i], i);
}
}

View File

@@ -652,7 +652,7 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
/* Scan events and save features for each range. */
hb_vector_t<active_feature_t> active_features;
unsigned int last_index = 0;
for (unsigned int i = 0; i < feature_events.len; i++)
for (unsigned int i = 0; i < feature_events.length; i++)
{
feature_event_t *event = &feature_events[i];
@@ -661,26 +661,26 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
/* Save a snapshot of active features and the range. */
range_record_t *range = range_records.push ();
unsigned int offset = feature_records.len;
unsigned int offset = feature_records.length;
active_features.qsort ();
for (unsigned int j = 0; j < active_features.len; j++)
for (unsigned int j = 0; j < active_features.length; j++)
{
if (!j || active_features[j].rec.tagFeature != feature_records[feature_records.len - 1].tagFeature)
if (!j || active_features[j].rec.tagFeature != feature_records[feature_records.length - 1].tagFeature)
{
feature_records.push (active_features[j].rec);
}
else
{
/* Overrides value for existing feature. */
feature_records[feature_records.len - 1].lParameter = active_features[j].rec.lParameter;
feature_records[feature_records.length - 1].lParameter = active_features[j].rec.lParameter;
}
}
/* Will convert to pointer after all is ready, since feature_records.array
* may move as we grow it. */
range->props.potfRecords = reinterpret_cast<OPENTYPE_FEATURE_RECORD *> (offset);
range->props.cotfRecords = feature_records.len - offset;
range->props.cotfRecords = feature_records.length - offset;
range->index_first = last_index;
range->index_last = event->index - 1;
@@ -699,11 +699,11 @@ _hb_uniscribe_shape (hb_shape_plan_t *shape_plan,
}
}
if (!range_records.len) /* No active feature found. */
if (!range_records.length) /* No active feature found. */
num_features = 0;
/* Fixup the pointers. */
for (unsigned int i = 0; i < range_records.len; i++)
for (unsigned int i = 0; i < range_records.length; i++)
{
range_record_t *range = &range_records[i];
range->props.potfRecords = (OPENTYPE_FEATURE_RECORD *) feature_records + reinterpret_cast<uintptr_t> (range->props.potfRecords);
@@ -853,8 +853,8 @@ retry:
range--;
while (log_clusters[k] > range->index_last)
range++;
if (!range_properties.len ||
&range->props != range_properties[range_properties.len - 1])
if (!range_properties.length ||
&range->props != range_properties[range_properties.length - 1])
{
TEXTRANGE_PROPERTIES **props = range_properties.push ();
int *c = range_char_counts.push ();
@@ -869,7 +869,7 @@ retry:
}
else
{
range_char_counts[range_char_counts.len - 1]++;
range_char_counts[range_char_counts.length - 1]++;
}
last_range = range;
@@ -888,7 +888,7 @@ retry:
language_tag,
range_char_counts.arrayZ (),
range_properties.arrayZ (),
range_properties.len,
range_properties.length,
pchars + chars_offset,
item_chars_len,
glyphs_size - glyphs_offset,
@@ -929,7 +929,7 @@ retry:
language_tag,
range_char_counts.arrayZ (),
range_properties.arrayZ (),
range_properties.len,
range_properties.length,
pchars + chars_offset,
log_clusters + chars_offset,
char_props + chars_offset,

View File

@@ -42,7 +42,7 @@ struct hb_vector_t
hb_vector_t () { init (); }
~hb_vector_t () { fini (); }
unsigned int len;
unsigned int length;
private:
unsigned int allocated; /* == 0 means allocation failed. */
Type *arrayZ_;
@@ -51,7 +51,7 @@ struct hb_vector_t
void init ()
{
len = 0;
length = 0;
allocated = ARRAY_LENGTH (static_array);
arrayZ_ = nullptr;
}
@@ -61,41 +61,41 @@ struct hb_vector_t
if (arrayZ_)
free (arrayZ_);
arrayZ_ = nullptr;
allocated = len = 0;
allocated = length = 0;
}
void fini_deep ()
{
Type *array = arrayZ();
unsigned int count = len;
unsigned int count = length;
for (unsigned int i = 0; i < count; i++)
array[i].fini ();
fini ();
}
Type * arrayZ () { return arrayZ_ ? arrayZ_ : static_array; }
const Type * arrayZ () const { return arrayZ_ ? arrayZ_ : static_array; }
Type * arrayZ () { return arrayZ_ ? arrayZ_ : static_array; }
Type& operator [] (int i_)
{
unsigned int i = (unsigned int) i_;
if (unlikely (i >= len))
if (unlikely (i >= length))
return Crap (Type);
return arrayZ()[i];
}
const Type& operator [] (int i_) const
{
unsigned int i = (unsigned int) i_;
if (unlikely (i >= len))
if (unlikely (i >= length))
return Null(Type);
return arrayZ()[i];
}
explicit_operator bool () const { return len; }
explicit_operator bool () const { return length; }
hb_array_t<Type> as_array ()
{ return hb_array (arrayZ(), len); }
{ return hb_array (arrayZ(), length); }
hb_array_t<const Type> as_array () const
{ return hb_array (arrayZ(), len); }
{ return hb_array (arrayZ(), length); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count);}
@@ -107,9 +107,9 @@ struct hb_vector_t
{ return as_array ().sub_array (start_offset, count);}
hb_sorted_array_t<Type> as_sorted_array ()
{ return hb_sorted_array (arrayZ(), len); }
{ return hb_sorted_array (arrayZ(), length); }
hb_sorted_array_t<const Type> as_sorted_array () const
{ return hb_sorted_array (arrayZ(), len); }
{ return hb_sorted_array (arrayZ(), length); }
hb_array_t<const Type> sorted_sub_array (unsigned int start_offset, unsigned int count) const
{ return as_sorted_array ().sorted_sub_array (start_offset, count);}
@@ -130,9 +130,9 @@ struct hb_vector_t
Type *push ()
{
if (unlikely (!resize (len + 1)))
if (unlikely (!resize (length + 1)))
return &Crap(Type);
return &arrayZ()[len - 1];
return &arrayZ()[length - 1];
}
Type *push (const Type& v)
{
@@ -143,7 +143,7 @@ struct hb_vector_t
bool in_error () const { return allocated == 0; }
/* Allocate for size but don't adjust len. */
/* Allocate for size but don't adjust length. */
bool alloc (unsigned int size)
{
if (unlikely (!allocated))
@@ -164,7 +164,7 @@ struct hb_vector_t
{
new_array = (Type *) calloc (new_allocated, sizeof (Type));
if (new_array)
memcpy (new_array, static_array, len * sizeof (Type));
memcpy (new_array, static_array, length * sizeof (Type));
}
else
{
@@ -191,42 +191,42 @@ struct hb_vector_t
if (!alloc (size))
return false;
if (size > len)
memset (arrayZ() + len, 0, (size - len) * sizeof (*arrayZ()));
if (size > length)
memset (arrayZ() + length, 0, (size - length) * sizeof (*arrayZ()));
len = size;
length = size;
return true;
}
void pop ()
{
if (!len) return;
len--;
if (!length) return;
length--;
}
void remove (unsigned int i)
{
if (unlikely (i >= len))
if (unlikely (i >= length))
return;
Type *array = arrayZ();
memmove (static_cast<void *> (&array[i]),
static_cast<void *> (&array[i + 1]),
(len - i - 1) * sizeof (Type));
len--;
(length - i - 1) * sizeof (Type));
length--;
}
void shrink (int size_)
{
unsigned int size = size_ < 0 ? 0u : (unsigned int) size_;
if (size < len)
len = size;
if (size < length)
length = size;
}
template <typename T>
Type *find (T v)
{
Type *array = arrayZ();
for (unsigned int i = 0; i < len; i++)
for (unsigned int i = 0; i < length; i++)
if (array[i] == v)
return &array[i];
return nullptr;
@@ -235,7 +235,7 @@ struct hb_vector_t
const Type *find (T v) const
{
const Type *array = arrayZ();
for (unsigned int i = 0; i < len; i++)
for (unsigned int i = 0; i < length; i++)
if (array[i] == v)
return &array[i];
return nullptr;

View File

@@ -36,10 +36,10 @@ struct array_iter_t : hb_iter_t<array_iter_t<T>, T>, hb_iter_mixin_t<array_iter_
typedef T __item_type__;
T& __item_at__ (unsigned i) const { return arr[i]; }
bool __more__ () const { return arr.len; }
bool __more__ () const { return arr.length; }
void __forward__ (unsigned n) { arr += n; }
void __rewind__ (unsigned n) { arr -= n; }
unsigned __len__ () const { return arr.len; }
unsigned __len__ () const { return arr.length; }
bool __random_access__ () const { return true; }
private: