replace .set() with =

remove .gitignore
Michiharu Ariza 2019-03-29 23:53:06 -07:00
parent 8a8965be39
commit 3fbd242ba6
3 changed files with 22 additions and 23 deletions
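The mechanical change relies on HarfBuzz's endian-aware integer wrappers (HBUINT8/16/32, F2DOT14, and friends) supporting direct assignment, so `x = v` performs the same big-endian store that `x.set (v)` did. A minimal sketch of the idea, using a hypothetical simplified stand-in for the real BEInt/IntType machinery (illustrative only, not the actual hb-open-type code):

#include <cstdint>
#include <cstdio>

// Illustrative stand-in for HarfBuzz's big-endian integer wrappers
// (hypothetical simplified type, not the real implementation).
struct BEUint16
{
  // Assignment stores the value big-endian, byte by byte, which is why
  // plain `x = v` can replace the old `x.set (v)` call sites.
  BEUint16& operator = (uint16_t i)
  {
    v[0] = (uint8_t) (i >> 8);
    v[1] = (uint8_t) (i & 0xFFu);
    return *this;
  }

  // Reading converts back to the native byte order.
  operator uint16_t () const { return (uint16_t) ((v[0] << 8) | v[1]); }

  private:
  uint8_t v[2];
};

int main ()
{
  BEUint16 x;
  x = 0x1234;                                    // was: x.set (0x1234);
  printf ("0x%04X\n", (unsigned) (uint16_t) x);  // prints 0x1234
  return 0;
}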

.gitignore (vendored) | 1 -

@@ -1 +0,0 @@
-libtool

src/hb-ot-var-gvar-table.hh

@@ -426,13 +426,13 @@ struct gvar
     gvar *out = c->serializer->allocate_min<gvar> ();
     if (unlikely (!out)) return_trace (false);
 
-    out->version.major.set (1);
-    out->version.minor.set (0);
-    out->axisCount.set (axisCount);
-    out->sharedTupleCount.set (sharedTupleCount);
+    out->version.major = 1;
+    out->version.minor = 0;
+    out->axisCount = axisCount;
+    out->sharedTupleCount = sharedTupleCount;
 
     unsigned int num_glyphs = c->plan->num_output_glyphs ();
-    out->glyphCount.set (num_glyphs);
+    out->glyphCount = num_glyphs;
 
     unsigned int subset_data_size = 0;
     for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
@@ -443,26 +443,26 @@ struct gvar
     }
 
     bool long_offset = subset_data_size & ~0xFFFFu;
-    out->flags.set (long_offset? 1: 0);
+    out->flags = long_offset? 1: 0;
 
     HBUINT8 *subset_offsets = c->serializer->allocate_size<HBUINT8> ((long_offset? 4: 2) * (num_glyphs+1));
     if (!subset_offsets) return_trace (false);
 
     /* shared tuples */
     if (!sharedTupleCount || !sharedTuples)
-      out->sharedTuples.set (0);
+      out->sharedTuples = 0;
     else
     {
       unsigned int shared_tuple_size = F2DOT14::static_size * axisCount * sharedTupleCount;
       F2DOT14 *tuples = c->serializer->allocate_size<F2DOT14> (shared_tuple_size);
       if (!tuples) return_trace (false);
-      out->sharedTuples.set ((char *)tuples - (char *)out);
+      out->sharedTuples = (char *)tuples - (char *)out;
       memcpy (tuples, &(this+sharedTuples), shared_tuple_size);
     }
 
     char *subset_data = c->serializer->allocate_size<char>(subset_data_size);
     if (!subset_data) return_trace (false);
-    out->dataZ.set (subset_data - (char *)out);
+    out->dataZ = subset_data - (char *)out;
 
     unsigned int glyph_offset = 0;
     for (hb_codepoint_t gid = 0; gid < num_glyphs; gid++)
@@ -471,18 +471,18 @@ struct gvar
       unsigned int length = c->plan->old_gid_for_new_gid (gid, &old_gid)? get_glyph_var_data_length (old_gid): 0;
 
       if (long_offset)
-        ((HBUINT32 *)subset_offsets)[gid].set (glyph_offset);
+        ((HBUINT32 *)subset_offsets)[gid] = glyph_offset;
       else
-        ((HBUINT16 *)subset_offsets)[gid].set (glyph_offset / 2);
+        ((HBUINT16 *)subset_offsets)[gid] = glyph_offset / 2;
 
       if (length > 0) memcpy (subset_data, get_glyph_var_data (old_gid), length);
       subset_data += length;
       glyph_offset += length;
     }
 
     if (long_offset)
-      ((HBUINT32 *)subset_offsets)[num_glyphs].set (glyph_offset);
+      ((HBUINT32 *)subset_offsets)[num_glyphs] = glyph_offset;
     else
-      ((HBUINT16 *)subset_offsets)[num_glyphs].set (glyph_offset / 2);
+      ((HBUINT16 *)subset_offsets)[num_glyphs] = glyph_offset / 2;
 
     return_trace (true);
   }
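For context on the offset writes in this hunk: per the OpenType gvar format, glyph variation data offsets are stored either as raw 32-bit values (long form) or as 16-bit values holding offset / 2 (short form), selected by bit 0 of flags; the subsetter above switches to the long form whenever subset_data_size has any bit set above the low 16. A standalone sketch of the two encodings (illustrative, not HarfBuzz code):

#include <cstdint>
#include <cassert>

// Sketch of gvar's two glyphVariationDataOffsets encodings (per the
// OpenType spec; standalone demo, not HarfBuzz code):
//   flags bit 0 clear -> short: 16-bit words holding offset / 2
//   flags bit 0 set   -> long:  32-bit words holding the offset verbatim

static uint16_t encode_short (uint32_t offset)
{
  assert ((offset & 1u) == 0);     // short offsets require even (padded) data
  assert (offset / 2 <= 0xFFFFu);
  return (uint16_t) (offset / 2);
}

static uint32_t decode_short (uint16_t stored)
{
  return (uint32_t) stored * 2;
}

int main ()
{
  // Mirrors the subsetter's choice: any bit above the low 16 forces long.
  uint32_t subset_data_size = 0x12345;
  bool long_offset = subset_data_size & ~0xFFFFu;
  assert (long_offset);

  // A short-form round trip for an offset that still fits.
  uint32_t glyph_offset = 0xFFFE;
  assert (decode_short (encode_short (glyph_offset)) == glyph_offset);
  return 0;
}

Note that the `& ~0xFFFFu` test is conservative: the halved short form can in principle represent offsets up to 0x1FFFE, but switching to long offsets at 0x10000 is always safe.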

src/hb-ot-var-hvar-table.hh

@@ -56,8 +56,8 @@ struct DeltaSetIndexMap
       return_trace (false);
     if (unlikely (!c->extend_min (*this))) return_trace (false);
 
-    format.set (((width-1)<<4)|(inner_bit_count-1));
-    mapCount.set (output_map.length);
+    format = ((width-1)<<4)|(inner_bit_count-1);
+    mapCount = output_map.length;
     HBUINT8 *p = c->allocate_size<HBUINT8> (width * output_map.length);
     if (unlikely (!p)) return_trace (false);
     for (unsigned int i = 0; i < output_map.length; i++)
@@ -68,7 +68,7 @@ struct DeltaSetIndexMap
       unsigned int u = (outer << inner_bit_count)|inner;
       for (unsigned int w = width; w > 0;)
       {
-        p[--w].set (u);
+        p[--w] = u;
         u >>= 8;
       }
       p += width;
@@ -330,15 +330,15 @@ struct HVARVVAR
   {
     TRACE_SUBSET (this);
 
     if (im_plans[index_map_subset_plan_t::ADV_INDEX].is_identity ())
-      advMap.set (0);
+      advMap = 0;
     else if (unlikely (!advMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::ADV_INDEX])))
       return_trace (false);
     if (im_plans[index_map_subset_plan_t::LSB_INDEX].is_identity ())
-      lsbMap.set (0);
+      lsbMap = 0;
     else if (unlikely (!lsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::LSB_INDEX])))
       return_trace (false);
     if (im_plans[index_map_subset_plan_t::RSB_INDEX].is_identity ())
-      rsbMap.set (0);
+      rsbMap = 0;
     else if (unlikely (!rsbMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::RSB_INDEX])))
       return_trace (false);
@@ -359,8 +359,8 @@ struct HVARVVAR
     T *out = c->serializer->allocate_min<T> ();
     if (unlikely (!out)) return_trace (false);
 
-    out->version.major.set (1);
-    out->version.minor.set (0);
+    out->version.major = 1;
+    out->version.minor = 0;
 
     if (!unlikely (out->varStore.serialize (c->serializer, out)
                    .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_remaps.as_array ())))
@@ -430,7 +430,7 @@ struct VVAR : HVARVVAR {
     if (unlikely (!HVARVVAR::serialize_index_maps (c, im_plans)))
       return_trace (false);
     if (!im_plans[index_map_subset_plan_t::VORG_INDEX].get_map_count ())
-      vorgMap.set (0);
+      vorgMap = 0;
     else if (unlikely (!vorgMap.serialize (c, this).serialize (c, im_plans[index_map_subset_plan_t::VORG_INDEX])))
       return_trace (false);
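For reference, the DeltaSetIndexMap hunks above pack each (outer, inner) index pair into `width` big-endian bytes, with the format byte encoding both the entry size and the inner-index bit count. A standalone sketch of that packing, using illustrative parameter values (demo only, not HarfBuzz code):

#include <cstdint>
#include <cstdio>

// Standalone demo of the DeltaSetIndexMap entry packing shown in the diff
// (illustrative parameter values; not HarfBuzz code).
int main ()
{
  unsigned int width = 2;            // bytes per map entry
  unsigned int inner_bit_count = 6;  // low bits of each entry hold inner

  // Format byte: bits 4-5 store width-1, bits 0-3 store inner_bit_count-1,
  // matching `format = ((width-1)<<4)|(inner_bit_count-1);`.
  uint8_t format = (uint8_t) (((width - 1) << 4) | (inner_bit_count - 1));

  unsigned int outer = 3, inner = 17;
  unsigned int u = (outer << inner_bit_count) | inner;

  // Write the entry big-endian, exactly like the `p[--w] = u; u >>= 8;`
  // loop in the diff: last byte first, shifting u down each step.
  uint8_t p[4] = {0};
  for (unsigned int w = width; w > 0;)
  {
    p[--w] = (uint8_t) u;
    u >>= 8;
  }

  printf ("format=0x%02X entry=%02X %02X\n", format, p[0], p[1]);
  // outer=3, inner=17 -> u = (3<<6)|17 = 0xD1 -> bytes 00 D1
  return 0;
}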