From a190140fa47ff5655edbb4eb414175852f722f85 Mon Sep 17 00:00:00 2001
From: Michiharu Ariza
Date: Thu, 21 Feb 2019 15:47:27 -0800
Subject: [PATCH] fix HVAR & VarStore subsetting

---
 src/hb-ot-layout-common.hh  |  15 ++--
 src/hb-ot-var-hvar-table.hh | 140 ++++++++++++++++++------------------
 2 files changed, 77 insertions(+), 78 deletions(-)

diff --git a/src/hb-ot-layout-common.hh b/src/hb-ot-layout-common.hh
index c41865ff2..149a2d066 100644
--- a/src/hb-ot-layout-common.hh
+++ b/src/hb-ot-layout-common.hh
@@ -1741,8 +1741,9 @@ struct VarRegionList
   bool serialize (hb_serialize_context_t *c, const VarRegionList *src)
   {
     TRACE_SERIALIZE (this);
-    if (unlikely (!c->allocate_size (src->get_size ()))) return_trace (false);
-    memcpy (this, src, src->get_size ());
+    unsigned int size = src->get_size ();
+    if (unlikely (!c->allocate_size (size))) return_trace (false);
+    memcpy (this, src, size);
     return_trace (true);
   }
 
@@ -1831,14 +1832,14 @@ struct VarData
     shortCount.set (src->shortCount);
 
     unsigned int row_size = src->get_row_size ();
-    if (unlikely (!c->allocate_size (src->regionIndices.get_size () + row_size * remap.get_count ())))
+    if (unlikely (!c->allocate_size (src->regionIndices.get_size () + (row_size * remap.get_count ()))))
       return_trace (false);
 
     memcpy (&regionIndices, &src->regionIndices, src->regionIndices.get_size ());
 
     HBUINT8 *p = get_delta_bytes ();
     for (unsigned int i = 0; i < remap.get_count (); i++)
     {
-      memcpy (p, src->get_delta_bytes () + remap.new_to_old (i) * row_size, row_size);
+      memcpy (p, src->get_delta_bytes () + (row_size * remap.new_to_old (i)), row_size);
       p += row_size;
     }
 
@@ -1899,7 +1900,8 @@ struct VariationStore
 		  const hb_array_t &inner_remaps)
   {
     TRACE_SUBSET (this);
-    if (unlikely (!c->extend_min (*this))) return_trace (false);
+    unsigned int size = min_size + HBUINT32::static_size * inner_remaps.length;
+    if (unlikely (!c->allocate_size (size))) return_trace (false);
     format.set (1);
     if (unlikely (!regions.serialize (c, this)
 		  .serialize (c, &(src+src->regions)))) return_trace (false);
@@ -1908,9 +1910,6 @@
      * OffsetListOf::subset () can take a custom param to be passed to VarData::serialize () */
 
     dataSets.len.set (inner_remaps.length);
-    if (unlikely (!c->allocate_size (inner_remaps.length)))
-      return_trace (false);
-
     for (unsigned int i = 0; i < inner_remaps.length; i++)
     {
       if (unlikely (!dataSets[i].serialize (c, this)
diff --git a/src/hb-ot-var-hvar-table.hh b/src/hb-ot-var-hvar-table.hh
index 76378ad01..fa74a37a9 100644
--- a/src/hb-ot-var-hvar-table.hh
+++ b/src/hb-ot-var-hvar-table.hh
@@ -44,30 +44,32 @@ struct DeltaSetIndexMap
 	      get_width ()));
   }
 
-  bool serialize (hb_serialize_context_t *c,
-		  unsigned int inner_bit_count,
-		  unsigned int width,
-		  const hb_array_t<unsigned int> maps)
+  template <typename T>
+  bool serialize (hb_serialize_context_t *c, const T &plan)
   {
+    unsigned int width = plan.get_width ();
+    unsigned int inner_bit_count = plan.get_inner_bit_count ();
+    auto map = plan.get_output_map ();
+
     TRACE_SERIALIZE (this);
-    if (unlikely (maps.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
+    if (unlikely (map.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0))))
       return_trace (false);
     if (unlikely (!c->extend_min (*this))) return_trace (false);
 
     format.set (((width-1)<<4)|(inner_bit_count-1));
-    mapCount.set (maps.length);
-    HBUINT8 *p = c->allocate_size<HBUINT8> (width * maps.get_size ());
+    mapCount.set (map.length);
+    HBUINT8 *p = c->allocate_size<HBUINT8> (width * map.length);
     if (unlikely (!p)) return_trace (false);
-    for (unsigned int i = 0; i < maps.length; i++)
+    for (unsigned int i = 0; i < map.length; i++)
     {
-      unsigned int v = maps[i];
+      unsigned int v = map[i];
       unsigned int outer = v >> 16;
       unsigned int inner = v & 0xFFFF;
       unsigned int u = (outer << inner_bit_count)|inner;
       for (unsigned int w = width; w > 0;)
       {
-        p[--w].set (u);
-        u >>= 8;
+	p[--w].set (u);
+	u >>= 8;
       }
       p += width;
     }
@@ -93,7 +95,7 @@
     }
 
     { /* Repack it. */
-      unsigned int n = get_inner_bitcount ();
+      unsigned int n = get_inner_bit_count ();
       unsigned int outer = u >> n;
       unsigned int inner = u & ((1 << n) - 1);
       u = (outer<<16) | inner;
@@ -102,11 +104,9 @@
     return u;
   }
 
-  unsigned int get_map_count () const { return mapCount; }
-
-  unsigned int get_width () const { return ((format >> 4) & 3) + 1; }
-
-  unsigned int get_inner_bitcount () const { return (format & 0xF) + 1; }
+  unsigned int get_map_count () const       { return mapCount; }
+  unsigned int get_width () const           { return ((format >> 4) & 3) + 1; }
+  unsigned int get_inner_bit_count () const { return (format & 0xF) + 1; }
 
   protected:
   HBUINT16	format;		/* A packed field that describes the compressed
@@ -121,49 +121,52 @@
 
 struct index_map_subset_plan_t
 {
-  index_map_subset_plan_t (void) : map_count (0), outer_bit_count (0), inner_bit_count (0) {}
-  ~index_map_subset_plan_t (void) { fini (); }
-
   void init (const DeltaSetIndexMap &index_map,
 	     hb_vector_t &inner_remaps,
 	     const hb_subset_plan_t *plan)
   {
+    map_count = 0;
+    outer_bit_count = 0;
+    inner_bit_count = 0;
+    max_inners.init ();
+    output_map.init ();
+
     /* Identity map */
    if (&index_map == &Null(DeltaSetIndexMap)) return;
 
-    unsigned int last_map = (unsigned int)-1;
+    unsigned int last_val = (unsigned int)-1;
    hb_codepoint_t last_gid = (hb_codepoint_t)-1;
-    hb_codepoint_t i = (hb_codepoint_t)index_map.get_map_count ();
+    hb_codepoint_t gid = (hb_codepoint_t)index_map.get_map_count ();
 
-    outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bitcount ();
+    outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bit_count ();
    max_inners.resize (inner_remaps.length);
-    for (i = 0; i < inner_remaps.length; i++) max_inners[i] = 0;
+    for (unsigned i = 0; i < inner_remaps.length; i++) max_inners[i] = 0;
 
    /* Search backwards for a map value different from the last map value */
-    for (; i > 0; i--)
+    for (; gid > 0; gid--)
    {
      hb_codepoint_t old_gid;
-      if (!plan->old_gid_for_new_gid (i - 1, &old_gid))
+      if (!plan->old_gid_for_new_gid (gid - 1, &old_gid))
	continue;
 
      unsigned int v = index_map.map (old_gid);
      if (last_gid == (hb_codepoint_t)-1)
      {
-	last_map = v;
-	last_gid = i;
+	last_val = v;
+	last_gid = gid;
	continue;
      }
-      if (v != last_map) break;
+      if (v != last_val) break;
 
-      last_map = i;
+      last_gid = gid;
    }
 
-    map_count = last_map + 1;
-    for (unsigned int i = 0; i < map_count; i++)
+    map_count = last_gid + 1;
+    for (gid = 0; gid < map_count; gid++)
    {
      hb_codepoint_t old_gid;
-      if (!plan->old_gid_for_new_gid (i, &old_gid))
+      if (!plan->old_gid_for_new_gid (gid, &old_gid))
	continue;
      unsigned int v = index_map.map (old_gid);
      unsigned int outer = v >> 16;
@@ -173,11 +176,15 @@ struct index_map_subset_plan_t
     }
   }
 
-  void fini (void) {}
+  void fini (void)
+  {
+    max_inners.fini ();
+    output_map.fini ();
+  }
 
-  void remap (const DeltaSetIndexMap *input_map,
-	      const hb_vector_t &inner_remaps,
-	      hb_vector_t & output_map)
+  void remap (const hb_subset_plan_t *plan,
+	      const DeltaSetIndexMap *input_map,
+	      const hb_vector_t &inner_remaps)
   {
     for (unsigned int i = 0; i < max_inners.length; i++)
     {
@@ -186,27 +193,33 @@ struct index_map_subset_plan_t
     }
 
     output_map.resize (map_count);
-    for (unsigned int i = 0; i < output_map.length; i++)
+    for (hb_codepoint_t gid = 0; gid < output_map.length; gid++)
     {
-      unsigned int v = input_map->map (i);
+      hb_codepoint_t old_gid = 0;
+      (void)plan->old_gid_for_new_gid (gid, &old_gid);
+      unsigned int v = input_map->map (old_gid);
       unsigned int outer = v >> 16;
-      output_map[i] = (outer << 16) | (inner_remaps[outer][v & 0xFFFF]);
+      output_map[gid] = (outer << 16) | (inner_remaps[outer][v & 0xFFFF]);
     }
   }
 
-  unsigned int get_inner_bitcount (void) const { return inner_bit_count; }
+  unsigned int get_inner_bit_count (void) const { return inner_bit_count; }
   unsigned int get_width (void) const { return ((outer_bit_count + inner_bit_count + 7) / 8); }
 
   unsigned int get_map_count (void) const { return map_count; }
 
   unsigned int get_size (void) const
   { return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); }
 
+  hb_array_t<unsigned int> get_output_map (void) const { return output_map.as_array (); }
+
   protected:
   unsigned int map_count;
   hb_vector_t<unsigned int> max_inners;
   unsigned int outer_bit_count;
   unsigned int inner_bit_count;
+  hb_vector_t<unsigned int>
+		output_map;
 };
 
 struct hvarvvar_subset_plan_t
@@ -219,6 +232,7 @@
 		  const hb_subset_plan_t *plan)
   {
     index_map_plans.resize (index_maps.length);
+    var_store = &_var_store;
 
     inner_remaps.resize (var_store->get_sub_table_count ());
 
@@ -226,34 +240,24 @@
       inner_remaps[i].init ();
 
     for (unsigned int i = 0; i < index_maps.length; i++)
-    {
       index_map_plans[i].init (*index_maps[i], inner_remaps, plan);
-      index_map_subsets[i].init ();
-    }
 
     for (unsigned int i = 0; i < inner_remaps.length; i++)
-    {
       if (inner_remaps[i].get_count () > 0) inner_remaps[i].reorder ();
-    }
 
     for (unsigned int i = 0; i < index_maps.length; i++)
-    {
-      index_map_plans[i].remap (index_maps[i], inner_remaps, index_map_subsets[i]);
-    }
+      index_map_plans[i].remap (plan, index_maps[i], inner_remaps);
   }
 
   void fini (void)
   {
     inner_remaps.fini_deep ();
     index_map_plans.fini_deep ();
-    index_map_subsets.fini_deep ();
   }
 
   hb_vector_t inner_remaps;
   hb_vector_t
		index_map_plans;
-  hb_vector_t< hb_vector_t >
-		index_map_subsets;
 
   const VariationStore *var_store;
 };
@@ -291,25 +295,22 @@ struct HVARVVAR
   }
 
   bool serialize_index_maps (hb_serialize_context_t *c,
-			     const hb_array_t &im_plans,
-			     const hb_array_t > &im_subsets)
+			     const hb_array_t &im_plans)
   {
     TRACE_SUBSET (this);
 
-    if (unlikely (!advMap.serialize (c, this)
-		  .serialize (c, im_plans[ADV_INDEX].get_inner_bitcount (),
-			      im_plans[ADV_INDEX].get_width (),
-			      im_subsets[ADV_INDEX].as_array ())))
+    if (im_plans[ADV_INDEX].get_map_count () == 0)
+      advMap.set (0);
+    else if (unlikely (!advMap.serialize (c, this).serialize (c, im_plans[ADV_INDEX])))
       return_trace (false);
-    if (unlikely (!lsbMap.serialize (c, this)
-		  .serialize (c, im_plans[LSB_INDEX].get_inner_bitcount (),
-			      im_plans[LSB_INDEX].get_width (),
-			      im_subsets[LSB_INDEX].as_array ())))
+    if (im_plans[LSB_INDEX].get_map_count () == 0)
+      lsbMap.set (0);
+    else if (unlikely (!lsbMap.serialize (c, this).serialize (c, im_plans[LSB_INDEX])))
      return_trace (false);
-    if (unlikely (!rsbMap.serialize (c, this)
-		  .serialize (c, im_plans[RSB_INDEX].get_inner_bitcount (),
-			      im_plans[RSB_INDEX].get_width (),
-			      im_subsets[RSB_INDEX].as_array ())))
+    if (im_plans[RSB_INDEX].get_map_count () == 0)
+      rsbMap.set (0);
+    else if (unlikely (!rsbMap.serialize (c, this).serialize (c, im_plans[RSB_INDEX])))
      return_trace (false);
+
     return_trace (true);
   }
@@ -326,19 +327,18 @@
     index_maps.push (&(this+rsbMap));
     hvar_plan.init (index_maps.as_array (), this+varStore, c->plan);
 
-    T *out = c->serializer->embed (*(T*)this);
+    T *out = c->serializer->allocate_min<T> ();
     if (unlikely (!out)) return_trace (false);
 
     out->version.major.set (1);
     out->version.minor.set (0);
 
-    if (!unlikely (out->varStore.serialize (c->serializer, this)
+    if (!unlikely (out->varStore.serialize (c->serializer, out)
		   .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_remaps.as_array ())))
      return_trace (false);
 
     return_trace (out->T::serialize_index_maps (c->serializer,
-						hvar_plan.index_map_plans.as_array (),
-						hvar_plan.index_map_subsets.as_array ()));
+						hvar_plan.index_map_plans.as_array ()));
   }
 
   float get_advance_var (hb_codepoint_t glyph,
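
For reference on the index-map encoding this patch touches: the new
DeltaSetIndexMap::serialize () packs each entry as
(outer << inner_bit_count) | inner and writes it big-endian into `width'
bytes, while map () re-expands a stored entry to the (outer << 16) | inner
form the VariationStore delta lookup expects.  The standalone sketch below
(not part of the patch, and not HarfBuzz code; pack_entry/unpack_entry are
illustrative names) mirrors that round trip:

    // Illustrative only: simplified stand-ins for the HarfBuzz types.
    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // Pack an (outer, inner) pair into `width' big-endian bytes, using
    // `inner_bit_count' low bits for the inner index.
    static void pack_entry (std::vector<uint8_t> &out,
                            unsigned outer, unsigned inner,
                            unsigned width, unsigned inner_bit_count)
    {
      unsigned u = (outer << inner_bit_count) | inner;
      size_t base = out.size ();
      out.resize (base + width);
      for (unsigned w = width; w > 0;)
      {
        out[base + --w] = (uint8_t) u;   // same scheme as p[--w].set (u)
        u >>= 8;
      }
    }

    // Reverse of pack_entry: read `width' bytes and repack the value as
    // (outer << 16) | inner, as DeltaSetIndexMap::map () does.
    static unsigned unpack_entry (const uint8_t *p,
                                  unsigned width, unsigned inner_bit_count)
    {
      unsigned u = 0;
      for (unsigned i = 0; i < width; i++)
        u = (u << 8) | p[i];
      unsigned outer = u >> inner_bit_count;
      unsigned inner = u & ((1u << inner_bit_count) - 1);
      return (outer << 16) | inner;
    }

    int main ()
    {
      std::vector<uint8_t> bytes;
      pack_entry (bytes, /*outer*/ 2, /*inner*/ 300, /*width*/ 3, /*inner_bit_count*/ 12);
      unsigned v = unpack_entry (bytes.data (), 3, 12);
      printf ("outer=%u inner=%u\n", v >> 16, v & 0xFFFF);   // prints outer=2 inner=300
      return 0;
    }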