From 09df17e71b1860e250638e8e76deee1da5e7a06b Mon Sep 17 00:00:00 2001 From: Michiharu Ariza Date: Wed, 20 Feb 2019 15:48:29 -0800 Subject: [PATCH] subset HVAR Re-implemented & repurposed CFF:remap_t as hb_map2_t (moved to hb-ot-layout-common.hh) for two-way mapping for use by index map subsetting. Hooked up HVAR subsetter through _subset2. Some renaming in CFF code. --- src/hb-ot-cff-common.hh | 63 +--------- src/hb-ot-cff1-table.hh | 8 +- src/hb-ot-layout-common.hh | 158 +++++++++++++++++++++++- src/hb-ot-var-hvar-table.hh | 232 +++++++++++++++++++++++++++++++++++- src/hb-subset-cff-common.cc | 9 +- src/hb-subset-cff-common.hh | 27 ++--- src/hb-subset-cff1.cc | 29 ++--- src/hb-subset-cff2.cc | 12 +- src/hb-subset.cc | 4 + 9 files changed, 426 insertions(+), 116 deletions(-) diff --git a/src/hb-ot-cff-common.hh b/src/hb-ot-cff-common.hh index c645953e5..5e0082d20 100644 --- a/src/hb-ot-cff-common.hh +++ b/src/hb-ot-cff-common.hh @@ -104,7 +104,7 @@ struct CFFIndex else return min_size + calculate_offset_array_size (offSize, count) + dataSize; } - + bool serialize (hb_serialize_context_t *c, const CFFIndex &src) { TRACE_SERIALIZE (this); @@ -414,57 +414,6 @@ struct table_info_t unsigned int offSize; }; -/* used to remap font index or SID from fullset to subset. - * set to CFF_UNDEF_CODE if excluded from subset */ -struct remap_t : hb_vector_t -{ - void init () { SUPER::init (); } - - void fini () { SUPER::fini (); } - - bool reset (unsigned int size) - { - if (unlikely (!SUPER::resize (size))) - return false; - for (unsigned int i = 0; i < length; i++) - (*this)[i] = CFF_UNDEF_CODE; - count = 0; - return true; - } - - bool identity (unsigned int size) - { - if (unlikely (!SUPER::resize (size))) - return false; - unsigned int i; - for (i = 0; i < length; i++) - (*this)[i] = i; - count = i; - return true; - } - - bool excludes (hb_codepoint_t id) const - { return (id < length) && ((*this)[id] == CFF_UNDEF_CODE); } - - bool includes (hb_codepoint_t id) const - { return !excludes (id); } - - unsigned int add (unsigned int i) - { - if ((*this)[i] == CFF_UNDEF_CODE) - (*this)[i] = count++; - return (*this)[i]; - } - - hb_codepoint_t get_count () const { return count; } - - protected: - hb_codepoint_t count; - - private: - typedef hb_vector_t SUPER; -}; - template struct FDArray : CFFIndexOf { @@ -508,7 +457,7 @@ struct FDArray : CFFIndexOf unsigned int offSize_, const hb_vector_t &fontDicts, unsigned int fdCount, - const remap_t &fdmap, + const hb_map2_t &fdmap, OP_SERIALIZER& opszr, const hb_vector_t &privateInfos) { @@ -523,7 +472,7 @@ struct FDArray : CFFIndexOf unsigned int offset = 1; unsigned int fid = 0; for (unsigned i = 0; i < fontDicts.length; i++) - if (fdmap.includes (i)) + if (fdmap.has (i)) { CFFIndexOf::set_offset_at (fid++, offset); offset += FontDict::calculate_serialized_size (fontDicts[i], opszr); @@ -532,7 +481,7 @@ struct FDArray : CFFIndexOf /* serialize font dicts */ for (unsigned int i = 0; i < fontDicts.length; i++) - if (fdmap.includes (i)) + if (fdmap.has (i)) { FontDict *dict = c->start_embed (); if (unlikely (!dict->serialize (c, fontDicts[i], opszr, privateInfos[fdmap[i]]))) @@ -546,12 +495,12 @@ struct FDArray : CFFIndexOf static unsigned int calculate_serialized_size (unsigned int &offSize_ /* OUT */, const hb_vector_t &fontDicts, unsigned int fdCount, - const remap_t &fdmap, + const hb_map2_t &fdmap, OP_SERIALIZER& opszr) { unsigned int dictsSize = 0; for (unsigned int i = 0; i < fontDicts.len; i++) - if (fdmap.includes (i)) + if (fdmap.has (i)) dictsSize += 
FontDict::calculate_serialized_size (fontDicts[i], opszr); offSize_ = calcOffSize (dictsSize); diff --git a/src/hb-ot-cff1-table.hh b/src/hb-ot-cff1-table.hh index 9d39242cf..d65b2250a 100644 --- a/src/hb-ot-cff1-table.hh +++ b/src/hb-ot-cff1-table.hh @@ -570,7 +570,7 @@ struct Charset { struct CFF1StringIndex : CFF1Index { bool serialize (hb_serialize_context_t *c, const CFF1StringIndex &strings, - unsigned int offSize_, const remap_t &sidmap) + unsigned int offSize_, const hb_map2_t &sidmap) { TRACE_SERIALIZE (this); if (unlikely ((strings.count == 0) || (sidmap.get_count () == 0))) @@ -588,7 +588,7 @@ struct CFF1StringIndex : CFF1Index for (unsigned int i = 0; i < strings.count; i++) { hb_codepoint_t j = sidmap[i]; - if (j != CFF_UNDEF_CODE) + if (j != HB_MAP_VALUE_INVALID) bytesArray[j] = strings[i]; } @@ -598,7 +598,7 @@ struct CFF1StringIndex : CFF1Index } /* in parallel to above */ - unsigned int calculate_serialized_size (unsigned int &offSize /*OUT*/, const remap_t &sidmap) const + unsigned int calculate_serialized_size (unsigned int &offSize /*OUT*/, const hb_map2_t &sidmap) const { offSize = 0; if ((count == 0) || (sidmap.get_count () == 0)) @@ -606,7 +606,7 @@ struct CFF1StringIndex : CFF1Index unsigned int dataSize = 0; for (unsigned int i = 0; i < count; i++) - if (sidmap[i] != CFF_UNDEF_CODE) + if (sidmap[i] != HB_MAP_VALUE_INVALID) dataSize += length_at (i); offSize = calcOffSize(dataSize); diff --git a/src/hb-ot-layout-common.hh b/src/hb-ot-layout-common.hh index 9b172257b..c41865ff2 100644 --- a/src/hb-ot-layout-common.hh +++ b/src/hb-ot-layout-common.hh @@ -1586,6 +1586,82 @@ static inline void ClassDef_serialize (hb_serialize_context_t *c, hb_array_t klasses) { c->start_embed ()->serialize (c, glyphs, klasses); } +struct hb_map2_t +{ + hb_map2_t () { init (); } + ~hb_map2_t () { fini (); } + + void init (void) + { + count = 0; + old_to_new_map.init (); + new_to_old_map.init (); + set.init (); + } + + void fini (void) + { + old_to_new_map.fini (); + new_to_old_map.fini (); + set.fini (); + } + + bool has (hb_codepoint_t id) const { return set.has (id); } + + hb_codepoint_t add (hb_codepoint_t i) + { + hb_codepoint_t v = old_to_new_map[i]; + if (v == HB_MAP_VALUE_INVALID) + { + set.add (i); + v = count++; + old_to_new_map.set (i, v); + new_to_old_map.set (v, i); + } + return v; + } + + /* returns HB_MAP_VALUE_INVALID if unmapped */ + hb_codepoint_t operator [] (hb_codepoint_t i) const { return old_to_new (i); } + hb_codepoint_t old_to_new (hb_codepoint_t i) const { return old_to_new_map[i]; } + hb_codepoint_t new_to_old (hb_codepoint_t i) const { return new_to_old_map[i]; } + + bool identity (unsigned int size) + { + hb_codepoint_t i; + old_to_new_map.clear (); + new_to_old_map.clear (); + set.clear (); + for (i = 0; i < size; i++) + { + old_to_new_map.set (i, i); + new_to_old_map.set (i, i); + set.add (i); + } + count = i; + return old_to_new_map.successful && new_to_old_map.successful && set.successful; + } + + /* Optional: after finished adding all mappings in a random order, + * reorder outputs in the same order as the inputs. */ + void reorder (void) + { + for (hb_codepoint_t i = HB_SET_VALUE_INVALID, count = 0; set.next (&i); count++) + { + new_to_old_map.set (count, i); + old_to_new_map.set (i, count); + } + } + + unsigned int get_count () const { return count; } + unsigned int get_bits () const { return count? 
hb_bit_storage (count - 1): 0; } + + protected: + unsigned int count; + hb_map_t old_to_new_map; + hb_map_t new_to_old_map; + hb_set_t set; +}; /* * Item Variation Store @@ -1662,6 +1738,15 @@ struct VarRegionList axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount)); } + bool serialize (hb_serialize_context_t *c, const VarRegionList *src) + { + TRACE_SERIALIZE (this); + if (unlikely (!c->allocate_size (src->get_size ()))) return_trace (false); + memcpy (this, src, src->get_size ()); + return_trace (true); + } + + unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; } unsigned int get_region_count () const { return regionCount; } protected: @@ -1678,9 +1763,6 @@ struct VarData unsigned int get_region_index_count () const { return regionIndices.len; } - unsigned int get_row_size () const - { return shortCount + regionIndices.len; } - unsigned int get_size () const { return itemCount * get_row_size (); } @@ -1694,7 +1776,7 @@ struct VarData unsigned int count = regionIndices.len; unsigned int scount = shortCount; - const HBUINT8 *bytes = &StructAfter (regionIndices); + const HBUINT8 *bytes = get_delta_bytes (); const HBUINT8 *row = bytes + inner * (scount + count); float delta = 0.; @@ -1734,11 +1816,45 @@ struct VarData return_trace (c->check_struct (this) && regionIndices.sanitize (c) && shortCount <= regionIndices.len && - c->check_range (&StructAfter (regionIndices), + c->check_range (get_delta_bytes (), itemCount, get_row_size ())); } + bool serialize (hb_serialize_context_t *c, + const VarData *src, + const hb_map2_t &remap) + { + TRACE_SUBSET (this); + if (unlikely (!c->extend_min (*this))) return_trace (false); + itemCount.set (remap.get_count ()); + shortCount.set (src->shortCount); + + unsigned int row_size = src->get_row_size (); + if (unlikely (!c->allocate_size (src->regionIndices.get_size () + row_size * remap.get_count ()))) + return_trace (false); + + memcpy (®ionIndices, &src->regionIndices, src->regionIndices.get_size ()); + HBUINT8 *p = get_delta_bytes (); + for (unsigned int i = 0; i < remap.get_count (); i++) + { + memcpy (p, src->get_delta_bytes () + remap.new_to_old (i) * row_size, row_size); + p += row_size; + } + + return_trace (true); + } + + protected: + unsigned int get_row_size () const + { return shortCount + regionIndices.len; } + + const HBUINT8 *get_delta_bytes () const + { return &StructAfter (regionIndices); } + + HBUINT8 *get_delta_bytes () + { return &StructAfter (regionIndices); } + protected: HBUINT16 itemCount; HBUINT16 shortCount; @@ -1778,6 +1894,33 @@ struct VariationStore dataSets.sanitize (c, this)); } + bool serialize (hb_serialize_context_t *c, + const VariationStore *src, + const hb_array_t &inner_remaps) + { + TRACE_SUBSET (this); + if (unlikely (!c->extend_min (*this))) return_trace (false); + format.set (1); + if (unlikely (!regions.serialize (c, this) + .serialize (c, &(src+src->regions)))) return_trace (false); + + /* TODO: The following code could be simplified when + * OffsetListOf::subset () can take a custom param to be passed to VarData::serialize () + */ + dataSets.len.set (inner_remaps.length); + if (unlikely (!c->allocate_size (inner_remaps.length))) + return_trace (false); + + for (unsigned int i = 0; i < inner_remaps.length; i++) + { + if (unlikely (!dataSets[i].serialize (c, this) + .serialize (c, &(src+src->dataSets[i]), inner_remaps[i]))) + return_trace (false); + } + + return_trace (true); + } + unsigned int get_region_index_count (unsigned int ivs) 
const { return (this+dataSets[ivs]).get_region_index_count (); } @@ -1790,6 +1933,10 @@ struct VariationStore &scalars[0], num_scalars); } + const VarRegionList &get_regions () const { return this+regions; } + + unsigned int get_sub_table_count () const { return dataSets.len; } + protected: HBUINT16 format; LOffsetTo regions; @@ -2171,7 +2318,6 @@ struct Device DEFINE_SIZE_UNION (6, b); }; - } /* namespace OT */ diff --git a/src/hb-ot-var-hvar-table.hh b/src/hb-ot-var-hvar-table.hh index a8d9fe3c8..76378ad01 100644 --- a/src/hb-ot-var-hvar-table.hh +++ b/src/hb-ot-var-hvar-table.hh @@ -44,6 +44,36 @@ struct DeltaSetIndexMap get_width ())); } + bool serialize (hb_serialize_context_t *c, + unsigned int inner_bit_count, + unsigned int width, + const hb_array_t maps) + { + TRACE_SERIALIZE (this); + if (unlikely (maps.length && ((((inner_bit_count-1)&~0xF)!=0) || (((width-1)&~0x3)!=0)))) + return_trace (false); + if (unlikely (!c->extend_min (*this))) return_trace (false); + + format.set (((width-1)<<4)|(inner_bit_count-1)); + mapCount.set (maps.length); + HBUINT8 *p = c->allocate_size (width * maps.get_size ()); + if (unlikely (!p)) return_trace (false); + for (unsigned int i = 0; i < maps.length; i++) + { + unsigned int v = maps[i]; + unsigned int outer = v >> 16; + unsigned int inner = v & 0xFFFF; + unsigned int u = (outer << inner_bit_count)|inner; + for (unsigned int w = width; w > 0;) + { + p[--w].set (u); + u >>= 8; + } + p += width; + } + return_trace (true); + } + unsigned int map (unsigned int v) const /* Returns 16.16 outer.inner. */ { /* If count is zero, pass value unchanged. This takes @@ -72,7 +102,8 @@ struct DeltaSetIndexMap return u; } - protected: + unsigned int get_map_count () const { return mapCount; } + unsigned int get_width () const { return ((format >> 4) & 3) + 1; } unsigned int get_inner_bitcount () const { return (format & 0xF) + 1; } @@ -88,6 +119,143 @@ struct DeltaSetIndexMap DEFINE_SIZE_ARRAY (4, mapDataZ); }; +struct index_map_subset_plan_t +{ + index_map_subset_plan_t (void) : map_count (0), outer_bit_count (0), inner_bit_count (0) {} + ~index_map_subset_plan_t (void) { fini (); } + + void init (const DeltaSetIndexMap &index_map, + hb_vector_t &inner_remaps, + const hb_subset_plan_t *plan) + { + /* Identity map */ + if (&index_map == &Null(DeltaSetIndexMap)) + return; + + unsigned int last_map = (unsigned int)-1; + hb_codepoint_t last_gid = (hb_codepoint_t)-1; + hb_codepoint_t i = (hb_codepoint_t)index_map.get_map_count (); + + outer_bit_count = (index_map.get_width () * 8) - index_map.get_inner_bitcount (); + max_inners.resize (inner_remaps.length); + for (i = 0; i < inner_remaps.length; i++) max_inners[i] = 0; + + /* Search backwards for a map value different from the last map value */ + for (; i > 0; i--) + { + hb_codepoint_t old_gid; + if (!plan->old_gid_for_new_gid (i - 1, &old_gid)) + continue; + + unsigned int v = index_map.map (old_gid); + if (last_gid == (hb_codepoint_t)-1) + { + last_map = v; + last_gid = i; + continue; + } + if (v != last_map) break; + + last_map = i; + } + + map_count = last_map + 1; + for (unsigned int i = 0; i < map_count; i++) + { + hb_codepoint_t old_gid; + if (!plan->old_gid_for_new_gid (i, &old_gid)) + continue; + unsigned int v = index_map.map (old_gid); + unsigned int outer = v >> 16; + unsigned int inner = v & 0xFFFF; + if (inner > max_inners[outer]) max_inners[outer] = inner; + inner_remaps[outer].add (inner); + } + } + + void fini (void) {} + + void remap (const DeltaSetIndexMap *input_map, + const hb_vector_t 
&inner_remaps, + hb_vector_t & output_map) + { + for (unsigned int i = 0; i < max_inners.length; i++) + { + unsigned int bit_count = hb_bit_storage (inner_remaps[i][max_inners[i]]); + if (bit_count > inner_bit_count) inner_bit_count = bit_count; + } + + output_map.resize (map_count); + for (unsigned int i = 0; i < output_map.length; i++) + { + unsigned int v = input_map->map (i); + unsigned int outer = v >> 16; + output_map[i] = (outer << 16) | (inner_remaps[outer][v & 0xFFFF]); + } + } + + unsigned int get_inner_bitcount (void) const { return inner_bit_count; } + unsigned int get_width (void) const { return ((outer_bit_count + inner_bit_count + 7) / 8); } + unsigned int get_map_count (void) const { return map_count; } + + unsigned int get_size (void) const + { return (map_count? (DeltaSetIndexMap::min_size + get_width () * map_count): 0); } + + protected: + unsigned int map_count; + hb_vector_t + max_inners; + unsigned int outer_bit_count; + unsigned int inner_bit_count; +}; + +struct hvarvvar_subset_plan_t +{ + hvarvvar_subset_plan_t() : inner_remaps (), index_map_plans () {} + ~hvarvvar_subset_plan_t() { fini (); } + + void init (const hb_array_t &index_maps, + const VariationStore &_var_store, + const hb_subset_plan_t *plan) + { + index_map_plans.resize (index_maps.length); + var_store = &_var_store; + inner_remaps.resize (var_store->get_sub_table_count ()); + + for (unsigned int i = 0; i < inner_remaps.length; i++) + inner_remaps[i].init (); + + for (unsigned int i = 0; i < index_maps.length; i++) + { + index_map_plans[i].init (*index_maps[i], inner_remaps, plan); + index_map_subsets[i].init (); + } + + for (unsigned int i = 0; i < inner_remaps.length; i++) + { + if (inner_remaps[i].get_count () > 0) inner_remaps[i].reorder (); + } + + for (unsigned int i = 0; i < index_maps.length; i++) + { + index_map_plans[i].remap (index_maps[i], inner_remaps, index_map_subsets[i]); + } + } + + void fini (void) + { + inner_remaps.fini_deep (); + index_map_plans.fini_deep (); + index_map_subsets.fini_deep (); + } + + hb_vector_t inner_remaps; + hb_vector_t + index_map_plans; + hb_vector_t< hb_vector_t > + index_map_subsets; + const VariationStore *var_store; +}; /* * HVAR -- Horizontal Metrics Variations @@ -103,6 +271,14 @@ struct HVARVVAR static constexpr hb_tag_t HVARTag = HB_OT_TAG_HVAR; static constexpr hb_tag_t VVARTag = HB_OT_TAG_VVAR; + enum index_map_index_t { + ADV_INDEX, + LSB_INDEX, + RSB_INDEX, + TSB_INDEX, + VORG_INDEX + }; + bool sanitize (hb_sanitize_context_t *c) const { TRACE_SANITIZE (this); @@ -114,6 +290,57 @@ struct HVARVVAR rsbMap.sanitize (c, this)); } + bool serialize_index_maps (hb_serialize_context_t *c, + const hb_array_t &im_plans, + const hb_array_t > &im_subsets) + { + TRACE_SUBSET (this); + if (unlikely (!advMap.serialize (c, this) + .serialize (c, im_plans[ADV_INDEX].get_inner_bitcount (), + im_plans[ADV_INDEX].get_width (), + im_subsets[ADV_INDEX].as_array ()))) + return_trace (false); + if (unlikely (!lsbMap.serialize (c, this) + .serialize (c, im_plans[LSB_INDEX].get_inner_bitcount (), + im_plans[LSB_INDEX].get_width (), + im_subsets[LSB_INDEX].as_array ()))) + return_trace (false); + if (unlikely (!rsbMap.serialize (c, this) + .serialize (c, im_plans[RSB_INDEX].get_inner_bitcount (), + im_plans[RSB_INDEX].get_width (), + im_subsets[RSB_INDEX].as_array ()))) + return_trace (false); + return_trace (true); + } + + template + bool _subset (hb_subset_context_t *c) const + { + TRACE_SUBSET (this); + hvarvvar_subset_plan_t hvar_plan; + hb_vector_t + index_maps; + + 
index_maps.push (&(this+advMap)); + index_maps.push (&(this+lsbMap)); + index_maps.push (&(this+rsbMap)); + hvar_plan.init (index_maps.as_array (), this+varStore, c->plan); + + T *out = c->serializer->embed (*(T*)this); + if (unlikely (!out)) return_trace (false); + + out->version.major.set (1); + out->version.minor.set (0); + + if (!unlikely (out->varStore.serialize (c->serializer, this) + .serialize (c->serializer, hvar_plan.var_store, hvar_plan.inner_remaps.as_array ()))) + return_trace (false); + + return_trace (out->T::serialize_index_maps (c->serializer, + hvar_plan.index_map_plans.as_array (), + hvar_plan.index_map_subsets.as_array ())); + } + float get_advance_var (hb_codepoint_t glyph, const int *coords, unsigned int coord_count) const { @@ -141,6 +368,7 @@ struct HVARVVAR struct HVAR : HVARVVAR { static constexpr hb_tag_t tableTag = HB_OT_TAG_HVAR; + bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset (c); } }; struct VVAR : HVARVVAR { static constexpr hb_tag_t tableTag = HB_OT_TAG_VVAR; @@ -152,6 +380,8 @@ struct VVAR : HVARVVAR { vorgMap.sanitize (c, this)); } + bool subset (hb_subset_context_t *c) const { return HVARVVAR::_subset (c); } + protected: LOffsetTo vorgMap; /* Offset to vertical-origin var-idx mapping. */ diff --git a/src/hb-subset-cff-common.cc b/src/hb-subset-cff-common.cc index f29937a22..a7e05fe03 100644 --- a/src/hb-subset-cff-common.cc +++ b/src/hb-subset-cff-common.cc @@ -50,7 +50,7 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan, unsigned int &subset_fdselect_size /* OUT */, unsigned int &subset_fdselect_format /* OUT */, hb_vector_t &fdselect_ranges /* OUT */, - remap_t &fdmap /* OUT */) + hb_map2_t &fdmap /* OUT */) { subset_fd_count = 0; subset_fdselect_size = 0; @@ -97,13 +97,6 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan, } else { - /* create a fdmap */ - if (!fdmap.reset (fdCount)) - { - hb_set_destroy (set); - return false; - } - hb_codepoint_t fd = CFF_UNDEF_CODE; while (set->next (&fd)) fdmap.add (fd); diff --git a/src/hb-subset-cff-common.hh b/src/hb-subset-cff-common.hh index 81368ee4c..31280d594 100644 --- a/src/hb-subset-cff-common.hh +++ b/src/hb-subset-cff-common.hh @@ -541,19 +541,18 @@ struct subr_subset_param_t bool drop_hints; }; -struct subr_remap_t : remap_t +struct subr_remap_t : hb_map2_t { void create (hb_set_t *closure) { /* create a remapping of subroutine numbers from old to new. * no optimization based on usage counts. fonttools doesn't appear doing that either. */ - reset (closure->get_max () + 1); - for (hb_codepoint_t old_num = 0; old_num < length; old_num++) - { - if (hb_set_has (closure, old_num)) - add (old_num); - } + hb_codepoint_t max = closure->get_max (); + if (max != HB_MAP_VALUE_INVALID) + for (hb_codepoint_t old_num = 0; old_num <= max; old_num++) + if (closure->has (old_num)) + add (old_num); if (get_count () < 1240) bias = 107; @@ -563,14 +562,6 @@ struct subr_remap_t : remap_t bias = 32768; } - hb_codepoint_t operator[] (unsigned int old_num) const - { - if (old_num >= length) - return CFF_UNDEF_CODE; - else - return remap_t::operator[] (old_num); - } - int biased_num (unsigned int old_num) const { hb_codepoint_t new_num = (*this)[old_num]; @@ -687,8 +678,8 @@ struct subr_subsetter_t if (unlikely (!interp.interpret (param))) return false; - /* finalize parsed string esp. copy CFF1 width or CFF2 vsindex to the parsed charstring for encoding */ - SUBSETTER::finalize_parsed_str (interp.env, param, parsed_charstrings[i]); + /* complete parsed string esp. 
copy CFF1 width or CFF2 vsindex to the parsed charstring for encoding */ + SUBSETTER::complete_parsed_str (interp.env, param, parsed_charstrings[i]); } if (plan->drop_hints) @@ -1021,7 +1012,7 @@ hb_plan_subset_cff_fdselect (const hb_subset_plan_t *plan, unsigned int &subset_fdselect_size /* OUT */, unsigned int &subset_fdselect_format /* OUT */, hb_vector_t &fdselect_ranges /* OUT */, - CFF::remap_t &fdmap /* OUT */); + CFF::hb_map2_t &fdmap /* OUT */); HB_INTERNAL bool hb_serialize_cff_fdselect (hb_serialize_context_t *c, diff --git a/src/hb-subset-cff1.cc b/src/hb-subset-cff1.cc index 49ac0bf42..05ac471ee 100644 --- a/src/hb-subset-cff1.cc +++ b/src/hb-subset-cff1.cc @@ -34,12 +34,12 @@ using namespace CFF; -struct remap_sid_t : remap_t +struct remap_sid_t : hb_map2_t { unsigned int add (unsigned int sid) { if ((sid != CFF_UNDEF_SID) && !is_std_std (sid)) - return offset_sid (remap_t::add (unoffset_sid (sid))); + return offset_sid (hb_map2_t::add (unoffset_sid (sid))); else return sid; } @@ -49,7 +49,7 @@ struct remap_sid_t : remap_t if (is_std_std (sid) || (sid == CFF_UNDEF_SID)) return sid; else - return offset_sid (remap_t::operator [] (unoffset_sid (sid))); + return offset_sid (hb_map2_t::operator [] (unoffset_sid (sid))); } static const unsigned int num_std_strings = 391; @@ -326,7 +326,7 @@ struct cff1_cs_opset_flatten_t : cff1_cs_opset_t { /* replace the first glyph ID in the "glyph" field each range with a nLeft value */ - bool finalize (unsigned int last_glyph) + bool complete (unsigned int last_glyph) { bool two_byte = false; for (unsigned int i = (*this).length; i > 0; i--) @@ -397,7 +397,7 @@ struct cff1_subr_subsetter_t : subr_subsetter_tnum_output_glyphs () - 1); if (!two_byte) @@ -577,9 +577,6 @@ struct cff_subset_plan { bool collect_sids_in_dicts (const OT::cff1::accelerator_subset_t &acc) { - if (unlikely (!sidmap.reset (acc.stringIndex->count))) - return false; - for (unsigned int i = 0; i < name_dict_values_t::ValCount; i++) { unsigned int sid = acc.topDict.nameSIDs[i]; @@ -592,7 +589,7 @@ struct cff_subset_plan { if (acc.fdArray != &Null(CFF1FDArray)) for (unsigned int i = 0; i < orig_fdcount; i++) - if (fdmap.includes (i)) + if (fdmap.has (i)) (void)sidmap.add (acc.fontDicts[i].fontName); return true; @@ -735,7 +732,7 @@ struct cff_subset_plan { { subset_localsubrs[fd].init (); offsets.localSubrsInfos[fd].init (); - if (fdmap.includes (fd)) + if (fdmap.has (fd)) { if (!subr_subsetter.encode_localsubrs (fd, subset_localsubrs[fd])) return false; @@ -786,7 +783,7 @@ struct cff_subset_plan { cff1_font_dict_op_serializer_t fontSzr; unsigned int dictsSize = 0; for (unsigned int i = 0; i < acc.fontDicts.length; i++) - if (fdmap.includes (i)) + if (fdmap.has (i)) dictsSize += FontDict::calculate_serialized_size (acc.fontDicts[i], fontSzr); offsets.FDArrayInfo.offSize = calcOffSize (dictsSize); @@ -809,7 +806,7 @@ struct cff_subset_plan { offsets.privateDictInfo.offset = final_size; for (unsigned int i = 0; i < orig_fdcount; i++) { - if (fdmap.includes (i)) + if (fdmap.has (i)) { bool has_localsubrs = offsets.localSubrsInfos[i].size > 0; cff_private_dict_op_serializer_t privSzr (desubroutinize, plan->drop_hints); @@ -853,7 +850,7 @@ struct cff_subset_plan { /* font dict index remap table from fullset FDArray to subset FDArray. 
* set to CFF_UNDEF_CODE if excluded from subset */ - remap_t fdmap; + hb_map2_t fdmap; str_buff_vec_t subset_charstrings; str_buff_vec_t subset_globalsubrs; @@ -1030,7 +1027,7 @@ static inline bool _write_cff1 (const cff_subset_plan &plan, assert (plan.offsets.privateDictInfo.offset == (unsigned) (c.head - c.start)); for (unsigned int i = 0; i < acc.privateDicts.length; i++) { - if (plan.fdmap.includes (i)) + if (plan.fdmap.has (i)) { PrivateDict *pd = c.start_embed (); if (unlikely (pd == nullptr)) return false; diff --git a/src/hb-subset-cff2.cc b/src/hb-subset-cff2.cc index bf76a3ede..1a160d489 100644 --- a/src/hb-subset-cff2.cc +++ b/src/hb-subset-cff2.cc @@ -228,7 +228,7 @@ struct cff2_subr_subsetter_t : subr_subsetter_t 0; cff_private_dict_op_serializer_t privSzr (desubroutinize, drop_hints); @@ -427,7 +427,7 @@ struct cff2_subset_plan { unsigned int subset_fdselect_format; hb_vector_t subset_fdselect_ranges; - remap_t fdmap; + hb_map2_t fdmap; str_buff_vec_t subset_charstrings; str_buff_vec_t subset_globalsubrs; @@ -537,7 +537,7 @@ static inline bool _write_cff2 (const cff2_subset_plan &plan, assert (plan.offsets.privateDictsOffset == (unsigned) (c.head - c.start)); for (unsigned int i = 0; i < acc.privateDicts.length; i++) { - if (plan.fdmap.includes (i)) + if (plan.fdmap.has (i)) { PrivateDict *pd = c.start_embed (); if (unlikely (pd == nullptr)) return false; diff --git a/src/hb-subset.cc b/src/hb-subset.cc index 135265fc1..fa6e4d9f3 100644 --- a/src/hb-subset.cc +++ b/src/hb-subset.cc @@ -45,6 +45,7 @@ #include "hb-ot-vorg-table.hh" #include "hb-ot-layout-gsub-table.hh" #include "hb-ot-layout-gpos-table.hh" +#include "hb-ot-var-hvar-table.hh" static unsigned int @@ -198,6 +199,9 @@ _subset_table (hb_subset_plan_t *plan, case HB_OT_TAG_GPOS: result = _subset2 (plan); break; + case HB_OT_TAG_HVAR: + result = _subset2 (plan); + break; default: hb_blob_t *source_table = hb_face_reference_table (plan->source, tag);
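
Illustrative note, not part of the patch: the hb_map2_t added in hb-ot-layout-common.hh above pairs a forward old-to-new map with a reverse new-to-old map plus a set of live keys, so a subsetter can both ask for the compact index of an original item and walk the subset output back to the original indices (reorder () renumbers so new indices follow ascending old order). The standalone C++ sketch below mirrors that behaviour with standard containers only; the name bimap_sketch_t and everything in it are hypothetical, and none of it is HarfBuzz API.

// Standalone sketch of the two-way (old index <-> new index) mapping idea
// behind hb_map2_t.  Standard library only; not HarfBuzz code.
#include <cstdint>
#include <cstdio>
#include <map>
#include <unordered_map>
#include <vector>

struct bimap_sketch_t                       /* hypothetical name */
{
  static constexpr uint32_t INVALID = 0xFFFFFFFFu;

  bool has (uint32_t old_id) const { return old_to_new.count (old_id) != 0; }

  /* Assign the next compact new index to old_id, or return the one it already has. */
  uint32_t add (uint32_t old_id)
  {
    auto it = old_to_new.find (old_id);
    if (it != old_to_new.end ()) return it->second;
    uint32_t new_id = (uint32_t) new_to_old.size ();
    old_to_new[old_id] = new_id;
    new_to_old.push_back (old_id);
    return new_id;
  }

  uint32_t to_new (uint32_t old_id) const
  {
    auto it = old_to_new.find (old_id);
    return it == old_to_new.end () ? INVALID : it->second;
  }

  uint32_t to_old (uint32_t new_id) const
  { return new_id < new_to_old.size () ? new_to_old[new_id] : INVALID; }

  /* After adding keys in arbitrary order, renumber so that new indices
   * follow ascending old-index order (what hb_map2_t::reorder () does). */
  void reorder ()
  {
    std::map<uint32_t, uint32_t> sorted (old_to_new.begin (), old_to_new.end ());
    uint32_t next = 0;
    for (auto &p : sorted)
    {
      old_to_new[p.first] = next;
      new_to_old[next] = p.first;
      next++;
    }
  }

  uint32_t get_count () const { return (uint32_t) new_to_old.size (); }

  std::unordered_map<uint32_t, uint32_t> old_to_new;
  std::vector<uint32_t>                  new_to_old;
};

int main ()
{
  /* Suppose only FDs 7, 2 and 5 of a CFF font survive subsetting,
   * discovered in that order while scanning retained glyphs. */
  bimap_sketch_t fdmap;
  fdmap.add (7); fdmap.add (2); fdmap.add (5);
  fdmap.reorder ();   /* new order now follows old order: 2, 5, 7 */
  for (unsigned n = 0; n < fdmap.get_count (); n++)
    printf ("new %u <- old %u\n", n, (unsigned) fdmap.to_old (n));
  return 0;
}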
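
Illustrative note, not part of the patch: the new DeltaSetIndexMap::serialize () above packs each remapped entry as u = (outer << inner_bit_count) | inner into width big-endian bytes, and records both sizes in the format field as ((width - 1) << 4) | (inner_bit_count - 1); DeltaSetIndexMap::map () reverses the process. Below is a minimal sketch of just that packing arithmetic, with hypothetical helper names pack_entry and unpack_entry (these are not HarfBuzz functions).

// Sketch of the DeltaSetIndexMap entry encoding used in the patch.
#include <cstdint>
#include <cstdio>

/* Write (outer, inner) as one big-endian entry of `width` bytes. */
static void pack_entry (uint8_t *p, unsigned outer, unsigned inner,
                        unsigned width, unsigned inner_bit_count)
{
  uint32_t u = (outer << inner_bit_count) | inner;
  for (unsigned w = width; w > 0; )   /* fill from the last byte: big-endian */
  {
    p[--w] = (uint8_t) u;
    u >>= 8;
  }
}

/* Inverse of pack_entry: recover the outer/inner indices from the bytes. */
static void unpack_entry (const uint8_t *p, unsigned width, unsigned inner_bit_count,
                          unsigned *outer, unsigned *inner)
{
  uint32_t u = 0;
  for (unsigned i = 0; i < width; i++)
    u = (u << 8) + p[i];
  *outer = u >> inner_bit_count;
  *inner = u & ((1u << inner_bit_count) - 1);
}

int main ()
{
  /* e.g. outer (VarData) index 1, inner row 5, 6 inner bits: one byte suffices */
  uint8_t buf[4] = {0};
  unsigned width = 1, inner_bits = 6;
  pack_entry (buf, 1, 5, width, inner_bits);
  unsigned outer, inner;
  unpack_entry (buf, width, inner_bits, &outer, &inner);
  printf ("byte 0x%02X -> outer %u, inner %u\n", buf[0], outer, inner);
  return 0;
}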