Merge pull request #1807 from harfbuzz/subset-varstore

VariationStore serializer
This commit is contained in:
Behdad Esfahbod 2019-09-30 13:34:05 -07:00 committed by GitHub
commit faace9437e
2 changed files with 187 additions and 3 deletions

src/hb-bimap.hh

@@ -94,6 +94,14 @@ struct hb_bimap_t
/* Incremental bimap: only lhs is given, rhs is incrementally assigned */
struct hb_inc_bimap_t : hb_bimap_t
{
hb_inc_bimap_t () { init (); }
void init ()
{
hb_bimap_t::init ();
next_value = 0;
}
/* Add a mapping from lhs to rhs with a unique value if lhs is unknown.
* Return the rhs value as the result.
*/
@@ -102,12 +110,24 @@ struct hb_inc_bimap_t : hb_bimap_t
hb_codepoint_t rhs = forw_map[lhs];
if (rhs == HB_MAP_VALUE_INVALID)
{
- rhs = get_population ();
+ rhs = next_value++;
set (lhs, rhs);
}
return rhs;
}
hb_codepoint_t skip ()
{ return next_value++; }
hb_codepoint_t get_next_value () const
{ return next_value; }
void add_set (const hb_set_t *set)
{
hb_codepoint_t i = HB_SET_VALUE_INVALID;
while (hb_set_next (set, &i)) add (i);
}
/* Create an identity map. */
bool identity (unsigned int size)
{
@@ -138,6 +158,9 @@ struct hb_inc_bimap_t : hb_bimap_t
for (hb_codepoint_t rhs = 0; rhs < count; rhs++)
set (work[rhs], rhs);
}
protected:
unsigned int next_value;
};
#endif /* HB_BIMAP_HH */
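
For orientation, here is a minimal usage sketch of the extended hb_inc_bimap_t (not part of the patch). It compiles only inside the HarfBuzz source tree, and it assumes hb_bimap_t pairs the init () shown above with a matching fini (). The example mirrors how the VariationStore serializer below compacts sparse old row indices into a dense, order-preserving range of new indices.

#include <stdio.h>
#include "hb-bimap.hh"

/* Illustrative only: remap a sparse set of old row indices to a dense,
 * order-preserving range of new indices. */
static void
remap_example ()
{
  hb_inc_bimap_t inner_map;
  inner_map.init ();

  inner_map.add (17);   /* first unseen lhs gets rhs 0 */
  inner_map.add (4);    /* next unseen lhs gets rhs 1 */
  inner_map.add (17);   /* already mapped: returns 0 again */

  /* get_next_value () doubles as the row count of the compacted table. */
  unsigned int new_count = inner_map.get_next_value ();  /* == 2 */

  /* sort () renumbers rhs so retained lhs values keep their relative order:
   * afterwards backward (0) == 4 and backward (1) == 17. */
  inner_map.sort ();

  for (unsigned int i = 0; i < new_count; i++)
    printf ("new row %u <- old row %u\n", i, (unsigned int) inner_map.backward (i));

  /* skip () (not used here) just burns the next rhs value, reserving a
   * row that has no lhs of its own. */

  inner_map.fini ();  /* assumed to exist alongside init () */
}
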

src/hb-ot-layout-common.hh

@@ -33,6 +33,7 @@
#include "hb-ot-layout.hh"
#include "hb-open-type.hh"
#include "hb-set.hh"
#include "hb-bimap.hh"
#ifndef HB_MAX_NESTING_LEVEL
@@ -1691,6 +1692,21 @@ struct VarRegionList
axesZ.sanitize (c, (unsigned int) axisCount * (unsigned int) regionCount));
}
bool serialize (hb_serialize_context_t *c, const VarRegionList *src, const hb_bimap_t &region_map)
{
TRACE_SERIALIZE (this);
VarRegionList *out = c->allocate_min<VarRegionList> ();
if (unlikely (!out)) return_trace (false);
axisCount = src->axisCount;
regionCount = region_map.get_population ();
if (unlikely (!c->allocate_size<VarRegionList> (get_size () - min_size))) return_trace (false);
for (unsigned int r = 0; r < regionCount; r++)
memcpy (&axesZ[axisCount * r], &src->axesZ[axisCount * region_map.backward (r)], VarRegionAxis::static_size * axisCount);
return_trace (true);
}
unsigned int get_size () const { return min_size + VarRegionAxis::static_size * axisCount * regionCount; }
unsigned int get_region_count () const { return regionCount; }
protected:
@@ -1723,7 +1739,7 @@ struct VarData
unsigned int count = regionIndices.len;
unsigned int scount = shortCount;
- const HBUINT8 *bytes = &StructAfter<HBUINT8> (regionIndices);
+ const HBUINT8 *bytes = get_delta_bytes ();
const HBUINT8 *row = bytes + inner * (scount + count);
float delta = 0.;
@@ -1763,11 +1779,117 @@ struct VarData
return_trace (c->check_struct (this) &&
regionIndices.sanitize (c) &&
shortCount <= regionIndices.len &&
- c->check_range (&StructAfter<HBUINT8> (regionIndices),
+ c->check_range (get_delta_bytes (),
itemCount,
get_row_size ()));
}
bool serialize (hb_serialize_context_t *c,
const VarData *src,
const hb_inc_bimap_t &inner_map,
const hb_bimap_t &region_map)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
itemCount = inner_map.get_next_value ();
/* Optimize short count */
unsigned short ri_count = src->regionIndices.len;
enum delta_size_t { kZero=0, kByte, kShort };
hb_vector_t<delta_size_t> delta_sz;
hb_vector_t<unsigned int> ri_map; /* maps old index to new index */
delta_sz.resize (ri_count);
ri_map.resize (ri_count);
unsigned int new_short_count = 0;
unsigned int r;
for (r = 0; r < ri_count; r++)
{
delta_sz[r] = kZero;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
{
unsigned int old = inner_map.backward (i);
int16_t delta = src->get_item_delta (old, r);
if (delta < -128 || 127 < delta)
{
delta_sz[r] = kShort;
new_short_count++;
break;
}
else if (delta != 0)
delta_sz[r] = kByte;
}
}
unsigned int short_index = 0;
unsigned int byte_index = new_short_count;
unsigned int new_ri_count = 0;
for (r = 0; r < ri_count; r++)
if (delta_sz[r])
{
ri_map[r] = (delta_sz[r] == kShort)? short_index++ : byte_index++;
new_ri_count++;
}
shortCount = new_short_count;
regionIndices.len = new_ri_count;
unsigned int size = regionIndices.get_size () - HBUINT16::static_size/*regionIndices.len*/ + (get_row_size () * itemCount);
if (unlikely (!c->allocate_size<HBUINT8> (size)))
return_trace (false);
for (r = 0; r < ri_count; r++)
if (delta_sz[r]) regionIndices[ri_map[r]] = region_map[src->regionIndices[r]];
for (unsigned int i = 0; i < itemCount; i++)
{
unsigned int old = inner_map.backward (i);
for (unsigned int r = 0; r < ri_count; r++)
if (delta_sz[r]) set_item_delta (i, ri_map[r], src->get_item_delta (old, r));
}
return_trace (true);
}
void collect_region_refs (hb_inc_bimap_t &region_map, const hb_inc_bimap_t &inner_map) const
{
for (unsigned int r = 0; r < regionIndices.len; r++)
{
unsigned int region = regionIndices[r];
if (region_map.has (region)) continue;
for (unsigned int i = 0; i < inner_map.get_next_value (); i++)
if (get_item_delta (inner_map.backward (i), r) != 0)
{
region_map.add (region);
break;
}
}
}
protected:
const HBUINT8 *get_delta_bytes () const
{ return &StructAfter<HBUINT8> (regionIndices); }
HBUINT8 *get_delta_bytes ()
{ return &StructAfter<HBUINT8> (regionIndices); }
int16_t get_item_delta (unsigned int item, unsigned int region) const
{
if (item >= itemCount || unlikely (region >= regionIndices.len)) return 0;
const HBINT8 *p = (const HBINT8 *)get_delta_bytes () + item * get_row_size ();
if (region < shortCount)
return ((const HBINT16 *)p)[region];
else
return (p + HBINT16::static_size * shortCount)[region - shortCount];
}
void set_item_delta (unsigned int item, unsigned int region, int16_t delta)
{
HBINT8 *p = (HBINT8 *)get_delta_bytes () + item * get_row_size ();
if (region < shortCount)
((HBINT16 *)p)[region] = delta;
else
(p + HBINT16::static_size * shortCount)[region - shortCount] = delta;
}
protected:
HBUINT16 itemCount;
HBUINT16 shortCount;
@@ -1815,6 +1937,43 @@ struct VariationStore
dataSets.sanitize (c, this));
}
bool serialize (hb_serialize_context_t *c,
const VariationStore *src,
const hb_array_t <hb_inc_bimap_t> &inner_maps)
{
TRACE_SERIALIZE (this);
unsigned int set_count = 0;
for (unsigned int i = 0; i < inner_maps.length; i++)
if (inner_maps[i].get_population () > 0) set_count++;
unsigned int size = min_size + HBUINT32::static_size * set_count;
if (unlikely (!c->allocate_size<HBUINT32> (size))) return_trace (false);
format = 1;
hb_inc_bimap_t region_map;
for (unsigned int i = 0; i < inner_maps.length; i++)
(src+src->dataSets[i]).collect_region_refs (region_map, inner_maps[i]);
region_map.sort ();
if (unlikely (!regions.serialize (c, this)
.serialize (c, &(src+src->regions), region_map))) return_trace (false);
/* TODO: The following code could be simplified when
* OffsetListOf::subset () can take a custom param to be passed to VarData::serialize ()
*/
dataSets.len = set_count;
unsigned int set_index = 0;
for (unsigned int i = 0; i < inner_maps.length; i++)
{
if (inner_maps[i].get_population () == 0) continue;
if (unlikely (!dataSets[set_index++].serialize (c, this)
.serialize (c, &(src+src->dataSets[i]), inner_maps[i], region_map)))
return_trace (false);
}
return_trace (true);
}
unsigned int get_region_index_count (unsigned int ivs) const
{ return (this+dataSets[ivs]).get_region_index_count (); }
@@ -1833,6 +1992,8 @@ struct VariationStore
&scalars[0], num_scalars);
}
unsigned int get_sub_table_count () const { return dataSets.len; }
protected:
HBUINT16 format;
LOffsetTo<VarRegionList> regions;
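
Finally, a hypothetical caller sketch (also not part of the patch) showing how a subsetter might drive the new VariationStore::serialize (). The sub-table count, the per-VarData inner maps, and the automatic region pruning come from the code above; the retained_rows input, the subset_var_store name, the start_embed () call pattern, and the explicit fini () on the maps are illustrative assumptions.

#include "hb-ot-layout-common.hh"

/* Illustrative only: rebuild `src`, keeping for each VarData sub-table just
 * the rows listed in retained_rows[i]; regions no longer referenced by any
 * retained row are dropped by VariationStore::serialize () itself. */
static bool
subset_var_store (hb_serialize_context_t *c,
                  const OT::VariationStore *src,
                  const hb_array_t<const hb_set_t *> &retained_rows)
{
  unsigned int count = src->get_sub_table_count ();
  hb_vector_t<hb_inc_bimap_t> inner_maps;
  if (unlikely (!inner_maps.resize (count))) return false;

  /* One inner map per VarData: old row index -> new, densely packed index. */
  for (unsigned int i = 0; i < count; i++)
  {
    inner_maps[i].init ();
    inner_maps[i].add_set (retained_rows[i]);
  }

  OT::VariationStore *out = c->start_embed<OT::VariationStore> ();
  bool ret = out && out->serialize (c, src, inner_maps.as_array ());

  for (unsigned int i = 0; i < count; i++)
    inner_maps[i].fini ();
  return ret;
}
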