[repacker] when assigning each connected subgraph a space, also isolate it.

This will break any links coming from space 0 (i.e. the 16 bit offset only space).
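For context, a minimal sketch of the technique in plain C++ (illustrative names only, not hb-repacker's graph_t or its API): a subgraph reachable through a 32 bit offset is given its own space, and isolation then redirects every link that enters the subgraph from outside, for example from space 0, to a duplicate of the target object, so the spaces stop sharing nodes.

// Sketch only: a simplified object graph with hypothetical fields.
#include <set>
#include <vector>

struct Node
{
  std::vector<unsigned> links;  // outgoing edges (indices of child objects)
  unsigned space = 0;           // 0 = the 16 bit offset only space
};

// Collect every node reachable from root into 'subgraph'.
static void find_subgraph (const std::vector<Node>& g, unsigned root,
                           std::set<unsigned>& subgraph)
{
  if (!subgraph.insert (root).second) return;
  for (unsigned child : g[root].links)
    find_subgraph (g, child, subgraph);
}

// Isolate the nodes in 'subgraph' into 'space': any link into the subgraph that
// originates outside of it is redirected to a duplicate of its target, so the
// 16 bit only space stops sharing objects with the new 32 bit space.
static void isolate_subgraph (std::vector<Node>& g,
                              const std::set<unsigned>& subgraph,
                              unsigned space)
{
  unsigned old_size = (unsigned) g.size ();
  std::vector<int> dup (old_size, -1);

  for (unsigned parent = 0; parent < old_size; parent++)
  {
    if (subgraph.count (parent)) continue;          // only edges crossing the boundary
    for (size_t k = 0; k < g[parent].links.size (); k++)
    {
      unsigned child = g[parent].links[k];
      if (!subgraph.count (child)) continue;
      if (dup[child] == -1)
      {
        Node copy = g[child];                       // shallow duplicate for the outside parents
        g.push_back (copy);                         // (a fuller implementation duplicates
        dup[child] = (int) (g.size () - 1);         //  recursively, cf. the recursive
      }                                             //  duplication test further down)
      g[parent].links[k] = (unsigned) dup[child];   // outside parent now uses the copy
    }
  }

  for (unsigned idx : subgraph)
    g[idx].space = space;                           // the isolated subgraph owns its space
}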
Garret Rieger 2021-09-28 13:36:06 -07:00
parent 307acf7fb0
commit 67eb222b8e
2 changed files with 49 additions and 49 deletions

View File

@@ -382,6 +382,8 @@ struct graph_t
if (!roots) return false;
// TODO(grieger): add 16 bit only space to visited so it can't be used to connect 32 bit
// subgraphs.
unsigned space = 0;
while (roots)
{
@@ -399,54 +401,34 @@ struct graph_t
distance_invalid = true;
positions_invalid = true;
}
// TODO(grieger): special case for GSUB/GPOS: use extension promotions to move the 16 bit space
// into the 32 bit space as needed, instead of using isolation.
isolate_subgraph (connected_roots);
}
return true;
}
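A hedged sketch of the grouping done by the loop above, reusing the Node, find_subgraph and isolate_subgraph helpers from the previous sketch (still illustrative, not graph_t): start a group from one remaining 32 bit root, fold in every other root whose reachable set overlaps it, then give the whole connected group one space and isolate it, which also severs any links reaching in from space 0.

// Builds on the Node, find_subgraph and isolate_subgraph sketch above.
#include <set>
#include <vector>

static void assign_spaces (std::vector<Node>& g, std::set<unsigned> roots)
{
  unsigned next_space = 1;                    // space 0 stays the 16 bit only space
  while (!roots.empty ())
  {
    // Seed a group with an arbitrary remaining root and its reachable set.
    std::set<unsigned> subgraph;
    find_subgraph (g, *roots.begin (), subgraph);
    roots.erase (roots.begin ());

    // Fold in every other root whose reachable set touches this one, repeating
    // until the group is closed under connectivity.
    bool grew = true;
    while (grew)
    {
      grew = false;
      for (auto it = roots.begin (); it != roots.end (); )
      {
        std::set<unsigned> candidate;
        find_subgraph (g, *it, candidate);

        bool overlaps = false;
        for (unsigned n : candidate)
          if (subgraph.count (n)) { overlaps = true; break; }

        if (!overlaps) { ++it; continue; }
        subgraph.insert (candidate.begin (), candidate.end ());
        it = roots.erase (it);
        grew = true;
      }
    }

    // Every connected group gets its own space and is cut loose from links
    // originating outside of it, including the 16 bit only space.
    isolate_subgraph (g, subgraph, next_space++);
  }
}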
/*
* Finds any links using 32 bits and isolates the subgraphs they point to.
*/
bool isolate_32bit_links ()
{
bool made_changes = false;
hb_set_t target_links;
unsigned root_index = root_idx ();
int64_t next_space = 0;
for (unsigned i = 0; i <= root_index; i++)
{
if (i == root_index && root_idx () > i)
// root index may have moved due to graph modifications.
i = root_idx ();
for (auto& l : vertices_[i].obj.links)
{
if (l.width == 4 && !l.is_signed)
{
isolate_subgraph (l.objidx);
vertices_[l.objidx].space = next_space++;
distance_invalid = true;
made_changes = true;
}
}
}
return made_changes;
}
/*
* Isolates the subgraph of nodes reachable from root. Any links to nodes in the subgraph
* that originate from outside of the subgraph will be removed by duplicating the linked-to
* object.
*/
bool isolate_subgraph (unsigned root_idx)
bool isolate_subgraph (hb_set_t roots)
{
update_parents ();
hb_hashmap_t<unsigned, unsigned> subgraph;
// incoming edges to root_idx should be all 32 bit in length so we don't need to de-dup these
// set the subgraph incoming edge count to match all of root_idx's incoming edges
subgraph.set (root_idx, vertices_[root_idx].incoming_edges ());
find_subgraph (root_idx, subgraph);
//
// TODO(grieger): the above assumption does not always hold, as there can be 16 bit incoming
// edges; handle that case here by not including them in the count.
for (unsigned root_idx : roots)
{
subgraph.set (root_idx, vertices_[root_idx].incoming_edges ());
find_subgraph (root_idx, subgraph);
}
hb_hashmap_t<unsigned, unsigned> index_map;
bool made_changes = false;
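The new isolate_subgraph above seeds the subgraph map with each root's full incoming edge count before walking the graph. A sketch of that bookkeeping with simplified, assumed types (an incoming_edges field standing in for vertices_[...].incoming_edges ()): a node has to be duplicated exactly when some of its parents sit outside the subgraph, which shows up as its internally counted parents falling short of its total incoming edges; roots are credited with all of their incoming edges up front, per the assumption (noted in the TODO) that edges into a root are the 32 bit links being isolated.

// Sketch of the incoming-edge bookkeeping; hypothetical, simplified types.
#include <map>
#include <set>
#include <vector>

struct NodeWithParents
{
  std::vector<unsigned> links;   // outgoing edges
  unsigned incoming_edges = 0;   // total parent count, maintained elsewhere
};

static std::set<unsigned>
nodes_needing_duplication (const std::vector<NodeWithParents>& g,
                           const std::set<unsigned>& roots,
                           const std::set<unsigned>& subgraph)
{
  std::map<unsigned, unsigned> internal_parents;

  // Credit each root with all of its incoming edges, mirroring how the subgraph
  // map is seeded above (the TODO notes this assumption can be violated by
  // 16 bit edges into a root).
  for (unsigned root : roots)
    internal_parents[root] = g[root].incoming_edges;

  // Count, for every subgraph node, how many of its parents are also inside.
  for (unsigned idx : subgraph)
    for (unsigned child : g[idx].links)
      if (subgraph.count (child))
        internal_parents[child]++;

  // A shortfall means at least one parent lives outside the subgraph, so the
  // node must be duplicated when the subgraph is isolated.
  std::set<unsigned> result;
  for (unsigned idx : subgraph)
    if (internal_parents[idx] < g[idx].incoming_edges)
      result.insert (idx);
  return result;
}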

View File

@@ -155,7 +155,7 @@ populate_serializer_with_isolation_overflow_complex (hb_serialize_context_t* c)
add_offset (obj_f, c);
unsigned obj_e = c->pop_pack (false);
start_object ("cc", 2, c);
start_object ("c", 1, c);
add_offset (obj_e, c);
unsigned obj_c = c->pop_pack (false);
@@ -176,9 +176,14 @@ populate_serializer_with_isolation_overflow_complex (hb_serialize_context_t* c)
add_offset (obj_d, c);
unsigned obj_g = c->pop_pack (false);
start_object (large_string.c_str(), 11000, c);
add_offset (obj_d, c);
unsigned obj_i = c->pop_pack (false);
start_object ("a", 1, c);
add_wide_offset (obj_b, c);
add_offset (obj_g, c);
add_offset (obj_i, c);
c->pop_pack (false);
c->end_serialize();
@@ -190,45 +195,58 @@ populate_serializer_with_isolation_overflow_complex_expected (hb_serialize_context_t* c)
std::string large_string(70000, 'a');
c->start_serialize<char> ();
// 32 bit subgraph
// space 1
unsigned obj_f_prime = add_object ("f", 1, c);
start_object ("e", 1, c);
add_offset (obj_f_prime, c);
unsigned obj_e_prime = c->pop_pack ();
start_object ("cc", 2, c);
add_offset (obj_e_prime, c);
unsigned obj_c = c->pop_pack ();
unsigned obj_e_prime = c->pop_pack (false);
start_object ("d", 1, c);
add_offset (obj_e_prime, c);
unsigned obj_d_prime = c->pop_pack ();
unsigned obj_d_prime = c->pop_pack (false);
start_object (large_string.c_str(), 60000, c);
add_offset (obj_d_prime, c);
unsigned obj_h = c->pop_pack (false);
start_object ("c", 1, c);
add_offset (obj_e_prime, c);
unsigned obj_c = c->pop_pack (false);
start_object (large_string.c_str(), 60000, c);
add_offset (obj_c, c);
add_offset (obj_d_prime, c);
unsigned obj_b = c->pop_pack ();
add_offset (obj_h, c);
unsigned obj_b = c->pop_pack (false);
// space 0
// 16 bit subgraph
unsigned obj_f = add_object ("f", 1, c);
start_object ("e", 1, c);
add_offset (obj_f, c);
unsigned obj_e = c->pop_pack ();
unsigned obj_e = c->pop_pack (false);
start_object ("d", 1, c);
add_offset (obj_e, c);
unsigned obj_d = c->pop_pack ();
unsigned obj_d = c->pop_pack (false);
start_object (large_string.c_str(), 11000, c);
add_offset (obj_d, c);
unsigned obj_i = c->pop_pack (false);
start_object (large_string.c_str(), 10000, c);
add_offset (obj_d, c);
unsigned obj_g = c->pop_pack ();
unsigned obj_g = c->pop_pack (false);
start_object ("a", 1, c);
add_wide_offset (obj_b, c);
add_offset (obj_g, c);
c->pop_pack ();
add_offset (obj_i, c);
c->pop_pack (false);
c->end_serialize();
}
@@ -708,7 +726,7 @@ static void test_resolve_overflows_via_isolation ()
hb_serialize_context_t out (out_buffer, buffer_size);
assert (c.offset_overflow ());
hb_resolve_overflows (c.object_graph (), HB_TAG ('G', 'S', 'U', 'B'), &out, 1);
hb_resolve_overflows (c.object_graph (), HB_TAG ('G', 'S', 'U', 'B'), &out, 0);
assert (!out.offset_overflow ());
hb_bytes_t result = out.copy_bytes ();
assert (result.length == (1 + 10000 + 60000 + 1 + 1
@@ -731,7 +749,7 @@ static void test_resolve_overflows_via_isolation_with_recursive_duplication ()
hb_serialize_context_t out (out_buffer, buffer_size);
assert (c.offset_overflow ());
hb_resolve_overflows (c.object_graph (), HB_TAG ('G', 'S', 'U', 'B'), &out, 1);
hb_resolve_overflows (c.object_graph (), HB_TAG ('G', 'S', 'U', 'B'), &out, 0);
assert (!out.offset_overflow ());
hb_bytes_t result = out.copy_bytes ();