[unsafe-to-break] Copy flag to all glyphs in a cluster

Makes consumption easier.
Behdad Esfahbod 2017-08-11 19:06:07 -07:00
parent ec104e5912
commit 1c17c2bde5
4 changed files with 80 additions and 24 deletions
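
For context on how the change is meant to be consumed: once every glyph of an unsafe cluster carries HB_GLYPH_FLAG_UNSAFE_TO_BREAK, a line-breaking client can test the single glyph at a candidate break position instead of scanning the whole cluster. The sketch below is illustrative only and is not part of this commit; it uses the public accessors hb_buffer_get_glyph_infos() and hb_glyph_info_get_glyph_flags(), and the helper name needs_reshape_at is made up.

#include <hb.h>

/* Sketch: does breaking the line just before glyph i require re-shaping?
 * Assumes buf has already been shaped with hb_shape(). */
static hb_bool_t
needs_reshape_at (hb_buffer_t *buf, unsigned int i)
{
  unsigned int len;
  hb_glyph_info_t *info = hb_buffer_get_glyph_infos (buf, &len);

  if (i >= len)
    return 0;

  /* With this commit the flag is cluster-wide, so one lookup is enough. */
  return (hb_glyph_info_get_glyph_flags (&info[i]) &
          HB_GLYPH_FLAG_UNSAFE_TO_BREAK) ? 1 : 0;
}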


@@ -57,6 +57,8 @@ enum hb_buffer_scratch_flags_t {
   HB_BUFFER_SCRATCH_FLAG_HAS_DEFAULT_IGNORABLES = 0x00000002u,
   HB_BUFFER_SCRATCH_FLAG_HAS_SPACE_FALLBACK     = 0x00000004u,
   HB_BUFFER_SCRATCH_FLAG_HAS_GPOS_ATTACHMENT    = 0x00000008u,
+  HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK    = 0x00000010u,
+
   /* Reserved for complex shapers' internal use. */
   HB_BUFFER_SCRATCH_FLAG_COMPLEX0               = 0x01000000u,
   HB_BUFFER_SCRATCH_FLAG_COMPLEX1               = 0x02000000u,
@@ -301,9 +303,53 @@ struct hb_buffer_t {
     }
     info.cluster = cluster;
   }
+
+  int
+  _unsafe_to_break_find_min_cluster (const hb_glyph_info_t *info,
+                                     unsigned int start, unsigned int end,
+                                     unsigned int cluster) const
+  {
+    for (unsigned int i = start; i < end; i++)
+      cluster = MIN (cluster, info[i].cluster);
+    return cluster;
+  }
+  void
+  _unsafe_to_break_set_mask (hb_glyph_info_t *info,
+                             unsigned int start, unsigned int end,
+                             unsigned int cluster)
+  {
+    for (unsigned int i = start; i < end; i++)
+      if (cluster != info[i].cluster)
+      {
+        scratch_flags |= HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK;
+        info[i].mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
+      }
+  }
 };


+/* Loop over clusters. Duplicated in foreach_syllable(). */
+#define foreach_cluster(buffer, start, end) \
+  for (unsigned int \
+       _count = buffer->len, \
+       start = 0, end = _count ? _next_cluster (buffer, 0) : 0; \
+       start < _count; \
+       start = end, end = _next_cluster (buffer, start))
+
+static inline unsigned int
+_next_cluster (hb_buffer_t *buffer, unsigned int start)
+{
+  hb_glyph_info_t *info = buffer->info;
+  unsigned int count = buffer->len;
+
+  unsigned int cluster = info[start].cluster;
+  while (++start < count && cluster == info[start].cluster)
+    ;
+
+  return start;
+}
+
+
 #define HB_BUFFER_XALLOCATE_VAR(b, func, var) \
   b->func (offsetof (hb_glyph_info_t, var) - offsetof(hb_glyph_info_t, var1), \
            sizeof (b->info[0].var))
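
To make the iteration pattern concrete, here is a standalone model of what foreach_cluster()/_next_cluster() compute: each step yields the half-open range [start, end) covering one run of equal cluster values. Plain C with made-up names, not HarfBuzz code.

#include <stdio.h>

/* Model of _next_cluster(): skip past the run of equal cluster values
 * beginning at `start` and return the index just after it. */
static unsigned int
next_cluster (const unsigned int *clusters, unsigned int len, unsigned int start)
{
  unsigned int cluster = clusters[start];
  while (++start < len && clusters[start] == cluster)
    ;
  return start;
}

int
main (void)
{
  unsigned int clusters[] = {0, 0, 3, 3, 3, 7};
  unsigned int len = 6;

  for (unsigned int start = 0, end; start < len; start = end)
  {
    end = next_cluster (clusters, len, start);
    printf ("cluster %u: glyphs [%u,%u)\n", clusters[start], start, end);
  }
  /* Prints:
   *   cluster 0: glyphs [0,2)
   *   cluster 3: glyphs [2,5)
   *   cluster 7: glyphs [5,6) */
  return 0;
}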


@@ -643,28 +643,12 @@ done:
   skip_glyph ();
 }

-static int
-unsafe_to_break_find_min (const hb_glyph_info_t *info, unsigned int start, unsigned int end,
-                          unsigned int cluster)
-{
-  for (unsigned int i = start; i < end; i++)
-    cluster = MIN (cluster, info[i].cluster);
-  return cluster;
-}
-
-static void
-unsafe_to_break_set_mask (hb_glyph_info_t *info, unsigned int start, unsigned int end,
-                          unsigned int cluster)
-{
-  for (unsigned int i = start; i < end; i++)
-    if (cluster != info[i].cluster)
-      info[i].mask |= HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
-}
-
 void
 hb_buffer_t::unsafe_to_break_impl (unsigned int start, unsigned int end)
 {
   unsigned int cluster = (unsigned int) -1;
-  cluster = unsafe_to_break_find_min (info, start, end, cluster);
-  unsafe_to_break_set_mask (info, start, end, cluster);
+  cluster = _unsafe_to_break_find_min_cluster (info, start, end, cluster);
+  _unsafe_to_break_set_mask (info, start, end, cluster);
 }

 void
 hb_buffer_t::unsafe_to_break_from_outbuffer (unsigned int start, unsigned int end)
@@ -679,10 +663,10 @@ hb_buffer_t::unsafe_to_break_from_outbuffer (unsigned int start, unsigned int end)
   assert (idx <= end);

   unsigned int cluster = (unsigned int) -1;
-  cluster = unsafe_to_break_find_min (out_info, start, out_len, cluster);
-  cluster = unsafe_to_break_find_min (info, idx, end, cluster);
-  unsafe_to_break_set_mask (out_info, start, out_len, cluster);
-  unsafe_to_break_set_mask (info, idx, end, cluster);
+  cluster = _unsafe_to_break_find_min_cluster (out_info, start, out_len, cluster);
+  cluster = _unsafe_to_break_find_min_cluster (info, idx, end, cluster);
+  _unsafe_to_break_set_mask (out_info, start, out_len, cluster);
+  _unsafe_to_break_set_mask (info, idx, end, cluster);
 }

 void
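
The two helpers implement the marking step: over the affected glyph range they find the minimum cluster value, then flag every glyph in the range whose cluster differs from it. A standalone model of that logic follows (made-up names, not HarfBuzz code).

#include <stdio.h>

#define FLAG_UNSAFE_TO_BREAK 0x1u

/* Model of unsafe_to_break_impl(): over the half-open range [start, end),
 * find the minimum cluster value, then flag every glyph in the range whose
 * cluster differs from that minimum. */
static void
mark_unsafe (const unsigned int *clusters, unsigned int *flags,
             unsigned int start, unsigned int end)
{
  unsigned int min_cluster = (unsigned int) -1;
  for (unsigned int i = start; i < end; i++)
    if (clusters[i] < min_cluster)
      min_cluster = clusters[i];

  for (unsigned int i = start; i < end; i++)
    if (clusters[i] != min_cluster)
      flags[i] |= FLAG_UNSAFE_TO_BREAK;
}

int
main (void)
{
  /* Suppose a lookup matched across glyphs 1..3 (clusters 1, 2, 3). */
  unsigned int clusters[] = {0, 1, 2, 3, 4};
  unsigned int flags[5] = {0};

  mark_unsafe (clusters, flags, 1, 4);

  for (unsigned int i = 0; i < 5; i++)
    printf ("glyph %u: cluster %u, flags %u\n", i, clusters[i], flags[i]);
  /* Glyphs 2 and 3 get flagged; glyph 1 (the minimum cluster in the range)
   * does not, since breaking right before the range is still safe. */
  return 0;
}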


@@ -197,8 +197,7 @@ _hb_ot_layout_destroy (hb_ot_layout_t *layout);

 #define syllable() var1.u8[3] /* GSUB/GPOS shaping boundaries */

-/* loop over syllables */
-
+/* Loop over syllables. Based on foreach_cluster(). */
 #define foreach_syllable(buffer, start, end) \
   for (unsigned int \
        _count = buffer->len, \


@@ -783,6 +783,31 @@ hb_ot_position (hb_ot_shape_context_t *c)
   _hb_buffer_deallocate_gsubgpos_vars (c->buffer);
 }

+static inline void
+hb_propagate_flags (hb_buffer_t *buffer)
+{
+  /* Propagate cluster-level glyph flags to be the same on all cluster glyphs.
+   * Simplifies using them. */
+
+  if (!(buffer->scratch_flags & HB_BUFFER_SCRATCH_FLAG_HAS_UNSAFE_TO_BREAK))
+    return;
+
+  hb_glyph_info_t *info = buffer->info;
+
+  foreach_cluster (buffer, start, end)
+  {
+    unsigned int mask = 0;
+    for (unsigned int i = start; i < end; i++)
+      if (info[i].mask & HB_GLYPH_FLAG_UNSAFE_TO_BREAK)
+      {
+        mask = HB_GLYPH_FLAG_UNSAFE_TO_BREAK;
+        break;
+      }
+    if (mask)
+      for (unsigned int i = start; i < end; i++)
+        info[i].mask |= mask;
+  }
+}


 /* Pull it all together! */
@@ -826,6 +851,8 @@ hb_ot_shape_internal (hb_ot_shape_context_t *c)
   if (c->plan->shaper->postprocess_glyphs)
     c->plan->shaper->postprocess_glyphs (c->plan, c->buffer, c->font);

+  hb_propagate_flags (c->buffer);
+
   _hb_buffer_deallocate_unicode_vars (c->buffer);

   c->buffer->props.direction = c->target_direction;
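
Finally, hb_propagate_flags() is what turns the per-glyph marks into a per-cluster property: within each run of equal cluster values, if any glyph carries HB_GLYPH_FLAG_UNSAFE_TO_BREAK, all of them end up carrying it. A standalone model of that pass (made-up names, not HarfBuzz code):

#include <stdio.h>

#define FLAG_UNSAFE_TO_BREAK 0x1u

/* Model of hb_propagate_flags(): within each run of equal cluster values,
 * if any glyph carries the flag, copy it to every glyph of the run. */
static void
propagate (const unsigned int *clusters, unsigned int *flags, unsigned int len)
{
  for (unsigned int start = 0, end; start < len; start = end)
  {
    end = start + 1;
    while (end < len && clusters[end] == clusters[start])
      end++;

    unsigned int mask = 0;
    for (unsigned int i = start; i < end; i++)
      mask |= flags[i] & FLAG_UNSAFE_TO_BREAK;

    if (mask)
      for (unsigned int i = start; i < end; i++)
        flags[i] |= mask;
  }
}

int
main (void)
{
  /* Cluster 3 maps to two glyphs; only the first was flagged during GSUB. */
  unsigned int clusters[] = {0, 3, 3, 7};
  unsigned int flags[]    = {0, FLAG_UNSAFE_TO_BREAK, 0, 0};

  propagate (clusters, flags, 4);

  for (unsigned int i = 0; i < 4; i++)
    printf ("glyph %u: cluster %u, flags %u\n", i, clusters[i], flags[i]);
  /* After propagation both glyphs of cluster 3 carry the flag. */
  return 0;
}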