Do alternate glyph selection!

Kinda hand-wavy right now.  Not tested.
Behdad Esfahbod 2010-05-20 17:26:35 +01:00
parent 750a229455
commit f7acd8df51
4 changed files with 35 additions and 17 deletions
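
The change threads an hb_mask_t through GSUB/GPOS lookup application so that AlternateSubstFormat1 can read the requested alternate index straight out of the glyph's mask bits. A minimal standalone sketch of that encode/decode round trip, using a made-up bit allocation (lookup_mask = 0x0F00) that is purely illustrative and not part of this commit:

#include <assert.h>
#include <stdio.h>

typedef unsigned int hb_mask_t;

/* Portable count-trailing-zeros, mirroring the _hb_ctz fallback added below. */
static unsigned int ctz (unsigned int v)
{
  unsigned int n = 0;
  if (!v) return 0;
  while (!(v & 1)) { n++; v >>= 1; }
  return n;
}

int main (void)
{
  /* Hypothetical allocation: bits 8..11 of the glyph mask belong to this lookup. */
  hb_mask_t lookup_mask = 0x0F00u;
  unsigned int shift = ctz (lookup_mask);      /* 8 */

  /* Encode: the caller wants alternate #3 for this glyph. */
  unsigned int wanted_alt = 3;
  hb_mask_t glyph_mask = (wanted_alt << shift) & lookup_mask;

  /* Decode: the same expression AlternateSubstFormat1::apply now uses. */
  unsigned int alt_index = (lookup_mask & glyph_mask) >> shift;

  assert (alt_index == wanted_alt);
  printf ("alt_index = %u\n", alt_index);
  return 0;
}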

View File

@@ -1497,15 +1497,17 @@ struct PosLookup : Lookup
   { return this+CastR<OffsetArrayOf<PosLookupSubTable> > (subTable)[i]; }

   inline bool apply_once (hb_ot_layout_context_t *layout,
-                          hb_buffer_t *buffer,
-                          unsigned int context_length,
-                          unsigned int nesting_level_left) const
+                          hb_buffer_t *buffer,
+                          hb_mask_t lookup_mask,
+                          unsigned int context_length,
+                          unsigned int nesting_level_left) const
   {
     unsigned int lookup_type = get_type ();
     hb_apply_context_t c[1] = {{0}};

     c->layout = layout;
     c->buffer = buffer;
+    c->lookup_mask = lookup_mask;
     c->context_length = context_length;
     c->nesting_level_left = nesting_level_left;
     c->lookup_flag = get_flag ();
@@ -1537,7 +1539,7 @@ struct PosLookup : Lookup
     bool done;
     if (buffer->info[buffer->i].mask & mask)
     {
-      done = apply_once (layout, buffer, NO_CONTEXT, MAX_NESTING_LEVEL);
+      done = apply_once (layout, buffer, mask, NO_CONTEXT, MAX_NESTING_LEVEL);
       ret |= done;
     }
     else
@@ -1621,7 +1623,7 @@ static inline bool position_lookup (hb_apply_context_t *c, unsigned int lookup_i
   if (unlikely (c->context_length < 1))
     return false;

-  return l.apply_once (c->layout, c->buffer, c->context_length, c->nesting_level_left - 1);
+  return l.apply_once (c->layout, c->buffer, c->lookup_mask, c->context_length, c->nesting_level_left - 1);
 }

View File

@@ -274,6 +274,8 @@ struct AlternateSubstFormat1
   {
     TRACE_APPLY ();
     hb_codepoint_t glyph_id = c->buffer->info[c->buffer->i].codepoint;
+    hb_mask_t glyph_mask = c->buffer->info[c->buffer->i].mask;
+    hb_mask_t lookup_mask = c->lookup_mask;

     unsigned int index = (this+coverage) (glyph_id);
     if (likely (index == NOT_COVERED))
@@ -284,14 +286,8 @@ struct AlternateSubstFormat1
     if (unlikely (!alt_set.len))
       return false;

-    unsigned int alt_index = 0;
-
-    /* XXX callback to user to choose alternate
-    if (c->layout->face->altfunc)
-      alt_index = (c->layout->face->altfunc)(c->layout->layout, c->buffer,
-                                             c->buffer->out_len, glyph_id,
-                                             alt_set.len, alt_set.array);
-    */
+    unsigned int shift = _hb_ctz (lookup_mask);
+    unsigned int alt_index = (lookup_mask & glyph_mask) >> shift;

     if (unlikely (alt_index >= alt_set.len))
       return false;
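
To make the two added lines concrete with hypothetical numbers (these particular mask values are not from the commit):

  lookup_mask = 0x0F00                        /* bits reserved for this lookup (hypothetical) */
  glyph_mask  = 0x0300                        /* this glyph asks for alternate 3 */
  shift       = _hb_ctz (0x0F00)       = 8
  alt_index   = (0x0F00 & 0x0300) >> 8 = 3    /* then bounds-checked against alt_set.len */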
@@ -777,6 +773,7 @@ struct SubstLookup : Lookup
   inline bool apply_once (hb_ot_layout_context_t *layout,
                           hb_buffer_t *buffer,
+                          hb_mask_t lookup_mask,
                           unsigned int context_length,
                           unsigned int nesting_level_left) const
   {
@@ -785,6 +782,7 @@ struct SubstLookup : Lookup
     c->layout = layout;
     c->buffer = buffer;
+    c->lookup_mask = lookup_mask;
     c->context_length = context_length;
     c->nesting_level_left = nesting_level_left;
     c->lookup_flag = get_flag ();
@@ -831,7 +829,7 @@ struct SubstLookup : Lookup
     while (buffer->i < buffer->len)
     {
       if ((buffer->info[buffer->i].mask & mask) &&
-          apply_once (layout, buffer, NO_CONTEXT, MAX_NESTING_LEVEL))
+          apply_once (layout, buffer, mask, NO_CONTEXT, MAX_NESTING_LEVEL))
        ret = true;
       else
        buffer->next_glyph ();
@@ -847,7 +845,7 @@ struct SubstLookup : Lookup
     do
     {
       if ((buffer->info[buffer->i].mask & mask) &&
-          apply_once (layout, buffer, NO_CONTEXT, MAX_NESTING_LEVEL))
+          apply_once (layout, buffer, mask, NO_CONTEXT, MAX_NESTING_LEVEL))
        ret = true;
       else
        buffer->i--;
@@ -933,7 +931,7 @@ static inline bool substitute_lookup (hb_apply_context_t *c, unsigned int lookup
   if (unlikely (c->context_length < 1))
     return false;

-  return l.apply_once (c->layout, c->buffer, c->context_length, c->nesting_level_left - 1);
+  return l.apply_once (c->layout, c->buffer, c->lookup_mask, c->context_length, c->nesting_level_left - 1);
 }

View File

@@ -44,6 +44,7 @@ struct hb_apply_context_t
   unsigned int debug_depth;
   hb_ot_layout_context_t *layout;
   hb_buffer_t *buffer;
+  hb_mask_t lookup_mask;
   unsigned int context_length;
   unsigned int nesting_level_left;
   unsigned int lookup_flag;

View File

@@ -156,7 +156,7 @@ static inline HB_CONST_FUNC unsigned int
 _hb_bit_storage (unsigned int number)
 {
 #if defined(__GNUC__) && (__GNUC__ >= 4) && defined(__OPTIMIZE__)
-  return likely (number) ? (sizeof (unsigned int) * 8 - __builtin_clzl(number)) : 0;
+  return likely (number) ? (sizeof (unsigned int) * 8 - __builtin_clz (number)) : 0;
 #else
   register unsigned int n_bits = 0;
   while (number) {
@@ -167,6 +167,23 @@ _hb_bit_storage (unsigned int number)
 #endif
 }

+/* Returns the number of zero bits in the least significant side of number */
+static inline HB_CONST_FUNC unsigned int
+_hb_ctz (unsigned int number)
+{
+#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(__OPTIMIZE__)
+  return likely (number) ? __builtin_ctz (number) : 0;
+#else
+  register unsigned int n_bits = 0;
+  if (unlikely (!number)) return 0;
+  while (!(number & 1)) {
+    n_bits++;
+    number >>= 1;
+  }
+  return n_bits;
+#endif
+}
+

 /* We need external help for these */
 #ifdef HAVE_GLIB
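
A throwaway harness to sanity-check the new helper in isolation; it is not part of the commit, and my_ctz simply copies the #else fallback of _hb_ctz so it can be compared against GCC's __builtin_ctz:

#include <assert.h>

/* Stand-in for the portable branch of _hb_ctz added above. */
static unsigned int my_ctz (unsigned int number)
{
  unsigned int n_bits = 0;
  if (!number) return 0;
  while (!(number & 1)) {
    n_bits++;
    number >>= 1;
  }
  return n_bits;
}

int main (void)
{
  assert (my_ctz (0x0001) == 0);
  assert (my_ctz (0x0F00) == 8);
  assert (my_ctz (0x8000) == 15);
  assert (my_ctz (0) == 0);        /* the helper defines ctz(0) as 0 */
#if defined(__GNUC__)
  assert (my_ctz (0x0F00) == (unsigned int) __builtin_ctz (0x0F00));
#endif
  return 0;
}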