Compare commits

...

4 Commits

Author SHA1 Message Date
Behdad Esfahbod cdbf24e87f [OTLayout] Accelerate lookups by batching
If we need to apply many many lookups, we can speed that up by applying
them in batches.  For each batch we keep the union of the coverage of
the lookups participating.  We can then skip glyph ranges that do NOT
participate in any lookup in the batch.  The batch partition is
determined optimally by a mathematical probability model on the glyphs
and a dynamic-program to optimize the partition.

The net effect is 30% speedup on Amiri.  The downside is more memory
consumption as each batch will keep an hb_set_t of its coverage.

I'm not yet convinced that the tradeoff is worth pursuing.  I'm trying
to find ways to optimize this more, with less memory overhead.

This work also ignores the number of subtables per lookup.  That may
prove to be very important for the performance numbers from here on.
2013-05-02 15:39:16 -04:00
Behdad Esfahbod 68db8c49d8 [OTLayout] Add start/end to apply_string()
No functional change.
2013-05-02 15:39:16 -04:00
Behdad Esfahbod 1b972d893a [OTLayout] Add is_inplace() method to GSUB 2013-05-02 15:39:16 -04:00
Behdad Esfahbod 07034f447b [API] Add hb_ot_layout_lookup_get_coverage() 2013-05-02 15:39:16 -04:00
9 changed files with 621 additions and 39 deletions

View File

@ -1461,19 +1461,24 @@ struct PosLookup : Lookup
static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);
inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
inline bool apply_string (hb_apply_context_t *c,
unsigned int start,
unsigned int end,
const hb_set_digest_t *digest) const
{
bool ret = false;
if (unlikely (!c->buffer->len || !c->lookup_mask))
end = MIN (end, c->buffer->len);
if (unlikely (start >= end || !c->lookup_mask))
return false;
c->set_recurse_func (apply_recurse_func);
c->set_lookup (*this);
c->buffer->idx = 0;
c->buffer->idx = start;
while (c->buffer->idx < c->buffer->len)
while (c->buffer->idx < end)
{
if (digest->may_have (c->buffer->cur().codepoint) &&
(c->buffer->cur().mask & c->lookup_mask) &&

View File

@ -37,6 +37,12 @@ namespace OT {
struct SingleSubstFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -115,6 +121,12 @@ struct SingleSubstFormat1
struct SingleSubstFormat2
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -251,6 +263,13 @@ struct SingleSubst
struct Sequence
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
/* For len==0 we don't do anything, so it's harmless. */
return TRACE_RETURN (substitute.len <= 1);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -315,6 +334,18 @@ struct Sequence
struct MultipleSubstFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
/* Some tools generate MultipleSubst with each substitute having length 1!
* So, check them. */
unsigned int count = sequence.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+sequence[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -440,6 +471,12 @@ typedef ArrayOf<GlyphID> AlternateSet; /* Array of alternate GlyphIDs--in
struct AlternateSubstFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -760,6 +797,12 @@ struct LigatureSet
struct LigatureSubstFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
return TRACE_RETURN (false);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -908,6 +951,12 @@ struct ExtensionSubst : Extension<ExtensionSubst>
struct ReverseChainSingleSubstFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1145,6 +1194,13 @@ struct SubstLookup : Lookup
return lookup_type_is_reverse (type);
}
inline hb_is_inplace_context_t::return_t is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
c->set_recurse_func (dispatch_recurse_func<hb_is_inplace_context_t>);
return TRACE_RETURN (dispatch (c));
}
inline hb_closure_context_t::return_t closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1191,11 +1247,16 @@ struct SubstLookup : Lookup
}
static bool apply_recurse_func (hb_apply_context_t *c, unsigned int lookup_index);
inline bool apply_string (hb_apply_context_t *c, const hb_set_digest_t *digest) const
inline bool apply_string (hb_apply_context_t *c,
unsigned int start,
unsigned int end,
const hb_set_digest_t *digest) const
{
bool inplace = end != (unsigned int) -1;
bool ret = false;
end = MIN (end, c->buffer->len);
if (unlikely (!c->buffer->len || !c->lookup_mask))
if (unlikely (start >= end || !c->lookup_mask))
return false;
c->set_recurse_func (apply_recurse_func);
@ -1204,10 +1265,11 @@ struct SubstLookup : Lookup
if (likely (!is_reverse ()))
{
/* in/out forward substitution */
c->buffer->clear_output ();
c->buffer->idx = 0;
c->buffer->idx = start;
if (inplace)
c->buffer->out_len = start;
while (c->buffer->idx < c->buffer->len)
while (c->buffer->idx < end)
{
if (digest->may_have (c->buffer->cur().codepoint) &&
(c->buffer->cur().mask & c->lookup_mask) &&
@ -1216,14 +1278,12 @@ struct SubstLookup : Lookup
else
c->buffer->next_glyph ();
}
if (ret)
c->buffer->swap_buffers ();
}
else
{
/* in-place backward substitution */
c->buffer->remove_output ();
c->buffer->idx = c->buffer->len - 1;
c->buffer->idx = end - 1;
do
{
if (digest->may_have (c->buffer->cur().codepoint) &&
@ -1232,9 +1292,8 @@ struct SubstLookup : Lookup
ret = true;
else
c->buffer->idx--;
}
while ((int) c->buffer->idx >= 0);
while ((int) c->buffer->idx >= (int) start);
}
return ret;

View File

@ -44,6 +44,55 @@ namespace OT {
"");
#ifndef HB_DEBUG_IS_INPLACE
#define HB_DEBUG_IS_INPLACE (HB_DEBUG+0)
#endif
#define TRACE_IS_INPLACE(this) \
hb_auto_trace_t<HB_DEBUG_IS_INPLACE, bool> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"");
/* Dispatch context for the is_inplace() query: walks a lookup's subtables
 * (recursing into lookups referenced by contextual rules) and answers
 * whether applying the lookup can be done "in place", i.e. without
 * changing the length of the glyph string.  return_t is bool:
 * true means in-place. */
struct hb_is_inplace_context_t
{
inline const char *get_name (void) { return "IS_INPLACE"; }
static const unsigned int max_debug_depth = HB_DEBUG_IS_INPLACE;
typedef bool return_t;
typedef return_t (*recurse_func_t) (hb_is_inplace_context_t *c, unsigned int lookup_index);
template <typename T>
inline return_t dispatch (const T &obj) { return obj.is_inplace (this); }
/* Conservative default; also returned when the recursion limit is hit. */
static return_t default_return_value (void) { return true; }
/* Short-circuit: one non-in-place subtable makes the whole lookup non-in-place. */
bool stop_sublookup_iteration (return_t r) const { return !r; }
return_t recurse (unsigned int lookup_index)
{
/* Guard against unbounded recursion through contextual lookups. */
if (unlikely (nesting_level_left == 0 || !recurse_func))
return default_return_value ();
nesting_level_left--;
bool ret = recurse_func (this, lookup_index);
nesting_level_left++;
return ret;
}
hb_face_t *face;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int debug_depth;
hb_is_inplace_context_t (hb_face_t *face_,
unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
face (face_),
recurse_func (NULL),
nesting_level_left (nesting_level_left_),
debug_depth (0) {}
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif
@ -1096,6 +1145,17 @@ static inline bool context_apply_lookup (hb_apply_context_t *c,
struct Rule
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
unsigned int count = lookupCount;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookupRecord[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
@ -1155,6 +1215,16 @@ struct Rule
struct RuleSet
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if (!(this+rule[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
@ -1211,6 +1281,16 @@ struct RuleSet
struct ContextFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1297,6 +1377,16 @@ struct ContextFormat1
struct ContextFormat2
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1392,6 +1482,17 @@ struct ContextFormat2
struct ContextFormat3
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
unsigned int count = lookupCount;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookupRecord[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1633,6 +1734,19 @@ static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
struct ChainRule
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
unsigned int count = lookup.len;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookup.array[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
@ -1718,6 +1832,16 @@ struct ChainRule
struct ChainRuleSet
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if (!(this+rule[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
@ -1771,6 +1895,16 @@ struct ChainRuleSet
struct ChainContextFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1854,6 +1988,16 @@ struct ChainContextFormat1
struct ChainContextFormat2
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
@ -1978,6 +2122,20 @@ struct ChainContextFormat2
struct ChainContextFormat3
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
unsigned int count = lookup.len;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookup.array[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);

View File

@ -189,6 +189,23 @@ static inline uint8_t allocate_lig_id (hb_buffer_t *buffer) {
}
/*
* GSUBGPOS
*/
/* Always returns true for GPOS, but make writing code easier. */
HB_INTERNAL bool
hb_ot_layout_lookup_is_inplace (hb_face_t *face,
hb_tag_t table_tag,
unsigned int lookup_index);
/*
* GSUB
*/
HB_INTERNAL hb_bool_t
hb_ot_layout_lookup_would_substitute_fast (hb_face_t *face,
unsigned int lookup_index,
@ -206,6 +223,8 @@ HB_INTERNAL hb_bool_t
hb_ot_layout_substitute_lookup (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int lookup_index,
unsigned int start,
unsigned int end,
hb_mask_t mask,
hb_bool_t auto_zwj);
@ -215,6 +234,11 @@ hb_ot_layout_substitute_finish (hb_font_t *font,
hb_buffer_t *buffer);
/*
* GPOS
*/
/* Should be called before all the position_lookup's are done. Resets positions to zero. */
HB_INTERNAL void
hb_ot_layout_position_start (hb_font_t *font,
@ -224,6 +248,8 @@ HB_INTERNAL hb_bool_t
hb_ot_layout_position_lookup (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int lookup_index,
unsigned int start,
unsigned int end,
hb_mask_t mask,
hb_bool_t auto_zwj);

View File

@ -591,6 +591,31 @@ hb_ot_layout_collect_lookups (hb_face_t *face,
}
}
/* Collects the coverage of lookup `lookup_index` from the given table
 * (GSUB or GPOS) into `glyphs`, via Lookup::add_coverage().
 * Any other table_tag leaves `glyphs` untouched, as does a face whose
 * shaper data cannot be ensured. */
void
hb_ot_layout_lookup_get_coverage (hb_face_t *face,
hb_tag_t table_tag,
unsigned int lookup_index,
hb_set_t *glyphs /* OUT */)
{
if (unlikely (!hb_ot_shaper_face_data_ensure (face))) return;
switch (table_tag)
{
case HB_OT_TAG_GSUB:
{
const OT::SubstLookup& l = hb_ot_layout_from_face (face)->gsub->get_lookup (lookup_index);
l.add_coverage (glyphs);
return;
}
case HB_OT_TAG_GPOS:
{
const OT::PosLookup& l = hb_ot_layout_from_face (face)->gpos->get_lookup (lookup_index);
l.add_coverage (glyphs);
return;
}
}
}
void
hb_ot_layout_lookup_collect_glyphs (hb_face_t *face,
hb_tag_t table_tag,
@ -626,6 +651,33 @@ hb_ot_layout_lookup_collect_glyphs (hb_face_t *face,
}
/*
* GSUBGPOS
*/
/* Returns whether applying the lookup can be done in place, i.e. without
 * changing the length of the glyph string.  Per the header declaration,
 * always returns true for GPOS (and any unrecognized tag); only GSUB
 * lookups are actually inspected.
 * NOTE(review): unlike hb_ot_layout_lookup_get_coverage() this does not
 * call hb_ot_shaper_face_data_ensure() first; presumably _get_gsub()
 * copes with an uninitialized face — confirm. */
bool
hb_ot_layout_lookup_is_inplace (hb_face_t *face,
hb_tag_t table_tag,
unsigned int lookup_index)
{
OT::hb_is_inplace_context_t c (face);
switch (table_tag)
{
case HB_OT_TAG_GSUB:
{
const OT::SubstLookup& l = _get_gsub (face).get_lookup (lookup_index);
return l.is_inplace (&c);
}
default:
case HB_OT_TAG_GPOS:
{
return true;
}
}
}
/*
* OT::GSUB
*/
@ -672,6 +724,8 @@ hb_bool_t
hb_ot_layout_substitute_lookup (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int lookup_index,
unsigned int start,
unsigned int end,
hb_mask_t mask,
hb_bool_t auto_zwj)
{
@ -681,7 +735,7 @@ hb_ot_layout_substitute_lookup (hb_font_t *font,
const OT::SubstLookup& l = hb_ot_layout_from_face (font->face)->gsub->get_lookup (lookup_index);
return l.apply_string (&c, &hb_ot_layout_from_face (font->face)->gsub_digests[lookup_index]);
return l.apply_string (&c, start, end, &hb_ot_layout_from_face (font->face)->gsub_digests[lookup_index]);
}
void
@ -702,6 +756,7 @@ hb_ot_layout_lookup_substitute_closure (hb_face_t *face,
l.closure (&c);
}
/*
* OT::GPOS
*/
@ -722,6 +777,8 @@ hb_bool_t
hb_ot_layout_position_lookup (hb_font_t *font,
hb_buffer_t *buffer,
unsigned int lookup_index,
unsigned int start,
unsigned int end,
hb_mask_t mask,
hb_bool_t auto_zwj)
{
@ -731,7 +788,7 @@ hb_ot_layout_position_lookup (hb_font_t *font,
const OT::PosLookup& l = hb_ot_layout_from_face (font->face)->gpos->get_lookup (lookup_index);
return l.apply_string (&c, &hb_ot_layout_from_face (font->face)->gpos_digests[lookup_index]);
return l.apply_string (&c, start, end, &hb_ot_layout_from_face (font->face)->gpos_digests[lookup_index]);
}
void

View File

@ -192,6 +192,12 @@ hb_ot_shape_plan_collect_lookups (hb_shape_plan_t *shape_plan,
hb_tag_t table_tag,
hb_set_t *lookup_indexes /* OUT */);
void
hb_ot_layout_lookup_get_coverage (hb_face_t *face,
hb_tag_t table_tag,
unsigned int lookup_index,
hb_set_t *glyphs /* OUT */);
void
hb_ot_layout_lookup_collect_glyphs (hb_face_t *face,
hb_tag_t table_tag,

View File

@ -39,6 +39,7 @@ static const hb_tag_t table_tags[2] = {HB_OT_TAG_GSUB, HB_OT_TAG_GPOS};
struct hb_ot_map_t
{
friend struct hb_ot_map_builder_t;
friend struct optimize_lookups_context_t;
public:
@ -68,10 +69,15 @@ struct hb_ot_map_t
typedef void (*pause_func_t) (const struct hb_ot_shape_plan_t *plan, hb_font_t *font, hb_buffer_t *buffer);
struct stage_map_t {
unsigned int last_lookup; /* Cumulative */
unsigned int last_lookup; /* Actually, last_lookup+1 */
pause_func_t pause_func;
};
struct batch_map_t {
unsigned int last_lookup; /* Actually, last_lookup+1 */
hb_set_t *coverage;
};
hb_ot_map_t (void) { memset (this, 0, sizeof (*this)); }
@ -117,6 +123,8 @@ struct hb_ot_map_t
*lookup_count = end - start;
}
HB_INTERNAL void optimize (hb_face_t *face);
HB_INTERNAL void collect_lookups (unsigned int table_index, hb_set_t *lookups) const;
HB_INTERNAL inline void apply (unsigned int table_index,
const struct hb_ot_shape_plan_t *plan,
@ -131,6 +139,9 @@ struct hb_ot_map_t
{
lookups[table_index].finish ();
stages[table_index].finish ();
for (unsigned int batch_index = 0; batch_index < batches[table_index].len; batch_index++)
hb_set_destroy (batches[table_index][batch_index].coverage);
batches[table_index].finish ();
}
}
@ -151,6 +162,7 @@ struct hb_ot_map_t
hb_prealloced_array_t<feature_map_t, 8> features;
hb_prealloced_array_t<lookup_map_t, 32> lookups[2]; /* GSUB/GPOS */
hb_prealloced_array_t<stage_map_t, 4> stages[2]; /* GSUB/GPOS */
hb_prealloced_array_t<batch_map_t, 4> batches[2]; /* GSUB/GPOS */
};
enum hb_ot_map_feature_flags_t {

View File

@ -27,6 +27,14 @@
*/
#include "hb-ot-map-private.hh"
#include "hb-ot-layout-private.hh"
#ifndef HB_DEBUG_MAP
#define HB_DEBUG_MAP (HB_DEBUG+0)
#endif
#include "hb-ot-layout-private.hh"
@ -102,36 +110,98 @@ void hb_ot_map_builder_t::add_feature (hb_tag_t tag, unsigned int value,
info->stage[1] = current_stage[1];
}
/* True if the glyph can be skipped by a batch: it is not in the batch's
 * coverage union AND is not a mark (marks are never skipped by this test).
 * NOTE(review): assumes glyph_props() has already been set up by the
 * shaper before apply() runs — confirm. */
static inline bool
may_skip (const hb_glyph_info_t &info, hb_set_t *coverage)
{
return !(info.glyph_props() & HB_OT_LAYOUT_GLYPH_PROPS_MARK) &&
!coverage->has (info.codepoint);
}
inline void hb_ot_map_t::apply (unsigned int table_index,
const hb_ot_shape_plan_t *plan,
hb_font_t *font,
hb_buffer_t *buffer) const
{
unsigned int i = 0;
unsigned int b = 0;
for (unsigned int stage_index = 0; stage_index < stages[table_index].len; stage_index++) {
const stage_map_t *stage = &stages[table_index][stage_index];
for (; i < stage->last_lookup; i++)
switch (table_index)
{
case 0:
hb_ot_layout_substitute_lookup (font, buffer, lookups[table_index][i].index,
lookups[table_index][i].mask,
lookups[table_index][i].auto_zwj);
break;
for (unsigned int stage_index = 0; stage_index < stages[table_index].len; stage_index++)
{
const stage_map_t stage = stages[table_index][stage_index];
case 1:
hb_ot_layout_position_lookup (font, buffer, lookups[table_index][i].index,
lookups[table_index][i].mask,
lookups[table_index][i].auto_zwj);
break;
}
if (stage->pause_func)
for (; b < batches[table_index].len && batches[table_index][b].last_lookup <= stage.last_lookup; b++)
{
buffer->clear_output ();
stage->pause_func (plan, font, buffer);
const batch_map_t batch = batches[table_index][b];
if (!batch.coverage)
{
switch (table_index)
{
case 0:
for (; i < batch.last_lookup; i++)
{
buffer->clear_output ();
hb_ot_layout_substitute_lookup (font, buffer, lookups[table_index][i].index,
0, (unsigned int) -1,
lookups[table_index][i].mask,
lookups[table_index][i].auto_zwj);
if (buffer->have_output)
buffer->swap_buffers ();
}
break;
case 1:
for (; i < batch.last_lookup; i++)
hb_ot_layout_position_lookup (font, buffer, lookups[table_index][i].index,
0, (unsigned int) -1,
lookups[table_index][i].mask,
lookups[table_index][i].auto_zwj);
break;
}
}
else
{
unsigned int start = 0, end = 0;
if (table_index == 0)
buffer->clear_output ();
for (;;)
{
while (start < buffer->len && may_skip (buffer->info[start], batch.coverage))
start++;
if (start >= buffer->len)
break;
end = start + 1;
while (end < buffer->len && !may_skip (buffer->info[end], batch.coverage))
end++;
switch (table_index)
{
case 0:
for (unsigned int j = i; j < batch.last_lookup; j++)
hb_ot_layout_substitute_lookup (font, buffer, lookups[table_index][j].index,
start, end,
lookups[table_index][j].mask,
lookups[table_index][j].auto_zwj);
break;
case 1:
for (unsigned int j = i; j < batch.last_lookup; j++)
hb_ot_layout_position_lookup (font, buffer, lookups[table_index][j].index,
start, end,
lookups[table_index][j].mask,
lookups[table_index][j].auto_zwj);
break;
}
start = end + 1;
}
assert (!buffer->has_separate_output ());
i = batch.last_lookup;
}
}
if (stage.pause_func)
stage.pause_func (plan, font, buffer);
}
}
@ -315,4 +385,191 @@ hb_ot_map_builder_t::compile (hb_ot_map_t &m)
}
}
}
m.optimize (face);
}
/* Dynamic program that partitions a run of lookups into batches.
 *
 * Indices used by solve() are REVERSED with respect to lookup order:
 * index 0 is the LAST lookup of the run and index num_lookups-1 the
 * first (see lookup_offset()).  best_cost[i] is the minimal cost of
 * covering lookups 0..i; best_move[i] is the largest index NOT in the
 * batch that ends at i (-1 when that batch extends all the way to 0).
 * The cost model lives in single_lookup_cost() / lookup_batch_cost() /
 * probability() at the bottom. */
struct optimize_lookups_context_t
{
inline optimize_lookups_context_t (hb_ot_map_t *map_,
hb_face_t *face_,
unsigned int table_index_) :
map (map_),
face (face_),
table_index (table_index_),
start_lookup (0),
num_lookups (0),
num_glyphs (hb_face_get_glyph_count (face_)) {}
/* Entry point: partition lookups [start_lookup_, start_lookup_+num_lookups_)
 * and push the resulting batches onto map->batches[table_index]. */
inline void add_lookups (unsigned int start_lookup_,
unsigned int num_lookups_)
{
set_lookups (start_lookup_, num_lookups_);
solve ();
}
private:
inline void set_lookups (unsigned int start_lookup_,
unsigned int num_lookups_)
{
start_lookup = start_lookup_;
num_lookups = num_lookups_;
}
inline void
solve (void)
{
if (!num_lookups)
return;
DEBUG_MSG_FUNC (MAP, map, "%d lookups", num_lookups);
/* Scratch set, reused for coverage unions throughout. */
hb_set_t *cov = hb_set_create ();
/* NOTE(review): variable-length arrays are a GCC/Clang extension,
 * not standard C++. */
int best_move[num_lookups];
float best_cost[num_lookups];
best_move[0] = -1;
best_cost[0] = single_lookup_cost (0);
for (unsigned int i = 1; i < num_lookups; i++)
{
cov->clear ();
add_coverage (i, cov);
float this_cost = single_lookup_cost (i);
best_cost[i] = 1e20; /* "infinity" sentinel */
/* Try every batch (j, i]; cov accumulates the union of coverages
 * as the candidate batch grows toward j = 0. */
for (unsigned int j = i - 1; (int) j >= 0; j--)
{
if (best_cost[i] > best_cost[j] + this_cost)
{
best_cost[i] = best_cost[j] + this_cost;
best_move[i] = j;
}
add_coverage (j, cov);
this_cost = lookup_batch_cost (cov, i - j + 1);
/* Batch cost only grows as the batch widens (cov is a growing
 * union and n_lookups increases), so wider batches cannot win. */
if (this_cost > best_cost[i])
break; /* No chance */
}
/* Also consider the single batch spanning all of 0..i. */
if (best_cost[i] > this_cost)
{
best_cost[i] = this_cost;
best_move[i] = -1;
}
}
DEBUG_MSG_FUNC (MAP, map, "optimal solution costs %f", best_cost[num_lookups - 1]);
/* Walk the optimal partition back from index num_lookups-1; since
 * indices are reversed, batches are pushed in forward lookup order. */
for (int i = num_lookups - 1; i >= 0; i = best_move[i])
{
unsigned int batch_num_lookups = i - best_move[i];
if (DEBUG_LEVEL_ENABLED (MAP, 1))
DEBUG_MSG_FUNC (MAP, map, "batch has %d lookups; costs %f",
batch_num_lookups,
best_cost[i] - (best_move[i] == -1 ? 0 : best_cost[best_move[i]]));
hb_ot_map_t::batch_map_t *batch = map->batches[table_index].push ();
if (batch)
{
batch->last_lookup = MAX (lookup_offset (i), lookup_offset (best_move[i] + 1)) + 1;
/* Single-lookup batches keep no coverage set; apply() takes the
 * plain (non-skipping) path when batch->coverage is NULL. */
batch->coverage = batch_num_lookups == 1 ? NULL : hb_set_create ();
for (int j = i; j > best_move[i]; j--)
{
if (batch->coverage)
add_coverage (j, batch->coverage);
if (DEBUG_LEVEL_ENABLED (MAP, 2))
{
cov->clear ();
add_coverage (j, cov);
DEBUG_MSG_FUNC (MAP, map, "lookup %d (lookup-index %d) popcount %d",
lookup_offset (j),
lookup_index (j),
cov->get_population ());
}
}
}
}
hb_set_destroy (cov);
}
/* Maps a reversed DP index to the position in map->lookups[]. */
inline unsigned int lookup_offset (unsigned int i)
{
assert (i < num_lookups);
return start_lookup + num_lookups - 1 - i;
}
inline unsigned int lookup_index (unsigned int i)
{
return map->lookups[table_index][lookup_offset (i)].index;
}
/* Unions lookup i's coverage into `coverage`. */
inline void add_coverage (unsigned int i, hb_set_t *coverage)
{
hb_ot_layout_lookup_get_coverage (face,
table_tags[table_index],
lookup_index (i),
coverage);
}
/* Parameters */
inline float single_lookup_cost (unsigned int lookup_index HB_UNUSED)
{
return 1.0;
}
/* Fixed overhead per batch, plus expected per-glyph work. */
inline float
lookup_batch_cost (hb_set_t *cov, unsigned int n_lookups)
{
return .1 + probability (cov) * n_lookups;
}
inline float
probability (hb_set_t *cov)
{
/* Biggest hack: assume uniform glyph probability. */
return cov->get_population () / (float) num_glyphs;
}
private:
hb_ot_map_t *map;
hb_face_t *face;
unsigned int table_index;
unsigned int start_lookup;
unsigned int num_lookups;
unsigned int num_glyphs;
};
/* Partitions each stage's lookups into batches (pushed onto `batches`)
 * using the dynamic program in optimize_lookups_context_t.
 * A lookup that is not in-place always forms a batch of its own, since
 * batching skips glyph ranges and that is only safe while the glyph
 * string length cannot change. */
void
hb_ot_map_t::optimize (hb_face_t *face)
{
DEBUG_MSG_FUNC (MAP, this, "face %p", face);
for (unsigned int table_index = 0; table_index < 2; table_index++)
{
DEBUG_MSG_FUNC (MAP, this, "table %c%c%c%c", HB_UNTAG (table_tags[table_index]));
unsigned int i = 0; /* cumulative lookup index across stages */
for (unsigned int stage_index = 0; stage_index < stages[table_index].len; stage_index++)
{
stage_map_t *stage = &stages[table_index][stage_index];
unsigned int num_lookups = stage->last_lookup - i;
DEBUG_MSG_FUNC (MAP, this, "stage %d: %d lookups", stage_index, num_lookups);
optimize_lookups_context_t c (this, face, table_index);
unsigned int start = i;
/* Bug fix: loop bound must be the stage's last_lookup (a cumulative
 * index, per stage_map_t), not num_lookups (a per-stage count).  The
 * old condition `i < num_lookups` terminated immediately for every
 * stage after the first, leaving those lookups out of any batch. */
for (; i < stage->last_lookup; i++)
if (!hb_ot_layout_lookup_is_inplace (face, table_tags[table_index],
lookups[table_index][i].index))
{
DEBUG_MSG_FUNC (MAP, this, "lookup %d (lookup-index %d) NOT inplace",
i, lookups[table_index][i].index);
/* Flush the preceding in-place run, then the non-in-place
 * lookup as a batch of one. */
c.add_lookups (start, i - start);
c.add_lookups (i, 1);
start = i + 1;
}
/* Flush the trailing in-place run of this stage. */
c.add_lookups (start, i - start);
}
}
}

View File

@ -245,9 +245,11 @@ arabic_fallback_plan_shape (arabic_fallback_plan_t *fallback_plan,
hb_buffer_t *buffer)
{
for (unsigned int i = 0; i < ARABIC_NUM_FALLBACK_FEATURES; i++)
if (fallback_plan->lookup_array[i]) {
if (fallback_plan->lookup_array[i])
{
OT::hb_apply_context_t c (0, font, buffer, fallback_plan->mask_array[i], true/*auto_zwj*/);
fallback_plan->lookup_array[i]->apply_string (&c, &fallback_plan->digest_array[i]);
/* XXX Currently broken because of clear_output / swap_buffers issues. */
fallback_plan->lookup_array[i]->apply_string (&c, 0, (unsigned int) -1, &fallback_plan->digest_array[i]);
}
}