/*
* Copyright © 2007,2008,2009,2010 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#define HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH
#include "hb-buffer-private.hh"
#include "hb-ot-layout-gdef-table.hh"
#include "hb-set-private.hh"
namespace OT {
#define TRACE_DISPATCH(this) \
hb_auto_trace_t<context_t::max_debug_depth, typename context_t::return_t> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"");
#ifndef HB_DEBUG_IS_INPLACE
#define HB_DEBUG_IS_INPLACE (HB_DEBUG+0)
#endif
#define TRACE_IS_INPLACE(this) \
hb_auto_trace_t<HB_DEBUG_IS_INPLACE, bool> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"");
struct hb_is_inplace_context_t
{
inline const char *get_name (void) { return "IS_INPLACE"; }
static const unsigned int max_debug_depth = HB_DEBUG_IS_INPLACE;
typedef bool return_t;
typedef return_t (*recurse_func_t) (hb_is_inplace_context_t *c, unsigned int lookup_index);
template <typename T>
inline return_t dispatch (const T &obj) { return obj.is_inplace (this); }
static return_t default_return_value (void) { return true; }
bool stop_sublookup_iteration (return_t r) const { return !r; }
return_t recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return default_return_value ();
nesting_level_left--;
bool ret = recurse_func (this, lookup_index);
nesting_level_left++;
return ret;
}
hb_face_t *face;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int debug_depth;
hb_is_inplace_context_t (hb_face_t *face_,
unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
face (face_),
recurse_func (NULL),
nesting_level_left (nesting_level_left_),
debug_depth (0) {}
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
#ifndef HB_DEBUG_CLOSURE
#define HB_DEBUG_CLOSURE (HB_DEBUG+0)
#endif
#define TRACE_CLOSURE(this) \
hb_auto_trace_t<HB_DEBUG_CLOSURE, hb_void_t> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"");
struct hb_closure_context_t
{
inline const char *get_name (void) { return "CLOSURE"; }
static const unsigned int max_debug_depth = HB_DEBUG_CLOSURE;
typedef hb_void_t return_t;
typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
template <typename T>
inline return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
static return_t default_return_value (void) { return HB_VOID; }
bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
return_t recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return default_return_value ();
nesting_level_left--;
recurse_func (this, lookup_index);
nesting_level_left++;
return HB_VOID;
}
hb_face_t *face;
hb_set_t *glyphs;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int debug_depth;
hb_closure_context_t (hb_face_t *face_,
hb_set_t *glyphs_,
unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
face (face_),
glyphs (glyphs_),
recurse_func (NULL),
nesting_level_left (nesting_level_left_),
debug_depth (0) {}
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
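/* Usage sketch (illustrative only, not part of the original source): a caller
 * owning a face and a glyph set would construct the context, install a
 * recurse callback that maps a lookup index back onto that lookup's closure,
 * and then dispatch the top-level lookup.  The callback name is hypothetical:
 *
 *   hb_closure_context_t c (face, glyphs);
 *   c.set_recurse_func (closure_recurse_cb);
 *   lookup.dispatch (&c);
 */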
#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif
#define TRACE_WOULD_APPLY(this) \
hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"%d glyphs", c->len);
struct hb_would_apply_context_t
{
inline const char *get_name (void) { return "WOULD_APPLY"; }
static const unsigned int max_debug_depth = HB_DEBUG_WOULD_APPLY;
typedef bool return_t;
template <typename T>
inline return_t dispatch (const T &obj) { return obj.would_apply (this); }
static return_t default_return_value (void) { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
hb_face_t *face;
const hb_codepoint_t *glyphs;
unsigned int len;
bool zero_context;
unsigned int debug_depth;
hb_would_apply_context_t (hb_face_t *face_,
const hb_codepoint_t *glyphs_,
unsigned int len_,
bool zero_context_) :
face (face_),
glyphs (glyphs_),
len (len_),
zero_context (zero_context_),
debug_depth (0) {}
};
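/* Usage sketch (illustrative only): a would_apply query asks whether a lookup
 * could match a fixed glyph sequence, without a buffer.  zero_context requests
 * that only rules needing no surrounding context be considered.  The glyph ids
 * below are placeholders:
 *
 *   hb_codepoint_t seq[2] = { first_gid, second_gid };
 *   hb_would_apply_context_t c (face, seq, 2, true);
 *   bool possible = subtable.would_apply (&c);
 */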
#ifndef HB_DEBUG_COLLECT_GLYPHS
#define HB_DEBUG_COLLECT_GLYPHS (HB_DEBUG+0)
#endif
#define TRACE_COLLECT_GLYPHS(this) \
hb_auto_trace_t<HB_DEBUG_COLLECT_GLYPHS, hb_void_t> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"");
struct hb_collect_glyphs_context_t
{
inline const char *get_name (void) { return "COLLECT_GLYPHS"; }
static const unsigned int max_debug_depth = HB_DEBUG_COLLECT_GLYPHS;
typedef hb_void_t return_t;
typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
template <typename T>
inline return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
static return_t default_return_value (void) { return HB_VOID; }
bool stop_sublookup_iteration (return_t r HB_UNUSED) const { return false; }
return_t recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return default_return_value ();
/* Note that GPOS sets recurse_func to NULL already, so it doesn't get
* past the previous check. For GSUB, we only want to collect the output
* glyphs in the recursion. If output is not requested, we can go home now.
*
* Note further, that the above is not exactly correct. A recursed lookup
* is allowed to match input that is not matched in the context, but that's
* not how most fonts are built. It's possible to relax that and recurse
* with all sets here if it proves to be an issue.
*/
if (output == hb_set_get_empty ())
return HB_VOID;
hb_set_t *old_before = before;
hb_set_t *old_input = input;
hb_set_t *old_after = after;
before = input = after = hb_set_get_empty ();
nesting_level_left--;
recurse_func (this, lookup_index);
nesting_level_left++;
before = old_before;
input = old_input;
after = old_after;
return HB_VOID;
}
hb_face_t *face;
hb_set_t *before;
hb_set_t *input;
hb_set_t *after;
hb_set_t *output;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int debug_depth;
hb_collect_glyphs_context_t (hb_face_t *face_,
hb_set_t *glyphs_before, /* OUT. May be NULL */
hb_set_t *glyphs_input, /* OUT. May be NULL */
hb_set_t *glyphs_after, /* OUT. May be NULL */
hb_set_t *glyphs_output, /* OUT. May be NULL */
unsigned int nesting_level_left_ = MAX_NESTING_LEVEL) :
face (face_),
before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
recurse_func (NULL),
nesting_level_left (nesting_level_left_),
debug_depth (0) {}
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
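/* Usage sketch (illustrative only): each of the four result sets is optional;
 * passing NULL routes the corresponding additions into the shared empty set.
 * The callback name is hypothetical:
 *
 *   hb_set_t *input = hb_set_create ();
 *   hb_collect_glyphs_context_t c (face, NULL, input, NULL, NULL);
 *   c.set_recurse_func (collect_recurse_cb);
 *   lookup.dispatch (&c);   // adds the lookup's input glyphs to 'input'
 */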
struct hb_get_coverage_context_t
{
inline const char *get_name (void) { return "GET_COVERAGE"; }
static const unsigned int max_debug_depth = 0;
typedef const Coverage &return_t;
template <typename T>
inline return_t dispatch (const T &obj) { return obj.get_coverage (); }
static return_t default_return_value (void) { return Null(Coverage); }
hb_get_coverage_context_t (void) :
debug_depth (0) {}
unsigned int debug_depth;
};
#ifndef HB_DEBUG_APPLY
#define HB_DEBUG_APPLY (HB_DEBUG+0)
#endif
#define TRACE_APPLY(this) \
hb_auto_trace_t<HB_DEBUG_APPLY, bool> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"idx %d codepoint %u", c->buffer->idx, c->buffer->cur().codepoint);
struct hb_apply_context_t
{
inline const char *get_name (void) { return "APPLY"; }
static const unsigned int max_debug_depth = HB_DEBUG_APPLY;
typedef bool return_t;
typedef return_t (*recurse_func_t) (hb_apply_context_t *c, unsigned int lookup_index);
template <typename T>
inline return_t dispatch (const T &obj) { return obj.apply (this); }
static return_t default_return_value (void) { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
return_t recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return default_return_value ();
nesting_level_left--;
bool ret = recurse_func (this, lookup_index);
nesting_level_left++;
return ret;
}
unsigned int table_index; /* GSUB/GPOS */
hb_font_t *font;
hb_face_t *face;
hb_buffer_t *buffer;
hb_direction_t direction;
hb_mask_t lookup_mask;
bool auto_zwj;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int lookup_props;
const GDEF &gdef;
bool has_glyph_classes;
unsigned int debug_depth;
hb_apply_context_t (unsigned int table_index_,
hb_font_t *font_,
hb_buffer_t *buffer_) :
table_index (table_index_),
font (font_), face (font->face), buffer (buffer_),
direction (buffer_->props.direction),
lookup_mask (1),
auto_zwj (true),
recurse_func (NULL),
nesting_level_left (MAX_NESTING_LEVEL),
lookup_props (0),
gdef (*hb_ot_layout_from_face (face)->gdef),
has_glyph_classes (gdef.has_glyph_classes ()),
debug_depth (0) {}
inline void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; }
inline void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; }
inline void set_recurse_func (recurse_func_t func) { recurse_func = func; }
inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
inline void set_lookup (const Lookup &l) { lookup_props = l.get_props (); }
struct matcher_t
{
inline matcher_t (void) :
lookup_props (0),
ignore_zwnj (false),
ignore_zwj (false),
mask (-1),
#define arg1(arg) (arg) /* Remove the macro to see why it's needed! */
syllable arg1(0),
#undef arg1
match_func (NULL),
match_data (NULL) {};
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
inline void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
inline void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
inline void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
inline void set_mask (hb_mask_t mask_) { mask = mask_; }
inline void set_syllable (uint8_t syllable_) { syllable = syllable_; }
inline void set_match_func (match_func_t match_func_,
const void *match_data_)
{ match_func = match_func_; match_data = match_data_; }
enum may_match_t {
MATCH_NO,
MATCH_YES,
MATCH_MAYBE
};
inline may_match_t may_match (const hb_glyph_info_t &info,
const USHORT *glyph_data) const
{
if (!(info.mask & mask) ||
(syllable && syllable != info.syllable ()))
return MATCH_NO;
if (match_func)
return match_func (info.codepoint, *glyph_data, match_data) ? MATCH_YES : MATCH_NO;
return MATCH_MAYBE;
}
enum may_skip_t {
SKIP_NO,
SKIP_YES,
SKIP_MAYBE
};
inline may_skip_t
may_skip (const hb_apply_context_t *c,
const hb_glyph_info_t &info) const
{
unsigned int property;
property = _hb_glyph_info_get_glyph_props (&info);
if (!c->match_properties (info.codepoint, property, lookup_props))
return SKIP_YES;
if (unlikely (_hb_glyph_info_is_default_ignorable (&info) &&
(ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
(ignore_zwj || !_hb_glyph_info_is_zwj (&info)) &&
!_hb_glyph_info_is_ligated (&info)))
return SKIP_MAYBE;
return SKIP_NO;
}
protected:
unsigned int lookup_props;
bool ignore_zwnj;
bool ignore_zwj;
hb_mask_t mask;
uint8_t syllable;
match_func_t match_func;
const void *match_data;
};
struct skipping_forward_iterator_t
{
inline skipping_forward_iterator_t (hb_apply_context_t *c_,
unsigned int start_index_,
unsigned int num_items_,
bool context_match = false) :
idx (start_index_),
c (c_),
match_glyph_data (NULL),
num_items (num_items_),
end (c->buffer->len)
{
matcher.set_lookup_props (c->lookup_props);
/* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
matcher.set_ignore_zwnj (context_match || c->table_index == 1);
/* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
if (!context_match)
matcher.set_mask (c->lookup_mask);
matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
}
inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
inline void set_match_func (matcher_t::match_func_t match_func,
const void *match_data,
const USHORT glyph_data[])
{
matcher.set_match_func (match_func, match_data);
match_glyph_data = glyph_data;
}
inline bool has_no_chance (void) const { return unlikely (num_items && idx + num_items >= end); }
inline void reject (void) { num_items++; match_glyph_data--; }
inline bool next (void)
{
assert (num_items > 0);
while (!has_no_chance ())
{
idx++;
const hb_glyph_info_t &info = c->buffer->info[idx];
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
if (unlikely (skip == matcher_t::SKIP_YES))
continue;
matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
if (match == matcher_t::MATCH_YES ||
(match == matcher_t::MATCH_MAYBE &&
skip == matcher_t::SKIP_NO))
{
num_items--;
match_glyph_data++;
return true;
}
if (skip == matcher_t::SKIP_NO)
return false;
}
return false;
}
unsigned int idx;
protected:
hb_apply_context_t *c;
matcher_t matcher;
const USHORT *match_glyph_data;
unsigned int num_items;
unsigned int end;
};
struct skipping_backward_iterator_t
{
inline skipping_backward_iterator_t (hb_apply_context_t *c_,
unsigned int start_index_,
unsigned int num_items_,
bool context_match = false) :
idx (start_index_),
c (c_),
match_glyph_data (NULL),
num_items (num_items_)
{
matcher.set_lookup_props (c->lookup_props);
/* Ignore ZWNJ if we are matching GSUB context, or matching GPOS. */
matcher.set_ignore_zwnj (context_match || c->table_index == 1);
/* Ignore ZWJ if we are matching GSUB context, or matching GPOS, or if asked to. */
matcher.set_ignore_zwj (context_match || c->table_index == 1 || c->auto_zwj);
if (!context_match)
matcher.set_mask (c->lookup_mask);
matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
}
inline void set_lookup_props (unsigned int lookup_props) { matcher.set_lookup_props (lookup_props); }
inline void set_syllable (unsigned int syllable) { matcher.set_syllable (syllable); }
inline void set_match_func (matcher_t::match_func_t match_func,
const void *match_data,
const USHORT glyph_data[])
{
matcher.set_match_func (match_func, match_data);
match_glyph_data = glyph_data;
}
inline bool has_no_chance (void) const { return unlikely (idx < num_items); }
inline void reject (void) { num_items++; }
inline bool prev (void)
{
assert (num_items > 0);
while (!has_no_chance ())
{
idx--;
const hb_glyph_info_t &info = c->buffer->out_info[idx];
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
if (unlikely (skip == matcher_t::SKIP_YES))
continue;
matcher_t::may_match_t match = matcher.may_match (info, match_glyph_data);
if (match == matcher_t::MATCH_YES ||
(match == matcher_t::MATCH_MAYBE &&
skip == matcher_t::SKIP_NO))
{
num_items--;
match_glyph_data++;
return true;
}
if (skip == matcher_t::SKIP_NO)
return false;
}
return false;
}
unsigned int idx;
protected:
hb_apply_context_t *c;
matcher_t matcher;
const USHORT *match_glyph_data;
unsigned int num_items;
};
inline bool
match_properties_mark (hb_codepoint_t glyph,
unsigned int glyph_props,
unsigned int lookup_props) const
{
/* If using mark filtering sets, the high short of
* lookup_props has the set index.
*/
if (lookup_props & LookupFlag::UseMarkFilteringSet)
return gdef.mark_set_covers (lookup_props >> 16, glyph);
/* The second byte of lookup_props has the meaning
* "ignore marks of attachment type different than
* the attachment type specified."
*/
if (lookup_props & LookupFlag::MarkAttachmentType)
return (lookup_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
return true;
}
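/* Illustrative note (not in the original source): lookup_props carries the
 * LookupFlag word in its low 16 bits and, when UseMarkFilteringSet is set,
 * the mark filtering set index in its high 16 bits; e.g. a hypothetical
 * value of 0x00030010 selects mark filtering set 3. */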
inline bool
match_properties (hb_codepoint_t glyph,
unsigned int glyph_props,
unsigned int lookup_props) const
{
/* Not covered, if, for example, glyph class is ligature and
* lookup_props includes LookupFlags::IgnoreLigatures
*/
if (glyph_props & lookup_props & LookupFlag::IgnoreFlags)
return false;
if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
return match_properties_mark (glyph, glyph_props, lookup_props);
return true;
}
inline bool
check_glyph_property (hb_glyph_info_t *info,
unsigned int lookup_props) const
{
unsigned int property;
property = _hb_glyph_info_get_glyph_props (info);
return match_properties (info->codepoint, property, lookup_props);
}
inline void _set_glyph_props (hb_codepoint_t glyph_index,
unsigned int class_guess = 0,
bool ligature = false) const
{
unsigned int add_in = _hb_glyph_info_get_glyph_props (&buffer->cur()) &
HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
add_in |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
if (ligature)
add_in |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
if (likely (has_glyph_classes))
_hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | gdef.get_glyph_props (glyph_index));
else if (class_guess)
_hb_glyph_info_set_glyph_props (&buffer->cur(), add_in | class_guess);
}
inline void replace_glyph (hb_codepoint_t glyph_index) const
{
_set_glyph_props (glyph_index);
buffer->replace_glyph (glyph_index);
}
inline void replace_glyph_inplace (hb_codepoint_t glyph_index) const
{
_set_glyph_props (glyph_index);
buffer->cur().codepoint = glyph_index;
}
inline void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
unsigned int class_guess) const
{
_set_glyph_props (glyph_index, class_guess, true);
buffer->replace_glyph (glyph_index);
}
inline void output_glyph (hb_codepoint_t glyph_index,
unsigned int class_guess) const
{
_set_glyph_props (glyph_index, class_guess);
buffer->output_glyph (glyph_index);
}
};
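/* Usage sketch (illustrative only): during shaping, an apply context is set up
 * once per table and re-tuned for each lookup before its subtables are
 * dispatched.  Names other than the members above are placeholders:
 *
 *   hb_apply_context_t c (0, font, buffer);   // 0 = GSUB, 1 = GPOS
 *   c.set_recurse_func (apply_recurse_cb);
 *   c.set_lookup_mask (mask);
 *   c.set_auto_zwj (auto_zwj);
 *   c.set_lookup (lookup);
 *   bool applied = lookup.dispatch (&c);
 */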
typedef bool (*intersects_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, const USHORT &value, const void *data);
typedef bool (*match_func_t) (hb_codepoint_t glyph_id, const USHORT &value, const void *data);
struct ContextClosureFuncs
{
intersects_func_t intersects;
};
struct ContextCollectGlyphsFuncs
{
collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
match_func_t match;
};
static inline bool intersects_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
return glyphs->has (value);
}
static inline bool intersects_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
return class_def.intersects_class (glyphs, value);
}
static inline bool intersects_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
return (data+coverage).intersects (glyphs);
}
static inline bool intersects_array (hb_closure_context_t *c,
unsigned int count,
const USHORT values[],
intersects_func_t intersects_func,
const void *intersects_data)
{
for (unsigned int i = 0; i < count; i++)
if (likely (!intersects_func (c->glyphs, values[i], intersects_data)))
return false;
return true;
}
static inline void collect_glyph (hb_set_t *glyphs, const USHORT &value, const void *data HB_UNUSED)
{
glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, const USHORT &value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
class_def.add_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, const USHORT &value, const void *data)
{
const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
(data+coverage).add_coverage (glyphs);
}
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
hb_set_t *glyphs,
unsigned int count,
const USHORT values[],
collect_glyphs_func_t collect_func,
const void *collect_data)
{
for (unsigned int i = 0; i < count; i++)
collect_func (glyphs, values[i], collect_data);
}
static inline bool match_glyph (hb_codepoint_t glyph_id, const USHORT &value, const void *data HB_UNUSED)
{
return glyph_id == value;
}
static inline bool match_class (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
return class_def.get_class (glyph_id) == value;
}
static inline bool match_coverage (hb_codepoint_t glyph_id, const USHORT &value, const void *data)
{
const OffsetTo<Coverage> &coverage = (const OffsetTo<Coverage>&)value;
return (data+coverage).get_coverage (glyph_id) != NOT_COVERED;
}
static inline bool would_match_input (hb_would_apply_context_t *c,
unsigned int count, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
match_func_t match_func,
const void *match_data)
{
if (count != c->len)
return false;
for (unsigned int i = 1; i < count; i++)
if (likely (!match_func (c->glyphs[i], input[i - 1], match_data)))
return false;
return true;
}
static inline bool match_input (hb_apply_context_t *c,
unsigned int count, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
match_func_t match_func,
const void *match_data,
unsigned int *end_offset,
unsigned int match_positions[MAX_CONTEXT_LENGTH],
bool *p_is_mark_ligature = NULL,
unsigned int *p_total_component_count = NULL)
{
TRACE_APPLY (NULL);
if (unlikely (count > MAX_CONTEXT_LENGTH)) return TRACE_RETURN (false);
hb_buffer_t *buffer = c->buffer;
hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, buffer->idx, count - 1);
skippy_iter.set_match_func (match_func, match_data, input);
if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
/*
* This is perhaps the trickiest part of OpenType... Remarks:
*
* - If all components of the ligature were marks, we call this a mark ligature.
*
* - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
* it as a ligature glyph.
*
* - Ligatures cannot be formed across glyphs attached to different components
* of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
* LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
* However, it would be wrong to ligate that SHADDA,FATHA sequence.
* There is an exception to this: If a ligature tries ligating with marks that
* belong to it itself, go ahead, assuming that the font designer knows what
* they are doing (otherwise it can break Indic stuff when a matra wants to
* ligate with a conjunct...)
*/
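/* Worked example (illustrative): in the LAM,SHADDA,LAM,FATHA,HEH case above,
 * once LAM,LAM,HEH have ligated, SHADDA carries lig_comp 1 and FATHA carries
 * lig_comp 2 of the same lig_id; the first_lig_id / first_lig_comp checks
 * below then reject an attempt to ligate SHADDA,FATHA across those components. */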
bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->cur());
unsigned int total_component_count = 0;
total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
match_positions[0] = buffer->idx;
for (unsigned int i = 1; i < count; i++)
{
if (!skippy_iter.next ()) return TRACE_RETURN (false);
match_positions[i] = skippy_iter.idx;
unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
if (first_lig_id && first_lig_comp) {
/* If first component was attached to a previous ligature component,
* all subsequent components should be attached to the same ligature
* component, otherwise we shouldn't ligate them. */
if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
return TRACE_RETURN (false);
} else {
/* If first component was NOT attached to a previous ligature component,
* all subsequent components should also NOT be attached to any ligature
* component, unless they are attached to the first component itself! */
if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
return TRACE_RETURN (false);
}
is_mark_ligature = is_mark_ligature && _hb_glyph_info_is_mark (&buffer->info[skippy_iter.idx]);
total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
}
*end_offset = skippy_iter.idx - buffer->idx + 1;
if (p_is_mark_ligature)
*p_is_mark_ligature = is_mark_ligature;
if (p_total_component_count)
*p_total_component_count = total_component_count;
return TRACE_RETURN (true);
}
static inline void ligate_input (hb_apply_context_t *c,
unsigned int count, /* Including the first glyph */
unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
unsigned int match_length,
hb_codepoint_t lig_glyph,
bool is_mark_ligature,
unsigned int total_component_count)
{
TRACE_APPLY (NULL);
hb_buffer_t *buffer = c->buffer;
buffer->merge_clusters (buffer->idx, buffer->idx + match_length);
/*
* - If it *is* a mark ligature, we don't allocate a new ligature id, and leave
* the ligature to keep its old ligature id. This will allow it to attach to
* a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
* and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
* ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
* later, we don't want them to lose their ligature id/component, otherwise
* GPOS will fail to correctly position the mark ligature on top of the
* LAM,LAM,HEH ligature. See:
* https://bugzilla.gnome.org/show_bug.cgi?id=676343
*
* - If a ligature is formed of components, some of which are also ligatures
* themselves, and those ligature components had marks attached to *their*
* components, we have to attach the marks to the new ligature component
* positions! Now *that*'s tricky! And these marks may be following the
* last component of the whole sequence, so we should loop forward looking
* for them and update them.
*
* Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
* 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
* id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
* form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
* the new ligature with a component value of 2.
*
* This in fact happened to a font... See:
* https://bugzilla.gnome.org/show_bug.cgi?id=437633
*/
unsigned int klass = is_mark_ligature ? 0 : HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE;
unsigned int lig_id = is_mark_ligature ? 0 : _hb_allocate_lig_id (buffer);
unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
unsigned int components_so_far = last_num_components;
if (!is_mark_ligature)
{
_hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
_hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
}
c->replace_glyph_with_ligature (lig_glyph, klass);
for (unsigned int i = 1; i < count; i++)
{
while (buffer->idx < match_positions[i])
{
if (!is_mark_ligature) {
unsigned int new_lig_comp = components_so_far - last_num_components +
MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->cur()), 1u), last_num_components);
_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
}
buffer->next_glyph ();
}
last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
components_so_far += last_num_components;
/* Skip the base glyph */
buffer->idx++;
}
if (!is_mark_ligature && last_lig_id) {
/* Re-adjust components for any marks following. */
for (unsigned int i = buffer->idx; i < buffer->len; i++) {
if (last_lig_id == _hb_glyph_info_get_lig_id (&buffer->info[i])) {
unsigned int new_lig_comp = components_so_far - last_num_components +
MIN (MAX (_hb_glyph_info_get_lig_comp (&buffer->info[i]), 1u), last_num_components);
_hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
} else
break;
}
}
}
static inline bool match_backtrack (hb_apply_context_t *c,
unsigned int count,
const USHORT backtrack[],
match_func_t match_func,
const void *match_data)
{
TRACE_APPLY (NULL);
hb_apply_context_t::skipping_backward_iterator_t skippy_iter (c, c->buffer->backtrack_len (), count, true);
skippy_iter.set_match_func (match_func, match_data, backtrack);
if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
for (unsigned int i = 0; i < count; i++)
if (!skippy_iter.prev ())
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
static inline bool match_lookahead (hb_apply_context_t *c,
unsigned int count,
const USHORT lookahead[],
match_func_t match_func,
const void *match_data,
unsigned int offset)
{
TRACE_APPLY (NULL);
hb_apply_context_t::skipping_forward_iterator_t skippy_iter (c, c->buffer->idx + offset - 1, count, true);
skippy_iter.set_match_func (match_func, match_data, lookahead);
if (skippy_iter.has_no_chance ()) return TRACE_RETURN (false);
for (unsigned int i = 0; i < count; i++)
if (!skippy_iter.next ())
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
struct LookupRecord
{
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
}
USHORT sequenceIndex; /* Index into current glyph
* sequence--first glyph = 0 */
USHORT lookupListIndex; /* Lookup to apply to that
* position--zero-based */
public:
DEFINE_SIZE_STATIC (4);
};
template <typename context_t>
static inline void recurse_lookups (context_t *c,
unsigned int lookupCount,
const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
for (unsigned int i = 0; i < lookupCount; i++)
c->recurse (lookupRecord[i].lookupListIndex);
}
static inline bool apply_lookup (hb_apply_context_t *c,
unsigned int count, /* Including the first glyph */
unsigned int match_positions[MAX_CONTEXT_LENGTH], /* Including the first glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
unsigned int match_length)
{
TRACE_APPLY (NULL);
hb_buffer_t *buffer = c->buffer;
unsigned int end;
/* All positions are distance from beginning of *output* buffer.
* Adjust. */
{
unsigned int bl = buffer->backtrack_len ();
end = bl + match_length;
int delta = bl - buffer->idx;
/* Convert positions to new indexing. */
for (unsigned int j = 0; j < count; j++)
match_positions[j] += delta;
}
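  /* Illustrative: with backtrack_len () == 3 and buffer->idx == 5, delta is -2,
   * so a match position recorded as 5 (the current glyph) becomes 3, i.e. the
   * slot it will occupy once copied to the output buffer. */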
for (unsigned int i = 0; i < lookupCount; i++)
{
unsigned int idx = lookupRecord[i].sequenceIndex;
if (idx >= count)
continue;
buffer->move_to (match_positions[idx]);
unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
if (!c->recurse (lookupRecord[i].lookupListIndex))
continue;
unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
int delta = new_len - orig_len;
if (!delta)
continue;
/* Recursed lookup changed buffer len. Adjust. */
/* end can't go back past the current match position. */
end = MAX ((int) match_positions[idx] + 1, int (end) + delta);
unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
if (delta > 0)
{
if (unlikely (delta + count > MAX_CONTEXT_LENGTH))
break;
}
else
{
/* NOTE: delta is negative. */
delta = MAX (delta, (int) next - (int) count);
next -= delta;
}
/* Shift! */
memmove (match_positions + next + delta, match_positions + next,
(count - next) * sizeof (match_positions[0]));
next += delta;
count += delta;
/* Fill in new entries. */
for (unsigned int j = idx + 1; j < next; j++)
match_positions[j] = match_positions[j - 1] + 1;
/* And fixup the rest. */
for (; next < count; next++)
match_positions[next] += delta;
}
buffer->move_to (end);
return TRACE_RETURN (true);
}
/* Contextual lookups */
struct ContextClosureLookupContext
{
ContextClosureFuncs funcs;
const void *intersects_data;
};
struct ContextCollectGlyphsLookupContext
{
ContextCollectGlyphsFuncs funcs;
const void *collect_data;
};
struct ContextApplyLookupContext
{
ContextApplyFuncs funcs;
const void *match_data;
};
static inline void context_closure_lookup (hb_closure_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ContextClosureLookupContext &lookup_context)
{
if (intersects_array (c,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects, lookup_context.intersects_data))
recurse_lookups (c,
lookupCount, lookupRecord);
}
static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ContextCollectGlyphsLookupContext &lookup_context)
{
collect_array (c, c->input,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.collect, lookup_context.collect_data);
recurse_lookups (c,
lookupCount, lookupRecord);
}
static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
ContextApplyLookupContext &lookup_context)
{
return would_match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data);
}
static inline bool context_apply_lookup (hb_apply_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ContextApplyLookupContext &lookup_context)
{
unsigned int match_length = 0;
unsigned int match_positions[MAX_CONTEXT_LENGTH];
return match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data,
&match_length, match_positions)
&& apply_lookup (c,
inputCount, match_positions,
lookupCount, lookupRecord,
match_length);
}
struct Rule
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
unsigned int count = lookupCount;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookupRecord[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
context_closure_lookup (c,
inputCount, input,
lookupCount, lookupRecord,
lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
{
TRACE_COLLECT_GLYPHS (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
context_collect_glyphs_lookup (c,
inputCount, input,
lookupCount, lookupRecord,
lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
return TRACE_RETURN (context_would_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
}
inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
2009-05-20 05:58:54 +02:00
{
TRACE_APPLY (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (input, input[0].static_size * (inputCount ? inputCount - 1 : 0));
return TRACE_RETURN (context_apply_lookup (c, inputCount, input, lookupCount, lookupRecord, lookup_context));
}
public:
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return inputCount.sanitize (c)
&& lookupCount.sanitize (c)
&& c->check_range (input,
input[0].static_size * inputCount
+ lookupRecordX[0].static_size * lookupCount);
}
protected:
USHORT inputCount; /* Total number of glyphs in input
* glyph sequence--includes the first
* glyph */
USHORT lookupCount; /* Number of LookupRecords */
USHORT input[VAR]; /* Array of match inputs--start with
* second glyph */
LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_ARRAY2 (4, input, lookupRecordX);
};
struct RuleSet
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if (!(this+rule[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).closure (c, lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c, ContextCollectGlyphsLookupContext &lookup_context) const
{
TRACE_COLLECT_GLYPHS (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).would_apply (c, lookup_context))
return TRACE_RETURN (true);
}
return TRACE_RETURN (false);
}
inline bool apply (hb_apply_context_t *c, ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).apply (c, lookup_context))
return TRACE_RETURN (true);
}
return TRACE_RETURN (false);
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (rule.sanitize (c, this));
}
protected:
OffsetArrayOf<Rule>
rule; /* Array of Rule tables
* ordered by preference */
public:
DEFINE_SIZE_ARRAY (2, rule);
};
struct ContextFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
const Coverage &cov = (this+coverage);
struct ContextClosureLookupContext lookup_context = {
{intersects_glyph},
NULL
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (cov.intersects_coverage (c->glyphs, i)) {
const RuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
(this+coverage).add_coverage (c->input);
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_glyph},
NULL
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
NULL
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
return this+coverage;
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED))
return TRACE_RETURN (false);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
NULL
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
USHORT format; /* Format identifier--format = 1 */
OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
OffsetArrayOf<RuleSet>
ruleSet; /* Array of RuleSet tables
* ordered by Coverage Index */
public:
DEFINE_SIZE_ARRAY (6, ruleSet);
};
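/* Note: ContextFormat1 keys rules off specific first glyphs.  apply() maps the
 * current glyph to a Coverage index and then tries the Rule tables in the
 * corresponding RuleSet, in order, until one applies. */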
struct ContextFormat2
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
if (!(this+coverage).intersects (c->glyphs))
return;
const ClassDef &class_def = this+classDef;
struct ContextClosureLookupContext lookup_context = {
{intersects_class},
&class_def
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (class_def.intersects_class (c->glyphs, i)) {
const RuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
(this+coverage).add_coverage (c->input);
const ClassDef &class_def = this+classDef;
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_class},
&class_def
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ClassDef &class_def = this+classDef;
unsigned int index = class_def.get_class (c->glyphs[0]);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_class},
&class_def
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
return this+coverage;
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
const ClassDef &class_def = this+classDef;
index = class_def.get_class (c->buffer->cur().codepoint);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_class},
&class_def
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
USHORT format; /* Format identifier--format = 2 */
OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
OffsetTo<ClassDef>
classDef; /* Offset to glyph ClassDef table--from
* beginning of table */
OffsetArrayOf<RuleSet>
ruleSet; /* Array of RuleSet tables
* ordered by class */
public:
DEFINE_SIZE_ARRAY (8, ruleSet);
};
struct ContextFormat3
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
unsigned int count = lookupCount;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookupRecord[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
if (!(this+coverage[0]).intersects (c->glyphs))
return;
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
struct ContextClosureLookupContext lookup_context = {
{intersects_coverage},
this
};
context_closure_lookup (c,
glyphCount, (const USHORT *) (coverage + 1),
lookupCount, lookupRecord,
lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
(this+coverage[0]).add_coverage (c->input);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_coverage},
this
};
context_collect_glyphs_lookup (c,
glyphCount, (const USHORT *) (coverage + 1),
lookupCount, lookupRecord,
lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return TRACE_RETURN (context_would_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
return this+coverage[0];
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
const LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * glyphCount);
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return TRACE_RETURN (context_apply_lookup (c, glyphCount, (const USHORT *) (coverage + 1), lookupCount, lookupRecord, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return TRACE_RETURN (false);
unsigned int count = glyphCount;
if (!c->check_array (coverage, coverage[0].static_size, count)) return TRACE_RETURN (false);
for (unsigned int i = 0; i < count; i++)
if (!coverage[i].sanitize (c, this)) return TRACE_RETURN (false);
LookupRecord *lookupRecord = &StructAtOffset<LookupRecord> (coverage, coverage[0].static_size * count);
return TRACE_RETURN (c->check_array (lookupRecord, lookupRecord[0].static_size, lookupCount));
}
protected:
USHORT format; /* Format identifier--format = 3 */
USHORT glyphCount; /* Number of glyphs in the input glyph
* sequence */
USHORT lookupCount; /* Number of LookupRecords */
OffsetTo<Coverage>
coverage[VAR]; /* Array of offsets to Coverage
* table in glyph sequence order */
LookupRecord lookupRecordX[VAR]; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_ARRAY2 (6, coverage, lookupRecordX);
};
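/* Note: ContextFormat3 stores a single rule directly in the subtable: one
 * Coverage offset per input position followed by the LookupRecords.  The first
 * coverage doubles as the subtable's coverage (see get_coverage above); the
 * remaining offsets are handed to the context_*_lookup helpers as the input
 * array, with inputCount including the first (unmatched) glyph. */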
struct Context
{
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this);
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
case 3: return TRACE_RETURN (c->dispatch (u.format3));
default:return TRACE_RETURN (c->default_return_value ());
}
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
case 3: return TRACE_RETURN (u.format3.sanitize (c));
default:return TRACE_RETURN (true);
}
}
protected:
union {
USHORT format; /* Format identifier */
ContextFormat1 format1;
ContextFormat2 format2;
ContextFormat3 format3;
} u;
};
/* Chaining Contextual lookups */
struct ChainContextClosureLookupContext
{
ContextClosureFuncs funcs;
const void *intersects_data[3];
};
struct ChainContextCollectGlyphsLookupContext
{
ContextCollectGlyphsFuncs funcs;
const void *collect_data[3];
};
struct ChainContextApplyLookupContext
{
ContextApplyFuncs funcs;
const void *match_data[3];
};
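/* Note: in the three chaining lookup-context structs above, the per-sequence
 * data slots are indexed consistently: [0] backtrack, [1] input, [2] lookahead,
 * matching how the chain_context_*_lookup helpers below use them. */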
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
unsigned int backtrackCount,
const USHORT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const USHORT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ChainContextClosureLookupContext &lookup_context)
{
if (intersects_array (c,
backtrackCount, backtrack,
lookup_context.funcs.intersects, lookup_context.intersects_data[0])
&& intersects_array (c,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects, lookup_context.intersects_data[1])
&& intersects_array (c,
lookaheadCount, lookahead,
lookup_context.funcs.intersects, lookup_context.intersects_data[2]))
recurse_lookups (c,
lookupCount, lookupRecord);
}
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
unsigned int backtrackCount,
const USHORT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const USHORT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ChainContextCollectGlyphsLookupContext &lookup_context)
{
collect_array (c, c->before,
backtrackCount, backtrack,
lookup_context.funcs.collect, lookup_context.collect_data[0]);
collect_array (c, c->input,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.collect, lookup_context.collect_data[1]);
collect_array (c, c->after,
lookaheadCount, lookahead,
lookup_context.funcs.collect, lookup_context.collect_data[2]);
recurse_lookups (c,
lookupCount, lookupRecord);
}
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
unsigned int backtrackCount,
const USHORT backtrack[] HB_UNUSED,
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const USHORT lookahead[] HB_UNUSED,
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
ChainContextApplyLookupContext &lookup_context)
{
return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
&& would_match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data[1]);
}
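/* Note: would_apply only tests the input sequence; when the caller requests
 * zero_context, any rule that also needs backtrack or lookahead context is
 * rejected outright. */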
static inline bool chain_context_apply_lookup (hb_apply_context_t *c,
unsigned int backtrackCount,
const USHORT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const USHORT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const USHORT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ChainContextApplyLookupContext &lookup_context)
{
unsigned int match_length = 0;
unsigned int match_positions[MAX_CONTEXT_LENGTH];
return match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data[1],
&match_length, match_positions)
&& match_backtrack (c,
backtrackCount, backtrack,
lookup_context.funcs.match, lookup_context.match_data[0])
&& match_lookahead (c,
lookaheadCount, lookahead,
lookup_context.funcs.match, lookup_context.match_data[2],
match_length)
&& apply_lookup (c,
inputCount, match_positions,
lookupCount, lookupRecord,
match_length);
}
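/* Note on matching order in chain_context_apply_lookup: the input sequence is
 * matched first (recording match_length and the matched positions), then the
 * backtrack sequence before it and the lookahead sequence after it are
 * checked; only if all three succeed are the LookupRecords applied at the
 * recorded positions. */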
struct ChainRule
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
unsigned int count = lookup.len;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookup.array[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
chain_context_closure_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array,
lookup.len, lookup.array,
lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
{
TRACE_COLLECT_GLYPHS (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
chain_context_collect_glyphs_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array,
lookup.len, lookup.array,
lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (chain_context_would_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
}
inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
const ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (chain_context_apply_lookup (c,
backtrack.len, backtrack.array,
input.len, input.array,
lookahead.len, lookahead.array, lookup.len,
lookup.array, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c)) return TRACE_RETURN (false);
HeadlessArrayOf<USHORT> &input = StructAfter<HeadlessArrayOf<USHORT> > (backtrack);
if (!input.sanitize (c)) return TRACE_RETURN (false);
ArrayOf<USHORT> &lookahead = StructAfter<ArrayOf<USHORT> > (input);
if (!lookahead.sanitize (c)) return TRACE_RETURN (false);
ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (lookup.sanitize (c));
}
protected:
ArrayOf<USHORT>
backtrack; /* Array of backtracking values
* (to be matched before the input
* sequence) */
HeadlessArrayOf<USHORT>
inputX; /* Array of input values (start with
* second glyph) */
ArrayOf<USHORT>
lookaheadX; /* Array of lookahead values (to be
* matched after the input sequence) */
ArrayOf<LookupRecord>
lookupX; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_MIN (8);
};
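/* Note: ChainRule serializes its four arrays back to back: backtrack, inputX,
 * lookaheadX, lookupX.  Only `backtrack` sits at a fixed offset; the others
 * are located at run time with StructAfter<>, exactly as the methods above do. */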
struct ChainRuleSet
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if (!(this+rule[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
{
TRACE_CLOSURE (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).closure (c, lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
{
TRACE_COLLECT_GLYPHS (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).would_apply (c, lookup_context))
return TRACE_RETURN (true);
return TRACE_RETURN (false);
}
inline bool apply (hb_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).apply (c, lookup_context))
return TRACE_RETURN (true);
return TRACE_RETURN (false);
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (rule.sanitize (c, this));
}
protected:
OffsetArrayOf<ChainRule>
rule; /* Array of ChainRule tables
* ordered by preference */
public:
DEFINE_SIZE_ARRAY (2, rule);
};
struct ChainContextFormat1
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
const Coverage &cov = (this+coverage);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_glyph},
{NULL, NULL, NULL}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (cov.intersects_coverage (c->glyphs, i)) {
const ChainRuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
(this+coverage).add_coverage (c->input);
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_glyph},
{NULL, NULL, NULL}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{NULL, NULL, NULL}
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
return this+coverage;
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{NULL, NULL, NULL}
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
USHORT format; /* Format identifier--format = 1 */
OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
OffsetArrayOf<ChainRuleSet>
ruleSet; /* Array of ChainRuleSet tables
* ordered by Coverage Index */
public:
DEFINE_SIZE_ARRAY (6, ruleSet);
};
struct ChainContextFormat2
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+ruleSet[i]).is_inplace (c))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
if (!(this+coverage).intersects (c->glyphs))
return;
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (input_class_def.intersects_class (c->glyphs, i)) {
const ChainRuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
(this+coverage).add_coverage (c->input);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_class},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
unsigned int index = input_class_def.get_class (c->glyphs[0]);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_class},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
return TRACE_RETURN (rule_set.would_apply (c, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
return this+coverage;
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
index = input_class_def.get_class (c->buffer->cur().codepoint);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{match_class},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
return TRACE_RETURN (rule_set.apply (c, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (coverage.sanitize (c, this) && backtrackClassDef.sanitize (c, this) &&
inputClassDef.sanitize (c, this) && lookaheadClassDef.sanitize (c, this) &&
ruleSet.sanitize (c, this));
}
protected:
USHORT format; /* Format identifier--format = 2 */
OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
OffsetTo<ClassDef>
backtrackClassDef; /* Offset to glyph ClassDef table
* containing backtrack sequence
* data--from beginning of table */
OffsetTo<ClassDef>
inputClassDef; /* Offset to glyph ClassDef
* table containing input sequence
* data--from beginning of table */
OffsetTo<ClassDef>
lookaheadClassDef; /* Offset to glyph ClassDef table
* containing lookahead sequence
* data--from beginning of table */
OffsetArrayOf<ChainRuleSet>
ruleSet; /* Array of ChainRuleSet tables
* ordered by class */
public:
DEFINE_SIZE_ARRAY (12, ruleSet);
};
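/* Note: ChainContextFormat2 matches on glyph classes rather than individual
 * glyphs: separate ClassDef tables classify the backtrack, input and lookahead
 * glyphs, and ruleSet is indexed by the class of the first input glyph (see
 * apply above). */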
struct ChainContextFormat3
{
inline bool is_inplace (hb_is_inplace_context_t *c) const
{
TRACE_IS_INPLACE (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
unsigned int count = lookup.len;
for (unsigned int i = 0; i < count; i++)
if (!c->recurse (lookup.array[i].lookupListIndex))
return TRACE_RETURN (false);
return TRACE_RETURN (true);
}
inline void closure (hb_closure_context_t *c) const
{
TRACE_CLOSURE (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
if (!(this+input[0]).intersects (c->glyphs))
return;
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage},
{this, this, this}
};
chain_context_closure_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array,
lookup_context);
}
inline void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
TRACE_COLLECT_GLYPHS (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
(this+input[0]).add_coverage (c->input);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_coverage},
{this, this, this}
};
chain_context_collect_glyphs_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array,
lookup_context);
}
inline bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{this, this, this}
};
return TRACE_RETURN (chain_context_would_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
}
inline const Coverage &get_coverage (void) const
{
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
return this+input[0];
}
inline bool apply (hb_apply_context_t *c) const
{
TRACE_APPLY (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return TRACE_RETURN (false);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{match_coverage},
{this, this, this}
};
return TRACE_RETURN (chain_context_apply_lookup (c,
backtrack.len, (const USHORT *) backtrack.array,
input.len, (const USHORT *) input.array + 1,
lookahead.len, (const USHORT *) lookahead.array,
lookup.len, lookup.array, lookup_context));
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c, this)) return TRACE_RETURN (false);
OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
if (!input.sanitize (c, this)) return TRACE_RETURN (false);
OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
if (!lookahead.sanitize (c, this)) return TRACE_RETURN (false);
ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return TRACE_RETURN (lookup.sanitize (c));
}
protected:
USHORT format; /* Format identifier--format = 3 */
OffsetArrayOf<Coverage>
backtrack; /* Array of coverage tables
* in backtracking sequence, in glyph
* sequence order */
OffsetArrayOf<Coverage>
inputX; /* Array of coverage
* tables in input sequence, in glyph
* sequence order */
OffsetArrayOf<Coverage>
lookaheadX; /* Array of coverage tables
* in lookahead sequence, in glyph
* sequence order */
ArrayOf<LookupRecord>
lookupX; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_MIN (10);
};
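/* Note: ChainContextFormat3, like ContextFormat3, embeds a single rule:
 * per-position Coverage offsets for the backtrack, input and lookahead
 * sequences, followed by the LookupRecords.  The first input coverage serves
 * as the subtable's coverage (see get_coverage above). */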
struct ChainContext
{
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
{
TRACE_DISPATCH (this);
switch (u.format) {
case 1: return TRACE_RETURN (c->dispatch (u.format1));
case 2: return TRACE_RETURN (c->dispatch (u.format2));
case 3: return TRACE_RETURN (c->dispatch (u.format3));
default:return TRACE_RETURN (c->default_return_value ());
}
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
case 2: return TRACE_RETURN (u.format2.sanitize (c));
case 3: return TRACE_RETURN (u.format3.sanitize (c));
default:return TRACE_RETURN (true);
}
}
protected:
union {
USHORT format; /* Format identifier */
ChainContextFormat1 format1;
ChainContextFormat2 format2;
ChainContextFormat3 format3;
} u;
};
struct ExtensionFormat1
{
inline unsigned int get_type (void) const { return extensionLookupType; }
inline unsigned int get_offset (void) const { return extensionOffset; }
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (c->check_struct (this));
}
protected:
USHORT format; /* Format identifier. Set to 1. */
USHORT extensionLookupType; /* Lookup type of subtable referenced
* by ExtensionOffset (i.e. the
* extension subtable). */
ULONG extensionOffset; /* Offset to the extension subtable,
* of lookup type subtable. */
public:
DEFINE_SIZE_STATIC (8);
};
template <typename T>
struct Extension
{
inline unsigned int get_type (void) const
{
switch (u.format) {
case 1: return u.format1.get_type ();
default:return 0;
}
}
inline unsigned int get_offset (void) const
{
switch (u.format) {
case 1: return u.format1.get_offset ();
default:return 0;
}
}
template <typename X>
inline const X& get_subtable (void) const
{
unsigned int offset = get_offset ();
if (unlikely (!offset)) return Null(typename T::LookupSubTable);
return StructAtOffset<typename T::LookupSubTable> (this, offset);
}
template <typename context_t>
inline typename context_t::return_t dispatch (context_t *c) const
{
return get_subtable<typename T::LookupSubTable> ().dispatch (c, get_type ());
}
inline bool sanitize_self (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return TRACE_RETURN (false);
switch (u.format) {
case 1: return TRACE_RETURN (u.format1.sanitize (c));
default:return TRACE_RETURN (true);
}
}
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
if (!sanitize_self (c)) return TRACE_RETURN (false);
unsigned int offset = get_offset ();
if (unlikely (!offset)) return TRACE_RETURN (true);
return TRACE_RETURN (StructAtOffset<typename T::LookupSubTable> (this, offset).sanitize (c, get_type ()));
}
protected:
union {
USHORT format; /* Format identifier */
ExtensionFormat1 format1;
} u;
};
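/* Note: the Extension wrapper indirects through a 32-bit extensionOffset to
 * the actual subtable, letting a lookup reference subtables that ordinary
 * 16-bit offsets could not reach; dispatch() above simply forwards to the
 * wrapped subtable of type get_type (). */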
/*
* GSUB/GPOS Common
*/
struct GSUBGPOS
{
static const hb_tag_t GSUBTag = HB_OT_TAG_GSUB;
static const hb_tag_t GPOSTag = HB_OT_TAG_GPOS;
inline unsigned int get_script_count (void) const
{ return (this+scriptList).len; }
inline const Tag& get_script_tag (unsigned int i) const
{ return (this+scriptList).get_tag (i); }
inline unsigned int get_script_tags (unsigned int start_offset,
unsigned int *script_count /* IN/OUT */,
hb_tag_t *script_tags /* OUT */) const
{ return (this+scriptList).get_tags (start_offset, script_count, script_tags); }
inline const Script& get_script (unsigned int i) const
{ return (this+scriptList)[i]; }
inline bool find_script_index (hb_tag_t tag, unsigned int *index) const
{ return (this+scriptList).find_index (tag, index); }
inline unsigned int get_feature_count (void) const
{ return (this+featureList).len; }
inline const Tag& get_feature_tag (unsigned int i) const
{ return (this+featureList).get_tag (i); }
inline unsigned int get_feature_tags (unsigned int start_offset,
unsigned int *feature_count /* IN/OUT */,
hb_tag_t *feature_tags /* OUT */) const
{ return (this+featureList).get_tags (start_offset, feature_count, feature_tags); }
inline const Feature& get_feature (unsigned int i) const
{ return (this+featureList)[i]; }
inline bool find_feature_index (hb_tag_t tag, unsigned int *index) const
{ return (this+featureList).find_index (tag, index); }
inline unsigned int get_lookup_count (void) const
{ return (this+lookupList).len; }
inline const Lookup& get_lookup (unsigned int i) const
{ return (this+lookupList)[i]; }
inline bool sanitize (hb_sanitize_context_t *c) {
TRACE_SANITIZE (this);
return TRACE_RETURN (version.sanitize (c) && likely (version.major == 1) &&
scriptList.sanitize (c, this) &&
featureList.sanitize (c, this) &&
lookupList.sanitize (c, this));
}
protected:
FixedVersion version; /* Version of the GSUB/GPOS table--initially set
* to 0x00010000 */
OffsetTo<ScriptList>
scriptList; /* ScriptList table */
OffsetTo<FeatureList>
featureList; /* FeatureList table */
OffsetTo<LookupList>
lookupList; /* LookupList table */
public:
DEFINE_SIZE_STATIC (10);
};
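/* Illustrative sketch, not part of the original source: a minimal, compiled-out
 * example of driving the GSUBGPOS accessors above.  The helper name and its
 * signature are hypothetical. */
#if 0
static inline unsigned int
sketch_collect_feature_tags (const GSUBGPOS &g,
                             hb_tag_t *tags /* OUT */,
                             unsigned int max_tags)
{
  /* Clamp to the caller-provided buffer, then copy each feature tag out. */
  unsigned int count = g.get_feature_count ();
  if (count > max_tags) count = max_tags;
  for (unsigned int i = 0; i < count; i++)
    tags[i] = g.get_feature_tag (i); /* Tag converts to a 32-bit hb_tag_t */
  return count;
}
#endif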
} /* namespace OT */
#endif /* HB_OT_LAYOUT_GSUBGPOS_PRIVATE_HH */