/* harfbuzz/src/hb-ot-layout-gsubgpos.hh */


/*
* Copyright © 2007,2008,2009,2010 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_OT_LAYOUT_GSUBGPOS_HH
#define HB_OT_LAYOUT_GSUBGPOS_HH
#include "hb.hh"
#include "hb-buffer.hh"
#include "hb-map.hh"
#include "hb-set.hh"
#include "hb-ot-map.hh"
#include "hb-ot-layout-common.hh"
#include "hb-ot-layout-gdef-table.hh"
namespace OT {
struct hb_intersects_context_t :
hb_dispatch_context_t<hb_intersects_context_t, bool>
{
template <typename T>
return_t dispatch (const T &obj) { return obj.intersects (this->glyphs); }
static return_t default_return_value () { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
const hb_set_t *glyphs;
hb_intersects_context_t (const hb_set_t *glyphs_) :
glyphs (glyphs_) {}
};
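/* All of these *_context_t structs follow the same dispatch pattern: a
 * lookup or subtable's dispatch() forwards to the operation the context
 * represents (intersects, closure, collect_glyphs, would_apply, apply, ...).
 * A minimal usage sketch, assuming a `lookup` object exposing the usual
 * dispatch() (the real drivers live in hb-ot-layout.cc and the GSUB/GPOS
 * table code):
 *
 *   hb_set_t glyphs;
 *   glyphs.add (gid);
 *   hb_intersects_context_t c (&glyphs);
 *   bool touched = lookup.dispatch (&c); // any subtable coverage intersects?
 */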
struct hb_have_non_1to1_context_t :
hb_dispatch_context_t<hb_have_non_1to1_context_t, bool>
{
template <typename T>
return_t dispatch (const T &obj) { return obj.may_have_non_1to1 (); }
static return_t default_return_value () { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
};
struct hb_closure_context_t :
hb_dispatch_context_t<hb_closure_context_t>
{
typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index);
template <typename T>
return_t dispatch (const T &obj) { obj.closure (this); return hb_empty_t (); }
static return_t default_return_value () { return hb_empty_t (); }
void recurse (unsigned lookup_index, hb_set_t *covered_seq_indicies, unsigned seq_index, unsigned end_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return;
nesting_level_left--;
recurse_func (this, lookup_index, covered_seq_indicies, seq_index, end_index);
nesting_level_left++;
}
void reset_lookup_visit_count ()
{ lookup_count = 0; }
bool lookup_limit_exceeded ()
{ return lookup_count > HB_MAX_LOOKUP_VISIT_COUNT; }
bool should_visit_lookup (unsigned int lookup_index)
{
if (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT)
return false;
if (is_lookup_done (lookup_index))
return false;
return true;
}
bool is_lookup_done (unsigned int lookup_index)
{
if (unlikely (done_lookups_glyph_count->in_error () ||
done_lookups_glyph_set->in_error ()))
return true;
/* Have we visited this lookup with the current set of glyphs? */
if (done_lookups_glyph_count->get (lookup_index) != glyphs->get_population ())
{
done_lookups_glyph_count->set (lookup_index, glyphs->get_population ());
if (!done_lookups_glyph_set->has (lookup_index))
{
if (unlikely (!done_lookups_glyph_set->set (lookup_index, hb::unique_ptr<hb_set_t> {hb_set_create ()})))
return true;
}
done_lookups_glyph_set->get (lookup_index)->clear ();
}
hb_set_t *covered_glyph_set = done_lookups_glyph_set->get (lookup_index);
if (unlikely (covered_glyph_set->in_error ()))
return true;
if (parent_active_glyphs ().is_subset (*covered_glyph_set))
return true;
covered_glyph_set->union_ (parent_active_glyphs ());
return false;
}
const hb_set_t& previous_parent_active_glyphs () {
if (active_glyphs_stack.length <= 1)
return *glyphs;
return active_glyphs_stack[active_glyphs_stack.length - 2];
}
const hb_set_t& parent_active_glyphs ()
{
if (!active_glyphs_stack)
return *glyphs;
return active_glyphs_stack.tail ();
}
hb_set_t& push_cur_active_glyphs ()
{
return *active_glyphs_stack.push ();
}
bool pop_cur_done_glyphs ()
{
if (!active_glyphs_stack)
return false;
active_glyphs_stack.pop ();
return true;
}
hb_face_t *face;
hb_set_t *glyphs;
hb_set_t output[1];
hb_vector_t<hb_set_t> active_glyphs_stack;
recurse_func_t recurse_func = nullptr;
unsigned int nesting_level_left;
hb_closure_context_t (hb_face_t *face_,
hb_set_t *glyphs_,
hb_map_t *done_lookups_glyph_count_,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set_,
unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
face (face_),
glyphs (glyphs_),
nesting_level_left (nesting_level_left_),
done_lookups_glyph_count (done_lookups_glyph_count_),
done_lookups_glyph_set (done_lookups_glyph_set_)
{}
~hb_closure_context_t () { flush (); }
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
void flush ()
{
output->del_range (face->get_num_glyphs (), HB_SET_VALUE_INVALID); /* Remove invalid glyphs. */
glyphs->union_ (*output);
output->clear ();
active_glyphs_stack.pop ();
active_glyphs_stack.reset ();
}
private:
hb_map_t *done_lookups_glyph_count;
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *done_lookups_glyph_set;
unsigned int lookup_count = 0;
};
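/* A closure pass computes the set of glyphs reachable from an initial glyph
 * set by transitively applying GSUB lookups; the subsetter uses it to decide
 * which glyphs to retain. A rough setup sketch, with assumed surrounding
 * code (the public driver is hb_ot_layout_lookups_substitute_closure ()):
 *
 *   hb_map_t done_lookups_glyph_count;
 *   hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> done_lookups_glyph_set;
 *   hb_closure_context_t c (face, glyphs,
 *                           &done_lookups_glyph_count,
 *                           &done_lookups_glyph_set);
 *   // dispatch each relevant lookup with `c`, repeating until `glyphs`
 *   // stops growing; is_lookup_done() above prunes redundant revisits.
 */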
struct hb_closure_lookups_context_t :
hb_dispatch_context_t<hb_closure_lookups_context_t>
{
typedef return_t (*recurse_func_t) (hb_closure_lookups_context_t *c, unsigned lookup_index);
template <typename T>
return_t dispatch (const T &obj) { obj.closure_lookups (this); return hb_empty_t (); }
static return_t default_return_value () { return hb_empty_t (); }
void recurse (unsigned lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return;
/* Return if this lookup has already been recursed into. */
if (lookup_limit_exceeded ()
|| visited_lookups->in_error ()
|| visited_lookups->has (lookup_index))
// Don't increment the lookup count here; that will be done in the call to
// closure_lookups() made by recurse_func.
return;
nesting_level_left--;
recurse_func (this, lookup_index);
nesting_level_left++;
}
void set_lookup_visited (unsigned lookup_index)
{ visited_lookups->add (lookup_index); }
void set_lookup_inactive (unsigned lookup_index)
{ inactive_lookups->add (lookup_index); }
bool lookup_limit_exceeded ()
{
bool ret = lookup_count > HB_MAX_LOOKUP_VISIT_COUNT;
if (ret)
DEBUG_MSG (SUBSET, nullptr, "lookup visit count limit exceeded in lookup closure!");
return ret;
}
bool is_lookup_visited (unsigned lookup_index)
{
if (unlikely (lookup_count++ > HB_MAX_LOOKUP_VISIT_COUNT))
{
DEBUG_MSG (SUBSET, nullptr, "total visited lookup count %u exceeds max limit, lookup %u is dropped.",
lookup_count, lookup_index);
return true;
}
if (unlikely (visited_lookups->in_error ()))
return true;
return visited_lookups->has (lookup_index);
}
hb_face_t *face;
const hb_set_t *glyphs;
recurse_func_t recurse_func;
unsigned int nesting_level_left;
hb_closure_lookups_context_t (hb_face_t *face_,
const hb_set_t *glyphs_,
hb_set_t *visited_lookups_,
hb_set_t *inactive_lookups_,
unsigned nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
face (face_),
glyphs (glyphs_),
recurse_func (nullptr),
nesting_level_left (nesting_level_left_),
visited_lookups (visited_lookups_),
inactive_lookups (inactive_lookups_),
lookup_count (0) {}
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
private:
hb_set_t *visited_lookups;
hb_set_t *inactive_lookups;
unsigned int lookup_count;
};
struct hb_would_apply_context_t :
hb_dispatch_context_t<hb_would_apply_context_t, bool>
{
template <typename T>
return_t dispatch (const T &obj) { return obj.would_apply (this); }
static return_t default_return_value () { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
hb_face_t *face;
const hb_codepoint_t *glyphs;
unsigned int len;
bool zero_context;
hb_would_apply_context_t (hb_face_t *face_,
const hb_codepoint_t *glyphs_,
unsigned int len_,
bool zero_context_) :
face (face_),
glyphs (glyphs_),
len (len_),
zero_context (zero_context_) {}
};
struct hb_collect_glyphs_context_t :
hb_dispatch_context_t<hb_collect_glyphs_context_t>
{
typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
template <typename T>
return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_empty_t (); }
static return_t default_return_value () { return hb_empty_t (); }
void recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
return;
/* Note that GPOS sets recurse_func to nullptr already, so it doesn't get
* past the previous check. For GSUB, we only want to collect the output
* glyphs in the recursion. If output is not requested, we can go home now.
*
* Note further, that the above is not exactly correct. A recursed lookup
* is allowed to match input that is not matched in the context, but that's
* not how most fonts are built. It's possible to relax that and recurse
* with all sets here if it proves to be an issue.
*/
if (output == hb_set_get_empty ())
return;
/* Return if this lookup has already been recursed into. */
if (recursed_lookups->has (lookup_index))
return;
hb_set_t *old_before = before;
hb_set_t *old_input = input;
hb_set_t *old_after = after;
before = input = after = hb_set_get_empty ();
nesting_level_left--;
recurse_func (this, lookup_index);
nesting_level_left++;
before = old_before;
input = old_input;
after = old_after;
recursed_lookups->add (lookup_index);
}
hb_face_t *face;
hb_set_t *before;
hb_set_t *input;
hb_set_t *after;
hb_set_t *output;
recurse_func_t recurse_func;
hb_set_t *recursed_lookups;
unsigned int nesting_level_left;
hb_collect_glyphs_context_t (hb_face_t *face_,
hb_set_t *glyphs_before, /* OUT. May be NULL */
hb_set_t *glyphs_input, /* OUT. May be NULL */
hb_set_t *glyphs_after, /* OUT. May be NULL */
hb_set_t *glyphs_output, /* OUT. May be NULL */
unsigned int nesting_level_left_ = HB_MAX_NESTING_LEVEL) :
face (face_),
before (glyphs_before ? glyphs_before : hb_set_get_empty ()),
input (glyphs_input ? glyphs_input : hb_set_get_empty ()),
after (glyphs_after ? glyphs_after : hb_set_get_empty ()),
output (glyphs_output ? glyphs_output : hb_set_get_empty ()),
recurse_func (nullptr),
recursed_lookups (hb_set_create ()),
nesting_level_left (nesting_level_left_) {}
~hb_collect_glyphs_context_t () { hb_set_destroy (recursed_lookups); }
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
};
template <typename set_t>
struct hb_collect_coverage_context_t :
hb_dispatch_context_t<hb_collect_coverage_context_t<set_t>, const Coverage &>
{
typedef const Coverage &return_t; // Stoopid that we have to dupe this here.
template <typename T>
return_t dispatch (const T &obj) { return obj.get_coverage (); }
static return_t default_return_value () { return Null (Coverage); }
bool stop_sublookup_iteration (return_t r) const
{
r.collect_coverage (set);
return false;
}
hb_collect_coverage_context_t (set_t *set_) :
set (set_) {}
set_t *set;
};
struct hb_ot_apply_context_t :
hb_dispatch_context_t<hb_ot_apply_context_t, bool, HB_DEBUG_APPLY>
{
struct matcher_t
{
matcher_t () :
lookup_props (0),
mask (-1),
ignore_zwnj (false),
ignore_zwj (false),
per_syllable (false),
syllable {0},
match_func (nullptr),
match_data (nullptr) {}
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
void set_ignore_zwnj (bool ignore_zwnj_) { ignore_zwnj = ignore_zwnj_; }
void set_ignore_zwj (bool ignore_zwj_) { ignore_zwj = ignore_zwj_; }
void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; }
void set_mask (hb_mask_t mask_) { mask = mask_; }
void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; }
void set_syllable (uint8_t syllable_) { syllable = per_syllable ? syllable_ : 0; }
void set_match_func (match_func_t match_func_,
const void *match_data_)
{ match_func = match_func_; match_data = match_data_; }
enum may_match_t {
MATCH_NO,
MATCH_YES,
MATCH_MAYBE
};
may_match_t may_match (hb_glyph_info_t &info,
hb_codepoint_t glyph_data) const
{
if (!(info.mask & mask) ||
(syllable && syllable != info.syllable ()))
return MATCH_NO;
if (match_func)
return match_func (info, glyph_data, match_data) ? MATCH_YES : MATCH_NO;
return MATCH_MAYBE;
}
enum may_skip_t {
SKIP_NO,
SKIP_YES,
SKIP_MAYBE
};
may_skip_t may_skip (const hb_ot_apply_context_t *c,
const hb_glyph_info_t &info) const
{
if (!c->check_glyph_property (&info, lookup_props))
return SKIP_YES;
if (unlikely (_hb_glyph_info_is_default_ignorable_and_not_hidden (&info) &&
(ignore_zwnj || !_hb_glyph_info_is_zwnj (&info)) &&
(ignore_zwj || !_hb_glyph_info_is_zwj (&info))))
return SKIP_MAYBE;
return SKIP_NO;
}
protected:
unsigned int lookup_props;
hb_mask_t mask;
bool ignore_zwnj;
bool ignore_zwj;
bool per_syllable;
uint8_t syllable;
match_func_t match_func;
const void *match_data;
};
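/* How the two tri-states above combine in skipping_iterator_t::match()
 * below: a SKIP_YES glyph is always skipped; otherwise MATCH_YES matches
 * outright, MATCH_MAYBE (no match_func installed) matches only when the
 * glyph definitely cannot be skipped (SKIP_NO), and anything else is either
 * a non-match (SKIP_NO) or gets skipped (SKIP_MAYBE). */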
struct skipping_iterator_t
{
void init (hb_ot_apply_context_t *c_, bool context_match = false)
{
c = c_;
match_glyph_data16 = nullptr;
#ifndef HB_NO_BEYOND_64K
match_glyph_data24 = nullptr;
#endif
matcher.set_match_func (nullptr, nullptr);
matcher.set_lookup_props (c->lookup_props);
/* Ignore ZWNJ if we are matching GPOS, or matching GSUB context and asked to. */
matcher.set_ignore_zwnj (c->table_index == 1 || (context_match && c->auto_zwnj));
/* Ignore ZWJ if we are matching context, or asked to. */
matcher.set_ignore_zwj (context_match || c->auto_zwj);
matcher.set_mask (context_match ? -1 : c->lookup_mask);
matcher.set_per_syllable (c->per_syllable);
}
void set_lookup_props (unsigned int lookup_props)
{
matcher.set_lookup_props (lookup_props);
}
void set_match_func (matcher_t::match_func_t match_func_,
const void *match_data_)
{
matcher.set_match_func (match_func_, match_data_);
}
void set_glyph_data (const HBUINT16 glyph_data[])
{
match_glyph_data16 = glyph_data;
#ifndef HB_NO_BEYOND_64K
match_glyph_data24 = nullptr;
#endif
}
#ifndef HB_NO_BEYOND_64K
void set_glyph_data (const HBUINT24 glyph_data[])
{
match_glyph_data16 = nullptr;
match_glyph_data24 = glyph_data;
}
#endif
void reset (unsigned int start_index_,
unsigned int num_items_)
{
idx = start_index_;
num_items = num_items_;
end = c->buffer->len;
matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
}
void reject ()
{
num_items++;
backup_glyph_data ();
}
matcher_t::may_skip_t
may_skip (const hb_glyph_info_t &info) const
{ return matcher.may_skip (c, info); }
enum match_t {
MATCH,
NOT_MATCH,
SKIP
};
match_t match (hb_glyph_info_t &info)
{
matcher_t::may_skip_t skip = matcher.may_skip (c, info);
if (unlikely (skip == matcher_t::SKIP_YES))
return SKIP;
matcher_t::may_match_t match = matcher.may_match (info, get_glyph_data ());
if (match == matcher_t::MATCH_YES ||
(match == matcher_t::MATCH_MAYBE &&
skip == matcher_t::SKIP_NO))
return MATCH;
if (skip == matcher_t::SKIP_NO)
return NOT_MATCH;
return SKIP;
}
bool next (unsigned *unsafe_to = nullptr)
{
assert (num_items > 0);
/* The alternate condition below is faster at string boundaries,
* but produces subpar "unsafe-to-concat" values. */
signed stop = (signed) end - (signed) num_items;
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
stop = (signed) end - 1;
while ((signed) idx < stop)
{
idx++;
switch (match (c->buffer->info[idx]))
{
case MATCH:
{
num_items--;
advance_glyph_data ();
return true;
}
case NOT_MATCH:
{
if (unsafe_to)
*unsafe_to = idx + 1;
return false;
}
case SKIP:
continue;
}
}
if (unsafe_to)
*unsafe_to = end;
return false;
}
bool prev (unsigned *unsafe_from = nullptr)
{
assert (num_items > 0);
/* The alternate condition below is faster at string boundaries,
* but produces subpar "unsafe-to-concat" values. */
unsigned stop = num_items - 1;
if (c->buffer->flags & HB_BUFFER_FLAG_PRODUCE_UNSAFE_TO_CONCAT)
stop = 1 - 1; /* i.e. 0; written as "1 - 1" to parallel the "num_items - 1" above. */
while (idx > stop)
{
idx--;
switch (match (c->buffer->out_info[idx]))
{
case MATCH:
{
num_items--;
advance_glyph_data ();
return true;
}
case NOT_MATCH:
{
if (unsafe_from)
*unsafe_from = hb_max (1u, idx) - 1u;
return false;
}
case SKIP:
continue;
}
}
if (unsafe_from)
*unsafe_from = 0;
return false;
}
hb_codepoint_t
get_glyph_data ()
{
if (match_glyph_data16) return *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
else
if (match_glyph_data24) return *match_glyph_data24;
#endif
return 0;
}
void
advance_glyph_data ()
{
if (match_glyph_data16) match_glyph_data16++;
#ifndef HB_NO_BEYOND_64K
else
if (match_glyph_data24) match_glyph_data24++;
#endif
}
void
backup_glyph_data ()
{
if (match_glyph_data16) match_glyph_data16--;
#ifndef HB_NO_BEYOND_64K
else
if (match_glyph_data24) match_glyph_data24--;
#endif
}
unsigned int idx;
protected:
hb_ot_apply_context_t *c;
matcher_t matcher;
const HBUINT16 *match_glyph_data16;
#ifndef HB_NO_BEYOND_64K
const HBUINT24 *match_glyph_data24;
#endif
unsigned int num_items;
unsigned int end;
};
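/* Typical use of skipping_iterator_t, as in match_input() below: position
 * the iterator, install the per-position match function and glyph data,
 * then walk forward one matched glyph at a time:
 *
 *   hb_ot_apply_context_t::skipping_iterator_t &it = c->iter_input;
 *   it.reset (c->buffer->idx, count - 1);
 *   it.set_match_func (match_func, match_data);
 *   it.set_glyph_data (input);
 *   unsigned unsafe_to;
 *   if (!it.next (&unsafe_to)) ... // failed; unsafe_to bounds the damage
 *   ... it.idx ...                 // succeeded; index of the matched glyph
 */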
const char *get_name () { return "APPLY"; }
typedef return_t (*recurse_func_t) (hb_ot_apply_context_t *c, unsigned int lookup_index);
template <typename T>
return_t dispatch (const T &obj) { return obj.apply (this); }
static return_t default_return_value () { return false; }
bool stop_sublookup_iteration (return_t r) const { return r; }
return_t recurse (unsigned int sub_lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func || buffer->max_ops-- <= 0))
{
buffer->shaping_failed = true;
return default_return_value ();
}
nesting_level_left--;
bool ret = recurse_func (this, sub_lookup_index);
nesting_level_left++;
return ret;
}
skipping_iterator_t iter_input, iter_context;
unsigned int table_index; /* GSUB/GPOS */
hb_font_t *font;
hb_face_t *face;
hb_buffer_t *buffer;
recurse_func_t recurse_func = nullptr;
const GDEF &gdef;
const VariationStore &var_store;
VariationStore::cache_t *var_store_cache;
hb_set_digest_t digest;
hb_direction_t direction;
hb_mask_t lookup_mask = 1;
unsigned int lookup_index = (unsigned) -1;
unsigned int lookup_props = 0;
unsigned int nesting_level_left = HB_MAX_NESTING_LEVEL;
bool has_glyph_classes;
bool auto_zwnj = true;
bool auto_zwj = true;
bool per_syllable = false;
bool random = false;
uint32_t random_state = 1;
unsigned new_syllables = (unsigned) -1;
signed last_base = -1; // used by GPOS
unsigned last_base_until = 0; // used by GPOS
hb_ot_apply_context_t (unsigned int table_index_,
hb_font_t *font_,
hb_buffer_t *buffer_) :
table_index (table_index_),
font (font_), face (font->face), buffer (buffer_),
gdef (
#ifndef HB_NO_OT_LAYOUT
*face->table.GDEF->table
#else
Null (GDEF)
#endif
),
var_store (gdef.get_var_store ()),
var_store_cache (
#ifndef HB_NO_VAR
table_index == 1 && font->num_coords ? var_store.create_cache () : nullptr
#else
nullptr
#endif
),
digest (buffer_->digest ()),
direction (buffer_->props.direction),
has_glyph_classes (gdef.has_glyph_classes ())
{ init_iters (); }
~hb_ot_apply_context_t ()
{
#ifndef HB_NO_VAR
VariationStore::destroy_cache (var_store_cache);
#endif
}
void init_iters ()
{
iter_input.init (this, false);
iter_context.init (this, true);
}
void set_lookup_mask (hb_mask_t mask) { lookup_mask = mask; last_base = -1; last_base_until = 0; init_iters (); }
void set_auto_zwj (bool auto_zwj_) { auto_zwj = auto_zwj_; init_iters (); }
void set_auto_zwnj (bool auto_zwnj_) { auto_zwnj = auto_zwnj_; init_iters (); }
void set_per_syllable (bool per_syllable_) { per_syllable = per_syllable_; init_iters (); }
void set_random (bool random_) { random = random_; }
void set_recurse_func (recurse_func_t func) { recurse_func = func; }
void set_lookup_index (unsigned int lookup_index_) { lookup_index = lookup_index_; }
void set_lookup_props (unsigned int lookup_props_) { lookup_props = lookup_props_; init_iters (); }
uint32_t random_number ()
{
/* http://www.cplusplus.com/reference/random/minstd_rand/ */
random_state = random_state * 48271 % 2147483647;
return random_state;
}
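/* random_state steps through the minstd_rand linear congruential generator:
 * state' = state * 48271 mod (2^31 - 1). From the initial state of 1 the
 * first outputs are 48271, then 48271^2 mod 2147483647 = 182605794, etc.
 * This backs the 'rand' feature (see set_random() below); e.g. alternate-
 * substitution code can pick an alternate roughly as
 * `random_number () % count + 1` (a sketch; see the GSUB table code for the
 * exact use). */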
bool match_properties_mark (hb_codepoint_t glyph,
unsigned int glyph_props,
unsigned int match_props) const
{
/* If using mark filtering sets, the high short of
* match_props has the set index.
*/
if (match_props & LookupFlag::UseMarkFilteringSet)
return gdef.mark_set_covers (match_props >> 16, glyph);
/* The second byte of match_props has the meaning
* "ignore marks of attachment type different than
* the attachment type specified."
*/
if (match_props & LookupFlag::MarkAttachmentType)
return (match_props & LookupFlag::MarkAttachmentType) == (glyph_props & LookupFlag::MarkAttachmentType);
return true;
}
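/* lookup_props packs the LookupFlag bits in its low 16 bits and, when
 * LookupFlag::UseMarkFilteringSet is set, the mark filtering set index in
 * the high 16 bits. An illustrative encoding with a hypothetical set index
 * of 3:
 *
 *   unsigned match_props = LookupFlag::UseMarkFilteringSet | (3u << 16);
 *   // match_properties_mark() above then tests
 *   // gdef.mark_set_covers (3, glyph).
 */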
bool check_glyph_property (const hb_glyph_info_t *info,
unsigned int match_props) const
{
hb_codepoint_t glyph = info->codepoint;
unsigned int glyph_props = _hb_glyph_info_get_glyph_props (info);
/* Not covered, if, for example, glyph class is ligature and
* match_props includes LookupFlag::IgnoreLigatures
*/
if (glyph_props & match_props & LookupFlag::IgnoreFlags)
return false;
if (unlikely (glyph_props & HB_OT_LAYOUT_GLYPH_PROPS_MARK))
return match_properties_mark (glyph, glyph_props, match_props);
return true;
}
void _set_glyph_class (hb_codepoint_t glyph_index,
unsigned int class_guess = 0,
bool ligature = false,
bool component = false)
{
digest.add (glyph_index);
if (new_syllables != (unsigned) -1)
buffer->cur().syllable() = new_syllables;
unsigned int props = _hb_glyph_info_get_glyph_props (&buffer->cur());
props |= HB_OT_LAYOUT_GLYPH_PROPS_SUBSTITUTED;
if (ligature)
{
props |= HB_OT_LAYOUT_GLYPH_PROPS_LIGATED;
/* In the only place that the MULTIPLIED bit is used, Uniscribe
* seems to only care about the "last" transformation between
* Ligature and Multiple substitutions. Ie. if you ligate, expand,
* and ligate again, it forgives the multiplication and acts as
* if only ligation happened. As such, clear MULTIPLIED bit.
*/
props &= ~HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
}
if (component)
props |= HB_OT_LAYOUT_GLYPH_PROPS_MULTIPLIED;
if (likely (has_glyph_classes))
{
props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
_hb_glyph_info_set_glyph_props (&buffer->cur(), props | gdef.get_glyph_props (glyph_index));
}
else if (class_guess)
{
props &= HB_OT_LAYOUT_GLYPH_PROPS_PRESERVE;
_hb_glyph_info_set_glyph_props (&buffer->cur(), props | class_guess);
}
else
_hb_glyph_info_set_glyph_props (&buffer->cur(), props);
}
void replace_glyph (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
(void) buffer->replace_glyph (glyph_index);
}
void replace_glyph_inplace (hb_codepoint_t glyph_index)
{
_set_glyph_class (glyph_index);
buffer->cur().codepoint = glyph_index;
}
void replace_glyph_with_ligature (hb_codepoint_t glyph_index,
unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, true);
(void) buffer->replace_glyph (glyph_index);
}
void output_glyph_for_component (hb_codepoint_t glyph_index,
unsigned int class_guess)
{
_set_glyph_class (glyph_index, class_guess, false, true);
(void) buffer->output_glyph (glyph_index);
}
};
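/* A rough sketch of how a shaper might set one of these up per lookup
 * before walking the buffer; the names and ordering here are assumptions
 * (the real driver is apply_string() and friends in hb-ot-layout.cc):
 *
 *   hb_ot_apply_context_t c (table_index, font, buffer);
 *   c.set_recurse_func (recurse_func);
 *   c.set_lookup_index (lookup_index);
 *   c.set_lookup_mask (lookup_mask);
 *   c.set_lookup_props (lookup.get_props ());
 *   // then, at each buffer position whose glyph passes the coverage
 *   // digest, apply the lookup's subtables with `c`.
 */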
struct hb_accelerate_subtables_context_t :
hb_dispatch_context_t<hb_accelerate_subtables_context_t>
{
template <typename Type>
static inline bool apply_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return typed_obj->apply (c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<1>) HB_RETURN (bool, obj->apply (c, true) )
template <typename T>
static inline auto apply_cached_ (const T *obj, hb_ot_apply_context_t *c, hb_priority<0>) HB_RETURN (bool, obj->apply (c) )
template <typename Type>
static inline bool apply_cached_to (const void *obj, hb_ot_apply_context_t *c)
{
const Type *typed_obj = (const Type *) obj;
return apply_cached_ (typed_obj, c, hb_prioritize);
}
template <typename T>
static inline auto cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<1>) HB_RETURN (bool, obj->cache_func (c, enter) )
template <typename T>
static inline bool cache_func_ (const T *obj, hb_ot_apply_context_t *c, bool enter, hb_priority<0>) { return false; }
template <typename Type>
static inline bool cache_func_to (const void *obj, hb_ot_apply_context_t *c, bool enter)
{
const Type *typed_obj = (const Type *) obj;
return cache_func_ (typed_obj, c, enter, hb_prioritize);
}
#endif
typedef bool (*hb_apply_func_t) (const void *obj, hb_ot_apply_context_t *c);
typedef bool (*hb_cache_func_t) (const void *obj, hb_ot_apply_context_t *c, bool enter);
struct hb_applicable_t
{
friend struct hb_accelerate_subtables_context_t;
friend struct hb_ot_layout_lookup_accelerator_t;
template <typename T>
void init (const T &obj_,
hb_apply_func_t apply_func_
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
, hb_apply_func_t apply_cached_func_
, hb_cache_func_t cache_func_
#endif
)
{
obj = &obj_;
apply_func = apply_func_;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
apply_cached_func = apply_cached_func_;
cache_func = cache_func_;
#endif
digest.init ();
obj_.get_coverage ().collect_coverage (&digest);
}
bool apply (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_func (obj, c);
}
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
bool apply_cached (hb_ot_apply_context_t *c) const
{
return digest.may_have (c->buffer->cur().codepoint) && apply_cached_func (obj, c);
}
bool cache_enter (hb_ot_apply_context_t *c) const
{
return cache_func (obj, c, true);
}
void cache_leave (hb_ot_apply_context_t *c) const
{
cache_func (obj, c, false);
}
#endif
private:
const void *obj;
hb_apply_func_t apply_func;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
hb_apply_func_t apply_cached_func;
hb_cache_func_t cache_func;
#endif
hb_set_digest_t digest;
};
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
template <typename T>
auto cache_cost (const T &obj, hb_priority<1>) HB_AUTO_RETURN ( obj.cache_cost () )
template <typename T>
auto cache_cost (const T &obj, hb_priority<0>) HB_AUTO_RETURN ( 0u )
#endif
/* Dispatch interface. */
template <typename T>
return_t dispatch (const T &obj)
{
hb_applicable_t *entry = &array[i++];
entry->init (obj,
apply_to<T>
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
, apply_cached_to<T>
, cache_func_to<T>
#endif
);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
/* Cache handling
*
* We allow one subtable from each lookup to use a cache. The assumption
* being that multiple subtables of the same lookup cannot use a cache
* because the resources they would use will collide. As such, we ask
* each subtable to tell us how much it costs (which a cache would avoid),
* and we allocate the cache opportunity to the costliest subtable.
*/
unsigned cost = cache_cost (obj, hb_prioritize);
if (cost > cache_user_cost)
{
cache_user_idx = i - 1;
cache_user_cost = cost;
}
#endif
return hb_empty_t ();
}
static return_t default_return_value () { return hb_empty_t (); }
hb_accelerate_subtables_context_t (hb_applicable_t *array_) :
array (array_) {}
hb_applicable_t *array;
unsigned i = 0;
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
unsigned cache_user_cost = 0;
#endif
};
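/* The per-lookup accelerator dispatches each subtable through this context
 * once, filling `array` with type-erased (obj, apply_func) entries plus a
 * coverage digest; at apply time the digest cheaply rejects glyphs that a
 * subtable's coverage cannot contain before any apply call is made. A
 * sketch of the assumed caller (cf. hb_ot_layout_lookup_accelerator_t):
 *
 *   hb_accelerate_subtables_context_t c_accelerate (subtables_array);
 *   lookup.dispatch (&c_accelerate);
 *   // c_accelerate.cache_user_idx now names the costliest subtable, which
 *   // is granted the single per-lookup cache slot described above.
 */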
typedef bool (*intersects_func_t) (const hb_set_t *glyphs, unsigned value, const void *data, void *cache);
typedef void (*intersected_glyphs_func_t) (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache);
typedef void (*collect_glyphs_func_t) (hb_set_t *glyphs, unsigned value, const void *data);
typedef bool (*match_func_t) (hb_glyph_info_t &info, unsigned value, const void *data);
struct ContextClosureFuncs
{
intersects_func_t intersects;
intersected_glyphs_func_t intersected_glyphs;
};
struct ContextCollectGlyphsFuncs
{
collect_glyphs_func_t collect;
};
struct ContextApplyFuncs
{
match_func_t match;
};
struct ChainContextApplyFuncs
{
match_func_t match[3];
};
static inline bool intersects_glyph (const hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED, void *cache HB_UNUSED)
{
return glyphs->has (value);
}
static inline bool intersects_class (const hb_set_t *glyphs, unsigned value, const void *data, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
hb_map_t *map = (hb_map_t *) cache;
hb_codepoint_t *cached_v;
if (map->has (value, &cached_v))
return *cached_v;
bool v = class_def.intersects_class (glyphs, value);
map->set (value, v);
return v;
}
static inline bool intersects_coverage (const hb_set_t *glyphs, unsigned value, const void *data, void *cache HB_UNUSED)
{
Offset16To<Coverage> coverage;
coverage = value;
return (data+coverage).intersects (glyphs);
}
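/* The `Offset16To<Coverage> coverage; coverage = value;` idiom here and in
 * the coverage-based functions below reinterprets the packed `value` as a
 * 16-bit offset from `data`, so `data+coverage` resolves to the Coverage
 * table that the offset points at. */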
static inline void intersected_glyph (const hb_set_t *glyphs HB_UNUSED, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache HB_UNUSED)
{
unsigned g = reinterpret_cast<const HBUINT16 *>(data)[value];
intersected_glyphs->add (g);
}
using intersected_class_cache_t = hb_hashmap_t<unsigned, hb_set_t>;
static inline void intersected_class_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
intersected_class_cache_t *map = (intersected_class_cache_t *) cache;
hb_set_t *cached_v;
if (map->has (value, &cached_v))
{
intersected_glyphs->union_ (*cached_v);
return;
}
hb_set_t v;
class_def.intersected_class_glyphs (glyphs, value, &v);
intersected_glyphs->union_ (v);
map->set (value, std::move (v));
}
static inline void intersected_coverage_glyphs (const hb_set_t *glyphs, const void *data, unsigned value, hb_set_t *intersected_glyphs, void *cache HB_UNUSED)
{
Offset16To<Coverage> coverage;
coverage = value;
(data+coverage).intersect_set (*glyphs, *intersected_glyphs);
}
template <typename HBUINT>
static inline bool array_is_subset_of (const hb_set_t *glyphs,
unsigned int count,
const HBUINT values[],
intersects_func_t intersects_func,
const void *intersects_data,
void *cache)
{
for (const auto &_ : + hb_iter (values, count))
if (!intersects_func (glyphs, _, intersects_data, cache)) return false;
return true;
}
static inline void collect_glyph (hb_set_t *glyphs, unsigned value, const void *data HB_UNUSED)
{
glyphs->add (value);
}
static inline void collect_class (hb_set_t *glyphs, unsigned value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
class_def.collect_class (glyphs, value);
}
static inline void collect_coverage (hb_set_t *glyphs, unsigned value, const void *data)
{
Offset16To<Coverage> coverage;
coverage = value;
(data+coverage).collect_coverage (glyphs);
}
template <typename HBUINT>
static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
hb_set_t *glyphs,
unsigned int count,
const HBUINT values[],
collect_glyphs_func_t collect_func,
const void *collect_data)
{
return
+ hb_iter (values, count)
| hb_apply ([&] (const HBUINT &_) { collect_func (glyphs, _, collect_data); })
;
}
static inline bool match_glyph (hb_glyph_info_t &info, unsigned value, const void *data HB_UNUSED)
{
return info.codepoint == value;
}
static inline bool match_class (hb_glyph_info_t &info, unsigned value, const void *data)
{
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
return class_def.get_class (info.codepoint) == value;
}
static inline bool match_class_cached (hb_glyph_info_t &info, unsigned value, const void *data)
{
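/* Caching flavor of match_class: the per-glyph syllable() byte is reused
 * as a one-byte class cache while a cache-using subtable is active (see
 * the cache_enter/cache_leave machinery above); 255 is assumed to be the
 * "not cached yet" sentinel, so only class values 0..254 are cacheable. */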
unsigned klass = info.syllable();
if (klass < 255)
return klass == value;
const ClassDef &class_def = *reinterpret_cast<const ClassDef *>(data);
klass = class_def.get_class (info.codepoint);
if (likely (klass < 255))
info.syllable() = klass;
return klass == value;
}
static inline bool match_coverage (hb_glyph_info_t &info, unsigned value, const void *data)
{
Offset16To<Coverage> coverage;
coverage = value;
return (data+coverage).get_coverage (info.codepoint) != NOT_COVERED;
}
template <typename HBUINT>
static inline bool would_match_input (hb_would_apply_context_t *c,
unsigned int count, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
match_func_t match_func,
const void *match_data)
{
if (count != c->len)
return false;
for (unsigned int i = 1; i < count; i++)
{
hb_glyph_info_t info;
info.codepoint = c->glyphs[i];
if (likely (!match_func (info, input[i - 1], match_data)))
return false;
}
return true;
}
template <typename HBUINT>
static inline bool match_input (hb_ot_apply_context_t *c,
unsigned int count, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
match_func_t match_func,
const void *match_data,
unsigned int *end_position,
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH],
unsigned int *p_total_component_count = nullptr)
{
TRACE_APPLY (nullptr);
if (unlikely (count > HB_MAX_CONTEXT_LENGTH)) return_trace (false);
hb_buffer_t *buffer = c->buffer;
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_input;
skippy_iter.reset (buffer->idx, count - 1);
skippy_iter.set_match_func (match_func, match_data);
skippy_iter.set_glyph_data (input);
/*
* This is perhaps the trickiest part of OpenType... Remarks:
*
* - If all components of the ligature were marks, we call this a mark ligature.
*
* - If there is no GDEF, and the ligature is NOT a mark ligature, we categorize
* it as a ligature glyph.
*
* - Ligatures cannot be formed across glyphs attached to different components
* of previous ligatures. Eg. the sequence is LAM,SHADDA,LAM,FATHA,HEH, and
* LAM,LAM,HEH form a ligature, leaving SHADDA,FATHA next to each other.
* However, it would be wrong to ligate that SHADDA,FATHA sequence.
* There are a couple of exceptions to this:
*
* o If a ligature tries ligating with marks that belong to it itself, go ahead,
* assuming that the font designer knows what they are doing (otherwise it can
* break Indic stuff when a matra wants to ligate with a conjunct),
*
* o If two marks want to ligate and they belong to different components of the
* same ligature glyph, and said ligature glyph is to be ignored according to
* mark-filtering rules, then allow.
* https://github.com/harfbuzz/harfbuzz/issues/545
*/
unsigned int total_component_count = 0;
total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->cur());
unsigned int first_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int first_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
enum {
LIGBASE_NOT_CHECKED,
LIGBASE_MAY_NOT_SKIP,
LIGBASE_MAY_SKIP
} ligbase = LIGBASE_NOT_CHECKED;
match_positions[0] = buffer->idx;
for (unsigned int i = 1; i < count; i++)
{
unsigned unsafe_to;
if (!skippy_iter.next (&unsafe_to))
{
*end_position = unsafe_to;
return_trace (false);
}
match_positions[i] = skippy_iter.idx;
unsigned int this_lig_id = _hb_glyph_info_get_lig_id (&buffer->info[skippy_iter.idx]);
unsigned int this_lig_comp = _hb_glyph_info_get_lig_comp (&buffer->info[skippy_iter.idx]);
if (first_lig_id && first_lig_comp)
{
/* If first component was attached to a previous ligature component,
* all subsequent components should be attached to the same ligature
* component, otherwise we shouldn't ligate them... */
if (first_lig_id != this_lig_id || first_lig_comp != this_lig_comp)
{
/* ...unless, we are attached to a base ligature and that base
* ligature is ignorable. */
if (ligbase == LIGBASE_NOT_CHECKED)
{
bool found = false;
const auto *out = buffer->out_info;
unsigned int j = buffer->out_len;
while (j && _hb_glyph_info_get_lig_id (&out[j - 1]) == first_lig_id)
{
if (_hb_glyph_info_get_lig_comp (&out[j - 1]) == 0)
{
j--;
found = true;
break;
}
j--;
}
if (found && skippy_iter.may_skip (out[j]) == hb_ot_apply_context_t::matcher_t::SKIP_YES)
ligbase = LIGBASE_MAY_SKIP;
else
ligbase = LIGBASE_MAY_NOT_SKIP;
}
if (ligbase == LIGBASE_MAY_NOT_SKIP)
return_trace (false);
}
}
else
{
/* If first component was NOT attached to a previous ligature component,
* all subsequent components should also NOT be attached to any ligature
* component, unless they are attached to the first component itself! */
if (this_lig_id && this_lig_comp && (this_lig_id != first_lig_id))
return_trace (false);
}
total_component_count += _hb_glyph_info_get_lig_num_comps (&buffer->info[skippy_iter.idx]);
}
*end_position = skippy_iter.idx + 1;
if (p_total_component_count)
*p_total_component_count = total_component_count;
return_trace (true);
}
static inline bool ligate_input (hb_ot_apply_context_t *c,
unsigned int count, /* Including the first glyph */
const unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
unsigned int match_end,
hb_codepoint_t lig_glyph,
unsigned int total_component_count)
{
TRACE_APPLY (nullptr);
hb_buffer_t *buffer = c->buffer;
buffer->merge_clusters (buffer->idx, match_end);
/* - If a base and one or more marks ligate, consider that as a base, NOT
* ligature, such that all following marks can still attach to it.
* https://github.com/harfbuzz/harfbuzz/issues/1109
*
* - If all components of the ligature were marks, we call this a mark ligature.
* If it *is* a mark ligature, we don't allocate a new ligature id, and leave
* the ligature to keep its old ligature id. This will allow it to attach to
* a base ligature in GPOS. Eg. if the sequence is: LAM,LAM,SHADDA,FATHA,HEH,
* and LAM,LAM,HEH form a ligature, they will leave SHADDA and FATHA with a
* ligature id and component value of 2. Then if SHADDA,FATHA form a ligature
* later, we don't want them to lose their ligature id/component, otherwise
* GPOS will fail to correctly position the mark ligature on top of the
* LAM,LAM,HEH ligature. See:
* https://bugzilla.gnome.org/show_bug.cgi?id=676343
*
* - If a ligature is formed of components that some of which are also ligatures
* themselves, and those ligature components had marks attached to *their*
* components, we have to attach the marks to the new ligature component
* positions! Now *that*'s tricky! And these marks may be following the
* last component of the whole sequence, so we should loop forward looking
* for them and update them.
*
* Eg. the sequence is LAM,LAM,SHADDA,FATHA,HEH, and the font first forms a
* 'calt' ligature of LAM,HEH, leaving the SHADDA and FATHA with a ligature
* id and component == 1. Now, during 'liga', the LAM and the LAM-HEH ligature
* form a LAM-LAM-HEH ligature. We need to reassign the SHADDA and FATHA to
* the new ligature with a component value of 2.
*
* This in fact happened to a font... See:
* https://bugzilla.gnome.org/show_bug.cgi?id=437633
*/
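/* A worked instance of the component renumbering below, following the
 * LAM,LAM,SHADDA,FATHA,HEH example from the comment above: after 'calt'
 * the buffer is LAM, LAM-HEH (ligature, 2 components), SHADDA (comp 1),
 * FATHA (comp 1). When 'liga' then forms LAM-LAM-HEH, we reach the
 * trailing marks with components_so_far = 3, last_num_components = 2 and
 * this_comp = 1, so new_lig_comp = 3 - 2 + min (1, 2) = 2, giving SHADDA
 * and FATHA the promised component value of 2. */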
bool is_base_ligature = _hb_glyph_info_is_base_glyph (&buffer->info[match_positions[0]]);
bool is_mark_ligature = _hb_glyph_info_is_mark (&buffer->info[match_positions[0]]);
for (unsigned int i = 1; i < count; i++)
if (!_hb_glyph_info_is_mark (&buffer->info[match_positions[i]]))
{
is_base_ligature = false;
is_mark_ligature = false;
break;
}
bool is_ligature = !is_base_ligature && !is_mark_ligature;
unsigned int klass = is_ligature ? HB_OT_LAYOUT_GLYPH_PROPS_LIGATURE : 0;
unsigned int lig_id = is_ligature ? _hb_allocate_lig_id (buffer) : 0;
unsigned int last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
unsigned int last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
unsigned int components_so_far = last_num_components;
if (is_ligature)
{
_hb_glyph_info_set_lig_props_for_ligature (&buffer->cur(), lig_id, total_component_count);
if (_hb_glyph_info_get_general_category (&buffer->cur()) == HB_UNICODE_GENERAL_CATEGORY_NON_SPACING_MARK)
{
_hb_glyph_info_set_general_category (&buffer->cur(), HB_UNICODE_GENERAL_CATEGORY_OTHER_LETTER);
}
}
c->replace_glyph_with_ligature (lig_glyph, klass);
for (unsigned int i = 1; i < count; i++)
{
while (buffer->idx < match_positions[i] && buffer->successful)
{
if (is_ligature)
{
unsigned int this_comp = _hb_glyph_info_get_lig_comp (&buffer->cur());
if (this_comp == 0)
this_comp = last_num_components;
unsigned int new_lig_comp = components_so_far - last_num_components +
hb_min (this_comp, last_num_components);
_hb_glyph_info_set_lig_props_for_mark (&buffer->cur(), lig_id, new_lig_comp);
}
(void) buffer->next_glyph ();
}
last_lig_id = _hb_glyph_info_get_lig_id (&buffer->cur());
last_num_components = _hb_glyph_info_get_lig_num_comps (&buffer->cur());
components_so_far += last_num_components;
/* Skip the base glyph */
buffer->idx++;
}
if (!is_mark_ligature && last_lig_id)
{
/* Re-adjust components for any marks following. */
for (unsigned i = buffer->idx; i < buffer->len; ++i)
{
if (last_lig_id != _hb_glyph_info_get_lig_id (&buffer->info[i])) break;
unsigned this_comp = _hb_glyph_info_get_lig_comp (&buffer->info[i]);
if (!this_comp) break;
unsigned new_lig_comp = components_so_far - last_num_components +
hb_min (this_comp, last_num_components);
_hb_glyph_info_set_lig_props_for_mark (&buffer->info[i], lig_id, new_lig_comp);
}
}
return_trace (true);
}
template <typename HBUINT>
static inline bool match_backtrack (hb_ot_apply_context_t *c,
unsigned int count,
const HBUINT backtrack[],
match_func_t match_func,
const void *match_data,
unsigned int *match_start)
{
TRACE_APPLY (nullptr);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
skippy_iter.reset (c->buffer->backtrack_len (), count);
skippy_iter.set_match_func (match_func, match_data);
skippy_iter.set_glyph_data (backtrack);
for (unsigned int i = 0; i < count; i++)
{
unsigned unsafe_from;
if (!skippy_iter.prev (&unsafe_from))
{
*match_start = unsafe_from;
return_trace (false);
}
}
*match_start = skippy_iter.idx;
return_trace (true);
}
template <typename HBUINT>
static inline bool match_lookahead (hb_ot_apply_context_t *c,
unsigned int count,
const HBUINT lookahead[],
match_func_t match_func,
const void *match_data,
unsigned int start_index,
unsigned int *end_index)
{
TRACE_APPLY (nullptr);
hb_ot_apply_context_t::skipping_iterator_t &skippy_iter = c->iter_context;
skippy_iter.reset (start_index - 1, count);
skippy_iter.set_match_func (match_func, match_data);
skippy_iter.set_glyph_data (lookahead);
for (unsigned int i = 0; i < count; i++)
{
unsigned unsafe_to;
if (!skippy_iter.next (&unsafe_to))
{
*end_index = unsafe_to;
return_trace (false);
}
}
*end_index = skippy_iter.idx + 1;
return_trace (true);
}
struct LookupRecord
{
bool serialize (hb_serialize_context_t *c,
const hb_map_t *lookup_map) const
{
TRACE_SERIALIZE (this);
auto *out = c->embed (*this);
if (unlikely (!out)) return_trace (false);
return_trace (c->check_assign (out->lookupListIndex, lookup_map->get (lookupListIndex), HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
HBUINT16 sequenceIndex; /* Index into current glyph
* sequence--first glyph = 0 */
HBUINT16 lookupListIndex; /* Lookup to apply to that
* position--zero-based */
public:
DEFINE_SIZE_STATIC (4);
};
static unsigned serialize_lookuprecord_array (hb_serialize_context_t *c,
const hb_array_t<const LookupRecord> lookupRecords,
const hb_map_t *lookup_map)
{
unsigned count = 0;
for (const LookupRecord& r : lookupRecords)
{
if (!lookup_map->has (r.lookupListIndex))
continue;
if (!r.serialize (c, lookup_map))
return 0;
count++;
}
return count;
}
enum ContextFormat { SimpleContext = 1, ClassBasedContext = 2, CoverageBasedContext = 3 };
template <typename HBUINT>
static void context_closure_recurse_lookups (hb_closure_context_t *c,
unsigned inputCount, const HBUINT input[],
unsigned lookupCount,
const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */,
unsigned value,
ContextFormat context_format,
const void *data,
intersected_glyphs_func_t intersected_glyphs_func,
void *cache)
{
hb_set_t covered_seq_indicies;
hb_set_t pos_glyphs;
for (unsigned int i = 0; i < lookupCount; i++)
{
unsigned seqIndex = lookupRecord[i].sequenceIndex;
if (seqIndex >= inputCount) continue;
bool has_pos_glyphs = false;
if (!covered_seq_indicies.has (seqIndex))
{
has_pos_glyphs = true;
pos_glyphs.clear ();
if (seqIndex == 0)
{
switch (context_format) {
case ContextFormat::SimpleContext:
pos_glyphs.add (value);
break;
case ContextFormat::ClassBasedContext:
intersected_glyphs_func (&c->parent_active_glyphs (), data, value, &pos_glyphs, cache);
break;
case ContextFormat::CoverageBasedContext:
pos_glyphs.set (c->parent_active_glyphs ());
break;
}
}
else
{
const void *input_data = input;
unsigned input_value = seqIndex - 1;
if (context_format != ContextFormat::SimpleContext)
{
input_data = data;
input_value = input[seqIndex - 1];
}
intersected_glyphs_func (c->glyphs, input_data, input_value, &pos_glyphs, cache);
}
}
covered_seq_indicies.add (seqIndex);
if (has_pos_glyphs) {
c->push_cur_active_glyphs () = std::move (pos_glyphs);
} else {
c->push_cur_active_glyphs ().set (*c->glyphs);
}
unsigned endIndex = inputCount;
if (context_format == ContextFormat::CoverageBasedContext)
endIndex += 1;
c->recurse (lookupRecord[i].lookupListIndex, &covered_seq_indicies, seqIndex, endIndex);
c->pop_cur_done_glyphs ();
}
}
template <typename context_t>
static inline void recurse_lookups (context_t *c,
unsigned int lookupCount,
const LookupRecord lookupRecord[] /* Array of LookupRecords--in design order */)
{
for (unsigned int i = 0; i < lookupCount; i++)
c->recurse (lookupRecord[i].lookupListIndex);
}
static inline void apply_lookup (hb_ot_apply_context_t *c,
unsigned int count, /* Including the first glyph */
unsigned int match_positions[HB_MAX_CONTEXT_LENGTH], /* Including the first glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[], /* Array of LookupRecords--in design order */
unsigned int match_end)
{
hb_buffer_t *buffer = c->buffer;
int end;
/* All positions are distance from beginning of *output* buffer.
* Adjust. */
{
unsigned int bl = buffer->backtrack_len ();
end = bl + match_end - buffer->idx;
int delta = bl - buffer->idx;
/* Convert positions to new indexing. */
for (unsigned int j = 0; j < count; j++)
match_positions[j] += delta;
}
for (unsigned int i = 0; i < lookupCount && buffer->successful; i++)
{
unsigned int idx = lookupRecord[i].sequenceIndex;
if (idx >= count)
continue;
unsigned int orig_len = buffer->backtrack_len () + buffer->lookahead_len ();
/* This can happen if earlier recursed lookups deleted many entries. */
if (unlikely (match_positions[idx] >= orig_len))
continue;
if (unlikely (!buffer->move_to (match_positions[idx])))
break;
if (unlikely (buffer->max_ops <= 0))
break;
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
if (buffer->have_output)
c->buffer->sync_so_far ();
c->buffer->message (c->font,
"recursing to lookup %u at %u",
(unsigned) lookupRecord[i].lookupListIndex,
buffer->idx);
}
if (!c->recurse (lookupRecord[i].lookupListIndex))
continue;
if (HB_BUFFER_MESSAGE_MORE && c->buffer->messaging ())
{
if (buffer->have_output)
c->buffer->sync_so_far ();
c->buffer->message (c->font,
"recursed to lookup %u",
(unsigned) lookupRecord[i].lookupListIndex);
}
unsigned int new_len = buffer->backtrack_len () + buffer->lookahead_len ();
int delta = new_len - orig_len;
if (!delta)
continue;
/* Recursed lookup changed buffer len. Adjust.
*
* TODO:
*
* Right now, if buffer length increased by n, we assume n new glyphs
* were added right after the current position, and if buffer length
* was decreased by n, we assume n match positions after the current
* one were removed. The former (buffer length increased) case is
* fine, but the decrease case can be improved in at least two ways,
* both of which are significant:
*
* - If recursed-to lookup is MultipleSubst and buffer length
* decreased, then it's the current match position that was deleted,
* NOT the one after it.
*
* - If buffer length was decreased by n, it does not necessarily
* mean that n match positions were removed, as the recursed-to
* lookup might have had a different LookupFlag. Here's a constructed
* case of that:
* https://github.com/harfbuzz/harfbuzz/discussions/3538
*
* It should be possible to construct tests for both of these cases.
*/
end += delta;
if (end < int (match_positions[idx]))
{
/* end may come out smaller than match_positions[idx] if the recursed
* lookup removed many items.
* Never rewind end beyond the start of the current position, since that
* is not possible in the recursed lookup; adjust delta accordingly.
*
* https://bugs.chromium.org/p/chromium/issues/detail?id=659496
* https://github.com/harfbuzz/harfbuzz/issues/1611
*/
delta += match_positions[idx] - end;
end = match_positions[idx];
}
unsigned int next = idx + 1; /* next now is the position after the recursed lookup. */
if (delta > 0)
{
if (unlikely (delta + count > HB_MAX_CONTEXT_LENGTH))
break;
}
else
{
/* NOTE: delta is non-positive. */
delta = hb_max (delta, (int) next - (int) count);
next -= delta;
}
/* Shift! */
memmove (match_positions + next + delta, match_positions + next,
(count - next) * sizeof (match_positions[0]));
next += delta;
count += delta;
/* Fill in new entries. */
for (unsigned int j = idx + 1; j < next; j++)
match_positions[j] = match_positions[j - 1] + 1;
/* And fixup the rest. */
for (; next < count; next++)
match_positions[next] += delta;
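/* An illustrative trace of the shift above (values are made up): with
* count == 4, match_positions == {10,11,12,13}, idx == 1, and delta == +2
* (the recursed lookup turned the glyph at position 11 into three glyphs),
* next == 2; the memmove copies {12,13} up into slots 4 and 5, the fill
* loop writes 12 and 13 into slots 2 and 3, and the fixup loop bumps
* slots 4 and 5 by delta, leaving {10,11,12,13,14,15}. */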
}
(void) buffer->move_to (end);
}
/* Contextual lookups */
struct ContextClosureLookupContext
{
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data;
void *intersects_cache;
void *intersected_glyphs_cache;
};
struct ContextCollectGlyphsLookupContext
{
ContextCollectGlyphsFuncs funcs;
const void *collect_data;
};
struct ContextApplyLookupContext
{
ContextApplyFuncs funcs;
const void *match_data;
};
template <typename HBUINT>
static inline bool context_intersects (const hb_set_t *glyphs,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
ContextClosureLookupContext &lookup_context)
{
return array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects,
lookup_context.intersects_data,
lookup_context.intersects_cache);
}
template <typename HBUINT>
static inline void context_closure_lookup (hb_closure_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
unsigned value, /* Index of first glyph in Coverage or Class value in ClassDef table */
ContextClosureLookupContext &lookup_context)
{
if (context_intersects (c->glyphs,
inputCount, input,
lookup_context))
context_closure_recurse_lookups (c,
inputCount, input,
lookupCount, lookupRecord,
value,
lookup_context.context_format,
lookup_context.intersects_data,
lookup_context.funcs.intersected_glyphs,
lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
static inline void context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ContextCollectGlyphsLookupContext &lookup_context)
{
collect_array (c, c->input,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.collect, lookup_context.collect_data);
recurse_lookups (c,
lookupCount, lookupRecord);
}
template <typename HBUINT>
static inline bool context_would_apply_lookup (hb_would_apply_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
const ContextApplyLookupContext &lookup_context)
{
return would_match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data);
}
template <typename HBUINT>
static inline bool context_apply_lookup (hb_ot_apply_context_t *c,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookupCount,
const LookupRecord lookupRecord[],
const ContextApplyLookupContext &lookup_context)
{
unsigned match_end = 0;
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
if (match_input (c,
inputCount, input,
lookup_context.funcs.match, lookup_context.match_data,
&match_end, match_positions))
{
c->buffer->unsafe_to_break (c->buffer->idx, match_end);
apply_lookup (c,
inputCount, match_positions,
lookupCount, lookupRecord,
match_end);
return true;
}
else
{
c->buffer->unsafe_to_concat (c->buffer->idx, match_end);
return false;
}
}
template <typename Types>
struct Rule
{
bool intersects (const hb_set_t *glyphs, ContextClosureLookupContext &lookup_context) const
{
return context_intersects (glyphs,
inputCount, inputZ.arrayZ,
lookup_context);
}
void closure (hb_closure_context_t *c, unsigned value, ContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
context_closure_lookup (c,
inputCount, inputZ.arrayZ,
lookupCount, lookupRecord.arrayZ,
value, lookup_context);
}
void closure_lookups (hb_closure_lookups_context_t *c,
ContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
if (!intersects (c->glyphs, lookup_context)) return;
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
recurse_lookups (c, lookupCount, lookupRecord.arrayZ);
}
void collect_glyphs (hb_collect_glyphs_context_t *c,
ContextCollectGlyphsLookupContext &lookup_context) const
{
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
context_collect_glyphs_lookup (c,
inputCount, inputZ.arrayZ,
lookupCount, lookupRecord.arrayZ,
lookup_context);
}
bool would_apply (hb_would_apply_context_t *c,
const ContextApplyLookupContext &lookup_context) const
{
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
return context_would_apply_lookup (c,
inputCount, inputZ.arrayZ,
lookupCount, lookupRecord.arrayZ,
lookup_context);
}
bool apply (hb_ot_apply_context_t *c,
const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
return_trace (context_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
}
bool serialize (hb_serialize_context_t *c,
const hb_map_t *input_mapping, /* old->new glyphid or class mapping */
const hb_map_t *lookup_map) const
{
TRACE_SERIALIZE (this);
auto *out = c->start_embed (this);
if (unlikely (!c->extend_min (out))) return_trace (false);
out->inputCount = inputCount;
const auto input = inputZ.as_array (inputCount - 1);
for (const auto org : input)
{
HBUINT16 d;
d = input_mapping->get (org);
c->copy (d);
}
const auto &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>>
(inputZ.as_array ((inputCount ? inputCount - 1 : 0)));
unsigned count = serialize_lookuprecord_array (c, lookupRecord.as_array (lookupCount), lookup_map);
return_trace (c->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
bool subset (hb_subset_context_t *c,
const hb_map_t *lookup_map,
const hb_map_t *klass_map = nullptr) const
{
TRACE_SUBSET (this);
if (unlikely (!inputCount)) return_trace (false);
const auto input = inputZ.as_array (inputCount - 1);
const hb_map_t *mapping = klass_map == nullptr ? c->plan->glyph_map : klass_map;
if (!hb_all (input, mapping)) return_trace (false);
return_trace (serialize (c->serializer, mapping, lookup_map));
}
public:
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (inputCount.sanitize (c) &&
lookupCount.sanitize (c) &&
c->check_range (inputZ.arrayZ,
inputZ.item_size * (inputCount ? inputCount - 1 : 0) +
LookupRecord::static_size * lookupCount));
}
protected:
HBUINT16 inputCount; /* Total number of glyphs in input
* glyph sequence--includes the first
* glyph */
HBUINT16 lookupCount; /* Number of LookupRecords */
UnsizedArrayOf<typename Types::HBUINT>
inputZ; /* Array of match inputs--start with
* second glyph */
/*UnsizedArrayOf<LookupRecord>
lookupRecordX;*/ /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_ARRAY (4, inputZ);
};
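/* A sketch of a serialized Rule, assuming SmallTypes (16-bit values) and
* made-up numbers: a rule matching the three-position class sequence
* 5 7 2 with one lookup record is laid out as inputCount=3, lookupCount=1,
* inputZ={7,2}, then one {sequenceIndex, lookupListIndex} pair. The first
* value (5) is not stored here; it is implied by which RuleSet (i.e. which
* Coverage index or class) this Rule hangs off of. */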
template <typename Types>
struct RuleSet
{
using Rule = OT::Rule<Types>;
bool intersects (const hb_set_t *glyphs,
ContextClosureLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const Rule &_) { return _.intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c, unsigned value,
ContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const Rule &_) { _.closure (c, value, lookup_context); })
;
}
void closure_lookups (hb_closure_lookups_context_t *c,
ContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const Rule &_) { _.closure_lookups (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c,
ContextCollectGlyphsLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const Rule &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c,
const ContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const Rule &_) { return _.would_apply (c, lookup_context); })
| hb_any
;
}
bool apply (hb_ot_apply_context_t *c,
const ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const Rule &_) { return _.apply (c, lookup_context); })
| hb_any
)
;
}
bool subset (hb_subset_context_t *c,
const hb_map_t *lookup_map,
const hb_map_t *klass_map = nullptr) const
{
TRACE_SUBSET (this);
auto snap = c->serializer->snapshot ();
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
for (const Offset16To<Rule>& _ : rule)
{
if (!_) continue;
auto o_snap = c->serializer->snapshot ();
auto *o = out->rule.serialize_append (c->serializer);
if (unlikely (!o)) continue;
if (!o->serialize_subset (c, _, this, lookup_map, klass_map))
{
out->rule.pop ();
c->serializer->revert (o_snap);
}
}
bool ret = bool (out->rule);
if (!ret) c->serializer->revert (snap);
return_trace (ret);
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (rule.sanitize (c, this));
}
protected:
Array16OfOffset16To<Rule>
rule; /* Array of Rule tables
* ordered by preference */
public:
DEFINE_SIZE_ARRAY (2, rule);
};
template <typename Types>
struct ContextFormat1_4
{
using RuleSet = OT::RuleSet<Types>;
bool intersects (const hb_set_t *glyphs) const
{
struct ContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
ContextFormat::SimpleContext,
nullptr
};
return
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map (hb_add (this))
| hb_map ([&] (const RuleSet &_) { return _.intersects (glyphs, lookup_context); })
| hb_any
;
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (), cur_active_glyphs);
struct ContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
ContextFormat::SimpleContext,
nullptr
};
+ hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
| hb_filter ([&] (hb_codepoint_t _) {
return c->previous_parent_active_glyphs ().has (_);
}, hb_first)
| hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const RuleSet&> (_.first, this+ruleSet[_.second]); })
| hb_apply ([&] (const hb_pair_t<unsigned, const RuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
;
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ContextClosureLookupContext lookup_context = {
{intersects_glyph, nullptr},
ContextFormat::SimpleContext,
nullptr
};
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_map (hb_add (this))
| hb_apply ([&] (const RuleSet &_) { _.closure_lookups (c, lookup_context); })
;
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
(this+coverage).collect_coverage (c->input);
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_glyph},
nullptr
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
nullptr
};
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED))
return_trace (false);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
nullptr
};
return_trace (rule_set.apply (c, lookup_context));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ hb_zip (this+coverage, ruleSet)
| hb_filter (glyphset, hb_first)
| hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
return_trace (bool (new_coverage));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
HBUINT16 format; /* Format identifier--format = 1 */
typename Types::template OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
Array16Of<typename Types::template OffsetTo<RuleSet>>
ruleSet; /* Array of RuleSet tables
* ordered by Coverage Index */
public:
DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
template <typename Types>
struct ContextFormat2_5
{
using RuleSet = OT::RuleSet<SmallTypes>;
bool intersects (const hb_set_t *glyphs) const
{
if (!(this+coverage).intersects (glyphs))
return false;
const ClassDef &class_def = this+classDef;
hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
&class_def,
&cache
};
hb_set_t retained_coverage_glyphs;
(this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
hb_set_t coverage_glyph_classes;
class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
return
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_enumerate
| hb_map ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
{ return class_def.intersects_class (glyphs, p.first) &&
coverage_glyph_classes.has (p.first) &&
p.second.intersects (glyphs, lookup_context); })
| hb_any
;
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
if (!(this+coverage).intersects (c->glyphs))
return;
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
cur_active_glyphs);
const ClassDef &class_def = this+classDef;
hb_map_t cache;
intersected_class_cache_t intersected_cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
&class_def,
&cache,
&intersected_cache
};
+ hb_enumerate (ruleSet)
| hb_filter ([&] (unsigned _)
{ return class_def.intersects_class (&c->parent_active_glyphs (), _); },
hb_first)
| hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<RuleSet>&> _)
{
const RuleSet& rule_set = this+_.second;
rule_set.closure (c, _.first, lookup_context);
})
;
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
if (!(this+coverage).intersects (c->glyphs))
return;
const ClassDef &class_def = this+classDef;
hb_map_t cache;
struct ContextClosureLookupContext lookup_context = {
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
&class_def,
&cache
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_enumerate
| hb_filter ([&] (const hb_pair_t<unsigned, const RuleSet &> p)
{ return class_def.intersects_class (c->glyphs, p.first); })
| hb_map (hb_second)
| hb_apply ([&] (const RuleSet & _)
{ _.closure_lookups (c, lookup_context); });
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
(this+coverage).collect_coverage (c->input);
const ClassDef &class_def = this+classDef;
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_class},
&class_def
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_apply ([&] (const RuleSet &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
const ClassDef &class_def = this+classDef;
unsigned int index = class_def.get_class (c->glyphs[0]);
const RuleSet &rule_set = this+ruleSet[index];
struct ContextApplyLookupContext lookup_context = {
{match_class},
&class_def
};
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
unsigned cache_cost () const
{
unsigned c = (this+classDef).cost () * ruleSet.len;
return c >= 4 ? c : 0;
}
bool cache_func (hb_ot_apply_context_t *c, bool enter) const
{
if (enter)
{
if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
return false;
auto &info = c->buffer->info;
unsigned count = c->buffer->len;
for (unsigned i = 0; i < count; i++)
info[i].syllable() = 255;
c->new_syllables = 255;
return true;
}
else
{
c->new_syllables = (unsigned) -1;
HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
return true;
}
}
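/* A note on the caching scheme sketched by cache_func above: while the
* cache is active, the otherwise-unused syllable field of each glyph
* memoizes class_def.get_class () for that glyph, with 255 meaning "not
* computed yet" (new glyphs inherit 255 via new_syllables). apply ()
* below consults the cached class first, and match_class_cached (used in
* the rules) can likewise reuse it, so the ClassDef is walked at most
* once per glyph as long as the class fits under 255. */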
bool apply (hb_ot_apply_context_t *c, bool cached = false) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const ClassDef &class_def = this+classDef;
struct ContextApplyLookupContext lookup_context = {
{cached ? match_class_cached : match_class},
&class_def
};
if (cached && c->buffer->cur().syllable() < 255)
index = c->buffer->cur().syllable ();
else
{
index = class_def.get_class (c->buffer->cur().codepoint);
if (cached && index < 255)
c->buffer->cur().syllable() = index;
}
const RuleSet &rule_set = this+ruleSet[index];
return_trace (rule_set.apply (c, lookup_context));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
if (unlikely (!out->coverage.serialize_subset (c, coverage, this)))
return_trace (false);
hb_map_t klass_map;
out->classDef.serialize_subset (c, classDef, this, &klass_map);
const hb_set_t* glyphset = c->plan->glyphset_gsub ();
hb_set_t retained_coverage_glyphs;
(this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
hb_set_t coverage_glyph_classes;
(this+classDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
bool ret = true;
int non_zero_index = -1, index = 0;
auto snapshot = c->serializer->snapshot();
for (const auto& _ : + hb_enumerate (ruleSet)
| hb_filter (klass_map, hb_first))
{
auto *o = out->ruleSet.serialize_append (c->serializer);
if (unlikely (!o))
{
ret = false;
break;
}
if (coverage_glyph_classes.has (_.first) &&
o->serialize_subset (c, _.second, this, lookup_map, &klass_map)) {
non_zero_index = index;
snapshot = c->serializer->snapshot();
}
index++;
}
if (!ret || non_zero_index == -1) return_trace (false);
// Prune empty trailing ruleSets.
--index;
while (index > non_zero_index)
{
out->ruleSet.pop ();
index--;
}
c->serializer->revert (snapshot);
return_trace (bool (out->ruleSet));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) && classDef.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
HBUINT16 format; /* Format identifier--format = 2 */
typename Types::template OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
typename Types::template OffsetTo<ClassDef>
classDef; /* Offset to glyph ClassDef table--from
* beginning of table */
Array16Of<typename Types::template OffsetTo<RuleSet>>
ruleSet; /* Array of RuleSet tables
* ordered by class */
public:
DEFINE_SIZE_ARRAY (4 + 2 * Types::size, ruleSet);
};
struct ContextFormat3
{
using RuleSet = OT::RuleSet<SmallTypes>;
bool intersects (const hb_set_t *glyphs) const
{
if (!(this+coverageZ[0]).intersects (glyphs))
return false;
struct ContextClosureLookupContext lookup_context = {
{intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
this
};
return context_intersects (glyphs,
glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
lookup_context);
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
if (!(this+coverageZ[0]).intersects (c->glyphs))
return;
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
cur_active_glyphs);
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
struct ContextClosureLookupContext lookup_context = {
{intersects_coverage, intersected_coverage_glyphs},
ContextFormat::CoverageBasedContext,
this
};
context_closure_lookup (c,
glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
lookupCount, lookupRecord,
0, lookup_context);
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
if (!intersects (c->glyphs))
return;
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
recurse_lookups (c, lookupCount, lookupRecord);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
(this+coverageZ[0]).collect_coverage (c->input);
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
struct ContextCollectGlyphsLookupContext lookup_context = {
{collect_coverage},
this
};
context_collect_glyphs_lookup (c,
glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
lookupCount, lookupRecord,
lookup_context);
}
bool would_apply (hb_would_apply_context_t *c) const
{
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return context_would_apply_lookup (c,
glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
lookupCount, lookupRecord,
lookup_context);
}
const Coverage &get_coverage () const { return this+coverageZ[0]; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverageZ[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return_trace (context_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->glyphCount = glyphCount;
auto coverages = coverageZ.as_array (glyphCount);
for (const Offset16To<Coverage>& offset : coverages)
{
/* TODO(subset) This looks like it should not be necessary to write this way. */
auto *o = c->serializer->allocate_size<Offset16To<Coverage>> (Offset16To<Coverage>::static_size);
if (unlikely (!o)) return_trace (false);
if (!o->serialize_subset (c, offset, this)) return_trace (false);
}
const auto& lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord>> (coverageZ.as_array (glyphCount));
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
unsigned count = serialize_lookuprecord_array (c->serializer, lookupRecord.as_array (lookupCount), lookup_map);
return_trace (c->serializer->check_assign (out->lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!c->check_struct (this)) return_trace (false);
unsigned int count = glyphCount;
if (!count) return_trace (false); /* We want to access coverageZ[0] freely. */
if (!c->check_array (coverageZ.arrayZ, count)) return_trace (false);
for (unsigned int i = 0; i < count; i++)
if (!coverageZ[i].sanitize (c, this)) return_trace (false);
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
return_trace (c->check_array (lookupRecord, lookupCount));
}
protected:
HBUINT16 format; /* Format identifier--format = 3 */
HBUINT16 glyphCount; /* Number of glyphs in the input glyph
* sequence */
HBUINT16 lookupCount; /* Number of LookupRecords */
UnsizedArrayOf<Offset16To<Coverage>>
coverageZ; /* Array of offsets to Coverage
* table in glyph sequence order */
/*UnsizedArrayOf<LookupRecord>
lookupRecordX;*/ /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_ARRAY (6, coverageZ);
};
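/* An illustrative ContextFormat3 layout (made-up numbers): a rule matching
* a three-glyph sequence with one lookup record serializes as format=3,
* glyphCount=3, lookupCount=1, three 16-bit offsets to Coverage tables
* (one per input position, the first included this time), then one
* LookupRecord. Unlike formats 1 and 2 there is no RuleSet indirection:
* the subtable holds exactly one inlined rule. */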
struct Context
{
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
TRACE_DISPATCH (this, u.format);
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
}
}
protected:
union {
HBUINT16 format; /* Format identifier */
ContextFormat1_4<SmallTypes> format1;
ContextFormat2_5<SmallTypes> format2;
ContextFormat3 format3;
#ifndef HB_NO_BEYOND_64K
ContextFormat1_4<MediumTypes> format4;
ContextFormat2_5<MediumTypes> format5;
#endif
} u;
};
/* Chaining Contextual lookups */
struct ChainContextClosureLookupContext
{
ContextClosureFuncs funcs;
ContextFormat context_format;
const void *intersects_data[3];
void *intersects_cache[3];
void *intersected_glyphs_cache;
};
struct ChainContextCollectGlyphsLookupContext
{
ContextCollectGlyphsFuncs funcs;
const void *collect_data[3];
};
struct ChainContextApplyLookupContext
{
ChainContextApplyFuncs funcs;
const void *match_data[3];
};
template <typename HBUINT>
static inline bool chain_context_intersects (const hb_set_t *glyphs,
unsigned int backtrackCount,
const HBUINT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const HBUINT lookahead[],
ChainContextClosureLookupContext &lookup_context)
{
return array_is_subset_of (glyphs,
backtrackCount, backtrack,
lookup_context.funcs.intersects,
lookup_context.intersects_data[0],
lookup_context.intersects_cache[0])
&& array_is_subset_of (glyphs,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.intersects,
lookup_context.intersects_data[1],
lookup_context.intersects_cache[1])
&& array_is_subset_of (glyphs,
lookaheadCount, lookahead,
lookup_context.funcs.intersects,
lookup_context.intersects_data[2],
lookup_context.intersects_cache[2]);
}
template <typename HBUINT>
static inline void chain_context_closure_lookup (hb_closure_context_t *c,
unsigned int backtrackCount,
const HBUINT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const HBUINT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
unsigned value,
ChainContextClosureLookupContext &lookup_context)
{
if (chain_context_intersects (c->glyphs,
backtrackCount, backtrack,
inputCount, input,
lookaheadCount, lookahead,
lookup_context))
context_closure_recurse_lookups (c,
inputCount, input,
lookupCount, lookupRecord,
value,
lookup_context.context_format,
lookup_context.intersects_data[1],
lookup_context.funcs.intersected_glyphs,
lookup_context.intersected_glyphs_cache);
}
template <typename HBUINT>
static inline void chain_context_collect_glyphs_lookup (hb_collect_glyphs_context_t *c,
unsigned int backtrackCount,
const HBUINT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const HBUINT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
ChainContextCollectGlyphsLookupContext &lookup_context)
{
collect_array (c, c->before,
backtrackCount, backtrack,
lookup_context.funcs.collect, lookup_context.collect_data[0]);
collect_array (c, c->input,
inputCount ? inputCount - 1 : 0, input,
lookup_context.funcs.collect, lookup_context.collect_data[1]);
collect_array (c, c->after,
lookaheadCount, lookahead,
lookup_context.funcs.collect, lookup_context.collect_data[2]);
recurse_lookups (c,
lookupCount, lookupRecord);
}
template <typename HBUINT>
static inline bool chain_context_would_apply_lookup (hb_would_apply_context_t *c,
unsigned int backtrackCount,
const HBUINT backtrack[] HB_UNUSED,
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const HBUINT lookahead[] HB_UNUSED,
unsigned int lookupCount HB_UNUSED,
const LookupRecord lookupRecord[] HB_UNUSED,
const ChainContextApplyLookupContext &lookup_context)
{
return (c->zero_context ? !backtrackCount && !lookaheadCount : true)
&& would_match_input (c,
inputCount, input,
lookup_context.funcs.match[1], lookup_context.match_data[1]);
}
template <typename HBUINT>
static inline bool chain_context_apply_lookup (hb_ot_apply_context_t *c,
unsigned int backtrackCount,
const HBUINT backtrack[],
unsigned int inputCount, /* Including the first glyph (not matched) */
const HBUINT input[], /* Array of input values--start with second glyph */
unsigned int lookaheadCount,
const HBUINT lookahead[],
unsigned int lookupCount,
const LookupRecord lookupRecord[],
const ChainContextApplyLookupContext &lookup_context)
{
unsigned end_index = c->buffer->idx;
unsigned match_end = 0;
unsigned match_positions[HB_MAX_CONTEXT_LENGTH];
if (!(match_input (c,
inputCount, input,
lookup_context.funcs.match[1], lookup_context.match_data[1],
&match_end, match_positions) && (end_index = match_end)
&& match_lookahead (c,
lookaheadCount, lookahead,
lookup_context.funcs.match[2], lookup_context.match_data[2],
match_end, &end_index)))
{
c->buffer->unsafe_to_concat (c->buffer->idx, end_index);
return false;
}
unsigned start_index = c->buffer->out_len;
if (!match_backtrack (c,
backtrackCount, backtrack,
lookup_context.funcs.match[0], lookup_context.match_data[0],
&start_index))
{
c->buffer->unsafe_to_concat_from_outbuffer (start_index, end_index);
return false;
}
c->buffer->unsafe_to_break_from_outbuffer (start_index, end_index);
apply_lookup (c,
inputCount, match_positions,
lookupCount, lookupRecord,
match_end);
return true;
}
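/* Note the matching order in chain_context_apply_lookup above: input
* first (from the current position), then lookahead (past match_end),
* then backtrack (walked backwards through the already-output glyphs,
* which is why start_index is taken from out_len). For a hypothetical
* rule "backtrack A, input B C, lookahead D", that means B C are tested
* first, then D, then A, and unsafe-to-break is only recorded once all
* three parts matched. */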
template <typename Types>
struct ChainRule
{
bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
return chain_context_intersects (glyphs,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ,
lookup_context);
}
void closure (hb_closure_context_t *c, unsigned value,
ChainContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
chain_context_closure_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ,
lookup.len, lookup.arrayZ,
value,
lookup_context);
}
void closure_lookups (hb_closure_lookups_context_t *c,
ChainContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
if (!intersects (c->glyphs, lookup_context)) return;
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
recurse_lookups (c, lookup.len, lookup.arrayZ);
}
void collect_glyphs (hb_collect_glyphs_context_t *c,
ChainContextCollectGlyphsLookupContext &lookup_context) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
chain_context_collect_glyphs_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ,
lookup.len, lookup.arrayZ,
lookup_context);
}
bool would_apply (hb_would_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return chain_context_would_apply_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ, lookup.len,
lookup.arrayZ, lookup_context);
}
bool apply (hb_ot_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return_trace (chain_context_apply_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ, lookup.len,
lookup.arrayZ, lookup_context));
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize_array (hb_serialize_context_t *c,
HBUINT16 len,
Iterator it) const
{
c->copy (len);
for (const auto g : it)
c->copy ((HBUINT16) g);
}
bool serialize (hb_serialize_context_t *c,
const hb_map_t *lookup_map,
const hb_map_t *backtrack_map,
const hb_map_t *input_map = nullptr,
const hb_map_t *lookahead_map = nullptr) const
{
TRACE_SERIALIZE (this);
auto *out = c->start_embed (this);
if (unlikely (!out)) return_trace (false);
const hb_map_t *mapping = backtrack_map;
serialize_array (c, backtrack.len, + backtrack.iter ()
| hb_map (mapping));
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (input_map) mapping = input_map;
serialize_array (c, input.lenP1, + input.iter ()
| hb_map (mapping));
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (lookahead_map) mapping = lookahead_map;
serialize_array (c, lookahead.len, + lookahead.iter ()
| hb_map (mapping));
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
HBUINT16* lookupCount = c->embed (&(lookup.len));
if (!lookupCount) return_trace (false);
unsigned count = serialize_lookuprecord_array (c, lookup.as_array (), lookup_map);
return_trace (c->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
bool subset (hb_subset_context_t *c,
const hb_map_t *lookup_map,
const hb_map_t *backtrack_map = nullptr,
const hb_map_t *input_map = nullptr,
const hb_map_t *lookahead_map = nullptr) const
{
TRACE_SUBSET (this);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (!backtrack_map)
{
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
if (!hb_all (backtrack, glyphset) ||
!hb_all (input, glyphset) ||
!hb_all (lookahead, glyphset))
return_trace (false);
serialize (c->serializer, lookup_map, c->plan->glyph_map);
}
else
{
if (!hb_all (backtrack, backtrack_map) ||
!hb_all (input, input_map) ||
!hb_all (lookahead, lookahead_map))
return_trace (false);
serialize (c->serializer, lookup_map, backtrack_map, input_map, lookahead_map);
}
return_trace (true);
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c)) return_trace (false);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (!input.sanitize (c)) return_trace (false);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (!lookahead.sanitize (c)) return_trace (false);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return_trace (lookup.sanitize (c));
}
protected:
Array16Of<typename Types::HBUINT>
backtrack; /* Array of backtracking values
* (to be matched before the input
* sequence) */
HeadlessArrayOf<typename Types::HBUINT>
inputX; /* Array of input values (start with
* second glyph) */
Array16Of<typename Types::HBUINT>
lookaheadX; /* Array of lookahead values (to be
* matched after the input sequence) */
Array16Of<LookupRecord>
lookupX; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_MIN (8);
};
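/* A sketch of ChainRule's variable-size layout, with made-up values: a
* rule with backtrack {b}, input sequence {i1, i2}, and lookahead {l}
* serializes as backtrack.len=1, {b}; inputX.lenP1=2, {i2} (the first
* input value is implied by the enclosing set, hence the headless array);
* lookaheadX.len=1, {l}; then the Array16Of<LookupRecord>. This is why
* the methods above chain StructAfter<> calls: only backtrack sits at a
* compile-time offset. */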
template <typename Types>
struct ChainRuleSet
{
using ChainRule = OT::ChainRule<Types>;
bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const ChainRule &_) { return _.intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c, unsigned value, ChainContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRule &_) { _.closure (c, value, lookup_context); })
;
}
void closure_lookups (hb_closure_lookups_context_t *c,
ChainContextClosureLookupContext &lookup_context) const
{
if (unlikely (c->lookup_limit_exceeded ())) return;
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRule &_) { _.closure_lookups (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRule &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
return
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const ChainRule &_) { return _.would_apply (c, lookup_context); })
| hb_any
;
}
bool apply (hb_ot_apply_context_t *c,
const ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
return_trace (
+ hb_iter (rule)
| hb_map (hb_add (this))
| hb_map ([&] (const ChainRule &_) { return _.apply (c, lookup_context); })
| hb_any
)
;
}
bool subset (hb_subset_context_t *c,
const hb_map_t *lookup_map,
const hb_map_t *backtrack_klass_map = nullptr,
const hb_map_t *input_klass_map = nullptr,
const hb_map_t *lookahead_klass_map = nullptr) const
{
TRACE_SUBSET (this);
auto snap = c->serializer->snapshot ();
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
for (const Offset16To<ChainRule>& _ : rule)
{
if (!_) continue;
auto o_snap = c->serializer->snapshot ();
auto *o = out->rule.serialize_append (c->serializer);
if (unlikely (!o)) continue;
if (!o->serialize_subset (c, _, this,
lookup_map,
backtrack_klass_map,
input_klass_map,
lookahead_klass_map))
{
out->rule.pop ();
c->serializer->revert (o_snap);
}
}
bool ret = bool (out->rule);
if (!ret) c->serializer->revert (snap);
return_trace (ret);
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (rule.sanitize (c, this));
}
protected:
Array16OfOffset16To<ChainRule>
rule; /* Array of ChainRule tables
* ordered by preference */
public:
DEFINE_SIZE_ARRAY (2, rule);
};
template <typename Types>
struct ChainContextFormat1_4
{
using ChainRuleSet = OT::ChainRuleSet<Types>;
bool intersects (const hb_set_t *glyphs) const
{
struct ChainContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
ContextFormat::SimpleContext,
{nullptr, nullptr, nullptr}
};
return
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map (hb_add (this))
| hb_map ([&] (const ChainRuleSet &_) { return _.intersects (glyphs, lookup_context); })
| hb_any
;
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
cur_active_glyphs);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_glyph, intersected_glyph},
ContextFormat::SimpleContext,
{nullptr, nullptr, nullptr}
};
+ hb_zip (this+coverage, hb_range ((unsigned) ruleSet.len))
| hb_filter ([&] (hb_codepoint_t _) {
return c->previous_parent_active_glyphs ().has (_);
}, hb_first)
| hb_map ([&](const hb_pair_t<hb_codepoint_t, unsigned> _) { return hb_pair_t<unsigned, const ChainRuleSet&> (_.first, this+ruleSet[_.second]); })
| hb_apply ([&] (const hb_pair_t<unsigned, const ChainRuleSet&>& _) { _.second.closure (c, _.first, lookup_context); })
;
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
struct ChainContextClosureLookupContext lookup_context = {
{intersects_glyph, nullptr},
ContextFormat::SimpleContext,
{nullptr, nullptr, nullptr}
};
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRuleSet &_) { _.closure_lookups (c, lookup_context); })
;
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
(this+coverage).collect_coverage (c->input);
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_glyph},
{nullptr, nullptr, nullptr}
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ChainContextApplyLookupContext lookup_context = {
{{match_glyph, match_glyph, match_glyph}},
{nullptr, nullptr, nullptr}
};
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{{match_glyph, match_glyph, match_glyph}},
{nullptr, nullptr, nullptr}
};
return_trace (rule_set.apply (c, lookup_context));
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ hb_zip (this+coverage, ruleSet)
| hb_filter (glyphset, hb_first)
| hb_filter (subset_offset_array (c, out->ruleSet, this, lookup_map), hb_second)
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;
out->coverage.serialize_serialize (c->serializer, new_coverage.iter ());
return_trace (bool (new_coverage));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) && ruleSet.sanitize (c, this));
}
protected:
HBUINT16 format; /* Format identifier--format = 1 */
typename Types::template OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
ruleSet; /* Array of ChainRuleSet tables
* ordered by Coverage Index */
public:
DEFINE_SIZE_ARRAY (2 + 2 * Types::size, ruleSet);
};
template <typename Types>
struct ChainContextFormat2_5
{
using ChainRuleSet = OT::ChainRuleSet<SmallTypes>;
bool intersects (const hb_set_t *glyphs) const
{
if (!(this+coverage).intersects (glyphs))
return false;
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]}
};
hb_set_t retained_coverage_glyphs;
(this+coverage).intersect_set (*glyphs, retained_coverage_glyphs);
hb_set_t coverage_glyph_classes;
input_class_def.intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
return
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_enumerate
| hb_map ([&] (const hb_pair_t<unsigned, const ChainRuleSet &> p)
{ return input_class_def.intersects_class (glyphs, p.first) &&
coverage_glyph_classes.has (p.first) &&
p.second.intersects (glyphs, lookup_context); })
| hb_any
;
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
if (!(this+coverage).intersects (c->glyphs))
return;
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
cur_active_glyphs);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
intersected_class_cache_t intersected_cache;
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, intersected_class_glyphs},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]},
&intersected_cache
};
+ hb_enumerate (ruleSet)
| hb_filter ([&] (unsigned _)
{ return input_class_def.intersects_class (&c->parent_active_glyphs (), _); },
hb_first)
| hb_apply ([&] (const hb_pair_t<unsigned, const typename Types::template OffsetTo<ChainRuleSet>&> _)
{
const ChainRuleSet& chainrule_set = this+_.second;
chainrule_set.closure (c, _.first, lookup_context);
})
;
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
if (!(this+coverage).intersects (c->glyphs))
return;
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
hb_map_t caches[3] = {};
struct ChainContextClosureLookupContext lookup_context = {
{intersects_class, nullptr},
ContextFormat::ClassBasedContext,
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def},
{&caches[0], &caches[1], &caches[2]}
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_enumerate
| hb_filter([&] (unsigned klass)
{ return input_class_def.intersects_class (c->glyphs, klass); }, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const ChainRuleSet &_)
{ _.closure_lookups (c, lookup_context); })
;
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
(this+coverage).collect_coverage (c->input);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_class},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
+ hb_iter (ruleSet)
| hb_map (hb_add (this))
| hb_apply ([&] (const ChainRuleSet &_) { _.collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
unsigned int index = input_class_def.get_class (c->glyphs[0]);
const ChainRuleSet &rule_set = this+ruleSet[index];
struct ChainContextApplyLookupContext lookup_context = {
{{match_class, match_class, match_class}},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
unsigned cache_cost () const
{
unsigned c = (this+lookaheadClassDef).cost () * ruleSet.len;
return c >= 4 ? c : 0;
}
bool cache_func (hb_ot_apply_context_t *c, bool enter) const
{
if (enter)
{
if (!HB_BUFFER_TRY_ALLOCATE_VAR (c->buffer, syllable))
return false;
auto &info = c->buffer->info;
unsigned count = c->buffer->len;
for (unsigned i = 0; i < count; i++)
info[i].syllable() = 255;
c->new_syllables = 255;
return true;
}
else
{
c->new_syllables = (unsigned) -1;
HB_BUFFER_DEALLOCATE_VAR (c->buffer, syllable);
return true;
}
}
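/* Descriptive note (added): the cache borrows the per-glyph `syllable'
* scratch byte of the buffer. On enter, every glyph is stamped 255, a
* sentinel the cached matcher treats as "class not cached yet" (an
* inference from the code above); glyphs inserted later inherit 255
* through new_syllables. On leave, the scratch byte is released and
* new_syllables is reset. */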
bool apply (hb_ot_apply_context_t *c, bool cached = false) const
{
TRACE_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
/* For ChainContextFormat2_5 we cache the LookaheadClassDef instead of InputClassDef.
* The reason is that most heavy fonts want to identify a glyph in context and apply
* a lookup to it. In this scenario, the length of the input sequence is one, whereas
* the lookahead / backtrack are typically longer. The one glyph in the input sequence
* is looked up below, and no input glyph is looked up in individual rules, whereas the
* lookahead and backtrack glyphs are tried. Since we match lookahead before backtrack,
* we should cache lookahead. This decision showed a 20% improvement in shaping of
* the Gulzar font.
*/
struct ChainContextApplyLookupContext lookup_context = {
{{cached && &backtrack_class_def == &lookahead_class_def ? match_class_cached : match_class,
cached && &input_class_def == &lookahead_class_def ? match_class_cached : match_class,
cached ? match_class_cached : match_class}},
{&backtrack_class_def,
&input_class_def,
&lookahead_class_def}
};
index = input_class_def.get_class (c->buffer->cur().codepoint);
const ChainRuleSet &rule_set = this+ruleSet[index];
return_trace (rule_set.apply (c, lookup_context));
}
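/* Note on the matcher selection above (added): only the lookahead
* ClassDef's classes are stored in the cache, so the cached matcher is
* safe for the backtrack or input position only when that position's
* ClassDef is literally the same object as the lookahead ClassDef. */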
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->coverage.serialize_subset (c, coverage, this);
hb_map_t backtrack_klass_map;
hb_map_t input_klass_map;
hb_map_t lookahead_klass_map;
out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, &backtrack_klass_map);
// TODO: subset inputClassDef based on glyphs survived in Coverage subsetting
out->inputClassDef.serialize_subset (c, inputClassDef, this, &input_klass_map);
out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, &lookahead_klass_map);
if (unlikely (!c->serializer->propagate_error (backtrack_klass_map,
input_klass_map,
lookahead_klass_map)))
return_trace (false);
const hb_set_t* glyphset = c->plan->glyphset_gsub ();
hb_set_t retained_coverage_glyphs;
(this+coverage).intersect_set (*glyphset, retained_coverage_glyphs);
hb_set_t coverage_glyph_classes;
(this+inputClassDef).intersected_classes (&retained_coverage_glyphs, &coverage_glyph_classes);
int non_zero_index = -1, index = 0;
bool ret = true;
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
auto last_non_zero = c->serializer->snapshot ();
for (const auto& _ : + hb_enumerate (ruleSet)
| hb_filter (input_klass_map, hb_first))
{
auto *o = out->ruleSet.serialize_append (c->serializer);
if (unlikely (!o))
{
ret = false;
break;
}
if (coverage_glyph_classes.has (_.first) &&
o->serialize_subset (c, _.second, this,
lookup_map,
&backtrack_klass_map,
&input_klass_map,
&lookahead_klass_map))
{
last_non_zero = c->serializer->snapshot ();
non_zero_index = index;
}
index++;
}
if (!ret || non_zero_index == -1) return_trace (false);
// prune empty trailing ruleSets
if (index > non_zero_index) {
c->serializer->revert (last_non_zero);
out->ruleSet.len = non_zero_index + 1;
}
return_trace (bool (out->ruleSet));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (coverage.sanitize (c, this) &&
backtrackClassDef.sanitize (c, this) &&
inputClassDef.sanitize (c, this) &&
lookaheadClassDef.sanitize (c, this) &&
ruleSet.sanitize (c, this));
}
protected:
HBUINT16 format; /* Format identifier--format = 2 */
typename Types::template OffsetTo<Coverage>
coverage; /* Offset to Coverage table--from
* beginning of table */
typename Types::template OffsetTo<ClassDef>
backtrackClassDef; /* Offset to glyph ClassDef table
* containing backtrack sequence
* data--from beginning of table */
typename Types::template OffsetTo<ClassDef>
inputClassDef; /* Offset to glyph ClassDef
* table containing input sequence
* data--from beginning of table */
typename Types::template OffsetTo<ClassDef>
lookaheadClassDef; /* Offset to glyph ClassDef table
* containing lookahead sequence
* data--from beginning of table */
Array16Of<typename Types::template OffsetTo<ChainRuleSet>>
ruleSet; /* Array of ChainRuleSet tables
* ordered by class */
public:
DEFINE_SIZE_ARRAY (4 + 4 * Types::size, ruleSet);
};
struct ChainContextFormat3
{
using RuleSet = OT::RuleSet<SmallTypes>;
bool intersects (const hb_set_t *glyphs) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (!(this+input[0]).intersects (glyphs))
return false;
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage, nullptr},
ContextFormat::CoverageBasedContext,
{this, this, this}
};
return chain_context_intersects (glyphs,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup_context);
}
bool may_have_non_1to1 () const
{ return true; }
void closure (hb_closure_context_t *c) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (!(this+input[0]).intersects (c->glyphs))
return;
hb_set_t& cur_active_glyphs = c->push_cur_active_glyphs ();
get_coverage ().intersect_set (c->previous_parent_active_glyphs (),
cur_active_glyphs);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
struct ChainContextClosureLookupContext lookup_context = {
{intersects_coverage, intersected_coverage_glyphs},
ContextFormat::CoverageBasedContext,
{this, this, this}
};
chain_context_closure_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ,
0, lookup_context);
c->pop_cur_done_glyphs ();
}
void closure_lookups (hb_closure_lookups_context_t *c) const
{
if (!intersects (c->glyphs))
return;
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
recurse_lookups (c, lookup.len, lookup.arrayZ);
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const {}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
(this+input[0]).collect_coverage (c->input);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
struct ChainContextCollectGlyphsLookupContext lookup_context = {
{collect_coverage},
{this, this, this}
};
chain_context_collect_glyphs_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ,
lookup_context);
}
bool would_apply (hb_would_apply_context_t *c) const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{{match_coverage, match_coverage, match_coverage}},
{this, this, this}
};
return chain_context_would_apply_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ, lookup_context);
}
const Coverage &get_coverage () const
{
const auto &input = StructAfter<decltype (inputX)> (backtrack);
return this+input[0];
}
bool apply (hb_ot_apply_context_t *c) const
{
TRACE_APPLY (this);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
unsigned int index = (this+input[0]).get_coverage (c->buffer->cur().codepoint);
if (likely (index == NOT_COVERED)) return_trace (false);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
struct ChainContextApplyLookupContext lookup_context = {
{{match_coverage, match_coverage, match_coverage}},
{this, this, this}
};
return_trace (chain_context_apply_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ, lookup_context));
}
template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize_coverage_offsets (hb_subset_context_t *c, Iterator it, const void* base) const
{
TRACE_SERIALIZE (this);
auto *out = c->serializer->start_embed<Array16OfOffset16To<Coverage>> ();
if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size)))
return_trace (false);
for (auto& offset : it) {
auto *o = out->serialize_append (c->serializer);
if (unlikely (!o) || !o->serialize_subset (c, offset, base))
return_trace (false);
}
return_trace (true);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->embed (this->format))) return_trace (false);
if (!serialize_coverage_offsets (c, backtrack.iter (), this))
return_trace (false);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (!serialize_coverage_offsets (c, input.iter (), this))
return_trace (false);
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (!serialize_coverage_offsets (c, lookahead.iter (), this))
return_trace (false);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
const hb_map_t *lookup_map = c->table_tag == HB_OT_TAG_GSUB ? &c->plan->gsub_lookups : &c->plan->gpos_lookups;
HBUINT16 *lookupCount = c->serializer->copy<HBUINT16> (lookup.len);
if (!lookupCount) return_trace (false);
unsigned count = serialize_lookuprecord_array (c->serializer, lookup.as_array (), lookup_map);
return_trace (c->serializer->check_assign (*lookupCount, count, HB_SERIALIZE_ERROR_INT_OVERFLOW));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!backtrack.sanitize (c, this)) return_trace (false);
const auto &input = StructAfter<decltype (inputX)> (backtrack);
if (!input.sanitize (c, this)) return_trace (false);
if (!input.len) return_trace (false); /* To be consistent with Context. */
const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
if (!lookahead.sanitize (c, this)) return_trace (false);
const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
return_trace (lookup.sanitize (c));
}
protected:
HBUINT16 format; /* Format identifier--format = 3 */
Array16OfOffset16To<Coverage>
backtrack; /* Array of coverage tables
* in backtracking sequence, in glyph
* sequence order */
Array16OfOffset16To<Coverage>
inputX; /* Array of coverage
* tables in input sequence, in glyph
* sequence order */
Array16OfOffset16To<Coverage>
lookaheadX; /* Array of coverage tables
* in lookahead sequence, in glyph
* sequence order */
Array16Of<LookupRecord>
lookupX; /* Array of LookupRecords--in
* design order */
public:
DEFINE_SIZE_MIN (10);
};
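/* Layout note (added): only `backtrack' sits at a fixed offset in
* ChainContextFormat3; inputX, lookaheadX and lookupX each follow a
* variable-length array, so the accessors above reach them by walking
* past the previous member:
*
* const auto &input = StructAfter<decltype (inputX)> (backtrack);
* const auto &lookahead = StructAfter<decltype (lookaheadX)> (input);
* const auto &lookup = StructAfter<decltype (lookupX)> (lookahead);
*/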
struct ChainContext
{
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
TRACE_DISPATCH (this, u.format);
switch (u.format) {
case 1: return_trace (c->dispatch (u.format1, std::forward<Ts> (ds)...));
case 2: return_trace (c->dispatch (u.format2, std::forward<Ts> (ds)...));
case 3: return_trace (c->dispatch (u.format3, std::forward<Ts> (ds)...));
#ifndef HB_NO_BEYOND_64K
case 4: return_trace (c->dispatch (u.format4, std::forward<Ts> (ds)...));
case 5: return_trace (c->dispatch (u.format5, std::forward<Ts> (ds)...));
#endif
default:return_trace (c->default_return_value ());
}
}
protected:
union {
HBUINT16 format; /* Format identifier */
ChainContextFormat1_4<SmallTypes> format1;
ChainContextFormat2_5<SmallTypes> format2;
ChainContextFormat3 format3;
#ifndef HB_NO_BEYOND_64K
ChainContextFormat1_4<MediumTypes> format4;
ChainContextFormat2_5<MediumTypes> format5;
#endif
} u;
};
template <typename T>
struct ExtensionFormat1
{
unsigned int get_type () const { return extensionLookupType; }
template <typename X>
const X& get_subtable () const
{ return this + reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset); }
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
if (unlikely (!c->may_dispatch (this, this))) return c->no_dispatch_return_value ();
TRACE_DISPATCH (this, format);
return_trace (get_subtable<typename T::SubTable> ().dispatch (c, get_type (), std::forward<Ts> (ds)...));
}
void collect_variation_indices (hb_collect_variation_indices_context_t *c) const
{ dispatch (c); }
/* This is called from may_dispatch() above with hb_sanitize_context_t. */
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this) &&
extensionLookupType != T::SubTable::Extension);
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->serializer->start_embed (this);
if (unlikely (!out || !c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->extensionLookupType = extensionLookupType;
const auto& src_offset =
reinterpret_cast<const Offset32To<typename T::SubTable> &> (extensionOffset);
auto& dest_offset =
reinterpret_cast<Offset32To<typename T::SubTable> &> (out->extensionOffset);
return_trace (dest_offset.serialize_subset (c, src_offset, this, get_type ()));
}
protected:
HBUINT16 format; /* Format identifier. Set to 1. */
HBUINT16 extensionLookupType; /* Lookup type of subtable referenced
* by ExtensionOffset (i.e. the
* extension subtable). */
Offset32 extensionOffset; /* Offset to the extension subtable,
* of lookup type subtable. */
public:
DEFINE_SIZE_STATIC (8);
};
template <typename T>
struct Extension
{
unsigned int get_type () const
{
switch (u.format) {
case 1: return u.format1.get_type ();
default:return 0;
}
}
template <typename X>
const X& get_subtable () const
{
switch (u.format) {
case 1: return u.format1.template get_subtable<typename T::SubTable> ();
default:return Null (typename T::SubTable);
}
}
// Specialization of dispatch for subset. dispatch() normally just
// dispatches to the subtable this points to, but for subset
// we need to run subset on this subtable too.
template <typename ...Ts>
typename hb_subset_context_t::return_t dispatch (hb_subset_context_t *c, Ts&&... ds) const
{
switch (u.format) {
case 1: return u.format1.subset (c);
default: return c->default_return_value ();
}
}
template <typename context_t, typename ...Ts>
typename context_t::return_t dispatch (context_t *c, Ts&&... ds) const
{
if (unlikely (!c->may_dispatch (this, &u.format))) return c->no_dispatch_return_value ();
TRACE_DISPATCH (this, u.format);
switch (u.format) {
case 1: return_trace (u.format1.dispatch (c, std::forward<Ts> (ds)...));
default:return_trace (c->default_return_value ());
}
}
protected:
union {
HBUINT16 format; /* Format identifier */
ExtensionFormat1<T> format1;
} u;
};
/*
* GSUB/GPOS Common
*/
struct hb_ot_layout_lookup_accelerator_t
{
template <typename TLookup>
static hb_ot_layout_lookup_accelerator_t *create (const TLookup &lookup)
{
unsigned count = lookup.get_subtable_count ();
unsigned size = sizeof (hb_ot_layout_lookup_accelerator_t) -
HB_VAR_ARRAY * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t) +
count * sizeof (hb_accelerate_subtables_context_t::hb_applicable_t);
/* The following is a calloc because when we are collecting subtables,
* some of them might be invalid and hence not collected; as a result,
* we might not fill in all the count entries of the subtables array.
* Zeroing it allows the set digest to gatekeep it without having to
* initialize it further. */
auto *thiz = (hb_ot_layout_lookup_accelerator_t *) hb_calloc (1, size);
if (unlikely (!thiz))
return nullptr;
hb_accelerate_subtables_context_t c_accelerate_subtables (thiz->subtables);
lookup.dispatch (&c_accelerate_subtables);
thiz->digest.init ();
for (auto& subtable : hb_iter (thiz->subtables, count))
thiz->digest.add (subtable.digest);
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
thiz->cache_user_idx = c_accelerate_subtables.cache_user_idx;
for (unsigned i = 0; i < count; i++)
if (i != thiz->cache_user_idx)
thiz->subtables[i].apply_cached_func = thiz->subtables[i].apply_func;
#endif
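/* Descriptive note (added): at most one subtable per lookup owns the
* cache (cache_user_idx); every other subtable has its cached-apply
* pointer aliased to the plain apply path, so requesting the cached
* variant is harmless for them. */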
return thiz;
}
bool may_have (hb_codepoint_t g) const
{ return digest.may_have (g); }
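/* Typical call pattern (sketch, added; not a verbatim call site): the
* digest lets callers reject a glyph without touching any subtable:
*
* if (accel->may_have (c->buffer->cur().codepoint))
* accel->apply (c, subtables_count, use_cache);
*/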
bool apply (hb_ot_apply_context_t *c, unsigned subtables_count, bool use_cache) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
if (use_cache)
{
return
+ hb_iter (hb_iter (subtables, subtables_count))
| hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply_cached (c); })
| hb_any
;
}
else
#endif
{
return
+ hb_iter (hb_iter (subtables, subtables_count))
| hb_map ([&c] (const hb_accelerate_subtables_context_t::hb_applicable_t &_) { return _.apply (c); })
| hb_any
;
}
return false;
}
bool cache_enter (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
return cache_user_idx != (unsigned) -1 &&
subtables[cache_user_idx].cache_enter (c);
#else
return false;
#endif
}
void cache_leave (hb_ot_apply_context_t *c) const
{
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
subtables[cache_user_idx].cache_leave (c);
#endif
}
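/* Intended pairing (sketch, added): a caller brackets a lookup's
* application with the two hooks and only leaves if it entered:
*
* bool cached = accel->cache_enter (c);
* ... apply subtables with use_cache = cached ...
* if (cached) accel->cache_leave (c);
*/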
hb_set_digest_t digest;
private:
#ifndef HB_NO_OT_LAYOUT_LOOKUP_CACHE
unsigned cache_user_idx = (unsigned) -1;
#endif
hb_accelerate_subtables_context_t::hb_applicable_t subtables[HB_VAR_ARRAY];
};
template <typename Types>
struct GSUBGPOSVersion1_2
{
friend struct GSUBGPOS;
protected:
FixedVersion<> version; /* Version of the GSUB/GPOS table--initially set
* to 0x00010000u */
typename Types:: template OffsetTo<ScriptList>
scriptList; /* ScriptList table */
typename Types::template OffsetTo<FeatureList>
featureList; /* FeatureList table */
typename Types::template OffsetTo<LookupList<Types>>
lookupList; /* LookupList table */
Offset32To<FeatureVariations>
featureVars; /* Offset to Feature Variations
* table--from beginning of table
* (may be NULL). Introduced
* in version 0x00010001. */
public:
DEFINE_SIZE_MIN (4 + 3 * Types::size);
unsigned int get_size () const
{
return min_size +
(version.to_int () >= 0x00010001u ? featureVars.static_size : 0);
}
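/* Worked example (added): a version 0x00010000 table ends after the
* three list offsets (min_size); from version 0x00010001 on, the
* 4-byte featureVars offset is part of the table, which is what the
* featureVars.static_size term above accounts for. */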
const typename Types::template OffsetTo<LookupList<Types>>* get_lookup_list_offset () const
{
return &lookupList;
}
template <typename TLookup>
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
typedef List16OfOffsetTo<TLookup, typename Types::HBUINT> TLookupList;
if (unlikely (!(scriptList.sanitize (c, this) &&
featureList.sanitize (c, this) &&
reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList).sanitize (c, this))))
return_trace (false);
#ifndef HB_NO_VAR
if (unlikely (!(version.to_int () < 0x00010001u || featureVars.sanitize (c, this))))
return_trace (false);
#endif
return_trace (true);
}
template <typename TLookup>
bool subset (hb_subset_layout_context_t *c) const
{
TRACE_SUBSET (this);
auto *out = c->subset_context->serializer->start_embed (this);
if (unlikely (!c->subset_context->serializer->extend_min (out))) return_trace (false);
out->version = version;
typedef LookupOffsetList<TLookup, typename Types::HBUINT> TLookupList;
reinterpret_cast<typename Types::template OffsetTo<TLookupList> &> (out->lookupList)
.serialize_subset (c->subset_context,
reinterpret_cast<const typename Types::template OffsetTo<TLookupList> &> (lookupList),
this,
c);
reinterpret_cast<typename Types::template OffsetTo<RecordListOfFeature> &> (out->featureList)
.serialize_subset (c->subset_context,
reinterpret_cast<const typename Types::template OffsetTo<RecordListOfFeature> &> (featureList),
this,
c);
out->scriptList.serialize_subset (c->subset_context,
scriptList,
this,
c);
#ifndef HB_NO_VAR
if (version.to_int () >= 0x00010001u)
{
auto snapshot = c->subset_context->serializer->snapshot ();
if (!c->subset_context->serializer->extend_min (&out->featureVars))
return_trace (false);
// TODO(qxliu76): the current implementation doesn't correctly handle feature variations
// that are dropped by instancing when the associated conditions don't trigger.
// Since partial instancing isn't yet supported this isn't an issue yet but will
// need to be fixed for partial instancing.
// If all axes are pinned, all feature variations are dropped.
bool ret = !c->subset_context->plan->all_axes_pinned
&& out->featureVars.serialize_subset (c->subset_context, featureVars, this, c);
if (!ret && version.major == 1)
{
c->subset_context->serializer->revert (snapshot);
out->version.major = 1;
out->version.minor = 0;
}
}
#endif
return_trace (true);
}
};
struct GSUBGPOS
{
unsigned int get_size () const
{
switch (u.version.major) {
case 1: return u.version1.get_size ();
#ifndef HB_NO_BEYOND_64K
case 2: return u.version2.get_size ();
#endif
default: return u.version.static_size;
}
}
template <typename TLookup>
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (unlikely (!u.version.sanitize (c))) return_trace (false);
switch (u.version.major) {
case 1: return_trace (u.version1.sanitize<TLookup> (c));
#ifndef HB_NO_BEYOND_64K
case 2: return_trace (u.version2.sanitize<TLookup> (c));
#endif
default: return_trace (true);
}
}
template <typename TLookup>
bool subset (hb_subset_layout_context_t *c) const
{
switch (u.version.major) {
case 1: return u.version1.subset<TLookup> (c);
#ifndef HB_NO_BEYOND_64K
case 2: return u.version2.subset<TLookup> (c);
#endif
default: return false;
}
}
const ScriptList &get_script_list () const
{
switch (u.version.major) {
case 1: return this+u.version1.scriptList;
#ifndef HB_NO_BEYOND_64K
case 2: return this+u.version2.scriptList;
#endif
default: return Null (ScriptList);
}
}
const FeatureList &get_feature_list () const
{
switch (u.version.major) {
case 1: return this+u.version1.featureList;
#ifndef HB_NO_BEYOND_64K
case 2: return this+u.version2.featureList;
#endif
default: return Null (FeatureList);
}
}
unsigned int get_lookup_count () const
{
switch (u.version.major) {
case 1: return (this+u.version1.lookupList).len;
#ifndef HB_NO_BEYOND_64K
case 2: return (this+u.version2.lookupList).len;
#endif
default: return 0;
}
}
const Lookup& get_lookup (unsigned int i) const
{
switch (u.version.major) {
case 1: return (this+u.version1.lookupList)[i];
#ifndef HB_NO_BEYOND_64K
case 2: return (this+u.version2.lookupList)[i];
#endif
default: return Null (Lookup);
}
}
const FeatureVariations &get_feature_variations () const
{
switch (u.version.major) {
case 1: return (u.version.to_int () >= 0x00010001u ? this+u.version1.featureVars : Null (FeatureVariations));
#ifndef HB_NO_BEYOND_64K
case 2: return this+u.version2.featureVars;
#endif
default: return Null (FeatureVariations);
}
}
bool has_data () const { return u.version.to_int (); }
unsigned int get_script_count () const
{ return get_script_list ().len; }
const Tag& get_script_tag (unsigned int i) const
{ return get_script_list ().get_tag (i); }
unsigned int get_script_tags (unsigned int start_offset,
unsigned int *script_count /* IN/OUT */,
hb_tag_t *script_tags /* OUT */) const
{ return get_script_list ().get_tags (start_offset, script_count, script_tags); }
const Script& get_script (unsigned int i) const
{ return get_script_list ()[i]; }
bool find_script_index (hb_tag_t tag, unsigned int *index) const
{ return get_script_list ().find_index (tag, index); }
unsigned int get_feature_count () const
{ return get_feature_list ().len; }
hb_tag_t get_feature_tag (unsigned int i) const
{ return i == Index::NOT_FOUND_INDEX ? HB_TAG_NONE : get_feature_list ().get_tag (i); }
unsigned int get_feature_tags (unsigned int start_offset,
unsigned int *feature_count /* IN/OUT */,
hb_tag_t *feature_tags /* OUT */) const
{ return get_feature_list ().get_tags (start_offset, feature_count, feature_tags); }
const Feature& get_feature (unsigned int i) const
{ return get_feature_list ()[i]; }
bool find_feature_index (hb_tag_t tag, unsigned int *index) const
{ return get_feature_list ().find_index (tag, index); }
bool find_variations_index (const int *coords, unsigned int num_coords,
unsigned int *index) const
{
#ifdef HB_NO_VAR
*index = FeatureVariations::NOT_FOUND_INDEX;
return false;
#endif
return get_feature_variations ().find_index (coords, num_coords, index);
}
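/* Usage sketch (added; variable names are hypothetical): with a
* variable font's normalized coordinates in hand,
*
* unsigned variations_index;
* table->find_variations_index (coords, num_coords, &variations_index);
* const Feature &f = table->get_feature_variation (feature_index,
* variations_index);
*/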
const Feature& get_feature_variation (unsigned int feature_index,
unsigned int variations_index) const
{
#ifndef HB_NO_VAR
if (FeatureVariations::NOT_FOUND_INDEX != variations_index &&
u.version.to_int () >= 0x00010001u)
{
const Feature *feature = get_feature_variations ().find_substitute (variations_index,
feature_index);
if (feature)
return *feature;
}
#endif
return get_feature (feature_index);
}
void feature_variation_collect_lookups (const hb_set_t *feature_indexes,
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map,
hb_set_t *lookup_indexes /* OUT */) const
{
#ifndef HB_NO_VAR
get_feature_variations ().collect_lookups (feature_indexes, feature_substitutes_map, lookup_indexes);
#endif
}
#ifndef HB_NO_VAR
void collect_feature_substitutes_with_variations (hb_collect_feature_substitutes_with_var_context_t *c) const
{ get_feature_variations ().collect_feature_substitutes_with_variations (c); }
#endif
template <typename TLookup>
void closure_lookups (hb_face_t *face,
const hb_set_t *glyphs,
hb_set_t *lookup_indexes /* IN/OUT */) const
{
hb_set_t visited_lookups, inactive_lookups;
hb_closure_lookups_context_t c (face, glyphs, &visited_lookups, &inactive_lookups);
c.set_recurse_func (TLookup::template dispatch_recurse_func<hb_closure_lookups_context_t>);
for (unsigned lookup_index : *lookup_indexes)
reinterpret_cast<const TLookup &> (get_lookup (lookup_index)).closure_lookups (&c, lookup_index);
hb_set_union (lookup_indexes, &visited_lookups);
hb_set_subtract (lookup_indexes, &inactive_lookups);
}
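/* Net effect (added note): lookup_indexes grows to include every lookup
* transitively reachable from the input set, then drops the lookups
* that can never apply to `glyphs'. */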
void prune_langsys (const hb_map_t *duplicate_feature_map,
const hb_set_t *layout_scripts,
hb_hashmap_t<unsigned, hb::unique_ptr<hb_set_t>> *script_langsys_map,
hb_set_t *new_feature_indexes /* OUT */) const
{
hb_prune_langsys_context_t c (this, script_langsys_map, duplicate_feature_map, new_feature_indexes);
unsigned count = get_script_count ();
for (unsigned script_index = 0; script_index < count; script_index++)
{
const Tag& tag = get_script_tag (script_index);
if (!layout_scripts->has (tag)) continue;
const Script& s = get_script (script_index);
s.prune_langsys (&c, script_index);
}
}
void prune_features (const hb_map_t *lookup_indices, /* IN */
const hb_hashmap_t<unsigned, hb::shared_ptr<hb_set_t>> *feature_record_cond_idx_map, /* IN */
const hb_hashmap_t<unsigned, const Feature*> *feature_substitutes_map, /* IN */
hb_set_t *feature_indices /* IN/OUT */) const
{
#ifndef HB_NO_VAR
// This is the set of feature indices which have alternate versions defined
// in the FeatureVariations table and whose alternate version(s) intersect the
// set of lookup indices.
hb_set_t alternate_feature_indices;
get_feature_variations ().closure_features (lookup_indices, feature_record_cond_idx_map, &alternate_feature_indices);
if (unlikely (alternate_feature_indices.in_error()))
{
feature_indices->err ();
return;
}
#endif
for (unsigned i : hb_iter (feature_indices))
{
hb_tag_t tag = get_feature_tag (i);
if (tag == HB_TAG ('p', 'r', 'e', 'f'))
// Note: Never ever drop feature 'pref', even if it's empty.
// HarfBuzz chooses shaper for Khmer based on presence of this
// feature. See thread at:
// http://lists.freedesktop.org/archives/harfbuzz/2012-November/002660.html
continue;
const Feature *f = &(get_feature (i));
const Feature** p = nullptr;
if (feature_substitutes_map->has (i, &p))
f = *p;
if (!f->featureParams.is_null () &&
tag == HB_TAG ('s', 'i', 'z', 'e'))
continue;
if (!f->intersects_lookup_indexes (lookup_indices)
#ifndef HB_NO_VAR
&& !alternate_feature_indices.has (i)
#endif
)
feature_indices->del (i);
}
}
void collect_name_ids (const hb_map_t *feature_index_map,
hb_set_t *nameids_to_retain /* OUT */) const
{
unsigned count = get_feature_count ();
for (unsigned i = 0 ; i < count; i++)
{
if (!feature_index_map->has (i)) continue;
hb_tag_t tag = get_feature_tag (i);
get_feature (i).collect_name_ids (tag, nameids_to_retain);
}
}
template <typename T>
struct accelerator_t
{
accelerator_t (hb_face_t *face)
{
this->table = hb_sanitize_context_t ().reference_table<T> (face);
if (unlikely (this->table->is_blocklisted (this->table.get_blob (), face)))
{
hb_blob_destroy (this->table.get_blob ());
this->table = hb_blob_get_empty ();
}
this->lookup_count = table->get_lookup_count ();
this->accels = (hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *) hb_calloc (this->lookup_count, sizeof (*accels));
if (unlikely (!this->accels))
{
this->lookup_count = 0;
this->table.destroy ();
this->table = hb_blob_get_empty ();
}
}
~accelerator_t ()
{
for (unsigned int i = 0; i < this->lookup_count; i++)
hb_free (this->accels[i]);
hb_free (this->accels);
this->table.destroy ();
}
hb_ot_layout_lookup_accelerator_t *get_accel (unsigned lookup_index) const
{
if (unlikely (lookup_index >= lookup_count)) return nullptr;
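/* Lazy, lock-free creation (added note): build an accelerator and try
* to install it atomically; if another thread won the race, cmpexch
* fails, we free ours and retry, picking up the winner's instance on
* the next acquire. */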
retry:
auto *accel = accels[lookup_index].get_acquire ();
if (unlikely (!accel))
{
accel = hb_ot_layout_lookup_accelerator_t::create (table->get_lookup (lookup_index));
if (unlikely (!accel))
return nullptr;
if (unlikely (!accels[lookup_index].cmpexch (nullptr, accel)))
{
hb_free (accel);
goto retry;
}
}
return accel;
}
hb_blob_ptr_t<T> table;
unsigned int lookup_count;
hb_atomic_ptr_t<hb_ot_layout_lookup_accelerator_t> *accels;
};
protected:
union {
FixedVersion<> version; /* Version identifier */
GSUBGPOSVersion1_2<SmallTypes> version1;
#ifndef HB_NO_BEYOND_64K
GSUBGPOSVersion1_2<MediumTypes> version2;
#endif
} u;
public:
DEFINE_SIZE_MIN (4);
};
} /* namespace OT */
#endif /* HB_OT_LAYOUT_GSUBGPOS_HH */