Merge branch 'var-subset' of https://github.com/harfbuzz/harfbuzz into var-subset

Author: Michiharu Ariza
Date:   2019-03-29 10:32:45 -07:00
Commit: ce6093a96b
68 changed files with 2509 additions and 1756 deletions


@ -27,24 +27,11 @@ jobs:
xcode: "10.2.0"
steps:
- checkout
- run: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget autoconf automake libtool pkg-config ragel freetype glib cairo icu4c
- run: brew link --force icu4c
- run: export PKG_CONFIG_PATH="/usr/local/opt/icu4c/lib/pkgconfig" && ./autogen.sh --with-freetype --with-glib --with-gobject --with-cairo --with-icu --with-coretext
- run: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget autoconf automake libtool pkg-config ragel freetype glib cairo icu4c graphite2
- run: export PKG_CONFIG_PATH="/usr/local/opt/icu4c/lib/pkgconfig" && ./autogen.sh --with-freetype --with-glib --with-gobject --with-cairo --with-icu --with-coretext --with-graphite2
- run: make -j4
- run: make check || .ci/fail.sh
macos-notest-ios:
macos:
xcode: "10.0.0"
steps:
- checkout
- run: HOMEBREW_NO_AUTO_UPDATE=1 brew install cmake
# not needed to be a framework but we like to test that also
# TODO: wrong way of targeting iOS as it doesn't point to iOS headers thus building
# CoreText support is not possible, after the fix feel free HB_IOS from CMake altogether
- run: cmake -DBUILD_FRAMEWORK=ON -H. -Bbuild -GXcode -DHB_HAVE_CORETEXT=OFF -DHB_BUILD_SUBSET=OFF -DHB_BUILD_TESTS=OFF
- run: cd build && xcodebuild -sdk iphoneos12.0 -configuration Release build -arch arm64
distcheck:
docker:
- image: ubuntu:17.10
@ -93,11 +80,11 @@ jobs:
clang-O3-O0:
docker:
- image: multiarch/crossbuild
- image: ubuntu:18.10
steps:
- checkout
- run: apt update || true
- run: apt install -y wget autoconf automake libtool pkg-config ragel libfreetype6-dev libfontconfig1-dev libglib2.0-dev libcairo2-dev libicu-dev libgraphite2-dev python python-pip
- run: apt install -y clang wget autoconf automake libtool pkg-config ragel libfreetype6-dev libfontconfig1-dev libglib2.0-dev libcairo2-dev libicu-dev libgraphite2-dev python python-pip
- run: pip install fonttools
- run: wget http://download.savannah.gnu.org/releases/freetype/freetype-2.9.tar.bz2 && tar xf freetype-2.9.tar.bz2 && cd freetype-2.9 && ./autogen.sh && ./configure && make -j32 && cd ..
- run: CFLAGS="-O3" CXXFLAGS="-O3" CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-fontconfig --with-glib --with-cairo --with-icu --with-graphite2
@ -135,7 +122,7 @@ jobs:
- run: apt update || true
- run: apt install -y clang lld binutils libtool autoconf automake make pkg-config gtk-doc-tools ragel libfreetype6-dev libfontconfig1-dev libglib2.0-dev libcairo2-dev libicu-dev libgraphite2-dev python python-pip
- run: pip install fonttools
- run: CFLAGS="-Weverything -Wno-reserved-id-macro -Wno-conversion -Wno-padded -Wno-sign-conversion -Wno-cast-qual -Wno-documentation -Wno-documentation-unknown-command" CXXFLAGS="-Weverything -Wno-old-style-cast -Wno-documentation -Wno-documentation-unknown-command -Wno-c++98-compat -Wno-cast-qual -Wno-c++98-compat-pedantic -Wno-sign-conversion -Wno-padded -Wno-shorten-64-to-32 -Wno-reserved-id-macro -Wno-float-conversion -Wno-format-pedantic -Wno-shadow -Wno-conversion -Wno-zero-as-null-pointer-constant -Wno-missing-field-initializers -Wno-used-but-marked-unused -Wno-unused-macros -Wno-comma -Wno-float-equal -Wno-disabled-macro-expansion -Wno-weak-vtables -Wno-unused-parameter -Wno-covered-switch-default -Wno-unreachable-code" CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-glib --with-cairo --with-icu --with-graphite2 --with-fontconfig
- run: CFLAGS="-Weverything -Wno-reserved-id-macro -Wno-conversion -Wno-padded -Wno-sign-conversion -Wno-cast-qual -Wno-documentation -Wno-documentation-unknown-command" CXXFLAGS="-Weverything -Wno-old-style-cast -Wno-documentation -Wno-documentation-unknown-command -Wno-c++98-compat -Wno-cast-qual -Wno-c++98-compat-pedantic -Wno-sign-conversion -Wno-padded -Wno-shorten-64-to-32 -Wno-reserved-id-macro -Wno-float-conversion -Wno-format-pedantic -Wno-shadow -Wno-conversion -Wno-zero-as-null-pointer-constant -Wno-missing-field-initializers -Wno-used-but-marked-unused -Wno-unused-macros -Wno-comma -Wno-float-equal -Wno-disabled-macro-expansion -Wno-weak-vtables -Wno-unused-parameter -Wno-covered-switch-default -Wno-unreachable-code -Wno-unused-template" CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-glib --with-cairo --with-icu --with-graphite2 --with-fontconfig
- run: make -j32 CPPFLAGS="-Werror"
- run: make check CPPFLAGS="-Werror" || .ci/fail.sh
@ -263,6 +250,7 @@ jobs:
steps:
- checkout
- run: git clone https://github.com/vitasdk/vdpm && cd vdpm && ./bootstrap-vitasdk.sh
- run: echo "#""!""/bin/true" > /usr/bin/ragel && chmod +x /usr/bin/ragel
- run: ./autogen.sh --prefix=/usr/local/vitasdk/arm-vita-eabi --host=arm-vita-eabi
- run: make -j32
@ -314,7 +302,6 @@ workflows:
- macos-10.12.6-aat-fonts
- macos-10.13.6-aat-fonts
- macos-10.14.3-aat-fonts
- macos-notest-ios
# both autotools and cmake
- distcheck
@ -334,7 +321,7 @@ workflows:
# cmake based builds
- cmake-gcc
- cmake-oracledeveloperstudio
#- cmake-oracledeveloperstudio
# crosscompiles
# they can't be tested, thus are built without tests

NEWS

@ -1,3 +1,14 @@
Overview of changes leading to 2.4.0
Monday, March 25, 2019
====================================
- Unicode 12.
- Misc fixes.
- Subsetter improvements.
- New API:
HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE
hb_directwrite_face_create()
Overview of changes leading to 2.3.1
Wednesday, January 30, 2019
====================================


@ -33,13 +33,14 @@ HarfBuzz release walk-through checklist:
That's what happened to 2.0.0 going out with 1.8.0 hb-version.h... So, that's
a clue.
7. "make release-files". Enter your GPG password. This creates a sha256 hash
and signs it.
8. Now that you have release files, commit NEWS, configure.ac, and src/hb-version.h,
7. Now that you have release files, commit NEWS, configure.ac, and src/hb-version.h,
as well as any REPLACEME changes you made. The commit message is simply the
release number. Eg. "1.4.7"
8. "make dist" again to get a tarball with your new commit in the ChangeLog. Then
"make release-files". Enter your GPG password. This creates a sha256 hash
and signs it. Check the size of the three resulting files.
9. Tag the release and sign it: Eg. "git tag -s 1.4.7 -m 1.4.7". Enter your
GPG password again.


@ -1,6 +1,6 @@
AC_PREREQ([2.64])
AC_INIT([HarfBuzz],
[2.3.1],
[2.4.0],
[https://github.com/harfbuzz/harfbuzz/issues/new],
[harfbuzz],
[http://harfbuzz.org/])
@ -23,7 +23,7 @@ AC_PROG_CC
AC_PROG_CC_C99
AM_PROG_CC_C_O
AC_PROG_CXX
dnl AX_CXX_COMPILE_STDCXX(11, noext, optional)
AX_CXX_COMPILE_STDCXX(11,, optional)
AC_SYS_LARGEFILE
PKG_PROG_PKG_CONFIG([0.20])
AM_MISSING_PROG([RAGEL], [ragel])


@ -150,7 +150,6 @@
<index id="api-index-1-5-0" role="1.5.0"><title>Index of new symbols in 1.5.0</title><xi:include href="xml/api-index-1.5.0.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-4-3" role="1.4.3"><title>Index of new symbols in 1.4.3</title><xi:include href="xml/api-index-1.4.3.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-4-2" role="1.4.2"><title>Index of new symbols in 1.4.2</title><xi:include href="xml/api-index-1.4.2.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-4-0" role="1.4.0"><title>Index of new symbols in 1.4.0</title><xi:include href="xml/api-index-1.4.0.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-3-3" role="1.3.3"><title>Index of new symbols in 1.3.3</title><xi:include href="xml/api-index-1.3.3.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-2-3" role="1.2.3"><title>Index of new symbols in 1.2.3</title><xi:include href="xml/api-index-1.2.3.xml"><xi:fallback /></xi:include></index>
<index id="api-index-1-1-3" role="1.1.3"><title>Index of new symbols in 1.1.3</title><xi:include href="xml/api-index-1.1.3.xml"><xi:fallback /></xi:include></index>


@ -216,6 +216,14 @@ hb_coretext_face_get_cg_font
hb_coretext_font_get_ct_font
</SECTION>
<SECTION>
<FILE>hb-directwrite</FILE>
hb_directwrite_face_create
hb_directwrite_face_get_font_face
<SUBSECTION Private>
hb_directwrite_shape_experimental_width
</SECTION>
<SECTION>
<FILE>hb-face</FILE>
hb_face_count
@ -715,8 +723,6 @@ hb_unicode_script_func_t
<FILE>hb-uniscribe</FILE>
hb_uniscribe_font_get_hfont
hb_uniscribe_font_get_logfontw
<SUBSECTION Private>
hb_directwrite_shape_experimental_width
</SECTION>
<SECTION>


@ -384,12 +384,16 @@ dump_use_data_SOURCES = dump-use-data.cc hb-ot-shape-complex-use-table.cc
dump_use_data_CPPFLAGS = $(HBCFLAGS)
dump_use_data_LDADD = libharfbuzz.la $(HBLIBS)
COMPILED_TESTS = test-iter test-ot-tag test-unicode-ranges
COMPILED_TESTS = test-algs test-iter test-ot-tag test-unicode-ranges
COMPILED_TESTS_CPPFLAGS = $(HBCFLAGS) -DMAIN -UNDEBUG
COMPILED_TESTS_LDADD = libharfbuzz.la $(HBLIBS)
check_PROGRAMS += $(COMPILED_TESTS)
TESTS += $(COMPILED_TESTS)
test_algs_SOURCES = test-algs.cc hb-static.cc
test_algs_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_algs_LDADD = $(COMPILED_TESTS_LDADD)
test_iter_SOURCES = test-iter.cc hb-static.cc
test_iter_CPPFLAGS = $(COMPILED_TESTS_CPPFLAGS)
test_iter_LDADD = $(COMPILED_TESTS_LDADD)


@ -16,6 +16,7 @@ HB_BASE_sources = \
hb-aat-ltag-table.hh \
hb-aat-map.cc \
hb-aat-map.hh \
hb-algs.hh \
hb-array.hh \
hb-atomic.hh \
hb-blob.cc \
@ -31,7 +32,6 @@ HB_BASE_sources = \
hb-cff2-interp-cs.hh \
hb-common.cc \
hb-debug.hh \
hb-dsalgs.hh \
hb-face.cc \
hb-face.hh \
hb-font.cc \
@ -42,6 +42,7 @@ HB_BASE_sources = \
hb-map.cc \
hb-map.hh \
hb-bimap.hh \
hb-meta.hh \
hb-mutex.hh \
hb-null.hh \
hb-object.hh \


@ -84,7 +84,7 @@ struct hb_aat_map_builder_t
hb_face_t *face;
public:
hb_vector_t<feature_info_t> features;
hb_sorted_vector_t<feature_info_t> features;
};


@ -24,16 +24,71 @@
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_DSALGS_HH
#define HB_DSALGS_HH
#ifndef HB_ALGS_HH
#define HB_ALGS_HH
#include "hb.hh"
#include "hb-meta.hh"
#include "hb-null.hh"
/* Void! For when we need an expression-type of void. */
typedef const struct _hb_void_t *hb_void_t;
#define HB_VOID ((const _hb_void_t *) nullptr)
static const struct
{
template <typename T,
hb_enable_if (hb_is_integer (T))>
uint32_t operator () (T v) const
{
/* Knuth's multiplicative method: */
return (uint32_t) v * 2654435761u;
}
template <typename T>
uint32_t operator () (T *v) const { return hb_hash (*v); }
template <typename T,
hb_enable_if (!hb_is_integer (hb_remove_reference (T)) && !hb_is_pointer (T))>
uint32_t operator () (T&& v) const { return v.hash (); }
} hb_hash HB_UNUSED;
static const struct
{
template <typename T> T
operator () (const T& v) const { return v; }
} hb_identity HB_UNUSED;
static const struct
{
template <typename T> bool
operator () (const T& v) const { return bool (v); }
} hb_bool HB_UNUSED;
template <typename T1, typename T2>
struct hb_pair_t
{
typedef T1 first_t;
typedef T2 second_t;
typedef hb_pair_t<T1, T2> pair_t;
hb_pair_t (T1 a, T2 b) : first (a), second (b) {}
hb_pair_t (const pair_t& o) : first (o.first), second (o.second) {}
bool operator == (const pair_t& o) const { return first == o.first && second == o.second; }
T1 first;
T2 second;
};
template <typename T1, typename T2> static inline hb_pair_t<T1, T2>
hb_pair (T1&& a, T2&& b) { return hb_pair_t<T1, T2> (a, b); }
static const struct
{
template <typename Pair> decltype (hb_declval (Pair).first)
operator () (const Pair& pair) const { return pair.first; }
} hb_first HB_UNUSED;
static const struct
{
template <typename Pair> decltype (hb_declval (Pair).second)
operator () (const Pair& pair) const { return pair.second; }
} hb_second HB_UNUSED;
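
A minimal usage sketch for the new functors above (illustrative only; assumes hb-algs.hh is included and a C++11 compiler):

  /* hb_hash dispatches on type: integers use Knuth's multiplicative hash,
   * pointers hash what they point to, everything else calls v.hash (). */
  uint32_t h = hb_hash (1234u);

  /* hb_pair bundles two values; hb_first / hb_second project them out again. */
  hb_pair_t<unsigned, float> p = hb_pair (7u, 1.5f);
  unsigned k = hb_first (p);   /* 7 */
  float    w = hb_second (p);  /* 1.5f */

hb_identity and hb_bool are the default projection and predicate picked up by hb_filter later in this commit.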
/*
@ -233,18 +288,6 @@ hb_ctz (T v)
* Tiny stuff.
*/
template <typename T>
static inline T* hb_addressof (T& arg)
{
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wcast-align"
/* https://en.cppreference.com/w/cpp/memory/addressof */
return reinterpret_cast<T*>(
&const_cast<char&>(
reinterpret_cast<const volatile char&>(arg)));
#pragma GCC diagnostic pop
}
/* ASCII tag/character handling */
static inline bool ISALPHA (unsigned char c)
{ return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z'); }
@ -298,17 +341,6 @@ hb_ceil_to_4 (unsigned int v)
return ((v - 1) | 3) + 1;
}
template <typename T> struct hb_is_signed;
/* https://github.com/harfbuzz/harfbuzz/issues/1535 */
template <> struct hb_is_signed<int8_t> { enum { value = true }; };
template <> struct hb_is_signed<int16_t> { enum { value = true }; };
template <> struct hb_is_signed<int32_t> { enum { value = true }; };
template <> struct hb_is_signed<int64_t> { enum { value = true }; };
template <> struct hb_is_signed<uint8_t> { enum { value = false }; };
template <> struct hb_is_signed<uint16_t> { enum { value = false }; };
template <> struct hb_is_signed<uint32_t> { enum { value = false }; };
template <> struct hb_is_signed<uint64_t> { enum { value = false }; };
template <typename T> static inline bool
hb_in_range (T u, T lo, T hi)
{
@ -482,16 +514,17 @@ static inline void sort_r_simple(void *base, size_t nel, size_t w,
}
}
static inline void hb_sort_r(void *base, size_t nel, size_t width,
int (*compar)(const void *_a, const void *_b, void *_arg),
void *arg)
static inline void
hb_sort_r (void *base, size_t nel, size_t width,
int (*compar)(const void *_a, const void *_b, void *_arg),
void *arg)
{
sort_r_simple(base, nel, width, compar, arg);
}
template <typename T, typename T2> static inline void
hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *), T2 *array2)
template <typename T, typename T2, typename T3> static inline void
hb_stable_sort (T *array, unsigned int len, int(*compar)(const T2 *, const T2 *), T3 *array2)
{
for (unsigned int i = 1; i < len; i++)
{
@ -508,8 +541,8 @@ hb_stable_sort (T *array, unsigned int len, int(*compar)(const T *, const T *),
}
if (array2)
{
T2 t = array2[i];
memmove (&array2[j + 1], &array2[j], (i - j) * sizeof (T2));
T3 t = array2[i];
memmove (&array2[j + 1], &array2[j], (i - j) * sizeof (T3));
array2[j] = t;
}
}
@ -624,4 +657,4 @@ struct hb_vector_size_t
};
#endif /* HB_DSALGS_HH */
#endif /* HB_ALGS_HH */


@ -28,7 +28,7 @@
#define HB_ARRAY_HH
#include "hb.hh"
#include "hb-dsalgs.hh"
#include "hb-algs.hh"
#include "hb-iter.hh"
#include "hb-null.hh"
@ -37,22 +37,22 @@ template <typename Type>
struct hb_sorted_array_t;
template <typename Type>
struct hb_array_t :
hb_iter_t<hb_array_t<Type>, Type>,
hb_iter_mixin_t<hb_array_t<Type>, Type>
struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
{
/*
* Constructors.
*/
hb_array_t () : arrayZ (nullptr), length (0) {}
template <typename U = Type, hb_enable_if (hb_is_const (U))>
hb_array_t (const hb_array_t<hb_remove_const (Type)> &o) : arrayZ (o.arrayZ), length (o.length) {}
hb_array_t (Type *array_, unsigned int length_) : arrayZ (array_), length (length_) {}
template <unsigned int length_> hb_array_t (Type (&array_)[length_]) : arrayZ (array_), length (length_) {}
/*
* Iterator implementation.
*/
typedef Type __item_type__;
typedef Type& __item_t__;
static constexpr bool is_random_access_iterator = true;
Type& __item_at__ (unsigned i) const
{
if (unlikely (i >= length)) return CrapOrNull (Type);
@ -72,7 +72,6 @@ struct hb_array_t :
length -= n;
}
unsigned __len__ () const { return length; }
bool __random_access__ () const { return true; }
/* Extra operators.
*/
@ -183,7 +182,6 @@ template <typename T, unsigned int length_> inline hb_array_t<T>
hb_array (T (&array_)[length_])
{ return hb_array_t<T> (array_); }
enum hb_bfind_not_found_t
{
HB_BFIND_NOT_FOUND_DONT_STORE,
@ -193,12 +191,18 @@ enum hb_bfind_not_found_t
template <typename Type>
struct hb_sorted_array_t :
hb_sorted_iter_t<hb_sorted_array_t<Type>, Type>,
hb_array_t<Type>,
hb_iter_mixin_t<hb_sorted_array_t<Type>, Type>
hb_iter_t<hb_sorted_array_t<Type>, Type&>,
hb_array_t<Type>
{
typedef hb_iter_t<hb_sorted_array_t<Type>, Type&> iter_base_t;
HB_ITER_USING (iter_base_t);
static constexpr bool is_random_access_iterator = true;
static constexpr bool is_sorted_iterator = true;
hb_sorted_array_t () : hb_array_t<Type> () {}
hb_sorted_array_t (const hb_array_t<Type> &o) : hb_array_t<Type> (o) {}
template <typename U = Type, hb_enable_if (hb_is_const (U))>
hb_sorted_array_t (const hb_sorted_array_t<hb_remove_const (Type)> &o) : hb_array_t<Type> (o) {}
hb_sorted_array_t (Type *array_, unsigned int length_) : hb_array_t<Type> (array_, length_) {}
template <unsigned int length_> hb_sorted_array_t (Type (&array_)[length_]) : hb_array_t<Type> (array_) {}


@ -33,6 +33,7 @@
#define HB_ATOMIC_HH
#include "hb.hh"
#include "hb-meta.hh"
/*
@ -282,7 +283,7 @@ struct hb_atomic_int_t
template <typename P>
struct hb_atomic_ptr_t
{
typedef typename hb_remove_pointer (P) T;
typedef hb_remove_pointer (P) T;
void init (T* v_ = nullptr) { set_relaxed (v_); }
void set_relaxed (T* v_) { hb_atomic_ptr_impl_set_relaxed (&v, v_); }


@ -81,7 +81,7 @@ struct hb_blob_t
template <typename P>
struct hb_blob_ptr_t
{
typedef typename hb_remove_pointer (P) T;
typedef hb_remove_pointer (P) T;
hb_blob_ptr_t (hb_blob_t *b_ = nullptr) : b (b_) {}
hb_blob_t * operator = (hb_blob_t *b_) { return b = b_; }


@ -287,7 +287,7 @@ hb_buffer_guess_segment_properties (hb_buffer_t *buffer);
* @HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE:
* flag indicating that a dotted circle should
* not be inserted in the rendering of incorrect
* character sequences (such as <0905 093E>). Since: REPLACEME
* character sequences (such as <0905 093E>). Since: 2.4
*
* Since: 0.9.20
*/


@ -272,11 +272,11 @@ struct UnsizedByteStr : UnsizedArrayOf <HBUINT8>
HBUINT8 *p = c->allocate_size<HBUINT8> (1);
if (unlikely (p == nullptr)) return_trace (false);
p->set (intOp);
*p = intOp;
INTTYPE *ip = c->allocate_size<INTTYPE> (INTTYPE::static_size);
if (unlikely (ip == nullptr)) return_trace (false);
ip->set ((unsigned int)value);
*ip = (unsigned int) value;
return_trace (true);
}


@ -358,7 +358,7 @@ typedef enum
/*11.0*/HB_SCRIPT_SOGDIAN = HB_TAG ('S','o','g','d'),
/*
* Since REPLACEME
* Since 2.4.0
*/
/*12.0*/HB_SCRIPT_ELYMAIC = HB_TAG ('E','l','y','m'),
/*12.0*/HB_SCRIPT_NANDINAGARI = HB_TAG ('N','a','n','d'),


@ -29,7 +29,7 @@
#include "hb.hh"
#include "hb-atomic.hh"
#include "hb-dsalgs.hh"
#include "hb-algs.hh"
#ifndef HB_DEBUG
@ -437,25 +437,12 @@ struct hb_no_trace_t {
#define TRACE_SUBSET(this) hb_no_trace_t<bool> trace
#endif
#ifndef HB_DEBUG_WOULD_APPLY
#define HB_DEBUG_WOULD_APPLY (HB_DEBUG+0)
#endif
#if HB_DEBUG_WOULD_APPLY
#define TRACE_WOULD_APPLY(this) \
hb_auto_trace_t<HB_DEBUG_WOULD_APPLY, bool> trace \
(&c->debug_depth, c->get_name (), this, HB_FUNC, \
"%d glyphs", c->len);
#else
#define TRACE_WOULD_APPLY(this) hb_no_trace_t<bool> trace
#endif
#ifndef HB_DEBUG_DISPATCH
#define HB_DEBUG_DISPATCH ( \
HB_DEBUG_APPLY + \
HB_DEBUG_SANITIZE + \
HB_DEBUG_SERIALIZE + \
HB_DEBUG_SUBSET + \
HB_DEBUG_WOULD_APPLY + \
HB_DEBUG_SUBSET + \
0)
#endif
#if HB_DEBUG_DISPATCH


@ -846,10 +846,23 @@ _hb_directwrite_shape (hb_shape_plan_t *shape_plan,
features, num_features, 0);
}
/*
* Public [experimental] API
*/
/**
* hb_directwrite_shape_experimental_width:
* Experimental API to test DirectWrite's justification algorithm.
*
* It inserts Kashida in the wrong order, so don't ever use this API.
*
* It doesn't work with cygwin/msys due to header bugs, so for now one
* should use the MSVC toolchain to build and use it.
*
* @font:
* @buffer:
* @features:
* @num_features:
* @width:
*
* Since: 1.4.2
**/
hb_bool_t
hb_directwrite_shape_experimental_width (hb_font_t *font,
hb_buffer_t *buffer,
@ -918,7 +931,8 @@ _hb_directwrite_font_release (void *data)
/**
* hb_directwrite_face_create:
* @font_face:
* Since: REPLACEME
*
* Since: 2.4.0
**/
hb_face_t *
hb_directwrite_face_create (IDWriteFontFace *font_face)
@ -928,3 +942,15 @@ hb_directwrite_face_create (IDWriteFontFace *font_face)
return hb_face_create_for_tables (reference_table, font_face,
_hb_directwrite_font_release);
}
/**
* hb_directwrite_face_get_font_face:
* @face:
*
* Since: REPLACEME
**/
IDWriteFontFace *
hb_directwrite_face_get_font_face (hb_face_t *face)
{
return face->data.directwrite->fontFace;
}


@ -37,6 +37,9 @@ hb_directwrite_shape_experimental_width (hb_font_t *font, hb_buffer_t *buffer,
HB_EXTERN hb_face_t *
hb_directwrite_face_create (IDWriteFontFace *font_face);
HB_EXTERN IDWriteFontFace *
hb_directwrite_face_get_font_face (hb_face_t *face);
HB_END_DECLS
#endif /* HB_DIRECTWRITE_H */


@ -96,7 +96,7 @@ _hb_ft_font_create (FT_Face ft_face, bool symbol, bool unref)
ft_font->load_flags = FT_LOAD_DEFAULT | FT_LOAD_NO_HINTING;
ft_font->cached_x_scale.set (0);
ft_font->cached_x_scale.set_relaxed (0);
ft_font->advance_cache.init ();
return ft_font;


@ -28,7 +28,7 @@
#define HB_ITER_HH
#include "hb.hh"
#include "hb-null.hh"
#include "hb-meta.hh"
/* Unified iterator object.
@ -41,14 +41,20 @@
* returns rvalues.
*/
/*
* Base classes for iterators.
*/
/* Base class for all iterators. */
template <typename Iter, typename Item = typename Iter::__item_type__>
template <typename iter_t, typename Item = typename iter_t::__item_t__>
struct hb_iter_t
{
typedef Iter iter_t;
typedef iter_t const_iter_t;
typedef Item item_t;
static constexpr unsigned item_size = hb_static_size (Item);
static constexpr bool is_iterator = true;
static constexpr bool is_random_access_iterator = false;
static constexpr bool is_sorted_iterator = false;
private:
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
@ -56,32 +62,35 @@ struct hb_iter_t
iter_t* thiz () { return static_cast< iter_t *> (this); }
public:
/* Operators. */
operator iter_t () { return iter(); }
explicit_operator bool () const { return more (); }
item_t& operator * () const { return item (); }
item_t& operator [] (signed i) const { return item_at ((unsigned) i); }
iter_t& operator += (unsigned count) { forward (count); return *thiz(); }
iter_t& operator ++ () { next (); return *thiz(); }
iter_t& operator -= (unsigned count) { rewind (count); return *thiz(); }
iter_t& operator -- () { prev (); return *thiz(); }
iter_t operator + (unsigned count) { iter_t c (*thiz()); c += count; return c; }
iter_t operator ++ (int) { iter_t c (*thiz()); ++*thiz(); return c; }
iter_t operator - (unsigned count) { iter_t c (*thiz()); c -= count; return c; }
iter_t operator -- (int) { iter_t c (*thiz()); --*thiz(); return c; }
/* TODO:
* Port operators below to use hb_enable_if to sniff which method implements
* an operator and use it, and remove hb_iter_fallback_mixin_t completely. */
/* Methods. */
/* Operators. */
iter_t iter () const { return *thiz(); }
const_iter_t const_iter () const { return iter (); }
item_t& item () const { return thiz()->__item__ (); }
item_t& item_at (unsigned i) const { return thiz()->__item_at__ (i); }
bool more () const { return thiz()->__more__ (); }
iter_t operator + () const { return *thiz(); }
explicit operator bool () const { return thiz()->__more__ (); }
unsigned len () const { return thiz()->__len__ (); }
void next () { thiz()->__next__ (); }
void forward (unsigned n) { thiz()->__forward__ (n); }
void prev () { thiz()->__prev__ (); }
void rewind (unsigned n) { thiz()->__rewind__ (n); }
bool random_access () const { return thiz()->__random_access__ (); }
/* The following can only be enabled if item_t is reference type. Otherwise
* it will be returning pointer to temporary rvalue. */
template <typename T = item_t,
hb_enable_if (hb_is_reference (T))>
hb_remove_reference (item_t)* operator -> () const { return hb_addressof (**thiz()); }
item_t operator * () const { return thiz()->__item__ (); }
item_t operator [] (unsigned i) const { return thiz()->__item_at__ (i); }
iter_t& operator += (unsigned count) { thiz()->__forward__ (count); return *thiz(); }
iter_t& operator ++ () { thiz()->__next__ (); return *thiz(); }
iter_t& operator -= (unsigned count) { thiz()->__rewind__ (count); return *thiz(); }
iter_t& operator -- () { thiz()->__prev__ (); return *thiz(); }
iter_t operator + (unsigned count) const { auto c = thiz()->iter (); c += count; return c; }
friend iter_t operator + (unsigned count, const iter_t &it) { return it + count; }
iter_t operator ++ (int) { iter_t c (*thiz()); ++*thiz(); return c; }
iter_t operator - (unsigned count) const { auto c = thiz()->iter (); c -= count; return c; }
iter_t operator -- (int) { iter_t c (*thiz()); --*thiz(); return c; }
template <typename T>
iter_t& operator >> (T &v) { v = **thiz(); ++*thiz(); return *thiz(); }
template <typename T>
iter_t& operator << (const T v) { **thiz() = v; ++*thiz(); return *thiz(); }
protected:
hb_iter_t () {}
@ -89,20 +98,57 @@ struct hb_iter_t
void operator = (const hb_iter_t &o HB_UNUSED) {}
};
/* Base class for sorted iterators. Does not enforce anything.
* Just for class taxonomy and requirements. */
template <typename Iter, typename Item = typename Iter::__item_type__>
struct hb_sorted_iter_t : hb_iter_t<Iter, Item>
#define HB_ITER_USING(Name) \
using item_t = typename Name::item_t; \
using Name::item_size; \
using Name::is_iterator; \
using Name::iter; \
using Name::operator bool; \
using Name::len; \
using Name::operator ->; \
using Name::operator *; \
using Name::operator []; \
using Name::operator +=; \
using Name::operator ++; \
using Name::operator -=; \
using Name::operator --; \
using Name::operator +; \
using Name::operator -; \
using Name::operator >>; \
using Name::operator <<; \
static_assert (true, "")
/* Returns iterator type of a type. */
#define hb_iter_t(Iterable) decltype (hb_declval (Iterable).iter ())
/* TODO Change to function-object. */
template <typename> struct hb_array_t;
static const struct
{
protected:
hb_sorted_iter_t () {}
hb_sorted_iter_t (const hb_sorted_iter_t &o) : hb_iter_t<Iter, Item> (o) {}
void operator = (const hb_sorted_iter_t &o HB_UNUSED) {}
};
template <typename T>
hb_iter_t (T)
operator () (T&& c) const
{ return c.iter (); }
/* Specialization for C arrays. */
template <typename Type> inline hb_array_t<Type>
operator () (Type *array, unsigned int length) const
{ return hb_array_t<Type> (array, length); }
template <typename Type, unsigned int length> hb_array_t<Type>
operator () (Type (&array)[length]) const
{ return hb_array_t<Type> (array, length); }
} hb_iter HB_UNUSED;
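
A quick sketch of the hb_iter () entry point (illustrative only; assumes hb-iter.hh and hb-array.hh are included):

  int values[] = {1, 2, 3, 4};

  /* The C-array overload wraps the array in an hb_array_t, which models the
   * unified iterator protocol declared above. */
  int sum = 0;
  for (auto it = hb_iter (values); it; ++it)
    sum += *it;                            /* sum == 10 */

  unsigned n = hb_iter (values).len ();    /* 4; random-access, so O(1) */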
/* Mixin to fill in what the subclass doesn't provide. */
template <typename iter_t, typename item_t = typename iter_t::__item_type__>
struct hb_iter_mixin_t
template <typename iter_t, typename item_t = typename iter_t::__item_t__>
struct hb_iter_fallback_mixin_t
{
private:
/* https://en.wikipedia.org/wiki/Curiously_recurring_template_pattern */
@ -111,38 +157,438 @@ struct hb_iter_mixin_t
public:
/* Access: Implement __item__(), or __item_at__() if random-access. */
item_t& __item__ () const { return thiz()->item_at (0); }
item_t& __item_at__ (unsigned i) const { return *(thiz() + i); }
item_t __item__ () const { return (*thiz())[0]; }
item_t __item_at__ (unsigned i) const { return *(*thiz() + i); }
/* Termination: Implement __more__(), or __len__() if random-access. */
bool __more__ () const { return thiz()->__len__ (); }
bool __more__ () const { return thiz()->len (); }
unsigned __len__ () const
{ iter_t c (*thiz()); unsigned l = 0; while (c) { c++; l++; }; return l; }
/* Advancing: Implement __next__(), or __forward__() if random-access. */
void __next__ () { thiz()->forward (1); }
void __forward__ (unsigned n) { while (n--) thiz()->next (); }
void __next__ () { *thiz() += 1; }
void __forward__ (unsigned n) { while (n--) ++*thiz(); }
/* Rewinding: Implement __prev__() or __rewind__() if bidirectional. */
void __prev__ () { thiz()->rewind (1); }
void __rewind__ (unsigned n) { while (n--) thiz()->prev (); }
void __prev__ () { *thiz() -= 1; }
void __rewind__ (unsigned n) { while (n--) --*thiz(); }
/* Random access: Return true if item_at(), len(), forward() are fast. */
bool __random_access__ () const { return false; }
protected:
hb_iter_fallback_mixin_t () {}
hb_iter_fallback_mixin_t (const hb_iter_fallback_mixin_t &o HB_UNUSED) {}
void operator = (const hb_iter_fallback_mixin_t &o HB_UNUSED) {}
};
/* Functions operating on iterators or iteratables. */
template <typename C, typename V> inline void
hb_fill (const C& c, const V &v)
template <typename iter_t, typename item_t = typename iter_t::__item_t__>
struct hb_iter_with_fallback_t :
hb_iter_t<iter_t, item_t>,
hb_iter_fallback_mixin_t<iter_t, item_t>
{
for (typename C::iter_t i (c); i; i++)
hb_assign (*i, v);
protected:
hb_iter_with_fallback_t () {}
hb_iter_with_fallback_t (const hb_iter_with_fallback_t &o HB_UNUSED) :
hb_iter_t<iter_t, item_t> (o),
hb_iter_fallback_mixin_t<iter_t, item_t> (o) {}
void operator = (const hb_iter_with_fallback_t &o HB_UNUSED) {}
};
/*
* Meta-programming predicates.
*/
/* hb_is_iterable() */
template<typename T, typename B>
struct _hb_is_iterable
{ enum { value = false }; };
template<typename T>
struct _hb_is_iterable<T, hb_bool_tt<true || sizeof (hb_declval (T).iter ())> >
{ enum { value = true }; };
template<typename T>
struct hb_is_iterable { enum { value = _hb_is_iterable<T, hb_true_t>::value }; };
#define hb_is_iterable(Iterable) hb_is_iterable<Iterable>::value
/* TODO Add hb_is_iterable_of().
* TODO Add random_access / sorted variants. */
/* hb_is_iterator() / hb_is_random_access_iterator() / hb_is_sorted_iterator() */
template <typename Iter>
struct _hb_is_iterator_of
{
char operator () (...) { return 0; }
template<typename Item> int operator () (hb_iter_t<Iter, Item> *) { return 0; }
template<typename Item> int operator () (hb_iter_t<Iter, const Item> *) { return 0; }
template<typename Item> int operator () (hb_iter_t<Iter, Item&> *) { return 0; }
template<typename Item> int operator () (hb_iter_t<Iter, const Item&> *) { return 0; }
static_assert (sizeof (char) != sizeof (int), "");
};
template<typename Iter, typename Item>
struct hb_is_iterator_of { enum {
value = sizeof (int) == sizeof (hb_declval (_hb_is_iterator_of<Iter>) (hb_declval (Iter*))) }; };
#define hb_is_iterator_of(Iter, Item) hb_is_iterator_of<Iter, Item>::value
#define hb_is_iterator(Iter) hb_is_iterator_of (Iter, typename Iter::item_t)
#define hb_is_random_access_iterator_of(Iter, Item) \
hb_is_iterator_of (Iter, Item) && Iter::is_random_access_iterator
#define hb_is_random_access_iterator(Iter) \
hb_is_random_access_iterator_of (Iter, typename Iter::item_t)
#define hb_is_sorted_iterator_of(Iter, Item) \
hb_is_iterator_of (Iter, Item) && Iter::is_sorted_iterator
#define hb_is_sorted_iterator(Iter) \
hb_is_sorted_iterator_of (Iter, typename Iter::item_t)
/*
* Adaptors, combiners, etc.
*/
template <typename Lhs, typename Rhs,
hb_enable_if (hb_is_iterator (Lhs))>
static inline decltype (hb_declval (Rhs) (hb_declval (Lhs)))
operator | (Lhs lhs, const Rhs &rhs) { return rhs (lhs); }
/* hb_map(), hb_filter(), hb_reduce() */
template <typename Iter, typename Proj,
hb_enable_if (hb_is_iterator (Iter))>
struct hb_map_iter_t :
hb_iter_t<hb_map_iter_t<Iter, Proj>,
decltype (hb_declval (Proj) (hb_declval (typename Iter::item_t)))>
{
hb_map_iter_t (const Iter& it, Proj f) : it (it), f (f) {}
typedef decltype (hb_declval (Proj) (hb_declval (typename Iter::item_t))) __item_t__;
static constexpr bool is_random_access_iterator = Iter::is_random_access_iterator;
__item_t__ __item__ () const { return f (*it); }
__item_t__ __item_at__ (unsigned i) const { return f (it[i]); }
bool __more__ () const { return bool (it); }
unsigned __len__ () const { return it.len (); }
void __next__ () { ++it; }
void __forward__ (unsigned n) { it += n; }
void __prev__ () { --it; }
void __rewind__ (unsigned n) { it -= n; }
private:
Iter it;
Proj f;
};
template <typename Proj>
struct hb_map_iter_factory_t
{
hb_map_iter_factory_t (Proj f) : f (f) {}
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
hb_map_iter_t<Iter, Proj>
operator () (Iter it) const
{ return hb_map_iter_t<Iter, Proj> (it, f); }
private:
Proj f;
};
static const struct
{
template <typename Proj>
hb_map_iter_factory_t<Proj>
operator () (Proj&& f) const
{ return hb_map_iter_factory_t<Proj> (f); }
} hb_map HB_UNUSED;
template <typename Iter, typename Pred, typename Proj,
hb_enable_if (hb_is_iterator (Iter))>
struct hb_filter_iter_t :
hb_iter_with_fallback_t<hb_filter_iter_t<Iter, Pred, Proj>,
typename Iter::item_t>
{
hb_filter_iter_t (const Iter& it_, Pred p, Proj f) : it (it_), p (p), f (f)
{ while (it && !p (f (*it))) ++it; }
typedef typename Iter::item_t __item_t__;
static constexpr bool is_sorted_iterator = Iter::is_sorted_iterator;
__item_t__ __item__ () const { return *it; }
bool __more__ () const { return bool (it); }
void __next__ () { do ++it; while (it && !p (f (*it))); }
void __prev__ () { --it; }
private:
Iter it;
Pred p;
Proj f;
};
template <typename Pred, typename Proj>
struct hb_filter_iter_factory_t
{
hb_filter_iter_factory_t (Pred p, Proj f) : p (p), f (f) {}
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
hb_filter_iter_t<Iter, Pred, Proj>
operator () (Iter it) const
{ return hb_filter_iter_t<Iter, Pred, Proj> (it, p, f); }
private:
Pred p;
Proj f;
};
static const struct
{
template <typename Pred = decltype ((hb_bool)),
typename Proj = decltype ((hb_identity))>
hb_filter_iter_factory_t<Pred, Proj>
operator () (Pred&& p = hb_bool, Proj&& f = hb_identity) const
{ return hb_filter_iter_factory_t<Pred, Proj> (p, f); }
} hb_filter HB_UNUSED;
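
A sketch of chaining the adaptors through the new operator | (illustrative only; the lambdas are examples, assuming C++11 and the headers above):

  int src[] = {1, 2, 3, 4, 5};

  /* Keep odd values, then double them. With no arguments, hb_filter
   * defaults to the hb_bool predicate and hb_identity projection. */
  int sum = 0;
  auto it = hb_iter (src)
          | hb_filter ([] (int v) { return (v & 1) != 0; })
          | hb_map ([] (int v) { return v * 2; });
  for (; it; ++it)
    sum += *it;                            /* 2 + 6 + 10 == 18 */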
/* hb_zip() */
template <typename A, typename B>
struct hb_zip_iter_t :
hb_iter_t<hb_zip_iter_t<A, B>,
hb_pair_t<typename A::item_t, typename B::item_t> >
{
hb_zip_iter_t () {}
hb_zip_iter_t (const A& a, const B& b) : a (a), b (b) {}
typedef hb_pair_t<typename A::item_t, typename B::item_t> __item_t__;
static constexpr bool is_random_access_iterator =
A::is_random_access_iterator &&
B::is_random_access_iterator;
static constexpr bool is_sorted_iterator =
A::is_sorted_iterator &&
B::is_sorted_iterator;
__item_t__ __item__ () const { return __item_t__ (*a, *b); }
__item_t__ __item_at__ (unsigned i) const { return __item_t__ (a[i], b[i]); }
bool __more__ () const { return a && b; }
unsigned __len__ () const { return MIN (a.len (), b.len ()); }
void __next__ () { ++a; ++b; }
void __forward__ (unsigned n) { a += n; b += n; }
void __prev__ () { --a; --b; }
void __rewind__ (unsigned n) { a -= n; b -= n; }
private:
A a;
B b;
};
static const struct
{
template <typename A, typename B,
hb_enable_if (hb_is_iterable (A) && hb_is_iterable (B))>
hb_zip_iter_t<hb_iter_t (A), hb_iter_t (B)>
operator () (A& a, B &b) const
{ return hb_zip_iter_t<hb_iter_t (A), hb_iter_t (B)> (hb_iter (a), hb_iter (b)); }
} hb_zip HB_UNUSED;
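
hb_zip pairs up two iterables and stops at the shorter one; each item is an hb_pair_t (illustrative sketch):

  int      glyphs[]   = {10, 11, 12};
  unsigned advances[] = {500, 520, 480};

  auto a = hb_iter (glyphs);
  auto b = hb_iter (advances);
  unsigned total = 0;
  for (auto it = hb_zip (a, b); it; ++it)
    total += (*it).second;                 /* 1500; (*it).first is the glyph */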
/* hb_enumerate */
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
struct hb_enumerate_iter_t :
hb_iter_t<hb_enumerate_iter_t<Iter>,
hb_pair_t<unsigned, typename Iter::item_t> >
{
hb_enumerate_iter_t (const Iter& it) : i (0), it (it) {}
typedef hb_pair_t<unsigned, typename Iter::item_t> __item_t__;
static constexpr bool is_random_access_iterator = Iter::is_random_access_iterator;
static constexpr bool is_sorted_iterator = true;
__item_t__ __item__ () const { return __item_t__ (+i, *it); }
__item_t__ __item_at__ (unsigned j) const { return __item_t__ (i + j, it[j]); }
bool __more__ () const { return bool (it); }
unsigned __len__ () const { return it.len (); }
void __next__ () { ++i; ++it; }
void __forward__ (unsigned n) { i += n; it += n; }
void __prev__ () { --i; --it; }
void __rewind__ (unsigned n) { i -= n; it -= n; }
private:
unsigned i;
Iter it;
};
static const struct
{
template <typename Iterable,
hb_enable_if (hb_is_iterable (Iterable))>
hb_enumerate_iter_t<hb_iter_t (Iterable)>
operator () (Iterable& it) const
{ return hb_enumerate_iter_t<hb_iter_t (Iterable)> (hb_iter (it)); }
} hb_enumerate HB_UNUSED;
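
hb_enumerate is the same idea with a running index as the first element of each pair (illustrative sketch):

  unsigned vals[] = {3, 5, 7};
  auto v = hb_iter (vals);
  unsigned weighted = 0;
  for (auto it = hb_enumerate (v); it; ++it)
    weighted += (*it).first * (*it).second;  /* 0*3 + 1*5 + 2*7 == 19 */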
/* hb_apply() */
template <typename Appl>
struct hb_apply_t
{
hb_apply_t (Appl a) : a (a) {}
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
void
operator () (Iter it) const
{
for (; it; ++it)
a (*it);
}
private:
Appl a;
};
static const struct
{
template <typename Appl> hb_apply_t<Appl>
operator () (Appl&& a) const
{ return hb_apply_t<Appl> (a); }
template <typename Appl> hb_apply_t<Appl&>
operator () (Appl *a) const
{ return hb_apply_t<Appl&> (*a); }
} hb_apply HB_UNUSED;
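
hb_apply runs a callable over every item, consuming the iterator (illustrative sketch; the lambda is just an example):

  int src[] = {1, 2, 3};
  int sum = 0;
  hb_iter (src) | hb_apply ([&sum] (int v) { sum += v; });   /* sum == 6 */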
/* hb_sink() */
template <typename Sink>
struct hb_sink_t
{
hb_sink_t (Sink&& s) : s (s) {}
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
void
operator () (Iter it) const
{
for (; it; ++it)
s << *it;
}
private:
Sink s;
};
static const struct
{
template <typename Sink> hb_sink_t<Sink>
operator () (Sink&& s) const
{ return hb_sink_t<Sink> (s); }
template <typename Sink> hb_sink_t<Sink&>
operator () (Sink *s) const
{ return hb_sink_t<Sink&> (*s); }
} hb_sink HB_UNUSED;
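
hb_sink streams every item into a writable iterator, i.e. anything supporting operator << (illustrative sketch; note the pointer form, which keeps the stored sink a mutable reference inside the const operator ()):

  int src[] = {1, 2, 3, 4};
  int dst[4] = {};
  auto out = hb_iter (dst);
  hb_iter (src) | hb_sink (&out);          /* dst is now {1, 2, 3, 4} */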
/* hb-drain: hb_sink to void / blackhole / /dev/null. */
static const struct
{
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
void
operator () (Iter it) const
{
for (; it; ++it)
(void) *it;
}
} hb_drain HB_UNUSED;
/* hb_unzip(): unzip and sink to two sinks. */
template <typename Sink1, typename Sink2>
struct hb_unzip_t
{
hb_unzip_t (Sink1&& s1, Sink2&& s2) : s1 (s1), s2 (s2) {}
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
void
operator () (Iter it) const
{
for (; it; ++it)
{
const auto &v = *it;
s1 << v.first;
s2 << v.second;
}
}
private:
Sink1 s1;
Sink2 s2;
};
static const struct
{
template <typename Sink1, typename Sink2> hb_unzip_t<Sink1, Sink2>
operator () (Sink1&& s1, Sink2&& s2) const
{ return hb_unzip_t<Sink1, Sink2> (s1, s2); }
template <typename Sink1, typename Sink2> hb_unzip_t<Sink1&, Sink2&>
operator () (Sink1 *s1, Sink2 *s2) const
{ return hb_unzip_t<Sink1&, Sink2&> (*s1, *s2); }
} hb_unzip HB_UNUSED;
/* hb-all, hb-any, hb-none. */
static const struct
{
template <typename Iterable,
hb_enable_if (hb_is_iterable (Iterable))>
bool
operator () (Iterable&& c) const
{
for (auto it = hb_iter (c); it; ++it)
if (!*it)
return false;
return true;
}
} hb_all HB_UNUSED;
static const struct
{
template <typename Iterable,
hb_enable_if (hb_is_iterable (Iterable))>
bool
operator () (Iterable&& c) const
{
for (auto it = hb_iter (c); it; ++it)
if (it)
return true;
return false;
}
} hb_any HB_UNUSED;
static const struct
{
template <typename Iterable,
hb_enable_if (hb_is_iterable (Iterable))>
bool
operator () (Iterable&& c) const
{
for (auto it = hb_iter (c); it; ++it)
if (it)
return false;
return true;
}
} hb_none HB_UNUSED;
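
hb_all / hb_any / hb_none reduce an iterable down to a bool (illustrative sketch):

  unsigned coverage[] = {1, 1, 0, 1};
  auto c = hb_iter (coverage);

  bool all  = hb_all (c);                  /* false: one entry is zero */
  bool any  = hb_any (c);                  /* true */
  bool none = hb_none (c);                 /* false */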
/*
* Algorithms operating on iterators.
*/
template <typename C, typename V,
hb_enable_if (hb_is_iterable (C))>
inline void
hb_fill (C& c, const V &v)
{
for (auto i = hb_iter (c); i; i++)
*i = v;
}
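
hb_fill assigns one value through an entire iterable (illustrative sketch):

  int buf[8];
  auto b = hb_iter (buf);
  hb_fill (b, 0);                          /* zeroes all eight entries */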
template <typename S, typename D> inline bool
hb_copy (hb_iter_t<D> &id, hb_iter_t<S> &is)
template <typename S, typename D,
hb_enable_if (hb_is_iterator (S) && hb_is_iterator (D))>
inline bool
hb_copy (D id, S is)
{
for (; id && is; ++id, ++is)
*id = *is;


@ -405,7 +405,7 @@ struct hb_sanitize_context_t :
{
if (this->may_edit (obj, hb_static_size (Type)))
{
hb_assign (* const_cast<Type *> (obj), v);
* const_cast<Type *> (obj) = v;
return true;
}
return false;
@ -682,7 +682,11 @@ template <typename Type>
struct BEInt<Type, 1>
{
public:
void set (Type V) { v = V; }
BEInt<Type, 1>& operator = (Type V)
{
v = V;
return *this;
}
operator Type () const { return v; }
private: uint8_t v;
};
@ -690,10 +694,11 @@ template <typename Type>
struct BEInt<Type, 2>
{
public:
void set (Type V)
BEInt<Type, 2>& operator = (Type V)
{
v[0] = (V >> 8) & 0xFF;
v[1] = (V ) & 0xFF;
return *this;
}
operator Type () const
{
@ -718,11 +723,12 @@ template <typename Type>
struct BEInt<Type, 3>
{
public:
void set (Type V)
BEInt<Type, 3>& operator = (Type V)
{
v[0] = (V >> 16) & 0xFF;
v[1] = (V >> 8) & 0xFF;
v[2] = (V ) & 0xFF;
return *this;
}
operator Type () const
{
@ -736,13 +742,13 @@ template <typename Type>
struct BEInt<Type, 4>
{
public:
typedef Type type;
void set (Type V)
BEInt<Type, 4>& operator = (Type V)
{
v[0] = (V >> 24) & 0xFF;
v[1] = (V >> 16) & 0xFF;
v[2] = (V >> 8) & 0xFF;
v[3] = (V ) & 0xFF;
return *this;
}
operator Type () const
{
@ -816,7 +822,7 @@ struct hb_lazy_loader_t : hb_data_wrapper_t<Data, WheresData>
const Returned * operator -> () const { return get (); }
const Returned & operator * () const { return *get (); }
explicit_operator bool () const
explicit operator bool () const
{ return get_stored () != Funcs::get_null (); }
template <typename C> operator const C * () const { return get (); }
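
The BEInt set () → operator = change above keeps the same big-endian packing; only the spelling differs (illustrative sketch):

  BEInt<uint32_t, 4> be;
  be = 0x12345678u;                        /* stored as bytes 12 34 56 78 */
  uint32_t n = be;                         /* implicit conversion reads it back */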


@ -30,14 +30,6 @@
#include "hb.hh"
template <typename T>
inline uint32_t Hash (const T &v)
{
/* Knuth's multiplicative method: */
return (uint32_t) v * 2654435761u;
}
/*
* hb_map_t
*/
@ -160,14 +152,16 @@ struct hb_map_t
void del (hb_codepoint_t key) { set (key, INVALID); }
bool has (hb_codepoint_t key) const
{ return get (key) != INVALID; }
hb_codepoint_t operator [] (unsigned int key) const
{ return get (key); }
static constexpr hb_codepoint_t INVALID = HB_MAP_VALUE_INVALID;
/* Has interface. */
static constexpr hb_codepoint_t SENTINEL = INVALID;
typedef hb_codepoint_t value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Projection. */
hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
void clear ()
{
if (items) memset (items, 0xFF, ((size_t) mask + 1) * sizeof (item_t));
@ -182,7 +176,7 @@ struct hb_map_t
unsigned int bucket_for (hb_codepoint_t key) const
{
unsigned int i = Hash (key) % prime;
unsigned int i = hb_hash (key) % prime;
unsigned int step = 0;
unsigned int tombstone = INVALID;
while (!items[i].is_unused ())

src/hb-meta.hh (new file)

@ -0,0 +1,122 @@
/*
* Copyright © 2018 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
*/
#ifndef HB_META_HH
#define HB_META_HH
#include "hb.hh"
/*
* C++ template meta-programming & fundamentals used with them.
*/
template <typename T> static inline T*
hb_addressof (const T& arg)
{
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wcast-align"
/* https://en.cppreference.com/w/cpp/memory/addressof */
return reinterpret_cast<T*>(
&const_cast<char&>(
reinterpret_cast<const volatile char&>(arg)));
#pragma GCC diagnostic pop
}
template <typename T> static inline T hb_declval ();
#define hb_declval(T) (hb_declval<T> ())
template <typename T> struct hb_match_const { typedef T type; enum { value = false }; };
template <typename T> struct hb_match_const<const T> { typedef T type; enum { value = true }; };
#define hb_remove_const(T) typename hb_match_const<T>::type
#define hb_is_const(T) hb_match_const<T>::value
template <typename T> struct hb_match_reference { typedef T type; enum { value = false }; };
template <typename T> struct hb_match_reference<T &> { typedef T type; enum { value = true }; };
#define hb_remove_reference(T) typename hb_match_reference<T>::type
#define hb_is_reference(T) hb_match_reference<T>::value
template <typename T> struct hb_match_pointer { typedef T type; enum { value = false }; };
template <typename T> struct hb_match_pointer<T *> { typedef T type; enum { value = true }; };
#define hb_remove_pointer(T) typename hb_match_pointer<T>::type
#define hb_is_pointer(T) hb_match_pointer<T>::value
/* Void! For when we need an expression-type of void. */
struct hb_void_t { typedef void value; };
/* Bool! For when we need to evaluate type-dependent expressions
* in a template argument. */
template <bool b> struct hb_bool_tt { enum { value = b }; };
typedef hb_bool_tt<true> hb_true_t;
typedef hb_bool_tt<false> hb_false_t;
template<bool B, typename T = void>
struct hb_enable_if {};
template<typename T>
struct hb_enable_if<true, T> { typedef T type; };
#define hb_enable_if(Cond) typename hb_enable_if<(Cond)>::type* = nullptr
/*
* Meta-functions.
*/
template <typename T> struct hb_is_signed;
/* https://github.com/harfbuzz/harfbuzz/issues/1535 */
template <> struct hb_is_signed<int8_t> { enum { value = true }; };
template <> struct hb_is_signed<int16_t> { enum { value = true }; };
template <> struct hb_is_signed<int32_t> { enum { value = true }; };
template <> struct hb_is_signed<int64_t> { enum { value = true }; };
template <> struct hb_is_signed<uint8_t> { enum { value = false }; };
template <> struct hb_is_signed<uint16_t> { enum { value = false }; };
template <> struct hb_is_signed<uint32_t> { enum { value = false }; };
template <> struct hb_is_signed<uint64_t> { enum { value = false }; };
#define hb_is_signed(T) hb_is_signed<T>::value
template <bool is_signed> struct hb_signedness_int;
template <> struct hb_signedness_int<false> { typedef unsigned int value; };
template <> struct hb_signedness_int<true> { typedef signed int value; };
#define hb_signedness_int(T) hb_signedness_int<T>::value
template <typename T> struct hb_is_integer { enum { value = false }; };
template <> struct hb_is_integer<char> { enum { value = true }; };
template <> struct hb_is_integer<signed char> { enum { value = true }; };
template <> struct hb_is_integer<unsigned char> { enum { value = true }; };
template <> struct hb_is_integer<signed short> { enum { value = true }; };
template <> struct hb_is_integer<unsigned short> { enum { value = true }; };
template <> struct hb_is_integer<signed int> { enum { value = true }; };
template <> struct hb_is_integer<unsigned int> { enum { value = true }; };
template <> struct hb_is_integer<signed long> { enum { value = true }; };
template <> struct hb_is_integer<unsigned long> { enum { value = true }; };
template <> struct hb_is_integer<signed long long> { enum { value = true }; };
template <> struct hb_is_integer<unsigned long long> { enum { value = true }; };
#define hb_is_integer(T) hb_is_integer<T>::value
#endif /* HB_META_HH */
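
A small sketch of how the SFINAE helpers in this new header are used (illustrative only; twice () is a made-up function, not part of the commit):

  #include "hb-meta.hh"

  /* Participates in overload resolution only for integer types. */
  template <typename T, hb_enable_if (hb_is_integer (T))>
  static inline T twice (T v) { return (T) (v + v); }

  /* twice (3) == 6; twice (2.0) would not compile. */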


@ -28,6 +28,7 @@
#define HB_NULL_HH
#include "hb.hh"
#include "hb-meta.hh"
/*
@ -45,18 +46,16 @@
* https://stackoverflow.com/questions/7776448/sfinae-tried-with-bool-gives-compiler-error-template-argument-tvalue-invol
*/
template<bool> struct _hb_bool_type {};
template <typename T, typename B>
struct _hb_null_size
{ enum { value = sizeof (T) }; };
template <typename T>
struct _hb_null_size<T, _hb_bool_type<(bool) (1 + (unsigned int) T::min_size)> >
struct _hb_null_size<T, hb_bool_tt<true || sizeof (T::min_size)> >
{ enum { value = T::null_size }; };
template <typename T>
struct hb_null_size
{ enum { value = _hb_null_size<T, _hb_bool_type<true> >::value }; };
{ enum { value = _hb_null_size<T, hb_true_t>::value }; };
#define hb_null_size(T) hb_null_size<T>::value
/* These doesn't belong here, but since is copy/paste from above, put it here. */
@ -68,34 +67,15 @@ template <typename T, typename B>
struct _hb_static_size
{ enum { value = sizeof (T) }; };
template <typename T>
struct _hb_static_size<T, _hb_bool_type<(bool) (1 + (unsigned int) T::min_size)> >
struct _hb_static_size<T, hb_bool_tt<true || sizeof (T::min_size)> >
{ enum { value = T::static_size }; };
template <typename T>
struct hb_static_size
{ enum { value = _hb_static_size<T, _hb_bool_type<true> >::value }; };
{ enum { value = _hb_static_size<T, hb_true_t>::value }; };
#define hb_static_size(T) hb_static_size<T>::value
/* hb_assign (obj, value)
* Calls obj.set (value) if obj.min_size is defined and value has different type
* from obj, or obj = v otherwise. */
template <typename T, typename V, typename B>
struct _hb_assign
{ static inline void value (T &o, const V v) { o = v; } };
template <typename T, typename V>
struct _hb_assign<T, V, _hb_bool_type<(bool) (1 + (unsigned int) T::min_size)> >
{ static inline void value (T &o, const V v) { o.set (v); } };
template <typename T>
struct _hb_assign<T, T, _hb_bool_type<(bool) (1 + (unsigned int) T::min_size)> >
{ static inline void value (T &o, const T v) { o = v; } };
template <typename T, typename V>
static inline void hb_assign (T &o, const V v)
{ _hb_assign<T, V, _hb_bool_type<true> >::value (o, v); }
/*
* Null()
*/
@ -115,7 +95,7 @@ struct Null {
template <typename QType>
struct NullHelper
{
typedef typename hb_remove_const (typename hb_remove_reference (QType)) Type;
typedef hb_remove_const (hb_remove_reference (QType)) Type;
static const Type & get_null () { return Null<Type>::get_null (); }
};
#define Null(Type) NullHelper<Type>::get_null ()
@ -168,7 +148,7 @@ static inline Type& Crap () {
template <typename QType>
struct CrapHelper
{
typedef typename hb_remove_const (typename hb_remove_reference (QType)) Type;
typedef hb_remove_const (hb_remove_reference (QType)) Type;
static Type & get_crap () { return Crap<Type> (); }
};
#define Crap(Type) CrapHelper<Type>::get_crap ()
@ -191,7 +171,7 @@ struct CrapOrNullHelper<const Type> {
template <typename P>
struct hb_nonnull_ptr_t
{
typedef typename hb_remove_pointer (P) T;
typedef hb_remove_pointer (P) T;
hb_nonnull_ptr_t (T *v_ = nullptr) : v (v_) {}
T * operator = (T *v_) { return v = v_; }


@ -106,7 +106,7 @@ typedef struct OffsetTable
bool find_table_index (hb_tag_t tag, unsigned int *table_index) const
{
Tag t;
t.set (tag);
t = tag;
return tables.bfind (t, table_index, HB_BFIND_NOT_FOUND_STORE, Index::NOT_FOUND_INDEX);
}
const TableRecord& get_table_by_tag (hb_tag_t tag) const
@ -127,7 +127,7 @@ typedef struct OffsetTable
/* Alloc 12 for the OTHeader. */
if (unlikely (!c->extend_min (*this))) return_trace (false);
/* Write sfntVersion (bytes 0..3). */
sfnt_version.set (sfnt_tag);
sfnt_version = sfnt_tag;
/* Take space for numTables, searchRange, entrySelector, RangeShift
* and the TableRecords themselves. */
if (unlikely (!tables.serialize (c, items.length))) return_trace (false);
@ -140,8 +140,8 @@ typedef struct OffsetTable
{
TableRecord &rec = tables.arrayZ[i];
hb_blob_t *blob = items[i].blob;
rec.tag.set (items[i].tag);
rec.length.set (hb_blob_get_length (blob));
rec.tag = items[i].tag;
rec.length = hb_blob_get_length (blob);
rec.offset.serialize (c, this);
/* Allocate room for the table and copy it. */
@ -159,7 +159,7 @@ typedef struct OffsetTable
{
head *h = (head *) start;
checksum_adjustment = &h->checkSumAdjustment;
checksum_adjustment->set (0);
*checksum_adjustment = 0;
}
rec.checkSum.set_for_data (start, end - start);
@ -177,10 +177,10 @@ typedef struct OffsetTable
for (unsigned int i = 0; i < items.length; i++)
{
TableRecord &rec = tables.arrayZ[i];
checksum.set (checksum + rec.checkSum);
checksum = checksum + rec.checkSum;
}
checksum_adjustment->set (0xB1B0AFBAu - checksum);
*checksum_adjustment = 0xB1B0AFBAu - checksum;
}
return_trace (true);


@ -52,18 +52,14 @@ namespace OT {
* Int types
*/
template <bool is_signed> struct hb_signedness_int;
template <> struct hb_signedness_int<false> { typedef unsigned int value; };
template <> struct hb_signedness_int<true> { typedef signed int value; };
/* Integer types in big-endian order and no alignment requirement */
template <typename Type, unsigned int Size>
struct IntType
{
typedef Type type;
typedef typename hb_signedness_int<hb_is_signed<Type>::value>::value wide_type;
typedef typename hb_signedness_int (hb_is_signed (Type)) wide_type;
void set (wide_type i) { v.set (i); }
IntType<Type, Size>& operator = (wide_type i) { v = i; return *this; }
operator wide_type () const { return v; }
bool operator == (const IntType<Type,Size> &o) const { return (Type) v == (Type) o.v; }
bool operator != (const IntType<Type,Size> &o) const { return !(*this == o); }
@ -110,9 +106,10 @@ typedef HBUINT16 UFWORD;
/* 16-bit signed fixed number with the low 14 bits of fraction (2.14). */
struct F2DOT14 : HBINT16
{
F2DOT14& operator = (uint16_t i ) { HBINT16::operator= (i); return *this; }
// 16384 means 1<<14
float to_float () const { return ((int32_t) v) / 16384.f; }
void set_float (float f) { v.set (round (f * 16384.f)); }
void set_float (float f) { v = round (f * 16384.f); }
public:
DEFINE_SIZE_STATIC (2);
};
@ -120,9 +117,10 @@ struct F2DOT14 : HBINT16
/* 32-bit signed fixed-point number (16.16). */
struct Fixed : HBINT32
{
Fixed& operator = (uint32_t i) { HBINT32::operator= (i); return *this; }
// 65536 means 1<<16
float to_float () const { return ((int32_t) v) / 65536.f; }
void set_float (float f) { v.set (round (f * 65536.f)); }
void set_float (float f) { v = round (f * 65536.f); }
public:
DEFINE_SIZE_STATIC (4);
};
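
With set () gone, the 2.14 and 16.16 fixed-point types are written through operator = and set_float (); a sketch of the round trip (illustrative only, inside the OT namespace as in hb-open-type.hh):

  F2DOT14 f;
  f.set_float (0.5f);                      /* stores round (0.5 * 16384) == 8192 */
  float back = f.to_float ();              /* 0.5f */
  f = 8192;                                /* operator =: raw 2.14 bits, also 0.5 */

  Fixed x;
  x.set_float (1.25f);                     /* stores round (1.25 * 65536) == 81920 */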
@ -147,6 +145,7 @@ struct LONGDATETIME
* system, feature, or baseline */
struct Tag : HBUINT32
{
Tag& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
/* What the char* converters return is NOT nul-terminated. Print using "%.4s" */
operator const char* () const { return reinterpret_cast<const char *> (&this->v); }
operator char* () { return reinterpret_cast<char *> (&this->v); }
@ -155,11 +154,15 @@ struct Tag : HBUINT32
};
/* Glyph index number, same as uint16 (length = 16 bits) */
typedef HBUINT16 GlyphID;
struct GlyphID : HBUINT16
{
GlyphID& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
/* Script/language-system/feature index */
struct Index : HBUINT16 {
static constexpr unsigned NOT_FOUND_INDEX = 0xFFFFu;
Index& operator = (uint16_t i) { HBUINT16::operator= (i); return *this; }
};
DECLARE_NULL_NAMESPACE_BYTES (OT, Index);
@ -169,6 +172,8 @@ typedef Index NameID;
template <typename Type, bool has_null=true>
struct Offset : Type
{
Offset& operator = (typename Type::type i) { Type::operator= (i); return *this; }
typedef Type type;
bool is_null () const { return has_null && 0 == *this; }
@ -176,7 +181,7 @@ struct Offset : Type
void *serialize (hb_serialize_context_t *c, const void *base)
{
void *t = c->start_embed<void> ();
this->set ((char *) t - (char *) base); /* TODO(serialize) Overflow? */
*this = (char *) t - (char *) base; /* TODO(serialize) Overflow? */
return t;
}
@ -191,6 +196,8 @@ typedef Offset<HBUINT32> Offset32;
/* CheckSum */
struct CheckSum : HBUINT32
{
CheckSum& operator = (uint32_t i) { HBUINT32::operator= (i); return *this; }
/* This is reference implementation from the spec. */
static uint32_t CalcTableChecksum (const HBUINT32 *Table, uint32_t Length)
{
@ -205,7 +212,7 @@ struct CheckSum : HBUINT32
/* Note: data should be 4byte aligned and have 4byte padding at the end. */
void set_for_data (const void *data, unsigned int length)
{ set (CalcTableChecksum ((const HBUINT32 *) data, length)); }
{ *this = CalcTableChecksum ((const HBUINT32 *) data, length); }
public:
DEFINE_SIZE_STATIC (4);
@ -255,6 +262,8 @@ struct _hb_has_null<Type, true>
template <typename Type, typename OffsetType=HBUINT16, bool has_null=true>
struct OffsetTo : Offset<OffsetType, has_null>
{
OffsetTo& operator = (typename OffsetType::type i) { OffsetType::operator= (i); return *this; }
const Type& operator () (const void *base) const
{
if (unlikely (this->is_null ())) return *_hb_has_null<Type, has_null>::get_null ();
@ -276,12 +285,12 @@ struct OffsetTo : Offset<OffsetType, has_null>
{
if (&src == &Null (T))
{
this->set (0);
*this = 0;
return;
}
serialize (c->serializer, base);
if (!src.subset (c))
this->set (0);
*this = 0;
}
bool sanitize_shallow (hb_sanitize_context_t *c, const void *base) const
@ -338,9 +347,12 @@ struct OffsetTo : Offset<OffsetType, has_null>
DEFINE_SIZE_STATIC (sizeof (OffsetType));
};
/* Partial specializations. */
template <typename Type, bool has_null=true> struct LOffsetTo : OffsetTo<Type, HBUINT32, has_null> {};
template <typename Type, typename OffsetType=HBUINT16 > struct NNOffsetTo : OffsetTo<Type, OffsetType, false> {};
template <typename Type > struct LNNOffsetTo : OffsetTo<Type, HBUINT32, false> {};
template <typename Type, bool has_null=true>
using LOffsetTo = OffsetTo<Type, HBUINT32, has_null>;
template <typename Type, typename OffsetType=HBUINT16>
using NNOffsetTo = OffsetTo<Type, OffsetType, false>;
template <typename Type>
using LNNOffsetTo = LOffsetTo<Type, false>;
template <typename Base, typename OffsetType, bool has_null, typename Type>
static inline const Type& operator + (const Base &base, const OffsetTo<Type, OffsetType, has_null> &offset) { return offset (base); }
@ -447,7 +459,7 @@ struct UnsizedArrayOf
/* Unsized array of offset's */
template <typename Type, typename OffsetType, bool has_null=true>
struct UnsizedOffsetArrayOf : UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null> > {};
using UnsizedOffsetArrayOf = UnsizedArrayOf<OffsetTo<Type, OffsetType, has_null> >;
/* Unsized array of offsets relative to the beginning of the array itself. */
template <typename Type, typename OffsetType, bool has_null=true>
@ -532,12 +544,18 @@ struct ArrayOf
unsigned int get_size () const
{ return len.static_size + len * Type::static_size; }
hb_array_t<Type> as_array ()
{ return hb_array (arrayZ, len); }
hb_array_t<const Type> as_array () const
{ return hb_array (arrayZ, len); }
operator hb_array_t<Type> (void) { return as_array (); }
operator hb_array_t<const Type> (void) const { return as_array (); }
explicit operator bool () const { return len; }
hb_array_t< Type> as_array () { return hb_array (arrayZ, len); }
hb_array_t<const Type> as_array () const { return hb_array (arrayZ, len); }
/* Iterator. */
typedef hb_array_t<const Type> iter_t;
typedef hb_array_t< Type> writer_t;
iter_t iter () const { return as_array (); }
writer_t writer () { return as_array (); }
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count);}
@ -552,17 +570,21 @@ struct ArrayOf
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
len.set (items_len); /* TODO(serialize) Overflow? */
len = items_len; /* TODO(serialize) Overflow? */
if (unlikely (!c->extend (*this))) return_trace (false);
return_trace (true);
}
template <typename T>
bool serialize (hb_serialize_context_t *c, hb_array_t<const T> items)
template <typename Iterator,
hb_enable_if (hb_is_iterator_of (Iterator, const Type))>
bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
if (unlikely (!serialize (c, items.length))) return_trace (false);
for (unsigned int i = 0; i < items.length; i++)
hb_assign (arrayZ[i], items[i]);
unsigned count = items.len ();
if (unlikely (!serialize (c, count))) return_trace (false);
/* TODO Umm. Just exhaust the iterator instead? Being extra
* cautious right now. */
for (unsigned i = 0; i < count; i++, items++)
arrayZ[i] = *items;
return_trace (true);
}
@ -626,16 +648,17 @@ struct ArrayOf
public:
DEFINE_SIZE_ARRAY (sizeof (LenType), arrayZ);
};
template <typename Type> struct LArrayOf : ArrayOf<Type, HBUINT32> {};
typedef ArrayOf<HBUINT8, HBUINT8> PString;
template <typename Type>
using LArrayOf = ArrayOf<Type, HBUINT32>;
using PString = ArrayOf<HBUINT8, HBUINT8>;
/* Array of Offset's */
template <typename Type>
struct OffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT16> > {};
using OffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT16> >;
template <typename Type>
struct LOffsetArrayOf : ArrayOf<OffsetTo<Type, HBUINT32> > {};
using LOffsetArrayOf = ArrayOf<OffsetTo<Type, HBUINT32> >;
template <typename Type>
struct LOffsetLArrayOf : ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32> {};
using LOffsetLArrayOf = ArrayOf<OffsetTo<Type, HBUINT32>, HBUINT32>;
/* Array of offsets relative to the beginning of the array itself. */
template <typename Type>
@ -706,7 +729,7 @@ struct HeadlessArrayOf
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
lenP1.set (items.length + 1); /* TODO(serialize) Overflow? */
lenP1 = items.length + 1; /* TODO(serialize) Overflow? */
if (unlikely (!c->extend (*this))) return_trace (false);
for (unsigned int i = 0; i < items.length; i++)
arrayZ[i] = items[i];
@ -797,22 +820,42 @@ struct ArrayOfM1
template <typename Type, typename LenType=HBUINT16>
struct SortedArrayOf : ArrayOf<Type, LenType>
{
hb_sorted_array_t<Type> as_array ()
{ return hb_sorted_array (this->arrayZ, this->len); }
hb_sorted_array_t<const Type> as_array () const
{ return hb_sorted_array (this->arrayZ, this->len); }
operator hb_sorted_array_t<Type> () { return as_array (); }
operator hb_sorted_array_t<const Type> () const { return as_array (); }
hb_sorted_array_t< Type> as_array () { return hb_sorted_array (this->arrayZ, this->len); }
hb_sorted_array_t<const Type> as_array () const { return hb_sorted_array (this->arrayZ, this->len); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
/* Iterator. */
typedef hb_sorted_array_t<const Type> iter_t;
typedef hb_sorted_array_t< Type> writer_t;
iter_t iter () const { return as_array (); }
writer_t writer () { return as_array (); }
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count);}
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
hb_sorted_array_t<const Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
{ return as_array ().sub_array (start_offset, count);}
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int count)
{ return as_array ().sub_array (start_offset, count);}
hb_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
hb_sorted_array_t<Type> sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
{ return as_array ().sub_array (start_offset, count);}
bool serialize (hb_serialize_context_t *c, unsigned int items_len)
{
TRACE_SERIALIZE (this);
bool ret = ArrayOf<Type, LenType>::serialize (c, items_len);
return_trace (ret);
}
template <typename Iterator,
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const Type))>
bool serialize (hb_serialize_context_t *c, Iterator items)
{
TRACE_SERIALIZE (this);
bool ret = ArrayOf<Type, LenType>::serialize (c, items);
return_trace (ret);
}
template <typename T>
Type &bsearch (const T &x, Type &not_found = Crap (Type))
{ return *as_array ().bsearch (x, &not_found); }
@ -841,15 +884,16 @@ struct BinSearchHeader
return_trace (c->check_struct (this));
}
void set (unsigned int v)
BinSearchHeader& operator = (unsigned int v)
{
len.set (v);
len = v;
assert (len == v);
entrySelector.set (MAX (1u, hb_bit_storage (v)) - 1);
searchRange.set (16 * (1u << entrySelector));
rangeShift.set (v * 16 > searchRange
? 16 * v - searchRange
: 0);
entrySelector = MAX (1u, hb_bit_storage (v)) - 1;
searchRange = 16 * (1u << entrySelector);
rangeShift = v * 16 > searchRange
? 16 * v - searchRange
: 0;
return *this;
}
protected:
@ -863,7 +907,7 @@ struct BinSearchHeader
};
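
To make the searchRange/entrySelector/rangeShift arithmetic above concrete, a small worked sketch with plain unsigned ints; the floor-log2 helper stands in for hb_bit_storage, whose exact semantics are my assumption here:

#include <algorithm>
#include <cstdio>
/* Assumed stand-in for hb_bit_storage: number of bits needed to store v. */
static unsigned bit_storage (unsigned v)
{ unsigned n = 0; while (v) { n++; v >>= 1; } return n; }
int main ()
{
  unsigned v = 39;  /* entry count; the 16 is the unit size hard-coded above */
  unsigned entrySelector = std::max (1u, bit_storage (v)) - 1;               /* 5   */
  unsigned searchRange   = 16 * (1u << entrySelector);                       /* 512 */
  unsigned rangeShift    = v * 16 > searchRange ? 16 * v - searchRange : 0;  /* 112 */
  printf ("%u %u %u\n", entrySelector, searchRange, rangeShift);
  return 0;
}
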
template <typename Type, typename LenType=HBUINT16>
struct BinSearchArrayOf : SortedArrayOf<Type, BinSearchHeader<LenType> > {};
using BinSearchArrayOf = SortedArrayOf<Type, BinSearchHeader<LenType> >;
struct VarSizedBinSearchHeader


@ -124,14 +124,14 @@ struct CFFIndex
{
COUNT *dest = c->allocate_min<COUNT> ();
if (unlikely (dest == nullptr)) return_trace (false);
dest->set (0);
*dest = 0;
}
else
{
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (byteArray.length);
this->offSize.set (offSize_);
this->count = byteArray.length;
this->offSize = offSize_;
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (byteArray.length + 1))))
return_trace (false);
@ -181,7 +181,7 @@ struct CFFIndex
for (; size; size--)
{
--p;
p->set (offset & 0xFF);
*p = offset & 0xFF;
offset >>= 8;
}
}
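
The loop above emits `offset` as `size` big-endian bytes, filling backwards from `p`. The same encoding as a freestanding sketch (hypothetical helper name, plain byte buffer):

#include <cstdint>
/* Store `offset` as `off_size` big-endian bytes at `out` (illustrative only). */
static void set_offset_at (uint8_t *out, unsigned off_size, uint32_t offset)
{
  uint8_t *p = out + off_size;
  for (; off_size; off_size--)
  {
    --p;
    *p = offset & 0xFF;
    offset >>= 8;
  }
}
/* e.g. set_offset_at (buf, 3, 0x01ABCD) writes 0x01 0xAB 0xCD. */
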
@ -275,8 +275,8 @@ struct CFFIndexOf : CFFIndex<COUNT>
TRACE_SERIALIZE (this);
/* serialize CFFIndex header */
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (dataArrayLen);
this->offSize.set (offSize_);
this->count = dataArrayLen;
this->offSize = offSize_;
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (dataArrayLen + 1))))
return_trace (false);
@ -376,11 +376,11 @@ struct Dict : UnsizedByteStr
if (unlikely (p == nullptr)) return_trace (false);
if (Is_OpCode_ESC (op))
{
p->set (OpCode_escape);
*p = OpCode_escape;
op = Unmake_OpCode_ESC (op);
p++;
}
p->set (op);
*p = op;
return_trace (true);
}
@ -426,8 +426,8 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (fontDicts.length);
this->offSize.set (offSize_);
this->count = fontDicts.length;
this->offSize = offSize_;
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (fontDicts.length + 1))))
return_trace (false);
@ -463,8 +463,8 @@ struct FDArray : CFFIndexOf<COUNT, FontDict>
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->count.set (fdCount);
this->offSize.set (offSize_);
this->count = fdCount;
this->offSize = offSize_;
if (!unlikely (c->allocate_size<HBUINT8> (offSize_ * (fdCount + 1))))
return_trace (false);


@ -196,18 +196,18 @@ struct Encoding {
TRACE_SERIALIZE (this);
Encoding *dest = c->extend_min (*this);
if (unlikely (dest == nullptr)) return_trace (false);
dest->format.set (format | ((supp_codes.length > 0)? 0x80: 0));
dest->format = format | ((supp_codes.length > 0)? 0x80: 0);
if (format == 0)
{
Encoding0 *fmt0 = c->allocate_size<Encoding0> (Encoding0::min_size + HBUINT8::static_size * enc_count);
if (unlikely (fmt0 == nullptr)) return_trace (false);
fmt0->nCodes ().set (enc_count);
fmt0->nCodes () = enc_count;
unsigned int glyph = 0;
for (unsigned int i = 0; i < code_ranges.length; i++)
{
hb_codepoint_t code = code_ranges[i].code;
for (int left = (int)code_ranges[i].glyph; left >= 0; left--)
fmt0->codes[glyph++].set (code++);
fmt0->codes[glyph++] = code++;
if (unlikely (!((glyph <= 0x100) && (code <= 0x100))))
return_trace (false);
}
@ -216,24 +216,24 @@ struct Encoding {
{
Encoding1 *fmt1 = c->allocate_size<Encoding1> (Encoding1::min_size + Encoding1_Range::static_size * code_ranges.length);
if (unlikely (fmt1 == nullptr)) return_trace (false);
fmt1->nRanges ().set (code_ranges.length);
fmt1->nRanges () = code_ranges.length;
for (unsigned int i = 0; i < code_ranges.length; i++)
{
if (unlikely (!((code_ranges[i].code <= 0xFF) && (code_ranges[i].glyph <= 0xFF))))
return_trace (false);
fmt1->ranges[i].first.set (code_ranges[i].code);
fmt1->ranges[i].nLeft.set (code_ranges[i].glyph);
fmt1->ranges[i].first = code_ranges[i].code;
fmt1->ranges[i].nLeft = code_ranges[i].glyph;
}
}
if (supp_codes.length > 0)
{
CFF1SuppEncData *suppData = c->allocate_size<CFF1SuppEncData> (CFF1SuppEncData::min_size + SuppEncoding::static_size * supp_codes.length);
if (unlikely (suppData == nullptr)) return_trace (false);
suppData->nSups ().set (supp_codes.length);
suppData->nSups () = supp_codes.length;
for (unsigned int i = 0; i < supp_codes.length; i++)
{
suppData->supps[i].code.set (supp_codes[i].code);
suppData->supps[i].glyph.set (supp_codes[i].glyph); /* actually SID */
suppData->supps[i].code = supp_codes[i].code;
suppData->supps[i].glyph = supp_codes[i].glyph; /* actually SID */
}
}
return_trace (true);
@ -469,7 +469,7 @@ struct Charset {
TRACE_SERIALIZE (this);
Charset *dest = c->extend_min (*this);
if (unlikely (dest == nullptr)) return_trace (false);
dest->format.set (format);
dest->format = format;
if (format == 0)
{
Charset0 *fmt0 = c->allocate_size<Charset0> (Charset0::min_size + HBUINT16::static_size * (num_glyphs - 1));
@ -479,7 +479,7 @@ struct Charset {
{
hb_codepoint_t sid = sid_ranges[i].code;
for (int left = (int)sid_ranges[i].glyph; left >= 0; left--)
fmt0->sids[glyph++].set (sid++);
fmt0->sids[glyph++] = sid++;
}
}
else if (format == 1)
@ -490,8 +490,8 @@ struct Charset {
{
if (unlikely (!(sid_ranges[i].glyph <= 0xFF)))
return_trace (false);
fmt1->ranges[i].first.set (sid_ranges[i].code);
fmt1->ranges[i].nLeft.set (sid_ranges[i].glyph);
fmt1->ranges[i].first = sid_ranges[i].code;
fmt1->ranges[i].nLeft = sid_ranges[i].glyph;
}
}
else /* format 2 */
@ -502,8 +502,8 @@ struct Charset {
{
if (unlikely (!(sid_ranges[i].glyph <= 0xFFFF)))
return_trace (false);
fmt2->ranges[i].first.set (sid_ranges[i].code);
fmt2->ranges[i].nLeft.set (sid_ranges[i].glyph);
fmt2->ranges[i].first = sid_ranges[i].code;
fmt2->ranges[i].nLeft = sid_ranges[i].glyph;
}
}
return_trace (true);
@ -577,7 +577,7 @@ struct CFF1StringIndex : CFF1Index
{
if (!unlikely (c->extend_min (this->count)))
return_trace (false);
count.set (0);
count = 0;
return_trace (true);
}


@ -83,21 +83,21 @@ struct CmapSubtableFormat4
bool serialize (hb_serialize_context_t *c,
const hb_subset_plan_t *plan,
const hb_vector_t<segment_plan> &segments)
const hb_sorted_vector_t<segment_plan> &segments)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
this->format.set (4);
this->length.set (get_sub_table_size (segments));
this->format = 4;
this->length = get_sub_table_size (segments);
this->segCountX2.set (segments.length * 2);
this->entrySelector.set (MAX (1u, hb_bit_storage (segments.length)) - 1);
this->searchRange.set (2 * (1u << this->entrySelector));
this->rangeShift.set (segments.length * 2 > this->searchRange
? 2 * segments.length - this->searchRange
: 0);
this->segCountX2 = segments.length * 2;
this->entrySelector = MAX (1u, hb_bit_storage (segments.length)) - 1;
this->searchRange = 2 * (1u << this->entrySelector);
this->rangeShift = segments.length * 2 > this->searchRange
? 2 * segments.length - this->searchRange
: 0;
HBUINT16 *end_count = c->allocate_size<HBUINT16> (HBUINT16::static_size * segments.length);
c->allocate_size<HBUINT16> (HBUINT16::static_size); // 2 bytes of padding.
@ -110,17 +110,17 @@ struct CmapSubtableFormat4
for (unsigned int i = 0; i < segments.length; i++)
{
end_count[i].set (segments[i].end_code);
start_count[i].set (segments[i].start_code);
end_count[i] = segments[i].end_code;
start_count[i] = segments[i].start_code;
if (segments[i].use_delta)
{
hb_codepoint_t cp = segments[i].start_code;
hb_codepoint_t start_gid = 0;
if (unlikely (!plan->new_gid_for_codepoint (cp, &start_gid) && cp != 0xFFFF))
return_trace (false);
id_delta[i].set (start_gid - segments[i].start_code);
id_delta[i] = start_gid - segments[i].start_code;
} else {
id_delta[i].set (0);
id_delta[i] = 0;
unsigned int num_codepoints = segments[i].end_code - segments[i].start_code + 1;
HBUINT16 *glyph_id_array = c->allocate_size<HBUINT16> (HBUINT16::static_size * num_codepoints);
if (glyph_id_array == nullptr)
@ -138,15 +138,14 @@ struct CmapSubtableFormat4
// id_range_offset[i]
// =
// 2 * (glyph_id_array - id_range_offset - i)
id_range_offset[i].set (2 * (
glyph_id_array - id_range_offset - i));
id_range_offset[i] = 2 * (glyph_id_array - id_range_offset - i);
for (unsigned int j = 0; j < num_codepoints; j++)
{
hb_codepoint_t cp = segments[i].start_code + j;
hb_codepoint_t new_gid;
if (unlikely (!plan->new_gid_for_codepoint (cp, &new_gid)))
return_trace (false);
glyph_id_array[j].set (new_gid);
glyph_id_array[j] = new_gid;
}
}
}
@ -154,7 +153,7 @@ struct CmapSubtableFormat4
return_trace (true);
}
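
As a reader aid for the idRangeOffset comment above: storing 2 * (glyph_id_array - id_range_offset - i) is exactly what lets a consumer reach glyphIdArray with the spec's pointer trick. A hedged sketch of that consumer-side arithmetic, using plain host-endian arrays and assuming glyphIdArray sits immediately after idRangeOffset as in the real subtable:

#include <cstdint>
static uint16_t cmap4_lookup (uint16_t cp,
                              const uint16_t *end_code, const uint16_t *start_code,
                              const int16_t  *id_delta, const uint16_t *id_range_offset,
                              unsigned seg_count)
{
  for (unsigned i = 0; i < seg_count; i++)
  {
    if (cp > end_code[i] || cp < start_code[i]) continue;
    if (!id_range_offset[i])
      return uint16_t (cp + id_delta[i]);  /* delta-mapped segment */
    /* idRangeOffset[i] is a byte offset from &idRangeOffset[i] into glyphIdArray. */
    uint16_t gid = *(&id_range_offset[i] + id_range_offset[i] / 2 + (cp - start_code[i]));
    return gid ? uint16_t (gid + id_delta[i]) : 0;
  }
  return 0; /* .notdef */
}
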
static size_t get_sub_table_size (const hb_vector_t<segment_plan> &segments)
static size_t get_sub_table_size (const hb_sorted_vector_t<segment_plan> &segments)
{
size_t segment_size = 0;
for (unsigned int i = 0; i < segments.length; i++)
@ -177,7 +176,7 @@ struct CmapSubtableFormat4
}
static bool create_sub_table_plan (const hb_subset_plan_t *plan,
hb_vector_t<segment_plan> *segments)
hb_sorted_vector_t<segment_plan> *segments)
{
segment_plan *segment = nullptr;
hb_codepoint_t last_gid = 0;
@ -198,11 +197,11 @@ struct CmapSubtableFormat4
cp != segment->end_code + 1u)
{
segment = segments->push ();
segment->start_code.set (cp);
segment->end_code.set (cp);
segment->start_code = cp;
segment->end_code = cp;
segment->use_delta = true;
} else {
segment->end_code.set (cp);
segment->end_code = cp;
if (last_gid + 1u != new_gid)
// gids are not consecutive in this segment, so delta
// cannot be used.
@ -216,8 +215,8 @@ struct CmapSubtableFormat4
if (segment == nullptr || segment->end_code != 0xFFFF)
{
segment = segments->push ();
segment->start_code.set (0xFFFF);
segment->end_code.set (0xFFFF);
segment->start_code = 0xFFFF;
segment->end_code = 0xFFFF;
segment->use_delta = true;
}
@ -491,7 +490,7 @@ struct CmapSubtableLongSegmented
}
bool serialize (hb_serialize_context_t *c,
const hb_vector_t<CmapSubtableLongGroup> &group_data)
const hb_sorted_vector_t<CmapSubtableLongGroup> &group_data)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
@ -519,24 +518,24 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
bool serialize (hb_serialize_context_t *c,
const hb_vector_t<CmapSubtableLongGroup> &groups)
const hb_sorted_vector_t<CmapSubtableLongGroup> &groups)
{
if (unlikely (!c->extend_min (*this))) return false;
this->format.set (12);
this->reserved.set (0);
this->length.set (get_sub_table_size (groups));
this->format = 12;
this->reserved = 0;
this->length = get_sub_table_size (groups);
return CmapSubtableLongSegmented<CmapSubtableFormat12>::serialize (c, groups);
}
static size_t get_sub_table_size (const hb_vector_t<CmapSubtableLongGroup> &groups)
static size_t get_sub_table_size (const hb_sorted_vector_t<CmapSubtableLongGroup> &groups)
{
return 16 + 12 * groups.length;
}
static bool create_sub_table_plan (const hb_subset_plan_t *plan,
hb_vector_t<CmapSubtableLongGroup> *groups)
hb_sorted_vector_t<CmapSubtableLongGroup> *groups)
{
CmapSubtableLongGroup *group = nullptr;
@ -552,11 +551,11 @@ struct CmapSubtableFormat12 : CmapSubtableLongSegmented<CmapSubtableFormat12>
if (!group || !_is_gid_consecutive (group, cp, new_gid))
{
group = groups->push ();
group->startCharCode.set (cp);
group->endCharCode.set (cp);
group->glyphID.set (new_gid);
group->startCharCode = cp;
group->endCharCode = cp;
group->glyphID = new_gid;
}
else group->endCharCode.set (cp);
else group->endCharCode = cp;
}
DEBUG_MSG(SUBSET, nullptr, "cmap");
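
To make the grouping condition concrete: a format 12 group can keep absorbing mappings only while the codepoints and the glyph IDs advance in lockstep. A small worked sketch over a plain cp-to-gid list (illustrative types, not the real CmapSubtableLongGroup):

#include <cstdint>
#include <cstdio>
#include <utility>
#include <vector>
struct Group { uint32_t start_cp, end_cp, start_gid; };
static std::vector<Group> build_groups (const std::vector<std::pair<uint32_t, uint32_t>> &cp_to_gid)
{
  std::vector<Group> groups;
  for (auto &m : cp_to_gid)  /* assumed sorted by codepoint */
  {
    bool consecutive = !groups.empty () &&
                       m.first == groups.back ().end_cp + 1 &&
                       m.second == groups.back ().start_gid + (m.first - groups.back ().start_cp);
    if (!consecutive)
      groups.push_back ({ m.first, m.first, m.second });
    else
      groups.back ().end_cp = m.first;
  }
  return groups;
}
int main ()
{
  /* {U+0041->1, U+0042->2, U+0043->3, U+0061->4} collapses to two groups. */
  for (auto &g : build_groups ({ {0x41, 1}, {0x42, 2}, {0x43, 3}, {0x61, 4} }))
    printf ("U+%04X..U+%04X -> gid %u\n", (unsigned) g.start_cp, (unsigned) g.end_cp, (unsigned) g.start_gid);
  return 0;
}
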
@ -853,8 +852,8 @@ struct cmap
+ CmapSubtableFormat12::get_sub_table_size (this->format12_groups);
}
hb_vector_t<CmapSubtableFormat4::segment_plan> format4_segments;
hb_vector_t<CmapSubtableLongGroup> format12_groups;
hb_sorted_vector_t<CmapSubtableFormat4::segment_plan> format4_segments;
hb_sorted_vector_t<CmapSubtableLongGroup> format12_groups;
};
bool _create_plan (const hb_subset_plan_t *plan,
@ -879,7 +878,7 @@ struct cmap
return false;
}
table->version.set (0);
table->version = 0;
if (unlikely (!table->encodingRecord.serialize (&c, /* numTables */ 3)))
return false;
@ -888,24 +887,24 @@ struct cmap
// Format 4, Plat 0 Encoding Record
EncodingRecord &format4_plat0_rec = table->encodingRecord[0];
format4_plat0_rec.platformID.set (0); // Unicode
format4_plat0_rec.encodingID.set (3);
format4_plat0_rec.platformID = 0; // Unicode
format4_plat0_rec.encodingID = 3;
// Format 4, Plat 3 Encoding Record
EncodingRecord &format4_plat3_rec = table->encodingRecord[1];
format4_plat3_rec.platformID.set (3); // Windows
format4_plat3_rec.encodingID.set (1); // Unicode BMP
format4_plat3_rec.platformID = 3; // Windows
format4_plat3_rec.encodingID = 1; // Unicode BMP
// Format 12 Encoding Record
EncodingRecord &format12_rec = table->encodingRecord[2];
format12_rec.platformID.set (3); // Windows
format12_rec.encodingID.set (10); // Unicode UCS-4
format12_rec.platformID = 3; // Windows
format12_rec.encodingID = 10; // Unicode UCS-4
// Write out format 4 sub table
{
CmapSubtable &subtable = format4_plat0_rec.subtable.serialize (&c, table);
format4_plat3_rec.subtable.set (format4_plat0_rec.subtable);
subtable.u.format.set (4);
format4_plat3_rec.subtable = format4_plat0_rec.subtable;
subtable.u.format = 4;
CmapSubtableFormat4 &format4 = subtable.u.format4;
if (unlikely (!format4.serialize (&c, plan, cmap_subset_plan.format4_segments)))
@ -915,7 +914,7 @@ struct cmap
// Write out format 12 sub table.
{
CmapSubtable &subtable = format12_rec.subtable.serialize (&c, table);
subtable.u.format.set (12);
subtable.u.format = 12;
CmapSubtableFormat12 &format12 = subtable.u.format12;
if (unlikely (!format12.serialize (&c, cmap_subset_plan.format12_groups)))
@ -1144,8 +1143,8 @@ struct cmap
unsigned int encoding_id) const
{
EncodingRecord key;
key.platformID.set (platform_id);
key.encodingID.set (encoding_id);
key.platformID = platform_id;
key.encodingID = encoding_id;
const EncodingRecord &result = encodingRecord.bsearch (key);
if (!result.subtable)


@ -117,7 +117,7 @@ struct glyf
return false;
head *head_prime = (head *) hb_blob_get_data_writable (head_prime_blob, nullptr);
head_prime->indexToLocFormat.set (use_short_loca ? 0 : 1);
head_prime->indexToLocFormat = use_short_loca ? 0 : 1;
bool success = plan->add_table (HB_OT_TAG_head, head_prime_blob);
hb_blob_destroy (head_prime_blob);


@ -88,8 +88,8 @@ struct DeviceRecord
return_trace (false);
}
this->pixelSize.set (subset_view.source_device_record->pixelSize);
this->maxWidth.set (subset_view.source_device_record->maxWidth);
this->pixelSize = subset_view.source_device_record->pixelSize;
this->maxWidth = subset_view.source_device_record->maxWidth;
for (unsigned int i = 0; i < subset_view.len (); i++)
{
@ -99,7 +99,7 @@ struct DeviceRecord
DEBUG_MSG(SUBSET, nullptr, "HDMX width for new gid %d is missing.", i);
return_trace (false);
}
widthsZ[i].set (*width);
widthsZ[i] = *width;
}
return_trace (true);
@ -141,9 +141,9 @@ struct hdmx
if (unlikely (!c->extend_min ((*this)))) return_trace (false);
this->version.set (source_hdmx->version);
this->numRecords.set (source_hdmx->numRecords);
this->sizeDeviceRecord.set (DeviceRecord::get_size (plan->num_output_glyphs ()));
this->version = source_hdmx->version;
this->numRecords = source_hdmx->numRecords;
this->sizeDeviceRecord = DeviceRecord::get_size (plan->num_output_glyphs ());
for (unsigned int i = 0; i < source_hdmx->numRecords; i++)
{


@ -84,7 +84,7 @@ struct hmtxvmtx
unsigned int length;
H *table = (H *) hb_blob_get_data (dest_blob, &length);
table->numberOfLongMetrics.set (num_hmetrics);
table->numberOfLongMetrics = num_hmetrics;
bool result = plan->add_table (H::tableTag, dest_blob);
hb_blob_destroy (dest_blob);
@ -134,12 +134,12 @@ struct hmtxvmtx
bool has_advance = i < num_advances;
if (has_advance)
{
((LongMetric *) dest_pos)->advance.set (advance);
((LongMetric *) dest_pos)->sb.set (side_bearing);
((LongMetric *) dest_pos)->advance = advance;
((LongMetric *) dest_pos)->sb = side_bearing;
}
else
{
((FWORD *) dest_pos)->set (side_bearing);
*((FWORD *) dest_pos) = side_bearing;
}
dest_pos += (has_advance ? 4 : 2);
}
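
The layout being written here is the standard hmtx/vmtx shape: numberOfLongMetrics 4-byte (advance, side bearing) records followed by bare 2-byte side bearings for the remaining glyphs. A quick size check with made-up numbers:

/* Illustrative only: byte size of an hmtx/vmtx payload. */
static unsigned metrics_size (unsigned num_long_metrics, unsigned num_glyphs)
{ return 4 * num_long_metrics + 2 * (num_glyphs - num_long_metrics); }
/* e.g. metrics_size (3, 10) == 3*4 + 7*2 == 26 bytes */
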


@ -67,7 +67,6 @@ namespace OT {
#define NOT_COVERED ((unsigned int) -1)
/*
*
* OpenType Layout Common Table Formats
@ -563,7 +562,7 @@ struct Feature
TRACE_SUBSET (this);
struct Feature *out = c->serializer->embed (*this);
if (unlikely (!out)) return_trace (false);
out->featureParams.set (0); /* TODO(subset) FeatureParams. */
out->featureParams = 0; /* TODO(subset) FeatureParams. */
return_trace (true);
}
@ -601,7 +600,7 @@ struct Feature
OffsetTo<FeatureParams> new_offset;
/* Check that it did not overflow. */
new_offset.set (new_offset_int);
new_offset = new_offset_int;
if (new_offset == new_offset_int &&
c->try_set (&featureParams, new_offset) &&
!featureParams.sanitize (c, this, closure ? closure->tag : HB_TAG_NONE))
@ -705,14 +704,14 @@ struct Lookup
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
lookupType.set (lookup_type);
lookupFlag.set (lookup_props & 0xFFFFu);
lookupType = lookup_type;
lookupFlag = lookup_props & 0xFFFFu;
if (unlikely (!subTable.serialize (c, num_subtables))) return_trace (false);
if (lookupFlag & LookupFlag::UseMarkFilteringSet)
{
if (unlikely (!c->extend (*this))) return_trace (false);
HBUINT16 &markFilteringSet = StructAfter<HBUINT16> (subTable);
markFilteringSet.set (lookup_props >> 16);
markFilteringSet = lookup_props >> 16;
}
return_trace (true);
}
@ -827,8 +826,9 @@ struct CoverageFormat1
return i;
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs)
template <typename Iterator,
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
return_trace (glyphArray.serialize (c, glyphs));
@ -854,19 +854,17 @@ struct CoverageFormat1
template <typename set_t>
bool add_coverage (set_t *glyphs) const
{
return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len);
}
{ return glyphs->add_sorted_array (glyphArray.arrayZ, glyphArray.len); }
public:
/* Older compilers need this to be public. */
struct Iter {
struct iter_t
{
void init (const struct CoverageFormat1 &c_) { c = &c_; i = 0; }
void fini () {}
bool more () { return i < c->glyphArray.len; }
bool more () const { return i < c->glyphArray.len; }
void next () { i++; }
hb_codepoint_t get_glyph () { return c->glyphArray[i]; }
unsigned int get_coverage () { return i; }
hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
private:
const struct CoverageFormat1 *c;
@ -895,35 +893,38 @@ struct CoverageFormat2
NOT_COVERED;
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs)
template <typename Iterator,
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.length))
if (unlikely (!glyphs))
{
rangeRecord.len.set (0);
rangeRecord.len = 0;
return_trace (true);
}
/* TODO(iter) Port to non-random-access iterator interface. */
unsigned int count = glyphs.len ();
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.length; i++)
for (unsigned int i = 1; i < count; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
rangeRecord.len.set (num_ranges);
rangeRecord.len = num_ranges;
if (unlikely (!c->extend (rangeRecord))) return_trace (false);
unsigned int range = 0;
rangeRecord[range].start = glyphs[0];
rangeRecord[range].value.set (0);
for (unsigned int i = 1; i < glyphs.length; i++)
rangeRecord[range].value = 0;
for (unsigned int i = 1; i < count; i++)
{
if (glyphs[i - 1] + 1 != glyphs[i])
{
range++;
rangeRecord[range].start = glyphs[i];
rangeRecord[range].value.set (i);
rangeRecord[range].value = i;
}
rangeRecord[range].end = glyphs[i];
}
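
A worked example of the range-building loop above, with made-up glyph IDs; each new range records its starting coverage index in `value`, which is what keeps coverage indices consecutive across ranges:

#include <cstdint>
#include <cstdio>
#include <vector>
struct Range { uint16_t start, end, value; };  /* loosely mirrors RangeRecord */
static std::vector<Range> build_ranges (const std::vector<uint16_t> &glyphs)  /* sorted input */
{
  std::vector<Range> out;
  for (unsigned i = 0; i < glyphs.size (); i++)
  {
    if (!i || glyphs[i - 1] + 1 != glyphs[i])
      out.push_back ({ glyphs[i], glyphs[i], (uint16_t) i });
    out.back ().end = glyphs[i];
  }
  return out;
}
int main ()
{
  for (auto &r : build_ranges ({3, 4, 5, 9, 10, 17}))
    printf ("[%u..%u] first coverage index %u\n", (unsigned) r.start, (unsigned) r.end, (unsigned) r.value);
  /* [3..5] 0, [9..10] 3, [17..17] 5 */
  return 0;
}
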
@ -973,7 +974,7 @@ struct CoverageFormat2
public:
/* Older compilers need this to be public. */
struct Iter
struct iter_t
{
void init (const CoverageFormat2 &c_)
{
@ -988,7 +989,7 @@ struct CoverageFormat2
}
}
void fini () {}
bool more () { return i < c->rangeRecord.len; }
bool more () const { return i < c->rangeRecord.len; }
void next ()
{
if (j >= c->rangeRecord[i].end)
@ -996,23 +997,25 @@ struct CoverageFormat2
i++;
if (more ())
{
hb_codepoint_t old = j;
unsigned int old = coverage;
j = c->rangeRecord[i].start;
if (unlikely (j <= old))
coverage = c->rangeRecord[i].value;
if (unlikely (coverage != old + 1))
{
/* Broken table. Skip. Important to avoid DoS. */
/* Broken table. Skip. Important to avoid DoS.
* Also, our callers depend on coverage being
* consecutive and monotonically increasing,
* i.e. iota(). */
i = c->rangeRecord.len;
return;
}
coverage = c->rangeRecord[i].value;
}
return;
}
coverage++;
j++;
}
hb_codepoint_t get_glyph () { return j; }
unsigned int get_coverage () { return coverage; }
hb_codepoint_t get_glyph () const { return j; }
private:
const struct CoverageFormat2 *c;
@ -1033,6 +1036,15 @@ struct CoverageFormat2
struct Coverage
{
/* Has interface. */
static constexpr unsigned SENTINEL = NOT_COVERED;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
switch (u.format) {
@ -1042,17 +1054,20 @@ struct Coverage
}
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs)
template <typename Iterator,
hb_enable_if (hb_is_sorted_iterator_of (Iterator, const GlyphID))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
/* TODO(iter) Port to non-random-access iterator interface. */
unsigned int count = glyphs.len ();
unsigned int num_ranges = 1;
for (unsigned int i = 1; i < glyphs.length; i++)
for (unsigned int i = 1; i < count; i++)
if (glyphs[i - 1] + 1 != glyphs[i])
num_ranges++;
u.format.set (glyphs.length * 2 < num_ranges * 3 ? 1 : 2);
u.format = count * 2 < num_ranges * 3 ? 1 : 2;
switch (u.format)
{
@ -1106,9 +1121,10 @@ struct Coverage
}
}
struct Iter
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
{
Iter (const Coverage &c_)
static constexpr bool is_sorted_iterator = true;
iter_t (const Coverage &c_ = Null(Coverage))
{
memset (this, 0, sizeof (*this));
format = c_.u.format;
@ -1119,7 +1135,7 @@ struct Coverage
default: return;
}
}
bool more ()
bool __more__ () const
{
switch (format)
{
@ -1128,7 +1144,7 @@ struct Coverage
default:return false;
}
}
void next ()
void __next__ ()
{
switch (format)
{
@ -1137,7 +1153,10 @@ struct Coverage
default: break;
}
}
hb_codepoint_t get_glyph ()
typedef hb_codepoint_t __item_t__;
__item_t__ __item__ () const { return get_glyph (); }
hb_codepoint_t get_glyph () const
{
switch (format)
{
@ -1146,23 +1165,15 @@ struct Coverage
default:return 0;
}
}
unsigned int get_coverage ()
{
switch (format)
{
case 1: return u.format1.get_coverage ();
case 2: return u.format2.get_coverage ();
default:return -1;
}
}
private:
unsigned int format;
union {
CoverageFormat2::Iter format2; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1::Iter format1;
CoverageFormat2::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1::iter_t format1;
} u;
};
iter_t iter () const { return iter_t (*this); }
protected:
union {
@ -1194,24 +1205,24 @@ struct ClassDefFormat1
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const HBUINT16> glyphs,
hb_array_t<const GlyphID> glyphs,
hb_array_t<const HBUINT16> klasses)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.length))
if (unlikely (!glyphs))
{
startGlyph.set (0);
classValue.len.set (0);
startGlyph = 0;
classValue.len = 0;
return_trace (true);
}
hb_codepoint_t glyph_min = glyphs[0];
hb_codepoint_t glyph_max = glyphs[glyphs.length - 1];
startGlyph.set (glyph_min);
classValue.len.set (glyph_max - glyph_min + 1);
startGlyph = glyph_min;
classValue.len = glyph_max - glyph_min + 1;
if (unlikely (!c->extend (classValue))) return_trace (false);
for (unsigned int i = 0; i < glyphs.length; i++)
@ -1225,22 +1236,22 @@ struct ClassDefFormat1
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
hb_vector_t<GlyphID> glyphs;
hb_sorted_vector_t<GlyphID> glyphs;
hb_vector_t<HBUINT16> klasses;
hb_codepoint_t start = startGlyph;
hb_codepoint_t end = start + classValue.len;
for (hb_codepoint_t g = start; g < end; g++)
{
if (!glyphset.has (g)) continue;
unsigned int value = classValue[g - start];
if (!value) continue;
if (!glyphset.has (g)) continue;
glyphs.push()->set (glyph_map[g]);
klasses.push()->set (value);
glyphs.push(glyph_map[g]);
klasses.push(value);
}
c->serializer->propagate_error (glyphs, klasses);
ClassDef_serialize (c->serializer, glyphs, klasses);
return_trace (glyphs.length);
return_trace ((bool) glyphs);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -1330,15 +1341,15 @@ struct ClassDefFormat2
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const HBUINT16> glyphs,
hb_array_t<const GlyphID> glyphs,
hb_array_t<const HBUINT16> klasses)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!glyphs.length))
if (unlikely (!glyphs))
{
rangeRecord.len.set (0);
rangeRecord.len = 0;
return_trace (true);
}
@ -1347,12 +1358,12 @@ struct ClassDefFormat2
if (glyphs[i - 1] + 1 != glyphs[i] ||
klasses[i - 1] != klasses[i])
num_ranges++;
rangeRecord.len.set (num_ranges);
rangeRecord.len = num_ranges;
if (unlikely (!c->extend (rangeRecord))) return_trace (false);
unsigned int range = 0;
rangeRecord[range].start = glyphs[0];
rangeRecord[range].value.set (klasses[0]);
rangeRecord[range].value = klasses[0];
for (unsigned int i = 1; i < glyphs.length; i++)
{
if (glyphs[i - 1] + 1 != glyphs[i] ||
@ -1385,13 +1396,13 @@ struct ClassDefFormat2
for (hb_codepoint_t g = start; g < end; g++)
{
if (!glyphset.has (g)) continue;
glyphs.push ()->set (glyph_map[g]);
klasses.push ()->set (value);
glyphs.push (glyph_map[g]);
klasses.push (value);
}
}
c->serializer->propagate_error (glyphs, klasses);
ClassDef_serialize (c->serializer, glyphs, klasses);
return_trace (glyphs.length);
return_trace ((bool) glyphs);
}
bool sanitize (hb_sanitize_context_t *c) const
@ -1469,6 +1480,15 @@ struct ClassDefFormat2
struct ClassDef
{
/* Has interface. */
static constexpr unsigned SENTINEL = 0;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Projection. */
hb_codepoint_t operator () (hb_codepoint_t k) const { return get (k); }
unsigned int get (hb_codepoint_t k) const { return get_class (k); }
unsigned int get_class (hb_codepoint_t glyph_id) const
{
switch (u.format) {
@ -1486,7 +1506,7 @@ struct ClassDef
if (unlikely (!c->extend_min (*this))) return_trace (false);
unsigned int format = 2;
if (glyphs.length)
if (likely (glyphs))
{
hb_codepoint_t glyph_min = glyphs[0];
hb_codepoint_t glyph_max = glyphs[glyphs.length - 1];
@ -1500,7 +1520,7 @@ struct ClassDef
if (1 + (glyph_max - glyph_min + 1) < num_ranges * 3)
format = 1;
}
u.format.set (format);
u.format = format;
switch (u.format)
{
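
The format heuristic above compares serialized sizes in uint16 units, roughly: format 1 pays 1 (startGlyph) plus one class value per glyph in the min..max span, while format 2 pays 3 per range. With my example numbers, a dense 60-glyph span split into 25 ranges picks format 1:

/* Rough cost model behind the branch above (uint16 units, illustrative). */
static unsigned classdef_format1_cost (unsigned glyph_min, unsigned glyph_max)
{ return 1 + (glyph_max - glyph_min + 1); }
static unsigned classdef_format2_cost (unsigned num_ranges)
{ return num_ranges * 3; }
/* classdef_format1_cost (100, 159) == 61 < classdef_format2_cost (25) == 75, so format 1 wins */
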


@ -443,15 +443,15 @@ struct GDEF
if (unlikely (!out)) return_trace (false);
out->glyphClassDef.serialize_subset (c, this+glyphClassDef, out);
out->attachList.set (0);//TODO(subset) serialize_subset (c, this+attachList, out);
out->ligCaretList.set (0);//TODO(subset) serialize_subset (c, this+ligCaretList, out);
out->attachList = 0;//TODO(subset) serialize_subset (c, this+attachList, out);
out->ligCaretList = 0;//TODO(subset) serialize_subset (c, this+ligCaretList, out);
out->markAttachClassDef.serialize_subset (c, this+markAttachClassDef, out);
if (version.to_int () >= 0x00010002u)
out->markGlyphSetsDef.set (0);// TODO(subset) serialize_subset (c, this+markGlyphSetsDef, out);
out->markGlyphSetsDef = 0;// TODO(subset) serialize_subset (c, this+markGlyphSetsDef, out);
if (version.to_int () >= 0x00010003u)
out->varStore.set (0);// TODO(subset) serialize_subset (c, this+varStore, out);
out->varStore = 0;// TODO(subset) serialize_subset (c, this+varStore, out);
return_trace (true);
}


@ -616,7 +616,7 @@ struct PairSet
friend struct PairPosFormat1;
bool intersects (const hb_set_t *glyphs,
const ValueFormat *valueFormats) const
const ValueFormat *valueFormats) const
{
unsigned int len1 = valueFormats[0].get_len ();
unsigned int len2 = valueFormats[1].get_len ();
@ -722,16 +722,14 @@ struct PairPosFormat1
{
bool intersects (const hb_set_t *glyphs) const
{
unsigned int count = pairSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (glyphs->has (iter.get_glyph ()) &&
(this+pairSet[iter.get_coverage ()]).intersects (glyphs, valueFormat))
return true;
}
return false;
return
+ hb_zip (this+coverage, pairSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([&] (const OffsetTo<PairSet> &_) -> bool
{ return (this+_).intersects (glyphs, valueFormat); })
| hb_any
;
}
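
Read as ordinary loops, the pipeline above zips each covered glyph with its PairSet, drops entries whose glyph is not in the set, and asks whether any surviving PairSet intersects. A rough standard-C++ equivalent under assumed simplified types:

#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <functional>
#include <set>
#include <vector>
/* Rough equivalent of the hb_zip | hb_filter | hb_map | hb_any chain above, with each
 * PairSet reduced to an opaque "does it intersect?" callback (an assumed simplification). */
static bool intersects_equivalent (const std::vector<uint16_t> &coverage_glyphs,
                                   const std::vector<std::function<bool (const std::set<uint16_t> &)>> &pair_sets,
                                   const std::set<uint16_t> &glyphs)
{
  std::size_t n = std::min (coverage_glyphs.size (), pair_sets.size ());  /* hb_zip pairs up to the shorter side */
  for (std::size_t i = 0; i < n; i++)                                     /* hb_zip (coverage, pairSet) */
    if (glyphs.count (coverage_glyphs[i])                                 /* hb_filter (*glyphs, hb_first) */
        && pair_sets[i] (glyphs))                                         /* hb_map (...) | hb_any */
      return true;
  return false;
}
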
void collect_glyphs (hb_collect_glyphs_context_t *c) const


@ -36,7 +36,7 @@ namespace OT {
static inline void SingleSubst_serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const GlyphID> substitutes);
struct SingleSubstFormat1
@ -46,35 +46,27 @@ struct SingleSubstFormat1
void closure (hb_closure_context_t *c) const
{
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
/* TODO Switch to range-based API to work around malicious fonts.
* https://github.com/harfbuzz/harfbuzz/issues/363 */
hb_codepoint_t glyph_id = iter.get_glyph ();
if (c->glyphs->has (glyph_id))
c->out->add ((glyph_id + deltaGlyphID) & 0xFFFFu);
}
+ hb_iter (this+coverage)
| hb_filter (*c->glyphs)
| hb_map ([&] (hb_codepoint_t g) -> hb_codepoint_t { return (g + deltaGlyphID) & 0xFFFFu; })
| hb_sink (c->output)
;
}
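
Same pattern, sink-flavoured: iterate the coverage glyphs, keep the ones already in the closure set, shift each by deltaGlyphID, and pour the results into c->output. In plain C++ terms, with assumed simplified containers:

#include <cstdint>
#include <set>
#include <vector>
/* Rough equivalent of: hb_iter (coverage) | hb_filter (glyphs) | hb_map (+delta) | hb_sink (output) */
static void closure_equivalent (const std::vector<uint16_t> &coverage_glyphs,
                                const std::set<uint16_t> &glyphs,   /* *c->glyphs */
                                std::set<uint16_t> &output,         /* *c->output */
                                int delta_glyph_id)
{
  for (uint16_t g : coverage_glyphs)
    if (glyphs.count (g))
      output.insert (uint16_t ((g + delta_glyph_id) & 0xFFFFu));
}
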
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).add_coverage (c->input))) return;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
/* TODO Switch to range-based API to work around malicious fonts.
* https://github.com/harfbuzz/harfbuzz/issues/363 */
hb_codepoint_t glyph_id = iter.get_glyph ();
c->output->add ((glyph_id + deltaGlyphID) & 0xFFFFu);
}
+ hb_iter (this+coverage)
| hb_map ([&] (hb_codepoint_t g) -> hb_codepoint_t { return (g + deltaGlyphID) & 0xFFFFu; })
| hb_sink (c->output)
;
}
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
{ return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
bool apply (hb_ot_apply_context_t *c) const
{
@ -92,13 +84,13 @@ struct SingleSubstFormat1
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
int delta)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (*this))) return_trace (false);
if (unlikely (!coverage.serialize (c, this).serialize (c, glyphs))) return_trace (false);
deltaGlyphID.set (delta); /* TODO(serialize) overflow? */
deltaGlyphID = delta; /* TODO(serialize) overflow? */
return_trace (true);
}
@ -107,15 +99,18 @@ struct SingleSubstFormat1
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
hb_vector_t<GlyphID> from;
hb_sorted_vector_t<GlyphID> from;
hb_vector_t<GlyphID> to;
hb_codepoint_t delta = deltaGlyphID;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (!glyphset.has (iter.get_glyph ())) continue;
from.push ()->set (glyph_map[iter.get_glyph ()]);
to.push ()->set (glyph_map[(iter.get_glyph () + delta) & 0xFFFF]);
}
+ hb_iter (this+coverage)
| hb_filter (glyphset)
| hb_map ([&] (hb_codepoint_t g) -> hb_pair_t<hb_codepoint_t, hb_codepoint_t>
{ return hb_pair<hb_codepoint_t, hb_codepoint_t> (glyph_map[g],
glyph_map[(g + delta) & 0xFFFF]); })
| hb_unzip (from, to);
c->serializer->propagate_error (from, to);
SingleSubst_serialize (c->serializer, from, to);
return_trace (from.length);
@ -145,35 +140,26 @@ struct SingleSubstFormat2
void closure (hb_closure_context_t *c) const
{
unsigned int count = substitute.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
c->out->add (substitute[iter.get_coverage ()]);
}
+ hb_zip (this+coverage, substitute)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_sink (c->output)
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).add_coverage (c->input))) return;
unsigned int count = substitute.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
c->output->add (substitute[iter.get_coverage ()]);
}
+ hb_zip (this+coverage, substitute)
| hb_map (hb_second)
| hb_sink (c->output)
;
}
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
{ return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
bool apply (hb_ot_apply_context_t *c) const
{
@ -189,7 +175,7 @@ struct SingleSubstFormat2
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const GlyphID> substitutes)
{
TRACE_SERIALIZE (this);
@ -204,14 +190,16 @@ struct SingleSubstFormat2
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;
hb_vector_t<GlyphID> from;
hb_sorted_vector_t<GlyphID> from;
hb_vector_t<GlyphID> to;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (!glyphset.has (iter.get_glyph ())) continue;
from.push ()->set (glyph_map[iter.get_glyph ()]);
to.push ()->set (glyph_map[substitute[iter.get_coverage ()]]);
}
+ hb_zip (this+coverage, substitute)
| hb_filter (glyphset, hb_first)
| hb_map ([&] (hb_pair_t<hb_codepoint_t, const GlyphID &> p) -> hb_pair_t<hb_codepoint_t, hb_codepoint_t>
{ return hb_pair (glyph_map[p.first], glyph_map[p.second]); })
| hb_unzip (from, to);
c->serializer->propagate_error (from, to);
SingleSubst_serialize (c->serializer, from, to);
return_trace (from.length);
@ -238,7 +226,7 @@ struct SingleSubstFormat2
struct SingleSubst
{
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const GlyphID> substitutes)
{
TRACE_SERIALIZE (this);
@ -256,7 +244,7 @@ struct SingleSubst
break;
}
}
u.format.set (format);
u.format = format;
switch (u.format) {
case 1: return_trace (u.format1.serialize (c, glyphs, delta));
case 2: return_trace (u.format2.serialize (c, glyphs, substitutes));
@ -286,7 +274,7 @@ struct SingleSubst
static inline void
SingleSubst_serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const GlyphID> substitutes)
{ c->start_embed<SingleSubst> ()->serialize (c, glyphs, substitutes); }
@ -296,7 +284,7 @@ struct Sequence
{
unsigned int count = substitute.len;
for (unsigned int i = 0; i < count; i++)
c->out->add (substitute[i]);
c->output->add (substitute[i]);
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -335,10 +323,10 @@ struct Sequence
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs)
hb_array_t<const GlyphID> subst)
{
TRACE_SERIALIZE (this);
return_trace (substitute.serialize (c, glyphs));
return_trace (substitute.serialize (c, subst));
}
bool sanitize (hb_sanitize_context_t *c) const
@ -361,31 +349,26 @@ struct MultipleSubstFormat1
void closure (hb_closure_context_t *c) const
{
unsigned int count = sequence.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
(this+sequence[iter.get_coverage ()]).closure (c);
}
+ hb_zip (this+coverage, sequence)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<Sequence> &_) { (this+_).closure (c); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).add_coverage (c->input))) return;
unsigned int count = sequence.len;
for (unsigned int i = 0; i < count; i++)
(this+sequence[i]).collect_glyphs (c);
+ hb_zip (this+coverage, sequence)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<Sequence> &_) { (this+_).collect_glyphs (c); })
;
}
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
{ return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
bool apply (hb_ot_apply_context_t *c) const
{
@ -398,7 +381,7 @@ struct MultipleSubstFormat1
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const GlyphID> substitute_glyphs_list)
{
@ -444,14 +427,14 @@ struct MultipleSubstFormat1
struct MultipleSubst
{
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const GlyphID> substitute_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
u.format = format;
switch (u.format) {
case 1: return_trace (u.format1.serialize (c, glyphs, substitute_len_list, substitute_glyphs_list));
default:return_trace (false);
@ -482,7 +465,7 @@ struct AlternateSet
{
unsigned int count = alternates.len;
for (unsigned int i = 0; i < count; i++)
c->out->add (alternates[i]);
c->output->add (alternates[i]);
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -514,10 +497,10 @@ struct AlternateSet
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs)
hb_array_t<const GlyphID> alts)
{
TRACE_SERIALIZE (this);
return_trace (alternates.serialize (c, glyphs));
return_trace (alternates.serialize (c, alts));
}
bool sanitize (hb_sanitize_context_t *c) const
@ -541,35 +524,25 @@ struct AlternateSubstFormat1
void closure (hb_closure_context_t *c) const
{
unsigned int count = alternateSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
(this+alternateSet[iter.get_coverage ()]).closure (c);
}
+ hb_zip (this+coverage, alternateSet)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<AlternateSet> &_) { (this+_).closure (c); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).add_coverage (c->input))) return;
unsigned int count = alternateSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
(this+alternateSet[iter.get_coverage ()]).collect_glyphs (c);
}
+ hb_zip (this+coverage, alternateSet)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<AlternateSet> &_) { (this+_).collect_glyphs (c); })
;
}
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
{ return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
bool apply (hb_ot_apply_context_t *c) const
{
@ -582,7 +555,7 @@ struct AlternateSubstFormat1
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> alternate_len_list,
hb_array_t<const GlyphID> alternate_glyphs_list)
{
@ -628,14 +601,14 @@ struct AlternateSubstFormat1
struct AlternateSubst
{
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> alternate_len_list,
hb_array_t<const GlyphID> alternate_glyphs_list)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
u.format = format;
switch (u.format) {
case 1: return_trace (u.format1.serialize (c, glyphs, alternate_len_list, alternate_glyphs_list));
default:return_trace (false);
@ -674,11 +647,8 @@ struct Ligature
void closure (hb_closure_context_t *c) const
{
unsigned int count = component.lenP1;
for (unsigned int i = 1; i < count; i++)
if (!c->glyphs->has (component[i]))
return;
c->out->add (ligGlyph);
if (!intersects (c->glyphs)) return;
c->output->add (ligGlyph);
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -689,15 +659,14 @@ struct Ligature
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
if (c->len != component.lenP1)
return_trace (false);
return false;
for (unsigned int i = 1; i < c->len; i++)
if (likely (c->glyphs[i] != component[i]))
return_trace (false);
return false;
return_trace (true);
return true;
}
bool apply (hb_ot_apply_context_t *c) const
@ -771,38 +740,34 @@ struct LigatureSet
{
bool intersects (const hb_set_t *glyphs) const
{
unsigned int num_ligs = ligature.len;
for (unsigned int i = 0; i < num_ligs; i++)
if ((this+ligature[i]).intersects (glyphs))
return true;
return false;
return
+ hb_iter (ligature)
| hb_map ([&] (const OffsetTo<Ligature> &_) -> bool { return (this+_).intersects (glyphs); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
{
unsigned int num_ligs = ligature.len;
for (unsigned int i = 0; i < num_ligs; i++)
(this+ligature[i]).closure (c);
+ hb_iter (ligature)
| hb_apply ([&] (const OffsetTo<Ligature> &_) { (this+_).closure (c); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
unsigned int num_ligs = ligature.len;
for (unsigned int i = 0; i < num_ligs; i++)
(this+ligature[i]).collect_glyphs (c);
+ hb_iter (ligature)
| hb_apply ([&] (const OffsetTo<Ligature> &_) { (this+_).collect_glyphs (c); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
unsigned int num_ligs = ligature.len;
for (unsigned int i = 0; i < num_ligs; i++)
{
const Ligature &lig = this+ligature[i];
if (lig.would_apply (c))
return_trace (true);
}
return_trace (false);
return
+ hb_iter (ligature)
| hb_map ([&] (const OffsetTo<Ligature> &_) -> bool { return (this+_).would_apply (c); })
| hb_any
;
}
bool apply (hb_ot_apply_context_t *c) const
@ -857,52 +822,44 @@ struct LigatureSubstFormat1
{
bool intersects (const hb_set_t *glyphs) const
{
unsigned int count = ligatureSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (glyphs->has (iter.get_glyph ()) &&
(this+ligatureSet[iter.get_coverage ()]).intersects (glyphs))
return true;
}
return false;
return
+ hb_zip (this+coverage, ligatureSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([&] (const OffsetTo<LigatureSet> &_) -> bool
{ return (this+_).intersects (glyphs); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
{
unsigned int count = ligatureSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
(this+ligatureSet[iter.get_coverage ()]).closure (c);
}
+ hb_zip (this+coverage, ligatureSet)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<LigatureSet> &_) { (this+_).closure (c); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
{
if (unlikely (!(this+coverage).add_coverage (c->input))) return;
unsigned int count = ligatureSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
(this+ligatureSet[iter.get_coverage ()]).collect_glyphs (c);
}
+ hb_zip (this+coverage, ligatureSet)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<LigatureSet> &_) { (this+_).collect_glyphs (c); })
;
}
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
unsigned int index = (this+coverage).get_coverage (c->glyphs[0]);
if (likely (index == NOT_COVERED)) return_trace (false);
if (likely (index == NOT_COVERED)) return false;
const LigatureSet &lig_set = this+ligatureSet[index];
return_trace (lig_set.would_apply (c));
return lig_set.would_apply (c);
}
bool apply (hb_ot_apply_context_t *c) const
@ -917,7 +874,7 @@ struct LigatureSubstFormat1
}
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> first_glyphs,
hb_sorted_array_t<const GlyphID> first_glyphs,
hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
hb_array_t<const GlyphID> ligatures_list,
hb_array_t<const unsigned int> component_count_list,
@ -968,7 +925,7 @@ struct LigatureSubstFormat1
struct LigatureSubst
{
bool serialize (hb_serialize_context_t *c,
hb_array_t<const GlyphID> first_glyphs,
hb_sorted_array_t<const GlyphID> first_glyphs,
hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
hb_array_t<const GlyphID> ligatures_list,
hb_array_t<const unsigned int> component_count_list,
@ -977,7 +934,7 @@ struct LigatureSubst
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (u.format))) return_trace (false);
unsigned int format = 1;
u.format.set (format);
u.format = format;
switch (u.format) {
case 1: return_trace (u.format1.serialize (c,
first_glyphs,
@ -1046,29 +1003,16 @@ struct ReverseChainSingleSubstFormat1
void closure (hb_closure_context_t *c) const
{
if (!intersects (c->glyphs)) return;
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
unsigned int count;
count = backtrack.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+backtrack[i]).intersects (c->glyphs))
return;
count = lookahead.len;
for (unsigned int i = 0; i < count; i++)
if (!(this+lookahead[i]).intersects (c->glyphs))
return;
const ArrayOf<GlyphID> &substitute = StructAfter<ArrayOf<GlyphID> > (lookahead);
count = substitute.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
c->out->add (substitute[iter.get_coverage ()]);
}
+ hb_zip (this+coverage, substitute)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_sink (c->output)
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -1094,10 +1038,7 @@ struct ReverseChainSingleSubstFormat1
const Coverage &get_coverage () const { return this+coverage; }
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
return_trace (c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED);
}
{ return c->len == 1 && (this+coverage).get_coverage (c->glyphs[0]) != NOT_COVERED; }
bool apply (hb_ot_apply_context_t *c) const
{
@ -1306,10 +1247,9 @@ struct SubstLookup : Lookup
bool would_apply (hb_would_apply_context_t *c,
const hb_ot_layout_lookup_accelerator_t *accel) const
{
TRACE_WOULD_APPLY (this);
if (unlikely (!c->len)) return_trace (false);
if (!accel->may_have (c->glyphs[0])) return_trace (false);
return_trace (dispatch (c));
if (unlikely (!c->len)) return false;
if (!accel->may_have (c->glyphs[0])) return false;
return dispatch (c);
}
static bool apply_recurse_func (hb_ot_apply_context_t *c, unsigned int lookup_index);
@ -1320,7 +1260,7 @@ struct SubstLookup : Lookup
bool serialize_single (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const GlyphID> substitutes)
{
TRACE_SERIALIZE (this);
@ -1330,7 +1270,7 @@ struct SubstLookup : Lookup
bool serialize_multiple (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> substitute_len_list,
hb_array_t<const GlyphID> substitute_glyphs_list)
{
@ -1344,7 +1284,7 @@ struct SubstLookup : Lookup
bool serialize_alternate (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_array_t<const GlyphID> glyphs,
hb_sorted_array_t<const GlyphID> glyphs,
hb_array_t<const unsigned int> alternate_len_list,
hb_array_t<const GlyphID> alternate_glyphs_list)
{
@ -1358,7 +1298,7 @@ struct SubstLookup : Lookup
bool serialize_ligature (hb_serialize_context_t *c,
uint32_t lookup_props,
hb_array_t<const GlyphID> first_glyphs,
hb_sorted_array_t<const GlyphID> first_glyphs,
hb_array_t<const unsigned int> ligature_per_first_glyph_count_list,
hb_array_t<const GlyphID> ligatures_list,
hb_array_t<const unsigned int> component_count_list,
@ -1380,7 +1320,7 @@ struct SubstLookup : Lookup
static hb_closure_context_t::return_t dispatch_closure_recurse_func (hb_closure_context_t *c, unsigned int lookup_index)
{
if (!c->should_visit_lookup (lookup_index))
return HB_VOID;
return hb_void_t ();
hb_closure_context_t::return_t ret = dispatch_recurse_func (c, lookup_index);

View File

@ -64,8 +64,8 @@ struct hb_closure_context_t :
const char *get_name () { return "CLOSURE"; }
typedef return_t (*recurse_func_t) (hb_closure_context_t *c, unsigned int lookup_index);
template <typename T>
return_t dispatch (const T &obj) { obj.closure (this); return HB_VOID; }
static return_t default_return_value () { return HB_VOID; }
return_t dispatch (const T &obj) { obj.closure (this); return hb_void_t (); }
static return_t default_return_value () { return hb_void_t (); }
void recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
@ -92,7 +92,7 @@ struct hb_closure_context_t :
hb_face_t *face;
hb_set_t *glyphs;
hb_set_t out[1];
hb_set_t output[1];
recurse_func_t recurse_func;
unsigned int nesting_level_left;
unsigned int debug_depth;
@ -114,8 +114,8 @@ struct hb_closure_context_t :
void flush ()
{
hb_set_union (glyphs, out);
hb_set_clear (out);
hb_set_union (glyphs, output);
hb_set_clear (output);
}
private:
@ -124,7 +124,7 @@ struct hb_closure_context_t :
struct hb_would_apply_context_t :
hb_dispatch_context_t<hb_would_apply_context_t, bool, HB_DEBUG_WOULD_APPLY>
hb_dispatch_context_t<hb_would_apply_context_t, bool, 0>
{
const char *get_name () { return "WOULD_APPLY"; }
template <typename T>
@ -156,8 +156,8 @@ struct hb_collect_glyphs_context_t :
const char *get_name () { return "COLLECT_GLYPHS"; }
typedef return_t (*recurse_func_t) (hb_collect_glyphs_context_t *c, unsigned int lookup_index);
template <typename T>
return_t dispatch (const T &obj) { obj.collect_glyphs (this); return HB_VOID; }
static return_t default_return_value () { return HB_VOID; }
return_t dispatch (const T &obj) { obj.collect_glyphs (this); return hb_void_t (); }
static return_t default_return_value () { return hb_void_t (); }
void recurse (unsigned int lookup_index)
{
if (unlikely (nesting_level_left == 0 || !recurse_func))
@ -652,9 +652,9 @@ struct hb_get_subtables_context_t :
{
hb_applicable_t *entry = array.push();
entry->init (obj, apply_to<T>);
return HB_VOID;
return hb_void_t ();
}
static return_t default_return_value () { return HB_VOID; }
static return_t default_return_value () { return hb_void_t (); }
hb_get_subtables_context_t (array_t &array_) :
array (array_),
@ -706,10 +706,11 @@ static inline bool intersects_array (const hb_set_t *glyphs,
intersects_func_t intersects_func,
const void *intersects_data)
{
for (unsigned int i = 0; i < count; i++)
if (likely (!intersects_func (glyphs, values[i], intersects_data)))
return false;
return true;
return
+ hb_iter (values, count)
| hb_map ([&] (const HBUINT16 &_) -> bool { return intersects_func (glyphs, _, intersects_data); })
| hb_any
;
}
@ -734,8 +735,10 @@ static inline void collect_array (hb_collect_glyphs_context_t *c HB_UNUSED,
collect_glyphs_func_t collect_func,
const void *collect_data)
{
for (unsigned int i = 0; i < count; i++)
collect_func (glyphs, values[i], collect_data);
return
+ hb_iter (values, count)
| hb_apply ([&] (const HBUINT16 &_) { collect_func (glyphs, _, collect_data); })
;
}
@ -1318,10 +1321,12 @@ struct Rule
bool would_apply (hb_would_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
const UnsizedArrayOf<LookupRecord> &lookupRecord = StructAfter<UnsizedArrayOf<LookupRecord> >
(inputZ.as_array (inputCount ? inputCount - 1 : 0));
return_trace (context_would_apply_lookup (c, inputCount, inputZ.arrayZ, lookupCount, lookupRecord.arrayZ, lookup_context));
return context_would_apply_lookup (c,
inputCount, inputZ.arrayZ,
lookupCount, lookupRecord.arrayZ,
lookup_context);
}
bool apply (hb_ot_apply_context_t *c,
@ -1364,53 +1369,51 @@ struct RuleSet
bool intersects (const hb_set_t *glyphs,
ContextClosureLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).intersects (glyphs, lookup_context))
return true;
return false;
return
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<Rule> &_) -> bool { return (this+_).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c,
ContextClosureLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).closure (c, lookup_context);
return
+ hb_iter (rule)
| hb_apply ([&] (const OffsetTo<Rule> &_) { (this+_).closure (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c,
ContextCollectGlyphsLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).collect_glyphs (c, lookup_context);
return
+ hb_iter (rule)
| hb_apply ([&] (const OffsetTo<Rule> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).would_apply (c, lookup_context))
return_trace (true);
}
return_trace (false);
return
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<Rule> &_) -> bool { return (this+_).would_apply (c, lookup_context); })
| hb_any
;
}
bool apply (hb_ot_apply_context_t *c,
ContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
{
if ((this+rule[i]).apply (c, lookup_context))
return_trace (true);
}
return_trace (false);
return_trace (
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<Rule> &_) -> bool { return (this+_).apply (c, lookup_context); })
| hb_any
)
;
}
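The lambdas in these rewrites lean on the `this+_` idiom: an OffsetTo field stores a byte offset relative to the enclosing struct, and adding it to that struct yields a reference to the target subtable. A minimal toy version of the idea (the real OffsetTo also handles null offsets, byte order and sanitization):

// Toy model of the `base + offset` dereference used in the lambdas above.
#include <cassert>
#include <cstddef>
#include <cstdint>

template <typename T>
struct OffsetTo16
{
  uint16_t offset;                       // byte offset from the enclosing struct
  template <typename Base>
  friend const T & operator + (const Base &base, const OffsetTo16 &off)
  { return *reinterpret_cast<const T *> (reinterpret_cast<const char *> (&base) + off.offset); }
};

struct Rule { uint16_t value; };

struct Table
{
  OffsetTo16<Rule> rule_off;             // offset measured from the start of Table
  Rule rule;                             // the subtable the offset points at
};

int main ()
{
  Table t;
  t.rule_off.offset = offsetof (Table, rule);
  t.rule.value = 42;
  assert ((t + t.rule_off).value == 42); // reads like `this + rule_off` inside Table
  return 0;
}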
bool sanitize (hb_sanitize_context_t *c) const
@ -1437,16 +1440,13 @@ struct ContextFormat1
nullptr
};
unsigned int count = ruleSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (glyphs->has (iter.get_glyph ()) &&
(this+ruleSet[iter.get_coverage ()]).intersects (glyphs, lookup_context))
return true;
}
return false;
return
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([&] (const OffsetTo<RuleSet> &_) -> bool { return (this+_).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
@ -1456,14 +1456,11 @@ struct ContextFormat1
nullptr
};
unsigned int count = ruleSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
(this+ruleSet[iter.get_coverage ()]).closure (c, lookup_context);
}
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<RuleSet> &_) { (this+_).closure (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -1475,21 +1472,19 @@ struct ContextFormat1
nullptr
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
+ hb_iter (ruleSet)
| hb_apply ([&] (const OffsetTo<RuleSet> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const RuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ContextApplyLookupContext lookup_context = {
{match_glyph},
nullptr
};
return_trace (rule_set.would_apply (c, lookup_context));
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
@ -1549,13 +1544,13 @@ struct ContextFormat2
&class_def
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (class_def.intersects_class (glyphs, i) &&
(this+ruleSet[i]).intersects (glyphs, lookup_context))
return true;
return false;
return
+ hb_enumerate (ruleSet)
| hb_filter ([&] (const hb_pair_t<unsigned, const OffsetTo<RuleSet> &> p) -> bool
{ return class_def.intersects_class (glyphs, p.first) &&
(this+p.second).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
@ -1570,12 +1565,14 @@ struct ContextFormat2
&class_def
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (class_def.intersects_class (c->glyphs, i)) {
const RuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
return
+ hb_enumerate (ruleSet)
| hb_filter ([&] (unsigned _) -> bool
{ return class_def.intersects_class (c->glyphs, _); },
hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<RuleSet> &_) { (this+_).closure (c, lookup_context); })
;
}
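Here hb_filter receives both a predicate and a projection (hb_first), so the class-intersection test looks only at the enumerated index while the rule-set offset rides along and is closed over afterwards. A plain-C++ sketch of that enumerate / filter-by-index / apply shape (names made up):

// Toy model of: enumerate | filter (predicate, first) | map (second) | apply (f)
#include <cassert>
#include <vector>

int main ()
{
  std::vector<int> rule_sets = {100, 200, 300};      // stand-ins for OffsetTo<RuleSet>
  auto intersects_class = [] (unsigned klass) { return klass == 1; };  // fake class test

  std::vector<int> closed;
  for (unsigned i = 0; i < rule_sets.size (); i++)   // hb_enumerate
    if (intersects_class (i))                        // hb_filter (..., hb_first)
      closed.push_back (rule_sets[i]);               // hb_map (hb_second) | hb_apply

  assert (closed.size () == 1 && closed[0] == 200);
  return 0;
}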
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -1588,15 +1585,13 @@ struct ContextFormat2
&class_def
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
+ hb_iter (ruleSet)
| hb_apply ([&] (const OffsetTo<RuleSet> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ClassDef &class_def = this+classDef;
unsigned int index = class_def.get_class (c->glyphs[0]);
const RuleSet &rule_set = this+ruleSet[index];
@ -1604,7 +1599,7 @@ struct ContextFormat2
{match_class},
&class_def
};
return_trace (rule_set.would_apply (c, lookup_context));
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
@ -1704,14 +1699,15 @@ struct ContextFormat3
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const LookupRecord *lookupRecord = &StructAfter<LookupRecord> (coverageZ.as_array (glyphCount));
struct ContextApplyLookupContext lookup_context = {
{match_coverage},
this
};
return_trace (context_would_apply_lookup (c, glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1), lookupCount, lookupRecord, lookup_context));
return context_would_apply_lookup (c,
glyphCount, (const HBUINT16 *) (coverageZ.arrayZ + 1),
lookupCount, lookupRecord,
lookup_context);
}
const Coverage &get_coverage () const { return this+coverageZ[0]; }
@ -1967,15 +1963,14 @@ struct ChainRule
bool would_apply (hb_would_apply_context_t *c,
ChainContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16> > (backtrack);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
return_trace (chain_context_would_apply_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ, lookup.len,
lookup.arrayZ, lookup_context));
return chain_context_would_apply_lookup (c,
backtrack.len, backtrack.arrayZ,
input.lenP1, input.arrayZ,
lookahead.len, lookahead.arrayZ, lookup.len,
lookup.arrayZ, lookup_context);
}
bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
@ -2025,46 +2020,46 @@ struct ChainRuleSet
{
bool intersects (const hb_set_t *glyphs, ChainContextClosureLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).intersects (glyphs, lookup_context))
return true;
return false;
return
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<ChainRule> &_) -> bool { return (this+_).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c, ChainContextClosureLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).closure (c, lookup_context);
return
+ hb_iter (rule)
| hb_apply ([&] (const OffsetTo<ChainRule> &_) { (this+_).closure (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c, ChainContextCollectGlyphsLookupContext &lookup_context) const
{
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
(this+rule[i]).collect_glyphs (c, lookup_context);
return
+ hb_iter (rule)
| hb_apply ([&] (const OffsetTo<ChainRule> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_WOULD_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).would_apply (c, lookup_context))
return_trace (true);
return_trace (false);
return
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<ChainRule> &_) -> bool { return (this+_).would_apply (c, lookup_context); })
| hb_any
;
}
bool apply (hb_ot_apply_context_t *c, ChainContextApplyLookupContext &lookup_context) const
{
TRACE_APPLY (this);
unsigned int num_rules = rule.len;
for (unsigned int i = 0; i < num_rules; i++)
if ((this+rule[i]).apply (c, lookup_context))
return_trace (true);
return_trace (false);
return_trace (
+ hb_iter (rule)
| hb_map ([&] (const OffsetTo<ChainRule> &_) -> bool { return (this+_).apply (c, lookup_context); })
| hb_any
)
;
}
bool sanitize (hb_sanitize_context_t *c) const
@ -2090,16 +2085,13 @@ struct ChainContextFormat1
{nullptr, nullptr, nullptr}
};
unsigned int count = ruleSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (glyphs->has (iter.get_glyph ()) &&
(this+ruleSet[iter.get_coverage ()]).intersects (glyphs, lookup_context))
return true;
}
return false;
return
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*glyphs, hb_first)
| hb_map (hb_second)
| hb_map ([&] (const OffsetTo<ChainRuleSet> &_) -> bool { return (this+_).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
@ -2109,14 +2101,11 @@ struct ChainContextFormat1
{nullptr, nullptr, nullptr}
};
unsigned int count = ruleSet.len;
for (Coverage::Iter iter (this+coverage); iter.more (); iter.next ())
{
if (unlikely (iter.get_coverage () >= count))
break; /* Work around malicious fonts. https://github.com/harfbuzz/harfbuzz/issues/363 */
if (c->glyphs->has (iter.get_glyph ()))
(this+ruleSet[iter.get_coverage ()]).closure (c, lookup_context);
}
+ hb_zip (this+coverage, ruleSet)
| hb_filter (*c->glyphs, hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<ChainRuleSet> &_) { (this+_).closure (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -2128,21 +2117,19 @@ struct ChainContextFormat1
{nullptr, nullptr, nullptr}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
+ hb_iter (ruleSet)
| hb_apply ([&] (const OffsetTo<ChainRuleSet> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ChainRuleSet &rule_set = this+ruleSet[(this+coverage).get_coverage (c->glyphs[0])];
struct ChainContextApplyLookupContext lookup_context = {
{match_glyph},
{nullptr, nullptr, nullptr}
};
return_trace (rule_set.would_apply (c, lookup_context));
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
@ -2204,13 +2191,13 @@ struct ChainContextFormat2
&lookahead_class_def}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (input_class_def.intersects_class (glyphs, i) &&
(this+ruleSet[i]).intersects (glyphs, lookup_context))
return true;
return false;
return
+ hb_enumerate (ruleSet)
| hb_filter ([&] (const hb_pair_t<unsigned, const OffsetTo<ChainRuleSet> &> p) -> bool
{ return input_class_def.intersects_class (glyphs, p.first) &&
(this+p.second).intersects (glyphs, lookup_context); })
| hb_any
;
}
void closure (hb_closure_context_t *c) const
{
@ -2228,12 +2215,14 @@ struct ChainContextFormat2
&lookahead_class_def}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
if (input_class_def.intersects_class (c->glyphs, i)) {
const ChainRuleSet &rule_set = this+ruleSet[i];
rule_set.closure (c, lookup_context);
}
return
+ hb_enumerate (ruleSet)
| hb_filter ([&] (unsigned _) -> bool
{ return input_class_def.intersects_class (c->glyphs, _); },
hb_first)
| hb_map (hb_second)
| hb_apply ([&] (const OffsetTo<ChainRuleSet> &_) { (this+_).closure (c, lookup_context); })
;
}
void collect_glyphs (hb_collect_glyphs_context_t *c) const
@ -2251,15 +2240,13 @@ struct ChainContextFormat2
&lookahead_class_def}
};
unsigned int count = ruleSet.len;
for (unsigned int i = 0; i < count; i++)
(this+ruleSet[i]).collect_glyphs (c, lookup_context);
+ hb_iter (ruleSet)
| hb_apply ([&] (const OffsetTo<ChainRuleSet> &_) { (this+_).collect_glyphs (c, lookup_context); })
;
}
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const ClassDef &backtrack_class_def = this+backtrackClassDef;
const ClassDef &input_class_def = this+inputClassDef;
const ClassDef &lookahead_class_def = this+lookaheadClassDef;
@ -2272,7 +2259,7 @@ struct ChainContextFormat2
&input_class_def,
&lookahead_class_def}
};
return_trace (rule_set.would_apply (c, lookup_context));
return rule_set.would_apply (c, lookup_context);
}
const Coverage &get_coverage () const { return this+coverage; }
@ -2403,8 +2390,6 @@ struct ChainContextFormat3
bool would_apply (hb_would_apply_context_t *c) const
{
TRACE_WOULD_APPLY (this);
const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage> > (backtrack);
const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage> > (input);
const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord> > (lookahead);
@ -2412,11 +2397,11 @@ struct ChainContextFormat3
{match_coverage},
{this, this, this}
};
return_trace (chain_context_would_apply_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ, lookup_context));
return chain_context_would_apply_lookup (c,
backtrack.len, (const HBUINT16 *) backtrack.arrayZ,
input.len, (const HBUINT16 *) input.arrayZ + 1,
lookahead.len, (const HBUINT16 *) lookahead.arrayZ,
lookup.len, lookup.arrayZ, lookup_context);
}
const Coverage &get_coverage () const

View File

@ -167,7 +167,7 @@ struct hb_ot_map_t
hb_mask_t global_mask;
hb_vector_t<feature_map_t> features;
hb_sorted_vector_t<feature_map_t> features;
hb_vector_t<lookup_map_t> lookups[2]; /* GSUB/GPOS */
hb_vector_t<stage_map_t> stages[2]; /* GSUB/GPOS */
};

View File

@ -77,7 +77,7 @@ struct maxp
void set_num_glyphs (unsigned int count)
{
numGlyphs.set (count);
numGlyphs = count;
}
bool sanitize (hb_sanitize_context_t *c) const
@ -119,13 +119,13 @@ struct maxp
if (maxp_prime->version.major == 1)
{
maxpV1Tail &v1 = StructAfter<maxpV1Tail> (*maxp_prime);
v1.maxZones.set (1);
v1.maxTwilightPoints.set (0);
v1.maxStorage.set (0);
v1.maxFunctionDefs.set (0);
v1.maxInstructionDefs.set (0);
v1.maxStackElements.set (0);
v1.maxSizeOfInstructions.set (0);
v1.maxZones = 1;
v1.maxTwilightPoints = 0;
v1.maxStorage = 0;
v1.maxFunctionDefs = 0;
v1.maxInstructionDefs = 0;
v1.maxStackElements = 0;
v1.maxSizeOfInstructions = 0;
}
}
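The `field.set (x)` to `field = x` change that repeats through this diff presumably rests on the big-endian integer wrappers gaining assignment and conversion operators; a toy sketch of such a wrapper, with naive byte handling rather than the real BEInt/HBUINT16 implementation:

// Minimal sketch of a big-endian 16-bit field that supports `field = x` and
// implicit read-back, in the spirit of HBUINT16.  Assumption: the real type
// also checks ranges and works on unaligned storage; this toy does not.
#include <cassert>
#include <cstdint>

struct BEUInt16
{
  uint8_t b[2];                                  // stored big-endian regardless of host
  BEUInt16 & operator = (uint16_t v)
  { b[0] = uint8_t (v >> 8); b[1] = uint8_t (v & 0xFF); return *this; }
  operator uint16_t () const { return uint16_t ((b[0] << 8) | b[1]); }
};

int main ()
{
  BEUInt16 numGlyphs;
  numGlyphs = 1234;                              // was: numGlyphs.set (1234)
  assert (uint16_t (numGlyphs) == 1234);
  assert (numGlyphs.b[0] == 0x04 && numGlyphs.b[1] == 0xD2);
  return 0;
}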

View File

@ -160,8 +160,8 @@ struct OS2
uint16_t min_cp, max_cp;
find_min_and_max_codepoint (plan->unicodes, &min_cp, &max_cp);
os2_prime->usFirstCharIndex.set (min_cp);
os2_prime->usLastCharIndex.set (max_cp);
os2_prime->usFirstCharIndex = min_cp;
os2_prime->usLastCharIndex = max_cp;
_update_unicode_ranges (plan->unicodes, os2_prime->ulUnicodeRange);
bool result = plan->add_table (HB_OT_TAG_OS2, os2_prime_blob);
@ -174,7 +174,7 @@ struct OS2
HBUINT32 ulUnicodeRange[4]) const
{
for (unsigned int i = 0; i < 4; i++)
ulUnicodeRange[i].set (0);
ulUnicodeRange[i] = 0;
hb_codepoint_t cp = HB_SET_VALUE_INVALID;
while (codepoints->next (&cp)) {
@ -184,14 +184,14 @@ struct OS2
unsigned int block = bit / 32;
unsigned int bit_in_block = bit % 32;
unsigned int mask = 1 << bit_in_block;
ulUnicodeRange[block].set (ulUnicodeRange[block] | mask);
ulUnicodeRange[block] = ulUnicodeRange[block] | mask;
}
if (cp >= 0x10000 && cp <= 0x110000)
{
/* the spec says that bit 57 ("Non Plane 0") implies that there's
at least one codepoint beyond the BMP; so I also include all
the non-BMP codepoints here */
ulUnicodeRange[1].set (ulUnicodeRange[1] | (1 << 25));
ulUnicodeRange[1] = ulUnicodeRange[1] | (1 << 25);
}
}
}
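The range update treats the four ulUnicodeRange words as one 128-bit bitfield: bit N lives in word N/32 at position N%32, so bit 57 ("Non Plane 0") is word 1, bit 25, matching the `ulUnicodeRange[1] | (1 << 25)` write above. A small standalone check of that arithmetic:

// Worked example of the OS/2 ulUnicodeRange bit placement used above.
#include <cassert>
#include <cstdint>

int main ()
{
  uint32_t ulUnicodeRange[4] = {0, 0, 0, 0};

  unsigned bit = 57;                    // "Non Plane 0" range bit
  unsigned block = bit / 32;            // which 32-bit word
  unsigned bit_in_block = bit % 32;     // position inside that word
  uint32_t mask = 1u << bit_in_block;

  ulUnicodeRange[block] |= mask;

  assert (block == 1 && bit_in_block == 25);
  assert (ulUnicodeRange[1] == (1u << 25));
  return 0;
}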

View File

@ -88,7 +88,7 @@ struct post
return false;
}
post_prime->version.major.set (3); // Version 3 does not have any glyph names.
post_prime->version.major = 3; // Version 3 does not have any glyph names.
bool result = plan->add_table (HB_OT_TAG_post, post_prime_blob);
hb_blob_destroy (post_prime_blob);

View File

@ -66,8 +66,8 @@ arabic_fallback_synthesize_lookup_single (const hb_ot_shape_plan_t *plan HB_UNUS
u_glyph > 0xFFFFu || s_glyph > 0xFFFFu)
continue;
glyphs[num_glyphs].set (u_glyph);
substitutes[num_glyphs].set (s_glyph);
glyphs[num_glyphs] = u_glyph;
substitutes[num_glyphs] = s_glyph;
num_glyphs++;
}
@ -77,7 +77,9 @@ arabic_fallback_synthesize_lookup_single (const hb_ot_shape_plan_t *plan HB_UNUS
/* Bubble-sort or something equally good!
* May not be good-enough for presidential candidate interviews, but good-enough for us... */
hb_stable_sort (&glyphs[0], num_glyphs, (int(*)(const OT::GlyphID*, const OT::GlyphID *)) OT::GlyphID::cmp, &substitutes[0]);
hb_stable_sort (&glyphs[0], num_glyphs,
(int(*)(const OT::HBUINT16*, const OT::HBUINT16 *)) OT::GlyphID::cmp,
&substitutes[0]);
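hb_stable_sort sorts the key array while applying the same permutation to a companion array, which keeps glyphs and substitutes (and, below, first_glyphs and their indirection) index-aligned for serialization. A hedged standalone equivalent built on an index sort rather than the real hb_stable_sort signature:

// Sort keys and carry a parallel array along, keeping the two index-aligned.
// This models what the hb_stable_sort calls above rely on; names are made up.
#include <algorithm>
#include <cassert>
#include <numeric>
#include <vector>

static void stable_sort_with_companion (std::vector<unsigned> &keys,
                                        std::vector<unsigned> &companion)
{
  std::vector<size_t> order (keys.size ());
  std::iota (order.begin (), order.end (), 0);
  std::stable_sort (order.begin (), order.end (),
                    [&] (size_t a, size_t b) { return keys[a] < keys[b]; });

  std::vector<unsigned> k2, c2;
  for (size_t i : order) { k2.push_back (keys[i]); c2.push_back (companion[i]); }
  keys = k2; companion = c2;
}

int main ()
{
  std::vector<unsigned> glyphs      = {30, 10, 20};   // unsorted first glyphs
  std::vector<unsigned> substitutes = { 3,  1,  2};   // parallel substitutes
  stable_sort_with_companion (glyphs, substitutes);
  assert ((glyphs == std::vector<unsigned> {10, 20, 30}));
  assert ((substitutes == std::vector<unsigned> {1, 2, 3}));
  return 0;
}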
/* Each glyph takes four bytes max, and there's some overhead. */
@ -86,7 +88,7 @@ arabic_fallback_synthesize_lookup_single (const hb_ot_shape_plan_t *plan HB_UNUS
OT::SubstLookup *lookup = c.start_serialize<OT::SubstLookup> ();
bool ret = lookup->serialize_single (&c,
OT::LookupFlag::IgnoreMarks,
hb_array (glyphs, num_glyphs),
hb_sorted_array (glyphs, num_glyphs),
hb_array (substitutes, num_glyphs));
c.end_serialize ();
/* TODO sanitize the results? */
@ -118,12 +120,14 @@ arabic_fallback_synthesize_lookup_ligature (const hb_ot_shape_plan_t *plan HB_UN
hb_codepoint_t first_glyph;
if (!hb_font_get_glyph (font, first_u, 0, &first_glyph))
continue;
first_glyphs[num_first_glyphs].set (first_glyph);
first_glyphs[num_first_glyphs] = first_glyph;
ligature_per_first_glyph_count_list[num_first_glyphs] = 0;
first_glyphs_indirection[num_first_glyphs] = first_glyph_idx;
num_first_glyphs++;
}
hb_stable_sort (&first_glyphs[0], num_first_glyphs, (int(*)(const OT::GlyphID*, const OT::GlyphID *)) OT::GlyphID::cmp, &first_glyphs_indirection[0]);
hb_stable_sort (&first_glyphs[0], num_first_glyphs,
(int(*)(const OT::HBUINT16*, const OT::HBUINT16 *)) OT::GlyphID::cmp,
&first_glyphs_indirection[0]);
/* Now that the first-glyphs are sorted, walk again, populate ligatures. */
for (unsigned int i = 0; i < num_first_glyphs; i++)
@ -142,9 +146,9 @@ arabic_fallback_synthesize_lookup_ligature (const hb_ot_shape_plan_t *plan HB_UN
ligature_per_first_glyph_count_list[i]++;
ligature_list[num_ligatures].set (ligature_glyph);
ligature_list[num_ligatures] = ligature_glyph;
component_count_list[num_ligatures] = 2;
component_list[num_ligatures].set (second_glyph);
component_list[num_ligatures] = second_glyph;
num_ligatures++;
}
}
@ -159,7 +163,7 @@ arabic_fallback_synthesize_lookup_ligature (const hb_ot_shape_plan_t *plan HB_UN
OT::SubstLookup *lookup = c.start_serialize<OT::SubstLookup> ();
bool ret = lookup->serialize_ligature (&c,
OT::LookupFlag::IgnoreMarks,
hb_array (first_glyphs, num_first_glyphs),
hb_sorted_array (first_glyphs, num_first_glyphs),
hb_array (ligature_per_first_glyph_count_list, num_first_glyphs),
hb_array (ligature_list, num_ligatures),
hb_array (component_count_list, num_ligatures),

File diff suppressed because it is too large

View File

@ -52,6 +52,7 @@ DOTTEDCIRCLE = 12;
RS = 13;
Repha = 15;
Ra = 16;
CM = 17;
Symbol= 18;
CS = 19;
@ -67,10 +68,10 @@ matra_group = z{0,3}.M.N?.(H | forced_rakar)?;
syllable_tail = (z?.SM.SM?.ZWNJ?)? A{0,3}?;
halant_group = (z?.H.(ZWJ.N?)?);
final_halant_group = halant_group | H.ZWNJ;
medial_group = CM?;
halant_or_matra_group = (final_halant_group | matra_group{0,4});
complex_syllable_tail = (halant_group.cn){0,4} halant_or_matra_group syllable_tail;
complex_syllable_tail = (halant_group.cn){0,4} medial_group halant_or_matra_group syllable_tail;
consonant_syllable = (Repha|CS)? cn complex_syllable_tail;
vowel_syllable = reph? V.n? (ZWJ | complex_syllable_tail);

View File

@ -720,7 +720,7 @@ initial_reordering_consonant_syllable (const hb_ot_shape_plan_t *plan,
indic_position_t last_pos = POS_START;
for (unsigned int i = start; i < end; i++)
{
if ((FLAG_UNSAFE (info[i].indic_category()) & (JOINER_FLAGS | FLAG (OT_N) | FLAG (OT_RS) | FLAG (OT_H))))
if ((FLAG_UNSAFE (info[i].indic_category()) & (JOINER_FLAGS | FLAG (OT_N) | FLAG (OT_RS) | MEDIAL_FLAGS | FLAG (OT_H))))
{
info[i].indic_position() = last_pos;
if (unlikely (info[i].indic_category() == OT_H &&

View File

@ -62,17 +62,19 @@ enum indic_category_t {
OT_Coeng = 14, /* Khmer-style Virama. */
OT_Repha = 15, /* Atomically-encoded logical or visual repha. */
OT_Ra = 16,
OT_CM = 17, /* Consonant-Medial; Unused by Indic shaper. */
OT_CM = 17, /* Consonant-Medial. */
OT_Symbol = 18, /* Avagraha, etc that take marks (SM,A,VD). */
OT_CS = 19
};
#define MEDIAL_FLAGS (FLAG (OT_CM))
/* Note:
*
* We treat Vowels and placeholders as if they were consonants. This is safe because Vowels
* cannot happen in a consonant syllable. The plus side however is, we can call the
* consonant syllable logic from the vowel syllable function and get it all right! */
#define CONSONANT_FLAGS (FLAG (OT_C) | FLAG (OT_CS) | FLAG (OT_Ra) | FLAG (OT_V) | FLAG (OT_PLACEHOLDER) | FLAG (OT_DOTTEDCIRCLE))
#define CONSONANT_FLAGS (FLAG (OT_C) | FLAG (OT_CS) | FLAG (OT_Ra) | MEDIAL_FLAGS | FLAG (OT_V) | FLAG (OT_PLACEHOLDER) | FLAG (OT_DOTTEDCIRCLE))
#define JOINER_FLAGS (FLAG (OT_ZWJ) | FLAG (OT_ZWNJ))
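MEDIAL_FLAGS and CONSONANT_FLAGS are bit sets over indic_category_t, so folding OT_CM into CONSONANT_FLAGS makes a single bitwise AND treat medials as consonants. A standalone sketch of the idiom with a stand-in FLAG macro (the real macro also guards the shift width):

// Sketch of the category-flag test used by the Indic shaper.
// FLAG here is a stand-in; the real macro lives in the shaper sources.
#include <cassert>

#define FLAG(x) (1u << (x))

enum indic_category_toy_t { OT_C_toy = 1, OT_V_toy = 2, OT_Ra_toy = 16, OT_CM_toy = 17 };

#define MEDIAL_FLAGS_TOY    (FLAG (OT_CM_toy))
#define CONSONANT_FLAGS_TOY (FLAG (OT_C_toy) | FLAG (OT_Ra_toy) | MEDIAL_FLAGS_TOY | FLAG (OT_V_toy))

int main ()
{
  unsigned category = OT_CM_toy;                       // a medial consonant
  assert (FLAG (category) & CONSONANT_FLAGS_TOY);      // now counted as a consonant
  assert (!(FLAG (OT_V_toy) & MEDIAL_FLAGS_TOY));
  return 0;
}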

View File

@ -35,29 +35,27 @@
#line 36 "hb-ot-shape-complex-khmer-machine.hh"
static const unsigned char _khmer_syllable_machine_trans_keys[] = {
5u, 26u, 5u, 21u, 5u, 26u, 5u, 21u, 1u, 16u, 5u, 21u, 5u, 26u, 5u, 21u,
5u, 26u, 5u, 21u, 1u, 16u, 5u, 21u, 5u, 26u, 5u, 21u, 1u, 16u, 5u, 21u,
5u, 26u, 5u, 21u, 5u, 26u, 5u, 21u, 5u, 26u, 1u, 16u, 1u, 29u, 5u, 29u,
5u, 29u, 5u, 29u, 22u, 22u, 5u, 22u, 5u, 29u, 5u, 29u, 5u, 29u, 5u, 26u,
5u, 29u, 5u, 29u, 22u, 22u, 5u, 22u, 5u, 29u, 5u, 29u, 1u, 16u, 5u, 29u,
5u, 29u, 0
5u, 26u, 5u, 21u, 5u, 21u, 5u, 26u, 5u, 21u, 1u, 16u, 5u, 21u, 5u, 26u,
5u, 21u, 5u, 26u, 5u, 21u, 5u, 26u, 1u, 29u, 5u, 29u, 5u, 29u, 5u, 29u,
22u, 22u, 5u, 22u, 5u, 29u, 5u, 29u, 5u, 29u, 1u, 16u, 5u, 26u, 5u, 29u,
5u, 29u, 22u, 22u, 5u, 22u, 5u, 29u, 5u, 29u, 1u, 16u, 5u, 29u, 5u, 29u,
0
};
static const char _khmer_syllable_machine_key_spans[] = {
22, 17, 22, 17, 16, 17, 22, 17,
22, 17, 16, 17, 22, 17, 16, 17,
22, 17, 22, 17, 22, 16, 29, 25,
25, 25, 1, 18, 25, 25, 25, 22,
25, 25, 1, 18, 25, 25, 16, 25,
25
22, 17, 17, 22, 17, 16, 17, 22,
17, 22, 17, 22, 29, 25, 25, 25,
1, 18, 25, 25, 25, 16, 22, 25,
25, 1, 18, 25, 25, 16, 25, 25
};
static const short _khmer_syllable_machine_index_offsets[] = {
0, 23, 41, 64, 82, 99, 117, 140,
158, 181, 199, 216, 234, 257, 275, 292,
310, 333, 351, 374, 392, 415, 432, 462,
488, 514, 540, 542, 561, 587, 613, 639,
662, 688, 714, 716, 735, 761, 787, 804,
830
158, 181, 199, 217, 240, 258, 275, 293,
316, 334, 357, 375, 398, 428, 454, 480,
506, 508, 527, 553, 579, 605, 622, 645,
671, 697, 699, 718, 744, 770, 787, 813
};
static const char _khmer_syllable_machine_indicies[] = {
@ -85,142 +83,136 @@ static const char _khmer_syllable_machine_indicies[] = {
0, 0, 0, 0, 0, 0, 12, 0,
0, 0, 0, 4, 0, 11, 11, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 12, 0, 13,
13, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 13, 0,
15, 15, 14, 14, 14, 14, 14, 14,
14, 14, 14, 14, 14, 14, 14, 14,
16, 14, 15, 15, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 16, 17, 17, 17, 17, 18,
17, 19, 19, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17,
17, 18, 17, 20, 20, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 20, 17, 21, 21, 17, 17,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 22, 17, 23, 23,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 24, 17,
17, 17, 17, 18, 17, 23, 23, 17,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 24, 17, 25,
25, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 26,
17, 17, 17, 17, 18, 17, 25, 25,
17, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 26, 17,
15, 15, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 17, 27,
16, 17, 17, 17, 17, 18, 17, 28,
28, 17, 17, 17, 17, 17, 17, 17,
17, 17, 17, 17, 17, 17, 28, 17,
13, 13, 29, 29, 30, 30, 29, 29,
29, 29, 2, 2, 29, 31, 29, 13,
29, 29, 29, 29, 16, 20, 29, 29,
29, 18, 24, 26, 22, 29, 33, 33,
32, 32, 32, 32, 32, 32, 32, 34,
32, 32, 32, 32, 32, 2, 3, 6,
32, 32, 32, 4, 10, 12, 8, 32,
35, 35, 32, 32, 32, 32, 32, 32,
32, 36, 32, 32, 32, 32, 32, 32,
3, 6, 32, 32, 32, 4, 10, 12,
8, 32, 5, 5, 32, 32, 32, 32,
32, 32, 32, 36, 32, 32, 32, 32,
32, 32, 4, 6, 32, 32, 32, 32,
32, 32, 8, 32, 6, 32, 7, 7,
32, 32, 32, 32, 32, 32, 32, 36,
32, 32, 32, 32, 32, 32, 8, 6,
32, 37, 37, 32, 32, 32, 32, 32,
32, 32, 36, 32, 32, 32, 32, 32,
32, 10, 6, 32, 32, 32, 4, 32,
32, 8, 32, 38, 38, 32, 32, 32,
32, 32, 32, 32, 36, 32, 32, 32,
32, 32, 32, 12, 6, 32, 32, 32,
4, 10, 32, 8, 32, 35, 35, 32,
32, 32, 32, 32, 32, 32, 34, 32,
32, 32, 32, 32, 32, 3, 6, 32,
32, 32, 4, 10, 12, 8, 32, 15,
15, 39, 39, 39, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 16,
39, 39, 39, 39, 18, 39, 41, 41,
40, 40, 40, 40, 40, 40, 40, 42,
40, 40, 40, 40, 40, 40, 16, 20,
40, 40, 40, 18, 24, 26, 22, 40,
19, 19, 40, 40, 40, 40, 40, 40,
40, 42, 40, 40, 40, 40, 40, 40,
18, 20, 40, 40, 40, 40, 40, 40,
22, 40, 20, 40, 21, 21, 40, 40,
40, 40, 40, 40, 40, 42, 40, 40,
40, 40, 40, 40, 22, 20, 40, 43,
43, 40, 40, 40, 40, 40, 40, 40,
42, 40, 40, 40, 40, 40, 40, 24,
20, 40, 40, 40, 18, 40, 40, 22,
40, 44, 44, 40, 40, 40, 40, 40,
40, 40, 42, 40, 40, 40, 40, 40,
40, 26, 20, 40, 40, 40, 18, 24,
40, 22, 40, 28, 28, 39, 39, 39,
0, 0, 0, 0, 0, 12, 0, 14,
14, 13, 13, 13, 13, 13, 13, 13,
13, 13, 13, 13, 13, 13, 13, 15,
13, 14, 14, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 15, 16, 16, 16, 16, 17, 16,
18, 18, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16,
17, 16, 19, 19, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 19, 16, 20, 20, 16, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 21, 16, 22, 22, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 23, 16, 16,
16, 16, 17, 16, 22, 22, 16, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 23, 16, 24, 24,
16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 25, 16,
16, 16, 16, 17, 16, 24, 24, 16,
16, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 25, 16, 14,
14, 16, 16, 16, 16, 16, 16, 16,
16, 16, 16, 16, 16, 16, 26, 15,
16, 16, 16, 16, 17, 16, 28, 28,
27, 27, 29, 29, 27, 27, 27, 27,
2, 2, 27, 30, 27, 28, 27, 27,
27, 27, 15, 19, 27, 27, 27, 17,
23, 25, 21, 27, 32, 32, 31, 31,
31, 31, 31, 31, 31, 33, 31, 31,
31, 31, 31, 2, 3, 6, 31, 31,
31, 4, 10, 12, 8, 31, 34, 34,
31, 31, 31, 31, 31, 31, 31, 35,
31, 31, 31, 31, 31, 31, 3, 6,
31, 31, 31, 4, 10, 12, 8, 31,
5, 5, 31, 31, 31, 31, 31, 31,
31, 35, 31, 31, 31, 31, 31, 31,
4, 6, 31, 31, 31, 31, 31, 31,
8, 31, 6, 31, 7, 7, 31, 31,
31, 31, 31, 31, 31, 35, 31, 31,
31, 31, 31, 31, 8, 6, 31, 36,
36, 31, 31, 31, 31, 31, 31, 31,
35, 31, 31, 31, 31, 31, 31, 10,
6, 31, 31, 31, 4, 31, 31, 8,
31, 37, 37, 31, 31, 31, 31, 31,
31, 31, 35, 31, 31, 31, 31, 31,
31, 12, 6, 31, 31, 31, 4, 10,
31, 8, 31, 34, 34, 31, 31, 31,
31, 31, 31, 31, 33, 31, 31, 31,
31, 31, 31, 3, 6, 31, 31, 31,
4, 10, 12, 8, 31, 28, 28, 31,
31, 31, 31, 31, 31, 31, 31, 31,
31, 31, 31, 31, 28, 31, 14, 14,
38, 38, 38, 38, 38, 38, 38, 38,
38, 38, 38, 38, 38, 38, 15, 38,
38, 38, 38, 17, 38, 40, 40, 39,
39, 39, 39, 39, 39, 39, 41, 39,
39, 39, 39, 39, 39, 15, 19, 39,
39, 39, 17, 23, 25, 21, 39, 18,
18, 39, 39, 39, 39, 39, 39, 39,
41, 39, 39, 39, 39, 39, 39, 17,
19, 39, 39, 39, 39, 39, 39, 21,
39, 19, 39, 20, 20, 39, 39, 39,
39, 39, 39, 39, 41, 39, 39, 39,
39, 39, 39, 21, 19, 39, 42, 42,
39, 39, 39, 39, 39, 39, 39, 41,
39, 39, 39, 39, 39, 39, 23, 19,
39, 39, 39, 17, 39, 39, 21, 39,
43, 43, 39, 39, 39, 39, 39, 39,
39, 41, 39, 39, 39, 39, 39, 39,
25, 19, 39, 39, 39, 17, 23, 39,
21, 39, 44, 44, 39, 39, 39, 39,
39, 39, 39, 39, 39, 39, 39, 39,
39, 39, 28, 39, 45, 45, 40, 40,
40, 40, 40, 40, 40, 46, 40, 40,
40, 40, 40, 27, 16, 20, 40, 40,
40, 18, 24, 26, 22, 40, 41, 41,
40, 40, 40, 40, 40, 40, 40, 46,
40, 40, 40, 40, 40, 40, 16, 20,
40, 40, 40, 18, 24, 26, 22, 40,
0
39, 44, 39, 45, 45, 39, 39, 39,
39, 39, 39, 39, 30, 39, 39, 39,
39, 39, 26, 15, 19, 39, 39, 39,
17, 23, 25, 21, 39, 40, 40, 39,
39, 39, 39, 39, 39, 39, 30, 39,
39, 39, 39, 39, 39, 15, 19, 39,
39, 39, 17, 23, 25, 21, 39, 0
};
static const char _khmer_syllable_machine_trans_targs[] = {
22, 1, 30, 24, 25, 3, 26, 5,
27, 7, 28, 9, 29, 23, 22, 11,
32, 22, 33, 13, 34, 15, 35, 17,
36, 19, 37, 40, 39, 22, 31, 38,
22, 0, 10, 2, 4, 6, 8, 22,
22, 12, 14, 16, 18, 20, 21
20, 1, 28, 22, 23, 3, 24, 5,
25, 7, 26, 9, 27, 20, 10, 31,
20, 32, 12, 33, 14, 34, 16, 35,
18, 36, 39, 20, 21, 30, 37, 20,
0, 29, 2, 4, 6, 8, 20, 20,
11, 13, 15, 17, 38, 19
};
static const char _khmer_syllable_machine_trans_actions[] = {
1, 0, 2, 2, 2, 0, 0, 0,
2, 0, 2, 0, 2, 2, 3, 0,
4, 5, 2, 0, 0, 0, 2, 0,
2, 0, 2, 4, 4, 8, 9, 0,
10, 0, 0, 0, 0, 0, 0, 11,
12, 0, 0, 0, 0, 0, 0
2, 0, 2, 0, 2, 3, 0, 4,
5, 2, 0, 0, 0, 2, 0, 2,
0, 2, 4, 8, 2, 9, 0, 10,
0, 0, 0, 0, 0, 0, 11, 12,
0, 0, 0, 0, 4, 0
};
static const char _khmer_syllable_machine_to_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 6, 0,
0, 0, 0, 0, 6, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0
0, 0, 0, 0, 0, 0, 0, 0
};
static const char _khmer_syllable_machine_from_state_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 7, 0,
0, 0, 0, 0, 7, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0
0, 0, 0, 0, 0, 0, 0, 0
};
static const unsigned char _khmer_syllable_machine_eof_trans[] = {
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 15, 18, 18, 18, 18,
18, 18, 18, 18, 18, 18, 0, 33,
33, 33, 33, 33, 33, 33, 33, 40,
41, 41, 41, 41, 41, 41, 40, 41,
41
1, 1, 14, 17, 17, 17, 17, 17,
17, 17, 17, 17, 0, 32, 32, 32,
32, 32, 32, 32, 32, 32, 39, 40,
40, 40, 40, 40, 40, 40, 40, 40
};
static const int khmer_syllable_machine_start = 22;
static const int khmer_syllable_machine_first_final = 22;
static const int khmer_syllable_machine_start = 20;
static const int khmer_syllable_machine_first_final = 20;
static const int khmer_syllable_machine_error = -1;
static const int khmer_syllable_machine_en_main = 22;
static const int khmer_syllable_machine_en_main = 20;
#line 36 "hb-ot-shape-complex-khmer-machine.rl"
@ -246,7 +238,7 @@ find_syllables (hb_buffer_t *buffer)
int cs;
hb_glyph_info_t *info = buffer->info;
#line 250 "hb-ot-shape-complex-khmer-machine.hh"
#line 242 "hb-ot-shape-complex-khmer-machine.hh"
{
cs = khmer_syllable_machine_start;
ts = 0;
@ -262,7 +254,7 @@ find_syllables (hb_buffer_t *buffer)
unsigned int syllable_serial = 1;
#line 266 "hb-ot-shape-complex-khmer-machine.hh"
#line 258 "hb-ot-shape-complex-khmer-machine.hh"
{
int _slen;
int _trans;
@ -276,7 +268,7 @@ _resume:
#line 1 "NONE"
{ts = p;}
break;
#line 280 "hb-ot-shape-complex-khmer-machine.hh"
#line 272 "hb-ot-shape-complex-khmer-machine.hh"
}
_keys = _khmer_syllable_machine_trans_keys + (cs<<1);
@ -346,7 +338,7 @@ _eof_trans:
#line 76 "hb-ot-shape-complex-khmer-machine.rl"
{act = 3;}
break;
#line 350 "hb-ot-shape-complex-khmer-machine.hh"
#line 342 "hb-ot-shape-complex-khmer-machine.hh"
}
_again:
@ -355,7 +347,7 @@ _again:
#line 1 "NONE"
{ts = 0;}
break;
#line 359 "hb-ot-shape-complex-khmer-machine.hh"
#line 351 "hb-ot-shape-complex-khmer-machine.hh"
}
if ( ++p != pe )

View File

@ -66,7 +66,7 @@ matra_group = VPre? xgroup VBlw? xgroup (joiner?.VAbv)? xgroup VPst?;
syllable_tail = xgroup matra_group xgroup (Coeng.c)? ygroup;
broken_cluster = (Coeng.cn)* syllable_tail;
broken_cluster = (Coeng.cn)* (Coeng | syllable_tail);
consonant_syllable = (cn|PLACEHOLDER|DOTTEDCIRCLE) broken_cluster;
other = any;

View File

@ -171,24 +171,6 @@ hb_ot_tag_to_script (hb_tag_t tag)
/* hb_language_t */
static int
lang_compare_first_component (const void *pa,
const void *pb)
{
const char *a = (const char *) pa;
const char *b = (const char *) pb;
unsigned int da, db;
const char *p;
p = strchr (a, '-');
da = p ? (unsigned int) (p - a) : strlen (a);
p = strchr (b, '-');
db = p ? (unsigned int) (p - b) : strlen (b);
return strncmp (a, b, MAX (da, db));
}
static bool
subtag_matches (const char *lang_str,
const char *limit,
@ -213,10 +195,28 @@ lang_matches (const char *lang_str, const char *spec)
(lang_str[len] == '\0' || lang_str[len] == '-');
}
typedef struct {
struct LangTag
{
char language[4];
hb_tag_t tags[HB_OT_MAX_TAGS_PER_LANGUAGE];
} LangTag;
int cmp (const char *a) const
{
const char *b = this->language;
unsigned int da, db;
const char *p;
p = strchr (a, '-');
da = p ? (unsigned int) (p - a) : strlen (a);
p = strchr (b, '-');
db = p ? (unsigned int) (p - b) : strlen (b);
return strncmp (a, b, MAX (da, db));
}
int cmp (const LangTag *that) const
{ return cmp (that->language); }
};
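With the comparison moved into LangTag::cmp, the generic hb_sorted_array ().bsearch (lang_str) can drive the lookup: the search only needs each element to order itself against the probe key. A toy keyed binary search over structs with such a cmp member (names invented):

// Toy version of bsearch keyed through an element's cmp() method.
#include <cassert>
#include <cstring>

struct LangTagToy
{
  char language[4];
  // <0 / 0 / >0 depending on how the probe string compares to this entry.
  int cmp (const char *probe) const { return strcmp (probe, language); }
};

template <typename T, unsigned N>
static const T *bsearch_by_cmp (const char *key, const T (&arr)[N])
{
  unsigned lo = 0, hi = N;
  while (lo < hi)
  {
    unsigned mid = (lo + hi) / 2;
    int c = arr[mid].cmp (key);
    if (c == 0) return &arr[mid];
    if (c < 0) hi = mid; else lo = mid + 1;
  }
  return nullptr;
}

int main ()
{
  static const LangTagToy langs[] = {{"de"}, {"en"}, {"fa"}};   // sorted
  const LangTagToy *hit = bsearch_by_cmp ("en", langs);
  assert (hit && strcmp (hit->language, "en") == 0);
  assert (!bsearch_by_cmp ("zz", langs));
  return 0;
}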
#include "hb-ot-tag-table.hh"
@ -263,9 +263,7 @@ hb_ot_tags_from_language (const char *lang_str,
ISALPHA (s[1]))
lang_str = s + 1;
}
lang_tag = (LangTag *) bsearch (lang_str, ot_languages,
ARRAY_LENGTH (ot_languages), sizeof (LangTag),
lang_compare_first_component);
lang_tag = hb_sorted_array (ot_languages).bsearch (lang_str);
if (lang_tag)
{
unsigned int i;
@ -507,7 +505,7 @@ test_langs_sorted ()
{
for (unsigned int i = 1; i < ARRAY_LENGTH (ot_languages); i++)
{
int c = lang_compare_first_component (ot_languages[i-1].language, ot_languages[i].language);
int c = ot_languages[i].cmp (&ot_languages[i - 1]);
if (c >= 0)
{
fprintf (stderr, "ot_languages not sorted at index %d: %s %d %s\n",

View File

@ -81,11 +81,11 @@ struct VORG
if (unlikely (!c.extend_min (*subset_table)))
return false;
subset_table->version.major.set (1);
subset_table->version.minor.set (0);
subset_table->version.major = 1;
subset_table->version.minor = 0;
subset_table->defaultVertOriginY.set (vorg_table->defaultVertOriginY);
subset_table->vertYOrigins.len.set (subset_metrics.length);
subset_table->defaultVertOriginY = vorg_table->defaultVertOriginY;
subset_table->vertYOrigins.len = subset_metrics.length;
bool success = true;
if (subset_metrics.length > 0)
@ -130,8 +130,8 @@ struct VORG
if (plan->new_gid_for_old_gid (old_glyph, &new_glyph))
{
VertOriginMetric *metrics = subset_metrics.push ();
metrics->glyph.set (new_glyph);
metrics->vertOriginY.set (vertYOrigins[i].vertOriginY);
metrics->glyph = new_glyph;
metrics->vertOriginY = vertYOrigins[i].vertOriginY;
}
}
}

View File

@ -69,7 +69,7 @@ struct hb_set_t
void add (hb_codepoint_t g) { elt (g) |= mask (g); }
void del (hb_codepoint_t g) { elt (g) &= ~mask (g); }
bool has (hb_codepoint_t g) const { return !!(elt (g) & mask (g)); }
bool get (hb_codepoint_t g) const { return elt (g) & mask (g); }
void add_range (hb_codepoint_t a, hb_codepoint_t b)
{
@ -186,7 +186,7 @@ struct hb_set_t
hb_object_header_t header;
bool successful; /* Allocations successful */
mutable unsigned int population;
hb_vector_t<page_map_t> page_map;
hb_sorted_vector_t<page_map_t> page_map;
hb_vector_t<page_t> pages;
void init_shallow ()
@ -357,15 +357,26 @@ struct hb_set_t
for (unsigned int i = a; i < b + 1; i++)
del (i);
}
bool has (hb_codepoint_t g) const
bool get (hb_codepoint_t g) const
{
const page_t *page = page_for (g);
if (!page)
return false;
return page->has (g);
return page->get (g);
}
bool intersects (hb_codepoint_t first,
hb_codepoint_t last) const
/* Has interface. */
static constexpr bool SENTINEL = false;
typedef bool value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
/* Sink interface. */
hb_set_t& operator << (hb_codepoint_t v) { add (v); return *this; }
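These operators give hb_set_t the duck-typed has/predicate/sink shape that the iterator pipelines elsewhere in this diff rely on (hb_filter (*c->glyphs, ...), hb_sink (c->output)). A standalone model of generic code that consumes exactly those three operators; the helper name is illustrative, not the hb-iter API:

// Minimal model of a set exposing predicate/sink operators, and a generic
// filter-into-sink helper that only relies on those operators.
#include <cassert>
#include <set>
#include <vector>

struct IntSet
{
  std::set<unsigned> s;
  void add (unsigned v) { s.insert (v); }
  bool get (unsigned v) const { return s.count (v); }

  bool operator [] (unsigned v) const { return get (v); }        // "has" interface
  bool operator () (unsigned v) const { return (*this)[v]; }     // predicate
  IntSet & operator << (unsigned v) { add (v); return *this; }   // sink
};

template <typename Pred, typename Sink>
static void filter_into (const std::vector<unsigned> &src, const Pred &keep, Sink &out)
{
  for (unsigned v : src)
    if (keep (v))      // uses operator ()
      out << v;        // uses operator <<
}

int main ()
{
  IntSet keep;  keep << 2 << 4;
  IntSet out;
  filter_into ({1, 2, 3, 4}, keep, out);
  assert (out[2] && out[4] && !out[1] && !out[3]);
  return 0;
}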
bool intersects (hb_codepoint_t first, hb_codepoint_t last) const
{
hb_codepoint_t c = first - 1;
return next (&c) && c <= last;
@ -671,27 +682,26 @@ struct hb_set_t
/*
* Iterator implementation.
*/
struct const_iter_t : hb_sorted_iter_t<const_iter_t, const hb_codepoint_t>
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
{
const_iter_t (const hb_set_t &s_) :
s (s_), v (INVALID), l (s.get_population () + 1) { __next__ (); }
static constexpr bool is_sorted_iterator = true;
iter_t (const hb_set_t &s_ = Null(hb_set_t)) :
s (&s_), v (INVALID), l (s->get_population () + 1) { __next__ (); }
typedef hb_codepoint_t __item_type__;
typedef hb_codepoint_t __item_t__;
hb_codepoint_t __item__ () const { return v; }
bool __more__ () const { return v != INVALID; }
void __next__ () { s.next (&v); if (l) l--; }
void __prev__ () { s.previous (&v); }
unsigned __len__ () { return l; }
void __next__ () { s->next (&v); if (l) l--; }
void __prev__ () { s->previous (&v); }
unsigned __len__ () const { return l; }
protected:
const hb_set_t &s;
const hb_set_t *s;
hb_codepoint_t v;
unsigned l;
};
const_iter_t const_iter () const { return const_iter_t (*this); }
operator const_iter_t () const { return const_iter (); }
typedef const_iter_t iter_t;
iter_t iter () const { return const_iter (); }
iter_t iter () const { return iter_t (*this); }
operator iter_t () const { return iter (); }
protected:

View File

@ -153,13 +153,13 @@ serialize_fdselect_3_4 (hb_serialize_context_t *c,
TRACE_SERIALIZE (this);
FDSELECT3_4 *p = c->allocate_size<FDSELECT3_4> (size);
if (unlikely (p == nullptr)) return_trace (false);
p->nRanges ().set (fdselect_ranges.length);
p->nRanges () = fdselect_ranges.length;
for (unsigned int i = 0; i < fdselect_ranges.length; i++)
{
p->ranges[i].first.set (fdselect_ranges[i].glyph);
p->ranges[i].fd.set (fdselect_ranges[i].code);
p->ranges[i].first = fdselect_ranges[i].glyph;
p->ranges[i].fd = fdselect_ranges[i].code;
}
p->sentinel().set (num_glyphs);
p->sentinel() = num_glyphs;
return_trace (true);
}
@ -179,7 +179,7 @@ hb_serialize_cff_fdselect (hb_serialize_context_t *c,
TRACE_SERIALIZE (this);
FDSelect *p = c->allocate_min<FDSelect> ();
if (unlikely (p == nullptr)) return_trace (false);
p->format.set (fdselect_format);
p->format = fdselect_format;
size -= FDSelect::min_size;
switch (fdselect_format)
@ -198,7 +198,7 @@ hb_serialize_cff_fdselect (hb_serialize_context_t *c,
{
fd = fdselect_ranges[range_index++].code;
}
p->fds[i].set (fd);
p->fds[i] = fd;
}
break;
}

View File

@ -209,7 +209,7 @@ struct cff_font_dict_op_serializer_t : op_serializer_t
/* serialize the opcode */
HBUINT8 *p = c->allocate_size<HBUINT8> (1);
if (unlikely (p == nullptr)) return_trace (false);
p->set (OpCode_Private);
*p = OpCode_Private;
return_trace (true);
}

View File

@ -147,7 +147,7 @@ struct cff1_top_dict_op_serializer_t : cff_top_dict_op_serializer_t<cff1_top_dic
return_trace (false);
HBUINT8 *p = c->allocate_size<HBUINT8> (1);
if (unlikely (p == nullptr)) return_trace (false);
p->set (OpCode_Private);
*p = OpCode_Private;
}
break;
@ -889,10 +889,10 @@ static inline bool _write_cff1 (const cff_subset_plan &plan,
return false;
/* header */
cff->version.major.set (0x01);
cff->version.minor.set (0x00);
cff->nameIndex.set (cff->min_size);
cff->offSize.set (4); /* unused? */
cff->version.major = 0x01;
cff->version.minor = 0x00;
cff->nameIndex = cff->min_size;
cff->offSize = 4; /* unused? */
/* name INDEX */
{

View File

@ -449,14 +449,14 @@ static inline bool _write_cff2 (const cff2_subset_plan &plan,
return false;
/* header */
cff2->version.major.set (0x02);
cff2->version.minor.set (0x00);
cff2->topDict.set (OT::cff2::static_size);
cff2->version.major = 0x02;
cff2->version.minor = 0x00;
cff2->topDict = OT::cff2::static_size;
/* top dict */
{
assert (cff2->topDict == (unsigned) (c.head - c.start));
cff2->topDictSize.set (plan.offsets.topDictInfo.size);
cff2->topDictSize = plan.offsets.topDictInfo.size;
TopDict &dict = cff2 + cff2->topDict;
cff2_top_dict_op_serializer_t topSzr;
if (unlikely (!dict.serialize (&c, acc.topDict, topSzr, plan.offsets)))

View File

@ -43,9 +43,9 @@ struct loca_data_t
if ((id + 1) * entry_size <= size)
{
if (is_short) {
((OT::HBUINT16*) data) [id].set (offset / 2);
((OT::HBUINT16*) data) [id] = offset / 2;
} else {
((OT::HBUINT32*) data) [id].set (offset);
((OT::HBUINT32*) data) [id] = offset;
}
return true;
}
@ -164,7 +164,7 @@ _update_components (const hb_subset_plan_t *plan,
&new_gid))
continue;
((OT::glyf::CompositeGlyphHeader *) iterator.current)->glyphIndex.set (new_gid);
((OT::glyf::CompositeGlyphHeader *) iterator.current)->glyphIndex = new_gid;
} while (iterator.move_to_next ());
}
}
@ -178,7 +178,7 @@ static bool _remove_composite_instruction_flag (char *glyf_prime, unsigned int l
do {
glyph = composite_it.current;
OT::HBUINT16 *flags = const_cast<OT::HBUINT16 *> (&glyph->flags);
flags->set ( (uint16_t) *flags & ~OT::glyf::CompositeGlyphHeader::WE_HAVE_INSTRUCTIONS);
*flags = (uint16_t) *flags & ~OT::glyf::CompositeGlyphHeader::WE_HAVE_INSTRUCTIONS;
} while (composite_it.move_to_next ());
return true;
}

View File

@ -152,7 +152,7 @@ hb_subset_input_get_desubroutinize (hb_subset_input_t *subset_input)
* hb_subset_input_set_retain_gids:
* @subset_input: a subset_input.
* @retain_gids: If true the subsetter will not renumber glyph ids.
* Since: REPLACEME
* Since: 2.4.0
**/
HB_EXTERN void
hb_subset_input_set_retain_gids (hb_subset_input_t *subset_input,
@ -164,7 +164,7 @@ hb_subset_input_set_retain_gids (hb_subset_input_t *subset_input,
/**
* hb_subset_input_get_retain_gids:
* Returns: value of retain_gids.
* Since: REPLACEME
* Since: 2.4.0
**/
HB_EXTERN hb_bool_t
hb_subset_input_get_retain_gids (hb_subset_input_t *subset_input)
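For context, a hedged usage sketch of the new retain-gids knob through the public entry points; hb_subset_input_create_or_fail, hb_subset_input_unicode_set and hb_subset are assumed from the hb-subset.h of this release, and error handling is trimmed:

// Hedged sketch: subset a face while keeping original glyph ids.
// Assumes the hb-subset.h API of this release (hb_subset_input_*, hb_subset).
#include "hb-subset.h"

static hb_face_t *
subset_keeping_gids (hb_face_t *face, hb_set_t *unicodes)
{
  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
  if (!input) return nullptr;

  hb_set_union (hb_subset_input_unicode_set (input), unicodes);
  hb_subset_input_set_retain_gids (input, true);     // the new knob documented above

  hb_face_t *subset = hb_subset (face, input);       // check the result before using;
  hb_subset_input_destroy (input);                   // failure handling omitted here
  return subset;
}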

View File

@ -396,18 +396,18 @@ _hb_rename_font (hb_blob_t *blob, wchar_t *new_name)
memcpy(new_sfnt_data, orig_sfnt_data, length);
OT::name &name = StructAtOffset<OT::name> (new_sfnt_data, name_table_offset);
name.format.set (0);
name.count.set (ARRAY_LENGTH (name_IDs));
name.stringOffset.set (name.get_size ());
name.format = 0;
name.count = ARRAY_LENGTH (name_IDs);
name.stringOffset = name.get_size ();
for (unsigned int i = 0; i < ARRAY_LENGTH (name_IDs); i++)
{
OT::NameRecord &record = name.nameRecordZ[i];
record.platformID.set (3);
record.encodingID.set (1);
record.languageID.set (0x0409u); /* English */
record.nameID.set (name_IDs[i]);
record.length.set (name_str_len * 2);
record.offset.set (0);
record.platformID = 3;
record.encodingID = 1;
record.languageID = 0x0409u; /* English */
record.nameID = name_IDs[i];
record.length = name_str_len * 2;
record.offset = 0;
}
/* Copy string data from new_name, converting wchar_t to UTF16BE. */
@ -431,8 +431,8 @@ _hb_rename_font (hb_blob_t *blob, wchar_t *new_name)
{
OT::TableRecord &record = const_cast<OT::TableRecord &> (face.get_table (index));
record.checkSum.set_for_data (&name, padded_name_table_length);
record.offset.set (name_table_offset);
record.length.set (name_table_length);
record.offset = name_table_offset;
record.length = name_table_length;
}
else if (face_index == 0) /* Fail if first face doesn't have 'name' table. */
{

View File

@ -87,12 +87,22 @@ struct hb_vector_t
return arrayZ()[i];
}
explicit_operator bool () const { return length; }
explicit operator bool () const { return length; }
hb_array_t<Type> as_array ()
{ return hb_array (arrayZ(), length); }
hb_array_t<const Type> as_array () const
{ return hb_array (arrayZ(), length); }
/* Sink interface. */
template <typename T>
hb_vector_t& operator << (const T& v) { push (v); return *this; }
hb_array_t< Type> as_array () { return hb_array (arrayZ(), length); }
hb_array_t<const Type> as_array () const { return hb_array (arrayZ(), length); }
/* Iterator. */
typedef hb_array_t<const Type> iter_t;
typedef hb_array_t< Type> writer_t;
iter_t iter () const { return as_array (); }
writer_t writer () { return as_array (); }
operator iter_t () const { return iter (); }
operator writer_t () { return writer (); }
hb_array_t<const Type> sub_array (unsigned int start_offset, unsigned int count) const
{ return as_array ().sub_array (start_offset, count);}
@ -108,19 +118,8 @@ struct hb_vector_t
hb_sorted_array_t<const Type> as_sorted_array () const
{ return hb_sorted_array (arrayZ(), length); }
hb_array_t<const Type> sorted_sub_array (unsigned int start_offset, unsigned int count) const
{ return as_sorted_array ().sorted_sub_array (start_offset, count);}
hb_array_t<const Type> sorted_sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */) const
{ return as_sorted_array ().sorted_sub_array (start_offset, count);}
hb_array_t<Type> sorted_sub_array (unsigned int start_offset, unsigned int count)
{ return as_sorted_array ().sorted_sub_array (start_offset, count);}
hb_array_t<Type> sorted_sub_array (unsigned int start_offset, unsigned int *count = nullptr /* IN/OUT */)
{ return as_sorted_array ().sorted_sub_array (start_offset, count);}
template <typename T> explicit_operator T * () { return arrayZ(); }
template <typename T> explicit_operator const T * () const { return arrayZ(); }
operator hb_array_t<Type> () { return as_array (); }
operator hb_array_t<const Type> () const { return as_array (); }
template <typename T> explicit operator T * () { return arrayZ(); }
template <typename T> explicit operator const T * () const { return arrayZ(); }
Type * operator + (unsigned int i) { return arrayZ() + i; }
const Type * operator + (unsigned int i) const { return arrayZ() + i; }
@ -131,7 +130,8 @@ struct hb_vector_t
return &Crap(Type);
return &arrayZ()[length - 1];
}
Type *push (const Type& v)
template <typename T>
Type *push (const T& v)
{
Type *p = push ();
*p = v;
@ -242,19 +242,34 @@ struct hb_vector_t
template <typename T>
const Type *lsearch (const T &x, const Type *not_found = nullptr) const
{ return as_array ().lsearch (x, not_found); }
};
template <typename Type>
struct hb_sorted_vector_t : hb_vector_t<Type>
{
hb_sorted_array_t< Type> as_array () { return hb_sorted_array (this->arrayZ(), this->length); }
hb_sorted_array_t<const Type> as_array () const { return hb_sorted_array (this->arrayZ(), this->length); }
/* Iterator. */
typedef hb_sorted_array_t<const Type> const_iter_t;
typedef hb_sorted_array_t< Type> iter_t;
const_iter_t iter () const { return as_array (); }
const_iter_t citer () const { return as_array (); }
iter_t iter () { return as_array (); }
operator iter_t () { return iter (); }
operator const_iter_t () const { return iter (); }
template <typename T>
Type *bsearch (const T &x, Type *not_found = nullptr)
{ return as_sorted_array ().bsearch (x, not_found); }
{ return as_array ().bsearch (x, not_found); }
template <typename T>
const Type *bsearch (const T &x, const Type *not_found = nullptr) const
{ return as_sorted_array ().bsearch (x, not_found); }
{ return as_array ().bsearch (x, not_found); }
template <typename T>
bool bfind (const T &x, unsigned int *i = nullptr,
hb_bfind_not_found_t not_found = HB_BFIND_NOT_FOUND_DONT_STORE,
unsigned int to_store = (unsigned int) -1) const
{ return as_sorted_array ().bfind (x, i, not_found, to_store); }
{ return as_array ().bfind (x, i, not_found, to_store); }
};
#endif /* HB_VECTOR_HH */

View File

@ -37,10 +37,10 @@ HB_BEGIN_DECLS
#define HB_VERSION_MAJOR 2
#define HB_VERSION_MINOR 3
#define HB_VERSION_MICRO 1
#define HB_VERSION_MINOR 4
#define HB_VERSION_MICRO 0
#define HB_VERSION_STRING "2.3.1"
#define HB_VERSION_STRING "2.4.0"
#define HB_VERSION_ATLEAST(major,minor,micro) \
((major)*10000+(minor)*100+(micro) <= \

View File

@ -68,6 +68,7 @@
#pragma GCC diagnostic error "-Wformat-security"
#pragma GCC diagnostic error "-Wimplicit-function-declaration"
#pragma GCC diagnostic error "-Winit-self"
#pragma GCC diagnostic error "-Winjected-class-name"
#pragma GCC diagnostic error "-Wmissing-braces"
#pragma GCC diagnostic error "-Wmissing-declarations"
#pragma GCC diagnostic error "-Wmissing-prototypes"
@ -214,59 +215,6 @@ extern "C" int hb_memalign_impl(void **memptr, size_t alignment, size_t size);
* Compiler attributes
*/
/* https://github.com/harfbuzz/harfbuzz/issues/1634 */
#if __cplusplus < 201103L && !defined(_MSC_VER)
#ifndef nullptr
#define nullptr NULL
#endif
#ifndef constexpr
#define constexpr const
#endif
#ifndef static_assert
#define static_assert(e, msg) \
HB_UNUSED typedef int HB_PASTE(static_assertion_failed_at_line_, __LINE__) [(e) ? 1 : -1]
#endif // static_assert
#if defined(__GNUC__)
#if (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 8))
#define thread_local __thread
#endif
#else
#define thread_local
#endif
template <typename T>
struct _hb_alignof
{
struct s
{
char c;
T t;
};
static constexpr size_t value = offsetof (s, t);
};
#ifndef alignof
#define alignof(x) (_hb_alignof<x>::value)
#endif
/* https://github.com/harfbuzz/harfbuzz/issues/1127 */
#ifndef explicit_operator
#define explicit_operator operator
#endif
#else /* __cplusplus >= 201103L */
/* https://github.com/harfbuzz/harfbuzz/issues/1127 */
#ifndef explicit_operator
#define explicit_operator explicit operator
#endif
#endif /* __cplusplus < 201103L */
#if (defined(__GNUC__) || defined(__clang__)) && defined(__OPTIMIZE__)
#define likely(expr) (__builtin_expect (!!(expr), 1))
#define unlikely(expr) (__builtin_expect (!!(expr), 0))
@ -460,13 +408,6 @@ static_assert ((sizeof (hb_position_t) == 4), "");
static_assert ((sizeof (hb_mask_t) == 4), "");
static_assert ((sizeof (hb_var_int_t) == 4), "");
#if __cplusplus >= 201103L
/* We only enable these with C++11 or later, since earlier language
* does not allow structs with constructors in unions, and we need
* those. */
#define HB_NO_COPY_ASSIGN(TypeName) \
TypeName(const TypeName&); \
void operator=(const TypeName&)
@ -489,17 +430,6 @@ static_assert ((sizeof (hb_var_int_t) == 4), "");
TypeName(const TypeName<T1, T2>&); \
void operator=(const TypeName<T1, T2>&)
#else /* __cpluspplus >= 201103L */
#define HB_NO_COPY_ASSIGN(TypeName) static_assert (true, "")
#define HB_NO_COPY_ASSIGN_TEMPLATE(TypeName, T) static_assert (true, "")
#define HB_NO_COPY_ASSIGN_TEMPLATE2(TypeName, T1, T2) static_assert (true, "")
#define HB_NO_CREATE_COPY_ASSIGN(TypeName) static_assert (true, "")
#define HB_NO_CREATE_COPY_ASSIGN_TEMPLATE(TypeName, T) static_assert (true, "")
#define HB_NO_CREATE_COPY_ASSIGN_TEMPLATE2(TypeName, T1, T2) static_assert (true, "")
#endif /* __cpluspplus >= 201103L */
/*
* Compiler-assisted vectorization parameters.
@ -631,28 +561,17 @@ _hb_memalign(void **memptr, size_t alignment, size_t size)
#define HB_SCRIPT_MYANMAR_ZAWGYI ((hb_script_t) HB_TAG ('Q','a','a','g'))
/* Some really basic things everyone wants. */
template <typename T> struct hb_remove_const { typedef T value; };
template <typename T> struct hb_remove_const<const T> { typedef T value; };
#define hb_remove_const(T) hb_remove_const<T>::value
template <typename T> struct hb_remove_reference { typedef T value; };
template <typename T> struct hb_remove_reference<T &> { typedef T value; };
#define hb_remove_reference(T) hb_remove_reference<T>::value
template <typename T> struct hb_remove_pointer { typedef T value; };
template <typename T> struct hb_remove_pointer<T *> { typedef T value; };
#define hb_remove_pointer(T) hb_remove_pointer<T>::value
/* Headers we include for everyone. Keep topologically sorted by dependency.
* They express dependency amongst themselves, but no other file should include
* them directly.*/
#include "hb-atomic.hh"
#include "hb-meta.hh"
#include "hb-mutex.hh"
#include "hb-null.hh"
#include "hb-dsalgs.hh" // Requires: hb-null
#include "hb-iter.hh" // Requires: hb-null
#include "hb-debug.hh" // Requires: hb-atomic hb-dsalgs
#include "hb-array.hh" // Requires: hb-dsalgs hb-iter hb-null
#include "hb-atomic.hh" // Requires: hb-meta
#include "hb-null.hh" // Requires: hb-meta
#include "hb-algs.hh" // Requires: hb-meta hb-null
#include "hb-iter.hh" // Requires: hb-meta
#include "hb-debug.hh" // Requires: hb-algs hb-atomic
#include "hb-array.hh" // Requires: hb-algs hb-iter hb-null
#include "hb-vector.hh" // Requires: hb-array hb-null
#include "hb-object.hh" // Requires: hb-atomic hb-mutex hb-vector
src/test-algs.cc Normal file
View File
@ -0,0 +1,50 @@
/*
* Copyright © 2019 Facebook, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Facebook Author(s): Behdad Esfahbod
*/
#include "hb.hh"
#include "hb-algs.hh"
int
main (int argc, char **argv)
{
int i = 1;
auto p = hb_pair (1, i);
p.second = 2; /* hb_pair captured i by reference, so this writes through to i. */
assert (i == 2);
const int c = 3;
auto pc = hb_pair (1, c);
assert (pc.second == 3);
auto q = p;
assert (&q != &p);
q.second = 4; /* The copied pair still refers to i. */
assert (i == 4);
return 0;
}
View File
@ -29,19 +29,20 @@
#include "hb-array.hh"
#include "hb-set.hh"
#include "hb-ot-layout-common.hh"
template <typename T>
struct array_iter_t : hb_iter_t<array_iter_t<T>, T>, hb_iter_mixin_t<array_iter_t<T>, T>
struct array_iter_t : hb_iter_with_fallback_t<array_iter_t<T>, T&>
{
array_iter_t (hb_array_t<T> arr_) : arr (arr_) {}
typedef T __item_type__;
typedef T& __item_t__;
static constexpr bool is_random_access_iterator = true;
T& __item_at__ (unsigned i) const { return arr[i]; }
void __forward__ (unsigned n) { arr += n; }
void __rewind__ (unsigned n) { arr -= n; }
unsigned __len__ () const { return arr.length; }
bool __random_access__ () const { return true; }
private:
hb_array_t<T> arr;
@ -61,6 +62,43 @@ struct some_array_t
hb_array_t<T> arr;
};
template <typename Iter,
hb_enable_if (hb_is_iterator (Iter))>
static void
test_iterator (Iter it)
{
Iter default_constructed;
assert (!default_constructed);
/* Iterate over a copy of it. */
for (auto c = it.iter (); c; c++)
*c;
/* Same. */
for (auto c = +it; c; c++)
*c;
it += it.len ();
it = it + 10;
it = 10 + it;
assert (*it == it[0]);
static_assert (true || it.is_random_access_iterator, "");
static_assert (true || it.is_sorted_iterator, "");
}
template <typename Iterable,
hb_enable_if (hb_is_iterable (Iterable))>
static void
test_iterable (const Iterable &lst = Null(Iterable))
{
// Test that can take iterator from.
test_iterator (lst.iter ());
}
int
main (int argc, char **argv)
{
@ -72,13 +110,57 @@ main (int argc, char **argv)
array_iter_t<const int> s2 (v); /* Implicit conversion from vector. */
array_iter_t<int> t (dst);
static_assert (hb_is_random_access_iterator (array_iter_t<int>), "");
some_array_t<const int> a (src);
s2 = s;
hb_iter (src);
hb_iter (src, 2);
hb_fill (t, 42);
hb_copy (t, s);
// hb_copy (t, a.iter ());
test_iterable (v);
hb_set_t st;
test_iterable (st);
hb_sorted_array_t<int> sa;
test_iterable (sa);
test_iterable<hb_array_t<int> > ();
test_iterable<hb_sorted_array_t<const int> > ();
test_iterable<hb_vector_t<float> > ();
test_iterable<hb_set_t> ();
test_iterable<OT::Coverage> ();
test_iterator (hb_zip (st, v));
hb_any (hb_zip (st, v));
hb_array_t<hb_vector_t<int> > pa;
pa->as_array ();
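/* Exercise the iterator pipelines: the source is piped through map/filter
 * stages and then consumed by hb_sink, hb_apply, or hb_drain. */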
+ hb_iter (src)
| hb_map (hb_identity)
| hb_filter ()
| hb_filter (hb_bool)
| hb_filter (hb_bool, hb_identity)
| hb_sink (st)
;
+ hb_iter (src)
| hb_apply (&st)
;
+ hb_iter (src)
| hb_drain
;
t << 1;
long vl;
s >> vl;
return 0;
}
View File
@ -6,3 +6,5 @@
../fonts/1735326da89f0818cd8c51a0600e9789812c0f94.ttf::U+0A51:[uni25CC=0+1044|udaatguru=0+0]
../fonts/1735326da89f0818cd8c51a0600e9789812c0f94.ttf::U+25CC,U+0A51:[uni25CC=0+1044|udaatguru=0+0]
../fonts/81c368a33816fb20e9f647e8f24e2180f4720263.ttf:--no-glyph-names:U+0C80,U+0C82:[1=0+502|2=0+502]
../fonts/f75c4b05a0a4d67c1a808081ae3d74a9c66509e8.ttf::U+0A20,U+0A75,U+0A47:[tthaguru=0+1352|yakashguru=0@-90,0+0|eematraguru=0@-411,0+0]
../fonts/f75c4b05a0a4d67c1a808081ae3d74a9c66509e8.ttf::U+0A20,U+0A75,U+0A42:[tthaguru=0+1352|yakashuuguru=0+0]
View File
@ -87,3 +87,4 @@
../fonts/3998336402905b8be8301ef7f47cf7e050cbb1bd.ttf::U+17A0,U+17D2,U+1782,U+17D2,U+179F,U+17CA,U+17C0:[uni17C1=0+288|uni17A0=0+928|uni17D21782=0@20,-26+0|uni17D2179F.low=0+302|uni17CA=0@-4,30+0|uni17C0.right1.high=0+288]
../fonts/3998336402905b8be8301ef7f47cf7e050cbb1bd.ttf::U+17A0,U+17D2,U+179A,U+17D2,U+179C,U+1784,U+17D2,U+1780:[uni17D2179A=0+287|uni17A0=0+928|uni17D2179C=0@20,-26+0|uni1784=5+635|uni17D21780=5@0,-26+0]
../fonts/3998336402905b8be8301ef7f47cf7e050cbb1bd.ttf::U+17A0,U+17D2,U+179A,U+17D2,U+179C,U+17B6,U+17C6,U+1784:[uni17D2179A=0+287|uni17A017B6=0+1216|uni17D2179C=0@-268,-26+0|uni17C6=0@47,-29+0|uni1784=7+635]
../fonts/ad01ab2ea1cb1a4d3a2783e2675112ef11ae6404.ttf::U+17D2,U+17D2:[uni25CC=0+635|uni17D2=0+0|uni25CC=0+635|uni17D2=0+0]