Merge branch 'master' into var-subset
This commit is contained in:
commit
ea8fdfa079
|
@ -4,7 +4,7 @@ jobs:
|
|||
|
||||
macos-10.12.6-aat-fonts:
|
||||
macos:
|
||||
xcode: "9.2.0"
|
||||
xcode: "9.0.1"
|
||||
steps:
|
||||
- checkout
|
||||
- run: HOMEBREW_NO_AUTO_UPDATE=1 brew install wget autoconf automake libtool pkg-config ragel freetype glib cairo
|
||||
|
@ -98,7 +98,9 @@ jobs:
|
|||
- run: CFLAGS="-O0" CXXFLAGS="-O0" CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-fontconfig --with-glib --with-cairo --with-icu --with-graphite2
|
||||
- run: make -j32
|
||||
- run: LD_LIBRARY_PATH="$PWD/freetype-2.9/objs/.libs" make check || .ci/fail.sh
|
||||
- run: make clean && cd src && clang++ -c hb-*.cc
|
||||
- run: make clean
|
||||
- run: make -Csrc CPPFLAGS="-DHB_TINY -DHB_NO_OT_FONT" libharfbuzz-subset.la && make clean
|
||||
- run: clang -c src/hb-*.cc -DHB_NO_MT
|
||||
|
||||
gcc-valgrind:
|
||||
docker:
|
||||
|
@ -112,7 +114,7 @@ jobs:
|
|||
- run: make -j32
|
||||
# run-shape-fuzzer-tests.py automatically runs valgrind if see available
|
||||
# but test/api runs it by request, we probably should normalize the approaches
|
||||
- run: RUN_VALGRIND=1 make check && make -Ctest/api check-valgrind || .ci/fail.sh
|
||||
- run: HB_TEST_SHAPE_FUZZER_TIMEOUT=3 HB_TEST_SUBSET_FUZZER_TIMEOUT=30 RUN_VALGRIND=1 make check && make -Ctest/api check-valgrind || .ci/fail.sh
|
||||
# informational for now
|
||||
- run: make -Ctest/api check-symbols || true
|
||||
|
||||
|
@ -164,7 +166,7 @@ jobs:
|
|||
- run: wget https://ftp.gnome.org/pub/gnome/sources/glib/2.58/glib-2.58.1.tar.xz && tar xf glib-2.58.1.tar.xz && cd glib-2.58.1 && ./autogen.sh --with-pcre CPPFLAGS="-fsanitize=memory" LDFLAGS="-fsanitize=memory" CFLAGS="-fsanitize=memory" CXXFLAGS="-fsanitize=memory" LD=ld.lld CC=clang CXX=clang++ && make -j32 && make install && cd ..
|
||||
- run: wget http://download.savannah.gnu.org/releases/freetype/freetype-2.9.tar.bz2 && tar xf freetype-2.9.tar.bz2 && cd freetype-2.9 && ./autogen.sh && ./configure CPPFLAGS="-fsanitize=memory" LDFLAGS="-fsanitize=memory -O1 -g -fno-omit-frame-pointer" CFLAGS="-fsanitize=memory -O1 -g -fno-omit-frame-pointer" CXXFLAGS="-fsanitize=memory -O1 -g -fno-omit-frame-pointer" LD=ld.lld CC=clang CXX=clang++ && make -j32 && make install && cd ..
|
||||
- run: CPPFLAGS="-fsanitize=memory -fsanitize-memory-track-origins" LDFLAGS="-fsanitize=memory -fsanitize-memory-track-origins -O1 -g -fno-omit-frame-pointer" CFLAGS="-fsanitize=memory -fsanitize-memory-track-origins -O1 -g -fno-omit-frame-pointer" CXXFLAGS="-fsanitize=memory -fsanitize-memory-track-origins -O1 -g -fno-omit-frame-pointer" LD=ld.lld CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-glib --without-icu
|
||||
- run: make -j32 && MSAN_OPTIONS=exitcode=42 make check || .ci/fail.sh | asan_symbolize | c++filt
|
||||
- run: make -j32 && MSAN_OPTIONS=exitcode=42 HB_TEST_SUBSET_FUZZER_TIMEOUT=12 make check || .ci/fail.sh | asan_symbolize | c++filt
|
||||
|
||||
clang-tsan:
|
||||
docker:
|
||||
|
@ -180,7 +182,7 @@ jobs:
|
|||
- run: pip install fonttools
|
||||
- run: CPPFLAGS="-fsanitize=thread" LDFLAGS="-fsanitize=thread -O1 -g -fno-omit-frame-pointer" CFLAGS="-fsanitize=thread -O1 -g -fno-omit-frame-pointer" CXXFLAGS="-fsanitize=thread -O1 -g -fno-omit-frame-pointer" LD=ld.lld CC=clang CXX=clang++ ./autogen.sh --with-freetype --with-glib --with-cairo --with-icu --with-graphite2
|
||||
- run: make -j32
|
||||
- run: make check || .ci/fail.sh | asan_symbolize | c++filt
|
||||
- run: HB_TEST_SUBSET_FUZZER_TIMEOUT=40 make check || .ci/fail.sh | asan_symbolize | c++filt
|
||||
|
||||
clang-ubsan:
|
||||
docker:
|
||||
|
@ -203,7 +205,7 @@ jobs:
|
|||
- image: fedora
|
||||
steps:
|
||||
- checkout
|
||||
- run: dnf install -y pkg-config ragel gcc gcc-c++ automake autoconf libtool make which glib2-devel freetype-devel cairo-devel libicu-devel gobject-introspection-devel graphite2-devel redhat-rpm-config python mingw32-gcc-c++ mingw64-gcc-c++ mingw32-glib2 mingw32-cairo mingw32-freetype mingw64-glib2 mingw64-cairo mingw64-freetype glibc-devel.i686 || true
|
||||
- run: dnf install -y pkg-config ragel gcc gcc-c++ automake autoconf libtool make which diffutils glib2-devel freetype-devel cairo-devel libicu-devel gobject-introspection-devel graphite2-devel redhat-rpm-config python python-pip mingw32-gcc-c++ mingw64-gcc-c++ mingw32-glib2 mingw32-cairo mingw32-freetype mingw64-glib2 mingw64-cairo mingw64-freetype glibc-devel.i686 || true
|
||||
- run: NOCONFIGURE=1 ./autogen.sh
|
||||
- run: mkdir build && cd build && CFLAGS="-O0" CXXFLAGS="-O0" CPPFLAGS="-DHB_DEBUG" ../configure --with-freetype --with-glib --with-gobject --with-cairo --with-icu --with-graphite2 && make -j32 && (make check || ../.ci/fail.sh)
|
||||
- run: pip install pefile
|
||||
|
|
|
@ -465,6 +465,19 @@ endif ()
|
|||
add_library(harfbuzz ${project_sources} ${project_extra_sources} ${project_headers})
|
||||
target_link_libraries(harfbuzz ${THIRD_PARTY_LIBS})
|
||||
|
||||
|
||||
## Define harfbuzz-icu library
|
||||
if (HB_HAVE_ICU)
|
||||
add_library(harfbuzz-icu ${PROJECT_SOURCE_DIR}/src/hb-icu.cc ${PROJECT_SOURCE_DIR}/src/hb-icu.h)
|
||||
add_dependencies(harfbuzz-icu harfbuzz)
|
||||
target_link_libraries(harfbuzz-icu harfbuzz ${THIRD_PARTY_LIBS})
|
||||
|
||||
if (BUILD_SHARED_LIBS)
|
||||
set_target_properties(harfbuzz harfbuzz-icu PROPERTIES VISIBILITY_INLINES_HIDDEN TRUE)
|
||||
endif ()
|
||||
endif ()
|
||||
|
||||
|
||||
## Define harfbuzz-subset library
|
||||
if (HB_BUILD_SUBSET)
|
||||
add_library(harfbuzz-subset ${subset_project_sources} ${subset_project_headers})
|
||||
|
@ -723,6 +736,14 @@ if (NOT SKIP_INSTALL_LIBRARIES AND NOT SKIP_INSTALL_ALL)
|
|||
NAMESPACE harfbuzz::
|
||||
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/harfbuzz
|
||||
)
|
||||
if (HB_HAVE_ICU)
|
||||
install(TARGETS harfbuzz-icu
|
||||
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR}
|
||||
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
|
||||
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}
|
||||
FRAMEWORK DESTINATION Library/Frameworks
|
||||
)
|
||||
endif ()
|
||||
if (HB_BUILD_UTILS)
|
||||
if (WIN32 AND BUILD_SHARED_LIBS)
|
||||
install(TARGETS harfbuzz-subset
|
||||
|
|
15
NEWS
15
NEWS
|
@ -1,3 +1,18 @@
|
|||
Overview of changes leading to 2.6.4
|
||||
Monday, October 29, 2019
|
||||
====================================
|
||||
- Small bug fix.
|
||||
- Build fixes.
|
||||
|
||||
|
||||
Overview of changes leading to 2.6.3
|
||||
Monday, October 28, 2019
|
||||
====================================
|
||||
- Misc small fixes, mostly to build-related issues.
|
||||
- New API:
|
||||
+hb_font_get_nominal_glyphs()
|
||||
|
||||
|
||||
Overview of changes leading to 2.6.2
|
||||
Monday, September 30, 2019
|
||||
====================================
|
||||
|
|
11
TESTING.md
11
TESTING.md
|
@ -73,3 +73,14 @@ sudo python infra/helper.py build_image harfbuzz
|
|||
sudo python infra/helper.py build_fuzzers --sanitizer address harfbuzz
|
||||
sudo python infra/helper.py run_fuzzer harfbuzz hb-subset-fuzzer
|
||||
```
|
||||
|
||||
## Profiling
|
||||
|
||||
```
|
||||
make clean
|
||||
./configure CXXFLAGS="-fno-omit-frame-pointer -g"
|
||||
make
|
||||
perf record -o <perf output file> -g <command to run>
|
||||
perf report -i<perf output file>
|
||||
```
|
||||
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
AC_PREREQ([2.64])
|
||||
AC_INIT([HarfBuzz],
|
||||
[2.6.2],
|
||||
[2.6.4],
|
||||
[https://github.com/harfbuzz/harfbuzz/issues/new],
|
||||
[harfbuzz],
|
||||
[http://harfbuzz.org/])
|
||||
|
|
|
@ -365,6 +365,8 @@ hb_ft_font_create
|
|||
hb_ft_font_create_referenced
|
||||
hb_ft_font_changed
|
||||
hb_ft_font_get_face
|
||||
hb_ft_font_lock_face
|
||||
hb_ft_font_unlock_face
|
||||
hb_ft_font_set_load_flags
|
||||
hb_ft_font_get_load_flags
|
||||
hb_ft_font_set_funcs
|
||||
|
|
|
@ -290,7 +290,7 @@ ucd-table: gen-ucd-table.py ucd.nounihan.grouped.zip hb-common.h
|
|||
use-table: gen-use-table.py IndicSyllabicCategory.txt IndicPositionalCategory.txt UnicodeData.txt Blocks.txt
|
||||
$(AM_V_GEN) $(builddir)/$^ > $(srcdir)/hb-ot-shape-complex-use-table.cc \
|
||||
|| ($(RM) $(srcdir)/hb-ot-shape-complex-use-table.cc; false)
|
||||
vowel-constraints: gen-vowel-constraints.py HBIndicVowelConstraints.txt Scripts.txt
|
||||
vowel-constraints: gen-vowel-constraints.py ms-use/IndicShapingInvalidCluster.txt Scripts.txt
|
||||
$(AM_V_GEN) $(builddir)/$^ > $(srcdir)/hb-ot-shape-complex-vowel-constraints.cc \
|
||||
|| ($(RM) $(srcdir)/hb-ot-shape-complex-vowel-constraints.cc; false)
|
||||
|
||||
|
|
|
@ -163,7 +163,6 @@ HB_BASE_sources = \
|
|||
hb-unicode.hh \
|
||||
hb-utf.hh \
|
||||
hb-vector.hh \
|
||||
hb-warning.cc \
|
||||
hb.hh \
|
||||
$(NULL)
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ import io
|
|||
import sys
|
||||
|
||||
if len (sys.argv) != 3:
|
||||
print ('usage: ./gen-vowel-constraints.py HBIndicVowelConstraints.txt Scripts.txt', file=sys.stderr)
|
||||
print ('usage: ./gen-vowel-constraints.py ms-use/IndicShapingInvalidCluster.txt Scripts.txt', file=sys.stderr)
|
||||
sys.exit (1)
|
||||
|
||||
with io.open (sys.argv[2], encoding='utf-8') as f:
|
||||
|
@ -84,7 +84,8 @@ class ConstraintSet (object):
|
|||
else:
|
||||
self._c[first] = ConstraintSet (rest)
|
||||
|
||||
def _indent (self, depth):
|
||||
@staticmethod
|
||||
def _indent (depth):
|
||||
return (' ' * depth).replace (' ', '\t')
|
||||
|
||||
def __str__ (self, index=0, depth=4):
|
||||
|
@ -92,17 +93,20 @@ class ConstraintSet (object):
|
|||
indent = self._indent (depth)
|
||||
if isinstance (self._c, list):
|
||||
if len (self._c) == 0:
|
||||
assert index == 2, 'Cannot use `matched` for this constraint; the general case has not been implemented'
|
||||
s.append ('{}matched = true;\n'.format (indent))
|
||||
elif len (self._c) == 1:
|
||||
assert index == 1, 'Cannot use `matched` for this constraint; the general case has not been implemented'
|
||||
s.append ('{}matched = 0x{:04X}u == buffer->cur ({}).codepoint;\n'.format (indent, next (iter (self._c)), index or ''))
|
||||
else:
|
||||
s.append ('{}if (0x{:04X}u == buffer->cur ({}).codepoint &&\n'.format (indent, self._c[0], index))
|
||||
s.append ('{}buffer->idx + {} < count &&\n'.format (self._indent (depth + 2), len (self._c)))
|
||||
s.append ('{}if (0x{:04X}u == buffer->cur ({}).codepoint &&\n'.format (indent, self._c[0], index or ''))
|
||||
if index:
|
||||
s.append ('{}buffer->idx + {} < count &&\n'.format (self._indent (depth + 2), index + 1))
|
||||
for i, cp in enumerate (self._c[1:], start=1):
|
||||
s.append ('{}0x{:04X}u == buffer->cur ({}).codepoint{}\n'.format (
|
||||
self._indent (depth + 2), cp, index + i, ')' if i == len (self._c) - 1 else ' &&'))
|
||||
s.append ('{}{{\n'.format (indent))
|
||||
for i in range (len (self._c)):
|
||||
for i in range (index + 1):
|
||||
s.append ('{}buffer->next_glyph ();\n'.format (self._indent (depth + 1)))
|
||||
s.append ('{}_output_dotted_circle (buffer);\n'.format (self._indent (depth + 1)))
|
||||
s.append ('{}}}\n'.format (indent))
|
||||
|
@ -128,7 +132,12 @@ class ConstraintSet (object):
|
|||
|
||||
constraints = {}
|
||||
with io.open (sys.argv[1], encoding='utf-8') as f:
|
||||
constraints_header = [f.readline ().strip () for i in range (2)]
|
||||
constraints_header = []
|
||||
while True:
|
||||
line = f.readline ().strip ()
|
||||
if line == '#':
|
||||
break
|
||||
constraints_header.append(line)
|
||||
for line in f:
|
||||
j = line.find ('#')
|
||||
if j >= 0:
|
||||
|
@ -147,7 +156,7 @@ print ('/* == Start of generated functions == */')
|
|||
print ('/*')
|
||||
print (' * The following functions are generated by running:')
|
||||
print (' *')
|
||||
print (' * %s use Scripts.txt' % sys.argv[0])
|
||||
print (' * %s ms-use/IndicShapingInvalidCluster.txt Scripts.txt' % sys.argv[0])
|
||||
print (' *')
|
||||
print (' * on files with these headers:')
|
||||
print (' *')
|
||||
|
@ -185,7 +194,7 @@ print ('_hb_preprocess_text_vowel_constraints (const hb_ot_shape_plan_t *plan HB
|
|||
print ('\t\t\t\t hb_buffer_t *buffer,')
|
||||
print ('\t\t\t\t hb_font_t *font HB_UNUSED)')
|
||||
print ('{')
|
||||
print ('#if defined(HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS)')
|
||||
print ('#ifdef HB_NO_OT_SHAPE_COMPLEX_VOWEL_CONSTRAINTS')
|
||||
print (' return;')
|
||||
print ('#endif')
|
||||
print (' if (buffer->flags & HB_BUFFER_FLAG_DO_NOT_INSERT_DOTTED_CIRCLE)')
|
||||
|
|
|
@ -44,7 +44,6 @@
|
|||
#include "hb-static.cc"
|
||||
#include "hb-ucd.cc"
|
||||
#include "hb-unicode.cc"
|
||||
#include "hb-warning.cc"
|
||||
#include "hb-glib.cc"
|
||||
#include "hb-ft.cc"
|
||||
#include "hb-graphite2.cc"
|
||||
|
|
|
@ -99,7 +99,14 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
|||
template <typename T> operator T * () const { return arrayZ; }
|
||||
|
||||
HB_INTERNAL bool operator == (const hb_array_t &o) const;
|
||||
HB_INTERNAL uint32_t hash () const;
|
||||
|
||||
uint32_t hash () const {
|
||||
uint32_t current = 0;
|
||||
for (unsigned int i = 0; i < this->length; i++) {
|
||||
current = current * 31 + hb_hash (this->arrayZ[i]);
|
||||
}
|
||||
return current;
|
||||
}
|
||||
|
||||
/*
|
||||
* Compare, Sort, and Search.
|
||||
|
@ -189,6 +196,15 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
|
|||
const T *as () const
|
||||
{ return length < hb_null_size (T) ? &Null (T) : reinterpret_cast<const T *> (arrayZ); }
|
||||
|
||||
template <typename T,
|
||||
unsigned P = sizeof (Type),
|
||||
hb_enable_if (P == 1)>
|
||||
bool in_range (const T *p, unsigned int size = T::static_size) const
|
||||
{
|
||||
return ((const char *) p) >= arrayZ
|
||||
&& ((const char *) p + size) <= arrayZ + length;
|
||||
}
|
||||
|
||||
/* Only call if you allocated the underlying array using malloc() or similar. */
|
||||
void free ()
|
||||
{ ::free ((void *) arrayZ); arrayZ = nullptr; length = 0; }
|
||||
|
@ -332,27 +348,35 @@ hb_sorted_array (T (&array_)[length_])
|
|||
template <typename T>
|
||||
bool hb_array_t<T>::operator == (const hb_array_t<T> &o) const
|
||||
{
|
||||
return length == o.length &&
|
||||
+ hb_zip (*this, o)
|
||||
| hb_map ([] (hb_pair_t<T&, T&> &&_) { return _.first == _.second; })
|
||||
| hb_all
|
||||
;
|
||||
if (o.length != this->length) return false;
|
||||
for (unsigned int i = 0; i < this->length; i++) {
|
||||
if (this->arrayZ[i] != o.arrayZ[i]) return false;
|
||||
}
|
||||
template <typename T>
|
||||
uint32_t hb_array_t<T>::hash () const
|
||||
{
|
||||
return
|
||||
+ hb_iter (*this)
|
||||
| hb_map (hb_hash)
|
||||
| hb_reduce ([] (uint32_t a, uint32_t b) { return a * 31 + b; }, 0)
|
||||
;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* TODO Specialize opeator== for hb_bytes_t and hb_ubytes_t. */
|
||||
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const char>::hash () const {
|
||||
uint32_t current = 0;
|
||||
for (unsigned int i = 0; i < this->length; i++)
|
||||
current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
|
||||
return current;
|
||||
}
|
||||
|
||||
template <>
|
||||
inline uint32_t hb_array_t<const unsigned char>::hash () const {
|
||||
uint32_t current = 0;
|
||||
for (unsigned int i = 0; i < this->length; i++)
|
||||
current = current * 31 + (uint32_t) (this->arrayZ[i] * 2654435761u);
|
||||
return current;
|
||||
}
|
||||
|
||||
|
||||
typedef hb_array_t<const char> hb_bytes_t;
|
||||
typedef hb_array_t<const unsigned char> hb_ubytes_t;
|
||||
|
||||
/* TODO Specialize opeator==/hash() for hb_bytes_t and hb_ubytes_t. */
|
||||
//template <>
|
||||
//uint32_t hb_array_t<const char>::hash () const { return 0; }
|
||||
|
||||
|
||||
#endif /* HB_ARRAY_HH */
|
||||
|
|
|
@ -212,18 +212,7 @@ static inline bool _hb_compare_and_swaplp (long *P, long O, long N)
|
|||
static_assert ((sizeof (long) == sizeof (void *)), "");
|
||||
|
||||
|
||||
#elif !defined(HB_NO_MT)
|
||||
|
||||
#define HB_ATOMIC_INT_NIL 1 /* Warn that fallback implementation is in use. */
|
||||
|
||||
#define _hb_memory_barrier() do {} while (0)
|
||||
|
||||
#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))
|
||||
|
||||
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
|
||||
|
||||
|
||||
#else /* HB_NO_MT */
|
||||
#elif defined(HB_NO_MT)
|
||||
|
||||
#define hb_atomic_int_impl_add(AI, V) ((*(AI) += (V)) - (V))
|
||||
|
||||
|
@ -232,6 +221,11 @@ static_assert ((sizeof (long) == sizeof (void *)), "");
|
|||
#define hb_atomic_ptr_impl_cmpexch(P,O,N) (* (void **) (P) == (void *) (O) ? (* (void **) (P) = (void *) (N), true) : false)
|
||||
|
||||
|
||||
#else
|
||||
|
||||
#error "Could not find any system to define atomic_int macros."
|
||||
#error "Check hb-atomic.hh for possible resolutions."
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
|
|
|
@ -776,8 +776,10 @@ hb_buffer_destroy (hb_buffer_t *buffer)
|
|||
|
||||
free (buffer->info);
|
||||
free (buffer->pos);
|
||||
#ifndef HB_NO_BUFFER_MESSAGE
|
||||
if (buffer->message_destroy)
|
||||
buffer->message_destroy (buffer->message_data);
|
||||
#endif
|
||||
|
||||
free (buffer);
|
||||
}
|
||||
|
@ -1858,17 +1860,7 @@ hb_buffer_normalize_glyphs (hb_buffer_t *buffer)
|
|||
|
||||
bool backward = HB_DIRECTION_IS_BACKWARD (buffer->props.direction);
|
||||
|
||||
unsigned int count = buffer->len;
|
||||
if (unlikely (!count)) return;
|
||||
hb_glyph_info_t *info = buffer->info;
|
||||
|
||||
unsigned int start = 0;
|
||||
unsigned int end;
|
||||
for (end = start + 1; end < count; end++)
|
||||
if (info[start].cluster != info[end].cluster) {
|
||||
normalize_glyphs_cluster (buffer, start, end, backward);
|
||||
start = end;
|
||||
}
|
||||
foreach_cluster (buffer, start, end)
|
||||
normalize_glyphs_cluster (buffer, start, end, backward);
|
||||
}
|
||||
|
||||
|
|
|
@ -126,9 +126,9 @@ struct hb_buffer_t
|
|||
/* Debugging API */
|
||||
#ifndef HB_NO_BUFFER_MESSAGE
|
||||
hb_buffer_message_func_t message_func;
|
||||
#endif
|
||||
void *message_data;
|
||||
hb_destroy_func_t message_destroy;
|
||||
#endif
|
||||
|
||||
/* Internal debugging. */
|
||||
/* The bits here reflect current allocations of the bytes in glyph_info_t's var1 and var2. */
|
||||
|
|
|
@ -551,8 +551,13 @@ struct path_procs_t
|
|||
|
||||
static void rcurveline (ENV &env, PARAM& param)
|
||||
{
|
||||
unsigned int arg_count = env.argStack.get_count ();
|
||||
if (unlikely (arg_count < 8))
|
||||
return;
|
||||
|
||||
unsigned int i = 0;
|
||||
for (; i + 6 <= env.argStack.get_count (); i += 6)
|
||||
unsigned int curve_limit = arg_count - 2;
|
||||
for (; i + 6 <= curve_limit; i += 6)
|
||||
{
|
||||
point_t pt1 = env.get_pt ();
|
||||
pt1.move (env.eval_arg (i), env.eval_arg (i+1));
|
||||
|
@ -562,26 +567,27 @@ struct path_procs_t
|
|||
pt3.move (env.eval_arg (i+4), env.eval_arg (i+5));
|
||||
PATH::curve (env, param, pt1, pt2, pt3);
|
||||
}
|
||||
for (; i + 2 <= env.argStack.get_count (); i += 2)
|
||||
{
|
||||
|
||||
point_t pt1 = env.get_pt ();
|
||||
pt1.move (env.eval_arg (i), env.eval_arg (i+1));
|
||||
PATH::line (env, param, pt1);
|
||||
}
|
||||
}
|
||||
|
||||
static void rlinecurve (ENV &env, PARAM& param)
|
||||
{
|
||||
unsigned int arg_count = env.argStack.get_count ();
|
||||
if (unlikely (arg_count < 8))
|
||||
return;
|
||||
|
||||
unsigned int i = 0;
|
||||
unsigned int line_limit = (env.argStack.get_count () % 6);
|
||||
unsigned int line_limit = arg_count - 6;
|
||||
for (; i + 2 <= line_limit; i += 2)
|
||||
{
|
||||
point_t pt1 = env.get_pt ();
|
||||
pt1.move (env.eval_arg (i), env.eval_arg (i+1));
|
||||
PATH::line (env, param, pt1);
|
||||
}
|
||||
for (; i + 6 <= env.argStack.get_count (); i += 6)
|
||||
{
|
||||
|
||||
point_t pt1 = env.get_pt ();
|
||||
pt1.move (env.eval_arg (i), env.eval_arg (i+1));
|
||||
point_t pt2 = pt1;
|
||||
|
@ -590,7 +596,6 @@ struct path_procs_t
|
|||
pt3.move (env.eval_arg (i+4), env.eval_arg (i+5));
|
||||
PATH::curve (env, param, pt1, pt2, pt3);
|
||||
}
|
||||
}
|
||||
|
||||
static void vvcurveto (ENV &env, PARAM& param)
|
||||
{
|
||||
|
|
|
@ -434,7 +434,7 @@ typedef void (*hb_destroy_func_t) (void *user_data);
|
|||
* @start: the cluster to start applying this feature setting (inclusive).
|
||||
* @end: the cluster to end applying this feature setting (exclusive).
|
||||
*
|
||||
* The hb_feature_t is the structure that holds information about requested
|
||||
* The #hb_feature_t is the structure that holds information about requested
|
||||
* feature application. The feature will be applied with the given value to all
|
||||
* glyphs which are in clusters between @start (inclusive) and @end (exclusive).
|
||||
* Setting start to @HB_FEATURE_GLOBAL_START and end to @HB_FEATURE_GLOBAL_END
|
||||
|
|
|
@ -791,6 +791,29 @@ hb_font_get_nominal_glyph (hb_font_t *font,
|
|||
return font->get_nominal_glyph (unicode, glyph);
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_font_get_nominal_glyphs:
|
||||
* @font: a font.
|
||||
*
|
||||
*
|
||||
*
|
||||
* Return value:
|
||||
*
|
||||
* Since: 2.6.3
|
||||
**/
|
||||
unsigned int
|
||||
hb_font_get_nominal_glyphs (hb_font_t *font,
|
||||
unsigned int count,
|
||||
const hb_codepoint_t *first_unicode,
|
||||
unsigned int unicode_stride,
|
||||
hb_codepoint_t *first_glyph,
|
||||
unsigned int glyph_stride)
|
||||
{
|
||||
return font->get_nominal_glyphs (count,
|
||||
first_unicode, unicode_stride,
|
||||
first_glyph, glyph_stride);
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_font_get_variation_glyph:
|
||||
* @font: a font.
|
||||
|
|
|
@ -459,6 +459,14 @@ hb_font_get_variation_glyph (hb_font_t *font,
|
|||
hb_codepoint_t unicode, hb_codepoint_t variation_selector,
|
||||
hb_codepoint_t *glyph);
|
||||
|
||||
HB_EXTERN unsigned int
|
||||
hb_font_get_nominal_glyphs (hb_font_t *font,
|
||||
unsigned int count,
|
||||
const hb_codepoint_t *first_unicode,
|
||||
unsigned int unicode_stride,
|
||||
hb_codepoint_t *first_glyph,
|
||||
unsigned int glyph_stride);
|
||||
|
||||
HB_EXTERN hb_position_t
|
||||
hb_font_get_glyph_h_advance (hb_font_t *font,
|
||||
hb_codepoint_t glyph);
|
||||
|
|
60
src/hb-ft.cc
60
src/hb-ft.cc
|
@ -140,7 +140,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags)
|
|||
if (hb_object_is_immutable (font))
|
||||
return;
|
||||
|
||||
if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)
|
||||
if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy))
|
||||
return;
|
||||
|
||||
hb_ft_font_t *ft_font = (hb_ft_font_t *) font->user_data;
|
||||
|
@ -160,7 +160,7 @@ hb_ft_font_set_load_flags (hb_font_t *font, int load_flags)
|
|||
int
|
||||
hb_ft_font_get_load_flags (hb_font_t *font)
|
||||
{
|
||||
if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)
|
||||
if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy))
|
||||
return 0;
|
||||
|
||||
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data;
|
||||
|
@ -168,10 +168,19 @@ hb_ft_font_get_load_flags (hb_font_t *font)
|
|||
return ft_font->load_flags;
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_ft_font_get_face:
|
||||
* @font:
|
||||
*
|
||||
*
|
||||
*
|
||||
* Return value:
|
||||
* Since: 0.9.2
|
||||
**/
|
||||
FT_Face
|
||||
hb_ft_font_get_face (hb_font_t *font)
|
||||
{
|
||||
if (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy)
|
||||
if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy))
|
||||
return nullptr;
|
||||
|
||||
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data;
|
||||
|
@ -179,6 +188,47 @@ hb_ft_font_get_face (hb_font_t *font)
|
|||
return ft_font->ft_face;
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_ft_font_lock_face:
|
||||
* @font:
|
||||
*
|
||||
*
|
||||
*
|
||||
* Return value:
|
||||
* Since: REPLACEME
|
||||
**/
|
||||
FT_Face
|
||||
hb_ft_font_lock_face (hb_font_t *font)
|
||||
{
|
||||
if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy))
|
||||
return nullptr;
|
||||
|
||||
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data;
|
||||
|
||||
ft_font->lock.lock ();
|
||||
|
||||
return ft_font->ft_face;
|
||||
}
|
||||
|
||||
/**
|
||||
* hb_ft_font_unlock_face:
|
||||
* @font:
|
||||
*
|
||||
*
|
||||
*
|
||||
* Return value:
|
||||
* Since: REPLACEME
|
||||
**/
|
||||
void
|
||||
hb_ft_font_unlock_face (hb_font_t *font)
|
||||
{
|
||||
if (unlikely (font->destroy != (hb_destroy_func_t) _hb_ft_font_destroy))
|
||||
return;
|
||||
|
||||
const hb_ft_font_t *ft_font = (const hb_ft_font_t *) font->user_data;
|
||||
|
||||
ft_font->lock.unlock ();
|
||||
}
|
||||
|
||||
|
||||
static hb_bool_t
|
||||
|
@ -718,7 +768,7 @@ hb_ft_font_changed (hb_font_t *font)
|
|||
ft_face->size->metrics.y_ppem);
|
||||
#endif
|
||||
|
||||
#ifdef HAVE_FT_GET_VAR_BLEND_COORDINATES
|
||||
#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR)
|
||||
FT_MM_Var *mm_var = nullptr;
|
||||
if (!FT_Get_MM_Var (ft_face, &mm_var))
|
||||
{
|
||||
|
@ -857,7 +907,7 @@ hb_ft_font_set_funcs (hb_font_t *font)
|
|||
FT_Set_Transform (ft_face, &matrix, nullptr);
|
||||
}
|
||||
|
||||
#ifdef HAVE_FT_SET_VAR_BLEND_COORDINATES
|
||||
#if defined(HAVE_FT_GET_VAR_BLEND_COORDINATES) && !defined(HB_NO_VAR)
|
||||
unsigned int num_coords;
|
||||
const int *coords = hb_font_get_var_coords_normalized (font, &num_coords);
|
||||
if (num_coords)
|
||||
|
|
|
@ -110,6 +110,12 @@ hb_ft_font_create_referenced (FT_Face ft_face);
|
|||
HB_EXTERN FT_Face
|
||||
hb_ft_font_get_face (hb_font_t *font);
|
||||
|
||||
HB_EXTERN FT_Face
|
||||
hb_ft_font_lock_face (hb_font_t *font);
|
||||
|
||||
HB_EXTERN void
|
||||
hb_ft_font_unlock_face (hb_font_t *font);
|
||||
|
||||
HB_EXTERN void
|
||||
hb_ft_font_set_load_flags (hb_font_t *font, int load_flags);
|
||||
|
||||
|
|
|
@ -135,7 +135,7 @@ static inline Type& StructAfter(TObject &X)
|
|||
|
||||
#define DEFINE_SIZE_ARRAY(size, array) \
|
||||
DEFINE_COMPILES_ASSERTION ((void) (array)[0].static_size) \
|
||||
DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + HB_VAR_ARRAY * sizeof ((array)[0])) \
|
||||
DEFINE_INSTANCE_ASSERTION (sizeof (*this) == (size) + (HB_VAR_ARRAY+0) * sizeof ((array)[0])) \
|
||||
static constexpr unsigned null_size = (size); \
|
||||
static constexpr unsigned min_size = (size)
|
||||
|
||||
|
|
|
@ -46,16 +46,13 @@ struct hb_hashmap_t
|
|||
static_assert (hb_is_integral (K) || hb_is_pointer (K), "");
|
||||
static_assert (hb_is_integral (V) || hb_is_pointer (V), "");
|
||||
|
||||
/* TODO If key type is a pointer, keep hash in item_t and use to:
|
||||
* 1. avoid rehashing when resizing table, and
|
||||
* 2. compare hash before comparing keys, for speed.
|
||||
*/
|
||||
struct item_t
|
||||
{
|
||||
K key;
|
||||
V value;
|
||||
uint32_t hash;
|
||||
|
||||
void clear () { key = kINVALID; value = vINVALID; }
|
||||
void clear () { key = kINVALID; value = vINVALID; hash = 0; }
|
||||
|
||||
bool operator == (K o) { return hb_deref (key) == hb_deref (o); }
|
||||
bool operator == (const item_t &o) { return *this == o.key; }
|
||||
|
@ -137,7 +134,9 @@ struct hb_hashmap_t
|
|||
if (old_items)
|
||||
for (unsigned int i = 0; i < old_size; i++)
|
||||
if (old_items[i].is_real ())
|
||||
set (old_items[i].key, old_items[i].value);
|
||||
set_with_hash (old_items[i].key,
|
||||
old_items[i].hash,
|
||||
old_items[i].value);
|
||||
|
||||
free (old_items);
|
||||
|
||||
|
@ -146,29 +145,9 @@ struct hb_hashmap_t
|
|||
|
||||
void set (K key, V value)
|
||||
{
|
||||
if (unlikely (!successful)) return;
|
||||
if (unlikely (key == kINVALID)) return;
|
||||
if ((occupancy + occupancy / 2) >= mask && !resize ()) return;
|
||||
unsigned int i = bucket_for (key);
|
||||
|
||||
if (value == vINVALID && items[i].key != key)
|
||||
return; /* Trying to delete non-existent key. */
|
||||
|
||||
if (!items[i].is_unused ())
|
||||
{
|
||||
occupancy--;
|
||||
if (items[i].is_tombstone ())
|
||||
population--;
|
||||
set_with_hash (key, hb_hash (key), value);
|
||||
}
|
||||
|
||||
items[i].key = key;
|
||||
items[i].value = value;
|
||||
|
||||
occupancy++;
|
||||
if (!items[i].is_tombstone ())
|
||||
population++;
|
||||
|
||||
}
|
||||
V get (K key) const
|
||||
{
|
||||
if (unlikely (!items)) return vINVALID;
|
||||
|
@ -237,14 +216,45 @@ struct hb_hashmap_t
|
|||
|
||||
protected:
|
||||
|
||||
void set_with_hash (K key, uint32_t hash, V value)
|
||||
{
|
||||
if (unlikely (!successful)) return;
|
||||
if (unlikely (key == kINVALID)) return;
|
||||
if ((occupancy + occupancy / 2) >= mask && !resize ()) return;
|
||||
unsigned int i = bucket_for_hash (key, hash);
|
||||
|
||||
if (value == vINVALID && items[i].key != key)
|
||||
return; /* Trying to delete non-existent key. */
|
||||
|
||||
if (!items[i].is_unused ())
|
||||
{
|
||||
occupancy--;
|
||||
if (items[i].is_tombstone ())
|
||||
population--;
|
||||
}
|
||||
|
||||
items[i].key = key;
|
||||
items[i].value = value;
|
||||
items[i].hash = hash;
|
||||
|
||||
occupancy++;
|
||||
if (!items[i].is_tombstone ())
|
||||
population++;
|
||||
}
|
||||
|
||||
unsigned int bucket_for (K key) const
|
||||
{
|
||||
unsigned int i = hb_hash (key) % prime;
|
||||
return bucket_for_hash (key, hb_hash (key));
|
||||
}
|
||||
|
||||
unsigned int bucket_for_hash (K key, uint32_t hash) const
|
||||
{
|
||||
unsigned int i = hash % prime;
|
||||
unsigned int step = 0;
|
||||
unsigned int tombstone = (unsigned) -1;
|
||||
while (!items[i].is_unused ())
|
||||
{
|
||||
if (items[i] == key)
|
||||
if (items[i].hash == hash && items[i] == key)
|
||||
return i;
|
||||
if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
|
||||
tombstone = i;
|
||||
|
|
|
@ -92,25 +92,7 @@ typedef volatile int hb_mutex_impl_t;
|
|||
#define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END
|
||||
|
||||
|
||||
#elif !defined(HB_NO_MT)
|
||||
|
||||
#if defined(HAVE_SCHED_H) && defined(HAVE_SCHED_YIELD)
|
||||
# include <sched.h>
|
||||
# define HB_SCHED_YIELD() sched_yield ()
|
||||
#else
|
||||
# define HB_SCHED_YIELD() HB_STMT_START {} HB_STMT_END
|
||||
#endif
|
||||
|
||||
#define HB_MUTEX_INT_NIL 1 /* Warn that fallback implementation is in use. */
|
||||
typedef volatile int hb_mutex_impl_t;
|
||||
#define HB_MUTEX_IMPL_INIT 0
|
||||
#define hb_mutex_impl_init(M) *(M) = 0
|
||||
#define hb_mutex_impl_lock(M) HB_STMT_START { while (*(M)) HB_SCHED_YIELD (); (*(M))++; } HB_STMT_END
|
||||
#define hb_mutex_impl_unlock(M) (*(M))--
|
||||
#define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END
|
||||
|
||||
|
||||
#else /* HB_NO_MT */
|
||||
#elif defined(HB_NO_MT)
|
||||
|
||||
typedef int hb_mutex_impl_t;
|
||||
#define HB_MUTEX_IMPL_INIT 0
|
||||
|
@ -120,6 +102,11 @@ typedef int hb_mutex_impl_t;
|
|||
#define hb_mutex_impl_finish(M) HB_STMT_START {} HB_STMT_END
|
||||
|
||||
|
||||
#else
|
||||
|
||||
#error "Could not find any system to define mutex macros."
|
||||
#error "Check hb-mutex.hh for possible resolutions."
|
||||
|
||||
#endif
|
||||
|
||||
|
||||
|
|
|
@ -336,7 +336,7 @@ bool OT::cff1::accelerator_t::get_extents (hb_font_t *font, hb_codepoint_t glyph
|
|||
else
|
||||
{
|
||||
extents->y_bearing = font->em_scalef_y (bounds.max.y.to_real ());
|
||||
extents->height = font->em_scalef_x (bounds.min.y.to_real () - bounds.max.y.to_real ());
|
||||
extents->height = font->em_scalef_y (bounds.min.y.to_real () - bounds.max.y.to_real ());
|
||||
}
|
||||
|
||||
return true;
|
||||
|
|
|
@ -342,14 +342,22 @@ struct CmapSubtableFormat4
|
|||
count--; /* Skip sentinel segment. */
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
{
|
||||
hb_codepoint_t start = this->startCount[i];
|
||||
hb_codepoint_t end = this->endCount[i];
|
||||
unsigned int rangeOffset = this->idRangeOffset[i];
|
||||
if (rangeOffset == 0)
|
||||
out->add_range (this->startCount[i], this->endCount[i]);
|
||||
{
|
||||
for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++)
|
||||
{
|
||||
hb_codepoint_t gid = (codepoint + this->idDelta[i]) & 0xFFFFu;
|
||||
if (unlikely (!gid))
|
||||
continue;
|
||||
out->add (codepoint);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
for (hb_codepoint_t codepoint = this->startCount[i];
|
||||
codepoint <= this->endCount[i];
|
||||
codepoint++)
|
||||
for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++)
|
||||
{
|
||||
unsigned int index = rangeOffset / 2 + (codepoint - this->startCount[i]) + i - this->segCount;
|
||||
if (unlikely (index >= this->glyphIdArrayLength))
|
||||
|
@ -522,10 +530,18 @@ struct CmapSubtableLongSegmented
|
|||
|
||||
void collect_unicodes (hb_set_t *out) const
|
||||
{
|
||||
for (unsigned int i = 0; i < this->groups.len; i++) {
|
||||
out->add_range (this->groups[i].startCharCode,
|
||||
hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
|
||||
(hb_codepoint_t) HB_UNICODE_MAX));
|
||||
for (unsigned int i = 0; i < this->groups.len; i++)
|
||||
{
|
||||
hb_codepoint_t start = this->groups[i].startCharCode;
|
||||
hb_codepoint_t end = hb_min ((hb_codepoint_t) this->groups[i].endCharCode,
|
||||
(hb_codepoint_t) HB_UNICODE_MAX);
|
||||
for (hb_codepoint_t codepoint = start; codepoint <= end; codepoint++)
|
||||
{
|
||||
hb_codepoint_t gid = T::group_get_glyph (this->groups[i], codepoint);
|
||||
if (unlikely (!gid))
|
||||
continue;
|
||||
out->add (codepoint);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -925,9 +941,9 @@ struct CmapSubtableFormat14
|
|||
if (unlikely (!c->extend_min (*this))) return;
|
||||
this->format = 14;
|
||||
|
||||
const CmapSubtableFormat14 *src_tbl = reinterpret_cast<const CmapSubtableFormat14*> (src_base);
|
||||
for (const VariationSelectorRecord& _ : src_tbl->record)
|
||||
c->copy (_, unicodes, glyphs, glyph_map, src_base, this);
|
||||
auto src_tbl = reinterpret_cast<const CmapSubtableFormat14*> (src_base);
|
||||
c->copy_all (hb_iter (src_tbl->record),
|
||||
unicodes, glyphs, glyph_map, src_base, this);
|
||||
|
||||
if (c->length () - table_initpos == CmapSubtableFormat14::min_size)
|
||||
c->revert (snap);
|
||||
|
|
|
@ -360,9 +360,9 @@ struct glyf
|
|||
{
|
||||
typedef const CompositeGlyphChain *__item_t__;
|
||||
composite_iter_t (hb_bytes_t glyph_, __item_t__ current_) :
|
||||
glyph (glyph_), current (current_), checker (range_checker_t (glyph.arrayZ, glyph.length))
|
||||
glyph (glyph_), current (current_)
|
||||
{ if (!in_range (current)) current = nullptr; }
|
||||
composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr), checker (range_checker_t (nullptr, 0)) {}
|
||||
composite_iter_t () : glyph (hb_bytes_t ()), current (nullptr) {}
|
||||
|
||||
const CompositeGlyphChain &__item__ () const { return *current; }
|
||||
bool __more__ () const { return current; }
|
||||
|
@ -380,14 +380,13 @@ struct glyf
|
|||
|
||||
bool in_range (const CompositeGlyphChain *composite) const
|
||||
{
|
||||
return checker.in_range (composite, CompositeGlyphChain::min_size)
|
||||
&& checker.in_range (composite, composite->get_size ());
|
||||
return glyph.in_range (composite, CompositeGlyphChain::min_size)
|
||||
&& glyph.in_range (composite, composite->get_size ());
|
||||
}
|
||||
|
||||
private:
|
||||
hb_bytes_t glyph;
|
||||
__item_t__ current;
|
||||
range_checker_t checker;
|
||||
};
|
||||
|
||||
struct Glyph
|
||||
|
@ -537,7 +536,7 @@ struct glyf
|
|||
template <typename T>
|
||||
static bool read_points (const HBUINT8 *&p /* IN/OUT */,
|
||||
contour_point_vector_t &points_ /* IN/OUT */,
|
||||
const range_checker_t &checker)
|
||||
const hb_bytes_t &bytes)
|
||||
{
|
||||
T coord_setter;
|
||||
float v = 0;
|
||||
|
@ -546,7 +545,7 @@ struct glyf
|
|||
uint8_t flag = points_[i].flag;
|
||||
if (coord_setter.is_short (flag))
|
||||
{
|
||||
if (unlikely (!checker.in_range (p))) return false;
|
||||
if (unlikely (!bytes.in_range (p))) return false;
|
||||
if (coord_setter.is_same (flag))
|
||||
v += *p++;
|
||||
else
|
||||
|
@ -556,7 +555,7 @@ struct glyf
|
|||
{
|
||||
if (!coord_setter.is_same (flag))
|
||||
{
|
||||
if (unlikely (!checker.in_range ((const HBUINT16 *) p))) return false;
|
||||
if (unlikely (!bytes.in_range ((const HBUINT16 *) p))) return false;
|
||||
v += *(const HBINT16 *) p;
|
||||
p += HBINT16::static_size;
|
||||
}
|
||||
|
@ -571,9 +570,8 @@ struct glyf
|
|||
const bool phantom_only=false) const
|
||||
{
|
||||
const HBUINT16 *endPtsOfContours = &StructAfter<HBUINT16> (header);
|
||||
range_checker_t checker (bytes.arrayZ, bytes.length);
|
||||
int num_contours = header.numberOfContours;
|
||||
if (unlikely (!checker.in_range (&endPtsOfContours[num_contours + 1]))) return false;
|
||||
if (unlikely (!bytes.in_range (&endPtsOfContours[num_contours + 1]))) return false;
|
||||
unsigned int num_points = endPtsOfContours[num_contours - 1] + 1;
|
||||
|
||||
points_.resize (num_points + PHANTOM_COUNT);
|
||||
|
@ -593,12 +591,12 @@ struct glyf
|
|||
/* Read flags */
|
||||
for (unsigned int i = 0; i < num_points; i++)
|
||||
{
|
||||
if (unlikely (!checker.in_range (p))) return false;
|
||||
if (unlikely (!bytes.in_range (p))) return false;
|
||||
uint8_t flag = *p++;
|
||||
points_[i].flag = flag;
|
||||
if (flag & FLAG_REPEAT)
|
||||
{
|
||||
if (unlikely (!checker.in_range (p))) return false;
|
||||
if (unlikely (!bytes.in_range (p))) return false;
|
||||
unsigned int repeat_count = *p++;
|
||||
while ((repeat_count-- > 0) && (++i < num_points))
|
||||
points_[i].flag = flag;
|
||||
|
@ -606,8 +604,8 @@ struct glyf
|
|||
}
|
||||
|
||||
/* Read x & y coordinates */
|
||||
return (read_points<x_setter_t> (p, points_, checker) &&
|
||||
read_points<y_setter_t> (p, points_, checker));
|
||||
return (read_points<x_setter_t> (p, points_, bytes) &&
|
||||
read_points<y_setter_t> (p, points_, bytes));
|
||||
}
|
||||
};
|
||||
|
||||
|
@ -808,20 +806,36 @@ struct glyf
|
|||
|
||||
struct contour_bounds_t
|
||||
{
|
||||
contour_bounds_t () { min.x = min.y = FLT_MAX; max.x = max.y = -FLT_MAX; }
|
||||
contour_bounds_t () { min_x = min_y = FLT_MAX; max_x = max_y = -FLT_MAX; }
|
||||
|
||||
void add (const contour_point_t &p)
|
||||
{
|
||||
min.x = hb_min (min.x, p.x);
|
||||
min.y = hb_min (min.y, p.y);
|
||||
max.x = hb_max (max.x, p.x);
|
||||
max.y = hb_max (max.y, p.y);
|
||||
min_x = hb_min (min_x, p.x);
|
||||
min_y = hb_min (min_y, p.y);
|
||||
max_x = hb_max (max_x, p.x);
|
||||
max_y = hb_max (max_y, p.y);
|
||||
}
|
||||
|
||||
bool empty () const { return (min.x >= max.x) || (min.y >= max.y); }
|
||||
bool empty () const { return (min_x >= max_x) || (min_y >= max_y); }
|
||||
|
||||
contour_point_t min;
|
||||
contour_point_t max;
|
||||
void get_extents (hb_font_t *font, hb_glyph_extents_t *extents)
|
||||
{
|
||||
if (unlikely (empty ()))
|
||||
{
|
||||
extents->width = 0;
|
||||
extents->x_bearing = 0;
|
||||
extents->height = 0;
|
||||
extents->y_bearing = 0;
|
||||
return;
|
||||
}
|
||||
extents->x_bearing = font->em_scalef_x (min_x);
|
||||
extents->width = font->em_scalef_x (max_x - min_x);
|
||||
extents->y_bearing = font->em_scalef_y (max_y);
|
||||
extents->height = font->em_scalef_y (min_y - max_y);
|
||||
}
|
||||
|
||||
protected:
|
||||
float min_x, min_y, max_x, max_y;
|
||||
};
|
||||
|
||||
#ifndef HB_NO_VAR
|
||||
|
@ -919,27 +933,7 @@ struct glyf
|
|||
contour_bounds_t bounds;
|
||||
for (unsigned int i = 0; i + PHANTOM_COUNT < all_points.length; i++)
|
||||
bounds.add (all_points[i]);
|
||||
|
||||
if (bounds.min.x > bounds.max.x)
|
||||
{
|
||||
extents->width = 0;
|
||||
extents->x_bearing = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
extents->x_bearing = font->em_scalef_x (bounds.min.x);
|
||||
extents->width = font->em_scalef_x (bounds.max.x - bounds.min.x);
|
||||
}
|
||||
if (bounds.min.y > bounds.max.y)
|
||||
{
|
||||
extents->height = 0;
|
||||
extents->y_bearing = 0;
|
||||
}
|
||||
else
|
||||
{
|
||||
extents->y_bearing = font->em_scalef_y (bounds.max.y);
|
||||
extents->height = font->em_scalef_y (bounds.min.y - bounds.max.y);
|
||||
}
|
||||
bounds.get_extents (font, extents);
|
||||
}
|
||||
if (phantoms)
|
||||
for (unsigned int i = 0; i < PHANTOM_COUNT; i++)
|
||||
|
|
|
@ -66,6 +66,23 @@ namespace OT {
|
|||
|
||||
#define NOT_COVERED ((unsigned int) -1)
|
||||
|
||||
|
||||
template<typename Iterator>
|
||||
static inline void Coverage_serialize (hb_serialize_context_t *c,
|
||||
Iterator it);
|
||||
|
||||
template<typename Iterator>
|
||||
static inline void ClassDef_serialize (hb_serialize_context_t *c,
|
||||
Iterator it);
|
||||
|
||||
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_set_t &glyphset,
|
||||
const hb_map_t &gid_klass_map,
|
||||
hb_sorted_vector_t<HBGlyphID> glyphs,
|
||||
hb_sorted_vector_t<unsigned> klasses,
|
||||
hb_map_t *klass_map /*INOUT*/);
|
||||
|
||||
|
||||
template<typename OutputArray>
|
||||
struct subset_offset_array_t
|
||||
{
|
||||
|
@ -120,7 +137,6 @@ struct
|
|||
}
|
||||
HB_FUNCOBJ (subset_offset_array);
|
||||
|
||||
|
||||
/*
|
||||
*
|
||||
* OpenType Layout Common Table Formats
|
||||
|
@ -137,6 +153,26 @@ struct Record_sanitize_closure_t {
|
|||
const void *list_base;
|
||||
};
|
||||
|
||||
struct RecordList_subset_context_t {
|
||||
|
||||
RecordList_subset_context_t() : script_count (0), langsys_count (0)
|
||||
{}
|
||||
|
||||
bool visitScript ()
|
||||
{
|
||||
return script_count++ < HB_MAX_SCRIPTS;
|
||||
}
|
||||
|
||||
bool visitLangSys ()
|
||||
{
|
||||
return langsys_count++ < HB_MAX_LANGSYS;
|
||||
}
|
||||
|
||||
private:
|
||||
unsigned int script_count;
|
||||
unsigned int langsys_count;
|
||||
};
|
||||
|
||||
template <typename Type>
|
||||
struct Record
|
||||
{
|
||||
|
@ -193,11 +229,26 @@ struct RecordListOf : RecordArrayOf<Type>
|
|||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
RecordList_subset_context_t record_list_context;
|
||||
|
||||
unsigned int count = this->len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
out->get_offset (i).serialize_subset (c, this->get_offset (i), this, out);
|
||||
{
|
||||
auto *record = out->serialize_append (c->serializer);
|
||||
if (unlikely (!record)) return false;
|
||||
auto snap = c->serializer->snapshot ();
|
||||
if (record->offset.serialize_subset (c, this->get_offset (i), this, out, &record_list_context))
|
||||
{
|
||||
record->tag = this->get_tag(i);
|
||||
continue;
|
||||
}
|
||||
out->pop ();
|
||||
c->serializer->revert (snap);
|
||||
}
|
||||
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -262,7 +313,6 @@ struct Script;
|
|||
struct LangSys;
|
||||
struct Feature;
|
||||
|
||||
|
||||
struct LangSys
|
||||
{
|
||||
unsigned int get_feature_count () const
|
||||
|
@ -329,15 +379,33 @@ struct Script
|
|||
bool has_default_lang_sys () const { return defaultLangSys != 0; }
|
||||
const LangSys& get_default_lang_sys () const { return this+defaultLangSys; }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
bool subset (hb_subset_context_t *c, RecordList_subset_context_t *record_list_context) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
if (unlikely (!out)) return_trace (false);
|
||||
if (!record_list_context->visitScript ()) return_trace (false);
|
||||
|
||||
auto *out = c->serializer->start_embed (*this);
|
||||
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
|
||||
|
||||
out->defaultLangSys.serialize_copy (c->serializer, defaultLangSys, this, out);
|
||||
unsigned int count = langSys.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
out->langSys.arrayZ[i].offset.serialize_copy (c->serializer, langSys[i].offset, this, out);
|
||||
|
||||
for (const auto &src: langSys)
|
||||
{
|
||||
if (!record_list_context->visitLangSys ()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
auto snap = c->serializer->snapshot ();
|
||||
auto *lang_sys = c->serializer->embed (src);
|
||||
|
||||
if (likely(lang_sys)
|
||||
&& lang_sys->offset.serialize_copy (c->serializer, src.offset, this, out))
|
||||
{
|
||||
out->langSys.len++;
|
||||
continue;
|
||||
}
|
||||
c->serializer->revert (snap);
|
||||
}
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
|
@ -614,7 +682,7 @@ struct Feature
|
|||
const FeatureParams &get_feature_params () const
|
||||
{ return this+featureParams; }
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
bool subset (hb_subset_context_t *c, RecordList_subset_context_t *r) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
auto *out = c->serializer->embed (*this);
|
||||
|
@ -1127,6 +1195,23 @@ struct Coverage
|
|||
}
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
|
||||
auto it =
|
||||
+ iter ()
|
||||
| hb_filter (glyphset)
|
||||
| hb_map_retains_sorting (glyph_map)
|
||||
;
|
||||
|
||||
bool ret = bool (it);
|
||||
Coverage_serialize (c->serializer, it);
|
||||
return_trace (ret);
|
||||
}
|
||||
|
||||
bool sanitize (hb_sanitize_context_t *c) const
|
||||
{
|
||||
TRACE_SANITIZE (this);
|
||||
|
@ -1245,15 +1330,51 @@ struct Coverage
|
|||
DEFINE_SIZE_UNION (2, format);
|
||||
};
|
||||
|
||||
template<typename Iterator>
|
||||
static inline void
|
||||
Coverage_serialize (hb_serialize_context_t *c,
|
||||
Iterator it)
|
||||
{ c->start_embed<Coverage> ()->serialize (c, it); }
|
||||
|
||||
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
|
||||
const hb_set_t &glyphset,
|
||||
const hb_map_t &gid_klass_map,
|
||||
hb_sorted_vector_t<HBGlyphID> glyphs,
|
||||
hb_sorted_vector_t<unsigned> klasses,
|
||||
hb_map_t *klass_map /*INOUT*/)
|
||||
{
|
||||
bool has_no_match = glyphset.get_population () > gid_klass_map.get_population ();
|
||||
|
||||
hb_map_t m;
|
||||
if (!klass_map) klass_map = &m;
|
||||
|
||||
if (has_no_match) klass_map->set (0, 0);
|
||||
unsigned idx = klass_map->has (0) ? 1 : 0;
|
||||
for (const unsigned k: klasses.iter ())
|
||||
{
|
||||
if (klass_map->has (k)) continue;
|
||||
klass_map->set (k, idx);
|
||||
idx++;
|
||||
}
|
||||
|
||||
auto it =
|
||||
+ glyphs.iter ()
|
||||
| hb_map_retains_sorting ([&] (const HBGlyphID& gid) -> hb_pair_t<hb_codepoint_t, HBUINT16>
|
||||
{
|
||||
HBUINT16 new_klass;
|
||||
new_klass = klass_map->get (gid_klass_map[gid]);
|
||||
return hb_pair ((hb_codepoint_t)gid, new_klass);
|
||||
})
|
||||
;
|
||||
|
||||
c->propagate_error (glyphs, klasses);
|
||||
ClassDef_serialize (c, it);
|
||||
}
|
||||
|
||||
/*
|
||||
* Class Definition Table
|
||||
*/
|
||||
|
||||
static inline void ClassDef_serialize (hb_serialize_context_t *c,
|
||||
hb_array_t<const HBGlyphID> glyphs,
|
||||
hb_array_t<const HBUINT16> klasses);
|
||||
|
||||
struct ClassDefFormat1
|
||||
{
|
||||
friend struct ClassDef;
|
||||
|
@ -1264,53 +1385,53 @@ struct ClassDefFormat1
|
|||
return classValue[(unsigned int) (glyph_id - startGlyph)];
|
||||
}
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
hb_array_t<const HBGlyphID> glyphs,
|
||||
hb_array_t<const HBUINT16> klasses)
|
||||
Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
|
||||
if (unlikely (!glyphs))
|
||||
if (unlikely (!it))
|
||||
{
|
||||
startGlyph = 0;
|
||||
classValue.len = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
hb_codepoint_t glyph_min = +glyphs | hb_reduce (hb_min, 0xFFFFu);
|
||||
hb_codepoint_t glyph_max = +glyphs | hb_reduce (hb_max, 0u);
|
||||
|
||||
startGlyph = glyph_min;
|
||||
c->check_assign (classValue.len, glyph_max - glyph_min + 1);
|
||||
if (unlikely (!c->extend (classValue))) return_trace (false);
|
||||
|
||||
for (unsigned int i = 0; i < glyphs.length; i++)
|
||||
classValue[glyphs[i] - glyph_min] = klasses[i];
|
||||
|
||||
startGlyph = (*it).first;
|
||||
classValue.serialize (c, + it
|
||||
| hb_map (hb_second));
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
|
||||
hb_sorted_vector_t<HBGlyphID> glyphs;
|
||||
hb_vector_t<HBUINT16> klasses;
|
||||
hb_sorted_vector_t<unsigned> orig_klasses;
|
||||
hb_map_t gid_org_klass_map;
|
||||
|
||||
hb_codepoint_t start = startGlyph;
|
||||
hb_codepoint_t end = start + classValue.len;
|
||||
for (hb_codepoint_t g = start; g < end; g++)
|
||||
for (const hb_codepoint_t gid : + hb_range (start, end)
|
||||
| hb_filter (glyphset))
|
||||
{
|
||||
if (!glyphset.has (g)) continue;
|
||||
unsigned int value = classValue[g - start];
|
||||
if (!value) continue;
|
||||
glyphs.push(glyph_map[g]);
|
||||
klasses.push(value);
|
||||
unsigned klass = classValue[gid - start];
|
||||
if (!klass) continue;
|
||||
|
||||
glyphs.push (glyph_map[gid]);
|
||||
gid_org_klass_map.set (glyph_map[gid], klass);
|
||||
orig_klasses.push (klass);
|
||||
}
|
||||
c->serializer->propagate_error (glyphs, klasses);
|
||||
ClassDef_serialize (c->serializer, glyphs, klasses);
|
||||
|
||||
ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
|
||||
glyphs, orig_klasses, klass_map);
|
||||
return_trace ((bool) glyphs);
|
||||
}
|
||||
|
||||
|
@ -1400,70 +1521,89 @@ struct ClassDefFormat2
|
|||
return rangeRecord.bsearch (glyph_id).value;
|
||||
}
|
||||
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
hb_array_t<const HBGlyphID> glyphs,
|
||||
hb_array_t<const HBUINT16> klasses)
|
||||
Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
|
||||
if (unlikely (!glyphs))
|
||||
if (unlikely (!it))
|
||||
{
|
||||
rangeRecord.len = 0;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
unsigned int count = glyphs.len ();
|
||||
unsigned int num_ranges = 1;
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
if (glyphs[i - 1] + 1 != glyphs[i] ||
|
||||
klasses[i - 1] != klasses[i])
|
||||
num_ranges++;
|
||||
rangeRecord.len = num_ranges;
|
||||
if (unlikely (!c->extend (rangeRecord))) return_trace (false);
|
||||
unsigned num_ranges = 1;
|
||||
hb_codepoint_t prev_gid = (*it).first;
|
||||
unsigned prev_klass = (*it).second;
|
||||
|
||||
unsigned int range = 0;
|
||||
rangeRecord[range].start = glyphs[0];
|
||||
rangeRecord[range].value = klasses[0];
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
RangeRecord range_rec;
|
||||
range_rec.start = prev_gid;
|
||||
range_rec.end = prev_gid;
|
||||
range_rec.value = prev_klass;
|
||||
|
||||
RangeRecord *record = c->copy (range_rec);
|
||||
if (unlikely (!record)) return_trace (false);
|
||||
|
||||
for (const auto gid_klass_pair : + (++it))
|
||||
{
|
||||
if (glyphs[i - 1] + 1 != glyphs[i] ||
|
||||
klasses[i - 1] != klasses[i])
|
||||
hb_codepoint_t cur_gid = gid_klass_pair.first;
|
||||
unsigned cur_klass = gid_klass_pair.second;
|
||||
|
||||
if (cur_gid != prev_gid + 1 ||
|
||||
cur_klass != prev_klass)
|
||||
{
|
||||
rangeRecord[range].end = glyphs[i - 1];
|
||||
range++;
|
||||
rangeRecord[range].start = glyphs[i];
|
||||
rangeRecord[range].value = klasses[i];
|
||||
if (unlikely (!record)) break;
|
||||
record->end = prev_gid;
|
||||
num_ranges++;
|
||||
|
||||
range_rec.start = cur_gid;
|
||||
range_rec.end = cur_gid;
|
||||
range_rec.value = cur_klass;
|
||||
|
||||
record = c->copy (range_rec);
|
||||
}
|
||||
|
||||
prev_klass = cur_klass;
|
||||
prev_gid = cur_gid;
|
||||
}
|
||||
rangeRecord[range].end = glyphs[count - 1];
|
||||
|
||||
if (likely (record)) record->end = prev_gid;
|
||||
rangeRecord.len = num_ranges;
|
||||
return_trace (true);
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
const hb_set_t &glyphset = *c->plan->glyphset ();
|
||||
const hb_map_t &glyph_map = *c->plan->glyph_map;
|
||||
hb_vector_t<HBGlyphID> glyphs;
|
||||
hb_vector_t<HBUINT16> klasses;
|
||||
|
||||
unsigned int count = rangeRecord.len;
|
||||
for (unsigned int i = 0; i < count; i++)
|
||||
hb_sorted_vector_t<HBGlyphID> glyphs;
|
||||
hb_sorted_vector_t<unsigned> orig_klasses;
|
||||
hb_map_t gid_org_klass_map;
|
||||
|
||||
unsigned count = rangeRecord.len;
|
||||
for (unsigned i = 0; i < count; i++)
|
||||
{
|
||||
unsigned int value = rangeRecord[i].value;
|
||||
if (!value) continue;
|
||||
unsigned klass = rangeRecord[i].value;
|
||||
if (!klass) continue;
|
||||
hb_codepoint_t start = rangeRecord[i].start;
|
||||
hb_codepoint_t end = rangeRecord[i].end + 1;
|
||||
for (hb_codepoint_t g = start; g < end; g++)
|
||||
{
|
||||
if (!glyphset.has (g)) continue;
|
||||
glyphs.push (glyph_map[g]);
|
||||
klasses.push (value);
|
||||
gid_org_klass_map.set (glyph_map[g], klass);
|
||||
orig_klasses.push (klass);
|
||||
}
|
||||
}
|
||||
c->serializer->propagate_error (glyphs, klasses);
|
||||
ClassDef_serialize (c->serializer, glyphs, klasses);
|
||||
|
||||
ClassDef_remap_and_serialize (c->serializer, glyphset, gid_org_klass_map,
|
||||
glyphs, orig_klasses, klass_map);
|
||||
return_trace ((bool) glyphs);
|
||||
}
|
||||
|
||||
|
@ -1560,26 +1700,37 @@ struct ClassDef
|
|||
}
|
||||
}
|
||||
|
||||
bool serialize (hb_serialize_context_t *c,
|
||||
hb_array_t<const HBGlyphID> glyphs,
|
||||
hb_array_t<const HBUINT16> klasses)
|
||||
template<typename Iterator,
|
||||
hb_requires (hb_is_iterator (Iterator))>
|
||||
bool serialize (hb_serialize_context_t *c, Iterator it)
|
||||
{
|
||||
TRACE_SERIALIZE (this);
|
||||
if (unlikely (!c->extend_min (*this))) return_trace (false);
|
||||
|
||||
unsigned int format = 2;
|
||||
if (likely (glyphs))
|
||||
unsigned format = 2;
|
||||
if (likely (it))
|
||||
{
|
||||
hb_codepoint_t glyph_min = +glyphs | hb_reduce (hb_min, 0xFFFFu);
|
||||
hb_codepoint_t glyph_max = +glyphs | hb_reduce (hb_max, 0u);
|
||||
hb_codepoint_t glyph_min = (*it).first;
|
||||
hb_codepoint_t glyph_max = + it
|
||||
| hb_map (hb_first)
|
||||
| hb_reduce (hb_max, 0u);
|
||||
|
||||
unsigned int count = glyphs.len ();
|
||||
unsigned int num_ranges = 1;
|
||||
for (unsigned int i = 1; i < count; i++)
|
||||
if (glyphs[i - 1] + 1 != glyphs[i] ||
|
||||
klasses[i - 1] != klasses[i])
|
||||
unsigned num_ranges = 1;
|
||||
hb_codepoint_t prev_gid = glyph_min;
|
||||
unsigned prev_klass = (*it).second;
|
||||
|
||||
for (const auto gid_klass_pair : it)
|
||||
{
|
||||
hb_codepoint_t cur_gid = gid_klass_pair.first;
|
||||
unsigned cur_klass = gid_klass_pair.second;
|
||||
if (cur_gid != prev_gid + 1 ||
|
||||
cur_klass != prev_klass)
|
||||
num_ranges++;
|
||||
|
||||
prev_gid = cur_gid;
|
||||
prev_klass = cur_klass;
|
||||
}
|
||||
|
||||
if (1 + (glyph_max - glyph_min + 1) < num_ranges * 3)
|
||||
format = 1;
|
||||
}
|
||||
|
@ -1587,18 +1738,19 @@ struct ClassDef
|
|||
|
||||
switch (u.format)
|
||||
{
|
||||
case 1: return_trace (u.format1.serialize (c, glyphs, klasses));
|
||||
case 2: return_trace (u.format2.serialize (c, glyphs, klasses));
|
||||
case 1: return_trace (u.format1.serialize (c, it));
|
||||
case 2: return_trace (u.format2.serialize (c, it));
|
||||
default:return_trace (false);
|
||||
}
|
||||
}
|
||||
|
||||
bool subset (hb_subset_context_t *c) const
|
||||
bool subset (hb_subset_context_t *c,
|
||||
hb_map_t *klass_map = nullptr /*OUT*/) const
|
||||
{
|
||||
TRACE_SUBSET (this);
|
||||
switch (u.format) {
|
||||
case 1: return_trace (u.format1.subset (c));
|
||||
case 2: return_trace (u.format2.subset (c));
|
||||
case 1: return_trace (u.format1.subset (c, klass_map));
|
||||
case 2: return_trace (u.format2.subset (c, klass_map));
|
||||
default:return_trace (false);
|
||||
}
|
||||
}
|
||||
|
@ -1665,10 +1817,10 @@ struct ClassDef
|
|||
DEFINE_SIZE_UNION (2, format);
|
||||
};
|
||||
|
||||
template<typename Iterator>
|
||||
static inline void ClassDef_serialize (hb_serialize_context_t *c,
|
||||
hb_array_t<const HBGlyphID> glyphs,
|
||||
hb_array_t<const HBUINT16> klasses)
|
||||
{ c->start_embed<ClassDef> ()->serialize (c, glyphs, klasses); }
|
||||
Iterator it)
|
||||
{ c->start_embed<ClassDef> ()->serialize (c, it); }
|
||||
|
||||
|
||||
/*
|
||||
|
|
|
@ -544,8 +544,7 @@ struct SinglePosFormat1
if (unlikely (!c->extend_min (*this))) return;
if (unlikely (!c->check_assign (valueFormat, valFormat))) return;

for (const auto &_ : hb_second (*it))
c->copy (_);
c->copy_all (hb_second (*it));

auto glyphs =
+ it

@ -558,7 +557,7 @@ struct SinglePosFormat1
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

auto it =

@ -632,9 +631,7 @@ struct SinglePosFormat2
if (unlikely (!c->check_assign (valueFormat, valFormat))) return;
if (unlikely (!c->check_assign (valueCount, it.len ()))) return;

for (const auto iter : it)
for (const auto &_ : iter.second)
c->copy (_);
for (auto iter : it) c->copy_all (iter.second);

auto glyphs =
+ it

@ -647,7 +644,7 @@ struct SinglePosFormat2
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
const hb_set_t &glyphset = *c->plan->glyphset_gsub ();
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

unsigned sub_length = valueFormat.get_len ();

@ -761,6 +758,18 @@ struct PairValueRecord
{
friend struct PairSet;

bool serialize (hb_serialize_context_t *c,
unsigned length,
const hb_map_t &glyph_map) const
{
TRACE_SERIALIZE (this);
auto *out = c->start_embed (*this);
if (unlikely (!c->extend_min (out))) return_trace (false);

out->secondGlyph = glyph_map[secondGlyph];
return_trace (c->copy (values, length));
}

protected:
HBGlyphID secondGlyph; /* GlyphID of second glyph in the
* pair--first glyph is listed in the

@ -846,6 +855,37 @@ struct PairSet
return_trace (false);
}

bool subset (hb_subset_context_t *c,
const ValueFormat valueFormats[2]) const
{
TRACE_SUBSET (this);
auto snap = c->serializer->snapshot ();

auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->len = 0;

const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

unsigned len1 = valueFormats[0].get_len ();
unsigned len2 = valueFormats[1].get_len ();
unsigned record_size = HBUINT16::static_size + Value::static_size * (len1 + len2);

const PairValueRecord *record = &firstPairValueRecord;
unsigned count = len, num = 0;
for (unsigned i = 0; i < count; i++)
{
if (!glyphset.has (record->secondGlyph)) continue;
if (record->serialize (c->serializer, len1 + len2, glyph_map)) num++;
record = &StructAtOffset<const PairValueRecord> (record, record_size);
}

out->len = num;
if (!num) c->serializer->revert (snap);
return_trace (num);
}
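PairSet stores its PairValueRecords as a flat array whose stride depends on the two value formats, so the subsetter walks it with explicit pointer arithmetic and keeps only records whose second glyph survives. A rough standalone sketch of that walk over a byte buffer, with a made-up record layout (a 16-bit glyph id followed by an opaque payload; byte-order handling and glyph remapping are omitted for brevity):

#include <cstdint>
#include <cstring>
#include <unordered_set>
#include <vector>

// records: 'count' records of (uint16 second_glyph + payload) packed back to back,
// each 'record_size' bytes.  Returns the surviving records, in their original order.
static std::vector<uint8_t>
filter_pair_records (const uint8_t *records, unsigned count, unsigned record_size,
                     const std::unordered_set<uint16_t> &kept_glyphs)
{
  std::vector<uint8_t> out;
  const uint8_t *p = records;
  for (unsigned i = 0; i < count; i++, p += record_size)  // fixed-stride walk
  {
    uint16_t second_glyph;
    std::memcpy (&second_glyph, p, sizeof second_glyph);
    if (!kept_glyphs.count (second_glyph)) continue;      // dropped by the subset plan
    out.insert (out.end (), p, p + record_size);          // keep the whole record
  }
  return out;
}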
struct sanitize_closure_t
{
const void *base;

@ -919,8 +959,43 @@ struct PairPosFormat1
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
// TODO(subset)
return_trace (false);

const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->valueFormat[0] = valueFormat[0];
out->valueFormat[1] = valueFormat[1];

hb_sorted_vector_t<hb_codepoint_t> new_coverage;

+ hb_zip (this+coverage, pairSet)
| hb_filter (glyphset, hb_first)
| hb_filter ([this, c, out] (const OffsetTo<PairSet>& _)
{
auto *o = out->pairSet.serialize_append (c->serializer);
if (unlikely (!o)) return false;
auto snap = c->serializer->snapshot ();
bool ret = o->serialize_subset (c, _, this, out, valueFormat);
if (!ret)
{
out->pairSet.pop ();
c->serializer->revert (snap);
}
return ret;
},
hb_second)
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;

out->coverage.serialize (c->serializer, out)
.serialize (c->serializer, new_coverage.iter ());

return_trace (bool (new_coverage));
}
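The iterator pipeline above zips the Coverage glyphs with their PairSets, keeps only entries whose first glyph is retained and whose PairSet subsets to something non-empty, and collects the remapped glyph ids as the new Coverage. A plain-loop sketch of the same shape, with hypothetical callback and map types standing in for the HarfBuzz ones (and assuming a monotonic glyph map so the result stays sorted):

#include <cstdint>
#include <functional>
#include <map>
#include <set>
#include <vector>

// coverage[i] pairs with payload index i.  keep_payload(i) is expected to try
// serializing payload i and report whether anything survived.
static std::vector<uint32_t>
rebuild_coverage (const std::vector<uint32_t> &coverage,
                  const std::set<uint32_t> &glyphset,
                  const std::map<uint32_t, uint32_t> &glyph_map,
                  const std::function<bool (size_t)> &keep_payload)
{
  std::vector<uint32_t> new_coverage;
  for (size_t i = 0; i < coverage.size (); i++)
  {
    if (!glyphset.count (coverage[i])) continue;  // first glyph dropped by the plan
    if (!keep_payload (i)) continue;              // payload came out empty; skip it
    new_coverage.push_back (glyph_map.at (coverage[i]));  // remap to the new glyph id
  }
  return new_coverage;
}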
bool sanitize (hb_sanitize_context_t *c) const

@ -1011,8 +1086,49 @@ struct PairPosFormat2
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
// TODO(subset)
return_trace (false);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->valueFormat1 = valueFormat1;
out->valueFormat2 = valueFormat2;

hb_map_t klass1_map;
out->classDef1.serialize_subset (c, classDef1, this, out, &klass1_map);
out->class1Count = klass1_map.get_population ();

hb_map_t klass2_map;
out->classDef2.serialize_subset (c, classDef2, this, out, &klass2_map);
out->class2Count = klass2_map.get_population ();

unsigned record_len = valueFormat1.get_len () + valueFormat2.get_len ();

+ hb_range ((unsigned) class1Count)
| hb_filter (klass1_map)
| hb_apply ([&] (const unsigned class1_idx)
{
+ hb_range ((unsigned) class2Count)
| hb_filter (klass2_map)
| hb_apply ([&] (const unsigned class2_idx)
{
unsigned idx = (class1_idx * (unsigned) class2Count + class2_idx) * record_len;
for (unsigned i = 0; i < record_len; i++)
c->serializer->copy (values[idx+i]);
})
;
})
;

const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

auto it =
+ hb_iter (this+coverage)
| hb_filter (glyphset)
| hb_map_retains_sorting (glyph_map)
;

out->coverage.serialize (c->serializer, out).serialize (c->serializer, it);
return_trace (out->class1Count && out->class2Count && bool (it));
}
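PairPosFormat2 stores one kerning record per (class1, class2) cell, so the subset above walks the matrix row by row and copies only the cells whose classes survived the two ClassDef subsets. A small sketch over a flattened matrix, using plain keep-flags in place of hb_map_t (names are illustrative):

#include <cstdint>
#include <vector>

// values: class1_count * class2_count records, record_len ints each, row-major.
// keep1/keep2 mark which old class indices survive.  The output keeps the
// surviving rows and columns in their original relative order.
static std::vector<int>
subset_class_matrix (const std::vector<int> &values,
                     unsigned class1_count, unsigned class2_count, unsigned record_len,
                     const std::vector<bool> &keep1, const std::vector<bool> &keep2)
{
  std::vector<int> out;
  for (unsigned c1 = 0; c1 < class1_count; c1++)
  {
    if (!keep1[c1]) continue;
    for (unsigned c2 = 0; c2 < class2_count; c2++)
    {
      if (!keep2[c2]) continue;
      unsigned idx = (c1 * class2_count + c2) * record_len;  // cell offset
      out.insert (out.end (), values.begin () + idx, values.begin () + idx + record_len);
    }
  }
  return out;
}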
bool sanitize (hb_sanitize_context_t *c) const


@ -363,7 +363,11 @@ struct hb_ot_apply_context_t :
matcher.set_syllable (start_index_ == c->buffer->idx ? c->buffer->cur().syllable () : 0);
}

void reject () { num_items++; match_glyph_data--; }
void reject ()
{
num_items++;
if (match_glyph_data) match_glyph_data--;
}

matcher_t::may_skip_t
may_skip (const hb_glyph_info_t &info) const

@ -387,7 +391,7 @@ struct hb_ot_apply_context_t :
skip == matcher_t::SKIP_NO))
{
num_items--;
match_glyph_data++;
if (match_glyph_data) match_glyph_data++;
return true;
}

@ -414,7 +418,7 @@ struct hb_ot_apply_context_t :
skip == matcher_t::SKIP_NO))
{
num_items--;
match_glyph_data++;
if (match_glyph_data) match_glyph_data++;
return true;
}

@ -712,11 +716,9 @@ static inline bool intersects_array (const hb_set_t *glyphs,
intersects_func_t intersects_func,
const void *intersects_data)
{
return
+ hb_iter (values, count)
| hb_map ([&] (const HBUINT16 &_) { return intersects_func (glyphs, _, intersects_data); })
| hb_any
;
for (const HBUINT16 &_ : + hb_iter (values, count))
if (intersects_func (glyphs, _, intersects_data)) return true;
return false;
}

@ -2002,6 +2004,83 @@ struct ChainRule
lookup.arrayZ, lookup_context));
}

template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
void serialize_array (hb_serialize_context_t *c,
HBUINT16 len,
Iterator it) const
{
c->copy (len);
for (const auto g : it)
{
HBUINT16 gid;
gid = g;
c->copy (gid);
}
}

ChainRule* copy (hb_serialize_context_t *c,
const hb_map_t *backtrack_map,
const hb_map_t *input_map = nullptr,
const hb_map_t *lookahead_map = nullptr) const
{
TRACE_SERIALIZE (this);
auto *out = c->start_embed (this);
if (unlikely (!out)) return_trace (nullptr);

const hb_map_t *mapping = backtrack_map;
serialize_array (c, backtrack.len, + backtrack.iter ()
| hb_map (mapping));

const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
if (input_map) mapping = input_map;
serialize_array (c, input.lenP1, + input.iter ()
| hb_map (mapping));

const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);
if (lookahead_map) mapping = lookahead_map;
serialize_array (c, lookahead.len, + lookahead.iter ()
| hb_map (mapping));

const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
c->copy (lookup);

return_trace (out);
}
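ChainRule::copy rewrites the backtrack, input, and lookahead id arrays through whichever map applies while emitting them, and the subset below first verifies with hb_all that every id is present in the map before committing. A plain sketch of that check-then-remap step, using std::unordered_map (illustrative names, not the HarfBuzz API):

#include <cstdint>
#include <unordered_map>
#include <vector>

// Remap every id in 'ids' through 'mapping'.  Returns false (and leaves 'out'
// untouched) if any id is missing, mirroring the hb_all () guard before copy ().
static bool
remap_sequence (const std::vector<uint16_t> &ids,
                const std::unordered_map<uint16_t, uint16_t> &mapping,
                std::vector<uint16_t> &out)
{
  for (uint16_t id : ids)
    if (!mapping.count (id))
      return false;          // a referenced glyph/class did not survive the subset

  out.reserve (out.size () + ids.size ());
  for (uint16_t id : ids)
    out.push_back (mapping.at (id));
  return true;
}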
bool subset (hb_subset_context_t *c,
const hb_map_t *backtrack_map = nullptr,
const hb_map_t *input_map = nullptr,
const hb_map_t *lookahead_map = nullptr) const
{
TRACE_SUBSET (this);

const HeadlessArrayOf<HBUINT16> &input = StructAfter<HeadlessArrayOf<HBUINT16>> (backtrack);
const ArrayOf<HBUINT16> &lookahead = StructAfter<ArrayOf<HBUINT16>> (input);

if (!backtrack_map)
{
const hb_set_t &glyphset = *c->plan->glyphset ();
if (!hb_all (backtrack, glyphset) ||
!hb_all (input, glyphset) ||
!hb_all (lookahead, glyphset))
return_trace (false);

copy (c->serializer, c->plan->glyph_map);
}
else
{
if (!hb_all (backtrack, backtrack_map) ||
!hb_all (input, input_map) ||
!hb_all (lookahead, lookahead_map))
return_trace (false);

copy (c->serializer, backtrack_map, input_map, lookahead_map);
}

return_trace (true);
}

bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);

@ -2083,6 +2162,40 @@ struct ChainRuleSet
;
}

bool subset (hb_subset_context_t *c,
const hb_map_t *backtrack_klass_map = nullptr,
const hb_map_t *input_klass_map = nullptr,
const hb_map_t *lookahead_klass_map = nullptr) const
{
TRACE_SUBSET (this);

auto snap = c->serializer->snapshot ();
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);

for (const OffsetTo<ChainRule>& _ : rule)
{
if (!_) continue;
auto *o = out->rule.serialize_append (c->serializer);
if (unlikely (!o)) continue;

auto o_snap = c->serializer->snapshot ();
if (!o->serialize_subset (c, _, this, out,
backtrack_klass_map,
input_klass_map,
lookahead_klass_map))
{
out->rule.pop ();
c->serializer->revert (o_snap);
}
}

bool ret = bool (out->rule);
if (!ret) c->serializer->revert (snap);

return_trace (ret);
}

bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);

@ -2175,8 +2288,25 @@ struct ChainContextFormat1
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
// TODO(subset)
return_trace (false);
const hb_set_t &glyphset = *c->plan->glyphset ();
const hb_map_t &glyph_map = *c->plan->glyph_map;

auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;

hb_sorted_vector_t<hb_codepoint_t> new_coverage;
+ hb_zip (this+coverage, ruleSet)
| hb_filter (glyphset, hb_first)
| hb_filter (subset_offset_array (c, out->ruleSet, this, out), hb_second)
| hb_map (hb_first)
| hb_map (glyph_map)
| hb_sink (new_coverage)
;

out->coverage.serialize (c->serializer, out)
.serialize (c->serializer, new_coverage.iter ());
return_trace (bool (new_coverage));
}

bool sanitize (hb_sanitize_context_t *c) const

@ -2314,8 +2444,54 @@ struct ChainContextFormat2
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
// TODO(subset)
return_trace (false);
auto *out = c->serializer->start_embed (*this);
if (unlikely (!c->serializer->extend_min (out))) return_trace (false);
out->format = format;
out->coverage.serialize_subset (c, coverage, this, out);

hb_map_t backtrack_klass_map;
out->backtrackClassDef.serialize_subset (c, backtrackClassDef, this, out, &backtrack_klass_map);

// subset inputClassDef based on glyphs survived in Coverage subsetting
hb_map_t input_klass_map;
out->inputClassDef.serialize_subset (c, inputClassDef, this, out, &input_klass_map);

hb_map_t lookahead_klass_map;
out->lookaheadClassDef.serialize_subset (c, lookaheadClassDef, this, out, &lookahead_klass_map);

hb_vector_t<unsigned> rulesets;
bool ret = true;
for (const OffsetTo<ChainRuleSet>& _ : + hb_enumerate (ruleSet)
| hb_filter (input_klass_map, hb_first)
| hb_map (hb_second))
{
auto *o = out->ruleSet.serialize_append (c->serializer);
if (unlikely (!o))
{
ret = false;
break;
}
if (!o->serialize_subset (c, _, this, out,
&backtrack_klass_map,
&input_klass_map,
&lookahead_klass_map))
{
rulesets.push (0);
}
else rulesets.push (1);
}

if (!ret) return_trace (ret);

//prune empty trailing ruleSets
unsigned count = rulesets.length;
while (count > 0 && rulesets[count-1] == 0)
{
out->ruleSet.pop ();
count--;
}

return_trace (bool (out->ruleSet));
}
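Empty rule sets in the middle of the array have to stay, because the index itself is the class number, but trailing empty ones can be dropped; that is what the loop over 'rulesets' does. The same idea in isolation (assuming rule_sets and flags start out the same length):

#include <vector>

// flags[i] is 1 if rule set i serialized anything, 0 if it came out empty.
// Only the empty entries at the very end may be removed; interior empties are
// kept as placeholders so earlier indices keep their meaning.
static void
prune_trailing_empty (std::vector<int> &rule_sets, const std::vector<int> &flags)
{
  size_t count = flags.size ();
  while (count > 0 && flags[count - 1] == 0)
  {
    rule_sets.pop_back ();
    count--;
  }
}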
bool sanitize (hb_sanitize_context_t *c) const

@ -2457,11 +2633,46 @@ struct ChainContextFormat3
lookup.len, lookup.arrayZ, lookup_context));
}

template<typename Iterator,
hb_requires (hb_is_iterator (Iterator))>
bool serialize_coverage_offsets (hb_subset_context_t *c,
Iterator it,
const void* src_base,
const void* dst_base) const
{
TRACE_SERIALIZE (this);
auto *out = c->serializer->start_embed<OffsetArrayOf<Coverage>> ();

if (unlikely (!c->serializer->allocate_size<HBUINT16> (HBUINT16::static_size))) return_trace (false);

+ it
| hb_apply (subset_offset_array (c, *out, src_base, dst_base))
;

return_trace (out->len);
}

bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
// TODO(subset)

auto *out = c->serializer->start_embed (this);
if (unlikely (!out)) return_trace (false);
if (unlikely (!c->serializer->embed (this->format))) return_trace (false);

if (!serialize_coverage_offsets (c, backtrack.iter (), this, out))
return_trace (false);

const OffsetArrayOf<Coverage> &input = StructAfter<OffsetArrayOf<Coverage>> (backtrack);
if (!serialize_coverage_offsets (c, input.iter (), this, out))
return_trace (false);

const OffsetArrayOf<Coverage> &lookahead = StructAfter<OffsetArrayOf<Coverage>> (input);
if (!serialize_coverage_offsets (c, lookahead.iter (), this, out))
return_trace (false);

const ArrayOf<LookupRecord> &lookup = StructAfter<ArrayOf<LookupRecord>> (lookahead);
return_trace (c->serializer->copy (lookup));
}

bool sanitize (hb_sanitize_context_t *c) const

@ -1223,7 +1223,7 @@ hb_ot_layout_collect_lookups (hb_face_t *face,
* @lookup_index: The index of the feature lookup to query
* @glyphs_before: (out): Array of glyphs preceding the substitution range
* @glyphs_input: (out): Array of input glyphs that would be substituted by the lookup
* @glyphs_after: (out): Array of glyphs following the substition range
* @glyphs_after: (out): Array of glyphs following the substitution range
* @glyphs_output: (out): Array of glyphs that would be the substitued output of the lookup
*
* Fetches a list of all glyphs affected by the specified lookup in the

@ -1957,7 +1957,7 @@ hb_ot_layout_substitute_lookup (OT::hb_ot_apply_context_t *c,
*
* Fetches a baseline value from the face.
*
* Return value: if found baseline value in the the font.
* Return value: if found baseline value in the font.
*
* Since: 2.6.0
**/

@ -191,7 +191,7 @@ struct name

const void *dst_string_pool = &(this + this->stringOffset);

for (const auto &_ : it) c->copy (_, src_string_pool, dst_string_pool);
c->copy_all (it, src_string_pool, dst_string_pool);

if (unlikely (c->ran_out_of_room)) return_trace (false);

@ -35,8 +35,6 @@
#undef HB_STRING_ARRAY_LIST
#undef HB_STRING_ARRAY_NAME

#define NUM_FORMAT1_NAMES 258

/*
* post -- PostScript
* https://docs.microsoft.com/en-us/typography/opentype/spec/post

@ -185,7 +183,7 @@ struct post
unsigned int get_glyph_count () const
{
if (version == 0x00010000)
return NUM_FORMAT1_NAMES;
return format1_names_length;

if (version == 0x00020000)
return glyphNameIndex->len;

@ -213,7 +211,7 @@ struct post
{
if (version == 0x00010000)
{
if (glyph >= NUM_FORMAT1_NAMES)
if (glyph >= format1_names_length)
return hb_bytes_t ();

return format1_names (glyph);

@ -223,9 +221,9 @@ struct post
return hb_bytes_t ();

unsigned int index = glyphNameIndex->arrayZ[glyph];
if (index < NUM_FORMAT1_NAMES)
if (index < format1_names_length)
return format1_names (index);
index -= NUM_FORMAT1_NAMES;
index -= format1_names_length;

if (index >= index_to_offset.length)
return hb_bytes_t ();

@ -2,15 +2,16 @@
/*
* The following functions are generated by running:
*
* ./gen-vowel-constraints.py use Scripts.txt
* ./gen-vowel-constraints.py ms-use/IndicShapingInvalidCluster.txt Scripts.txt
*
* on files with these headers:
*
* # Copied from https://docs.microsoft.com/en-us/typography/script-development/use
* # On October 23, 2018; with documentd dated 02/07/2018.
* # IndicShapingInvalidCluster.txt
* # Date: 2015-03-12, 21:17:00 GMT [AG]
* # Date: 2019-11-08, 23:22:00 GMT [AG]
*
* # Scripts-12.0.0.txt
* # Date: 2019-01-28, 22:16:47 GMT
* # Scripts-12.1.0.txt
* # Date: 2019-04-01, 09:10:42 GMT
*/

#include "hb.hh"

@ -211,6 +212,22 @@ _hb_preprocess_text_vowel_constraints (const hb_ot_shape_plan_t *plan HB_UNUSED,
processed = true;
break;

case HB_SCRIPT_TAMIL:
for (buffer->idx = 0; buffer->idx + 1 < count && buffer->successful;)
{
bool matched = false;
if (0x0B85u == buffer->cur ().codepoint &&
0x0BC2u == buffer->cur (1).codepoint)
{
buffer->next_glyph ();
_output_dotted_circle (buffer);
}
buffer->next_glyph ();
if (matched) _output_with_dotted_circle (buffer);
}
processed = true;
break;

case HB_SCRIPT_TELUGU:
for (buffer->idx = 0; buffer->idx + 1 < count && buffer->successful;)
{

@ -166,7 +166,7 @@ hb_ot_shape_planner_t::compile (hb_ot_shape_plan_t &plan,
plan.apply_kerx = true;
#endif

if (!plan.apply_kerx && !has_gpos_kern)
if (!plan.apply_kerx && (!has_gpos_kern || !plan.apply_gpos))
{
/* Apparently Apple applies kerx if GPOS kern was not applied. */
#ifndef HB_NO_AAT_SHAPE

@ -78,23 +78,6 @@ struct contour_point_vector_t : hb_vector_t<contour_point_t>
}
};

struct range_checker_t
{
range_checker_t (const void *data_, unsigned int length_)
: data ((const char *) data_), length (length_) {}

template <typename T>
bool in_range (const T *p, unsigned int size = T::static_size) const
{
return ((const char *) p) >= data
&& ((const char *) p + size) <= data + length;
}

protected:
const char *data;
const unsigned int length;
};

struct Tuple : UnsizedArrayOf<F2DOT14> {};

struct TuppleIndex : HBUINT16

@ -233,10 +216,10 @@ struct GlyphVarData
{
if (var_data->has_shared_point_numbers ())
{
range_checker_t checker (var_data, length);
hb_bytes_t bytes ((const char *) var_data, length);
const HBUINT8 *base = &(var_data+var_data->data);
const HBUINT8 *p = base;
if (!unpack_points (p, shared_indices, checker)) return false;
if (!unpack_points (p, shared_indices, bytes)) return false;
data_offset = p - base;
}
return true;

@ -292,7 +275,7 @@ struct GlyphVarData

static bool unpack_points (const HBUINT8 *&p /* IN/OUT */,
hb_vector_t<unsigned int> &points /* OUT */,
const range_checker_t &check)
const hb_bytes_t &bytes)
{
enum packed_point_flag_t
{

@ -300,12 +283,12 @@ struct GlyphVarData
POINT_RUN_COUNT_MASK = 0x7F
};

if (unlikely (!check.in_range (p))) return false;
if (unlikely (!bytes.in_range (p))) return false;

uint16_t count = *p++;
if (count & POINTS_ARE_WORDS)
{
if (unlikely (!check.in_range (p))) return false;
if (unlikely (!bytes.in_range (p))) return false;
count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
}
points.resize (count);

@ -314,7 +297,7 @@ struct GlyphVarData
uint16_t i = 0;
while (i < count)
{
if (unlikely (!check.in_range (p))) return false;
if (unlikely (!bytes.in_range (p))) return false;
uint16_t j;
uint8_t control = *p++;
uint16_t run_count = (control & POINT_RUN_COUNT_MASK) + 1;

@ -322,7 +305,7 @@ struct GlyphVarData
{
for (j = 0; j < run_count && i < count; j++, i++)
{
if (unlikely (!check.in_range ((const HBUINT16 *) p)))
if (unlikely (!bytes.in_range ((const HBUINT16 *) p)))
return false;
n += *(const HBUINT16 *)p;
points[i] = n;

@ -333,7 +316,7 @@ struct GlyphVarData
{
for (j = 0; j < run_count && i < count; j++, i++)
{
if (unlikely (!check.in_range (p))) return false;
if (unlikely (!bytes.in_range (p))) return false;
n += *p++;
points[i] = n;
}

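unpack_points follows the gvar packed point-number encoding: a count (one byte, or two when the high bit is set), then runs whose control byte carries the run length in its low seven bits and, in its high bit, whether the packed values are bytes or 16-bit words; values are deltas accumulated into a running point index. A compact standalone decoder over an explicit [p, end) range, written against std::vector as a sketch of the format rather than the HarfBuzz entry point:

#include <cstdint>
#include <vector>

static bool
unpack_point_numbers (const uint8_t *p, const uint8_t *end,
                      std::vector<unsigned> &points)
{
  const unsigned POINTS_ARE_WORDS     = 0x80u;
  const unsigned POINT_RUN_COUNT_MASK = 0x7Fu;

  if (p >= end) return false;
  unsigned count = *p++;
  if (count & POINTS_ARE_WORDS)             // two-byte count
  {
    if (p >= end) return false;
    count = ((count & POINT_RUN_COUNT_MASK) << 8) | *p++;
  }
  points.resize (count);

  unsigned n = 0, i = 0;
  while (i < count)
  {
    if (p >= end) return false;
    uint8_t control = *p++;
    unsigned run_count = (control & POINT_RUN_COUNT_MASK) + 1;
    bool words = control & POINTS_ARE_WORDS;
    for (unsigned j = 0; j < run_count && i < count; j++, i++)
    {
      if (words)
      {
        if (end - p < 2) return false;
        n += unsigned ((p[0] << 8) | p[1]); // big-endian uint16 delta
        p += 2;
      }
      else
      {
        if (p >= end) return false;
        n += *p++;                          // uint8 delta
      }
      points[i] = n;                        // running point index
    }
  }
  return true;
}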
@ -345,7 +328,7 @@ struct GlyphVarData

static bool unpack_deltas (const HBUINT8 *&p /* IN/OUT */,
hb_vector_t<int> &deltas /* IN/OUT */,
const range_checker_t &check)
const hb_bytes_t &bytes)
{
enum packed_delta_flag_t
{

@ -358,7 +341,7 @@ struct GlyphVarData
unsigned int count = deltas.length;
while (i < count)
{
if (unlikely (!check.in_range (p))) return false;
if (unlikely (!bytes.in_range (p))) return false;
uint8_t control = *p++;
unsigned int run_count = (control & DELTA_RUN_COUNT_MASK) + 1;
unsigned int j;

@ -368,7 +351,7 @@ struct GlyphVarData
else if (control & DELTAS_ARE_WORDS)
for (j = 0; j < run_count && i < count; j++, i++)
{
if (unlikely (!check.in_range ((const HBUINT16 *) p)))
if (unlikely (!bytes.in_range ((const HBUINT16 *) p)))
return false;
deltas[i] = *(const HBINT16 *) p;
p += HBUINT16::static_size;

@ -376,7 +359,7 @@ struct GlyphVarData
else
for (j = 0; j < run_count && i < count; j++, i++)
{
if (unlikely (!check.in_range (p)))
if (unlikely (!bytes.in_range (p)))
return false;
deltas[i] = *(const HBINT8 *) p++;
}

@ -611,10 +594,10 @@ struct gvar
if (unlikely (!iterator.in_range (p, length)))
return false;

range_checker_t checker (p, length);
hb_bytes_t bytes ((const char *) p, length);
hb_vector_t<unsigned int> private_indices;
if (iterator.current_tuple->has_private_points () &&
!GlyphVarData::unpack_points (p, private_indices, checker))
!GlyphVarData::unpack_points (p, private_indices, bytes))
return false;
const hb_array_t<unsigned int> &indices = private_indices.length ? private_indices : shared_indices;

@ -622,11 +605,11 @@ struct gvar
unsigned int num_deltas = apply_to_all ? points.length : indices.length;
hb_vector_t<int> x_deltas;
x_deltas.resize (num_deltas);
if (!GlyphVarData::unpack_deltas (p, x_deltas, checker))
if (!GlyphVarData::unpack_deltas (p, x_deltas, bytes))
return false;
hb_vector_t<int> y_deltas;
y_deltas.resize (num_deltas);
if (!GlyphVarData::unpack_deltas (p, y_deltas, checker))
if (!GlyphVarData::unpack_deltas (p, y_deltas, bytes))
return false;

for (unsigned int i = 0; i < deltas.length; i++)

@ -84,7 +84,7 @@ struct VORG
this->defaultVertOriginY = defaultVertOriginY;
this->vertYOrigins.len = it.len ();

for (const auto _ : it) c->copy (_);
c->copy_all (it);
}

bool subset (hb_subset_context_t *c) const

@ -387,6 +387,12 @@ struct hb_serialize_context_t
Type *copy (const Type *src, Ts&&... ds)
{ return copy (*src, hb_forward<Ts> (ds)...); }

template<typename Iterator,
hb_requires (hb_is_iterator (Iterator)),
typename ...Ts>
void copy_all (Iterator it, Ts&&... ds)
{ for (decltype (*it) _ : it) copy (_, hb_forward<Ts> (ds)...); }
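copy_all simply folds the per-item copy () call, forwarding any extra serialization arguments to each element. A generic sketch of the same pattern outside the serializer, with a made-up sink type (the extra argument models the trailing ...ds):

#include <cstdio>
#include <vector>

struct sink_t
{
  std::vector<int> out;
  void copy (int v, int scale = 1) { out.push_back (v * scale); }

  // Copy every element of an iterable, passing the extra arguments through
  // to each individual copy, mirroring the copy_all added above.
  template <typename Iterable, typename... Ts>
  void copy_all (const Iterable &it, Ts... ds)
  { for (const auto &v : it) copy (v, ds...); }
};

int main ()
{
  sink_t s;
  std::vector<int> src {1, 2, 3};
  s.copy_all (src, 10);   // same as calling copy (v, 10) for each v
  std::printf ("%d %d %d\n", s.out[0], s.out[1], s.out[2]);  // 10 20 30
  return 0;
}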
template <typename Type>
hb_serialize_context_t& operator << (const Type &obj) & { embed (obj); return *this; }


@ -135,7 +135,11 @@ struct hb_set_t
unsigned int i = m / ELT_BITS;
unsigned int j = m & ELT_MASK;

const elt_t vv = v[i] & ((elt_t (1) << (j + 1)) - 1);
/* Fancy mask to avoid shifting by elt_t bitsize, which is undefined. */
const elt_t mask = j < 8 * sizeof (elt_t) - 1 ?
((elt_t (1) << (j + 1)) - 1) :
(elt_t) -1;
const elt_t vv = v[i] & mask;
const elt_t *p = &vv;
while (true)
{
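Shifting a value by its full bit width is undefined behaviour in C++, which is exactly the case the new mask avoids when j is the top bit index. A tiny self-contained illustration of the guarded mask:

#include <cstdint>
#include <cstdio>

using elt_t = uint64_t;

// Mask covering bits 0..j inclusive.  When j is the top bit we must not
// compute (1 << 64), which is undefined, so return all ones instead.
static elt_t low_bits_through (unsigned j)
{
  return j < 8 * sizeof (elt_t) - 1 ? ((elt_t (1) << (j + 1)) - 1)
                                    : (elt_t) -1;
}

int main ()
{
  std::printf ("%016llx\n", (unsigned long long) low_bits_through (3));   // 000000000000000f
  std::printf ("%016llx\n", (unsigned long long) low_bits_through (63));  // ffffffffffffffff
  return 0;
}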
@ -698,8 +702,15 @@ struct hb_set_t
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
{
static constexpr bool is_sorted_iterator = true;
iter_t (const hb_set_t &s_ = Null(hb_set_t)) :
s (&s_), v (INVALID), l (s->get_population () + 1) { __next__ (); }
iter_t (const hb_set_t &s_ = Null(hb_set_t),
bool init = true) : s (&s_), v (INVALID), l(0)
{
if (init)
{
l = s->get_population () + 1;
__next__ ();
}
}

typedef hb_codepoint_t __item_t__;
hb_codepoint_t __item__ () const { return v; }
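With the extra init flag, end () below can build a sentinel without paying for get_population (), which previously walked the whole set just to produce an iterator that is immediately discarded. A skeleton of the same trick over std::set (illustrative only, not the hb_set_t interface):

#include <cstdio>
#include <set>

struct set_iter_t
{
  const std::set<unsigned> *s;
  std::set<unsigned>::const_iterator pos;
  unsigned remaining;

  // init == false builds a cheap end sentinel: no population count, no seek.
  set_iter_t (const std::set<unsigned> &s_, bool init = true)
    : s (&s_), pos (init ? s_.begin () : s_.end ()), remaining (0)
  {
    if (init) remaining = (unsigned) s_.size ();
  }

  bool operator != (const set_iter_t &o) const { return s != o.s || pos != o.pos; }
  void operator ++ () { ++pos; if (remaining) remaining--; }
  unsigned operator * () const { return *pos; }

  set_iter_t end () const { return set_iter_t (*s, false); }
};

int main ()
{
  std::set<unsigned> s {1, 5, 9};
  for (set_iter_t it (s); it != it.end (); ++it)
    std::printf ("%u\n", *it);
  return 0;
}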
@ -707,7 +718,7 @@ struct hb_set_t
void __next__ () { s->next (&v); if (l) l--; }
void __prev__ () { s->previous (&v); }
unsigned __len__ () const { return l; }
iter_t end () const { return iter_t (*s); }
iter_t end () const { return iter_t (*s, false); }
bool operator != (const iter_t& o) const
{ return s != o.s || v != o.v; }

@ -37,6 +37,7 @@
#define HB_STRING_ARRAY_TYPE_NAME HB_PASTE(HB_STRING_ARRAY_NAME, _msgstr_t)
#define HB_STRING_ARRAY_POOL_NAME HB_PASTE(HB_STRING_ARRAY_NAME, _msgstr)
#define HB_STRING_ARRAY_OFFS_NAME HB_PASTE(HB_STRING_ARRAY_NAME, _msgidx)
#define HB_STRING_ARRAY_LENG_NAME HB_PASTE(HB_STRING_ARRAY_NAME, _length)

static const union HB_STRING_ARRAY_TYPE_NAME {
struct {

@ -66,6 +67,8 @@ static const unsigned int HB_STRING_ARRAY_OFFS_NAME[] =
sizeof (HB_STRING_ARRAY_TYPE_NAME)
};

static const unsigned int HB_STRING_ARRAY_LENG_NAME = ARRAY_LENGTH_CONST (HB_STRING_ARRAY_OFFS_NAME) - 1;

static inline hb_bytes_t
HB_STRING_ARRAY_NAME (unsigned int i)
{

@ -77,5 +80,6 @@ HB_STRING_ARRAY_NAME (unsigned int i)
#undef HB_STRING_ARRAY_TYPE_NAME
#undef HB_STRING_ARRAY_POOL_NAME
#undef HB_STRING_ARRAY_OFFS_NAME
#undef HB_STRING_ARRAY_LENG_NAME

#endif /* HB_STRING_ARRAY_HH */

@ -71,7 +71,10 @@ _cmap_closure (hb_face_t *face,
const hb_set_t *unicodes,
hb_set_t *glyphset)
{
face->table.cmap->table->closure_glyphs (unicodes, glyphset);
OT::cmap::accelerator_t cmap;
cmap.init (face);
cmap.table->closure_glyphs (unicodes, glyphset);
cmap.fini ();
}

static inline void

@ -324,10 +324,10 @@ DECLARE_NULL_INSTANCE (hb_unicode_funcs_t);
* Modify Telugu length marks (ccc=84, ccc=91).
* These are the only matras in the main Indic scripts range that have
* a non-zero ccc. That makes them reorder with the Halant (ccc=9).
* Assign 5 and 6, which are otherwise unassigned.
* Assign 4 and 5, which are otherwise unassigned.
*/
#define HB_MODIFIED_COMBINING_CLASS_CCC84 5 /* length mark */
#define HB_MODIFIED_COMBINING_CLASS_CCC91 6 /* ai length mark */
#define HB_MODIFIED_COMBINING_CLASS_CCC84 4 /* length mark */
#define HB_MODIFIED_COMBINING_CLASS_CCC91 5 /* ai length mark */

/* Thai
*

@ -38,9 +38,9 @@ HB_BEGIN_DECLS

#define HB_VERSION_MAJOR 2
#define HB_VERSION_MINOR 6
#define HB_VERSION_MICRO 2
#define HB_VERSION_MICRO 4

#define HB_VERSION_STRING "2.6.2"
#define HB_VERSION_STRING "2.6.4"

#define HB_VERSION_ATLEAST(major,minor,micro) \
((major)*10000+(minor)*100+(micro) <= \

@ -1,37 +0,0 @@
/*
* Copyright © 2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Google Author(s): Behdad Esfahbod
*/

#include "hb.hh"

#ifdef HB_ATOMIC_INT_NIL
#error "Could not find any system to define atomic_int macros, library WILL NOT be thread-safe"
#error "Check hb-atomic.hh for possible resolutions."
#endif

#ifdef HB_MUTEX_IMPL_NIL
#error "Could not find any system to define mutex macros, library WILL NOT be thread-safe"
#error "Check hb-mutex.hh for possible resolutions."
#endif

@ -370,10 +370,12 @@ extern "C" void hb_free_impl(void *ptr);
#define getenv(Name) nullptr
#endif

#ifdef HB_NO_ERRNO
static int errno = 0; /* Use something better? */
#else
#ifndef HB_NO_ERRNO
# include <errno.h>
#else
static int HB_UNUSED _hb_errno = 0;
# undef errno
# define errno _hb_errno
#endif

#if defined(HAVE_ATEXIT) && !defined(HB_USE_ATEXIT)

@ -0,0 +1,21 @@
MIT License

Copyright (c) Microsoft Corporation.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE

@ -1,5 +1,12 @@
# Copied from https://docs.microsoft.com/en-us/typography/script-development/use
# On October 23, 2018; with documentd dated 02/07/2018.
# IndicShapingInvalidCluster.txt
# Date: 2015-03-12, 21:17:00 GMT [AG]
# Date: 2019-11-08, 23:22:00 GMT [AG]
#
# This file defines the following property:
#
# Indic_Shaping_Invalid_Cluster
#
# Scope: This file enumerates sequences of characters that should be treated as invalid clusters

0905 0946 ; # DEVANAGARI LETTER A, DEVANAGARI VOWEL SIGN SHORT E
0905 093E ; # DEVANAGARI LETTER A, DEVANAGARI VOWEL SIGN AA

@ -48,6 +55,7 @@
0B05 0B3E ; # ORIYA LETTER A, ORIYA VOWEL SIGN AA
0B0F 0B57 ; # ORIYA LETTER E, ORIYA AU LENGTH MARK
0B13 0B57 ; # ORIYA LETTER O, ORIYA AU LENGTH MARK
0B85 0BC2 ; # TAMIL LETTER A, TAMIL VOWEL SIGN UU
0C12 0C55 ; # TELUGU LETTER O, TELUGU LENGTH MARK
0C12 0C4C ; # TELUGU LETTER O, TELUGU VOWEL SIGN AU
0C3F 0C55 ; # TELUGU VOWEL SIGN I, TELUGU LENGTH MARK

@ -49,6 +49,27 @@ test_collect_unicodes_format4 (void)
hb_face_destroy (face);
}

static void
test_collect_unicodes_format12_notdef (void)
{
hb_face_t *face = hb_test_open_font_file ("fonts/cmunrm.otf");
hb_set_t *codepoints = hb_set_create();
hb_codepoint_t cp;

hb_face_collect_unicodes (face, codepoints);

cp = HB_SET_VALUE_INVALID;
g_assert (hb_set_next (codepoints, &cp));
g_assert_cmpuint (0x20, ==, cp);
g_assert (hb_set_next (codepoints, &cp));
g_assert_cmpuint (0x21, ==, cp);
g_assert (hb_set_next (codepoints, &cp));
g_assert_cmpuint (0x22, ==, cp);

hb_set_destroy (codepoints);
hb_face_destroy (face);
}

static void
test_collect_unicodes_format12 (void)
{

@ -101,6 +122,7 @@ main (int argc, char **argv)
hb_test_add (test_collect_unicodes);
hb_test_add (test_collect_unicodes_format4);
hb_test_add (test_collect_unicodes_format12);
hb_test_add (test_collect_unicodes_format12_notdef);

return hb_test_run();
}

@ -33,7 +33,8 @@ def cmd (command):
def timeout (p, is_killed):
is_killed['value'] = True
p.kill ()
timer = threading.Timer (2, timeout, [p, is_killed])
timeout_seconds = int (os.environ.get ("HB_TEST_SHAPE_FUZZER_TIMEOUT", "2"))
timer = threading.Timer (timeout_seconds, timeout, [p, is_killed])

try:
timer.start()

@ -33,7 +33,8 @@ def cmd(command):
def timeout(p, is_killed):
is_killed['value'] = True
p.kill()
timer = threading.Timer (16, timeout, [p, is_killed])
timeout_seconds = int (os.environ.get ("HB_TEST_SUBSET_FUZZER_TIMEOUT", "8"))
timer = threading.Timer (timeout_seconds, timeout, [p, is_killed])

try:
timer.start()

@ -14,7 +14,9 @@ EXTRA_DIST += \
expected/cff-japanese \
expected/layout \
expected/layout.gpos \
expected/layout.gpos2 \
expected/layout.gpos3 \
expected/layout.gsub6 \
expected/cmap14 \
fonts \
profiles \

@ -6,7 +6,9 @@ TESTS = \
tests/cff-japanese.tests \
tests/layout.tests \
tests/layout.gpos.tests \
tests/layout.gpos2.tests \
tests/layout.gpos3.tests \
tests/layout.gsub6.tests \
tests/cmap14.tests \
$(NULL)

@ -0,0 +1,12 @@
FONTS:
gpos2_1_font7.otf
gpos2_2_font5.otf

PROFILES:
keep-layout.txt
keep-layout-retain-gids.txt

SUBSETS:
!#
!#%
*

@ -0,0 +1,12 @@
FONTS:
gsub_chaining1_multiple_subrules_f1.otf
gsub_chaining2_multiple_subrules_f1.otf
gsub_chaining3_simple_f2.otf

PROFILES:
keep-layout.txt
keep-layout-retain-gids.txt

SUBSETS:
0123
*