Merge branch 'master' of https://github.com/harfbuzz/harfbuzz into glyf
commit ed727d4bb7
@@ -36,15 +36,22 @@
 struct
 {
-  template <typename T> T
-  operator () (const T& v) const { return v; }
+  template <typename T> auto
+  operator () (T&& v) const HB_AUTO_RETURN ( hb_forward<T> (v) )
 }
 HB_FUNCOBJ (hb_identity);

+struct
+{
+  template <typename T> hb_remove_reference<T>
+  operator () (T&& v) const { return v; }
+}
+HB_FUNCOBJ (hb_rvalue);
+
 struct
 {
   template <typename T> bool
-  operator () (const T& v) const { return bool (v); }
+  operator () (T&& v) const { return bool (hb_forward<T> (v)); }
 }
 HB_FUNCOBJ (hb_bool);
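These three function objects slot into iterator pipelines through hb_map. A minimal sketch, mirroring the usage in the test-iter.cc hunk further down (st is an arbitrary int array):

    int st[] = {1, 2, 3};
    /* hb_identity forwards each element unchanged (now perfectly, via hb_forward). */
    auto a = + hb_iter (st) | hb_map (hb_identity);
    /* hb_rvalue strips the reference, so elements come back by value. */
    auto b = + hb_iter (st) | hb_map (hb_rvalue);
    /* hb_bool coerces each element to true/false. */
    auto c = + hb_iter (st) | hb_map (hb_bool);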
@@ -69,6 +76,38 @@ struct
 }
 HB_FUNCOBJ (hb_hash);

+
+struct
+{
+  private:
+
+  /* Pointer-to-member-function. */
+  template <typename Appl, typename T, typename ...Ts> auto
+  impl (Appl&& a, hb_priority<2>, T &&v, Ts&&... ds) const HB_AUTO_RETURN
+  ((hb_deref (hb_forward<T> (v)).*hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
+
+  /* Pointer-to-member. */
+  template <typename Appl, typename T> auto
+  impl (Appl&& a, hb_priority<1>, T &&v) const HB_AUTO_RETURN
+  ((hb_deref (hb_forward<T> (v))).*hb_forward<Appl> (a))
+
+  /* Operator(). */
+  template <typename Appl, typename ...Ts> auto
+  impl (Appl&& a, hb_priority<0>, Ts&&... ds) const HB_AUTO_RETURN
+  (hb_deref (hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
+
+  public:
+
+  template <typename Appl, typename ...Ts> auto
+  operator () (Appl&& a, Ts&&... ds) const HB_AUTO_RETURN
+  (
+    impl (hb_forward<Appl> (a),
+          hb_prioritize,
+          hb_forward<Ts> (ds)...)
+  )
+}
+HB_FUNCOBJ (hb_invoke);
+

 struct
 {
   private:
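hb_invoke picks an impl overload by tag dispatch: pointer-to-member-function first (hb_priority<2>), then pointer-to-member (hb_priority<1>), then any plain callable (hb_priority<0>). A hedged sketch with a hypothetical struct S, not part of the diff's code:

    struct S
    {
      int x;
      int get_x () const { return x; }
    };

    S s = {42};
    int a = hb_invoke (&S::get_x, s);   /* member function: calls s.get_x () */
    int b = hb_invoke (&S::x, s);       /* data member: reads s.x */
    int c = hb_invoke (hb_identity, 7); /* plain callable: returns 7 */

Because each impl goes through hb_deref, a pointer to S should work in place of s as well.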
@@ -176,6 +176,17 @@ struct hb_array_t : hb_iter_with_fallback_t<hb_array_t<Type>, Type&>
   void free ()
   { ::free ((void *) arrayZ); arrayZ = nullptr; length = 0; }

+  template <typename hb_serialize_context_t>
+  hb_array_t copy (hb_serialize_context_t *c) const
+  {
+    TRACE_SERIALIZE (this);
+    auto* out = c->template start_embed (arrayZ);
+    if (unlikely (!c->extend_size (out, get_size ()))) return_trace (hb_array_t ());
+    for (unsigned i = 0; i < length; i++)
+      out[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
+    return_trace (hb_array_t (out, length));
+  }
+
   template <typename hb_sanitize_context_t>
   bool sanitize (hb_sanitize_context_t *c) const
   { return c->check_array (arrayZ, length); }
@@ -62,6 +62,7 @@ struct hb_hashmap_t
     bool is_unused () const { return key == kINVALID; }
     bool is_tombstone () const { return key != kINVALID && value == vINVALID; }
     bool is_real () const { return key != kINVALID && value != vINVALID; }
+    hb_pair_t<K, V> get_pair() const { return hb_pair_t<K, V> (key, value); }
   };

   hb_object_header_t header;
@@ -206,6 +207,30 @@ struct hb_hashmap_t

   unsigned int get_population () const { return population; }

+  /*
+   * Iterator
+   */
+  auto iter () const HB_AUTO_RETURN
+  (
+    + hb_array (items, mask ? mask + 1 : 0)
+    | hb_filter (&item_t::is_real)
+    | hb_map (&item_t::get_pair)
+  )
+  auto keys () const HB_AUTO_RETURN
+  (
+    + hb_array (items, mask ? mask + 1 : 0)
+    | hb_filter (&item_t::is_real)
+    | hb_map (&item_t::key)
+    | hb_map (hb_rvalue)
+  )
+  auto values () const HB_AUTO_RETURN
+  (
+    + hb_array (items, mask ? mask + 1 : 0)
+    | hb_filter (&item_t::is_real)
+    | hb_map (&item_t::value)
+    | hb_map (hb_rvalue)
+  )
+
   protected:

   unsigned int bucket_for (K key) const
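A usage sketch for the new accessors, in the same style as the test added to test-iter.cc below. The bucket array is filtered through is_real so tombstones and unused slots never surface, and keys/values go through hb_rvalue so they come back by value:

    hb_map_t m;
    m.set (1, 10);

    using map_pair_t = hb_item_type<hb_map_t>;
    + hb_iter (m)
    | hb_map ([] (map_pair_t p) { return p.first * p.second; })
    ;

    + hb_iter (m.keys ())   | hb_drain;
    + hb_iter (m.values ()) | hb_drain;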
@@ -216,9 +241,9 @@ struct hb_hashmap_t
     while (!items[i].is_unused ())
     {
       if (items[i] == key)
        return i;
       if (tombstone == (unsigned) -1 && items[i].is_tombstone ())
        tombstone = i;
       i = (i + ++step) & mask;
     }
     return tombstone == (unsigned) -1 ? i : tombstone;
@@ -93,6 +93,7 @@ template <typename T> using hb_remove_const = typename hb_match_const<T>::type;
 #define hb_is_const(T) hb_match_const<T>::value
 template <typename T> struct hb_match_reference { typedef T type; enum { value = false }; };
 template <typename T> struct hb_match_reference<T &> { typedef T type; enum { value = true }; };
+template <typename T> struct hb_match_reference<T &&> { typedef T type; enum { value = true }; };
 template <typename T> using hb_remove_reference = typename hb_match_reference<T>::type;
 #define hb_is_reference(T) hb_match_reference<T>::value
 template <typename T> struct hb_match_pointer { typedef T type; enum { value = false }; };
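The new T && specialization means rvalue references now satisfy hb_is_reference and get stripped by hb_remove_reference, which hb_rvalue's return type relies on. A small sketch of the guarantees (the local same_ helper is illustrative, not harfbuzz API):

    static_assert (hb_is_reference (int &), "lvalue reference matches");
    static_assert (hb_is_reference (int &&), "rvalue reference now matches too");
    static_assert (!hb_is_reference (int), "plain type does not");

    template <typename A, typename B> struct same_ { enum { value = false }; };
    template <typename A> struct same_<A, A> { enum { value = true }; };
    static_assert (same_<hb_remove_reference<int &&>, int>::value, "stripped");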
@@ -211,36 +212,4 @@ template <> struct hb_is_integer<unsigned long long> { enum { value = true }; };
 #define hb_is_integer(T) hb_is_integer<T>::value

-
-struct
-{
-  private:
-
-  /* Pointer-to-member-function. */
-  template <typename Appl, typename T, typename ...Ts> auto
-  impl (Appl&& a, hb_priority<2>, T &&v, Ts&&... ds) const HB_AUTO_RETURN
-  ((hb_deref (hb_forward<T> (v)).*hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
-
-  /* Pointer-to-member. */
-  template <typename Appl, typename T> auto
-  impl (Appl&& a, hb_priority<1>, T &&v) const HB_AUTO_RETURN
-  ((hb_deref (hb_forward<T> (v))).*hb_forward<Appl> (a))
-
-  /* Operator(). */
-  template <typename Appl, typename ...Ts> auto
-  impl (Appl&& a, hb_priority<0>, Ts&&... ds) const HB_AUTO_RETURN
-  (hb_deref (hb_forward<Appl> (a)) (hb_forward<Ts> (ds)...))
-
-  public:
-
-  template <typename Appl, typename ...Ts> auto
-  operator () (Appl&& a, Ts&&... ds) const HB_AUTO_RETURN
-  (
-    impl (hb_forward<Appl> (a),
-          hb_prioritize,
-          hb_forward<Ts> (ds)...)
-  )
-}
-HB_FUNCOBJ (hb_invoke);
-

 #endif /* HB_META_HH */
@@ -427,7 +427,7 @@ struct UnsizedArrayOf
     if (unlikely (!serialize (c, count))) return_trace (false);
     /* TODO Umm. Just exhaust the iterator instead? Being extra
      * cautious right now.. */
-    for (unsigned i = 0; i < count; i++, items++)
+    for (unsigned i = 0; i < count; i++, ++items)
       arrayZ[i] = *items;
     return_trace (true);
   }
@@ -436,9 +436,7 @@ struct UnsizedArrayOf
   {
     TRACE_SERIALIZE (this);
     auto *out = c->start_embed (this);
-    if (unlikely (!out->serialize (c, count))) return_trace (nullptr);
-    for (unsigned i = 0; i < count; i++)
-      out->arrayZ[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
+    if (unlikely (!as_array (count).copy (c))) return_trace (nullptr);
     return_trace (out);
   }

@@ -608,7 +606,7 @@ struct ArrayOf
     if (unlikely (!serialize (c, count))) return_trace (false);
     /* TODO Umm. Just exhaust the iterator instead? Being extra
      * cautious right now.. */
-    for (unsigned i = 0; i < count; i++, items++)
+    for (unsigned i = 0; i < count; i++, ++items)
       arrayZ[i] = *items;
     return_trace (true);
   }
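Switching items++ to ++items keeps these loops usable with iterators that only provide pre-increment, and avoids a useless copy for ones that provide both. A hypothetical minimal iterator (not from harfbuzz) that would reject the old form:

    struct counting_iter_t
    {
      unsigned v = 0;
      unsigned operator * () const { return v; }
      counting_iter_t& operator ++ () { ++v; return *this; }
      /* no operator ++ (int): `it++` fails to compile, `++it` is fine */
    };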
@@ -618,9 +616,9 @@ struct ArrayOf
     TRACE_SERIALIZE (this);
     auto *out = c->start_embed (this);
     unsigned count = len;
-    if (unlikely (!out->serialize (c, count))) return_trace (nullptr);
-    for (unsigned i = 0; i < count; i++)
-      out->arrayZ[i] = arrayZ[i]; /* TODO: add version that calls c->copy() */
+    if (unlikely (!c->extend_min (out))) return_trace (nullptr);
+    c->check_assign (out->len, len);
+    if (unlikely (!as_array ().copy (c))) return_trace (nullptr);
     return_trace (out);
   }

@@ -41,71 +41,31 @@ namespace OT {

 struct DeviceRecord
 {
-  struct SubsetView
-  {
-    const DeviceRecord *source_device_record;
-    unsigned int sizeDeviceRecord;
-    hb_subset_plan_t *subset_plan;
-
-    void init (const DeviceRecord *source_device_record,
-               unsigned int sizeDeviceRecord,
-               hb_subset_plan_t *subset_plan)
-    {
-      this->source_device_record = source_device_record;
-      this->sizeDeviceRecord = sizeDeviceRecord;
-      this->subset_plan = subset_plan;
-    }
-
-    unsigned int len () const
-    { return this->subset_plan->num_output_glyphs (); }
-
-    const HBUINT8* operator [] (unsigned int new_gid) const
-    {
-      if (unlikely (new_gid >= len ())) return nullptr;
-
-      hb_codepoint_t old_gid;
-      if (!this->subset_plan->old_gid_for_new_gid (new_gid, &old_gid))
-        return &Null(HBUINT8);
-
-      if (old_gid >= sizeDeviceRecord - DeviceRecord::min_size)
-        return nullptr;
-      return &(this->source_device_record->widthsZ[old_gid]);
-    }
-  };
-
-  static unsigned int get_size (unsigned int count)
+  static unsigned int get_size (unsigned count)
   { return hb_ceil_to_4 (min_size + count * HBUINT8::static_size); }

-  bool serialize (hb_serialize_context_t *c, const SubsetView &subset_view)
+  template<typename Iterator,
+           hb_requires (hb_is_iterator (Iterator))>
+  bool serialize (hb_serialize_context_t *c, unsigned pixelSize, Iterator it)
   {
     TRACE_SERIALIZE (this);

-    unsigned int size = get_size (subset_view.len ());
-    if (unlikely (!c->allocate_size<DeviceRecord> (size)))
-    {
-      DEBUG_MSG(SUBSET, nullptr, "Couldn't allocate enough space for DeviceRecord: %d.",
-                size);
-      return_trace (false);
-    }
+    unsigned length = it.len ();

-    this->pixelSize = subset_view.source_device_record->pixelSize;
-    this->maxWidth = subset_view.source_device_record->maxWidth;
+    if (unlikely (!c->extend (*this, length))) return_trace (false);

-    for (unsigned int i = 0; i < subset_view.len (); i++)
-    {
-      const HBUINT8 *width = subset_view[i];
-      if (!width)
-      {
-        DEBUG_MSG(SUBSET, nullptr, "HDMX width for new gid %d is missing.", i);
-        return_trace (false);
-      }
-      widthsZ[i] = *width;
-    }
+    this->pixelSize = pixelSize;
+    this->maxWidth =
+    + it
+    | hb_reduce (hb_max, 0u);
+
+    + it
+    | hb_sink (widthsZ.as_array (length));

     return_trace (true);
   }

-  bool sanitize (hb_sanitize_context_t *c, unsigned int sizeDeviceRecord) const
+  bool sanitize (hb_sanitize_context_t *c, unsigned sizeDeviceRecord) const
   {
     TRACE_SANITIZE (this);
     return_trace (likely (c->check_struct (this) &&
@@ -135,62 +95,63 @@ struct hdmx
     return StructAtOffset<DeviceRecord> (&this->firstDeviceRecord, i * sizeDeviceRecord);
   }

-  bool serialize (hb_serialize_context_t *c, const hdmx *source_hdmx, hb_subset_plan_t *plan)
+  template<typename Iterator,
+           hb_requires (hb_is_iterator (Iterator))>
+  bool serialize (hb_serialize_context_t *c, unsigned version, Iterator it)
   {
     TRACE_SERIALIZE (this);

     if (unlikely (!c->extend_min ((*this)))) return_trace (false);

-    this->version = source_hdmx->version;
-    this->numRecords = source_hdmx->numRecords;
-    this->sizeDeviceRecord = DeviceRecord::get_size (plan->num_output_glyphs ());
+    this->version = version;
+    this->numRecords = it.len ();
+    this->sizeDeviceRecord = DeviceRecord::get_size (it ? (*it).second.len () : 0);

-    for (unsigned int i = 0; i < source_hdmx->numRecords; i++)
-    {
-      DeviceRecord::SubsetView subset_view;
-      subset_view.init (&(*source_hdmx)[i], source_hdmx->sizeDeviceRecord, plan);
+    + it
+    | hb_apply ([&] (const hb_item_type<Iterator>& _) {
+                  c->start_embed<DeviceRecord> ()->serialize (c, _.first, _.second);
+                })
+    ;

-      if (!c->start_embed<DeviceRecord> ()->serialize (c, subset_view))
-        return_trace (false);
-    }
     return_trace (c->successful);
   }

+  bool subset (hb_subset_context_t *c) const
+  {
+    TRACE_SUBSET (this);
+
+    hdmx *hdmx_prime = c->serializer->start_embed <hdmx> ();
+    if (unlikely (!hdmx_prime)) return_trace (false);
+
+    auto it =
+    + hb_iota ((unsigned) numRecords)
+    | hb_map ([&] (unsigned _)
+              {
+                const DeviceRecord *device_record =
+                  &StructAtOffset<DeviceRecord> (&firstDeviceRecord,
+                                                 _ * sizeDeviceRecord);
+                auto row =
+                + hb_iota (c->plan->num_output_glyphs ())
+                | hb_map (c->plan->reverse_glyph_map)
+                | hb_map ([=] (hb_codepoint_t _)
+                          {
+                            if (c->plan->is_empty_glyph (_))
+                              return Null(HBUINT8);
+                            return device_record->widthsZ.as_array (get_num_glyphs ()) [_];
+                          })
+                ;
+                return hb_pair ((unsigned) device_record->pixelSize, +row);
+              })
+    ;
+
+    hdmx_prime->serialize (c->serializer, version, it);
+    return_trace (true);
+  }
+
-  static size_t get_subsetted_size (const hdmx *source_hdmx, hb_subset_plan_t *plan)
+  unsigned get_num_glyphs () const
   {
-    return min_size + source_hdmx->numRecords * DeviceRecord::get_size (plan->num_output_glyphs ());
-  }
-
-  bool subset (hb_subset_plan_t *plan) const
-  {
-    size_t dest_size = get_subsetted_size (this, plan);
-    hdmx *dest = (hdmx *) malloc (dest_size);
-    if (unlikely (!dest))
-    {
-      DEBUG_MSG(SUBSET, nullptr, "Unable to alloc %lu for hdmx subset output.", (unsigned long) dest_size);
-      return false;
-    }
-
-    hb_serialize_context_t c (dest, dest_size);
-    hdmx *hdmx_prime = c.start_serialize<hdmx> ();
-    if (!hdmx_prime || !hdmx_prime->serialize (&c, this, plan))
-    {
-      free (dest);
-      DEBUG_MSG(SUBSET, nullptr, "Failed to serialize write new hdmx.");
-      return false;
-    }
-    c.end_serialize ();
-
-    hb_blob_t *hdmx_prime_blob = hb_blob_create ((const char *) dest,
-                                                 dest_size,
-                                                 HB_MEMORY_MODE_READONLY,
-                                                 dest,
-                                                 free);
-    bool result = plan->add_table (HB_OT_TAG_hdmx, hdmx_prime_blob);
-    hb_blob_destroy (hdmx_prime_blob);
-
-    return result;
+    return sizeDeviceRecord - DeviceRecord::min_size;
   }

   bool sanitize (hb_sanitize_context_t *c) const
@@ -323,7 +323,7 @@ struct hb_serialize_context_t
     allocate_size<void> (alignment - l);
   }

-  template <typename Type>
+  template <typename Type = void>
   Type *start_embed (const Type *obj HB_UNUSED = nullptr) const
   { return reinterpret_cast<Type *> (this->head); }
   template <typename Type>
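With Type now defaulting to void, start_embed supports three call forms; a sketch with illustrative names:

    static void
    demo (hb_serialize_context_t *c, const HBUINT8 *arrayZ)
    {
      auto *a = c->start_embed<HBUINT8> (); /* explicit template argument */
      auto *b = c->start_embed (arrayZ);    /* Type deduced from the argument */
      void *raw = c->start_embed ();        /* new: Type defaults to void */
      (void) a; (void) b; (void) raw;
    }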
@@ -158,7 +158,7 @@ _subset_table (hb_subset_plan_t *plan,
       result = _subset2<const OT::glyf> (plan);
       break;
     case HB_OT_TAG_hdmx:
-      result = _subset<const OT::hdmx> (plan);
+      result = _subset2<const OT::hdmx> (plan);
       break;
     case HB_OT_TAG_name:
       result = _subset2<const OT::name> (plan);
@@ -161,7 +161,7 @@ main (int argc, char **argv)
   test_iterator_non_default_constructable (hb_enumerate (hb_iter (st)));
   test_iterator_non_default_constructable (hb_enumerate (hb_iter (st) + 1));
   test_iterator_non_default_constructable (hb_iter (st) | hb_filter ());
-  test_iterator_non_default_constructable (hb_iter (st) | hb_map (hb_identity));
+  test_iterator_non_default_constructable (hb_iter (st) | hb_map (hb_rvalue));

   assert (true == hb_all (st));
   assert (false == hb_all (st, 42u));
@@ -206,6 +206,25 @@ main (int argc, char **argv)
     | hb_reduce ([&] (int acc, int value) -> int { return acc; }, 2)
     ;

+  using map_pair_t = hb_item_type<hb_map_t>;
+  + hb_iter (m)
+  | hb_map ([] (map_pair_t p) { return p.first * p.second; })
+  ;
+
+  m.keys ();
+  using map_key_t = decltype (*m.keys());
+  + hb_iter (m.keys ())
+  | hb_filter ([] (map_key_t k) { return k < 42; })
+  | hb_drain
+  ;
+
+  m.values ();
+  using map_value_t = decltype (*m.values());
+  + hb_iter (m.values ())
+  | hb_filter ([] (map_value_t k) { return k < 42; })
+  | hb_drain
+  ;
+
   unsigned int temp1 = 10;
   unsigned int temp2 = 0;
   hb_map_t *result =
@@ -91,28 +91,6 @@ test_subset_hdmx_invalid (void)
   hb_face_destroy (face);
 }

-static void
-test_subset_hdmx_fails_sanitize (void)
-{
-  hb_face_t *face = hb_test_open_font_file ("../fuzzing/fonts/clusterfuzz-testcase-minimized-hb-subset-fuzzer-5609911946838016");
-
-  hb_subset_input_t *input = hb_subset_input_create_or_fail ();
-  hb_set_t *codepoints = hb_subset_input_unicode_set (input);
-  hb_face_t *subset;
-
-  hb_set_add (codepoints, 'a');
-  hb_set_add (codepoints, 'b');
-  hb_set_add (codepoints, 'c');
-
-  subset = hb_subset (face, input);
-  g_assert (subset);
-  g_assert (subset == hb_face_get_empty ());
-
-  hb_subset_input_destroy (input);
-  hb_face_destroy (subset);
-  hb_face_destroy (face);
-}
-
 static void
 test_subset_hdmx_noop (void)
 {
@@ -140,7 +118,6 @@ main (int argc, char **argv)
   hb_test_add (test_subset_hdmx_simple_subset);
   hb_test_add (test_subset_hdmx_multiple_device_records);
   hb_test_add (test_subset_hdmx_invalid);
-  hb_test_add (test_subset_hdmx_fails_sanitize);
   hb_test_add (test_subset_hdmx_noop);

   return hb_test_run();