[reorg] Move Coverage, RangeRecord into new namespace layout.

Garret Rieger 2022-07-13 22:43:38 +00:00 committed by Behdad Esfahbod
parent 9c2518988d
commit c1e280ea78
8 changed files with 816 additions and 596 deletions

323
src/OT/Layout/Common/Coverage.hh Normal file

@@ -0,0 +1,323 @@
/*
* Copyright © 2007,2008,2009 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod, Garret Rieger
*/
#ifndef OT_LAYOUT_COMMON_COVERAGE_HH
#define OT_LAYOUT_COMMON_COVERAGE_HH
#include "../types.hh"
#include "CoverageFormat1.hh"
#include "CoverageFormat2.hh"
namespace OT {
namespace Layout {
namespace Common {
template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
Iterator it);
struct Coverage
{
protected:
union {
HBUINT16 format; /* Format identifier */
CoverageFormat1_3<SmallTypes> format1;
CoverageFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BORING_EXPANSION
CoverageFormat1_3<MediumTypes>format3;
CoverageFormat2_4<MediumTypes>format4;
#endif
} u;
public:
DEFINE_SIZE_UNION (2, format);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format)
{
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
#ifndef HB_NO_BORING_EXPANSION
case 3: return_trace (u.format3.sanitize (c));
case 4: return_trace (u.format4.sanitize (c));
#endif
default:return_trace (true);
}
}
/* Has interface. */
static constexpr unsigned SENTINEL = NOT_COVERED;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
switch (u.format) {
case 1: return u.format1.get_coverage (glyph_id);
case 2: return u.format2.get_coverage (glyph_id);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.get_coverage (glyph_id);
case 4: return u.format4.get_coverage (glyph_id);
#endif
default:return NOT_COVERED;
}
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
unsigned count = 0;
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
num_ranges++;
last = g;
count++;
}
u.format = count <= num_ranges * 3 ? 1 : 2;
#ifndef HB_NO_BORING_EXPANSION
if (count && last > 0xFFFFu)
u.format += 2;
#endif
switch (u.format)
{
case 1: return_trace (u.format1.serialize (c, glyphs));
case 2: return_trace (u.format2.serialize (c, glyphs));
#ifndef HB_NO_BORING_EXPANSION
case 3: return_trace (u.format3.serialize (c, glyphs));
case 4: return_trace (u.format4.serialize (c, glyphs));
#endif
default:return_trace (false);
}
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto it =
+ iter ()
| hb_filter (c->plan->glyph_map_gsub)
| hb_map_retains_sorting (c->plan->glyph_map_gsub)
;
// Cache the iterator result as it will be iterated multiple times
// by the serialize code below.
hb_sorted_vector_t<hb_codepoint_t> glyphs (it);
Coverage_serialize (c->serializer, glyphs.iter ());
return_trace (bool (glyphs));
}
bool intersects (const hb_set_t *glyphs) const
{
switch (u.format)
{
case 1: return u.format1.intersects (glyphs);
case 2: return u.format2.intersects (glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersects (glyphs);
case 4: return u.format4.intersects (glyphs);
#endif
default:return false;
}
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
switch (u.format)
{
case 1: return u.format1.intersects_coverage (glyphs, index);
case 2: return u.format2.intersects_coverage (glyphs, index);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersects_coverage (glyphs, index);
case 4: return u.format4.intersects_coverage (glyphs, index);
#endif
default:return false;
}
}
/* Might return false if array looks unsorted.
* Used for faster rejection of corrupt data. */
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
switch (u.format)
{
case 1: return u.format1.collect_coverage (glyphs);
case 2: return u.format2.collect_coverage (glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.collect_coverage (glyphs);
case 4: return u.format4.collect_coverage (glyphs);
#endif
default:return false;
}
}
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
switch (u.format)
{
case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersected_coverage_glyphs (glyphs, intersect_glyphs);
case 4: return u.format4.intersected_coverage_glyphs (glyphs, intersect_glyphs);
#endif
default:return ;
}
}
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
{
static constexpr bool is_sorted_iterator = true;
iter_t (const Coverage &c_ = Null (Coverage))
{
memset (this, 0, sizeof (*this));
format = c_.u.format;
switch (format)
{
case 1: u.format1.init (c_.u.format1); return;
case 2: u.format2.init (c_.u.format2); return;
#ifndef HB_NO_BORING_EXPANSION
case 3: u.format3.init (c_.u.format3); return;
case 4: u.format4.init (c_.u.format4); return;
#endif
default: return;
}
}
bool __more__ () const
{
switch (format)
{
case 1: return u.format1.more ();
case 2: return u.format2.more ();
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.more ();
case 4: return u.format4.more ();
#endif
default:return false;
}
}
void __next__ ()
{
switch (format)
{
case 1: u.format1.next (); break;
case 2: u.format2.next (); break;
#ifndef HB_NO_BORING_EXPANSION
case 3: u.format3.next (); break;
case 4: u.format4.next (); break;
#endif
default: break;
}
}
typedef hb_codepoint_t __item_t__;
__item_t__ __item__ () const { return get_glyph (); }
hb_codepoint_t get_glyph () const
{
switch (format)
{
case 1: return u.format1.get_glyph ();
case 2: return u.format2.get_glyph ();
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.get_glyph ();
case 4: return u.format4.get_glyph ();
#endif
default:return 0;
}
}
bool operator != (const iter_t& o) const
{
if (unlikely (format != o.format)) return true;
switch (format)
{
case 1: return u.format1 != o.u.format1;
case 2: return u.format2 != o.u.format2;
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3 != o.u.format3;
case 4: return u.format4 != o.u.format4;
#endif
default:return false;
}
}
iter_t __end__ () const
{
iter_t it = {};
it.format = format;
switch (format)
{
case 1: it.u.format1 = u.format1.__end__ (); break;
case 2: it.u.format2 = u.format2.__end__ (); break;
#ifndef HB_NO_BORING_EXPANSION
case 3: it.u.format3 = u.format3.__end__ (); break;
case 4: it.u.format4 = u.format4.__end__ (); break;
#endif
default: break;
}
return it;
}
private:
unsigned int format;
union {
#ifndef HB_NO_BORING_EXPANSION
CoverageFormat2_4<MediumTypes>::iter_t format4; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<MediumTypes>::iter_t format3;
#endif
CoverageFormat2_4<SmallTypes>::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<SmallTypes>::iter_t format1;
} u;
};
iter_t iter () const { return iter_t (*this); }
};
template<typename Iterator>
static inline void
Coverage_serialize (hb_serialize_context_t *c,
Iterator it)
{ c->start_embed<Coverage> ()->serialize (c, it); }
}
}
}
#endif // #ifndef OT_LAYOUT_COMMON_COVERAGE_HH
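A note on the arithmetic in Coverage::serialize above: with SmallTypes, format 1 spends 2 bytes per glyph while format 2 spends 6 bytes per range, so format 1 only wins while count <= 3 * num_ranges; and because the input iterator is sorted, last is the largest glyph ID, so last > 0xFFFF suffices to detect 16-bit overflow and the += 2 switches to the MediumTypes formats 3/4. A minimal standalone sketch of that rule (hypothetical helper, not part of the HarfBuzz sources):

#include <cstdint>
#include <vector>

// Mirrors the format choice in Coverage::serialize: runs of consecutive
// glyph IDs favor the range-based formats, and any ID above 0xFFFF forces
// the 24-bit (MediumTypes) variants.
static unsigned
choose_coverage_format (const std::vector<uint32_t> &sorted_glyphs)
{
  unsigned count = 0, num_ranges = 0;
  uint32_t last = (uint32_t) -2;
  for (uint32_t g : sorted_glyphs)
  {
    if (last + 1 != g) num_ranges++;   // a gap opens a new range
    last = g;
    count++;
  }
  unsigned format = count <= num_ranges * 3 ? 1 : 2;  // 2B/glyph vs 6B/range
  if (count && last > 0xFFFFu)
    format += 2;                       // glyph IDs overflow 16 bits: formats 3/4
  return format;
}

For example, {1, 2, 3, 4} is a single run and picks format 2 (one 6-byte range beats four 2-byte glyph IDs), while {10, 20, 30} is all gaps and picks format 1.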

120
src/OT/Layout/Common/CoverageFormat1.hh Normal file

@@ -0,0 +1,120 @@
/*
* Copyright © 2007,2008,2009 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod, Garret Rieger
*/
#ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
#define OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
namespace OT {
namespace Layout {
namespace Common {
#define NOT_COVERED ((unsigned int) -1)
template <typename Types>
struct CoverageFormat1_3
{
friend struct Coverage;
protected:
HBUINT16 coverageFormat; /* Format identifier--format = 1 */
SortedArray16Of<typename Types::HBGlyphID>
glyphArray; /* Array of GlyphIDs--in numerical order */
public:
DEFINE_SIZE_ARRAY (4, glyphArray);
private:
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (glyphArray.sanitize (c));
}
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
unsigned int i;
glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
return i;
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
return_trace (glyphArray.serialize (c, glyphs));
}
bool intersects (const hb_set_t *glyphs) const
{
/* TODO Speed up, using hb_set_next() and bsearch()? */
for (const auto& g : glyphArray.as_array ())
if (glyphs->has (g))
return true;
return false;
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{ return glyphs->has (glyphArray[index]); }
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
unsigned count = glyphArray.len;
for (unsigned i = 0; i < count; i++)
if (glyphs->has (glyphArray[i]))
intersect_glyphs->add (glyphArray[i]);
}
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{ return glyphs->add_sorted_array (glyphArray.as_array ()); }
public:
/* Older compilers need this to be public. */
struct iter_t
{
void init (const struct CoverageFormat1_3 &c_) { c = &c_; i = 0; }
void fini () {}
bool more () const { return i < c->glyphArray.len; }
void next () { i++; }
hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
bool operator != (const iter_t& o) const
{ return i != o.i; }
iter_t __end__ () const { iter_t it; it.init (*c); it.i = c->glyphArray.len; return it; }
private:
const struct CoverageFormat1_3 *c;
unsigned int i;
};
private:
};
}
}
}
#endif // #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT1_HH
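For intuition, get_coverage above is a binary search over the sorted glyph array: a covered glyph's coverage index is simply its position, and a miss yields NOT_COVERED — exactly what bfind with HB_NOT_FOUND_STORE arranges. The same logic in plain C++, as a sketch (std::vector standing in for SortedArray16Of; names are hypothetical):

#include <algorithm>
#include <cstdint>
#include <vector>

static unsigned
coverage1_lookup (const std::vector<uint16_t> &glyph_array, uint16_t glyph_id)
{
  auto it = std::lower_bound (glyph_array.begin (), glyph_array.end (), glyph_id);
  if (it != glyph_array.end () && *it == glyph_id)
    return (unsigned) (it - glyph_array.begin ());  // position == coverage index
  return (unsigned) -1;                             // NOT_COVERED
}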

224
src/OT/Layout/Common/CoverageFormat2.hh Normal file

@@ -0,0 +1,224 @@
/*
* Copyright © 2007,2008,2009 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod, Garret Rieger
*/
#ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
#define OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
#include "RangeRecord.hh"
namespace OT {
namespace Layout {
namespace Common {
template <typename Types>
struct CoverageFormat2_4
{
friend struct Coverage;
protected:
HBUINT16 coverageFormat; /* Format identifier--format = 2 */
SortedArray16Of<RangeRecord<Types>>
rangeRecord; /* Array of glyph ranges--ordered by
* Start GlyphID. rangeCount entries
* long */
public:
DEFINE_SIZE_ARRAY (4, rangeRecord);
private:
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (rangeRecord.sanitize (c));
}
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
const RangeRecord<Types> &range = rangeRecord.bsearch (glyph_id);
return likely (range.first <= range.last)
? (unsigned int) range.value + (glyph_id - range.first)
: NOT_COVERED;
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
/* TODO(iter) Write more efficiently? */
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
num_ranges++;
last = g;
}
if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
if (!num_ranges) return_trace (true);
unsigned count = 0;
unsigned range = (unsigned) -1;
last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
{
range++;
rangeRecord[range].first = g;
rangeRecord[range].value = count;
}
rangeRecord[range].last = g;
last = g;
count++;
}
return_trace (true);
}
bool intersects (const hb_set_t *glyphs) const
{
return hb_any (+ hb_iter (rangeRecord.as_array ())
| hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (glyphs); }));
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
auto cmp = [] (const void *pk, const void *pr) -> int
{
unsigned index = * (const unsigned *) pk;
const RangeRecord<Types> &range = * (const RangeRecord<Types> *) pr;
if (index < range.value) return -1;
if (index > (unsigned int) range.value + (range.last - range.first)) return +1;
return 0;
};
auto arr = rangeRecord.as_array ();
unsigned idx;
if (hb_bsearch_impl (&idx, index,
arr.arrayZ, arr.length, sizeof (arr[0]),
(int (*)(const void *_key, const void *_item)) cmp))
return arr.arrayZ[idx].intersects (glyphs);
return false;
}
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
for (const auto& range : rangeRecord.as_array ())
{
if (!range.intersects (glyphs)) continue;
unsigned last = range.last;
for (hb_codepoint_t g = range.first - 1;
glyphs->next (&g) && g <= last;)
intersect_glyphs->add (g);
}
}
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
unsigned int count = rangeRecord.len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
return false;
return true;
}
public:
/* Older compilers need this to be public. */
struct iter_t
{
void init (const CoverageFormat2_4 &c_)
{
c = &c_;
coverage = 0;
i = 0;
j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
{
/* Broken table. Skip. */
i = c->rangeRecord.len;
}
}
void fini () {}
bool more () const { return i < c->rangeRecord.len; }
void next ()
{
if (j >= c->rangeRecord[i].last)
{
i++;
if (more ())
{
unsigned int old = coverage;
j = c->rangeRecord[i].first;
coverage = c->rangeRecord[i].value;
if (unlikely (coverage != old + 1))
{
/* Broken table. Skip. Important to avoid DoS.
* Also, our callers depend on coverage being
* consecutive and monotonically increasing,
* ie. iota(). */
i = c->rangeRecord.len;
return;
}
}
else
j = 0;
return;
}
coverage++;
j++;
}
hb_codepoint_t get_glyph () const { return j; }
bool operator != (const iter_t& o) const
{ return i != o.i || j != o.j; }
iter_t __end__ () const
{
iter_t it;
it.init (*c);
it.i = c->rangeRecord.len;
it.j = 0;
return it;
}
private:
const struct CoverageFormat2_4 *c;
unsigned int i, coverage;
hb_codepoint_t j;
};
private:
};
}
}
}
#endif // #ifndef OT_LAYOUT_COMMON_COVERAGEFORMAT2_HH
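The lookup in get_coverage above relies on two invariants that serialize establishes: ranges are sorted by first (so rangeRecord.bsearch, driven by RangeRecord::cmp, can binary-search them), and each range's value is the coverage index of its first glyph, which keeps indices consecutive across ranges — the same iota property the iterator enforces. A sketch of the lookup with a plain struct in place of RangeRecord<Types> (hypothetical names):

#include <cstdint>
#include <vector>

struct RangeSketch { uint32_t first, last, value; };

static unsigned
coverage2_lookup (const std::vector<RangeSketch> &ranges, uint32_t glyph_id)
{
  unsigned lo = 0, hi = (unsigned) ranges.size ();
  while (lo < hi)  // binary search over ranges, like rangeRecord.bsearch
  {
    unsigned mid = (lo + hi) / 2;
    if      (glyph_id < ranges[mid].first) hi = mid;
    else if (glyph_id > ranges[mid].last)  lo = mid + 1;
    else  // hit: index of the range's first glyph plus offset into the range
      return ranges[mid].value + (glyph_id - ranges[mid].first);
  }
  return (unsigned) -1;  // NOT_COVERED
}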

75
src/OT/Layout/Common/RangeRecord.hh Normal file

@@ -0,0 +1,75 @@
/*
* Copyright © 2007,2008,2009 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod, Garret Rieger
*/
#ifndef OT_LAYOUT_COMMON_RANGERECORD_HH
#define OT_LAYOUT_COMMON_RANGERECORD_HH
namespace OT {
namespace Layout {
namespace Common {
template <typename Types>
struct RangeRecord
{
typename Types::HBGlyphID first; /* First GlyphID in the range */
typename Types::HBGlyphID last; /* Last GlyphID in the range */
HBUINT16 value; /* Value */
DEFINE_SIZE_STATIC (2 + 2 * Types::size);
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
int cmp (hb_codepoint_t g) const
{ return g < first ? -1 : g <= last ? 0 : +1; }
bool intersects (const hb_set_t *glyphs) const
{ return glyphs->intersects (first, last); }
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{ return glyphs->add_range (first, last); }
};
}
}
}
extern HB_INTERNAL const unsigned char _hb_Null_OT_RangeRecord[9];
template <typename Spec>
struct Null<OT::Layout::Common::RangeRecord<Spec>> {
static OT::Layout::Common::RangeRecord<Spec> const & get_null () {
return *reinterpret_cast<const OT::Layout::Common::RangeRecord<Spec> *> (_hb_Null_OT_RangeRecord);
}
};
#endif // #ifndef OT_LAYOUT_COMMON_RANGERECORD_HH
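The Null specialization at the bottom hooks RangeRecord into HarfBuzz's null-object machinery: out-of-range accesses resolve to a shared zero-filled buffer reinterpreted as the type, so the buffer only needs to cover the largest instantiation — DEFINE_SIZE_STATIC works out to 6 bytes for SmallTypes and 8 for MediumTypes, both within the 9-byte _hb_Null_OT_RangeRecord. A hedged sketch of the pattern (hypothetical helper; the real machinery lives in hb-null.hh):

template <typename T>
static const T &
null_of ()
{
  static_assert (sizeof (T) <= 9, "shared null buffer must cover the type");
  static const unsigned char zeros[9] = {};     // one all-zero backing store
  // Same trick as _hb_Null_OT_RangeRecord: the all-zero bit pattern serves
  // as the default object for every instantiation of the template.
  return *reinterpret_cast<const T *> (zeros);
}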

64
src/OT/Layout/types.hh Normal file

@@ -0,0 +1,64 @@
/*
* Copyright © 2007,2008,2009 Red Hat, Inc.
* Copyright © 2010,2012 Google, Inc.
*
* This is part of HarfBuzz, a text shaping library.
*
* Permission is hereby granted, without written agreement and without
* license or royalty fees, to use, copy, modify, and distribute this
* software and its documentation for any purpose, provided that the
* above copyright notice and the following two paragraphs appear in
* all copies of this software.
*
* IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
* DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
* ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
* IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
* DAMAGE.
*
* THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
* ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
* PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
*
* Red Hat Author(s): Behdad Esfahbod
* Google Author(s): Behdad Esfahbod, Garret Rieger
*/
#ifndef OT_LAYOUT_TYPES_HH
#define OT_LAYOUT_TYPES_HH
namespace OT {
namespace Layout {
struct SmallTypes {
static constexpr unsigned size = 2;
using HBUINT = HBUINT16;
using HBGlyphID = HBGlyphID16;
using Offset = Offset16;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset16To<Type, has_null>;
template <typename Type>
using ArrayOf = OT::Array16Of<Type>;
template <typename Type>
using SortedArrayOf = OT::SortedArray16Of<Type>;
};
struct MediumTypes {
static constexpr unsigned size = 3;
using HBUINT = HBUINT24;
using HBGlyphID = HBGlyphID24;
using Offset = Offset24;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset24To<Type, has_null>;
template <typename Type>
using ArrayOf = OT::Array24Of<Type>;
template <typename Type>
using SortedArrayOf = OT::SortedArray24Of<Type>;
};
}
}
#endif /* OT_LAYOUT_TYPES_HH */
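SmallTypes and MediumTypes exist so that a single template body can describe both the classic 16-bit OpenType layout structures and their 24-bit "boring expansion" counterparts; RangeRecord<Types> and the CoverageFormat structs above are the first consumers. A toy illustration of the idea (hypothetical names; uint32_t stands in for the 3-byte HBUINT24):

#include <cstdint>

struct SmallTypesSketch  { using GlyphID = uint16_t; static constexpr unsigned size = 2; };
struct MediumTypesSketch { using GlyphID = uint32_t; static constexpr unsigned size = 3; };

template <typename Types>
struct RangeRecordSketch
{
  typename Types::GlyphID first, last;  // field widths track the Types parameter
  uint16_t                value;
};

// One definition, two wire layouts:
using SmallRange  = RangeRecordSketch<SmallTypesSketch>;   // 16-bit glyph IDs
using MediumRange = RangeRecordSketch<MediumTypesSketch>;  // 24-bit IDs (widened here)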

src/hb-ot-layout-common.hh

@@ -35,6 +35,14 @@
#include "hb-set.hh"
#include "hb-bimap.hh"
#include "OT/Layout/Common/Coverage.hh"
#include "OT/Layout/types.hh"
// TODO(garretrieger): cleanup these after migration.
using OT::Layout::Common::Coverage;
using OT::Layout::Common::RangeRecord;
using OT::Layout::SmallTypes;
using OT::Layout::MediumTypes;
#ifndef HB_MAX_NESTING_LEVEL
#define HB_MAX_NESTING_LEVEL 64
@@ -79,40 +87,6 @@
namespace OT {
struct SmallTypes {
static constexpr unsigned size = 2;
using HBUINT = HBUINT16;
using HBGlyphID = HBGlyphID16;
using Offset = Offset16;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset16To<Type, has_null>;
template <typename Type>
using ArrayOf = OT::Array16Of<Type>;
template <typename Type>
using SortedArrayOf = OT::SortedArray16Of<Type>;
};
struct MediumTypes {
static constexpr unsigned size = 3;
using HBUINT = HBUINT24;
using HBGlyphID = HBGlyphID24;
using Offset = Offset24;
template <typename Type, bool has_null=true>
using OffsetTo = OT::Offset24To<Type, has_null>;
template <typename Type>
using ArrayOf = OT::Array24Of<Type>;
template <typename Type>
using SortedArrayOf = OT::SortedArray24Of<Type>;
};
#define NOT_COVERED ((unsigned int) -1)
template<typename Iterator>
static inline void Coverage_serialize (hb_serialize_context_t *c,
Iterator it);
template<typename Iterator>
static inline void ClassDef_serialize (hb_serialize_context_t *c,
Iterator it);
@@ -536,34 +510,6 @@ struct RecordListOfScript : RecordListOf<Script>
}
};
template <typename Types>
struct RangeRecord
{
int cmp (hb_codepoint_t g) const
{ return g < first ? -1 : g <= last ? 0 : +1; }
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (c->check_struct (this));
}
bool intersects (const hb_set_t *glyphs) const
{ return glyphs->intersects (first, last); }
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{ return glyphs->add_range (first, last); }
typename Types::HBGlyphID first; /* First GlyphID in the range */
typename Types::HBGlyphID last; /* Last GlyphID in the range */
HBUINT16 value; /* Value */
public:
DEFINE_SIZE_STATIC (2 + 2 * Types::size);
};
DECLARE_NULL_NAMESPACE_BYTES_TEMPLATE1 (OT, RangeRecord, 9);
struct IndexArray : Array16Of<Index>
{
bool intersects (const hb_map_t *indexes) const
@@ -1438,537 +1384,6 @@ struct LookupOffsetList : List16OfOffsetTo<TLookup, OffsetType>
* Coverage Table
*/
template <typename Types>
struct CoverageFormat1_3
{
friend struct Coverage;
private:
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
unsigned int i;
glyphArray.bfind (glyph_id, &i, HB_NOT_FOUND_STORE, NOT_COVERED);
return i;
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
return_trace (glyphArray.serialize (c, glyphs));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (glyphArray.sanitize (c));
}
bool intersects (const hb_set_t *glyphs) const
{
/* TODO Speed up, using hb_set_next() and bsearch()? */
for (const auto& g : glyphArray.as_array ())
if (glyphs->has (g))
return true;
return false;
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{ return glyphs->has (glyphArray[index]); }
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
unsigned count = glyphArray.len;
for (unsigned i = 0; i < count; i++)
if (glyphs->has (glyphArray[i]))
intersect_glyphs->add (glyphArray[i]);
}
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{ return glyphs->add_sorted_array (glyphArray.as_array ()); }
public:
/* Older compilers need this to be public. */
struct iter_t
{
void init (const struct CoverageFormat1_3 &c_) { c = &c_; i = 0; }
void fini () {}
bool more () const { return i < c->glyphArray.len; }
void next () { i++; }
hb_codepoint_t get_glyph () const { return c->glyphArray[i]; }
bool operator != (const iter_t& o) const
{ return i != o.i; }
iter_t __end__ () const { iter_t it; it.init (*c); it.i = c->glyphArray.len; return it; }
private:
const struct CoverageFormat1_3 *c;
unsigned int i;
};
private:
protected:
HBUINT16 coverageFormat; /* Format identifier--format = 1 */
SortedArray16Of<typename Types::HBGlyphID>
glyphArray; /* Array of GlyphIDs--in numerical order */
public:
DEFINE_SIZE_ARRAY (4, glyphArray);
};
template <typename Types>
struct CoverageFormat2_4
{
friend struct Coverage;
private:
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
const RangeRecord<Types> &range = rangeRecord.bsearch (glyph_id);
return likely (range.first <= range.last)
? (unsigned int) range.value + (glyph_id - range.first)
: NOT_COVERED;
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
/* TODO(iter) Write more efficiently? */
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
num_ranges++;
last = g;
}
if (unlikely (!rangeRecord.serialize (c, num_ranges))) return_trace (false);
if (!num_ranges) return_trace (true);
unsigned count = 0;
unsigned range = (unsigned) -1;
last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
{
range++;
rangeRecord[range].first = g;
rangeRecord[range].value = count;
}
rangeRecord[range].last = g;
last = g;
count++;
}
return_trace (true);
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
return_trace (rangeRecord.sanitize (c));
}
bool intersects (const hb_set_t *glyphs) const
{
return hb_any (+ hb_iter (rangeRecord.as_array ())
| hb_map ([glyphs] (const RangeRecord<Types> &range) { return range.intersects (glyphs); }));
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
auto cmp = [] (const void *pk, const void *pr) -> int
{
unsigned index = * (const unsigned *) pk;
const RangeRecord<Types> &range = * (const RangeRecord<Types> *) pr;
if (index < range.value) return -1;
if (index > (unsigned int) range.value + (range.last - range.first)) return +1;
return 0;
};
auto arr = rangeRecord.as_array ();
unsigned idx;
if (hb_bsearch_impl (&idx, index,
arr.arrayZ, arr.length, sizeof (arr[0]),
(int (*)(const void *_key, const void *_item)) cmp))
return arr.arrayZ[idx].intersects (glyphs);
return false;
}
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
for (const auto& range : rangeRecord.as_array ())
{
if (!range.intersects (glyphs)) continue;
unsigned last = range.last;
for (hb_codepoint_t g = range.first - 1;
glyphs->next (&g) && g <= last;)
intersect_glyphs->add (g);
}
}
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
unsigned int count = rangeRecord.len;
for (unsigned int i = 0; i < count; i++)
if (unlikely (!rangeRecord[i].collect_coverage (glyphs)))
return false;
return true;
}
public:
/* Older compilers need this to be public. */
struct iter_t
{
void init (const CoverageFormat2_4 &c_)
{
c = &c_;
coverage = 0;
i = 0;
j = c->rangeRecord.len ? c->rangeRecord[0].first : 0;
if (unlikely (c->rangeRecord[0].first > c->rangeRecord[0].last))
{
/* Broken table. Skip. */
i = c->rangeRecord.len;
}
}
void fini () {}
bool more () const { return i < c->rangeRecord.len; }
void next ()
{
if (j >= c->rangeRecord[i].last)
{
i++;
if (more ())
{
unsigned int old = coverage;
j = c->rangeRecord[i].first;
coverage = c->rangeRecord[i].value;
if (unlikely (coverage != old + 1))
{
/* Broken table. Skip. Important to avoid DoS.
* Also, our callers depend on coverage being
* consecutive and monotonically increasing,
* ie. iota(). */
i = c->rangeRecord.len;
return;
}
}
else
j = 0;
return;
}
coverage++;
j++;
}
hb_codepoint_t get_glyph () const { return j; }
bool operator != (const iter_t& o) const
{ return i != o.i || j != o.j; }
iter_t __end__ () const
{
iter_t it;
it.init (*c);
it.i = c->rangeRecord.len;
it.j = 0;
return it;
}
private:
const struct CoverageFormat2_4 *c;
unsigned int i, coverage;
hb_codepoint_t j;
};
private:
protected:
HBUINT16 coverageFormat; /* Format identifier--format = 2 */
SortedArray16Of<RangeRecord<Types>>
rangeRecord; /* Array of glyph ranges--ordered by
* Start GlyphID. rangeCount entries
* long */
public:
DEFINE_SIZE_ARRAY (4, rangeRecord);
};
struct Coverage
{
/* Has interface. */
static constexpr unsigned SENTINEL = NOT_COVERED;
typedef unsigned int value_t;
value_t operator [] (hb_codepoint_t k) const { return get (k); }
bool has (hb_codepoint_t k) const { return (*this)[k] != SENTINEL; }
/* Predicate. */
bool operator () (hb_codepoint_t k) const { return has (k); }
unsigned int get (hb_codepoint_t k) const { return get_coverage (k); }
unsigned int get_coverage (hb_codepoint_t glyph_id) const
{
switch (u.format) {
case 1: return u.format1.get_coverage (glyph_id);
case 2: return u.format2.get_coverage (glyph_id);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.get_coverage (glyph_id);
case 4: return u.format4.get_coverage (glyph_id);
#endif
default:return NOT_COVERED;
}
}
template <typename Iterator,
hb_requires (hb_is_sorted_source_of (Iterator, hb_codepoint_t))>
bool serialize (hb_serialize_context_t *c, Iterator glyphs)
{
TRACE_SERIALIZE (this);
if (unlikely (!c->extend_min (this))) return_trace (false);
unsigned count = 0;
unsigned num_ranges = 0;
hb_codepoint_t last = (hb_codepoint_t) -2;
for (auto g: glyphs)
{
if (last + 1 != g)
num_ranges++;
last = g;
count++;
}
u.format = count <= num_ranges * 3 ? 1 : 2;
#ifndef HB_NO_BORING_EXPANSION
if (count && last > 0xFFFFu)
u.format += 2;
#endif
switch (u.format)
{
case 1: return_trace (u.format1.serialize (c, glyphs));
case 2: return_trace (u.format2.serialize (c, glyphs));
#ifndef HB_NO_BORING_EXPANSION
case 3: return_trace (u.format3.serialize (c, glyphs));
case 4: return_trace (u.format4.serialize (c, glyphs));
#endif
default:return_trace (false);
}
}
bool subset (hb_subset_context_t *c) const
{
TRACE_SUBSET (this);
auto it =
+ iter ()
| hb_filter (c->plan->glyph_map_gsub)
| hb_map_retains_sorting (c->plan->glyph_map_gsub)
;
// Cache the iterator result as it will be iterated multiple times
// by the serialize code below.
hb_sorted_vector_t<hb_codepoint_t> glyphs (it);
Coverage_serialize (c->serializer, glyphs.iter ());
return_trace (bool (glyphs));
}
bool sanitize (hb_sanitize_context_t *c) const
{
TRACE_SANITIZE (this);
if (!u.format.sanitize (c)) return_trace (false);
switch (u.format)
{
case 1: return_trace (u.format1.sanitize (c));
case 2: return_trace (u.format2.sanitize (c));
#ifndef HB_NO_BORING_EXPANSION
case 3: return_trace (u.format3.sanitize (c));
case 4: return_trace (u.format4.sanitize (c));
#endif
default:return_trace (true);
}
}
bool intersects (const hb_set_t *glyphs) const
{
switch (u.format)
{
case 1: return u.format1.intersects (glyphs);
case 2: return u.format2.intersects (glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersects (glyphs);
case 4: return u.format4.intersects (glyphs);
#endif
default:return false;
}
}
bool intersects_coverage (const hb_set_t *glyphs, unsigned int index) const
{
switch (u.format)
{
case 1: return u.format1.intersects_coverage (glyphs, index);
case 2: return u.format2.intersects_coverage (glyphs, index);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersects_coverage (glyphs, index);
case 4: return u.format4.intersects_coverage (glyphs, index);
#endif
default:return false;
}
}
/* Might return false if array looks unsorted.
* Used for faster rejection of corrupt data. */
template <typename set_t>
bool collect_coverage (set_t *glyphs) const
{
switch (u.format)
{
case 1: return u.format1.collect_coverage (glyphs);
case 2: return u.format2.collect_coverage (glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.collect_coverage (glyphs);
case 4: return u.format4.collect_coverage (glyphs);
#endif
default:return false;
}
}
void intersected_coverage_glyphs (const hb_set_t *glyphs, hb_set_t *intersect_glyphs) const
{
switch (u.format)
{
case 1: return u.format1.intersected_coverage_glyphs (glyphs, intersect_glyphs);
case 2: return u.format2.intersected_coverage_glyphs (glyphs, intersect_glyphs);
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.intersected_coverage_glyphs (glyphs, intersect_glyphs);
case 4: return u.format4.intersected_coverage_glyphs (glyphs, intersect_glyphs);
#endif
default:return ;
}
}
struct iter_t : hb_iter_with_fallback_t<iter_t, hb_codepoint_t>
{
static constexpr bool is_sorted_iterator = true;
iter_t (const Coverage &c_ = Null (Coverage))
{
memset (this, 0, sizeof (*this));
format = c_.u.format;
switch (format)
{
case 1: u.format1.init (c_.u.format1); return;
case 2: u.format2.init (c_.u.format2); return;
#ifndef HB_NO_BORING_EXPANSION
case 3: u.format3.init (c_.u.format3); return;
case 4: u.format4.init (c_.u.format4); return;
#endif
default: return;
}
}
bool __more__ () const
{
switch (format)
{
case 1: return u.format1.more ();
case 2: return u.format2.more ();
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.more ();
case 4: return u.format4.more ();
#endif
default:return false;
}
}
void __next__ ()
{
switch (format)
{
case 1: u.format1.next (); break;
case 2: u.format2.next (); break;
#ifndef HB_NO_BORING_EXPANSION
case 3: u.format3.next (); break;
case 4: u.format4.next (); break;
#endif
default: break;
}
}
typedef hb_codepoint_t __item_t__;
__item_t__ __item__ () const { return get_glyph (); }
hb_codepoint_t get_glyph () const
{
switch (format)
{
case 1: return u.format1.get_glyph ();
case 2: return u.format2.get_glyph ();
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3.get_glyph ();
case 4: return u.format4.get_glyph ();
#endif
default:return 0;
}
}
bool operator != (const iter_t& o) const
{
if (unlikely (format != o.format)) return true;
switch (format)
{
case 1: return u.format1 != o.u.format1;
case 2: return u.format2 != o.u.format2;
#ifndef HB_NO_BORING_EXPANSION
case 3: return u.format3 != o.u.format3;
case 4: return u.format4 != o.u.format4;
#endif
default:return false;
}
}
iter_t __end__ () const
{
iter_t it = {};
it.format = format;
switch (format)
{
case 1: it.u.format1 = u.format1.__end__ (); break;
case 2: it.u.format2 = u.format2.__end__ (); break;
#ifndef HB_NO_BORING_EXPANSION
case 3: it.u.format3 = u.format3.__end__ (); break;
case 4: it.u.format4 = u.format4.__end__ (); break;
#endif
default: break;
}
return it;
}
private:
unsigned int format;
union {
#ifndef HB_NO_BORING_EXPANSION
CoverageFormat2_4<MediumTypes>::iter_t format4; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<MediumTypes>::iter_t format3;
#endif
CoverageFormat2_4<SmallTypes>::iter_t format2; /* Put this one first since it's larger; helps shut up compiler. */
CoverageFormat1_3<SmallTypes>::iter_t format1;
} u;
};
iter_t iter () const { return iter_t (*this); }
protected:
union {
HBUINT16 format; /* Format identifier */
CoverageFormat1_3<SmallTypes> format1;
CoverageFormat2_4<SmallTypes> format2;
#ifndef HB_NO_BORING_EXPANSION
CoverageFormat1_3<MediumTypes>format3;
CoverageFormat2_4<MediumTypes>format4;
#endif
} u;
public:
DEFINE_SIZE_UNION (2, format);
};
template<typename Iterator>
static inline void
Coverage_serialize (hb_serialize_context_t *c,
Iterator it)
{ c->start_embed<Coverage> ()->serialize (c, it); }
static void ClassDef_remap_and_serialize (hb_serialize_context_t *c,
const hb_set_t &klasses,

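The using-declarations added near the top of this file are the migration shim: call sites that still spell the old unqualified names keep compiling while the definitions now live in OT::Layout::Common, pending the cleanup flagged in the TODO. The mechanism, reduced to a toy (hypothetical names):

namespace NewHome { struct Widget {}; }  // stands in for OT::Layout::Common
using NewHome::Widget;                   // re-export the type at the old scope

Widget w;  // pre-reorg spelling still resolves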
src/test-iter.cc

@@ -31,7 +31,6 @@
#include "hb-set.hh"
#include "hb-ot-layout-common.hh"
template <typename T>
struct array_iter_t : hb_iter_with_fallback_t<array_iter_t<T>, T&>
{
@@ -226,7 +225,6 @@ main (int argc, char **argv)
test_iterable<hb_sorted_array_t<const int>> ();
test_iterable<hb_vector_t<float>> ();
test_iterable<hb_set_t> ();
test_iterable<OT::Coverage> ();
test_iterator (hb_zip (st, v));
test_iterator_non_default_constructable (hb_enumerate (st));


@@ -27,6 +27,7 @@
#include "hb-serialize.hh"
#include "hb-ot-layout-common.hh"
using OT::Layout::Common::Coverage;
int
main (int argc, char **argv)
@@ -37,7 +38,7 @@ main (int argc, char **argv)
hb_sorted_vector_t<hb_codepoint_t> v{1, 2, 5};
- auto c = s.start_serialize<OT::Coverage> ();
+ auto c = s.start_serialize<Coverage> ();
c->serialize (&s, hb_iter (v));