/*
 * Copyright © 2015 Google, Inc.
 *
 *  This is part of HarfBuzz, a text shaping library.
 *
 * Permission is hereby granted, without written agreement and without
 * license or royalty fees, to use, copy, modify, and distribute this
 * software and its documentation for any purpose, provided that the
 * above copyright notice and the following two paragraphs appear in
 * all copies of this software.
 *
 * IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE TO ANY PARTY FOR
 * DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
 * ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
 * IF THE COPYRIGHT HOLDER HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
 * DAMAGE.
 *
 * THE COPYRIGHT HOLDER SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING,
 * BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
 * FITNESS FOR A PARTICULAR PURPOSE.  THE SOFTWARE PROVIDED HEREUNDER IS
 * ON AN "AS IS" BASIS, AND THE COPYRIGHT HOLDER HAS NO OBLIGATION TO
 * PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
 *
 * Google Author(s): Behdad Esfahbod, Garret Rieger, Roderick Sheeter
 */

#ifndef HB_OT_GLYF_TABLE_HH
#define HB_OT_GLYF_TABLE_HH

#include "hb-open-type.hh"
#include "hb-ot-head-table.hh"

namespace OT {

/*
 * loca -- Index to Location
 * https://docs.microsoft.com/en-us/typography/opentype/spec/loca
 */
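
/* A reading sketch (per the OpenType spec, mirrored by accelerator_t::get_offsets
 * below): loca holds numGlyphs+1 offsets into glyf, and glyph i occupies bytes
 * [offset[i], offset[i+1]).  With the short format (head.indexToLocFormat == 0)
 * each entry is an HBUINT16 storing the real offset divided by 2; with the long
 * format each entry is an HBUINT32 holding the offset directly. */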

#define HB_OT_TAG_loca HB_TAG('l','o','c','a')

struct loca
{
  friend struct glyf;

  static constexpr hb_tag_t tableTag = HB_OT_TAG_loca;

  bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
  {
    TRACE_SANITIZE (this);
    return_trace (true);
  }

  protected:
  UnsizedArrayOf<HBUINT8> dataZ;    /* Location data. */
  public:
  DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
			* check the size externally, allow Null() object of it by
			* defining it _MIN instead. */
};

/*
 * glyf -- TrueType Glyph Data
 * https://docs.microsoft.com/en-us/typography/opentype/spec/glyf
 */
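
/* Layout sketch (per the OpenType spec): each glyph starts with a GlyphHeader
 * (defined below).  numberOfContours >= 0 marks a simple glyph, followed by the
 * endPtsOfContours array, instructionLength, the instructions, and the
 * flag/coordinate stream; a negative numberOfContours marks a composite glyph,
 * followed by a chain of CompositeGlyphHeader component records. */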

#define HB_OT_TAG_glyf HB_TAG('g','l','y','f')

struct glyf
{
  static constexpr hb_tag_t tableTag = HB_OT_TAG_glyf;

  bool sanitize (hb_sanitize_context_t *c HB_UNUSED) const
  {
    TRACE_SANITIZE (this);
    /* We don't check for anything specific here.  The users of the
     * struct do all the hard work... */
    return_trace (true);
  }

  template<typename Iterator,
	   hb_requires (hb_is_source_of (Iterator, unsigned int))>
  static bool
  _add_loca_and_head (hb_subset_plan_t *plan, Iterator padded_offsets)
  {
    /* loca stores cumulative offsets, so the largest offset is the *sum* of all
     * padded glyph sizes; reduce with hb_add rather than hb_max. */
    unsigned int max_offset = + padded_offsets | hb_reduce (hb_add, 0);
    unsigned num_offsets = padded_offsets.len () + 1;
    bool use_short_loca = max_offset < 0x1FFFF;
    unsigned entry_size = use_short_loca ? 2 : 4;
    char *loca_prime_data = (char *) calloc (entry_size, num_offsets);

    if (unlikely (!loca_prime_data)) return false;

    if (use_short_loca)
      _write_loca<HBUINT16> (padded_offsets, 1, hb_array_t<HBUINT16> ((HBUINT16 *) loca_prime_data, num_offsets));
    else
      _write_loca<HBUINT32> (padded_offsets, 0, hb_array_t<HBUINT32> ((HBUINT32 *) loca_prime_data, num_offsets));

    hb_blob_t *loca_blob = hb_blob_create (loca_prime_data,
					   entry_size * num_offsets,
					   HB_MEMORY_MODE_WRITABLE,
					   loca_prime_data,
					   free);

    bool result = plan->add_table (HB_OT_TAG_loca, loca_blob)
	       && _add_head_and_set_loca_version (plan, use_short_loca);

    hb_blob_destroy (loca_blob);
    return result;
  }
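
  /* Streams the loca entries for `it` (padded glyph sizes) into `dest`:
   * right_shift is 1 when writing the short format (entries hold offset / 2)
   * and 0 for the long format; the running total is appended once more at the
   * end so the table has num_glyphs + 1 entries. */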
  template<typename EntryType, typename IteratorIn, typename IteratorOut,
	   hb_requires (hb_is_source_of (IteratorIn, unsigned int)),
	   hb_requires (hb_is_sink_of (IteratorOut, EntryType))>
  static void
  _write_loca (IteratorIn it, unsigned right_shift, IteratorOut dest)
  {
    unsigned int offset = 0;
    + it
    | hb_map ([=, &offset] (unsigned int padded_size) {
	unsigned result = offset >> right_shift;
	DEBUG_MSG (SUBSET, nullptr, "loca entry offset %d", offset);
	offset += padded_size;
	return result;
      })
    | hb_sink (dest)
    ;
    DEBUG_MSG (SUBSET, nullptr, "loca entry offset %d", offset);
    dest << (offset >> right_shift);
  }

  /* hb_requires (hb_is_source_of (Iterator, SubsetGlyph)) would be the right
   * constraint here, but SubsetGlyph is not declared yet at this point. */
  template <typename Iterator>
  bool serialize (hb_serialize_context_t *c,
		  Iterator it,
		  const hb_subset_plan_t *plan)
  {
    TRACE_SERIALIZE (this);

    + it
    | hb_apply ([=] (const SubsetGlyph &_) { _.serialize (c, plan); })
    ;

    return_trace (true);
  }
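
  /* Subsetting emits three related tables: glyf' is written through serialize()
   * above, then _add_loca_and_head() builds loca' from the padded glyph sizes
   * and patches head.indexToLocFormat to match the chosen loca format. */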
  bool subset (hb_subset_context_t *c) const
  {
    TRACE_SUBSET (this);

    glyf *glyf_prime = c->serializer->start_embed<glyf> ();
    if (unlikely (!c->serializer->check_success (glyf_prime))) return_trace (false);

    // Byte region(s) per glyph to output
    // unpadded, hints removed if so requested
    // If we fail to process a glyph we produce an empty (0-length) glyph
    hb_vector_t<SubsetGlyph> glyphs;
    _populate_subset_glyphs (c->plan, &glyphs);

    glyf_prime->serialize (c->serializer, hb_iter (glyphs), c->plan);

    auto padded_offsets =
    + hb_iter (glyphs)
    | hb_map (&SubsetGlyph::padded_size)
    ;

    return_trace (c->serializer->check_success (_add_loca_and_head (c->plan, padded_offsets)));
  }

  template <typename SubsetGlyph>
  void
  _populate_subset_glyphs (const hb_subset_plan_t *plan,
			   hb_vector_t<SubsetGlyph> *glyphs /* OUT */) const
  {
    OT::glyf::accelerator_t glyf;
    glyf.init (plan->source);

    + hb_range (plan->num_output_glyphs ())
    | hb_map ([&] (hb_codepoint_t new_gid) {
	SubsetGlyph subset_glyph;
	subset_glyph.new_gid = new_gid;

	// should never fail: all old gids should be mapped
	if (!plan->old_gid_for_new_gid (new_gid, &subset_glyph.old_gid)) return subset_glyph;

	subset_glyph.source_glyph = glyf.bytes_for_glyph ((const char *) this, subset_glyph.old_gid);
	if (plan->drop_hints) subset_glyph.drop_hints (glyf);
	else subset_glyph.dest_start = subset_glyph.source_glyph;

	return subset_glyph;
      })
    | hb_sink (glyphs)
    ;

    glyf.fini ();
  }

  static void
  _fix_component_gids (const hb_subset_plan_t *plan,
		       hb_bytes_t glyph)
  {
    OT::glyf::CompositeGlyphHeader::Iterator iterator;
    if (OT::glyf::CompositeGlyphHeader::get_iterator (&glyph,
						      glyph.length,
						      &iterator))
    {
      do
      {
	hb_codepoint_t new_gid;
	if (!plan->new_gid_for_old_gid (iterator.current->glyphIndex,
					&new_gid))
	  continue;
	((OT::glyf::CompositeGlyphHeader *) iterator.current)->glyphIndex = new_gid;
      } while (iterator.move_to_next ());
    }
  }

  static void
  _zero_instruction_length (hb_bytes_t glyph)
  {
    const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (&glyph, 0);
    int16_t num_contours = (int16_t) glyph_header.numberOfContours;
    if (num_contours <= 0) return;  // only for simple glyphs

    const HBUINT16 &instruction_length = StructAtOffset<HBUINT16> (&glyph, GlyphHeader::static_size + 2 * num_contours);
    (HBUINT16 &) instruction_length = 0;
  }

  static bool _remove_composite_instruction_flag (hb_bytes_t glyph)
  {
    const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (&glyph, 0);
    if (glyph_header.numberOfContours >= 0) return true;  // only for composites

    /* remove WE_HAVE_INSTRUCTIONS from flags in dest */
    OT::glyf::CompositeGlyphHeader::Iterator composite_it;
    if (unlikely (!OT::glyf::CompositeGlyphHeader::get_iterator (&glyph, glyph.length, &composite_it))) return false;
    const OT::glyf::CompositeGlyphHeader *composite_header;
    do {
      composite_header = composite_it.current;
      OT::HBUINT16 *flags = const_cast<OT::HBUINT16 *> (&composite_header->flags);
      *flags = (uint16_t) *flags & ~OT::glyf::CompositeGlyphHeader::WE_HAVE_INSTRUCTIONS;
    } while (composite_it.move_to_next ());
    return true;
  }

  static bool
  _add_head_and_set_loca_version (hb_subset_plan_t *plan, bool use_short_loca)
  {
    hb_blob_t *head_blob = hb_sanitize_context_t ().reference_table<head> (plan->source);
    hb_blob_t *head_prime_blob = hb_blob_copy_writable_or_fail (head_blob);
    hb_blob_destroy (head_blob);

    if (unlikely (!head_prime_blob))
      return false;

    head *head_prime = (head *) hb_blob_get_data_writable (head_prime_blob, nullptr);
    head_prime->indexToLocFormat = use_short_loca ? 0 : 1;
    bool success = plan->add_table (HB_OT_TAG_head, head_prime_blob);

    hb_blob_destroy (head_prime_blob);
    return success;
  }

  struct GlyphHeader
  {
    HBINT16 numberOfContours;   /* If the number of contours is
				 * greater than or equal to zero,
				 * this is a simple glyph; if negative,
				 * this is a composite glyph. */
    FWORD   xMin;               /* Minimum x for coordinate data. */
    FWORD   yMin;               /* Minimum y for coordinate data. */
    FWORD   xMax;               /* Maximum x for coordinate data. */
    FWORD   yMax;               /* Maximum y for coordinate data. */

    DEFINE_SIZE_STATIC (10);
  };

  struct CompositeGlyphHeader
  {
    enum composite_glyph_flag_t {
      ARG_1_AND_2_ARE_WORDS     = 0x0001,
      ARGS_ARE_XY_VALUES        = 0x0002,
      ROUND_XY_TO_GRID          = 0x0004,
      WE_HAVE_A_SCALE           = 0x0008,
      MORE_COMPONENTS           = 0x0020,
      WE_HAVE_AN_X_AND_Y_SCALE  = 0x0040,
      WE_HAVE_A_TWO_BY_TWO      = 0x0080,
      WE_HAVE_INSTRUCTIONS      = 0x0100,
      USE_MY_METRICS            = 0x0200,
      OVERLAP_COMPOUND          = 0x0400,
      SCALED_COMPONENT_OFFSET   = 0x0800,
      UNSCALED_COMPONENT_OFFSET = 0x1000
    };

    HBUINT16 flags;
    GlyphID  glyphIndex;

    unsigned int get_size () const
    {
      unsigned int size = min_size;
      // arg1 and 2 are int16
      if (flags & ARG_1_AND_2_ARE_WORDS) size += 4;
      // arg1 and 2 are int8
      else size += 2;
      // One x 16 bit (scale)
      if (flags & WE_HAVE_A_SCALE) size += 2;
      // Two x 16 bit (xscale, yscale)
      else if (flags & WE_HAVE_AN_X_AND_Y_SCALE) size += 4;
      // Four x 16 bit (xscale, scale01, scale10, yscale)
      else if (flags & WE_HAVE_A_TWO_BY_TWO) size += 8;

      return size;
    }
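
    /* For example, get_size() above yields min_size (4) + 4 (word args) +
     * 2 (scale) = 10 bytes for a component with
     * ARG_1_AND_2_ARE_WORDS | WE_HAVE_A_SCALE set. */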

    // TODO rewrite using new iterator framework if possible
    struct Iterator
    {
      const char *glyph_start;
      const char *glyph_end;
      const CompositeGlyphHeader *current;

      bool move_to_next ()
      {
	if (current->flags & CompositeGlyphHeader::MORE_COMPONENTS)
	{
	  const CompositeGlyphHeader *possible =
	    &StructAfter<CompositeGlyphHeader, CompositeGlyphHeader> (*current);
	  if (!in_range (possible))
	    return false;
	  current = possible;
	  return true;
	}
	return false;
      }

      bool in_range (const CompositeGlyphHeader *composite) const
      {
	return (const char *) composite >= glyph_start
	    && ((const char *) composite + CompositeGlyphHeader::min_size) <= glyph_end
	    && ((const char *) composite + composite->get_size ()) <= glyph_end;
      }
    };

    static bool get_iterator (const char *glyph_data,
			      unsigned int length,
			      CompositeGlyphHeader::Iterator *iterator /* OUT */)
    {
      if (length < GlyphHeader::static_size)
	return false; /* Empty glyph; zero extents. */

      const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (glyph_data, 0);
      if (glyph_header.numberOfContours < 0)
      {
	const CompositeGlyphHeader *possible =
	  &StructAfter<CompositeGlyphHeader, GlyphHeader> (glyph_header);

	iterator->glyph_start = glyph_data;
	iterator->glyph_end = (const char *) glyph_data + length;
	if (!iterator->in_range (possible))
	  return false;
	iterator->current = possible;
	return true;
      }

      return false;
    }

    DEFINE_SIZE_MIN (4);
  };

  struct accelerator_t
  {
    void init (hb_face_t *face)
    {
      memset (this, 0, sizeof (accelerator_t));

      const OT::head &head = *face->table.head;
      if (head.indexToLocFormat > 1 || head.glyphDataFormat != 0)
	/* Unknown format.  Leave num_glyphs=0, that takes care of disabling us. */
	return;
      short_offset = 0 == head.indexToLocFormat;

      loca_table = hb_sanitize_context_t ().reference_table<loca> (face);
      glyf_table = hb_sanitize_context_t ().reference_table<glyf> (face);

      num_glyphs = hb_max (1u, loca_table.get_length () / (short_offset ? 2 : 4)) - 1;
    }

    void fini ()
    {
      loca_table.destroy ();
      glyf_table.destroy ();
    }

    /*
     * Returns true if the referenced glyph is a valid glyph and a composite glyph.
     * If true is returned a pointer to the composite glyph will be written into
     * composite.
     */
    bool get_composite (hb_codepoint_t glyph,
			CompositeGlyphHeader::Iterator *composite /* OUT */) const
    {
      if (unlikely (!num_glyphs))
	return false;

      unsigned int start_offset, end_offset;
      if (!get_offsets (glyph, &start_offset, &end_offset))
	return false; /* glyph not found */

      return CompositeGlyphHeader::get_iterator ((const char *) this->glyf_table + start_offset,
						 end_offset - start_offset,
						 composite);
    }

    enum simple_glyph_flag_t {
      FLAG_ON_CURVE  = 0x01,
      FLAG_X_SHORT   = 0x02,
      FLAG_Y_SHORT   = 0x04,
      FLAG_REPEAT    = 0x08,
      FLAG_X_SAME    = 0x10,
      FLAG_Y_SAME    = 0x20,
      FLAG_RESERVED1 = 0x40,
      FLAG_RESERVED2 = 0x80
    };
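
    /* remove_padding() below finds where the coordinate data of a simple glyph
     * really ends (skip the contour end points and instructions, then replay
     * the per-point flags, honoring FLAG_REPEAT and the short/same x/y bits)
     * and shrinks *end_offset so trailing padding is dropped. */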
    /* based on FontTools _g_l_y_f.py::trim */
    bool remove_padding (unsigned int start_offset,
			 unsigned int *end_offset) const
    {
      if (*end_offset - start_offset < GlyphHeader::static_size) return true;

      const char *glyph = ((const char *) glyf_table) + start_offset;
      const char * const glyph_end = glyph + (*end_offset - start_offset);
      const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (glyph, 0);
      int16_t num_contours = (int16_t) glyph_header.numberOfContours;

      if (num_contours < 0)
	/* Trimming for composites not implemented.
	 * If removing hints it falls out of that. */
	return true;
      else if (num_contours > 0)
      {
	/* simple glyph w/contours, possibly trimmable */
	glyph += GlyphHeader::static_size + 2 * num_contours;

	if (unlikely (glyph + 2 >= glyph_end)) return false;
	uint16_t nCoordinates = (uint16_t) StructAtOffset<HBUINT16> (glyph - 2, 0) + 1;
	uint16_t nInstructions = (uint16_t) StructAtOffset<HBUINT16> (glyph, 0);

	glyph += 2 + nInstructions;
	if (unlikely (glyph + 2 >= glyph_end)) return false;

	unsigned int coordBytes = 0;
	unsigned int coordsWithFlags = 0;
	while (glyph < glyph_end)
	{
	  uint8_t flag = (uint8_t) *glyph;
	  glyph++;

	  unsigned int repeat = 1;
	  if (flag & FLAG_REPEAT)
	  {
	    if (glyph >= glyph_end)
	    {
	      DEBUG_MSG (SUBSET, nullptr, "Bad flag");
	      return false;
	    }
	    repeat = ((uint8_t) *glyph) + 1;
	    glyph++;
	  }

	  unsigned int xBytes, yBytes;
	  xBytes = yBytes = 0;
	  if (flag & FLAG_X_SHORT) xBytes = 1;
	  else if ((flag & FLAG_X_SAME) == 0) xBytes = 2;

	  if (flag & FLAG_Y_SHORT) yBytes = 1;
	  else if ((flag & FLAG_Y_SAME) == 0) yBytes = 2;

	  coordBytes += (xBytes + yBytes) * repeat;
	  coordsWithFlags += repeat;
	  if (coordsWithFlags >= nCoordinates)
	    break;
	}

	if (coordsWithFlags != nCoordinates)
	{
	  DEBUG_MSG (SUBSET, nullptr, "Expect %d coords to have flags, got flags for %d", nCoordinates, coordsWithFlags);
	  return false;
	}
	glyph += coordBytes;

	if (glyph < glyph_end)
	  *end_offset -= glyph_end - glyph;
      }
      return true;
    }

    bool get_offsets (hb_codepoint_t glyph,
		      unsigned int *start_offset /* OUT */,
		      unsigned int *end_offset /* OUT */) const
    {
      if (unlikely (glyph >= num_glyphs))
	return false;

      if (short_offset)
      {
	const HBUINT16 *offsets = (const HBUINT16 *) loca_table->dataZ.arrayZ;
	*start_offset = 2 * offsets[glyph];
	*end_offset   = 2 * offsets[glyph + 1];
      }
      else
      {
	const HBUINT32 *offsets = (const HBUINT32 *) loca_table->dataZ.arrayZ;
	*start_offset = offsets[glyph];
	*end_offset   = offsets[glyph + 1];
      }

      if (*start_offset > *end_offset || *end_offset > glyf_table.get_length ())
	return false;

      return true;
    }
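
    /* Where instructions live (per the TrueType glyf format): in a simple glyph
     * they follow the endPtsOfContours array and are preceded by a 16-bit
     * instructionLength; in a composite glyph they trail the last component
     * record, and only when WE_HAVE_INSTRUCTIONS is set on it.
     * get_instruction_length() below computes their length for either case. */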
    bool get_instruction_length (hb_bytes_t glyph,
				 unsigned int *length /* OUT */) const
    {
      /* Empty glyph; no instructions. */
      if (glyph.length < GlyphHeader::static_size)
      {
	*length = 0;
	// only 0 byte glyphs are healthy when missing GlyphHeader
	return glyph.length == 0;
      }

      const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (&glyph, 0);
      int16_t num_contours = (int16_t) glyph_header.numberOfContours;
      if (num_contours < 0)
      {
	unsigned int start = glyph.length;
	unsigned int end = glyph.length;
	unsigned int glyph_offset = &glyph - glyf_table;

	CompositeGlyphHeader::Iterator composite_it;
	if (unlikely (!CompositeGlyphHeader::get_iterator (&glyph, glyph.length, &composite_it))) return false;
	const CompositeGlyphHeader *last;
	do {
	  last = composite_it.current;
	} while (composite_it.move_to_next ());

	if ((uint16_t) last->flags & CompositeGlyphHeader::WE_HAVE_INSTRUCTIONS)
	  start = ((char *) last - (char *) glyf_table->dataZ.arrayZ) + last->get_size () - glyph_offset;
	if (unlikely (start > end))
	{
	  DEBUG_MSG (SUBSET, nullptr, "Invalid instruction offset, %d is outside %d byte buffer", start, glyph.length);
	  return false;
	}
	*length = end - start;
      }
      else
      {
	unsigned int instruction_length_offset = GlyphHeader::static_size + 2 * num_contours;
	if (unlikely (instruction_length_offset + 2 > glyph.length))
	{
	  DEBUG_MSG (SUBSET, nullptr, "Glyph size is too short, missing field instructionLength.");
	  return false;
	}

	const HBUINT16 &instruction_length = StructAtOffset<HBUINT16> (&glyph, instruction_length_offset);
	if (unlikely (instruction_length_offset + instruction_length > glyph.length)) // Out of bounds of the current glyph
	{
	  DEBUG_MSG (SUBSET, nullptr, "The instructions array overruns the glyph's boundaries.");
	  return false;
	}

	*length = (uint16_t) instruction_length;
      }
      return true;
    }

    bool get_extents (hb_codepoint_t glyph, hb_glyph_extents_t *extents) const
    {
      unsigned int start_offset, end_offset;
      if (!get_offsets (glyph, &start_offset, &end_offset))
	return false;

      if (end_offset - start_offset < GlyphHeader::static_size)
	return true; /* Empty glyph; zero extents. */

      const GlyphHeader &glyph_header = StructAtOffset<GlyphHeader> (glyf_table, start_offset);

      extents->x_bearing = hb_min (glyph_header.xMin, glyph_header.xMax);
      extents->y_bearing = hb_max (glyph_header.yMin, glyph_header.yMax);
      extents->width     = hb_max (glyph_header.xMin, glyph_header.xMax) - extents->x_bearing;
      extents->height    = hb_min (glyph_header.yMin, glyph_header.yMax) - extents->y_bearing;

      return true;
    }

    hb_bytes_t bytes_for_glyph (const char *glyf, hb_codepoint_t gid)
    {
      unsigned int start_offset, end_offset;
      if (unlikely (!(get_offsets (gid, &start_offset, &end_offset) &&
		      remove_padding (start_offset, &end_offset))))
      {
	DEBUG_MSG (SUBSET, nullptr, "Unable to get offset or remove padding for %d", gid);
	return hb_bytes_t ();
      }

      hb_bytes_t glyph = hb_bytes_t (glyf + start_offset, end_offset - start_offset);
      if (glyph.length == 0) return glyph;
      if (unlikely (glyph.length < GlyphHeader::static_size))
      {
	DEBUG_MSG (SUBSET, nullptr, "Glyph size smaller than minimum header %d", gid);
	return hb_bytes_t ();
      }
      return glyph;
    }

    private:
    bool short_offset;
    unsigned int num_glyphs;
    hb_blob_ptr_t<loca> loca_table;
    hb_blob_ptr_t<glyf> glyf_table;
  };
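
  /* A minimal usage sketch of the accelerator (the hb_face_t `face` and the
   * glyph id `gid` are assumed to come from the caller):
   *
   *   OT::glyf::accelerator_t glyf;
   *   glyf.init (face);
   *   hb_glyph_extents_t extents;
   *   if (glyf.get_extents (gid, &extents))
   *     ... use extents ...
   *   glyf.fini ();
   */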

  struct SubsetGlyph
  {
    hb_codepoint_t new_gid;
    hb_codepoint_t old_gid;
    hb_bytes_t source_glyph;
    hb_bytes_t dest_start;  // region of source_glyph to copy first
    hb_bytes_t dest_end;    // region of source_glyph to copy second

    bool serialize (hb_serialize_context_t *c,
		    const hb_subset_plan_t *plan) const
    {
      TRACE_SERIALIZE (this);

      hb_bytes_t dest_glyph = dest_start.copy (c);
      dest_glyph = hb_bytes_t (&dest_glyph, dest_glyph.length + dest_end.copy (c).length);
      unsigned int pad_length = padding ();
      DEBUG_MSG (SUBSET, nullptr, "serialize %d byte glyph, width %d pad %d", dest_glyph.length, dest_glyph.length + pad_length, pad_length);

      HBUINT8 pad;
      pad = 0;
      while (pad_length > 0)
      {
	c->embed (pad);
	pad_length--;
      }

      if (dest_glyph.length)
      {
	_fix_component_gids (plan, dest_glyph);
	if (plan->drop_hints)
	{
	  _zero_instruction_length (dest_glyph);
	  c->check_success (_remove_composite_instruction_flag (dest_glyph));
	}
      }

      return_trace (true);
    }
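
    /* drop_hints() splits source_glyph so the instruction bytes are skipped when
     * the glyph is copied: for a composite glyph dest_start is simply the glyph
     * minus the trailing instructions; for a simple glyph dest_start covers the
     * header, endPtsOfContours and instructionLength, and dest_end resumes right
     * after the instruction bytes. */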
    void drop_hints (const OT::glyf::accelerator_t &glyf)
    {
      if (source_glyph.length == 0) return;

      unsigned int instruction_length = 0;
      if (!glyf.get_instruction_length (source_glyph, &instruction_length))
      {
	DEBUG_MSG (SUBSET, nullptr, "Unable to read instruction length for new_gid %d", new_gid);
	return;
      }

      const GlyphHeader &header = StructAtOffset<GlyphHeader> (&source_glyph, 0);
      int16_t num_contours = (int16_t) header.numberOfContours;
      DEBUG_MSG (SUBSET, nullptr, "new_gid %d (%d contours) drop %d instruction bytes from %d byte source glyph", new_gid, num_contours, instruction_length, source_glyph.length);
      if (num_contours < 0)
      {
	// composite, just chop instructions off the end
	dest_start = hb_bytes_t (&source_glyph, source_glyph.length - instruction_length);
      }
      else
      {
	// simple glyph
	dest_start = hb_bytes_t (&source_glyph, GlyphHeader::static_size + 2 * header.numberOfContours + 2);
	dest_end = hb_bytes_t (&source_glyph + dest_start.length + instruction_length,
			       source_glyph.length - dest_start.length - instruction_length);
	DEBUG_MSG (SUBSET, nullptr, "source_len %d start len %d instruction_len %d end len %d", source_glyph.length, dest_start.length, instruction_length, dest_end.length);
      }
    }

    unsigned int length () const
    {
      return dest_start.length + dest_end.length;
    }
    // pad to 2 to ensure 2-byte loca will be ok
    unsigned int padding () const
    {
      return length () % 2;
    }
    unsigned int padded_size () const
    {
      return length () + padding ();
    }
  };

  protected:
  UnsizedArrayOf<HBUINT8> dataZ;    /* Glyphs data. */
  public:
  DEFINE_SIZE_MIN (0); /* In reality, this is UNBOUNDED() type; but since we always
			* check the size externally, allow Null() object of it by
			* defining it _MIN instead. */
};

struct glyf_accelerator_t : glyf::accelerator_t {};

} /* namespace OT */


#endif /* HB_OT_GLYF_TABLE_HH */