parent 03aa12c75e
commit 57a224f51d

.gitlab-ci.yml
@@ -8,6 +8,7 @@ before_script:
  - dnf -y upgrade --disablerepo=rawhide-modular
  - dnf -y install --disablerepo=rawhide-modular --allowerasing --skip-broken @buildsys-build autoconf automake libtool gettext gettext-devel gperf expat-devel freetype-devel json-c-devel git docbook-utils docbook-utils-pdf bubblewrap
  - dnf -y install --disablerepo=rawhide-modular --allowerasing --skip-broken mingw64-gettext mingw64-freetype mingw64-expat wine
  - dnf -y install --disablerepo=rawhide-modular --allowerasing --skip-broken meson ninja-build wget

shared-build:
  stage: build
@@ -78,3 +79,146 @@ mingw-build:
      - build-*/test/*.log
      - build-*/test/*.trs
      - build-*/test/out*
meson-shared-build:
  image: fedora:latest
  stage: build
  script:
    - export BUILD_ID="fontconfig-${CI_JOB_NAME}_$CI_COMMIT_SHA-$CI_JOB_ID"
    - export PREFIX="$(pwd)/prefix-$BUILD_ID"
    - export BUILDDIR="$(pwd)/build-$BUILD_ID"
    - export MAKEFLAGS="-j4"
    - meson --prefix="$PREFIX" --default-library=shared "$BUILDDIR"
    - ninja -C "$BUILDDIR"
    - ninja -C "$BUILDDIR" test
    - ninja -C "$BUILDDIR" install
  artifacts:
    name: fontconfig-$CI_COMMIT_SHA-$CI_JOB_ID
    when: always
    paths:
      - build-*/meson-logs/*txt
      - prefix-*
meson-static-build:
  image: fedora:latest
  stage: build
  script:
    - export BUILD_ID="fontconfig-${CI_JOB_NAME}_$CI_COMMIT_SHA-$CI_JOB_ID"
    - export PREFIX="$(pwd)/prefix-$BUILD_ID"
    - export BUILDDIR="$(pwd)/build-$BUILD_ID"
    - export MAKEFLAGS="-j4"
    - meson --prefix="$PREFIX" --default-library=static "$BUILDDIR"
    - ninja -C "$BUILDDIR"
    - ninja -C "$BUILDDIR" test
    - ninja -C "$BUILDDIR" install
  artifacts:
    name: fontconfig-$CI_COMMIT_SHA-$CI_JOB_ID
    when: always
    paths:
      - build-*/meson-logs/*txt
      - prefix-*
meson-mingw-w64-build:
  image: fedora:latest
  stage: build
  script:
    - export BUILD_ID="fontconfig-${CI_JOB_NAME}_$CI_COMMIT_SHA-$CI_JOB_ID"
    - export PREFIX="$(pwd)/prefix-$BUILD_ID"
    - export BUILDDIR="$(pwd)/build-$BUILD_ID"
    - meson --prefix="$PREFIX" "$BUILDDIR" --cross-file .gitlab-ci/linux-mingw-w64-64bit.txt
    - ninja -C "$BUILDDIR"
    - ninja -C "$BUILDDIR" test
    # install doesn't work, fccache problems, but autotools ci doesn't do that either
    # - ninja -C "$BUILDDIR" install
  artifacts:
    name: fontconfig-$CI_COMMIT_SHA-$CI_JOB_ID
    when: always
    paths:
      - build-*/meson-logs/*txt
      - prefix-*

# FIXME: fontconfig should probably get its own image
.build meson windows:
  image: 'registry.freedesktop.org/gstreamer/gst-ci/amd64/windows:v10'
  stage: 'build'
  tags:
    - 'docker'
    - 'windows'
    - '1809'
  variables:
    # Make sure any failure in PowerShell scripts is fatal
    ErrorActionPreference: 'Stop'
    WarningPreference: 'Stop'
    # Uncomment the following key if you need to pass custom args, as well as the
    # $env:MESON_ARGS line in the `script:` blocks
    # MESON_ARGS: >-
    #   -Dfoo=enabled
    #   -Dbar=disabled
  before_script:
    # Make sure meson is up to date, so we don't need to rebuild the image with each release
    - pip3 install -U meson
  script:
    # For some reason, options are separated by newline instead of space, so we
    # have to replace them first.
    # - $env:MESON_ARGS = $env:MESON_ARGS.replace("`n"," ")
    # Gitlab executes PowerShell in docker, but VsDevCmd.bat is a batch script.
    # Environment variable substitution is done by PowerShell before calling
    # cmd.exe, that's why we use $env:FOO instead of %FOO%
    - cmd.exe /C "C:\BuildTools\Common7\Tools\VsDevCmd.bat -host_arch=amd64 -arch=$env:ARCH &&
      meson build $env:MESON_ARGS &&
      ninja -C build &&
      ninja -C build test"

meson vs2017 amd64:
  extends: '.build meson windows'
  variables:
    ARCH: 'amd64'

meson vs2017 x86:
  extends: '.build meson windows'
  variables:
    ARCH: 'x86'

meson macos:
  stage: 'build'
  tags:
    - gst-macos-10.15
  artifacts:
    name: "${CI_JOB_NAME}_${CI_COMMIT_SHA}"
    expire_in: '5 days'
    when: 'always'
    paths:
      - "build/meson-logs/*txt"
  before_script:
    - pip3 install --upgrade pip
    # Make sure meson is up to date
    - pip3 install -U meson
    # Need to install certificates for python
    - pip3 install --upgrade certifi
    # Another way to install certificates
    - open /Applications/Python\ 3.8/Install\ Certificates.command
    # Get ninja
    - curl -L -o ninja-mac.zip https://github.com/ninja-build/ninja/releases/download/v1.10.0/ninja-mac.zip
    - unzip ninja-mac.zip
    - sudo cp ninja /usr/local/bin
  script:
    - CERT_PATH=$(python3 -m certifi) && export SSL_CERT_FILE=${CERT_PATH} && export REQUESTS_CA_BUNDLE=${CERT_PATH} && meson build
    - ninja -C build
    - ninja -C build test

# msys infrastructure is a bit broken, disable for now
meson msys2:
  extends: '.build meson windows'
  when: 'manual'
  allow_failure: true
  script:
    # For some reason, options are separated by newline instead of space, so we
    # have to replace them first.
    # - $env:MESON_ARGS = $env:MESON_ARGS.replace("`n"," ")

    - $env:PATH += ";C:\msys64\usr\bin;C:\msys64\mingw64/bin;C:\msys64\mingw32/bin"
    # XXX: Copied from https://gitlab.freedesktop.org/gstreamer/gst-ci/blob/master/gitlab/ci_template.yml#L487
    # For some reason docker build hangs if this is included in the image, needs more troubleshooting
    - C:\msys64\usr\bin\bash -c "pacman-key --init && pacman-key --populate msys2 && pacman-key --refresh-keys || true"
    - C:\msys64\usr\bin\bash -c "pacman -Syuu --noconfirm"
    - C:\msys64\usr\bin\bash -c "pacman -Sy --noconfirm --needed mingw-w64-x86_64-toolchain ninja"
    - C:\msys64\usr\bin\bash -c "meson build $env:MESON_ARGS &&
      ninja -C build &&
      ninja -C build test"

.gitlab-ci/linux-mingw-w64-64bit.txt
@@ -0,0 +1,20 @@
[host_machine]
system = 'windows'
cpu_family = 'x86_64'
cpu = 'x86_64'
endian = 'little'

[properties]
c_args = []
c_link_args = []

[binaries]
c = 'x86_64-w64-mingw32-gcc'
cpp = 'x86_64-w64-mingw32-g++'
ar = 'x86_64-w64-mingw32-ar'
ld = 'x86_64-w64-mingw32-ld'
objcopy = 'x86_64-w64-mingw32-objcopy'
strip = 'x86_64-w64-mingw32-strip'
pkgconfig = 'x86_64-w64-mingw32-pkg-config'
windres = 'x86_64-w64-mingw32-windres'
# exe_wrapper = 'wine64'
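
# Illustrative usage, mirroring the meson-mingw-w64-build CI job above
# (assumes the mingw64-* packages from the CI before_script are installed):
#
#   meson --prefix="$PWD/prefix" build-mingw --cross-file .gitlab-ci/linux-mingw-w64-64bit.txt
#   ninja -C build-mingw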

Makefile.am
@@ -31,12 +31,34 @@ endif

ACLOCAL_AMFLAGS = -I m4

MESON_FILES = \
	conf.d/link_confs.py \
	conf.d/write-35-lang-normalize-conf.py \
	doc/edit-sgml.py \
	doc/extract-man-list.py \
	doc/run-quiet.py \
	fc-case/fc-case.py \
	fc-lang/fc-lang.py \
	install-cache.py \
	meson.build \
	meson_options.txt \
	src/cutout.py \
	src/fcstdint.h.in \
	src/fcwindows.h \
	src/fontconfig.def.in \
	src/makealias.py \
	stdin_wrapper.py \
	$(wildcard $(srcdir)/*/meson.build) \
	$(wildcard $(srcdir)/meson-cc-tests/*) \
	$(wildcard $(srcdir)/subprojects/*.wrap)

EXTRA_DIST = config.rpath \
	fontconfig.pc.in \
	fonts.conf.in \
	fonts.dtd \
	fontconfig-zip.in \
	config-fixups.h
	config-fixups.h \
	$(MESON_FILES)
CLEANFILES = fonts.conf
DISTCLEANFILES = config.cache doltcompile
MAINTAINERCLEANFILES = \
@@ -153,4 +175,14 @@ debuild-dirs: distdir

DISTCHECK_CONFIGURE_FLAGS =

check-versions:
	@$(GREP) -e '^\s*version\s*:\s*.'$(VERSION)'.,' $(srcdir)/meson.build >/dev/null || { \
	  echo "======================================================================================"; \
	  echo "Meson version does not seem to match autotools version $(VERSION), update meson.build!"; \
	  echo "======================================================================================"; \
	  exit 1; \
	}

all-local: check-versions

-include $(top_srcdir)/git.mk

conf.d/link_confs.py
@@ -0,0 +1,33 @@
#!/usr/bin/env python3

import os
import sys
import argparse

if __name__=='__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('availpath')
    parser.add_argument('confpath')
    parser.add_argument('links', nargs='+')
    args = parser.parse_args()

    confpath = os.path.join(os.environ['MESON_INSTALL_DESTDIR_PREFIX'], args.confpath)

    if not os.path.exists(confpath):
        os.makedirs(confpath)

    for link in args.links:
        src = os.path.join(args.availpath, link)
        dst = os.path.join(confpath, link)
        try:
            os.symlink(src, dst)
        except NotImplementedError:
            # Not supported on this version of Windows
            break
        except OSError as e:
            # Symlink privileges are not available
            if len(e.args) == 1 and 'privilege' in e.args[0]:
                break
            raise
        except FileExistsError:
            pass

conf.d/meson.build
@@ -0,0 +1,92 @@
conf_files = [
|
||||
'05-reset-dirs-sample.conf',
|
||||
'09-autohint-if-no-hinting.conf',
|
||||
'10-autohint.conf',
|
||||
'10-hinting-full.conf',
|
||||
'10-hinting-medium.conf',
|
||||
'10-hinting-none.conf',
|
||||
'10-hinting-slight.conf',
|
||||
'10-no-sub-pixel.conf',
|
||||
'10-scale-bitmap-fonts.conf',
|
||||
'10-sub-pixel-bgr.conf',
|
||||
'10-sub-pixel-rgb.conf',
|
||||
'10-sub-pixel-vbgr.conf',
|
||||
'10-sub-pixel-vrgb.conf',
|
||||
'10-unhinted.conf',
|
||||
'11-lcdfilter-default.conf',
|
||||
'11-lcdfilter-legacy.conf',
|
||||
'11-lcdfilter-light.conf',
|
||||
'20-unhint-small-vera.conf',
|
||||
'25-unhint-nonlatin.conf',
|
||||
'30-metric-aliases.conf',
|
||||
'40-nonlatin.conf',
|
||||
'45-generic.conf',
|
||||
'45-latin.conf',
|
||||
'49-sansserif.conf',
|
||||
'50-user.conf',
|
||||
'51-local.conf',
|
||||
'60-generic.conf',
|
||||
'60-latin.conf',
|
||||
'65-fonts-persian.conf',
|
||||
'65-khmer.conf',
|
||||
'65-nonlatin.conf',
|
||||
'69-unifont.conf',
|
||||
'70-no-bitmaps.conf',
|
||||
'70-yes-bitmaps.conf',
|
||||
'80-delicious.conf',
|
||||
'90-synthetic.conf',
|
||||
]
|
||||
|
||||
preferred_hinting = 'slight'
|
||||
|
||||
conf_links = [
|
||||
'10-hinting-@0@.conf'.format(preferred_hinting),
|
||||
'10-scale-bitmap-fonts.conf',
|
||||
'20-unhint-small-vera.conf',
|
||||
'30-metric-aliases.conf',
|
||||
'40-nonlatin.conf',
|
||||
'45-generic.conf',
|
||||
'45-latin.conf',
|
||||
'49-sansserif.conf',
|
||||
'50-user.conf',
|
||||
'51-local.conf',
|
||||
'60-generic.conf',
|
||||
'60-latin.conf',
|
||||
'65-fonts-persian.conf',
|
||||
'65-nonlatin.conf',
|
||||
'69-unifont.conf',
|
||||
'80-delicious.conf',
|
||||
'90-synthetic.conf',
|
||||
]
|
||||
|
||||
install_data(conf_files, install_dir: join_paths(get_option('datadir'), 'fontconfig/conf.avail'))
|
||||
|
||||
meson.add_install_script('link_confs.py',
|
||||
join_paths(get_option('prefix'), get_option('datadir'), 'fontconfig/conf.avail'),
|
||||
join_paths(get_option('sysconfdir'), 'fonts', 'conf.d'),
|
||||
conf_links,
|
||||
)
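# At install time, link_confs.py (shown earlier in this diff) joins the conf.d
# path above with MESON_INSTALL_DESTDIR_PREFIX and creates one symlink per
# entry in conf_links, pointing at the installed conf.avail copy; it silently
# skips the links on platforms without symlink support.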
|
||||
|
||||
# 35-lang-normalize.conf
|
||||
orths = []
|
||||
foreach o : orth_files # orth_files is from fc-lang/meson.build
|
||||
o = o.split('.')[0] # strip filename suffix
|
||||
if not o.contains('_') # ignore those with an underscore
|
||||
orths += [o]
|
||||
endif
|
||||
endforeach
|
||||
|
||||
custom_target('35-lang-normalize.conf',
|
||||
output: '35-lang-normalize.conf',
|
||||
command: [find_program('write-35-lang-normalize-conf.py'), ','.join(orths), '@OUTPUT@'],
|
||||
install_dir: join_paths(get_option('datadir'), 'fontconfig/conf.avail'),
|
||||
install: true)
|
||||
|
||||
# README
|
||||
readme_cdata = configuration_data()
|
||||
readme_cdata.set('TEMPLATEDIR', fc_templatedir)
|
||||
configure_file(output: 'README',
|
||||
input: 'README.in',
|
||||
configuration: readme_cdata,
|
||||
install_dir: join_paths(get_option('sysconfdir'), 'fonts', 'conf.d'),
|
||||
install: true)
|

conf.d/write-35-lang-normalize-conf.py
@@ -0,0 +1,36 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig write-35-lang-normalize-conf.py
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
print('ERROR: usage: {} ORTH_LIST [OUTPUT.CONF]'.format(sys.argv[0]))
|
||||
sys.exit(-1)
|
||||
|
||||
orth_list_unsorted = sys.argv[1].split(',')
|
||||
|
||||
if len(sys.argv) > 2 and sys.argv[2] != '-':
|
||||
f_out = open(sys.argv[2], 'w', encoding='utf8')
|
||||
else:
|
||||
f_out = sys.stdout
|
||||
|
||||
orth_list = sorted(sys.argv[1].split(','))
|
||||
|
||||
print('<?xml version="1.0"?>', file=f_out)
|
||||
print('<!DOCTYPE fontconfig SYSTEM "urn:fontconfig:fonts.dtd">', file=f_out)
|
||||
print('<fontconfig>', file=f_out)
|
||||
|
||||
for o in orth_list:
|
||||
print(f' <!-- {o}* -> {o} -->', file=f_out)
|
||||
print(f' <match>', file=f_out)
|
||||
print(f' <test name="lang" compare="contains"><string>{o}</string></test>', file=f_out)
|
||||
print(f' <edit name="lang" mode="assign" binding="same"><string>{o}</string></edit>', file=f_out)
|
||||
print(f' </match>', file=f_out)
|
||||
|
||||
print('</fontconfig>', file=f_out)
|
||||
|
||||
f_out.close()
|
||||
|
||||
sys.exit(0)
|

doc/edit-sgml.py
@@ -0,0 +1,160 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig/doc/edit-sgml.py
|
||||
#
|
||||
# Copyright © 2003 Keith Packard
|
||||
# Copyright © 2020 Tim-Philipp Müller
|
||||
#
|
||||
# Permission to use, copy, modify, distribute, and sell this software and its
|
||||
# documentation for any purpose is hereby granted without fee, provided that
|
||||
# the above copyright notice appear in all copies and that both that
|
||||
# copyright notice and this permission notice appear in supporting
|
||||
# documentation, and that the name of the author(s) not be used in
|
||||
# advertising or publicity pertaining to distribution of the software without
|
||||
# specific, written prior permission. The authors make no
|
||||
# representations about the suitability of this software for any purpose. It
|
||||
# is provided "as is" without express or implied warranty.
|
||||
#
|
||||
# THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
||||
# EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import re
|
||||
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('template')
|
||||
parser.add_argument('input')
|
||||
parser.add_argument('output')
|
||||
|
||||
args = parser.parse_known_args()
|
||||
|
||||
template_fn = args[0].template
|
||||
output_fn = args[0].output
|
||||
input_fn = args[0].input
|
||||
|
||||
# -------------
|
||||
# Read template
|
||||
# -------------
|
||||
|
||||
with open(template_fn, 'r', encoding='utf8') as f:
|
||||
template_text = f.read()
|
||||
|
||||
template_lines = template_text.strip().split('\n')
|
||||
|
||||
# -------------------------------------
|
||||
# Read replacement sets from .fncs file
|
||||
# -------------------------------------
|
||||
|
||||
replacement_sets = []
|
||||
|
||||
# TODO: also allow '-' for stdin
|
||||
with open(input_fn, 'r', encoding='utf8') as f:
|
||||
fncs_text = f.read()
|
||||
|
||||
# split into replacement sets
|
||||
fncs_chunks = fncs_text.strip().split('@@')
|
||||
|
||||
for chunk in fncs_chunks:
|
||||
# get rid of any preamble such as license and FcFreeTypeQueryAll decl in fcfreetype.fncs
|
||||
start = chunk.find('@')
|
||||
if start:
|
||||
chunk = chunk[start:]
|
||||
|
||||
# split at '@' and remove empty lines (keep it simple instead of doing fancy
|
||||
# things with regular expression matches, we control the input after all)
|
||||
lines = [line for line in chunk.split('@') if line.strip()]
|
||||
|
||||
replacement_set = {}
|
||||
|
||||
while lines:
|
||||
tag = lines.pop(0).strip()
|
||||
# FIXME: this hard codes the tag used in funcs.sgml - we're lazy
|
||||
if tag.startswith('PROTOTYPE'):
|
||||
text = ''
|
||||
else:
|
||||
text = lines.pop(0).strip()
|
||||
if text.endswith('%'):
|
||||
text = text[:-1] + ' '
|
||||
|
||||
replacement_set[tag] = text
|
||||
|
||||
if replacement_set:
|
||||
replacement_sets += [replacement_set]
|
||||
|
||||
# ----------------
|
||||
# Open output file
|
||||
# ----------------
|
||||
|
||||
if output_fn == '-':
|
||||
fout = sys.stdout
|
||||
else:
|
||||
fout = open(output_fn, "w", encoding='utf8')
|
||||
|
||||
# ----------------
|
||||
# Process template
|
||||
# ----------------
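# Directive syntax handled by do_replace() below (summary derived from the
# code itself):
#
#   @?TAG@   keep the following lines only if TAG is present in the
#            replacement set
#   @:@      "else" branch of the current conditional
#   @;@      end of the current conditional
#   @{TAG@   start of a loop; the body is expanded once for each of TAG+,
#            TAG++, ... present in the replacement set
#   @}TAG@   end of the loop
#   @TAG@    inline substitution with the replacement text for TAG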
|
||||
|
||||
def do_replace(template_lines, rep, tag_suffix=''):
|
||||
skip_tag = None
|
||||
skip_lines = False
|
||||
loop_lines = []
|
||||
loop_tag = None
|
||||
|
||||
for t_line in template_lines:
|
||||
# This makes processing easier and is the case for our templates
|
||||
if t_line.startswith('@') and not t_line.endswith('@'):
|
||||
sys.exit('Template lines starting with @ are expected to end with @, please fix me!')
|
||||
|
||||
if loop_tag:
|
||||
loop_lines += [t_line]
|
||||
|
||||
# Check if line starts with a directive
|
||||
if t_line.startswith('@?'):
|
||||
tag = t_line[2:-1] + tag_suffix
|
||||
if skip_tag:
|
||||
sys.exit('Recursive skipping not supported, please fix me!')
|
||||
skip_tag = tag
|
||||
skip_lines = tag not in rep
|
||||
elif t_line.startswith('@:'):
|
||||
if not skip_tag:
|
||||
sys.exit('Skip else but no active skip list?!')
|
||||
skip_lines = skip_tag in rep
|
||||
elif t_line.startswith('@;'):
|
||||
if not skip_tag:
|
||||
sys.exit('Skip end but no active skip list?!')
|
||||
skip_tag = None
|
||||
skip_lines = False
|
||||
elif t_line.startswith('@{'):
|
||||
if loop_tag or tag_suffix != '':
|
||||
sys.exit('Recursive looping not supported, please fix me!')
|
||||
loop_tag = t_line[2:-1]
|
||||
elif t_line.startswith('@}'):
|
||||
tag = t_line[2:-1] + tag_suffix
|
||||
if not loop_tag:
|
||||
sys.exit('Loop end but no active loop?!')
|
||||
if loop_tag != tag:
|
||||
sys.exit(f'Loop end but loop tag mismatch: {loop_tag} != {tag}!')
|
||||
loop_lines.pop() # remove loop end directive
|
||||
suffix = '+'
|
||||
while loop_tag + suffix in rep:
|
||||
do_replace(loop_lines, rep, suffix)
|
||||
suffix += '+'
|
||||
loop_tag = None
|
||||
loop_lines = []
|
||||
else:
|
||||
if not skip_lines:
|
||||
# special-case inline optional substitution (hard-codes specific pattern in funcs.sgml because we're lazy)
|
||||
output_line = re.sub(r'@\?(RET)@@RET@@:@(void)@;@', lambda m: rep.get(m.group(1) + tag_suffix, m.group(2)), t_line)
|
||||
# replace any substitution tags with their respective substitution text
|
||||
output_line = re.sub(r'@(\w+)@', lambda m: rep.get(m.group(1) + tag_suffix, ''), output_line)
|
||||
print(output_line, file=fout)
|
||||
|
||||
# process template for each replacement set
|
||||
for rep in replacement_sets:
|
||||
do_replace(template_lines, rep)
|

doc/extract-man-list.py
@@ -0,0 +1,90 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig/doc/extract-man-list.py
|
||||
#
|
||||
# Parses .fncs files and extracts list of man pages that will be generated
|
||||
#
|
||||
# Copyright © 2020 Tim-Philipp Müller
|
||||
#
|
||||
# Permission to use, copy, modify, distribute, and sell this software and its
|
||||
# documentation for any purpose is hereby granted without fee, provided that
|
||||
# the above copyright notice appear in all copies and that both that
|
||||
# copyright notice and this permission notice appear in supporting
|
||||
# documentation, and that the name of the author(s) not be used in
|
||||
# advertising or publicity pertaining to distribution of the software without
|
||||
# specific, written prior permission. The authors make no
|
||||
# representations about the suitability of this software for any purpose. It
|
||||
# is provided "as is" without express or implied warranty.
|
||||
#
|
||||
# THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
||||
# EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import sys
|
||||
import re
|
||||
|
||||
replacement_sets = []
|
||||
|
||||
# -------------------------------------
|
||||
# Read replacement sets from .fncs file
|
||||
# -------------------------------------
|
||||
|
||||
def read_fncs_file(fn):
|
||||
global replacement_sets
|
||||
|
||||
with open(fn, 'r', encoding='utf8') as f:
|
||||
fncs_text = f.read()
|
||||
|
||||
# split into replacement sets
|
||||
fncs_chunks = fncs_text.strip().split('@@')
|
||||
|
||||
for chunk in fncs_chunks:
|
||||
# get rid of any preamble such as license and FcFreeTypeQueryAll decl in fcfreetype.fncs
|
||||
start = chunk.find('@')
|
||||
if start:
|
||||
chunk = chunk[start:]
|
||||
|
||||
# split at '@' and remove empty lines (keep it simple instead of doing fancy
|
||||
# things with regular expression matches, we control the input after all)
|
||||
lines = [line for line in chunk.split('@') if line.strip()]
|
||||
|
||||
replacement_set = {}
|
||||
|
||||
while lines:
|
||||
tag = lines.pop(0).strip()
|
||||
# FIXME: this hard codes the tag used in funcs.sgml - we're lazy
|
||||
if tag.startswith('PROTOTYPE'):
|
||||
text = ''
|
||||
else:
|
||||
text = lines.pop(0).strip()
|
||||
if text.endswith('%'):
|
||||
text = text[:-1] + ' '
|
||||
|
||||
replacement_set[tag] = text
|
||||
|
||||
if replacement_set:
|
||||
replacement_sets += [replacement_set]
|
||||
|
||||
# ----------------------------------------------------------------------------
|
||||
# Main
|
||||
# ----------------------------------------------------------------------------
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
sys.exit('Usage: {} FILE1.FNCS [FILE2.FNCS...]'.format(sys.argv[0]))
|
||||
|
||||
fout = sys.stdout
|
||||
|
||||
for input_fn in sys.argv[1:]:
|
||||
read_fncs_file(input_fn)
|
||||
|
||||
# process template for each replacement set
|
||||
for rep in replacement_sets:
|
||||
if 'FUNC+' in rep:
|
||||
man_page_title = rep.get('TITLE', rep['FUNC'])
|
||||
else:
|
||||
man_page_title = rep['FUNC']
|
||||
print(man_page_title)
|

doc/meson.build
@@ -0,0 +1,167 @@
docbook2man = find_program('docbook2man', required: get_option('doc-man'))
|
||||
docbook2txt = find_program('docbook2txt', required: get_option('doc-txt'))
|
||||
docbook2pdf = find_program('docbook2pdf', required: get_option('doc-pdf'))
|
||||
docbook2html = find_program('docbook2html', required: get_option('doc-html'))
|
||||
|
||||
# docbook is very spammy
|
||||
run_quiet = find_program('run-quiet.py')
|
||||
|
||||
# .fncs files
|
||||
doc_funcs_fncs = [
|
||||
'fcatomic',
|
||||
'fcblanks',
|
||||
'fccache',
|
||||
'fccharset',
|
||||
'fcconfig',
|
||||
'fcconstant',
|
||||
'fcdircache',
|
||||
'fcfile',
|
||||
'fcfontset',
|
||||
'fcformat',
|
||||
'fcfreetype',
|
||||
'fcinit',
|
||||
'fclangset',
|
||||
'fcmatrix',
|
||||
'fcobjectset',
|
||||
'fcobjecttype',
|
||||
'fcpattern',
|
||||
'fcrange',
|
||||
'fcstring',
|
||||
'fcstrset',
|
||||
'fcvalue',
|
||||
'fcweight',
|
||||
]
|
||||
|
||||
fncs_files = []
|
||||
foreach f : doc_funcs_fncs
|
||||
fncs_files += files('@0@.fncs'.format(f))
|
||||
endforeach
|
||||
|
||||
man_pages = []
|
||||
|
||||
extract_man_list = find_program('extract-man-list.py')
|
||||
man_list = run_command(extract_man_list, fncs_files, check: true).stdout().split()
|
||||
|
||||
foreach m : man_list
|
||||
man_pages += ['@0@.3'.format(m)]
|
||||
endforeach
|
||||
|
||||
# Generate sgml pages for funcs
|
||||
edit_sgml = find_program('edit-sgml.py')
|
||||
|
||||
# copy into build directory, it includes generated files from build directory
|
||||
fontconfig_devel_sgml = configure_file(output: 'fontconfig-devel.sgml',
|
||||
input: 'fontconfig-devel.sgml',
|
||||
copy: true)
|
||||
|
||||
fontconfig_user_sgml = configure_file(output: 'fontconfig-user.sgml',
|
||||
input: 'fontconfig-user.sgml',
|
||||
copy: true)
|
||||
|
||||
version_conf = configuration_data()
|
||||
version_conf.set('VERSION', meson.project_version())
|
||||
|
||||
configure_file(output: 'version.sgml',
|
||||
input: 'version.sgml.in',
|
||||
configuration: version_conf)
|
||||
|
||||
confdir_conf = configuration_data()
|
||||
confdir_conf.set('BASECONFIGDIR', fc_configdir)
|
||||
|
||||
confdir_sgml = configure_file(output: 'confdir.sgml',
|
||||
input: 'confdir.sgml.in',
|
||||
configuration: confdir_conf)
|
||||
|
||||
funcs_sgml = []
|
||||
|
||||
foreach f : doc_funcs_fncs
|
||||
funcs_sgml += [custom_target('@0@.sgml'.format(f),
|
||||
input: [files('func.sgml'), files('@0@.fncs'.format(f))],
|
||||
output: '@0@.sgml'.format(f),
|
||||
command: [edit_sgml, '@INPUT0@', '@INPUT1@', '@OUTPUT@'],
|
||||
install: false)]
|
||||
endforeach
|
||||
|
||||
if docbook2man.found()
|
||||
custom_target('devel-man',
|
||||
input: [fontconfig_devel_sgml, funcs_sgml],
|
||||
output: man_pages,
|
||||
command: [run_quiet, docbook2man, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('mandir') / 'man3',
|
||||
install: true)
|
||||
|
||||
# fonts.conf(5)
|
||||
custom_target('fonts-conf-5-man-page',
|
||||
input: [fontconfig_user_sgml],
|
||||
output: 'fonts-conf.5',
|
||||
command: [run_quiet, docbook2man, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
install_dir: get_option('mandir') / 'man5',
|
||||
build_by_default: true,
|
||||
install: true)
|
||||
|
||||
# Generate man pages for tools
|
||||
foreach t : tools_man_pages
|
||||
# docbook2man doesn't seem to have a --quiet option unfortunately
|
||||
custom_target('@0@-man-page'.format(t),
|
||||
input: '../@0@/@0@.sgml'.format(t),
|
||||
output: '@0@.1'.format(t),
|
||||
command: [run_quiet, docbook2man, '@INPUT@', '--output', '@OUTDIR@'],
|
||||
install_dir: get_option('mandir') / 'man1',
|
||||
install: true)
|
||||
endforeach
|
||||
endif
|
||||
|
||||
if docbook2pdf.found()
|
||||
custom_target('devel-pdf',
|
||||
input: [fontconfig_devel_sgml, funcs_sgml],
|
||||
output: 'fontconfig-devel.pdf',
|
||||
command: [run_quiet, docbook2pdf, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
|
||||
custom_target('user-pdf',
|
||||
input: [fontconfig_user_sgml, funcs_sgml],
|
||||
output: 'fontconfig-user.pdf',
|
||||
command: [run_quiet, docbook2pdf, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
endif
|
||||
|
||||
if docbook2txt.found()
|
||||
custom_target('devel-txt',
|
||||
input: [fontconfig_devel_sgml, funcs_sgml],
|
||||
output: 'fontconfig-devel.txt',
|
||||
command: [run_quiet, docbook2txt, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
|
||||
custom_target('user-txt',
|
||||
input: [fontconfig_user_sgml, funcs_sgml],
|
||||
output: 'fontconfig-user.txt',
|
||||
command: [run_quiet, docbook2txt, '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
endif
|
||||
|
||||
if docbook2html.found()
|
||||
custom_target('devel-html',
|
||||
input: [fontconfig_devel_sgml, funcs_sgml],
|
||||
output: 'fontconfig-devel.html',
|
||||
command: [run_quiet, docbook2html, '--nochunks', '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
|
||||
custom_target('user-html',
|
||||
input: [fontconfig_user_sgml, funcs_sgml],
|
||||
output: 'fontconfig-user.html',
|
||||
command: [run_quiet, docbook2html, '--nochunks', '@INPUT0@', '--output', '@OUTDIR@'],
|
||||
build_by_default: true,
|
||||
install_dir: get_option('datadir') / 'doc' / 'fontconfig',
|
||||
install: true)
|
||||
endif
|

doc/run-quiet.py
@@ -0,0 +1,36 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig/doc/run-quiet.py
|
||||
#
|
||||
# Runs command and discards anything it sends to stdout
|
||||
#
|
||||
# Copyright © 2020 Tim-Philipp Müller
|
||||
#
|
||||
# Permission to use, copy, modify, distribute, and sell this software and its
|
||||
# documentation for any purpose is hereby granted without fee, provided that
|
||||
# the above copyright notice appear in all copies and that both that
|
||||
# copyright notice and this permission notice appear in supporting
|
||||
# documentation, and that the name of the author(s) not be used in
|
||||
# advertising or publicity pertaining to distribution of the software without
|
||||
# specific, written prior permission. The authors make no
|
||||
# representations about the suitability of this software for any purpose. It
|
||||
# is provided "as is" without express or implied warranty.
|
||||
#
|
||||
# THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
||||
# EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
import subprocess
|
||||
import sys
|
||||
import os
|
||||
|
||||
if len(sys.argv) < 2:
|
||||
sys.exit('Usage: {} PROGRAM [ARGS..]'.format(sys.argv[0]))
|
||||
|
||||
command = sys.argv[1:]
|
||||
|
||||
with open(os.devnull, 'w') as out:
|
||||
sys.exit(subprocess.run(command, stdout=out).returncode)
|

@@ -66,6 +66,10 @@
#define O_BINARY 0
|
||||
#endif
|
||||
|
||||
#ifndef S_ISDIR
|
||||
#define S_ISDIR(m) (((m) & _S_IFMT) == _S_IFDIR)
|
||||
#endif
|
||||
|
||||
#ifndef HAVE_GETOPT
|
||||
#define HAVE_GETOPT 0
|
||||
#endif
|
||||
|
|

fc-cache/meson.build
@@ -0,0 +1,8 @@
fccache = executable('fc-cache', ['fc-cache.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-cache']
|

fc-case/fc-case.py
@@ -0,0 +1,240 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig/fc-case/fc-case.py
|
||||
#
|
||||
# Copyright © 2004 Keith Packard
|
||||
# Copyright © 2019 Tim-Philipp Müller
|
||||
#
|
||||
# Permission to use, copy, modify, distribute, and sell this software and its
|
||||
# documentation for any purpose is hereby granted without fee, provided that
|
||||
# the above copyright notice appear in all copies and that both that
|
||||
# copyright notice and this permission notice appear in supporting
|
||||
# documentation, and that the name of the author(s) not be used in
|
||||
# advertising or publicity pertaining to distribution of the software without
|
||||
# specific, written prior permission. The authors make no
|
||||
# representations about the suitability of this software for any purpose. It
|
||||
# is provided "as is" without express or implied warranty.
|
||||
#
|
||||
# THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
||||
# EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
from enum import Enum
|
||||
import argparse
|
||||
import string
|
||||
import sys
|
||||
|
||||
class CaseFoldClass(Enum):
|
||||
COMMON = 1
|
||||
FULL = 2
|
||||
SIMPLE = 3
|
||||
TURKIC = 4
|
||||
|
||||
class CaseFoldMethod(Enum):
|
||||
RANGE = 0
|
||||
EVEN_ODD = 1
|
||||
FULL = 2
|
||||
|
||||
caseFoldClassMap = {
|
||||
'C' : CaseFoldClass.COMMON,
|
||||
'F' : CaseFoldClass.FULL,
|
||||
'S' : CaseFoldClass.SIMPLE,
|
||||
'T' : CaseFoldClass.TURKIC
|
||||
}
|
||||
|
||||
folds = []
|
||||
|
||||
def ucs4_to_utf8(ucs4):
|
||||
utf8_rep = []
|
||||
|
||||
if ucs4 < 0x80:
|
||||
utf8_rep.append(ucs4)
|
||||
bits = -6
|
||||
elif ucs4 < 0x800:
|
||||
utf8_rep.append(((ucs4 >> 6) & 0x1F) | 0xC0)
|
||||
bits = 0
|
||||
elif ucs4 < 0x10000:
|
||||
utf8_rep.append(((ucs4 >> 12) & 0x0F) | 0xE0)
|
||||
bits = 6
|
||||
elif ucs4 < 0x200000:
|
||||
utf8_rep.append(((ucs4 >> 18) & 0x07) | 0xF0)
|
||||
bits = 12
|
||||
elif ucs4 < 0x4000000:
|
||||
utf8_rep.append(((ucs4 >> 24) & 0x03) | 0xF8)
|
||||
bits = 18
|
||||
elif ucs4 < 0x80000000:
|
||||
utf8_rep.append(((ucs4 >> 30) & 0x01) | 0xFC)
|
||||
bits = 24
|
||||
else:
|
||||
return [];
|
||||
|
||||
while bits >= 0:
|
||||
utf8_rep.append(((ucs4 >> bits) & 0x3F) | 0x80)
|
||||
bits-= 6
|
||||
|
||||
return utf8_rep
|
||||
|
||||
def utf8_size(ucs4):
|
||||
return len(ucs4_to_utf8(ucs4))
|
||||
|
||||
case_fold_method_name_map = {
|
||||
CaseFoldMethod.RANGE: 'FC_CASE_FOLD_RANGE,',
|
||||
CaseFoldMethod.EVEN_ODD: 'FC_CASE_FOLD_EVEN_ODD,',
|
||||
CaseFoldMethod.FULL: 'FC_CASE_FOLD_FULL,',
|
||||
}
|
||||
|
||||
if __name__=='__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('case_folding_file')
|
||||
parser.add_argument('--template', dest='template_file', default=None)
|
||||
parser.add_argument('--output', dest='output_file', default=None)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
minFoldChar = None
|
||||
maxFoldChar = None
|
||||
fold = None
|
||||
|
||||
foldChars = []
|
||||
maxFoldChars = 0
|
||||
|
||||
maxExpand = 0
|
||||
|
||||
# Read the standard Unicode CaseFolding.txt file
|
||||
with open(args.case_folding_file, 'r', encoding='utf-8') as casefile:
|
||||
for cnt, line in enumerate(casefile):
|
||||
if not line or not line[0] in string.hexdigits:
|
||||
continue
|
||||
|
||||
# print('Line {}: {}'.format(cnt, line.strip()))
|
||||
|
||||
tokens = line.split('; ')
|
||||
|
||||
if len(tokens) < 3:
|
||||
print('Not enough tokens in line {}'.format(cnt), file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Get upper case value
|
||||
upper = int(tokens.pop(0), 16)
|
||||
|
||||
# Get class
|
||||
cfclass = caseFoldClassMap[tokens.pop(0)]
|
||||
|
||||
# Get list of result characters
|
||||
lower = list(map(lambda s: int(s,16), tokens.pop(0).split()))
|
||||
|
||||
# print('\t----> {:04X} {} {}'.format(upper, cfclass, lower))
|
||||
|
||||
if not minFoldChar:
|
||||
minFoldChar = upper
|
||||
|
||||
maxFoldChar = upper;
|
||||
|
||||
if cfclass in [CaseFoldClass.COMMON, CaseFoldClass.FULL]:
|
||||
if len(lower) == 1:
|
||||
# foldExtends
|
||||
if fold and fold['method'] == CaseFoldMethod.RANGE:
|
||||
foldExtends = (lower[0] - upper) == fold['offset'] and upper == fold['upper'] + fold['count']
|
||||
elif fold and fold['method'] == CaseFoldMethod.EVEN_ODD:
|
||||
foldExtends = (lower[0] - upper) == 1 and upper == (fold['upper'] + fold['count'] + 1)
|
||||
else:
|
||||
foldExtends = False
|
||||
|
||||
if foldExtends:
|
||||
# This modifies the last fold item in the array too
|
||||
fold['count'] = upper - fold['upper'] + 1;
|
||||
else:
|
||||
fold = {}
|
||||
fold['upper'] = upper
|
||||
fold['offset'] = lower[0] - upper;
|
||||
if fold['offset'] == 1:
|
||||
fold['method'] = CaseFoldMethod.EVEN_ODD
|
||||
else:
|
||||
fold['method'] = CaseFoldMethod.RANGE
|
||||
fold['count'] = 1
|
||||
folds.append(fold)
|
||||
expand = utf8_size (lower[0]) - utf8_size(upper)
|
||||
else:
|
||||
fold = {}
|
||||
fold['upper'] = upper
|
||||
fold['method'] = CaseFoldMethod.FULL
|
||||
fold['offset'] = len(foldChars)
|
||||
|
||||
# add chars
|
||||
for c in lower:
|
||||
utf8_rep = ucs4_to_utf8(c)
|
||||
# print('{} -> {}'.format(c,utf8_rep))
|
||||
for utf8_char in utf8_rep:
|
||||
foldChars.append(utf8_char)
|
||||
|
||||
fold['count'] = len(foldChars) - fold['offset']
|
||||
folds.append(fold)
|
||||
|
||||
if fold['count'] > maxFoldChars:
|
||||
maxFoldChars = fold['count']
|
||||
|
||||
expand = fold['count'] - utf8_size(upper)
|
||||
if expand > maxExpand:
|
||||
maxExpand = expand
|
||||
|
||||
# Open output file
|
||||
if args.output_file:
|
||||
sys.stdout = open(args.output_file, 'w', encoding='utf-8')
|
||||
|
||||
# Read the template file
|
||||
if args.template_file:
|
||||
tmpl_file = open(args.template_file, 'r', encoding='utf-8')
|
||||
else:
|
||||
tmpl_file = sys.stdin
|
||||
|
||||
# Scan the input until the marker is found
|
||||
# FIXME: this is a bit silly really, might just as well hardcode
|
||||
# the license header in the script and drop the template
|
||||
for line in tmpl_file:
|
||||
if line.strip() == '@@@':
|
||||
break
|
||||
print(line, end='')
|
||||
|
||||
# Dump these tables
|
||||
print('#define FC_NUM_CASE_FOLD\t{}'.format(len(folds)))
|
||||
print('#define FC_NUM_CASE_FOLD_CHARS\t{}'.format(len(foldChars)))
|
||||
print('#define FC_MAX_CASE_FOLD_CHARS\t{}'.format(maxFoldChars))
|
||||
print('#define FC_MAX_CASE_FOLD_EXPAND\t{}'.format(maxExpand))
|
||||
print('#define FC_MIN_FOLD_CHAR\t0x{:08x}'.format(minFoldChar))
|
||||
print('#define FC_MAX_FOLD_CHAR\t0x{:08x}'.format(maxFoldChar))
|
||||
print('')
|
||||
|
||||
# Dump out ranges
|
||||
print('static const FcCaseFold fcCaseFold[FC_NUM_CASE_FOLD] = {')
|
||||
for f in folds:
|
||||
short_offset = f['offset']
|
||||
if short_offset < -32367:
|
||||
short_offset += 65536
|
||||
if short_offset > 32368:
|
||||
short_offset -= 65536
|
||||
print(' {} 0x{:08x}, {:22s} 0x{:04x}, {:6d} {},'.format('{',
|
||||
f['upper'], case_fold_method_name_map[f['method']],
|
||||
f['count'], short_offset, '}'))
|
||||
print('};\n')
|
||||
|
||||
# Dump out "other" values
|
||||
print('static const FcChar8\tfcCaseFoldChars[FC_NUM_CASE_FOLD_CHARS] = {')
|
||||
for n, c in enumerate(foldChars):
|
||||
if n == len(foldChars) - 1:
|
||||
end = ''
|
||||
elif n % 16 == 15:
|
||||
end = ',\n'
|
||||
else:
|
||||
end = ','
|
||||
print('0x{:02x}'.format(c), end=end)
|
||||
print('\n};')
|
||||
|
||||
# And flush out the rest of the input file
|
||||
for line in tmpl_file:
|
||||
print(line, end='')
|
||||
|
||||
sys.stdout.flush()
|

fc-case/meson.build
@@ -0,0 +1,4 @@
fccase_h = custom_target('fccase.h',
|
||||
output: 'fccase.h',
|
||||
input: ['CaseFolding.txt', 'fccase.tmpl.h'],
|
||||
command: [find_program('fc-case.py'), '@INPUT0@', '--template', '@INPUT1@', '--output', '@OUTPUT@'])
|

fc-cat/meson.build
@@ -0,0 +1,8 @@
fccat = executable('fc-cat', ['fc-cat.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-cat']
|

fc-conflist/meson.build
@@ -0,0 +1,8 @@
fcconflist = executable('fc-conflist', ['fc-conflist.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-conflist']
|

fc-lang/fc-lang.py
@@ -0,0 +1,387 @@
#!/usr/bin/env python3
|
||||
#
|
||||
# fontconfig/fc-lang/fc-lang.py
|
||||
#
|
||||
# Copyright © 2001-2002 Keith Packard
|
||||
# Copyright © 2019 Tim-Philipp Müller
|
||||
#
|
||||
# Permission to use, copy, modify, distribute, and sell this software and its
|
||||
# documentation for any purpose is hereby granted without fee, provided that
|
||||
# the above copyright notice appear in all copies and that both that
|
||||
# copyright notice and this permission notice appear in supporting
|
||||
# documentation, and that the name of the author(s) not be used in
|
||||
# advertising or publicity pertaining to distribution of the software without
|
||||
# specific, written prior permission. The authors make no
|
||||
# representations about the suitability of this software for any purpose. It
|
||||
# is provided "as is" without express or implied warranty.
|
||||
#
|
||||
# THE AUTHOR(S) DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
|
||||
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO
|
||||
# EVENT SHALL THE AUTHOR(S) BE LIABLE FOR ANY SPECIAL, INDIRECT OR
|
||||
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
|
||||
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
|
||||
# TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
# fc-lang
|
||||
#
|
||||
# Read a set of language orthographies and build C declarations for
|
||||
# charsets which can then be used to identify which languages are
|
||||
# supported by a given font.
|
||||
#
|
||||
# TODO: this code is not very pythonic, a lot of it is a 1:1 translation
|
||||
# of the C code and we could probably simplify it a bit
|
||||
import argparse
|
||||
import string
|
||||
import sys
|
||||
import os
|
||||
|
||||
# we just store the leaves in a dict, we can order the leaves later if needed
|
||||
class CharSet:
|
||||
def __init__(self):
|
||||
self.leaves = {} # leaf_number -> leaf data (= 16 uint32)
|
||||
|
||||
def add_char(self, ucs4):
|
||||
assert ucs4 < 0x01000000
|
||||
leaf_num = ucs4 >> 8
|
||||
if leaf_num in self.leaves:
|
||||
leaf = self.leaves[leaf_num]
|
||||
else:
|
||||
leaf = [0, 0, 0, 0, 0, 0, 0, 0] # 256/32 = 8
|
||||
self.leaves[leaf_num] = leaf
|
||||
leaf[(ucs4 & 0xff) >> 5] |= (1 << (ucs4 & 0x1f))
|
||||
#print('{:08x} [{:04x}] --> {}'.format(ucs4, ucs4>>8, leaf))
|
||||
|
||||
def del_char(self, ucs4):
|
||||
assert ucs4 < 0x01000000
|
||||
leaf_num = ucs4 >> 8
|
||||
if leaf_num in self.leaves:
|
||||
leaf = self.leaves[leaf_num]
|
||||
leaf[(ucs4 & 0xff) >> 5] &= ~(1 << (ucs4 & 0x1f))
|
||||
# We don't bother removing the leaf if it's empty
|
||||
#print('{:08x} [{:04x}] --> {}'.format(ucs4, ucs4>>8, leaf))
|
||||
|
||||
def equals(self, other_cs):
|
||||
keys = sorted(self.leaves.keys())
|
||||
other_keys = sorted(other_cs.leaves.keys())
|
||||
if len(keys) != len(other_keys):
|
||||
return False
|
||||
for k1, k2 in zip(keys, other_keys):
|
||||
if k1 != k2:
|
||||
return False
|
||||
if not leaves_equal(self.leaves[k1], other_cs.leaves[k2]):
|
||||
return False
|
||||
return True
|
||||
|
||||
# Convert a file name into a name suitable for C declarations
|
||||
def get_name(file_name):
|
||||
return file_name.split('.')[0]
|
||||
|
||||
# Convert a C name into a language name
|
||||
def get_lang(c_name):
|
||||
return c_name.replace('_', '-').replace(' ', '').lower()
|
||||
|
||||
def read_orth_file(file_name):
|
||||
lines = []
|
||||
with open(file_name, 'r', encoding='utf-8') as orth_file:
|
||||
for num, line in enumerate(orth_file):
|
||||
if line.startswith('include '):
|
||||
include_fn = line[8:].strip()
|
||||
lines += read_orth_file(include_fn)
|
||||
else:
|
||||
# remove comments and strip whitespaces
|
||||
line = line.split('#')[0].strip()
|
||||
line = line.split('\t')[0].strip()
|
||||
# skip empty lines
|
||||
if line:
|
||||
lines += [(file_name, num, line)]
|
||||
|
||||
return lines
|
||||
|
||||
def leaves_equal(leaf1, leaf2):
|
||||
for v1, v2 in zip(leaf1, leaf2):
|
||||
if v1 != v2:
|
||||
return False
|
||||
return True
|
||||
|
||||
# Build a single charset from a source file
|
||||
#
|
||||
# The file format is quite simple, either
|
||||
# a single hex value or a pair separated with a dash
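# For illustration (hypothetical data, not a real orthography), a file this
# parser accepts could look like:
#
#   include latn.orth
#   0041-005a               # a range of code points
#   00e9                    # a single code point
#   -00d7                   # remove a code point added by the include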
|
||||
def parse_orth_file(file_name, lines):
|
||||
charset = CharSet()
|
||||
for fn, num, line in lines:
|
||||
delete_char = line.startswith('-')
|
||||
if delete_char:
|
||||
line = line[1:]
|
||||
if line.find('-') != -1:
|
||||
parts = line.split('-')
|
||||
elif line.find('..') != -1:
|
||||
parts = line.split('..')
|
||||
else:
|
||||
parts = [line]
|
||||
|
||||
start = int(parts.pop(0), 16)
|
||||
end = start
|
||||
if parts:
|
||||
end = int(parts.pop(0), 16)
|
||||
if parts:
|
||||
print('ERROR: {} line {}: parse error (too many parts)'.format(fn, num))
|
||||
|
||||
for ucs4 in range(start, end+1):
|
||||
if delete_char:
|
||||
charset.del_char(ucs4)
|
||||
else:
|
||||
charset.add_char(ucs4)
|
||||
|
||||
assert charset.equals(charset) # sanity check for the equals function
|
||||
|
||||
return charset
|
||||
|
||||
if __name__=='__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('orth_files', nargs='+', help='List of .orth files')
|
||||
parser.add_argument('--directory', dest='directory', default=None)
|
||||
parser.add_argument('--template', dest='template_file', default=None)
|
||||
parser.add_argument('--output', dest='output_file', default=None)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
sets = []
|
||||
names = []
|
||||
langs = []
|
||||
country = []
|
||||
|
||||
total_leaves = 0
|
||||
|
||||
LangCountrySets = {}
|
||||
|
||||
# Open output file
|
||||
if args.output_file:
|
||||
sys.stdout = open(args.output_file, 'w', encoding='utf-8')
|
||||
|
||||
# Read the template file
|
||||
if args.template_file:
|
||||
tmpl_file = open(args.template_file, 'r', encoding='utf-8')
|
||||
else:
|
||||
tmpl_file = sys.stdin
|
||||
|
||||
# Change into source dir if specified (after opening other files)
|
||||
if args.directory:
|
||||
os.chdir(args.directory)
|
||||
|
||||
orth_entries = {}
|
||||
for i, fn in enumerate(args.orth_files):
|
||||
orth_entries[fn] = i
|
||||
|
||||
for fn in sorted(orth_entries.keys()):
|
||||
lines = read_orth_file(fn)
|
||||
charset = parse_orth_file(fn, lines)
|
||||
|
||||
sets.append(charset)
|
||||
|
||||
name = get_name(fn)
|
||||
names.append(name)
|
||||
|
||||
lang = get_lang(name)
|
||||
langs.append(lang)
|
||||
if lang.find('-') != -1:
|
||||
country.append(orth_entries[fn]) # maps to original index
|
||||
language_family = lang.split('-')[0]
|
||||
if not language_family in LangCountrySets:
|
||||
LangCountrySets[language_family] = []
|
||||
LangCountrySets[language_family] += [orth_entries[fn]]
|
||||
|
||||
total_leaves += len(charset.leaves)
|
||||
|
||||
# Find unique leaves
|
||||
leaves = []
|
||||
for s in sets:
|
||||
for leaf_num in sorted(s.leaves.keys()):
|
||||
leaf = s.leaves[leaf_num]
|
||||
is_unique = True
|
||||
for existing_leaf in leaves:
|
||||
if leaves_equal(leaf, existing_leaf):
|
||||
is_unique = False
|
||||
break
|
||||
#print('unique: ', is_unique)
|
||||
if is_unique:
|
||||
leaves.append(leaf)
|
||||
|
||||
# Find duplicate charsets
|
||||
duplicate = []
|
||||
for i, s in enumerate(sets):
|
||||
dup_num = None
|
||||
if i >= 1:
|
||||
for j, s_cmp in enumerate(sets):
|
||||
if j >= i:
|
||||
break
|
||||
if s_cmp.equals(s):
|
||||
dup_num = j
|
||||
break
|
||||
|
||||
duplicate.append(dup_num)
|
||||
|
||||
tn = 0
|
||||
off = {}
|
||||
for i, s in enumerate(sets):
|
||||
if duplicate[i]:
|
||||
continue
|
||||
off[i] = tn
|
||||
tn += len(s.leaves)
|
||||
|
||||
# Scan the input until the marker is found
|
||||
# FIXME: this is a bit silly really, might just as well hardcode
|
||||
# the license header in the script and drop the template
|
||||
for line in tmpl_file:
|
||||
if line.strip() == '@@@':
|
||||
break
|
||||
print(line, end='')
|
||||
|
||||
print('/* total size: {} unique leaves: {} */\n'.format(total_leaves, len(leaves)))
|
||||
|
||||
print('#define LEAF0 ({} * sizeof (FcLangCharSet))'.format(len(sets)))
|
||||
print('#define OFF0 (LEAF0 + {} * sizeof (FcCharLeaf))'.format(len(leaves)))
|
||||
print('#define NUM0 (OFF0 + {} * sizeof (uintptr_t))'.format(tn))
|
||||
print('#define SET(n) (n * sizeof (FcLangCharSet) + offsetof (FcLangCharSet, charset))')
|
||||
print('#define OFF(s,o) (OFF0 + o * sizeof (uintptr_t) - SET(s))')
|
||||
print('#define NUM(s,n) (NUM0 + n * sizeof (FcChar16) - SET(s))')
|
||||
print('#define LEAF(o,l) (LEAF0 + l * sizeof (FcCharLeaf) - (OFF0 + o * sizeof (intptr_t)))')
|
||||
print('#define fcLangCharSets (fcLangData.langCharSets)')
|
||||
print('#define fcLangCharSetIndices (fcLangData.langIndices)')
|
||||
print('#define fcLangCharSetIndicesInv (fcLangData.langIndicesInv)')
|
||||
|
||||
assert len(sets) < 256 # FIXME: need to change index type to 16-bit below then
|
||||
|
||||
print('''
|
||||
static const struct {{
|
||||
FcLangCharSet langCharSets[{}];
|
||||
FcCharLeaf leaves[{}];
|
||||
uintptr_t leaf_offsets[{}];
|
||||
FcChar16 numbers[{}];
|
||||
{} langIndices[{}];
|
||||
{} langIndicesInv[{}];
|
||||
}} fcLangData = {{'''.format(len(sets), len(leaves), tn, tn,
|
||||
'FcChar8 ', len(sets), 'FcChar8 ', len(sets)))
|
||||
|
||||
# Dump sets
|
||||
print('{')
|
||||
for i, s in enumerate(sets):
|
||||
if duplicate[i]:
|
||||
j = duplicate[i]
|
||||
else:
|
||||
j = i
|
||||
print(' {{ "{}", {{ FC_REF_CONSTANT, {}, OFF({},{}), NUM({},{}) }} }}, /* {} */'.format(
|
||||
langs[i], len(sets[j].leaves), i, off[j], i, off[j], i))
|
||||
|
||||
print('},')
|
||||
|
||||
# Dump leaves
|
||||
print('{')
|
||||
for l, leaf in enumerate(leaves):
|
||||
print(' {{ {{ /* {} */'.format(l), end='')
|
||||
for i in range(0, 8): # 256/32 = 8
|
||||
if i % 4 == 0:
|
||||
print('\n ', end='')
|
||||
print(' 0x{:08x},'.format(leaf[i]), end='')
|
||||
print('\n } },')
|
||||
print('},')
|
||||
|
||||
# Dump leaves
|
||||
print('{')
|
||||
for i, s in enumerate(sets):
|
||||
if duplicate[i]:
|
||||
continue
|
||||
|
||||
print(' /* {} */'.format(names[i]))
|
||||
|
||||
for n, leaf_num in enumerate(sorted(s.leaves.keys())):
|
||||
leaf = s.leaves[leaf_num]
|
||||
if n % 4 == 0:
|
||||
print(' ', end='')
|
||||
found = [k for k, unique_leaf in enumerate(leaves) if leaves_equal(unique_leaf,leaf)]
|
||||
assert found, "Couldn't find leaf in unique leaves list!"
|
||||
assert len(found) == 1
|
||||
print(' LEAF({:3},{:3}),'.format(off[i], found[0]), end='')
|
||||
if n % 4 == 3:
|
||||
print('')
|
||||
if len(s.leaves) % 4 != 0:
|
||||
print('')
|
||||
|
||||
print('},')
|
||||
|
||||
print('{')
|
||||
for i, s in enumerate(sets):
|
||||
if duplicate[i]:
|
||||
continue
|
||||
|
||||
print(' /* {} */'.format(names[i]))
|
||||
|
||||
for n, leaf_num in enumerate(sorted(s.leaves.keys())):
|
||||
leaf = s.leaves[leaf_num]
|
||||
if n % 8 == 0:
|
||||
print(' ', end='')
|
||||
print(' 0x{:04x},'.format(leaf_num), end='')
|
||||
if n % 8 == 7:
|
||||
print('')
|
||||
if len(s.leaves) % 8 != 0:
|
||||
print('')
|
||||
|
||||
print('},')
|
||||
|
||||
# langIndices
|
||||
print('{')
|
||||
for i, s in enumerate(sets):
|
||||
fn = '{}.orth'.format(names[i])
|
||||
print(' {}, /* {} */'.format(orth_entries[fn], names[i]))
|
||||
print('},')
|
||||
|
||||
# langIndicesInv
|
||||
print('{')
|
||||
for i, k in enumerate(orth_entries.keys()):
|
||||
name = get_name(k)
|
||||
idx = names.index(name)
|
||||
print(' {}, /* {} */'.format(idx, name))
|
||||
print('}')
|
||||
|
||||
print('};\n')
|
||||
|
||||
print('#define NUM_LANG_CHAR_SET {}'.format(len(sets)))
|
||||
num_lang_set_map = (len(sets) + 31) // 32;
|
||||
print('#define NUM_LANG_SET_MAP {}'.format(num_lang_set_map))
|
||||
|
||||
# Dump indices with country codes
|
||||
assert len(country) > 0
|
||||
assert len(LangCountrySets) > 0
|
||||
print('')
|
||||
print('static const FcChar32 fcLangCountrySets[][NUM_LANG_SET_MAP] = {')
|
||||
for k in sorted(LangCountrySets.keys()):
|
||||
langset_map = [0] * num_lang_set_map # initialise all zeros
|
||||
for entries_id in LangCountrySets[k]:
|
||||
langset_map[entries_id >> 5] |= (1 << (entries_id & 0x1f))
|
||||
print(' {', end='')
|
||||
for v in langset_map:
|
||||
print(' 0x{:08x},'.format(v), end='')
|
||||
print(' }}, /* {} */'.format(k))
|
||||
|
||||
print('};\n')
|
||||
print('#define NUM_COUNTRY_SET {}\n'.format(len(LangCountrySets)))
|
||||
|
||||
# Find ranges for each letter for faster searching
|
||||
# Dump sets start/finish for the fastpath
|
||||
print('static const FcLangCharSetRange fcLangCharSetRanges[] = {\n')
|
||||
for c in string.ascii_lowercase: # a-z
|
||||
start = 9999
|
||||
stop = -1
|
||||
for i, s in enumerate(sets):
|
||||
if names[i].startswith(c):
|
||||
start = min(start,i)
|
||||
stop = max(stop,i)
|
||||
print(' {{ {}, {} }}, /* {} */'.format(start, stop, c))
|
||||
print('};\n')
|
||||
|
||||
# And flush out the rest of the input file
|
||||
for line in tmpl_file:
|
||||
print(line, end='')
|
||||
|
||||
sys.stdout.flush()
|

fc-lang/meson.build
@@ -0,0 +1,256 @@
# Do not reorder, magic
|
||||
orth_files = [
|
||||
'aa.orth',
|
||||
'ab.orth',
|
||||
'af.orth',
|
||||
'am.orth',
|
||||
'ar.orth',
|
||||
'as.orth',
|
||||
'ast.orth',
|
||||
'av.orth',
|
||||
'ay.orth',
|
||||
'az_az.orth',
|
||||
'az_ir.orth',
|
||||
'ba.orth',
|
||||
'bm.orth',
|
||||
'be.orth',
|
||||
'bg.orth',
|
||||
'bh.orth',
|
||||
'bho.orth',
|
||||
'bi.orth',
|
||||
'bin.orth',
|
||||
'bn.orth',
|
||||
'bo.orth',
|
||||
'br.orth',
|
||||
'bs.orth',
|
||||
'bua.orth',
|
||||
'ca.orth',
|
||||
'ce.orth',
|
||||
'ch.orth',
|
||||
'chm.orth',
|
||||
'chr.orth',
|
||||
'co.orth',
|
||||
'cs.orth',
|
||||
'cu.orth',
|
||||
'cv.orth',
|
||||
'cy.orth',
|
||||
'da.orth',
|
||||
'de.orth',
|
||||
'dz.orth',
|
||||
'el.orth',
|
||||
'en.orth',
|
||||
'eo.orth',
|
||||
'es.orth',
|
||||
'et.orth',
|
||||
'eu.orth',
|
||||
'fa.orth',
|
||||
'fi.orth',
|
||||
'fj.orth',
|
||||
'fo.orth',
|
||||
'fr.orth',
|
||||
'ff.orth',
|
||||
'fur.orth',
|
||||
'fy.orth',
|
||||
'ga.orth',
|
||||
'gd.orth',
|
||||
'gez.orth',
|
||||
'gl.orth',
|
||||
'gn.orth',
|
||||
'gu.orth',
|
||||
'gv.orth',
|
||||
'ha.orth',
|
||||
'haw.orth',
|
||||
'he.orth',
|
||||
'hi.orth',
|
||||
'ho.orth',
|
||||
'hr.orth',
|
||||
'hu.orth',
|
||||
'hy.orth',
|
||||
'ia.orth',
|
||||
'ig.orth',
|
||||
'id.orth',
|
||||
'ie.orth',
|
||||
'ik.orth',
|
||||
'io.orth',
|
||||
'is.orth',
|
||||
'it.orth',
|
||||
'iu.orth',
|
||||
'ja.orth',
|
||||
'ka.orth',
|
||||
'kaa.orth',
|
||||
'ki.orth',
|
||||
'kk.orth',
|
||||
'kl.orth',
|
||||
'km.orth',
|
||||
'kn.orth',
|
||||
'ko.orth',
|
||||
'kok.orth',
|
||||
'ks.orth',
|
||||
'ku_am.orth',
|
||||
'ku_ir.orth',
|
||||
'kum.orth',
|
||||
'kv.orth',
|
||||
'kw.orth',
|
||||
'ky.orth',
|
||||
'la.orth',
|
||||
'lb.orth',
|
||||
'lez.orth',
|
||||
'ln.orth',
|
||||
'lo.orth',
|
||||
'lt.orth',
|
||||
'lv.orth',
|
||||
'mg.orth',
|
||||
'mh.orth',
|
||||
'mi.orth',
|
||||
'mk.orth',
|
||||
'ml.orth',
|
||||
'mn_cn.orth',
|
||||
'mo.orth',
|
||||
'mr.orth',
|
||||
'mt.orth',
|
||||
'my.orth',
|
||||
'nb.orth',
|
||||
'nds.orth',
|
||||
'ne.orth',
|
||||
'nl.orth',
|
||||
'nn.orth',
|
||||
'no.orth',
|
||||
'nr.orth',
|
||||
'nso.orth',
|
||||
'ny.orth',
|
||||
'oc.orth',
|
||||
'om.orth',
|
||||
'or.orth',
|
||||
'os.orth',
|
||||
'pa.orth',
|
||||
'pl.orth',
|
||||
'ps_af.orth',
|
||||
'ps_pk.orth',
|
||||
'pt.orth',
|
||||
'rm.orth',
|
||||
'ro.orth',
|
||||
'ru.orth',
|
||||
'sa.orth',
|
||||
'sah.orth',
|
||||
'sco.orth',
|
||||
'se.orth',
|
||||
'sel.orth',
|
||||
'sh.orth',
|
||||
'shs.orth',
|
||||
'si.orth',
|
||||
'sk.orth',
|
||||
'sl.orth',
|
||||
'sm.orth',
|
||||
'sma.orth',
|
||||
'smj.orth',
|
||||
'smn.orth',
|
||||
'sms.orth',
|
||||
'so.orth',
|
||||
'sq.orth',
|
||||
'sr.orth',
|
||||
'ss.orth',
|
||||
'st.orth',
|
||||
'sv.orth',
|
||||
'sw.orth',
|
||||
'syr.orth',
|
||||
'ta.orth',
|
||||
'te.orth',
|
||||
'tg.orth',
|
||||
'th.orth',
|
||||
'ti_er.orth',
|
||||
'ti_et.orth',
|
||||
'tig.orth',
|
||||
'tk.orth',
|
||||
'tl.orth',
|
||||
'tn.orth',
|
||||
'to.orth',
|
||||
'tr.orth',
|
||||
'ts.orth',
|
||||
'tt.orth',
|
||||
'tw.orth',
|
||||
'tyv.orth',
|
||||
'ug.orth',
|
||||
'uk.orth',
|
||||
'ur.orth',
|
||||
'uz.orth',
|
||||
've.orth',
|
||||
'vi.orth',
|
||||
'vo.orth',
|
||||
'vot.orth',
|
||||
'wa.orth',
|
||||
'wen.orth',
|
||||
'wo.orth',
|
||||
'xh.orth',
|
||||
'yap.orth',
|
||||
'yi.orth',
|
||||
'yo.orth',
|
||||
'zh_cn.orth',
|
||||
'zh_hk.orth',
|
||||
'zh_mo.orth',
|
||||
'zh_sg.orth',
|
||||
'zh_tw.orth',
|
||||
'zu.orth',
|
||||
'ak.orth',
|
||||
'an.orth',
|
||||
'ber_dz.orth',
|
||||
'ber_ma.orth',
|
||||
'byn.orth',
|
||||
'crh.orth',
|
||||
'csb.orth',
|
||||
'dv.orth',
|
||||
'ee.orth',
|
||||
'fat.orth',
|
||||
'fil.orth',
|
||||
'hne.orth',
|
||||
'hsb.orth',
|
||||
'ht.orth',
|
||||
'hz.orth',
|
||||
'ii.orth',
|
||||
'jv.orth',
|
||||
'kab.orth',
|
||||
'kj.orth',
|
||||
'kr.orth',
|
||||
'ku_iq.orth',
|
||||
'ku_tr.orth',
|
||||
'kwm.orth',
|
||||
'lg.orth',
|
||||
'li.orth',
|
||||
'mai.orth',
|
||||
'mn_mn.orth',
|
||||
'ms.orth',
|
||||
'na.orth',
|
||||
'ng.orth',
|
||||
'nv.orth',
|
||||
'ota.orth',
|
||||
'pa_pk.orth',
|
||||
'pap_an.orth',
|
||||
'pap_aw.orth',
|
||||
'qu.orth',
|
||||
'quz.orth',
|
||||
'rn.orth',
|
||||
'rw.orth',
|
||||
'sc.orth',
|
||||
'sd.orth',
|
||||
'sg.orth',
|
||||
'sid.orth',
|
||||
'sn.orth',
|
||||
'su.orth',
|
||||
'ty.orth',
|
||||
'wal.orth',
|
||||
'za.orth',
|
||||
'lah.orth',
|
||||
'nqo.orth',
|
||||
'brx.orth',
|
||||
'sat.orth',
|
||||
'doi.orth',
|
||||
'mni.orth',
|
||||
'und_zsye.orth',
|
||||
'und_zmth.orth',
|
||||
]
|
||||
|
||||
fclang_h = custom_target('fclang.h',
|
||||
output: ['fclang.h'],
|
||||
input: orth_files,
|
||||
command: [find_program('fc-lang.py'), orth_files, '--template', files('fclang.tmpl.h')[0], '--output', '@OUTPUT@', '--directory', meson.current_source_dir()],
|
||||
build_by_default: true,
|
||||
)
|
|
@ -0,0 +1,8 @@
|
|||
fclist = executable('fc-list', ['fc-list.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-list']
|
|
@ -0,0 +1,8 @@
|
|||
fcmatch = executable('fc-match', ['fc-match.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-match']
|
|
@ -0,0 +1,8 @@
|
|||
fcpattern = executable('fc-pattern', ['fc-pattern.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-pattern']
|
|
@ -0,0 +1,9 @@
|
|||
fcquery = executable('fc-query', ['fc-query.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
dependencies: [freetype_dep],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-query']
|
|
@ -0,0 +1,9 @@
|
|||
fcscan = executable('fc-scan', ['fc-scan.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
dependencies: [freetype_dep],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-scan']
|
|
@ -0,0 +1,9 @@
|
|||
fcvalidate = executable('fc-validate', ['fc-validate.c', fcstdint_h, alias_headers, ft_alias_headers],
|
||||
include_directories: [incbase, incsrc],
|
||||
link_with: [libfontconfig],
|
||||
dependencies: [freetype_dep],
|
||||
c_args: c_args,
|
||||
install: true,
|
||||
)
|
||||
|
||||
tools_man_pages += ['fc-validate']
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import argparse
|
||||
import subprocess
|
||||
|
||||
if __name__=='__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('fccache')
|
||||
args = parser.parse_args()
|
||||
sys.exit(subprocess.run([args.fccache, '-s', '-f', '-v']).returncode)
|
|
@ -0,0 +1,6 @@
|
|||
gettext_files = [
|
||||
'fontconfig.its',
|
||||
'fontconfig.loc',
|
||||
]
|
||||
|
||||
install_data(gettext_files, install_dir: join_paths(get_option('datadir'), 'gettext/its'))
|
|
@ -0,0 +1,14 @@
|
|||
#include <stdlib.h>
|
||||
#include <stdio.h>
|
||||
#include <stddef.h>
|
||||
|
||||
struct s { int n; double d[]; };
|
||||
|
||||
int main(void)
|
||||
{
|
||||
int m = getchar ();
|
||||
struct s *p = malloc (offsetof (struct s, d)
|
||||
+ m * sizeof (double));
|
||||
p->d[0] = 0.0;
|
||||
return p->d != (double *) NULL;
|
||||
}
|
|
@ -0,0 +1,6 @@
|
|||
void memory_barrier (void) { __sync_synchronize (); }
|
||||
int atomic_add (int *i) { return __sync_fetch_and_add (i, 1); }
|
||||
int mutex_trylock (int *m) { return __sync_lock_test_and_set (m, 1); }
|
||||
void mutex_unlock (int *m) { __sync_lock_release (m); }
|
||||
|
||||
int main(void) { return 0;}
|
|
@ -0,0 +1,8 @@
|
|||
#include <atomic.h>
|
||||
/* This requires Solaris Studio 12.2 or newer: */
|
||||
#include <mbarrier.h>
|
||||
void memory_barrier (void) { __machine_rw_barrier (); }
|
||||
int atomic_add (volatile unsigned *i) { return atomic_add_int_nv (i, 1); }
|
||||
void *atomic_ptr_cmpxchg (volatile void **target, void *cmp, void *newval) { return atomic_cas_ptr (target, cmp, newval); }
|
||||
|
||||
int main(void) { return 0; }
|
|
@ -0,0 +1,362 @@
|
|||
project('fontconfig', 'c',
|
||||
version: '2.13.91',
|
||||
meson_version : '>= 0.50.0',
|
||||
default_options: [ 'buildtype=debugoptimized'],
|
||||
)
|
||||
|
||||
fc_version = meson.project_version()
|
||||
version_arr = fc_version.split('.')
|
||||
fc_version_major = version_arr[0].to_int()
|
||||
fc_version_minor = version_arr[1].to_int()
|
||||
fc_version_micro = version_arr[2].to_int()
|
||||
|
||||
# Try and maintain compatibility with the previous libtool versioning
|
||||
# (this is a bit of a hack, but it should work fine for our case where
|
||||
# API is added, in which case LT_AGE and LIBT_CURRENT are both increased)
|
||||
soversion = fc_version_major - 1
|
||||
curversion = fc_version_minor - 1
|
||||
libversion = '@0@.@1@.0'.format(soversion, curversion)
|
||||
defversion = '@0@.@1@'.format(curversion, fc_version_micro)
|
||||
osxversion = curversion + 1
|
||||
|
||||
freetype_req = '>= 21.0.15'
|
||||
|
||||
freetype_dep = dependency('freetype2', version: freetype_req,
|
||||
fallback: ['freetype2', 'freetype_dep'])
|
||||
|
||||
expat_dep = dependency('expat',
|
||||
fallback: ['expat', 'expat_dep'])
|
||||
|
||||
cc = meson.get_compiler('c')
|
||||
i18n = import('i18n')
|
||||
pkgmod = import('pkgconfig')
|
||||
python3 = import('python').find_installation()
|
||||
|
||||
check_headers = [
|
||||
['dirent.h'],
|
||||
['fcntl.h'],
|
||||
['stdlib.h'],
|
||||
['string.h'],
|
||||
['unistd.h'],
|
||||
['sys/statvfs.h'],
|
||||
['sys/vfs.h'],
|
||||
['sys/statfs.h'],
|
||||
['sys/param.h'],
|
||||
['sys/mount.h'],
|
||||
]
|
||||
|
||||
check_funcs = [
|
||||
['link'],
|
||||
['mkstemp'],
|
||||
['mkostemp'],
|
||||
['_mktemp_s'],
|
||||
['mkdtemp'],
|
||||
['getopt'],
|
||||
['getopt_long'],
|
||||
['getprogname'],
|
||||
['getexecname'],
|
||||
['rand'],
|
||||
['random'],
|
||||
['lrand48'],
|
||||
['random_r'],
|
||||
['rand_r'],
|
||||
['readlink'],
|
||||
['fstatvfs'],
|
||||
['fstatfs'],
|
||||
['lstat'],
|
||||
['mmap'],
|
||||
['vprintf'],
|
||||
]
|
||||
|
||||
check_freetype_funcs = [
|
||||
['FT_Get_BDF_Property', {'dependencies': freetype_dep}],
|
||||
['FT_Get_PS_Font_Info', {'dependencies': freetype_dep}],
|
||||
['FT_Has_PS_Glyph_Names', {'dependencies': freetype_dep}],
|
||||
['FT_Get_X11_Font_Format', {'dependencies': freetype_dep}],
|
||||
['FT_Done_MM_Var', {'dependencies': freetype_dep}],
|
||||
]
|
||||
|
||||
check_header_symbols = [
|
||||
['posix_fadvise', 'fcntl.h']
|
||||
]
|
||||
|
||||
check_struct_members = [
|
||||
['struct statvfs', 'f_basetype', ['sys/statvfs.h']],
|
||||
['struct statvfs', 'f_fstypename', ['sys/statvfs.']],
|
||||
['struct statfs', 'f_flags', []],
|
||||
['struct statfs', 'f_fstypename', []],
|
||||
['struct dirent', 'd_type', ['sys/types.h', 'dirent.h']],
|
||||
]
|
||||
|
||||
check_sizeofs = [
|
||||
['void *', {'conf-name': 'SIZEOF_VOID_P'}],
|
||||
]
|
||||
|
||||
check_alignofs = [
|
||||
['void *', {'conf-name': 'ALIGNOF_VOID_P'}],
|
||||
['double'],
|
||||
]
|
||||
|
||||
add_project_arguments('-DHAVE_CONFIG_H', language: 'c')
|
||||
|
||||
c_args = []
|
||||
|
||||
conf = configuration_data()
|
||||
deps = [freetype_dep, expat_dep]
|
||||
incbase = include_directories('.')
|
||||
|
||||
# We cannot try compiling against an internal dependency
|
||||
if freetype_dep.type_name() == 'internal'
|
||||
foreach func: check_freetype_funcs
|
||||
name = func[0]
|
||||
conf.set('HAVE_@0@'.format(name.to_upper()), 1)
|
||||
endforeach
|
||||
else
|
||||
check_funcs += check_freetype_funcs
|
||||
endif
|
||||
|
||||
foreach check : check_headers
|
||||
name = check[0]
|
||||
|
||||
if cc.has_header(name)
|
||||
conf.set('HAVE_@0@'.format(name.to_upper().underscorify()), 1)
|
||||
endif
|
||||
endforeach
|
||||
|
||||
foreach check : check_funcs
|
||||
name = check[0]
|
||||
opts = check.length() > 1 ? check[1] : {}
|
||||
extra_deps = opts.get('dependencies', [])
|
||||
|
||||
if cc.has_function(name, dependencies: extra_deps)
|
||||
conf.set('HAVE_@0@'.format(name.to_upper()), 1)
|
||||
endif
|
||||
endforeach
|
||||
|
||||
foreach check : check_header_symbols
|
||||
name = check[0]
|
||||
header = check[1]
|
||||
|
||||
if cc.has_header_symbol(header, name)
|
||||
conf.set('HAVE_@0@'.format(name.to_upper()), 1)
|
||||
endif
|
||||
endforeach
|
||||
|
||||
foreach check : check_struct_members
|
||||
struct_name = check[0]
|
||||
member_name = check[1]
|
||||
headers = check[2]
|
||||
|
||||
prefix = ''
|
||||
|
||||
foreach header : headers
|
||||
prefix += '#include <@0@>\n'.format(header)
|
||||
endforeach
|
||||
|
||||
if cc.has_member(struct_name, member_name, prefix: prefix)
|
||||
conf.set('HAVE_@0@_@1@'.format(struct_name, member_name).to_upper().underscorify(), 1)
|
||||
endif
|
||||
endforeach
|
||||
|
||||
foreach check : check_sizeofs
|
||||
type = check[0]
|
||||
opts = check.length() > 1 ? check[1] : {}
|
||||
|
||||
conf_name = opts.get('conf-name', 'SIZEOF_@0@'.format(type.to_upper()))
|
||||
|
||||
conf.set(conf_name, cc.sizeof(type))
|
||||
endforeach
|
||||
|
||||
foreach check : check_alignofs
|
||||
type = check[0]
|
||||
opts = check.length() > 1 ? check[1] : {}
|
||||
|
||||
conf_name = opts.get('conf-name', 'ALIGNOF_@0@'.format(type.to_upper()))
|
||||
|
||||
conf.set(conf_name, cc.alignment(type))
|
||||
endforeach
|
||||
|
||||
if cc.compiles(files('meson-cc-tests/flexible-array-member-test.c'))
|
||||
conf.set('FLEXIBLE_ARRAY_MEMBER', true)
|
||||
else
|
||||
conf.set('FLEXIBLE_ARRAY_MEMBER', 1)
|
||||
endif
|
||||
|
||||
if cc.links(files('meson-cc-tests/intel-atomic-primitives-test.c'), name: 'Intel atomics')
|
||||
conf.set('HAVE_INTEL_ATOMIC_PRIMITIVES', 1)
|
||||
endif
|
||||
|
||||
if cc.links(files('meson-cc-tests/solaris-atomic-operations.c'), name: 'Solaris atomic ops')
|
||||
conf.set('HAVE_SOLARIS_ATOMIC_OPS', 1)
|
||||
endif
|
||||
|
||||
|
||||
prefix = get_option('prefix')
|
||||
|
||||
fonts_conf = configuration_data()
|
||||
|
||||
if host_machine.system() == 'windows'
|
||||
conf.set_quoted('FC_DEFAULT_FONTS', 'WINDOWSFONTDIR')
|
||||
fonts_conf.set('FC_DEFAULT_FONTS', 'WINDOWSFONTDIR')
|
||||
fc_cachedir = 'LOCAL_APPDATA_FONTCONFIG_CACHE'
|
||||
else
|
||||
conf.set_quoted('FC_DEFAULT_FONTS', '/usr/share/fonts')
|
||||
fonts_conf.set('FC_DEFAULT_FONTS', '/usr/share/fonts')
|
||||
fc_cachedir = join_paths(prefix, get_option('localstatedir'), 'cache', meson.project_name())
|
||||
thread_dep = dependency('threads')
|
||||
conf.set('HAVE_PTHREAD', 1)
|
||||
deps += [thread_dep]
|
||||
endif
|
||||
|
||||
fc_templatedir = join_paths(prefix, get_option('datadir'), 'fontconfig/conf.avail')
|
||||
fc_baseconfigdir = join_paths(prefix, get_option('sysconfdir'), 'fonts')
|
||||
fc_configdir = join_paths(fc_baseconfigdir, 'conf.d')
|
||||
fc_xmldir = join_paths(prefix, get_option('datadir'), 'xml/fontconfig')
|
||||
|
||||
|
||||
conf.set_quoted('CONFIGDIR', fc_configdir)
|
||||
conf.set_quoted('FC_CACHEDIR', fc_cachedir)
|
||||
conf.set_quoted('FC_TEMPLATEDIR', fc_templatedir)
|
||||
conf.set_quoted('FONTCONFIG_PATH', fc_baseconfigdir)
|
||||
conf.set_quoted('FC_FONTPATH', '')
|
||||
|
||||
fonts_conf.set('FC_FONTPATH', '')
|
||||
fonts_conf.set('FC_CACHEDIR', fc_cachedir)
|
||||
fonts_conf.set('CONFIGDIR', fc_configdir)
|
||||
# strip off fc_baseconfigdir prefix if that is the prefix
|
||||
if fc_configdir.startswith(fc_baseconfigdir + '/')
|
||||
fonts_conf.set('CONFIGDIR', fc_configdir.split(fc_baseconfigdir + '/')[1])
|
||||
endif
|
||||
|
||||
gperf = find_program('gperf', required: build_machine.system() != 'windows')
|
||||
if not gperf.found()
|
||||
subproject('gperf', required: true)
|
||||
gperf = find_program('gperf')
|
||||
endif
|
||||
|
||||
sh = find_program('sh', required : false)
|
||||
|
||||
if not sh.found() # host_machine.system() == 'windows' or not sh.found()
|
||||
# TODO: This is not always correct
|
||||
if cc.get_id() == 'msvc'
|
||||
gperf_len_type = 'size_t'
|
||||
else
|
||||
gperf_len_type = 'unsigned'
|
||||
endif
|
||||
else
|
||||
gperf_test_format = '''
|
||||
#include <string.h>
|
||||
const char * in_word_set(const char *, @0@);
|
||||
@1@
|
||||
'''
|
||||
gperf_snippet_format = 'echo foo,bar | @0@ -L ANSI-C'
|
||||
gperf_snippet = run_command(sh, '-c', gperf_snippet_format.format(gperf.path()))
|
||||
gperf_test = gperf_test_format.format('size_t', gperf_snippet.stdout())
|
||||
|
||||
if cc.compiles(gperf_test)
|
||||
gperf_len_type = 'size_t'
|
||||
else
|
||||
gperf_test = gperf_test_format.format('unsigned', gperf_snippet.stdout())
|
||||
if cc.compiles(gperf_test)
|
||||
gperf_len_type = 'unsigned'
|
||||
else
|
||||
error('unable to determine gperf len type')
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
message('gperf len type is @0@'.format(gperf_len_type))
|
||||
|
||||
conf.set('FC_GPERF_SIZE_T', gperf_len_type,
|
||||
description : 'The type of gperf "len" parameter')
|
||||
|
||||
conf.set('_GNU_SOURCE', true)
|
||||
|
||||
conf.set_quoted('GETTEXT_PACKAGE', meson.project_name())
|
||||
|
||||
incsrc = include_directories('src')
|
||||
|
||||
# We assume stdint.h is available
|
||||
foreach t : ['uint64_t', 'int32_t', 'uintptr_t', 'intptr_t']
|
||||
if not cc.has_type(t, prefix: '#include <stdint.h>')
|
||||
error('Sanity check failed: type @0@ not provided via stdint.h'.format(t))
|
||||
endif
|
||||
endforeach
|
||||
|
||||
fcstdint_h = configure_file(
|
||||
input: 'src/fcstdint.h.in',
|
||||
output: 'fcstdint.h',
|
||||
copy: true)
|
||||
|
||||
stdinwrapper = files('stdin_wrapper.py')[0]
|
||||
makealias = files('src/makealias.py')[0]
|
||||
|
||||
alias_headers = custom_target('alias_headers',
|
||||
output: ['fcalias.h', 'fcaliastail.h'],
|
||||
input: ['fontconfig/fontconfig.h', 'src/fcdeprecate.h', 'fontconfig/fcprivate.h'],
|
||||
command: [python3, makealias, join_paths(meson.current_source_dir(), 'src'), '@OUTPUT@', '@INPUT@'],
|
||||
)
|
||||
|
||||
ft_alias_headers = custom_target('ft_alias_headers',
|
||||
output: ['fcftalias.h', 'fcftaliastail.h'],
|
||||
input: ['fontconfig/fcfreetype.h'],
|
||||
command: [python3, makealias, join_paths(meson.current_source_dir(), 'src'), '@OUTPUT@', '@INPUT@']
|
||||
)
|
||||
|
||||
tools_man_pages = []
|
||||
|
||||
# Do not reorder
|
||||
subdir('fc-case')
|
||||
subdir('fc-lang')
|
||||
subdir('src')
|
||||
|
||||
if not get_option('tools').disabled()
|
||||
subdir('fc-cache')
|
||||
subdir('fc-cat')
|
||||
subdir('fc-conflist')
|
||||
subdir('fc-list')
|
||||
subdir('fc-match')
|
||||
subdir('fc-pattern')
|
||||
subdir('fc-query')
|
||||
subdir('fc-scan')
|
||||
subdir('fc-validate')
|
||||
endif
|
||||
|
||||
if not get_option('tests').disabled()
|
||||
subdir('test')
|
||||
endif
|
||||
|
||||
subdir('conf.d')
|
||||
subdir('its')
|
||||
|
||||
# xgettext is optional (on Windows for instance)
|
||||
if find_program('xgettext', required : get_option('nls')).found()
|
||||
subdir('po')
|
||||
subdir('po-conf')
|
||||
endif
|
||||
|
||||
if not get_option('doc').disabled()
|
||||
subdir('doc')
|
||||
endif
|
||||
|
||||
configure_file(output: 'config.h', configuration: conf)
|
||||
|
||||
configure_file(output: 'fonts.conf',
|
||||
input: 'fonts.conf.in',
|
||||
configuration: fonts_conf,
|
||||
install_dir: fc_baseconfigdir,
|
||||
install: true)
|
||||
|
||||
install_data('fonts.dtd',
|
||||
install_dir: join_paths(get_option('prefix'), get_option('datadir'), 'xml/fontconfig')
|
||||
)
|
||||
|
||||
fc_headers = [
|
||||
'fontconfig/fontconfig.h',
|
||||
'fontconfig/fcfreetype.h',
|
||||
'fontconfig/fcprivate.h',
|
||||
]
|
||||
|
||||
install_headers(fc_headers, subdir: meson.project_name())
|
||||
|
||||
meson.add_install_script('install-cache.py', fccache.full_path())
|
|
@ -0,0 +1,13 @@
|
|||
# Common feature options
|
||||
option('doc', type : 'feature', value : 'auto', yield: true,
|
||||
description: 'Build documentation')
|
||||
option('doc-txt', type: 'feature', value: 'auto')
|
||||
option('doc-man', type: 'feature', value: 'auto')
|
||||
option('doc-pdf', type: 'feature', value: 'auto')
|
||||
option('doc-html', type: 'feature', value: 'auto')
|
||||
option('nls', type : 'feature', value : 'auto', yield: true,
|
||||
description : 'Enable native language support (translations)')
|
||||
option('tests', type : 'feature', value : 'auto', yield : true,
|
||||
description: 'Enable unit tests')
|
||||
option('tools', type : 'feature', value : 'auto', yield : true,
|
||||
description: 'Build command-line tools (fc-list, fc-query, etc.)')
|
|
@ -0,0 +1,3 @@
|
|||
i18n.gettext(meson.project_name(),
|
||||
args: '--directory=' + meson.source_root()
|
||||
)
|
|
@ -0,0 +1,3 @@
|
|||
i18n.gettext(meson.project_name() + '-conf',
|
||||
args: '--directory=' + meson.source_root()
|
||||
)
|
|
@ -0,0 +1,33 @@
|
|||
import argparse
|
||||
import subprocess
|
||||
import os
|
||||
import re
|
||||
|
||||
if __name__== '__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('input')
|
||||
parser.add_argument('output')
|
||||
|
||||
args = parser.parse_known_args()
|
||||
print (args[0].output)
|
||||
|
||||
cpp = args[1]
|
||||
ret = subprocess.run(cpp + [args[0].input], stdout=subprocess.PIPE)
|
||||
|
||||
stdout = ret.stdout.decode('utf8')
|
||||
|
||||
with open(args[0].output, 'w') as out:
|
||||
write = True
|
||||
for l in stdout.split('\n'):
|
||||
l = l.strip('\r')
|
||||
if l.startswith('CUT_OUT_BEGIN'):
|
||||
write = False
|
||||
|
||||
if write and l:
|
||||
stripped = re.sub('^\s+', '', l)
|
||||
stripped = re.sub('\s*,\s*', ',', stripped)
|
||||
if not stripped.isspace() and stripped:
|
||||
out.write('%s\n' % stripped)
|
||||
|
||||
if l.startswith('CUT_OUT_END'):
|
||||
write = True
|
|
@ -290,6 +290,91 @@ FcReadLink (const FcChar8 *pathname,
|
|||
#endif
|
||||
}
|
||||
|
||||
/* On Windows MingW provides dirent.h / openddir(), but MSVC does not */
|
||||
#ifndef HAVE_DIRENT_H
|
||||
|
||||
struct DIR {
|
||||
struct dirent d_ent;
|
||||
HANDLE handle;
|
||||
WIN32_FIND_DATA fdata;
|
||||
FcBool valid;
|
||||
};
|
||||
|
||||
FcPrivate DIR *
|
||||
FcCompatOpendirWin32 (const char *dirname)
|
||||
{
|
||||
size_t len;
|
||||
char *name;
|
||||
DIR *dir;
|
||||
|
||||
dir = calloc (1, sizeof (struct DIR));
|
||||
if (dir == NULL)
|
||||
return NULL;
|
||||
|
||||
len = strlen (dirname);
|
||||
name = malloc (len + 3);
|
||||
if (name == NULL)
|
||||
{
|
||||
free (dir);
|
||||
return NULL;
|
||||
}
|
||||
memcpy (name, dirname, len);
|
||||
name[len++] = FC_DIR_SEPARATOR;
|
||||
name[len++] = '*';
|
||||
name[len] = '\0';
|
||||
|
||||
dir->handle = FindFirstFileEx (name, FindExInfoBasic, &dir->fdata, FindExSearchNameMatch, NULL, 0);
|
||||
|
||||
free (name);
|
||||
|
||||
if (!dir->handle)
|
||||
{
|
||||
free (dir);
|
||||
dir = NULL;
|
||||
|
||||
if (GetLastError () == ERROR_FILE_NOT_FOUND)
|
||||
errno = ENOENT;
|
||||
else
|
||||
errno = EACCES;
|
||||
}
|
||||
|
||||
dir->valid = FcTrue;
|
||||
return dir;
|
||||
}
|
||||
|
||||
FcPrivate struct dirent *
|
||||
FcCompatReaddirWin32 (DIR *dir)
|
||||
{
|
||||
if (dir->valid != FcTrue)
|
||||
return NULL;
|
||||
|
||||
dir->d_ent.d_name = dir->fdata.cFileName;
|
||||
|
||||
if ((dir->fdata.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY) != 0)
|
||||
dir->d_ent.d_type = DT_DIR;
|
||||
else if (dir->fdata.dwFileAttributes == FILE_ATTRIBUTE_NORMAL)
|
||||
dir->d_ent.d_type = DT_REG;
|
||||
else
|
||||
dir->d_ent.d_type = DT_UNKNOWN;
|
||||
|
||||
if (!FindNextFile (dir->handle, &dir->fdata))
|
||||
dir->valid = FcFalse;
|
||||
|
||||
return &dir->d_ent;
|
||||
}
|
||||
|
||||
FcPrivate int
|
||||
FcCompatClosedirWin32 (DIR *dir)
|
||||
{
|
||||
if (dir != NULL && dir->handle != NULL)
|
||||
{
|
||||
FindClose (dir->handle);
|
||||
free (dir);
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
#endif /* HAVE_DIRENT_H */
|
||||
|
||||
#define __fccompat__
|
||||
#include "fcaliastail.h"
|
||||
#undef __fccompat__
|
||||
|
|
|
@ -0,0 +1 @@
|
|||
#include <stdint.h>
|
|
@ -44,6 +44,7 @@
|
|||
# define WIN32_EXTRA_LEAN
|
||||
# define STRICT
|
||||
# include <windows.h>
|
||||
# include <io.h>
|
||||
|
||||
#if defined(_MSC_VER)
|
||||
#include <BaseTsd.h>
|
||||
|
@ -52,6 +53,53 @@ typedef SSIZE_T ssize_t;
|
|||
|
||||
#define S_ISREG(m) (((m) & S_IFMT) == S_IFREG)
|
||||
|
||||
#ifndef S_ISDIR
|
||||
#define S_ISDIR(m) (((m) & _S_IFMT) == _S_IFDIR)
|
||||
#endif
|
||||
|
||||
#ifndef F_OK
|
||||
#define F_OK 0
|
||||
#endif
|
||||
#ifndef X_OK
|
||||
#define X_OK 0 /* no execute bit on windows */
|
||||
#endif
|
||||
#ifndef W_OK
|
||||
#define W_OK 2
|
||||
#endif
|
||||
#ifndef R_OK
|
||||
#define R_OK 4
|
||||
#endif
|
||||
|
||||
/* MingW provides dirent.h / openddir(), but MSVC does not */
|
||||
#ifndef HAVE_DIRENT_H
|
||||
|
||||
#define HAVE_STRUCT_DIRENT_D_TYPE 1
|
||||
|
||||
typedef struct DIR DIR;
|
||||
|
||||
typedef enum {
|
||||
DT_UNKNOWN = 0,
|
||||
DT_DIR,
|
||||
DT_REG,
|
||||
} DIR_TYPE;
|
||||
|
||||
typedef struct dirent {
|
||||
const char *d_name;
|
||||
DIR_TYPE d_type;
|
||||
} dirent;
|
||||
|
||||
#define opendir(dirname) FcCompatOpendirWin32(dirname)
|
||||
#define closedir(d) FcCompatClosedirWin32(d)
|
||||
#define readdir(d) FcCompatReaddirWin32(d)
|
||||
|
||||
DIR * FcCompatOpendirWin32 (const char *dirname);
|
||||
|
||||
struct dirent * FcCompatReaddirWin32 (DIR *dir);
|
||||
|
||||
int FcCompatClosedirWin32 (DIR *dir);
|
||||
|
||||
#endif /* HAVE_DIRENT_H */
|
||||
|
||||
#endif /* _WIN32 */
|
||||
|
||||
#endif /* _FCWINDOWS_H_ */
|
||||
|
|
|
@ -0,0 +1,234 @@
|
|||
EXPORTS
|
||||
FcAtomicCreate
|
||||
FcAtomicDeleteNew
|
||||
FcAtomicDestroy
|
||||
FcAtomicLock
|
||||
FcAtomicNewFile
|
||||
FcAtomicOrigFile
|
||||
FcAtomicReplaceOrig
|
||||
FcAtomicUnlock
|
||||
FcBlanksAdd
|
||||
FcBlanksCreate
|
||||
FcBlanksDestroy
|
||||
FcBlanksIsMember
|
||||
FcCacheCopySet
|
||||
FcCacheCreateTagFile
|
||||
FcCacheDir
|
||||
FcCacheNumFont
|
||||
FcCacheNumSubdir
|
||||
FcCacheSubdir
|
||||
FcCharSetAddChar
|
||||
FcCharSetCopy
|
||||
FcCharSetCount
|
||||
FcCharSetCoverage
|
||||
FcCharSetCreate
|
||||
FcCharSetDelChar
|
||||
FcCharSetDestroy
|
||||
FcCharSetEqual
|
||||
FcCharSetFirstPage
|
||||
FcCharSetHasChar
|
||||
FcCharSetIntersect
|
||||
FcCharSetIntersectCount
|
||||
FcCharSetIsSubset
|
||||
FcCharSetMerge
|
||||
FcCharSetNew
|
||||
FcCharSetNextPage
|
||||
FcCharSetSubtract
|
||||
FcCharSetSubtractCount
|
||||
FcCharSetUnion
|
||||
FcConfigAppFontAddDir
|
||||
FcConfigAppFontAddFile
|
||||
FcConfigAppFontClear
|
||||
FcConfigBuildFonts
|
||||
FcConfigCreate
|
||||
FcConfigDestroy
|
||||
FcConfigEnableHome
|
||||
FcConfigFileInfoIterGet
|
||||
FcConfigFileInfoIterInit
|
||||
FcConfigFileInfoIterNext
|
||||
FcConfigFilename
|
||||
FcConfigGetBlanks
|
||||
FcConfigGetCache
|
||||
FcConfigGetCacheDirs
|
||||
FcConfigGetConfigDirs
|
||||
FcConfigGetConfigFiles
|
||||
FcConfigGetCurrent
|
||||
FcConfigGetFontDirs
|
||||
FcConfigGetFonts
|
||||
FcConfigGetRescanInterval
|
||||
FcConfigGetRescanInverval
|
||||
FcConfigGetSysRoot
|
||||
FcConfigHome
|
||||
FcConfigParseAndLoad
|
||||
FcConfigParseAndLoadFromMemory
|
||||
FcConfigReference
|
||||
FcConfigSetCurrent
|
||||
FcConfigSetRescanInterval
|
||||
FcConfigSetRescanInverval
|
||||
FcConfigSetSysRoot
|
||||
FcConfigSubstitute
|
||||
FcConfigSubstituteWithPat
|
||||
FcConfigUptoDate
|
||||
FcDefaultSubstitute
|
||||
FcDirCacheClean
|
||||
FcDirCacheCreateUUID
|
||||
FcDirCacheDeleteUUID
|
||||
FcDirCacheLoad
|
||||
FcDirCacheLoadFile
|
||||
FcDirCacheRead
|
||||
FcDirCacheRescan
|
||||
FcDirCacheUnlink
|
||||
FcDirCacheUnload
|
||||
FcDirCacheValid
|
||||
FcDirSave
|
||||
FcDirScan
|
||||
FcFileIsDir
|
||||
FcFileScan
|
||||
FcFini
|
||||
FcFontList
|
||||
FcFontMatch
|
||||
FcFontRenderPrepare
|
||||
FcFontSetAdd
|
||||
FcFontSetCreate
|
||||
FcFontSetDestroy
|
||||
FcFontSetList
|
||||
FcFontSetMatch
|
||||
FcFontSetPrint
|
||||
FcFontSetSort
|
||||
FcFontSetSortDestroy
|
||||
FcFontSort
|
||||
FcFreeTypeCharIndex
|
||||
FcFreeTypeCharSet
|
||||
FcFreeTypeCharSetAndSpacing
|
||||
FcFreeTypeQuery
|
||||
FcFreeTypeQueryAll
|
||||
FcFreeTypeQueryFace
|
||||
FcGetDefaultLangs
|
||||
FcGetLangs
|
||||
FcGetVersion
|
||||
FcInit
|
||||
FcInitBringUptoDate
|
||||
FcInitLoadConfig
|
||||
FcInitLoadConfigAndFonts
|
||||
FcInitReinitialize
|
||||
FcLangGetCharSet
|
||||
FcLangNormalize
|
||||
FcLangSetAdd
|
||||
FcLangSetCompare
|
||||
FcLangSetContains
|
||||
FcLangSetCopy
|
||||
FcLangSetCreate
|
||||
FcLangSetDel
|
||||
FcLangSetDestroy
|
||||
FcLangSetEqual
|
||||
FcLangSetGetLangs
|
||||
FcLangSetHash
|
||||
FcLangSetHasLang
|
||||
FcLangSetSubtract
|
||||
FcLangSetUnion
|
||||
FcMatrixCopy
|
||||
FcMatrixEqual
|
||||
FcMatrixMultiply
|
||||
FcMatrixRotate
|
||||
FcMatrixScale
|
||||
FcMatrixShear
|
||||
FcNameConstant
|
||||
FcNameGetConstant
|
||||
FcNameGetObjectType
|
||||
FcNameParse
|
||||
FcNameRegisterConstants
|
||||
FcNameRegisterObjectTypes
|
||||
FcNameUnparse
|
||||
FcNameUnregisterConstants
|
||||
FcNameUnregisterObjectTypes
|
||||
FcObjectSetAdd
|
||||
FcObjectSetBuild
|
||||
FcObjectSetCreate
|
||||
FcObjectSetDestroy
|
||||
FcObjectSetVaBuild
|
||||
FcPatternAdd
|
||||
FcPatternAddBool
|
||||
FcPatternAddCharSet
|
||||
FcPatternAddDouble
|
||||
FcPatternAddFTFace
|
||||
FcPatternAddInteger
|
||||
FcPatternAddLangSet
|
||||
FcPatternAddMatrix
|
||||
FcPatternAddRange
|
||||
FcPatternAddString
|
||||
FcPatternAddWeak
|
||||
FcPatternBuild
|
||||
FcPatternCreate
|
||||
FcPatternDel
|
||||
FcPatternDestroy
|
||||
FcPatternDuplicate
|
||||
FcPatternEqual
|
||||
FcPatternEqualSubset
|
||||
FcPatternFilter
|
||||
FcPatternFindIter
|
||||
FcPatternFormat
|
||||
FcPatternGet
|
||||
FcPatternGetBool
|
||||
FcPatternGetCharSet
|
||||
FcPatternGetDouble
|
||||
FcPatternGetFTFace
|
||||
FcPatternGetInteger
|
||||
FcPatternGetLangSet
|
||||
FcPatternGetMatrix
|
||||
FcPatternGetRange
|
||||
FcPatternGetString
|
||||
FcPatternGetWithBinding
|
||||
FcPatternHash
|
||||
FcPatternIterEqual
|
||||
FcPatternIterGetObject
|
||||
FcPatternIterGetValue
|
||||
FcPatternIterIsValid
|
||||
FcPatternIterNext
|
||||
FcPatternIterStart
|
||||
FcPatternIterValueCount
|
||||
FcPatternObjectCount
|
||||
FcPatternPrint
|
||||
FcPatternReference
|
||||
FcPatternRemove
|
||||
FcPatternVaBuild
|
||||
FcRangeCopy
|
||||
FcRangeCreateDouble
|
||||
FcRangeCreateInteger
|
||||
FcRangeDestroy
|
||||
FcRangeGetDouble
|
||||
FcStrBasename
|
||||
FcStrBuildFilename
|
||||
FcStrCmp
|
||||
FcStrCmpIgnoreCase
|
||||
FcStrCopy
|
||||
FcStrCopyFilename
|
||||
FcStrDirname
|
||||
FcStrDowncase
|
||||
FcStrFree
|
||||
FcStrListCreate
|
||||
FcStrListDone
|
||||
FcStrListFirst
|
||||
FcStrListNext
|
||||
FcStrPlus
|
||||
FcStrSetAdd
|
||||
FcStrSetAddFilename
|
||||
FcStrSetCreate
|
||||
FcStrSetDel
|
||||
FcStrSetDestroy
|
||||
FcStrSetEqual
|
||||
FcStrSetMember
|
||||
FcStrStr
|
||||
FcStrStrIgnoreCase
|
||||
FcUcs4ToUtf8
|
||||
FcUtf16Len
|
||||
FcUtf16ToUcs4
|
||||
FcUtf8Len
|
||||
FcUtf8ToUcs4
|
||||
FcValueDestroy
|
||||
FcValueEqual
|
||||
FcValuePrint
|
||||
FcValueSave
|
||||
FcWeightFromOpenType
|
||||
FcWeightFromOpenTypeDouble
|
||||
FcWeightToOpenType
|
||||
FcWeightToOpenTypeDouble
|
|
@ -0,0 +1,71 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import argparse
|
||||
from collections import OrderedDict
|
||||
|
||||
# cat fontconfig/fontconfig.h | grep '^Fc[^ ]* *(' | sed -e 's/ *(.*$//'
|
||||
|
||||
def extract(fname):
|
||||
with open(fname, 'r', encoding='utf-8') as f:
|
||||
for l in f.readlines():
|
||||
l = l.rstrip()
|
||||
m = re.match(r'^(Fc[^ ]*)[\s\w]*\(.*', l)
|
||||
|
||||
if m and m.group(1) not in ['FcCacheDir', 'FcCacheSubdir']:
|
||||
yield m.group(1)
|
||||
|
||||
if __name__=='__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('srcdir')
|
||||
parser.add_argument('head')
|
||||
parser.add_argument('tail')
|
||||
parser.add_argument('headers', nargs='+')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
definitions = {}
|
||||
|
||||
for fname in os.listdir(args.srcdir):
|
||||
define_name, ext = os.path.splitext(fname)
|
||||
if ext != '.c':
|
||||
continue
|
||||
|
||||
define_name = '__%s__' % os.path.basename(define_name)
|
||||
|
||||
for definition in extract(os.path.join(args.srcdir, fname)):
|
||||
definitions[definition] = define_name
|
||||
|
||||
declarations = OrderedDict()
|
||||
|
||||
for fname in args.headers:
|
||||
for declaration in extract(fname):
|
||||
try:
|
||||
define_name = definitions[declaration]
|
||||
except KeyError:
|
||||
print ('error: could not locate %s in src/*.c' % declaration)
|
||||
sys.exit(1)
|
||||
|
||||
declarations[declaration] = define_name
|
||||
|
||||
with open(args.head, 'w') as head:
|
||||
with open(args.tail, 'w') as tail:
|
||||
tail.write('#if HAVE_GNUC_ATTRIBUTE\n')
|
||||
last = None
|
||||
for name, define_name in declarations.items():
|
||||
alias = 'IA__%s' % name
|
||||
hattr = 'FC_ATTRIBUTE_VISIBILITY_HIDDEN'
|
||||
head.write('extern __typeof (%s) %s %s;\n' % (name, alias, hattr))
|
||||
head.write('#define %s %s\n' % (name, alias))
|
||||
if define_name != last:
|
||||
if last is not None:
|
||||
tail.write('#endif /* %s */\n' % last)
|
||||
tail.write('#ifdef %s\n' % define_name)
|
||||
last = define_name
|
||||
tail.write('# undef %s\n' % name)
|
||||
cattr = '__attribute((alias("%s"))) FC_ATTRIBUTE_VISIBILITY_EXPORT' % alias
|
||||
tail.write('extern __typeof (%s) %s %s;\n' % (name, name, cattr))
|
||||
tail.write('#endif /* %s */\n' % last)
|
||||
tail.write('#endif /* HAVE_GNUC_ATTRIBUTE */\n')
|
|
@ -0,0 +1,91 @@
|
|||
fc_sources = [
|
||||
'fcatomic.c',
|
||||
'fccache.c',
|
||||
'fccfg.c',
|
||||
'fccharset.c',
|
||||
'fccompat.c',
|
||||
'fcdbg.c',
|
||||
'fcdefault.c',
|
||||
'fcdir.c',
|
||||
'fcformat.c',
|
||||
'fcfreetype.c',
|
||||
'fcfs.c',
|
||||
'fcptrlist.c',
|
||||
'fchash.c',
|
||||
'fcinit.c',
|
||||
'fclang.c',
|
||||
'fclist.c',
|
||||
'fcmatch.c',
|
||||
'fcmatrix.c',
|
||||
'fcname.c',
|
||||
'fcobjs.c',
|
||||
'fcpat.c',
|
||||
'fcrange.c',
|
||||
'fcserialize.c',
|
||||
'fcstat.c',
|
||||
'fcstr.c',
|
||||
'fcweight.c',
|
||||
'fcxml.c',
|
||||
'ftglue.c',
|
||||
]
|
||||
|
||||
# FIXME: obviously fragile, cc.preprocess would be sweet
|
||||
if cc.get_id() == 'gcc'
|
||||
cpp = ['gcc', '-E', '-P']
|
||||
elif cc.get_id() == 'msvc'
|
||||
cpp = ['cl', '/EP']
|
||||
elif cc.get_id() == 'clang'
|
||||
cpp = ['clang', '-E', '-P']
|
||||
else
|
||||
error('FIXME: implement cc.preprocess')
|
||||
endif
|
||||
|
||||
cpp += ['-I', join_paths(meson.current_source_dir(), '..')]
|
||||
|
||||
fcobjshash_gperf = custom_target('fcobjshash.gperf',
|
||||
input: 'fcobjshash.gperf.h',
|
||||
output: 'fcobjshash.gperf',
|
||||
command: [python3, files('cutout.py')[0], '@INPUT@', '@OUTPUT@', cpp],
|
||||
build_by_default: true,
|
||||
)
|
||||
|
||||
fcobjshash_h = custom_target('fcobjshash.h',
|
||||
input: fcobjshash_gperf,
|
||||
output: 'fcobjshash.h',
|
||||
command: [gperf, '--pic', '-m', '100', '@INPUT@', '--output-file', '@OUTPUT@']
|
||||
)
|
||||
|
||||
# write def file with exports for windows
|
||||
cdata_def = configuration_data()
|
||||
cdata_def.set('DEF_VERSION', defversion)
|
||||
fontconfig_def = configure_file(input: 'fontconfig.def.in', output: 'fontconfig.def', configuration: cdata_def)
|
||||
|
||||
libfontconfig = library('fontconfig',
|
||||
fc_sources, alias_headers, ft_alias_headers, fclang_h, fccase_h, fcobjshash_h,
|
||||
c_args: c_args,
|
||||
include_directories: incbase,
|
||||
dependencies: deps,
|
||||
vs_module_defs: fontconfig_def,
|
||||
install: true,
|
||||
soversion: soversion,
|
||||
version: libversion,
|
||||
darwin_versions: osxversion,
|
||||
)
|
||||
|
||||
fontconfig_dep = declare_dependency(link_with: libfontconfig,
|
||||
include_directories: incbase,
|
||||
dependencies: deps,
|
||||
)
|
||||
|
||||
pkgmod.generate(libfontconfig,
|
||||
description: 'Font configuration and customization library',
|
||||
filebase: 'fontconfig',
|
||||
name: 'Fontconfig',
|
||||
requires: ['freetype2 ' + freetype_req],
|
||||
version: fc_version,
|
||||
variables: [
|
||||
'sysconfdir=@0@'.format(join_paths(prefix, get_option('sysconfdir'))),
|
||||
'localstatedir=@0@'.format(join_paths(prefix, get_option('localstatedir'))),
|
||||
'confdir=${sysconfdir}/fonts',
|
||||
'cachedir=${localstatedir}/cache/fontconfig',
|
||||
])
|
|
@ -0,0 +1,20 @@
|
|||
import argparse
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
if __name__=='__main__':
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument('prog')
|
||||
parser.add_argument('input')
|
||||
parser.add_argument('output')
|
||||
parser.add_argument('args', nargs='*')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
unescaped_args = [arg.strip('""') for arg in args.args]
|
||||
|
||||
command = [args.prog] + unescaped_args
|
||||
|
||||
with open(args.output, 'w') as out:
|
||||
with open(args.input, 'r') as in_:
|
||||
sys.exit(subprocess.run(command, stdin=in_, stdout=out).returncode)
|
|
@ -0,0 +1,6 @@
|
|||
/expat-*
|
||||
/freetype2
|
||||
/gperf
|
||||
/zlib-*
|
||||
/libpng-*
|
||||
/packagecache
|
|
@ -0,0 +1,10 @@
|
|||
[wrap-file]
|
||||
directory = expat-2.2.6
|
||||
|
||||
source_url = https://github.com/libexpat/libexpat/releases/download/R_2_2_6/expat-2.2.6.tar.bz2
|
||||
source_filename = expat-2.2.6.tar.bz2
|
||||
source_hash = 17b43c2716d521369f82fc2dc70f359860e90fa440bea65b3b85f0b246ea81f2
|
||||
|
||||
patch_url = https://wrapdb.mesonbuild.com/v1/projects/expat/2.2.6/1/get_zip
|
||||
patch_filename = expat-2.2.6-1-wrap.zip
|
||||
patch_hash = b8312fae757c7bff6f0cb430108104441a3da7a0a333809f5c80b354157eaa4d
|
|
@ -0,0 +1,5 @@
|
|||
[wrap-git]
|
||||
directory=freetype2
|
||||
url=https://github.com/centricular/freetype2.git
|
||||
push-url=git@github.com:centricular/freetype2.git
|
||||
revision=meson
|
|
@ -0,0 +1,5 @@
|
|||
[wrap-git]
|
||||
directory=gperf
|
||||
url=https://gitlab.freedesktop.org/tpm/gperf.git
|
||||
push-url=https://gitlab.freedesktop.org/tpm/gperf.git
|
||||
revision=meson
|
|
@ -0,0 +1,10 @@
|
|||
[wrap-file]
|
||||
directory = libpng-1.6.37
|
||||
|
||||
source_url = https://github.com/glennrp/libpng/archive/v1.6.37.tar.gz
|
||||
source_filename = libpng-1.6.37.tar.gz
|
||||
source_hash = ca74a0dace179a8422187671aee97dd3892b53e168627145271cad5b5ac81307
|
||||
|
||||
patch_url = https://wrapdb.mesonbuild.com/v1/projects/libpng/1.6.37/1/get_zip
|
||||
patch_filename = libpng-1.6.37-1-wrap.zip
|
||||
patch_hash = 9a863ae8a5657315a484c94c51f9f636b1fb9f49a15196cc896b72e5f21d78f0
|
|
@ -0,0 +1,10 @@
|
|||
[wrap-file]
|
||||
directory = zlib-1.2.11
|
||||
|
||||
source_url = http://zlib.net/fossils/zlib-1.2.11.tar.gz
|
||||
source_filename = zlib-1.2.11.tar.gz
|
||||
source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
|
||||
|
||||
patch_url = https://wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip
|
||||
patch_filename = zlib-1.2.11-4-wrap.zip
|
||||
patch_hash = f733976fbfc59e0bcde01aa9469a24eeb16faf0a4280b17e9eaa60a301d75657
|
|
@ -0,0 +1,46 @@
|
|||
tests = [
|
||||
['test-bz89617.c', {'c_args': ['-DSRCDIR="@0@"'.format(meson.current_source_dir())]}],
|
||||
['test-bz131804.c'],
|
||||
['test-bz96676.c'],
|
||||
['test-name-parse.c'],
|
||||
['test-bz106618.c'],
|
||||
['test-bz1744377.c'],
|
||||
['test-issue180.c'],
|
||||
]
|
||||
|
||||
if host_machine.system() != 'windows'
|
||||
tests += [
|
||||
# FIXME: ['test-migration.c'],
|
||||
['test-bz106632.c', {'c_args': ['-DFONTFILE="@0@"'.format(join_paths(meson.current_source_dir(), '4x6.pcf'))]}],
|
||||
['test-issue107.c'], # FIXME: fails on mingw
|
||||
# FIXME: this needs NotoSans-hinted.zip font downloaded and unpacked into test build directory! see run-test.sh
|
||||
['test-crbug1004254.c', {'dependencies': dependency('threads')}], # for pthread
|
||||
]
|
||||
|
||||
if get_option('default_library') == 'static'
|
||||
tests += [
|
||||
['test-issue110.c'],
|
||||
['test-d1f48f11.c'],
|
||||
]
|
||||
endif
|
||||
endif
|
||||
|
||||
foreach test_data : tests
|
||||
fname = test_data[0]
|
||||
opts = test_data.length() > 1 ? test_data[1] : {}
|
||||
extra_c_args = opts.get('c_args', [])
|
||||
extra_deps = opts.get('dependencies', [])
|
||||
|
||||
test_name = fname.split('.')[0].underscorify()
|
||||
exe = executable(test_name, fname,
|
||||
c_args: c_args + extra_c_args,
|
||||
include_directories: incbase,
|
||||
link_with: [libfontconfig],
|
||||
dependencies: extra_deps,
|
||||
)
|
||||
|
||||
test(test_name, exe)
|
||||
endforeach
|
||||
|
||||
# FIXME: run-test.sh stuff
|
||||
# FIXME: jsonc test-conf
|
Loading…
Reference in New Issue