Merged the changes from 2.1.6 upstream release tag

George Sokianos 2024-11-30 14:44:52 +00:00
parent d74688028c
commit 7d4742970d
41 changed files with 1542 additions and 975 deletions

.github/FUNDING.yml (deleted)

@@ -1,3 +0,0 @@
-# These are supported funding model platforms
-github: franko

.github/labeler.yml

@ -1,38 +1,60 @@
"Category: CI": "Category: CI":
- .github/workflows/* - changed-files:
- any-glob-to-any-file:
- .github/workflows/*
"Category: Meta": "Category: Meta":
- ./* - changed-files:
- .github/* - any-glob-to-any-file:
- .github/ISSUE_TEMPLATE/* - ./*
- .github/PULL_REQUEST_TEMPLATE/* - .github/*
- .gitignore - .github/ISSUE_TEMPLATE/*
- .github/PULL_REQUEST_TEMPLATE/*
- .gitignore
"Category: Build System": "Category: Build System":
- meson.build - changed-files:
- meson_options.txt - any-glob-to-any-file:
- subprojects/* - meson.build
- meson_options.txt
- subprojects/*
"Category: Documentation": "Category: Documentation":
- docs/**/* - changed-files:
- any-glob-to-any-file:
- docs/**/*
"Category: Resources": "Category: Resources":
- resources/**/* - changed-files:
- any-glob-to-any-file:
- resources/**/*
"Category: Themes": "Category: Themes":
- data/colors/* - changed-files:
- any-glob-to-any-file:
- data/colors/*
"Category: Lua Core": "Category: Lua Core":
- data/core/**/* - changed-files:
- any-glob-to-any-file:
- data/core/**/*
"Category: Fonts": "Category: Fonts":
- data/fonts/* - changed-files:
- any-glob-to-any-file:
- data/fonts/*
"Category: Plugins": "Category: Plugins":
- data/plugins/* - changed-files:
- any-glob-to-any-file:
- data/plugins/*
"Category: C Core": "Category: C Core":
- src/**/* - changed-files:
- any-glob-to-any-file:
- src/**/*
"Category: Libraries": "Category: Libraries":
- lib/**/* - changed-files:
- any-glob-to-any-file:
- lib/**/*


@ -10,7 +10,7 @@ jobs:
runs-on: ubuntu-latest runs-on: ubuntu-latest
steps: steps:
- name: Apply Type Label - name: Apply Type Label
uses: actions/labeler@v3 uses: actions/labeler@v5
with: with:
repo-token: "${{ secrets.GITHUB_TOKEN }}" repo-token: "${{ secrets.GITHUB_TOKEN }}"
sync-labels: "" # works around actions/labeler#104 sync-labels: false


@ -3,11 +3,11 @@ name: CI
on: on:
push: push:
branches: branches:
- '*' - "*"
pull_request: pull_request:
branches: branches:
- '*' - "*"
workflow_dispatch: workflow_dispatch:
@ -18,11 +18,12 @@ jobs:
strategy: strategy:
matrix: matrix:
config: config:
- { name: "GCC", cc: gcc, cxx: g++ } - { name: "GCC", cc: gcc, cxx: g++ }
- { name: "clang", cc: clang, cxx: clang++ } - { name: "clang", cc: clang, cxx: clang++ }
env: env:
CC: ${{ matrix.config.cc }} CC: ${{ matrix.config.cc }}
CXX: ${{ matrix.config.cxx }} CXX: ${{ matrix.config.cxx }}
steps: steps:
- name: Set Environment Variables - name: Set Environment Variables
if: ${{ matrix.config.cc == 'gcc' }} if: ${{ matrix.config.cc == 'gcc' }}
@ -30,38 +31,51 @@ jobs:
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV" echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV"
echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-linux-$(uname -m)-portable" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-linux-$(uname -m)-portable" >> "$GITHUB_ENV"
- uses: actions/checkout@v3
- name: Checkout code
uses: actions/checkout@v4
- name: Python Setup - name: Python Setup
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: 3.9 python-version: "3.11"
- name: Update Packages - name: Update Packages
run: sudo apt-get update run: sudo apt-get update
- name: Install Dependencies - name: Install Dependencies
run: bash scripts/install-dependencies.sh --debug run: bash scripts/install-dependencies.sh --debug
- name: Build - name: Build
run: | run: |
bash --version bash --version
bash scripts/build.sh --debug --forcefallback --portable bash scripts/build.sh --debug --forcefallback --portable
- name: Package - name: Package
if: ${{ matrix.config.cc == 'gcc' }} if: ${{ matrix.config.cc == 'gcc' }}
run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary
- name: Upload Artifacts - name: Upload Artifacts
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v4
if: ${{ matrix.config.cc == 'gcc' }} if: ${{ matrix.config.cc == 'gcc' }}
with: with:
name: Linux Artifacts name: Linux Artifacts
path: ${{ env.INSTALL_NAME }}.tar.gz path: ${{ env.INSTALL_NAME }}.tar.gz
compression-level: 0
build_macos: build_macos:
name: macOS name: macOS
runs-on: macos-11 strategy:
matrix:
config:
- { arch: x86_64, runner: macos-13 } # macos-13 uses x86_64
- { arch: arm64, runner: macos-14 } # macos-14 / latest uses M1
runs-on: ${{ matrix.config.runner }}
env: env:
CC: clang CC: clang
CXX: clang++ CXX: clang++
strategy:
matrix:
arch: ['x86_64', 'arm64']
steps: steps:
- name: System Information - name: System Information
run: | run: |
@ -69,36 +83,47 @@ jobs:
bash --version bash --version
gcc -v gcc -v
xcodebuild -version xcodebuild -version
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV" echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV"
echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-macos-${{ matrix.arch }}" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-macos-${{ matrix.config.arch }}" >> "$GITHUB_ENV"
if [[ $(uname -m) != ${{ matrix.arch }} ]]; then echo "ARCH=--cross-arch ${{ matrix.arch }}" >> "$GITHUB_ENV"; fi
- uses: actions/checkout@v3 - name: Checkout code
uses: actions/checkout@v4
- name: Python Setup - name: Python Setup
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: 3.9 python-version: "3.11"
# installing md5sha1sum will eliminate a warning with arm64 and libusb
- name: Install Dependencies - name: Install Dependencies
# --lhelper will eliminate a warning with arm64 and libusb run: |
run: bash scripts/install-dependencies.sh --debug --lhelper brew install bash md5sha1sum
pip install meson ninja dmgbuild
- name: Build - name: Build
run: | run: |
bash --version bash --version
bash scripts/build.sh --bundle --debug --forcefallback $ARCH bash scripts/build.sh --bundle --debug --forcefallback
- name: Create DMG Image - name: Create DMG Image
run: bash scripts/package.sh --version ${INSTALL_REF} $ARCH --debug --dmg run: bash scripts/package.sh --version ${INSTALL_REF} --debug --dmg
- name: Upload DMG Image - name: Upload DMG Image
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v4
with: with:
name: macOS DMG Images name: macOS DMG Images (${{ matrix.config.arch }})
path: ${{ env.INSTALL_NAME }}.dmg path: ${{ env.INSTALL_NAME }}.dmg
compression-level: 0
build_macos_universal: build_macos_universal:
name: macOS (Universal) name: macOS (Universal)
runs-on: macos-11 runs-on: macos-14
needs: build_macos needs: build_macos
steps: steps:
- name: System Information - name: System Information
run: | run: |
@ -106,32 +131,42 @@ jobs:
bash --version bash --version
gcc -v gcc -v
xcodebuild -version xcodebuild -version
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-macos-universal" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-macos-universal" >> "$GITHUB_ENV"
- name: Setup Python - name: Setup Python
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: '3.9' python-version: "3.11"
- name: Install dmgbuild - name: Install dmgbuild
run: pip install dmgbuild run: pip install dmgbuild
- uses: actions/checkout@v3
- name: Checkout code
uses: actions/checkout@v4
- name: Download artifacts - name: Download artifacts
uses: actions/download-artifact@v3 uses: actions/download-artifact@v4
id: download id: download
with: with:
name: macOS DMG Images pattern: macOS DMG Images *
merge-multiple: true
path: dmgs-original path: dmgs-original
- name: Make universal bundles - name: Make universal bundles
run: | run: |
bash --version bash --version
bash scripts/make-universal-binaries.sh ${{ steps.download.outputs.download-path }} "${INSTALL_NAME}" bash scripts/make-universal-binaries.sh ${{ steps.download.outputs.download-path }} "${INSTALL_NAME}"
- name: Upload DMG Image - name: Upload DMG Image
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v4
with: with:
name: macOS Universal DMG Images name: macOS DMG Images (Universal)
path: ${{ env.INSTALL_NAME }}.dmg path: ${{ env.INSTALL_NAME }}.dmg
compression-level: 0
build_windows_msys2: build_windows_msys2:
name: Windows name: Windows
@ -139,48 +174,58 @@ jobs:
strategy: strategy:
matrix: matrix:
config: config:
- {msystem: MINGW32, arch: i686} - { msystem: MINGW32, arch: i686 }
- {msystem: MINGW64, arch: x86_64} - { msystem: MINGW64, arch: x86_64 }
defaults: defaults:
run: run:
shell: msys2 {0} shell: msys2 {0}
steps: steps:
- uses: actions/checkout@v3 - name: Checkout code
- uses: msys2/setup-msys2@v2 uses: actions/checkout@v4
with:
msystem: ${{ matrix.config.msystem }} - name: Setup MSYS2
install: >- uses: msys2/setup-msys2@v2
base-devel with:
git msystem: ${{ matrix.config.msystem }}
zip install: >-
mingw-w64-${{ matrix.config.arch }}-gcc base-devel
mingw-w64-${{ matrix.config.arch }}-meson git
mingw-w64-${{ matrix.config.arch }}-ninja zip
mingw-w64-${{ matrix.config.arch }}-ca-certificates mingw-w64-${{ matrix.config.arch }}-gcc
mingw-w64-${{ matrix.config.arch }}-ntldd mingw-w64-${{ matrix.config.arch }}-meson
- name: Set Environment Variables mingw-w64-${{ matrix.config.arch }}-ninja
run: | mingw-w64-${{ matrix.config.arch }}-ca-certificates
echo "$HOME/.local/bin" >> "$GITHUB_PATH" mingw-w64-${{ matrix.config.arch }}-ntldd
echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV"
if [[ "${MSYSTEM}" == "MINGW64" ]]; then - name: Set Environment Variables
echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-windows-x86_64" >> "$GITHUB_ENV" run: |
else echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-windows-i686" >> "$GITHUB_ENV" echo "INSTALL_REF=${GITHUB_REF##*/}" >> "$GITHUB_ENV"
fi if [[ "${MSYSTEM}" == "MINGW64" ]]; then
- name: Install Dependencies echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-windows-x86_64" >> "$GITHUB_ENV"
if: false else
run: bash scripts/install-dependencies.sh --debug echo "INSTALL_NAME=lite-xl-${GITHUB_REF##*/}-windows-i686" >> "$GITHUB_ENV"
- name: Build fi
run: |
bash --version - name: Install Dependencies
bash scripts/build.sh -U --debug --forcefallback if: false
- name: Package run: bash scripts/install-dependencies.sh --debug
run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary
- name: Upload Artifacts - name: Build
uses: actions/upload-artifact@v3 run: |
with: bash --version
name: Windows Artifacts bash scripts/build.sh -U --debug --forcefallback
path: ${{ env.INSTALL_NAME }}.zip
- name: Package
run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary
- name: Upload Artifacts
uses: actions/upload-artifact@v4
with:
name: Windows Artifacts (${{ matrix.config.msystem }})
path: ${{ env.INSTALL_NAME }}.zip
compression-level: 0
build_windows_msvc: build_windows_msvc:
name: Windows (MSVC) name: Windows (MSVC)
@ -190,38 +235,52 @@ jobs:
arch: arch:
- { target: x86, name: i686 } - { target: x86, name: i686 }
- { target: x64, name: x86_64 } - { target: x64, name: x86_64 }
steps: steps:
- uses: actions/checkout@v3 - name: Checkout code
- uses: ilammy/msvc-dev-cmd@v1 uses: actions/checkout@v4
- name: Setup MSVC
uses: ilammy/msvc-dev-cmd@v1
with: with:
arch: ${{ matrix.arch.target }} arch: ${{ matrix.arch.target }}
- uses: actions/setup-python@v4
- name: Setup Python
uses: actions/setup-python@v5
with: with:
python-version: '3.x' python-version: "3.11"
- name: Install meson and ninja - name: Install meson and ninja
run: pip install meson ninja run: pip install meson ninja
- name: Set up environment variables - name: Set up environment variables
run: | run: |
"INSTALL_NAME=lite-xl-$($env:GITHUB_REF -replace ".*/")-windows-msvc-${{ matrix.arch.name }}" >> $env:GITHUB_ENV "INSTALL_NAME=lite-xl-$($env:GITHUB_REF -replace ".*/")-windows-msvc-${{ matrix.arch.name }}" >> $env:GITHUB_ENV
"INSTALL_REF=$($env:GITHUB_REF -replace ".*/")" >> $env:GITHUB_ENV "INSTALL_REF=$($env:GITHUB_REF -replace ".*/")" >> $env:GITHUB_ENV
"LUA_SUBPROJECT_PATH=subprojects/$(awk -F ' *= *' '/directory/ { printf $2 }' subprojects/lua.wrap)" >> $env:GITHUB_ENV "LUA_SUBPROJECT_PATH=subprojects/$(awk -F ' *= *' '/directory/ { printf $2 }' subprojects/lua.wrap)" >> $env:GITHUB_ENV
- name: Download and patch subprojects - name: Download and patch subprojects
shell: bash shell: bash
run: | run: |
meson subprojects download meson subprojects download
cat resources/windows/001-lua-unicode.diff | patch -Np1 -d "$LUA_SUBPROJECT_PATH" cat resources/windows/001-lua-unicode.diff | patch -Np1 -d "$LUA_SUBPROJECT_PATH"
- name: Configure - name: Configure
run: | run: |
meson setup --wrap-mode=forcefallback build meson setup --wrap-mode=forcefallback build
- name: Build - name: Build
run: | run: |
meson install -C build --destdir="../lite-xl" meson install -C build --destdir="../lite-xl"
- name: Package - name: Package
run: | run: |
Remove-Item -Recurse -Force -Path "lite-xl/lib","lite-xl/include" Remove-Item -Recurse -Force -Path "lite-xl/lib","lite-xl/include"
Compress-Archive -Path lite-xl -DestinationPath "$env:INSTALL_NAME.zip" Compress-Archive -Path lite-xl -DestinationPath "$env:INSTALL_NAME.zip"
- name: Upload Artifacts - name: Upload Artifacts
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v4
with: with:
name: Windows Artifacts (MSVC) name: Windows Artifacts (MSVC ${{ matrix.arch.target }})
path: ${{ env.INSTALL_NAME }}.zip path: ${{ env.INSTALL_NAME }}.zip
compression-level: 0


@ -9,7 +9,7 @@ on:
inputs: inputs:
version: version:
description: Release Version description: Release Version
default: v2.1.4 default: v2.1.6
required: true required: true
jobs: jobs:
@ -21,54 +21,63 @@ jobs:
outputs: outputs:
upload_url: ${{ steps.create_release.outputs.upload_url }} upload_url: ${{ steps.create_release.outputs.upload_url }}
version: ${{ steps.tag.outputs.version }} version: ${{ steps.tag.outputs.version }}
steps: steps:
- name: Checkout code - name: Checkout code
uses: actions/checkout@v3 uses: actions/checkout@v4
- name: Fetch Version - name: Fetch Version
id: tag id: tag
run: | run: |
if [[ "${{ github.event.inputs.version }}" != "" ]]; then if [[ "${{ github.event.inputs.version }}" != "" ]]; then
echo ::set-output name=version::${{ github.event.inputs.version }} echo "version=${{ github.event.inputs.version }}" >> $GITHUB_OUTPUT
else else
echo ::set-output name=version::${GITHUB_REF/refs\/tags\//} echo "version=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
fi fi
- name: Update Tag - name: Update Tag
uses: richardsimko/update-tag@v1 uses: richardsimko/update-tag@v1
with: with:
tag_name: ${{ steps.tag.outputs.version }} tag_name: ${{ steps.tag.outputs.version }}
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Generate Release Notes
env:
GH_TOKEN: ${{ github.token }}
run: bash scripts/generate-release-notes.sh --debug --version ${{ steps.tag.outputs.version }}
- name: Create Release - name: Create Release
id: create_release id: create_release
uses: softprops/action-gh-release@v1 uses: ncipollo/release-action@v1
with: with:
tag_name: ${{ steps.tag.outputs.version }}
name: Lite XL ${{ steps.tag.outputs.version }} name: Lite XL ${{ steps.tag.outputs.version }}
tag: ${{ steps.tag.outputs.version }}
draft: true draft: true
body_path: changelog.md bodyFile: release-notes.md
generate_release_notes: true allowUpdates: true
build_linux: build_linux:
name: Linux name: Linux
needs: release needs: release
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: ghcr.io/lite-xl/lite-xl-build-box:latest
env:
CC: gcc
CXX: g++
steps: steps:
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV" echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV"
- uses: actions/checkout@v3 echo "CCACHE_DIR=$PWD/.ccache" >> $GITHUB_ENV
- name: Checkout code
uses: actions/checkout@v4
# disabled because this will break our own Python install # disabled because this will break our own Python install
- name: Python Setup - name: Python Setup
if: false if: false
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: 3.9 python-version: "3.11"
# disabled because the container has up-to-date packages # disabled because the container has up-to-date packages
- name: Update Packages - name: Update Packages
@ -83,23 +92,34 @@ jobs:
sudo apt-get install -y ccache sudo apt-get install -y ccache
- name: Build Portable - name: Build Portable
run: | uses: docker://ghcr.io/lite-xl/lite-xl-build-box-manylinux:v3
bash --version
bash scripts/build.sh --debug --forcefallback --portable --release
- name: Package Portables
run: |
bash scripts/package.sh --version ${INSTALL_REF} --debug --binary --release
bash scripts/package.sh --version ${INSTALL_REF} --debug --addons --binary --release
- name: Build AppImages
run: |
bash scripts/appimage.sh --debug --static --version ${INSTALL_REF} --release
bash scripts/appimage.sh --debug --nobuild --addons --version ${INSTALL_REF}
- name: Upload Files
uses: softprops/action-gh-release@v1
with: with:
tag_name: ${{ needs.release.outputs.version }} entrypoint: /entrypoint.sh
draft: true args: |
files: | bash --version
bash scripts/build.sh --debug --forcefallback --portable --release
- name: Package Portables
uses: docker://ghcr.io/lite-xl/lite-xl-build-box-manylinux:v3
with:
entrypoint: /entrypoint.sh
args: |
bash scripts/package.sh --version ${INSTALL_REF} --debug --binary --release
bash scripts/package.sh --version ${INSTALL_REF} --debug --addons --binary --release
- name: Build AppImages
uses: docker://ghcr.io/lite-xl/lite-xl-build-box-manylinux:v3
with:
entrypoint: /entrypoint.sh
args: |
bash scripts/appimage.sh --debug --static --version ${INSTALL_REF} --release
bash scripts/appimage.sh --debug --nobuild --addons --version ${INSTALL_REF}
- name: Upload Artifacts
uses: actions/upload-artifact@v4
with:
name: Linux Artifacts
path: |
lite-xl-${{ env.INSTALL_REF }}-linux-x86_64-portable.tar.gz lite-xl-${{ env.INSTALL_REF }}-linux-x86_64-portable.tar.gz
lite-xl-${{ env.INSTALL_REF }}-addons-linux-x86_64-portable.tar.gz lite-xl-${{ env.INSTALL_REF }}-addons-linux-x86_64-portable.tar.gz
LiteXL-${{ env.INSTALL_REF }}-x86_64.AppImage LiteXL-${{ env.INSTALL_REF }}-x86_64.AppImage
@ -108,13 +128,17 @@ jobs:
build_macos: build_macos:
name: macOS name: macOS
needs: release needs: release
runs-on: macos-11
strategy: strategy:
matrix: matrix:
arch: [x86_64, arm64] config:
- { arch: x86_64, runner: macos-13 } # macos-13 uses x86_64
- { arch: arm64, runner: macos-14 } # macos-14 / latest uses M1
runs-on: ${{ matrix.config.runner }}
env: env:
CC: clang CC: clang
CXX: clang++ CXX: clang++
steps: steps:
- name: System Information - name: System Information
run: | run: |
@ -122,48 +146,50 @@ jobs:
bash --version bash --version
gcc -v gcc -v
xcodebuild -version xcodebuild -version
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV" echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV"
echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-macos-${{ matrix.arch }}" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-macos-${{ matrix.config.arch }}" >> "$GITHUB_ENV"
echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-macos-${{ matrix.arch }}" >> "$GITHUB_ENV" echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-macos-${{ matrix.config.arch }}" >> "$GITHUB_ENV"
if [[ $(uname -m) != ${{ matrix.arch }} ]]; then echo "ARCH=--cross-arch ${{ matrix.arch }}" >> "$GITHUB_ENV"; fi
- uses: actions/checkout@v3 - name: Checkout code
uses: actions/checkout@v4
- name: Python Setup - name: Python Setup
uses: actions/setup-python@v4 uses: actions/setup-python@v5
with: with:
python-version: 3.9 python-version: "3.11"
- name: Install Dependencies - name: Install Dependencies
run: bash scripts/install-dependencies.sh --debug run: |
brew install bash md5sha1sum
pip install meson ninja dmgbuild
- name: Build - name: Build
run: | run: |
bash --version bash --version
bash scripts/build.sh --bundle --debug --forcefallback --release $ARCH bash scripts/build.sh --bundle --debug --forcefallback --release
- name: Create DMG Image - name: Create DMG Image
run: | run: |
bash scripts/package.sh --version ${INSTALL_REF} $ARCH --debug --dmg --release bash scripts/package.sh --version ${INSTALL_REF} --debug --dmg --release
bash scripts/package.sh --version ${INSTALL_REF} $ARCH --debug --addons --dmg --release bash scripts/package.sh --version ${INSTALL_REF} --debug --addons --dmg --release
- name: Upload Artifacts - name: Upload Artifacts
uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v4
with: with:
name: macOS DMG Images name: macOS DMG Images (${{ matrix.config.arch }})
path: | path: |
${{ env.INSTALL_NAME }}.dmg ${{ env.INSTALL_NAME }}.dmg
${{ env.INSTALL_NAME_ADDONS }}.dmg ${{ env.INSTALL_NAME_ADDONS }}.dmg
- name: Upload Files
uses: softprops/action-gh-release@v1
with:
tag_name: ${{ needs.release.outputs.version }}
draft: true
files: |
${{ env.INSTALL_NAME }}.dmg
${{ env.INSTALL_NAME_ADDONS }}.dmg
build_macos_universal: build_macos_universal:
name: macOS (Universal) name: macOS (Universal)
needs: [release, build_macos] needs: [release, build_macos]
runs-on: macos-11 runs-on: macos-14
steps: steps:
- name: System Information - name: System Information
run: | run: |
@ -171,40 +197,48 @@ jobs:
bash --version bash --version
gcc -v gcc -v
xcodebuild -version xcodebuild -version
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_BASE=lite-xl-${{ needs.release.outputs.version }}-macos" >> "$GITHUB_ENV" echo "INSTALL_BASE=lite-xl-${{ needs.release.outputs.version }}-macos" >> "$GITHUB_ENV"
echo "INSTALL_BASE_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-macos" >> "$GITHUB_ENV" echo "INSTALL_BASE_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-macos" >> "$GITHUB_ENV"
- uses: actions/checkout@v2
- uses: actions/checkout@v4
- name: Download Artifacts - name: Download Artifacts
uses: actions/download-artifact@v3 uses: actions/download-artifact@v4
id: download id: download
with: with:
name: macOS DMG Images pattern: macOS DMG Images *
merge-multiple: true
path: dmgs-original path: dmgs-original
- name: Python Setup - name: Python Setup
uses: actions/setup-python@v2 uses: actions/setup-python@v5
with: with:
python-version: 3.9 python-version: "3.11"
- name: Install dmgbuild - name: Install dmgbuild
run: pip install dmgbuild run: pip install dmgbuild
- name: Prepare DMG Images - name: Prepare DMG Images
run: | run: |
mkdir -p dmgs-addons dmgs-normal mkdir -p dmgs-addons dmgs-normal
mv -v "${{ steps.download.outputs.download-path }}/$INSTALL_BASE-"{x86_64,arm64}.dmg dmgs-normal mv -v "${{ steps.download.outputs.download-path }}/$INSTALL_BASE-"{x86_64,arm64}.dmg dmgs-normal
mv -v "${{ steps.download.outputs.download-path }}/$INSTALL_BASE_ADDONS-"{x86_64,arm64}.dmg dmgs-addons mv -v "${{ steps.download.outputs.download-path }}/$INSTALL_BASE_ADDONS-"{x86_64,arm64}.dmg dmgs-addons
- name: Create Universal DMGs - name: Create Universal DMGs
run: | run: |
bash --version bash --version
bash scripts/make-universal-binaries.sh dmgs-normal "$INSTALL_BASE-universal" bash scripts/make-universal-binaries.sh dmgs-normal "$INSTALL_BASE-universal"
bash scripts/make-universal-binaries.sh dmgs-addons "$INSTALL_BASE_ADDONS-universal" bash scripts/make-universal-binaries.sh dmgs-addons "$INSTALL_BASE_ADDONS-universal"
- name: Upload Files
uses: softprops/action-gh-release@v1 - name: Upload Artifacts
uses: actions/upload-artifact@v4
with: with:
tag_name: ${{ needs.release.outputs.version }} name: macOS DMG Images (Universal)
draft: true path: |
files: |
${{ env.INSTALL_BASE }}-universal.dmg ${{ env.INSTALL_BASE }}-universal.dmg
${{ env.INSTALL_BASE_ADDONS }}-universal.dmg ${{ env.INSTALL_BASE_ADDONS }}-universal.dmg
@ -214,54 +248,97 @@ jobs:
runs-on: windows-2019 runs-on: windows-2019
strategy: strategy:
matrix: matrix:
msystem: [MINGW32, MINGW64] config:
- { msystem: MINGW32, arch: i686 }
- { msystem: MINGW64, arch: x86_64 }
defaults: defaults:
run: run:
shell: msys2 {0} shell: msys2 {0}
steps: steps:
- uses: actions/checkout@v3 - name: Checkout code
- uses: msys2/setup-msys2@v2 uses: actions/checkout@v4
- name: Setup MSYS2
uses: msys2/setup-msys2@v2
with: with:
msystem: ${{ matrix.msystem }} msystem: ${{ matrix.config.msystem }}
update: true update: true
install: >- install: >-
base-devel base-devel
git git
zip zip
unzip
mingw-w64-${{ matrix.config.arch }}-gcc
mingw-w64-${{ matrix.config.arch }}-meson
mingw-w64-${{ matrix.config.arch }}-ninja
mingw-w64-${{ matrix.config.arch }}-ca-certificates
mingw-w64-${{ matrix.config.arch }}-ntldd
- name: Set Environment Variables - name: Set Environment Variables
run: | run: |
echo "$HOME/.local/bin" >> "$GITHUB_PATH" echo "$HOME/.local/bin" >> "$GITHUB_PATH"
echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV" echo "INSTALL_REF=${{ needs.release.outputs.version }}" >> "$GITHUB_ENV"
if [[ "${MSYSTEM}" == "MINGW64" ]]; then if [[ "${MSYSTEM}" == "MINGW64" ]]; then
echo "BUILD_ARCH=x86_64" >> "$GITHUB_ENV"
echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-windows-x86_64" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-windows-x86_64" >> "$GITHUB_ENV"
echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-windows-x86_64" >> "$GITHUB_ENV" echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-windows-x86_64" >> "$GITHUB_ENV"
else else
echo "BUILD_ARCH=i686" >> "$GITHUB_ENV"
echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-windows-i686" >> "$GITHUB_ENV" echo "INSTALL_NAME=lite-xl-${{ needs.release.outputs.version }}-windows-i686" >> "$GITHUB_ENV"
echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-windows-i686" >> "$GITHUB_ENV" echo "INSTALL_NAME_ADDONS=lite-xl-${{ needs.release.outputs.version }}-addons-windows-i686" >> "$GITHUB_ENV"
fi fi
- name: Install Dependencies - name: Install Dependencies
if: false
run: bash scripts/install-dependencies.sh --debug run: bash scripts/install-dependencies.sh --debug
- name: Build - name: Build
run: | run: |
bash --version bash --version
bash scripts/build.sh -U --debug --forcefallback --release bash scripts/build.sh -U --debug --forcefallback --release
- name: Package - name: Package
run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary --release run: bash scripts/package.sh --version ${INSTALL_REF} --debug --binary --release
- name: Build Installer - name: Build Installer
run: bash scripts/innosetup/innosetup.sh --debug --version ${INSTALL_REF} run: bash scripts/innosetup/innosetup.sh --debug --version ${INSTALL_REF}
- name: Package With Addons - name: Package With Addons
run: bash scripts/package.sh --version ${INSTALL_REF} --debug --addons --binary --release run: bash scripts/package.sh --version ${INSTALL_REF} --debug --addons --binary --release
- name: Build Installer With Addons - name: Build Installer With Addons
run: bash scripts/innosetup/innosetup.sh --debug --version ${INSTALL_REF} --addons run: bash scripts/innosetup/innosetup.sh --debug --version ${INSTALL_REF} --addons
- name: Upload Files
uses: softprops/action-gh-release@v1 - name: Upload Artifacts
uses: actions/upload-artifact@v4
with: with:
tag_name: ${{ needs.release.outputs.version }} name: Windows Builds (${{ matrix.config.msystem }})
draft: true path: |
files: |
${{ env.INSTALL_NAME }}.zip ${{ env.INSTALL_NAME }}.zip
${{ env.INSTALL_NAME_ADDONS }}.zip ${{ env.INSTALL_NAME_ADDONS }}.zip
LiteXL-${{ env.INSTALL_REF }}-${{ env.BUILD_ARCH }}-setup.exe LiteXL-${{ env.INSTALL_REF }}-${{ matrix.config.arch }}-setup.exe
LiteXL-${{ env.INSTALL_REF }}-addons-${{ env.BUILD_ARCH }}-setup.exe LiteXL-${{ env.INSTALL_REF }}-addons-${{ matrix.config.arch }}-setup.exe
upload_artifacts:
name: Upload Release Artifacts
runs-on: ubuntu-latest
needs:
[release, build_linux, build_macos, build_macos_universal, build_windows_msys2]
permissions:
contents: write
steps:
- name: Download Artifacts
uses: actions/download-artifact@v4
with:
merge-multiple: true
- name: Update Release
uses: ncipollo/release-action@v1
with:
tag: ${{ needs.release.outputs.version }}
omitBodyDuringUpdate: true
omitDraftDuringUpdate: true
omitNameDuringUpdate: true
omitPrereleaseDuringUpdate: true
allowUpdates: true
artifacts: "*.exe,*.zip,*.tar.gz,*.dmg,*.AppImage"


@ -1,8 +0,0 @@
-CC="${CC:-gcc}"
-CXX="${CXX:-g++}"
-CFLAGS=
-CXXFLAGS=
-LDFLAGS=
-BUILD_TYPE=Release
-packages=(pcre2 freetype2 sdl2 lua)


@ -1,5 +1,152 @@
# Changes Log
## [2.1.6] - 2024-11-29
This release introduces a new icon for macOS, improves the performance of the renderer,
and adds syntax highlighting support for CUDA, along with QOL fixes and improvements.
### Features
* Add CUDA syntax highlighting support
([#1848](https://github.com/lite-xl/lite-xl/pull/1848))
* Add macOS-specific application icon
([#1844](https://github.com/lite-xl/lite-xl/pull/1844))
* Add keyboard shortcut to tooltips in ToolbarView
([#1880](https://github.com/lite-xl/lite-xl/pull/1880))
* Improve projectsearch (status at the top, horizontal scrolling, ellipsis)
([#1876](https://github.com/lite-xl/lite-xl/pull/1876))
### Fixes
* Correctly free SDL_Texture and SDL_Renderer
([#1850](https://github.com/lite-xl/lite-xl/pull/1850))
* Fix a minor typo from the merge of #1854 for Windows builds
* Fix multi-type usage in delimited patterns
([#1740](https://github.com/lite-xl/lite-xl/pull/1740))
* Fix appimage cd error and use static appimage runtime
([#1924](https://github.com/lite-xl/lite-xl/pull/1924))
### Other Changes
* Rewrite glyph cache
([#1845](https://github.com/lite-xl/lite-xl/pull/1845))
* Use lite-xl-build-box-manylinux
([#1877](https://github.com/lite-xl/lite-xl/pull/1877))
* Remove unused calls to system.absolute_path()
([#1895](https://github.com/lite-xl/lite-xl/pull/1895))
* Remove lhelper script, build configuration and dependency support
([#1906](https://github.com/lite-xl/lite-xl/pull/1906))
* Refactor how arguments are handled in process.start()
([#1854](https://github.com/lite-xl/lite-xl/pull/1854))
* Add a proper name to EmptyView
([#1569](https://github.com/lite-xl/lite-xl/pull/1569))
* Format renderer font scale code to be actually readable
([#1921](https://github.com/lite-xl/lite-xl/pull/1921))
* Update PCRE2 wrap
([#1927](https://github.com/lite-xl/lite-xl/pull/1927))
* Use meson datadir as lite_datadir base
([#1939](https://github.com/lite-xl/lite-xl/pull/1939))
* Convert unix style paths literals into meson path segments
([#1938](https://github.com/lite-xl/lite-xl/pull/1938))
## [2.1.5] - 2024-06-29
This release addresses several bugs from upstream dependencies and
improves the stability and usability of the program.
### Features
* New macOS installer background
([#1816](https://github.com/lite-xl/lite-xl/pull/1816))
* Improve number highlighting for `language_c`
([#1752](https://github.com/lite-xl/lite-xl/pull/1752))
* Backport number highlighting improvements to `language_cpp`
([#1818](https://github.com/lite-xl/lite-xl/pull/1818))
* Support binary integer literals for `language_cpp`
([#1819](https://github.com/lite-xl/lite-xl/pull/1819))
* Improve syntax highlighting for `language_python`
([#1723](https://github.com/lite-xl/lite-xl/pull/1723))
* Support for drag-and-drop into Dock and "Open with" menu in macOS
([#1822](https://github.com/lite-xl/lite-xl/pull/1822))
* Support `static constexpr` syntax in `language_cpp`
([#1806](https://github.com/lite-xl/lite-xl/pull/1806))
### Fixes
* Fix removing threads when iterating over `core.threads`
([#1794](https://github.com/lite-xl/lite-xl/pull/1794))
* Fix dirmonitor backend selection
([#1790](https://github.com/lite-xl/lite-xl/pull/1790))
* Fix clipboard removing newlines on Windows
([#1788](https://github.com/lite-xl/lite-xl/pull/1788))
* Change co_wait to co_await in `language_cpp`
([#1800](https://github.com/lite-xl/lite-xl/pull/1800))
* Fix font scale on monitor change when `RENDERER` backend is used
([#1650](https://github.com/lite-xl/lite-xl/pull/1650))
* Avoid calling the change callback multiple times in the same notification
([#1824](https://github.com/lite-xl/lite-xl/pull/1824))
* Fix autoreload reloading files too soon and missing some updates
([#1823](https://github.com/lite-xl/lite-xl/pull/1823))
* Fix drag-and-drop multiple folders into Dock icon in macOS
([#1828](https://github.com/lite-xl/lite-xl/pull/1828))
* Fix `Doc:merge_cursors()` accepting selection table index instead of selection index
([#1833](https://github.com/lite-xl/lite-xl/pull/1833))
* Fix `Doc:merge_cursors()` table index calculation
([#1834](https://github.com/lite-xl/lite-xl/pull/1834))
### Other Changes
* Add functionality to generate release notes
([#1774](https://github.com/lite-xl/lite-xl/pull/1774))
* Fix typo in release note template
([#1801](https://github.com/lite-xl/lite-xl/pull/1801))
* Update action dependencies
([#1724](https://github.com/lite-xl/lite-xl/pull/1724))
* Update labeler action config
([#1805](https://github.com/lite-xl/lite-xl/pull/1805))
* Update macOS runner images
([#1804](https://github.com/lite-xl/lite-xl/pull/1804))
* Update macOS copyright notice
([#1815](https://github.com/lite-xl/lite-xl/pull/1815))
* Update SDL2 and PCRE2
([#1812](https://github.com/lite-xl/lite-xl/pull/1812))
## [2.1.4] - 2024-04-16
This release addresses severe bugs not found in previous releases,
@ -1504,6 +1651,8 @@ A new global variable `USERDIR` is exposed to point to the user's directory.
- subpixel font rendering with gamma correction
[2.1.6]: https://github.com/lite-xl/lite-xl/releases/tag/v2.1.6
[2.1.5]: https://github.com/lite-xl/lite-xl/releases/tag/v2.1.5
[2.1.4]: https://github.com/lite-xl/lite-xl/releases/tag/v2.1.4
[2.1.3]: https://github.com/lite-xl/lite-xl/releases/tag/v2.1.3
[2.1.2]: https://github.com/lite-xl/lite-xl/releases/tag/v2.1.2


@ -249,7 +249,8 @@ function Doc:set_selection(line1, col1, line2, col2, swap)
 end

 function Doc:merge_cursors(idx)
-  for i = (idx or (#self.selections - 3)), (idx or 5), -4 do
+  local table_index = idx and (idx - 1) * 4 + 1
+  for i = (table_index or (#self.selections - 3)), (table_index or 5), -4 do
     for j = 1, i - 4, 4 do
       if self.selections[i] == self.selections[j] and
          self.selections[i+1] == self.selections[j+1] then
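The conversion from a selection index to a position in the flat `selections` table is the core of this fix, so here is a minimal standalone sketch of the arithmetic (illustration only, not part of the patch):

-- Each selection is stored as four integers: line1, col1, line2, col2.
-- Selection idx therefore starts at flat index (idx - 1) * 4 + 1.
local function selection_table_index(idx)
  return idx and (idx - 1) * 4 + 1
end

assert(selection_table_index(1) == 1)  -- first selection -> slots 1..4
assert(selection_table_index(3) == 9)  -- third selection -> slots 9..12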


@ -6,6 +6,14 @@ local View = require "core.view"
 ---@field super core.view
 local EmptyView = View:extend()

+function EmptyView:get_name()
+  return "Get Started"
+end
+
+function EmptyView:get_filename()
+  return ""
+end
+
 local function draw_text(x, y, color)
   local lines = {
     { fmt = "%s to run a command", cmd = "core:find-command" },


@ -1092,8 +1092,14 @@ function core.show_title_bar(show)
 end

+local thread_counter = 0
 function core.add_thread(f, weak_ref, ...)
-  local key = weak_ref or #core.threads + 1
+  local key = weak_ref
+  if not key then
+    thread_counter = thread_counter + 1
+    key = thread_counter
+  end
+  assert(core.threads[key] == nil, "Duplicate thread reference")
   local args = {...}
   local fn = function() return core.try(f, table.unpack(args)) end
   core.threads[key] = { cr = coroutine.create(fn), wake = 0 }
@ -1266,20 +1272,7 @@ function core.on_event(type, ...)
   elseif type == "minimized" or type == "maximized" or type == "restored" then
     core.window_mode = type == "restored" and "normal" or type
   elseif type == "filedropped" then
-    if not core.root_view:on_file_dropped(...) then
-      local filename, mx, my = ...
-      local info = system.get_file_info(filename)
-      if info and info.type == "dir" then
-        system.exec(string.format("%q %q", EXEFILE, filename))
-      else
-        local ok, doc = core.try(core.open_doc, filename)
-        if ok then
-          local node = core.root_view.root_node:get_child_overlapping_point(mx, my)
-          node:set_active_view(node.active_view)
-          core.root_view:open_doc(doc)
-        end
-      end
-    end
+    core.root_view:on_file_dropped(...)
   elseif type == "focuslost" then
     core.root_view:on_focus_lost(...)
   elseif type == "quit" then
@ -1356,16 +1349,20 @@ local run_threads = coroutine.wrap(function()
     local max_time = 1 / config.fps - 0.004
     local minimal_time_to_wake = math.huge

+    local threads = {}
+    -- We modify core.threads while iterating, both by removing dead threads,
+    -- and by potentially adding more threads while we yielded early,
+    -- so we need to extract the threads list and iterate over that instead.
     for k, thread in pairs(core.threads) do
-      -- run thread
-      if thread.wake < system.get_time() then
+      threads[k] = thread
+    end
+
+    for k, thread in pairs(threads) do
+      -- Run thread if it wasn't deleted externally and it's time to resume it
+      if core.threads[k] and thread.wake < system.get_time() then
         local _, wait = assert(coroutine.resume(thread.cr))
         if coroutine.status(thread.cr) == "dead" then
-          if type(k) == "number" then
-            table.remove(core.threads, k)
-          else
-            core.threads[k] = nil
-          end
+          core.threads[k] = nil
         elseif wait then
           thread.wake = system.get_time() + wait
           minimal_time_to_wake = math.min(minimal_time_to_wake, wait)
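A hedged usage sketch of the reworked `core.add_thread` (names such as `my_owner` are invented; this shows intended usage, it is not code from the patch):

local core = require "core"

-- Without a weak reference, the thread now receives a unique key from
-- thread_counter rather than #core.threads + 1, so removing a finished
-- thread no longer shifts the keys of the remaining ones.
core.add_thread(function()
  while true do
    coroutine.yield(1)  -- ask to be woken again in roughly one second
  end
end)

-- With a weak reference the reference itself is the key, and the new
-- assert fires if the same reference is registered again while its
-- previous coroutine is still alive.
local my_owner = {}  -- hypothetical owner object
core.add_thread(function() coroutine.yield(5) end, my_owner)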

data/core/process.lua (new file)

@ -0,0 +1,50 @@
local function env_key(str)
if PLATFORM == "Windows" then return str:upper() else return str end
end
---Sorts the environment variable by its key, converted to uppercase.
---This is only needed on Windows.
local function compare_env(a, b)
return env_key(a:match("([^=]*)=")) < env_key(b:match("([^=]*)="))
end
local old_start = process.start
function process.start(command, options)
assert(type(command) == "table" or type(command) == "string", "invalid argument #1 to process.start(), expected string or table, got "..type(command))
assert(type(options) == "table" or type(options) == "nil", "invalid argument #2 to process.start(), expected table or nil, got "..type(options))
if PLATFORM == "Windows" then
if type(command) == "table" then
-- escape the arguments into a command line string
-- https://github.com/python/cpython/blob/48f9d3e3faec5faaa4f7c9849fecd27eae4da213/Lib/subprocess.py#L531
local arglist = {}
for _, v in ipairs(command) do
local backslash, arg = 0, {}
for c in v:gmatch(".") do
if c == "\\" then backslash = backslash + 1
elseif c == '"' then arg[#arg+1] = string.rep("\\", backslash * 2 + 1)..'"'; backslash = 0
else arg[#arg+1] = string.rep("\\", backslash) .. c; backslash = 0 end
end
arg[#arg+1] = string.rep("\\", backslash) -- add remaining backslashes
if #v == 0 or v:find("[\t\v\r\n ]") then arglist[#arglist+1] = '"'..table.concat(arg, "")..'"'
else arglist[#arglist+1] = table.concat(arg, "") end
end
command = table.concat(arglist, " ")
end
else
command = type(command) == "table" and command or { command }
end
if type(options) == "table" and options.env then
local user_env = options.env --[[@as table]]
options.env = function(system_env)
local final_env, envlist = {}, {}
for k, v in pairs(system_env) do final_env[env_key(k)] = k.."="..v end
for k, v in pairs(user_env) do final_env[env_key(k)] = k.."="..v end
for _, v in pairs(final_env) do envlist[#envlist+1] = v end
if PLATFORM == "Windows" then table.sort(envlist, compare_env) end
return table.concat(envlist, "\0").."\0\0"
end
end
return old_start(command, options)
end
return process
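For context, a hedged sketch of how the wrapped `process.start` might be called after this change (the command and the `cwd`/`env` values are illustrative assumptions, not part of the patch):

local core = require "core"

-- A table command is escaped into a single command-line string on Windows
-- and passed through unchanged elsewhere; a plain string is wrapped in a
-- table on non-Windows platforms. A table `env` is merged over the
-- inherited environment by the wrapper above.
local proc = process.start({ "git", "status", "--porcelain" }, {
  cwd = core.project_dir,
  env = { GIT_PAGER = "cat" },
})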


@ -24,6 +24,7 @@ function RootView:new()
     base_color = style.drag_overlay_tab,
     color = { table.unpack(style.drag_overlay_tab) } }
   self.drag_overlay_tab.to = { x = 0, y = 0, w = 0, h = 0 }
+  self.first_dnd_processed = false
 end
@ -319,7 +320,29 @@ end
 ---@return boolean
 function RootView:on_file_dropped(filename, x, y)
   local node = self.root_node:get_child_overlapping_point(x, y)
-  return node and node.active_view:on_file_dropped(filename, x, y)
+  local result = node and node.active_view:on_file_dropped(filename, x, y)
+  if result then return result end
+  local info = system.get_file_info(filename)
+  if info and info.type == "dir" then
+    if self.first_dnd_processed then
+      -- first update done, open in new window
+      system.exec(string.format("%q %q", EXEFILE, filename))
+    else
+      -- DND event before first update, this is sent by macOS when folder is dropped into the dock
+      core.confirm_close_docs(core.docs, function(dirpath)
+        core.open_folder_project(dirpath)
+      end, system.absolute_path(filename))
+      self.first_dnd_processed = true
+    end
+  else
+    local ok, doc = core.try(core.open_doc, filename)
+    if ok then
+      local node = core.root_view.root_node:get_child_overlapping_point(x, y)
+      node:set_active_view(node.active_view)
+      core.root_view:open_doc(doc)
+    end
+  end
+  return true
 end
@ -363,6 +386,9 @@ function RootView:update()
   self:update_drag_overlay()
   self:interpolate_drag_overlay(self.drag_overlay)
   self:interpolate_drag_overlay(self.drag_overlay_tab)
+  -- set this to true because at this point there are no dnd requests
+  -- that are caused by the initial dnd into dock user action
+  self.first_dnd_processed = true
 end


@ -1,5 +1,5 @@
 -- this file is used by lite-xl to setup the Lua environment when starting
-VERSION = "2.1.4r1"
+VERSION = "2.1.6r1"
 MOD_VERSION = "3"
 SCALE = tonumber(os.getenv("LITE_SCALE") or os.getenv("GDK_SCALE") or os.getenv("QT_SCALE_FACTOR")) or 1
@ -9,7 +9,7 @@ EXEDIR = EXEFILE:match("^(.+)[/\\][^/\\]+$")
 if MACOS_RESOURCES then
   DATADIR = MACOS_RESOURCES
 else
-  local prefix = os.getenv('LITE_PREFIX') or EXEDIR:match("^(.+)[/\\]bin$")
+  local prefix = EXEDIR:match("^(.+)[/\\]bin$")
   DATADIR = prefix and (prefix .. PATHSEP .. 'share' .. PATHSEP .. 'lite-xl') or (EXEDIR .. PATHSEP .. 'data')
 end
 USERDIR = (system.get_file_info(EXEDIR .. PATHSEP .. 'user') and (EXEDIR .. PATHSEP .. 'user'))
@ -46,6 +46,7 @@ table.unpack = table.unpack or unpack
 bit32 = bit32 or require "core.bit"
 require "core.utf8string"
+require "core.process"

 -- Because AppImages change the working directory before running the executable,
 -- we need to change it back to the original one.
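A quick sketch of how the simplified DATADIR resolution behaves; the paths and the helper function are illustrative only (the real code works on the globals above):

-- Installed layout: the executable sits in <prefix>/bin, so data is
-- looked up in <prefix>/share/lite-xl. Portable layout: no .../bin
-- suffix, so the `data` directory next to the binary is used instead.
local function resolve_datadir(exedir, pathsep)
  local prefix = exedir:match("^(.+)[/\\]bin$")
  return prefix and (prefix .. pathsep .. 'share' .. pathsep .. 'lite-xl')
                or (exedir .. pathsep .. 'data')
end

assert(resolve_datadir("/usr/local/bin", "/") == "/usr/local/share/lite-xl")
assert(resolve_datadir("/opt/lite-xl", "/")   == "/opt/lite-xl/data")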


@ -6,7 +6,6 @@ local tokenizer = {}
 local bad_patterns = {}

 local function push_token(t, type, text)
-  if not text or #text == 0 then return end
   type = type or "normal"
   local prev_type = t[#t-1]
   local prev_text = t[#t]
@ -284,7 +283,8 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
     -- continue trying to match the end pattern of a pair if we have a state set
     if current_pattern_idx > 0 then
       local p = current_syntax.patterns[current_pattern_idx]
-      local s, e = find_text(text, p, i, false, true)
+      local find_results = { find_text(text, p, i, false, true) }
+      local s, e = find_results[1], find_results[2]
       local cont = true
       -- If we're in subsyntax mode, always check to see if we end our syntax
@ -306,7 +306,12 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
       -- continue on as normal.
       if cont then
         if s then
-          push_token(res, p.type, text:usub(i, e))
+          -- Push remaining token before the end delimiter
+          if s > i then
+            push_token(res, p.type, text:usub(i, s - 1))
+          end
+          -- Push the end delimiter
+          push_tokens(res, current_syntax, p, text, find_results)
           set_subsyntax_pattern_idx(0)
           i = e + 1
         else


@ -1,7 +1,6 @@
 -- mod-version:3
 local core = require "core"
 local config = require "core.config"
-local style = require "core.style"
 local Doc = require "core.doc"
 local Node = require "core.node"
 local common = require "core.common"
@ -26,7 +25,7 @@ local times = setmetatable({}, { __mode = "k" })
 local visible = setmetatable({}, { __mode = "k" })

 local function get_project_doc_watch(doc)
-  for i, v in ipairs(core.project_directories) do
+  for _, v in ipairs(core.project_directories) do
     if doc.abs_filename:find(v.name, 1, true) == 1 then return v.watch end
   end
   return watch
@ -43,6 +42,34 @@ local function reload_doc(doc)
   core.log_quiet("Auto-reloaded doc \"%s\"", doc.filename)
 end

+local timers = setmetatable({}, { __mode = "k" })
+
+local function delayed_reload(doc, mtime)
+  if timers[doc] then
+    -- If mtime remains the same, there's no need to restart the timer
+    -- as we're waiting a full second anyways.
+    if not mtime or timers[doc].mtime ~= mtime then
+      timers[doc] = { last_trigger = system.get_time(), mtime = mtime }
+    end
+    return
+  end
+  timers[doc] = { last_trigger = system.get_time(), mtime = mtime }
+  core.add_thread(function()
+    local diff = system.get_time() - timers[doc].last_trigger
+    -- Wait a second before triggering a reload because we're using mtime
+    -- to determine if a file has changed, and on many systems it has a
+    -- resolution of 1 second.
+    while diff < 1 do
+      coroutine.yield(diff)
+      diff = system.get_time() - timers[doc].last_trigger
+    end
+    timers[doc] = nil
+    reload_doc(doc)
+  end)
+end
+
 local function check_prompt_reload(doc)
   if doc and doc.deferred_reload then
     core.nag_view:show("File Changed", doc.filename .. " has changed. Reload this file?", {
@ -71,7 +98,7 @@ function dirwatch:check(change_callback, ...)
     local info = system.get_file_info(doc.filename or "")
     if info and info.type == "file" and times[doc] ~= info.modified then
       if not doc:is_dirty() and not config.plugins.autoreload.always_show_nagview then
-        reload_doc(doc)
+        delayed_reload(doc, info.modified)
       else
         doc.deferred_reload = true
         if doc == core.active_view.doc then check_prompt_reload(doc) end
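To make the debounce explicit, a minimal pure-Lua simulation of the decision inside delayed_reload (no editor APIs; the timestamps and mtimes below are invented):

-- The pending one-second wait is restarted only when the reported mtime
-- actually changed; otherwise the existing timer is left alone.
local timer = nil
local function on_change(now, mtime)
  if timer then
    if not mtime or timer.mtime ~= mtime then
      timer = { last_trigger = now, mtime = mtime }
    end
    return "waiting"
  end
  timer = { last_trigger = now, mtime = mtime }
  return "scheduled"
end

print(on_change(0.00, 100))  --> scheduled
print(on_change(0.30, 100))  --> waiting (same mtime, timer untouched)
print(on_change(0.45, 101))  --> waiting (new mtime, wait restarted at 0.45)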


@ -1,6 +1,11 @@
 -- mod-version:3
 local syntax = require "core.syntax"

+-- integer suffix combinations as a regex
+local isuf = [[(?:[lL][uU]|ll[uU]|LL[uU]|[uU][lL]\b|[uU]ll|[uU]LL|[uU]|[lL]\b|ll|LL)?]]
+-- float suffix combinations as a Lua pattern / regex
+local fsuf = "[fFlL]?"
+
 syntax.add {
   name = "C",
   files = { "%.c$" },
@ -11,9 +16,13 @@ syntax.add {
{ pattern = { "/%*", "%*/" }, type = "comment" }, { pattern = { "/%*", "%*/" }, type = "comment" },
{ pattern = { '"', '"', '\\' }, type = "string" }, { pattern = { '"', '"', '\\' }, type = "string" },
{ pattern = { "'", "'", '\\' }, type = "string" }, { pattern = { "'", "'", '\\' }, type = "string" },
{ pattern = "0x%x+", type = "number" }, { regex = "0x[0-9a-fA-f]+"..isuf, type = "number" },
{ pattern = "%d+[%d%.eE]*f?", type = "number" }, { regex = "0()[0-7]+"..isuf, type = { "keyword", "number" } },
{ pattern = "%.?%d+f?", type = "number" }, { pattern = "%d+%.%d*[Ee]%d+"..fsuf, type = "number" },
{ pattern = "%d+[Ee]%d+"..fsuf, type = "number" },
{ pattern = "%d+%.%d*"..fsuf, type = "number" },
{ pattern = "%.%d+"..fsuf, type = "number" },
{ regex = "\\d+"..isuf, type = "number" },
{ pattern = "[%+%-=/%*%^%%<>!~|&]", type = "operator" }, { pattern = "[%+%-=/%*%^%%<>!~|&]", type = "operator" },
{ pattern = "##", type = "operator" }, { pattern = "##", type = "operator" },
{ pattern = "struct%s()[%a_][%w_]*", type = {"keyword", "keyword2"} }, { pattern = "struct%s()[%a_][%w_]*", type = {"keyword", "keyword2"} },


@ -1,12 +1,17 @@
 -- mod-version:3
 local syntax = require "core.syntax"

+-- integer suffix combinations as a regex
+local isuf = [[(?:[lL][uU]|ll[uU]|LL[uU]|[uU][lL]\b|[uU]ll|[uU]LL|[uU]|[lL]\b|ll|LL)?]]
+-- float suffix combinations as a Lua pattern / regex
+local fsuf = "[fFlL]?"
+
 syntax.add {
   name = "C++",
   files = {
     "%.h$", "%.inl$", "%.cpp$", "%.cc$", "%.C$", "%.cxx$",
     "%.c++$", "%.hh$", "%.H$", "%.hxx$", "%.hpp$", "%.h++$",
-    "%.ino$"
+    "%.ino$", "%.cu$", "%.cuh$"
   },
   comment = "//",
   block_comment = { "/*", "*/" },
@ -15,9 +20,14 @@ syntax.add {
{ pattern = { "/%*", "%*/" }, type = "comment" }, { pattern = { "/%*", "%*/" }, type = "comment" },
{ pattern = { '"', '"', '\\' }, type = "string" }, { pattern = { '"', '"', '\\' }, type = "string" },
{ pattern = { "'", "'", '\\' }, type = "string" }, { pattern = { "'", "'", '\\' }, type = "string" },
{ pattern = "0x%x+[%x']*", type = "number" }, { regex = "0x[0-9a-fA-f]+"..isuf, type = "number" },
{ pattern = "%d+[%d%.'eE]*f?", type = "number" }, { regex = "0b[01]+"..isuf, type = "number" },
{ pattern = "%.?%d+[%d']*f?", type = "number" }, { regex = "0()[0-7]+"..isuf, type = { "keyword", "number" } },
{ pattern = "%d+%.%d*[Ee]%d+"..fsuf, type = "number" },
{ pattern = "%d+[Ee]%d+"..fsuf, type = "number" },
{ pattern = "%d+%.%d*"..fsuf, type = "number" },
{ pattern = "%.%d+"..fsuf, type = "number" },
{ regex = "\\d+"..isuf, type = "number" },
{ pattern = "[%+%-=/%*%^%%<>!~|:&]", type = "operator" }, { pattern = "[%+%-=/%*%^%%<>!~|:&]", type = "operator" },
{ pattern = "##", type = "operator" }, { pattern = "##", type = "operator" },
{ pattern = "struct%s()[%a_][%w_]*", type = {"keyword", "keyword2"} }, { pattern = "struct%s()[%a_][%w_]*", type = {"keyword", "keyword2"} },
@ -28,6 +38,15 @@ syntax.add {
{ pattern = "static()%s+()inline", { pattern = "static()%s+()inline",
type = { "keyword", "normal", "keyword" } type = { "keyword", "normal", "keyword" }
}, },
{ pattern = "static()%s+()constexpr",
type = { "keyword", "normal", "keyword" }
},
{ pattern = "static()%s+()constinit",
type = { "keyword", "normal", "keyword" }
},
{ pattern = "static()%s+()consteval",
type = { "keyword", "normal", "keyword" }
},
{ pattern = "static()%s+()const", { pattern = "static()%s+()const",
type = { "keyword", "normal", "keyword" } type = { "keyword", "normal", "keyword" }
}, },
@ -133,7 +152,7 @@ syntax.add {
["this"] = "keyword", ["this"] = "keyword",
["thread_local"] = "keyword", ["thread_local"] = "keyword",
["requires"] = "keyword", ["requires"] = "keyword",
["co_wait"] = "keyword", ["co_await"] = "keyword",
["co_return"] = "keyword", ["co_return"] = "keyword",
["co_yield"] = "keyword", ["co_yield"] = "keyword",
["decltype"] = "keyword", ["decltype"] = "keyword",


@ -1,66 +1,188 @@
-- mod-version:3 -- mod-version:3
local syntax = require "core.syntax" local syntax = require "core.syntax"
local function table_merge(a, b)
local t = {}
for _, v in pairs(a) do table.insert(t, v) end
for _, v in pairs(b) do table.insert(t, v) end
return t
end
local python_symbols = {
["class"] = "keyword",
["finally"] = "keyword",
["is"] = "keyword",
["return"] = "keyword",
["continue"] = "keyword",
["for"] = "keyword",
["lambda"] = "keyword",
["try"] = "keyword",
["except"] = "keyword",
["def"] = "keyword",
["async"] = "keyword",
["await"] = "keyword",
["from"] = "keyword",
["nonlocal"] = "keyword",
["while"] = "keyword",
["and"] = "keyword",
["global"] = "keyword",
["not"] = "keyword",
["with"] = "keyword",
["as"] = "keyword",
["elif"] = "keyword",
["if"] = "keyword",
["or"] = "keyword",
["else"] = "keyword",
["match"] = "keyword",
["case"] = "keyword",
["import"] = "keyword",
["pass"] = "keyword",
["break"] = "keyword",
["in"] = "keyword",
["del"] = "keyword",
["raise"] = "keyword",
["yield"] = "keyword",
["assert"] = "keyword",
["self"] = "keyword2",
["None"] = "literal",
["True"] = "literal",
["False"] = "literal",
}
local python_fstring = {
patterns = {
{ pattern = "\\.", type = "string" },
{ pattern = '[^"\\{}\']+', type = "string" }
},
symbols = {}
}
local python_patterns = {
{ pattern = '[uUrR]%f["]', type = "keyword" },
{ pattern = { '[ruU]?"""', '"""', '\\' }, type = "string" },
{ pattern = { "[ruU]?'''", "'''", '\\' }, type = "string" },
{ pattern = { '[ruU]?"', '"', '\\' }, type = "string" },
{ pattern = { "[ruU]?'", "'", '\\' }, type = "string" },
{ pattern = { 'f"', '"', "\\" }, type = "string", syntax = python_fstring },
{ pattern = { "f'", "'", "\\" }, type = "string", syntax = python_fstring },
{ pattern = "%d+[%d%.eE_]*", type = "number" },
{ pattern = "0[xboXBO][%da-fA-F_]+", type = "number" },
{ pattern = "%.?%d+", type = "number" },
{ pattern = "%f[-%w_]-%f[%d%.]", type = "number" },
{ pattern = "[%+%-=/%*%^%%<>!~|&]", type = "operator" },
{ pattern = "[%a_][%w_]*%f[(]", type = "function" },
{ pattern = "[%a_][%w_]+", type = "symbol" },
}
local python_type = {
patterns = {
{ pattern = "|", type = "operator" },
{ pattern = "[%w_]+", type = "keyword2" },
{ pattern = "[%a_][%w_]+", type = "symbol" },
},
symbols = {
["None"] = "literal"
}
}
-- Insert this pattern afterwards so that the recursion can work.
-- It makes sure that the square brackets are well balanced when capturing the syntax
-- (so that something like Tuple[Tuple[int, str], float] is highlighted correctly).
table.insert(python_type.patterns, 1, { pattern = { "%[", "%]" }, syntax = python_type })
-- For things like this_list = other_list[a:b:c]
local not_python_type = {
patterns = python_patterns,
symbols = python_symbols
}
table.insert(not_python_type.patterns, 1, { pattern = { "%[", "%]" }, syntax = not_python_type })
table.insert(not_python_type.patterns, 1, { pattern = { "{", "}" }, syntax = not_python_type })
table.insert(python_fstring.patterns, 1, { pattern = { "{", "}" }, syntax = not_python_type })
local python_func = {
patterns = table_merge({
{ pattern = { "->", "%f[:]" }, type = "operator", syntax = python_type },
{ pattern = { ":%s*", "%f[^%[%]%w_]" }, syntax = python_type },
}, python_patterns),
symbols = python_symbols
}
table.insert(python_func.patterns, 1, { pattern = { "%(", "%)" }, syntax = python_func })
syntax.add { syntax.add {
name = "Python", name = "Python",
files = { "%.py$", "%.pyw$", "%.rpy$", "%.pyi$" }, files = { "%.py$", "%.pyw$", "%.rpy$", "%.pyi$" },
headers = "^#!.*[ /]python", headers = "^#!.*[ /]python",
comment = "#", comment = "#",
block_comment = { '"""', '"""' }, block_comment = { '"""', '"""' },
patterns = {
{ pattern = "#.*", type = "comment" }, patterns = table_merge({
{ pattern = { '^%s*"""', '"""' }, type = "comment" },
{ pattern = '[uUrR]%f["]', type = "keyword" }, { pattern = "#.*", type = "comment" },
{ pattern = "class%s+()[%a_][%w_]*", type = {"keyword", "keyword2"} }, { pattern = { '^%s*"""', '"""' }, type = "comment" },
{ pattern = { '[ruU]?"""', '"""'; '\\' }, type = "string" },
{ pattern = { "[ruU]?'''", "'''", '\\' }, type = "string" }, { pattern = { "%[", "%]" }, syntax = not_python_type },
{ pattern = { '[ruU]?"', '"', '\\' }, type = "string" }, { pattern = { "{", "}" }, syntax = not_python_type },
{ pattern = { "[ruU]?'", "'", '\\' }, type = "string" },
{ pattern = "-?0[xboXBO][%da-fA-F_]+",type = "number" }, { pattern = { "^%s*()def%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = python_func }, -- this and the following prevent one-liner highlight bugs
{ pattern = "-?%d+[%d%.eE_]*", type = "number" },
{ pattern = "-?%.?%d+", type = "number" }, { pattern = { "^%s*()for%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
{ pattern = "[%+%-=/%*%^%%<>!~|&]", type = "operator" }, { pattern = { "^%s*()if%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
{ pattern = "[%a_][%w_]*%f[(]", type = "function" }, { pattern = { "^%s*()elif%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
{ pattern = "[%a_][%w_]*", type = "symbol" }, { pattern = { "^%s*()while%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
}, { pattern = { "^%s*()match%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
symbols = { { pattern = { "^%s*()case%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
["class"] = "keyword", { pattern = { "^%s*()except%f[%s]", ":" }, type = { "normal", "keyword" }, syntax = not_python_type },
["finally"] = "keyword",
["is"] = "keyword", { pattern = "else():", type = { "keyword", "normal" } },
["return"] = "keyword", { pattern = "try():", type = { "keyword", "normal" } },
["continue"] = "keyword",
["for"] = "keyword", { pattern = "lambda()%s.+:", type = { "keyword", "normal" } },
["lambda"] = "keyword", { pattern = "class%s+()[%a_][%w_]+().*:", type = { "keyword", "keyword2", "normal" } },
["try"] = "keyword",
["def"] = "keyword",
["async"] = "keyword", { pattern = { ":%s*", "%f[^%[%]%w_]"}, syntax = python_type },
["await"] = "keyword",
["from"] = "keyword", }, python_patterns),
["nonlocal"] = "keyword",
["while"] = "keyword", symbols = python_symbols
["and"] = "keyword",
["global"] = "keyword",
["not"] = "keyword",
["with"] = "keyword",
["as"] = "keyword",
["elif"] = "keyword",
["if"] = "keyword",
["or"] = "keyword",
["else"] = "keyword",
["match"] = "keyword",
["case"] = "keyword",
["import"] = "keyword",
["pass"] = "keyword",
["break"] = "keyword",
["except"] = "keyword",
["in"] = "keyword",
["del"] = "keyword",
["raise"] = "keyword",
["yield"] = "keyword",
["assert"] = "keyword",
["self"] = "keyword2",
["None"] = "literal",
["True"] = "literal",
["False"] = "literal",
}
} }
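The balanced-bracket trick above (inserting a `{ "%[", "%]" }` pattern that points back at its own table) is what lets nested annotations such as `Tuple[Tuple[int, str], float]` be captured as a single `python_type` region. A minimal, self-contained sketch of the same recursion, independent of the plugin code:

-- minimal sketch of a self-recursive sub-syntax: the bracket pattern is inserted
-- *after* the table exists so that it can reference itself
local type_syntax = {
  patterns = {
    { pattern = "[%a_][%w_]*", type = "keyword2" },
    { pattern = ",%s*",        type = "normal"   },
  },
  symbols = {},
}
-- recursive entry: everything between balanced [ and ] is parsed with type_syntax again
table.insert(type_syntax.patterns, 1, { pattern = { "%[", "%]" }, syntax = type_syntax })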


@ -15,6 +15,7 @@ function ResultsView:new(path, text, fn)
ResultsView.super.new(self) ResultsView.super.new(self)
self.scrollable = true self.scrollable = true
self.brightness = 0 self.brightness = 0
self.max_h_scroll = 0
self:begin_search(path, text, fn) self:begin_search(path, text, fn)
end end
@ -34,7 +35,9 @@ local function find_all_matches_in_file(t, filename, fn)
-- Insert maximum 256 characters. If we insert more, for compiled files, which can have very long lines -- Insert maximum 256 characters. If we insert more, for compiled files, which can have very long lines
-- things tend to get sluggish. If our line is longer than 80 characters, begin to truncate the thing. -- things tend to get sluggish. If our line is longer than 80 characters, begin to truncate the thing.
local start_index = math.max(s - 80, 1) local start_index = math.max(s - 80, 1)
table.insert(t, { file = filename, text = (start_index > 1 and "..." or "") .. line:sub(start_index, 256 + start_index), line = n, col = s }) local text = (start_index > 1 and "..." or "") .. line:sub(start_index, 256 + start_index)
if #line > 256 + start_index then text = text .. "..." end
table.insert(t, { file = filename, text = text, line = n, col = s })
core.redraw = true core.redraw = true
end end
if n % 100 == 0 then coroutine.yield() end if n % 100 == 0 then coroutine.yield() end
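The truncation above keeps result rows cheap to draw: at most 256 characters are inserted, the window is rebased to start at most 80 characters before the match, and ellipses mark both cut ends. A standalone sketch of the same logic (the function name is illustrative, not the plugin's):

-- minimal sketch: truncate a matched line around column `s` for display
local function truncate_result_line(line, s)
  local start_index = math.max(s - 80, 1)                        -- keep some left context
  local text = (start_index > 1 and "..." or "")
               .. line:sub(start_index, 256 + start_index)       -- cap inserted characters
  if #line > 256 + start_index then text = text .. "..." end     -- mark the right-hand cut
  return text
end

-- e.g. truncate_result_line(("x"):rep(1000), 100) keeps a 257-character window wrapped in "..."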
@ -133,10 +136,16 @@ function ResultsView:get_scrollable_size()
end end
function ResultsView:get_h_scrollable_size()
return self.max_h_scroll
end
function ResultsView:get_visible_results_range() function ResultsView:get_visible_results_range()
local lh = self:get_line_height() local lh = self:get_line_height()
local oy = self:get_results_yoffset() local oy = self:get_results_yoffset()
local min = math.max(1, math.floor((self.scroll.y - oy) / lh)) local min = self.scroll.y+oy-style.font:get_height()
min = math.max(1, math.floor(min / lh))
return min, min + math.floor(self.size.y / lh) + 1 return min, min + math.floor(self.size.y / lh) + 1
end end
@ -150,7 +159,8 @@ function ResultsView:each_visible_result()
for i = min, max do for i = min, max do
local item = self.results[i] local item = self.results[i]
if not item then break end if not item then break end
coroutine.yield(i, item, x, y, self.size.x, lh) local _, _, w = self:get_content_bounds()
coroutine.yield(i, item, x, y, w, lh)
y = y + lh y = y + lh
end end
end) end)
@ -159,9 +169,9 @@ end
function ResultsView:scroll_to_make_selected_visible() function ResultsView:scroll_to_make_selected_visible()
local h = self:get_line_height() local h = self:get_line_height()
local y = self:get_results_yoffset() + h * (self.selected_idx - 1) local y = h * (self.selected_idx - 1)
self.scroll.to.y = math.min(self.scroll.to.y, y) self.scroll.to.y = math.min(self.scroll.to.y, y)
self.scroll.to.y = math.max(self.scroll.to.y, y + h - self.size.y) self.scroll.to.y = math.max(self.scroll.to.y, y + h - self.size.y + self:get_results_yoffset())
end end
@ -169,7 +179,13 @@ function ResultsView:draw()
self:draw_background(style.background) self:draw_background(style.background)
-- status -- status
local ox, oy = self:get_content_offset() local ox, oy = self.position.x, self.position.y
local yoffset = self:get_results_yoffset()
renderer.draw_rect(self.position.x, self.position.y, self.size.x, yoffset, style.background)
if self.scroll.y ~= 0 then
renderer.draw_rect(self.position.x, self.position.y+yoffset, self.size.x, style.divider_size, style.divider)
end
local x, y = ox + style.padding.x, oy + style.padding.y local x, y = ox + style.padding.x, oy + style.padding.y
local files_number = core.project_files_number() local files_number = core.project_files_number()
local per = common.clamp(files_number and self.last_file_idx / files_number or 1, 0, 1) local per = common.clamp(files_number and self.last_file_idx / files_number or 1, 0, 1)
@ -191,7 +207,6 @@ function ResultsView:draw()
renderer.draw_text(style.font, text, x, y, color) renderer.draw_text(style.font, text, x, y, color)
-- horizontal line -- horizontal line
local yoffset = self:get_results_yoffset()
local x = ox + style.padding.x local x = ox + style.padding.x
local w = self.size.x - style.padding.x * 2 local w = self.size.x - style.padding.x * 2
local h = style.divider_size local h = style.divider_size
@ -202,6 +217,8 @@ function ResultsView:draw()
end end
-- results -- results
local _, _, bw = self:get_content_bounds()
core.push_clip_rect(ox, oy+yoffset + style.divider_size, bw, self.size.y-yoffset)
local y1, y2 = self.position.y, self.position.y + self.size.y local y1, y2 = self.position.y, self.position.y + self.size.y
for i, item, x,y,w,h in self:each_visible_result() do for i, item, x,y,w,h in self:each_visible_result() do
local color = style.text local color = style.text
@ -213,7 +230,9 @@ function ResultsView:draw()
local text = string.format("%s at line %d (col %d): ", item.file, item.line, item.col) local text = string.format("%s at line %d (col %d): ", item.file, item.line, item.col)
x = common.draw_text(style.font, style.dim, text, "left", x, y, w, h) x = common.draw_text(style.font, style.dim, text, "left", x, y, w, h)
x = common.draw_text(style.code_font, color, item.text, "left", x, y, w, h) x = common.draw_text(style.code_font, color, item.text, "left", x, y, w, h)
self.max_h_scroll = math.max(self.max_h_scroll, x)
end end
core.pop_clip_rect()
self:draw_scrollbar() self:draw_scrollbar()
end end
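Horizontal scrolling here follows the usual View pattern: `get_h_scrollable_size` reports the content width, and the draw loop records the right-most x it reaches in `self.max_h_scroll` while drawing inside a clip rect. A hedged sketch of that pairing (`draw_one_result` is a hypothetical helper, not a method of this plugin):

-- sketch of the override pair used above
function ResultsView:get_h_scrollable_size()
  return self.max_h_scroll                       -- widest x reached while drawing
end

function ResultsView:draw_one_result(item, color, x, y, w, h)
  local loc = string.format("%s at line %d (col %d): ", item.file, item.line, item.col)
  x = common.draw_text(style.font, style.dim, loc, "left", x, y, w, h)
  x = common.draw_text(style.code_font, color, item.text, "left", x, y, w, h)
  self.max_h_scroll = math.max(self.max_h_scroll, x)  -- feeds get_h_scrollable_size()
end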


@ -3,6 +3,7 @@ local core = require "core"
local common = require "core.common" local common = require "core.common"
local command = require "core.command" local command = require "core.command"
local style = require "core.style" local style = require "core.style"
local keymap = require "core.keymap"
local View = require "core.view" local View = require "core.view"
local ToolbarView = View:extend() local ToolbarView = View:extend()
@ -110,7 +111,9 @@ function ToolbarView:on_mouse_moved(px, py, ...)
y_min, y_max = y, y + h y_min, y_max = y, y + h
if px > x and py > y and px <= x + w and py <= y + h then if px > x and py > y and px <= x + w and py <= y + h then
self.hovered_item = item self.hovered_item = item
core.status_view:show_tooltip(command.prettify_name(item.command)) local binding = keymap.get_binding(item.command)
local name = command.prettify_name(item.command)
core.status_view:show_tooltip(binding and { name, style.dim, " ", binding } or { name })
self.tooltip = true self.tooltip = true
return return
end end
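With this change the toolbar tooltip shows the command's keybinding next to its prettified name; `show_tooltip` accepts either a plain string or a table of text and colour fragments, which is what the `{ name, style.dim, " ", binding }` table relies on. A small sketch of building such a tooltip for an arbitrary command (the command name is only an example):

-- sketch: build a tooltip label that includes the command's keybinding, if any
local keymap  = require "core.keymap"
local command = require "core.command"
local style   = require "core.style"

local function tooltip_for(cmd)                    -- cmd e.g. "core:find-file"
  local binding = keymap.get_binding(cmd)          -- nil when nothing is bound
  local name = command.prettify_name(cmd)
  return binding and { name, style.dim, " ", binding } or { name }
end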


@ -359,9 +359,6 @@ function TreeView:draw()
self:draw_background(style.background2) self:draw_background(style.background2)
local _y, _h = self.position.y, self.size.y local _y, _h = self.position.y, self.size.y
local doc = core.active_view.doc
local active_filename = doc and system.absolute_path(doc.filename or "")
for item, x,y,w,h in self:each_item() do for item, x,y,w,h in self:each_item() do
if y + h >= _y and y < _y + _h then if y + h >= _y and y < _y + _h then
self:draw_item(item, self:draw_item(item,


@ -1,6 +1,6 @@
project('lite-xl', project('lite-xl',
['c'], ['c'],
version : '2.1.4', version : '2.1.6',
license : 'MIT', license : 'MIT',
meson_version : '>= 0.56', meson_version : '>= 0.56',
default_options : [ default_options : [
@ -123,7 +123,6 @@ if not get_option('source-only')
sdl_options += 'use_atomic=enabled' sdl_options += 'use_atomic=enabled'
sdl_options += 'use_threads=enabled' sdl_options += 'use_threads=enabled'
sdl_options += 'use_timers=enabled' sdl_options += 'use_timers=enabled'
sdl_options += 'with_main=true'
# investigate if this is truly needed # investigate if this is truly needed
# Do not remove before https://github.com/libsdl-org/SDL/issues/5413 is released # Do not remove before https://github.com/libsdl-org/SDL/issues/5413 is released
sdl_options += 'use_events=enabled' sdl_options += 'use_events=enabled'
@ -150,9 +149,11 @@ if not get_option('source-only')
sdl_options += 'test=false' sdl_options += 'test=false'
sdl_options += 'use_sensor=disabled' sdl_options += 'use_sensor=disabled'
sdl_options += 'use_haptic=disabled' sdl_options += 'use_haptic=disabled'
sdl_options += 'use_hidapi=disabled'
sdl_options += 'use_audio=disabled' sdl_options += 'use_audio=disabled'
sdl_options += 'use_cpuinfo=disabled' sdl_options += 'use_cpuinfo=disabled'
sdl_options += 'use_joystick=disabled' sdl_options += 'use_joystick=disabled'
sdl_options += 'use_joystick_xinput=disabled'
sdl_options += 'use_video_vulkan=disabled' sdl_options += 'use_video_vulkan=disabled'
sdl_options += 'use_video_offscreen=disabled' sdl_options += 'use_video_offscreen=disabled'
sdl_options += 'use_power=disabled' sdl_options += 'use_power=disabled'
@ -183,50 +184,55 @@ if get_option('portable') or host_machine.system() == 'windows'
lite_docdir = '/doc' lite_docdir = '/doc'
lite_datadir = '/data' lite_datadir = '/data'
configure_file( configure_file(
input: 'resources/windows/lite-xl.exe.manifest.in', input: 'resources' / 'windows' / 'lite-xl.exe.manifest.in',
output: 'lite-xl.exe.manifest', output: 'lite-xl.exe.manifest',
configuration: conf_data configuration: conf_data
) )
elif get_option('bundle') and host_machine.system() == 'darwin' elif get_option('bundle') and host_machine.system() == 'darwin'
lite_cargs += '-DMACOS_USE_BUNDLE' lite_cargs += '-DMACOS_USE_BUNDLE'
lite_bindir = 'Contents/MacOS' lite_bindir = 'Contents' / 'MacOS'
lite_docdir = 'Contents/Resources' lite_docdir = 'Contents' / 'Resources'
lite_datadir = 'Contents/Resources' lite_datadir = 'Contents' / 'Resources'
install_data('resources/icons/icon.icns', install_dir : 'Contents/Resources') conf_data.set(
'CURRENT_YEAR',
run_command('date', '+%Y', capture: true).stdout().strip()
)
install_data('resources' / 'icons' / 'icon.icns', install_dir : 'Contents' / 'Resources')
configure_file( configure_file(
input : 'resources/macos/Info.plist.in', input : 'resources' / 'macos' / 'Info.plist.in',
output : 'Info.plist', output : 'Info.plist',
configuration : conf_data, configuration : conf_data,
install : true, install : true,
install_dir : 'Contents' install_dir : 'Contents'
) )
else else
message()
lite_bindir = 'bin' lite_bindir = 'bin'
lite_docdir = 'share/doc/lite-xl' lite_docdir = get_option('datadir') / 'doc' / 'lite-xl'
lite_datadir = 'share/lite-xl' lite_datadir = get_option('datadir') / 'lite-xl'
if host_machine.system() == 'linux' if host_machine.system() == 'linux'
install_data('resources/icons/lite-xl.svg', install_data('resources' / 'icons' / 'lite-xl.svg',
install_dir : 'share/icons/hicolor/scalable/apps' install_dir : get_option('datadir') / 'icons' / 'hicolor' / 'scalable' / 'apps'
) )
install_data('resources/linux/org.lite_xl.lite_xl.desktop', install_data('resources' / 'linux' / 'org.lite_xl.lite_xl.desktop',
install_dir : 'share/applications' install_dir : get_option('datadir') / 'applications'
) )
install_data('resources/linux/org.lite_xl.lite_xl.appdata.xml', install_data('resources' / 'linux' / 'org.lite_xl.lite_xl.appdata.xml',
install_dir : 'share/metainfo' install_dir : get_option('datadir') / 'metainfo'
) )
endif endif
endif endif
install_data('licenses/licenses.md', install_dir : lite_docdir) install_data('licenses' / 'licenses.md', install_dir : lite_docdir)
install_subdir('docs/api' , install_dir : lite_datadir, strip_directory: true) install_subdir('docs' / 'api' , install_dir : lite_datadir, strip_directory: true)
install_subdir('data/core' , install_dir : lite_datadir, exclude_files : 'start.lua') install_subdir('data' / 'core' , install_dir : lite_datadir, exclude_files : 'start.lua')
foreach data_module : ['fonts', 'plugins', 'colors'] foreach data_module : ['fonts', 'plugins', 'colors']
install_subdir(join_paths('data', data_module), install_dir : lite_datadir) install_subdir(join_paths('data', data_module), install_dir : lite_datadir)
endforeach endforeach
configure_file( configure_file(
input : 'data/core/start.lua', input : 'data' / 'core' / 'start.lua',
output : 'start.lua', output : 'start.lua',
configuration : conf_data, configuration : conf_data,
install_dir : join_paths(lite_datadir, 'core'), install_dir : join_paths(lite_datadir, 'core'),


@ -17,13 +17,14 @@ Various scripts and configurations used to configure, build, and package Lite XL
### Utility ### Utility
- **common.sh**: Common functions used by other scripts. - **common.sh**: Common functions used by other scripts.
- **install-dependencies.sh**: Installs required applications to build, package - **install-dependencies.sh**: Installs required applications to build, package
and run Lite XL, mainly useful for CI and documentation purposes. and run Lite XL, mainly useful for CI and documentation purposes.
Preferably not to be used in user systems. Preferably not to be used in user systems.
- **fontello-config.json**: Used by the icons generator. - **fontello-config.json**: Used by the icons generator.
- **generate_header.sh**: Generates a header file for native plugin API - **generate_header.sh**: Generates a header file for native plugin API
- **keymap-generator**: Generates a JSON file containing the keymap - **keymap-generator**: Generates a JSON file containing the keymap
- **generate-release-notes.sh**: Generates a release note for Lite XL releases.
[1]: https://github.com/dmgbuild/dmgbuild [1]: https://github.com/dmgbuild/dmgbuild
[2]: https://docs.appimage.org/ [2]: https://docs.appimage.org/


@ -85,7 +85,7 @@ fi
setup_appimagetool() { setup_appimagetool() {
if [ ! -e appimagetool ]; then if [ ! -e appimagetool ]; then
if ! wget -O appimagetool "https://github.com/AppImage/AppImageKit/releases/download/continuous/appimagetool-${ARCH}.AppImage" ; then if ! wget -O appimagetool "https://github.com/AppImage/appimagetool/releases/download/continuous/appimagetool-${ARCH}.AppImage" ; then
echo "Could not download the appimagetool for the arch '${ARCH}'." echo "Could not download the appimagetool for the arch '${ARCH}'."
exit 1 exit 1
else else
@ -94,17 +94,6 @@ setup_appimagetool() {
fi fi
} }
download_appimage_apprun() {
if [ ! -e AppRun ]; then
if ! wget -O AppRun "https://github.com/AppImage/AppImageKit/releases/download/continuous/AppRun-${ARCH}" ; then
echo "Could not download AppRun for the arch '${ARCH}'."
exit 1
else
chmod 0755 AppRun
fi
fi
}
build_litexl() { build_litexl() {
if [ -e build ]; then if [ -e build ]; then
rm -rf build rm -rf build
@ -117,11 +106,12 @@ build_litexl() {
echo "Build lite-xl..." echo "Build lite-xl..."
sleep 1 sleep 1
if [[ $STATIC_BUILD == false ]]; then if [[ $STATIC_BUILD == false ]]; then
meson setup --buildtype=$BUILD_TYPE --prefix=/usr ${BUILD_DIR} meson setup --buildtype=$BUILD_TYPE --prefix=/usr -Dportable=false ${BUILD_DIR}
else else
meson setup --wrap-mode=forcefallback \ meson setup --wrap-mode=forcefallback \
--buildtype=$BUILD_TYPE \ --buildtype=$BUILD_TYPE \
--prefix=/usr \ --prefix=/usr \
-Dportable=false \
${BUILD_DIR} ${BUILD_DIR}
fi fi
meson compile -C ${BUILD_DIR} meson compile -C ${BUILD_DIR}
@ -135,11 +125,18 @@ generate_appimage() {
echo "Creating LiteXL.AppDir..." echo "Creating LiteXL.AppDir..."
DESTDIR="$(realpath LiteXL.AppDir)" meson install --skip-subprojects -C ${BUILD_DIR} DESTDIR="$(realpath LiteXL.AppDir)" meson install --skip-subprojects -C ${BUILD_DIR}
mv AppRun LiteXL.AppDir/
# These could be symlinks but it seems they don't work with AppImageLauncher
cp resources/icons/lite-xl.svg LiteXL.AppDir/ cp resources/icons/lite-xl.svg LiteXL.AppDir/
cp resources/linux/org.lite_xl.lite_xl.desktop LiteXL.AppDir/ cp resources/linux/org.lite_xl.lite_xl.desktop LiteXL.AppDir/
echo "Creating AppRun..."
cat >> LiteXL.AppDir/AppRun <<- 'EOF'
#!/bin/sh
CURRENTDIR="$(dirname "$(readlink -f "$0")")"
exec "$CURRENTDIR/usr/bin/lite-xl" "$@"
EOF
chmod +x LiteXL.AppDir/AppRun
if [[ $ADDONS == true ]]; then if [[ $ADDONS == true ]]; then
addons_download "${BUILD_DIR}" addons_download "${BUILD_DIR}"
addons_install "${BUILD_DIR}" "LiteXL.AppDir/usr/share/lite-xl" addons_install "${BUILD_DIR}" "LiteXL.AppDir/usr/share/lite-xl"
@ -181,10 +178,9 @@ generate_appimage() {
version="${version}-addons" version="${version}-addons"
fi fi
./appimagetool --appimage-extract-and-run LiteXL.AppDir LiteXL${version}-${ARCH}.AppImage APPIMAGE_EXTRACT_AND_RUN=1 ./appimagetool LiteXL.AppDir LiteXL${version}-${ARCH}.AppImage
} }
setup_appimagetool setup_appimagetool
download_appimage_apprun
if [[ $RUN_BUILD == true ]]; then build_litexl; fi if [[ $RUN_BUILD == true ]]; then build_litexl; fi
generate_appimage $1 generate_appimage $1


@ -0,0 +1,69 @@
#!/usr/bin/env bash
if [ ! -e "src/api/api.h" ]; then
echo "Please run this script from the root directory of Lite XL."; exit 1
fi
show_help() {
echo
echo "Release notes generator for lite-xl releases."
echo "USE IT AT YOUR OWN RISK!"
echo
echo "Usage: $0 <OPTIONS>"
echo
echo "Available options:"
echo
echo "--version The current version used to generate release notes."
echo "--debug Debug this script."
echo "--help Show this message."
echo
}
main() {
local version
local last_version
for i in "$@"; do
case $i in
--debug)
set -x
shift
;;
--help)
show_help
exit 0
;;
--version)
version="$2"
shift
shift
;;
*)
# unknown option
;;
esac
done
if [[ -n $1 ]]; then
show_help
exit 0
fi
if [[ -z "$version" ]]; then
echo "error: a version must be provided"
exit 1
fi
# use gh cli to get the last version
read -r last_version < <(gh release list --exclude-pre-releases --limit 1 | awk 'BEGIN {FS="\t"}; {print $3}')
if [[ -z "$last_version" ]]; then
echo "error: cannot get last release git tag"
exit 1
fi
export RELEASE_TAG="$version"
export LAST_RELEASE_TAG="$last_version"
envsubst '$RELEASE_TAG:$LAST_RELEASE_TAG' > release-notes.md < resources/release-notes.md
}
main "$@"


@ -14,20 +14,16 @@ show_help() {
echo echo
echo "Available options:" echo "Available options:"
echo echo
echo "-l --lhelper Install tools required by LHelper and doesn't"
echo " install external libraries."
echo " --debug Debug this script." echo " --debug Debug this script."
echo echo
} }
main() { main() {
local lhelper=false
for i in "$@"; do for i in "$@"; do
case $i in case $i in
-s|--lhelper) -s|--lhelper)
lhelper=true echo "error: support for lhelper has been deprecated" >> /dev/stderr
shift exit 1
;; ;;
--debug) --debug)
set -x set -x
@ -45,27 +41,14 @@ main() {
fi fi
if [[ "$OSTYPE" == "linux"* ]]; then if [[ "$OSTYPE" == "linux"* ]]; then
if [[ $lhelper == true ]]; then sudo apt-get install -qq libfuse2 ninja-build wayland-protocols libsdl2-dev libfreetype6 desktop-file-utils
sudo apt-get install -qq ninja-build
else
sudo apt-get install -qq libfuse2 ninja-build wayland-protocols libsdl2-dev libfreetype6
fi
pip3 install meson pip3 install meson
elif [[ "$OSTYPE" == "darwin"* ]]; then elif [[ "$OSTYPE" == "darwin"* ]]; then
if [[ $lhelper == true ]]; then brew install bash ninja sdl2
brew install bash md5sha1sum ninja
else
brew install bash ninja sdl2
fi
pip3 install meson dmgbuild pip3 install meson dmgbuild
elif [[ "$OSTYPE" == "msys" ]]; then elif [[ "$OSTYPE" == "msys" ]]; then
if [[ $lhelper == true ]]; then pacman --noconfirm -S \
pacman --noconfirm -S \ ${MINGW_PACKAGE_PREFIX}-{ca-certificates,gcc,meson,ninja,ntldd,pkg-config,mesa,freetype,pcre2,SDL2} unzip
${MINGW_PACKAGE_PREFIX}-{ca-certificates,gcc,meson,ninja,ntldd,pkg-config,mesa} unzip
else
pacman --noconfirm -S \
${MINGW_PACKAGE_PREFIX}-{ca-certificates,gcc,meson,ninja,ntldd,pkg-config,mesa,freetype,pcre2,SDL2} unzip
fi
fi fi
} }


@ -1,73 +0,0 @@
#!/bin/bash
set -e
show_help() {
echo
echo "Usage: $0 <OPTIONS>"
echo
echo "Available options:"
echo
echo " --debug Debug this script."
echo "-h --help Show this help and exit."
echo "-p --prefix PREFIX Install directory prefix."
echo " Default: '$HOME/.local'."
echo
}
main() {
local lhelper_prefix="$HOME/.local"
for i in "$@"; do
case $i in
-h|--help)
show_help
exit 0
;;
-p|--prefix)
lhelper_prefix="$2"
echo "LHelper prefix set to: \"${lhelper_prefix}\""
shift
shift
;;
--debug)
set -x
shift
;;
*)
# unknown option
;;
esac
done
if [[ -n $1 ]]; then show_help; exit 1; fi
if [[ ! -f ${lhelper_prefix}/bin/lhelper ]]; then
git clone https://github.com/franko/lhelper.git
# FIXME: This should be set in ~/.bash_profile if not using CI
# export PATH="${HOME}/.local/bin:${PATH}"
mkdir -p "${lhelper_prefix}/bin"
pushd lhelper; bash install "${lhelper_prefix}"; popd
if [[ "$OSTYPE" == "darwin"* ]]; then
CC=clang CXX=clang++ lhelper create build
else
lhelper create lite-xl build
fi
fi
# Not using $(lhelper activate lite-xl) to support CI
source "$(lhelper env-source build)"
# Help MSYS2 to find the SDL2 include and lib directories to avoid errors
# during build and linking when using lhelper.
# Francesco: not sure why this is needed. I have never observed the problem when
# building on window.
# if [[ "$OSTYPE" == "msys" ]]; then
# CFLAGS=-I${LHELPER_ENV_PREFIX}/include/SDL2
# LDFLAGS=-L${LHELPER_ENV_PREFIX}/lib
# fi
}
main "$@"


@ -30,4 +30,9 @@ done
lipo -create -output "Lite XL.app/Contents/MacOS/lite-xl" "$WORKDIR/"*-lite-xl lipo -create -output "Lite XL.app/Contents/MacOS/lite-xl" "$WORKDIR/"*-lite-xl
# https://eclecticlight.co/2019/01/17/code-signing-for-the-concerned-3-signing-an-app/
# https://wiki.lazarus.freepascal.org/Code_Signing_for_macOS#Big_Sur_and_later_on_Apple_M1_ARM64_processors
# codesign all the files again, hopefully this would fix signature validation
codesign --force --deep --digest-algorithm=sha1,sha256 -s - "Lite XL.app"
source scripts/appdmg.sh "$2" source scripts/appdmg.sh "$2"


@ -270,7 +270,8 @@ main() {
if [[ $bundle == true ]]; then if [[ $bundle == true ]]; then
# https://eclecticlight.co/2019/01/17/code-signing-for-the-concerned-3-signing-an-app/ # https://eclecticlight.co/2019/01/17/code-signing-for-the-concerned-3-signing-an-app/
codesign --force --deep -s - "${dest_dir}" # https://wiki.lazarus.freepascal.org/Code_Signing_for_macOS#Big_Sur_and_later_on_Apple_M1_ARM64_processors
codesign --force --deep --digest-algorithm=sha1,sha256 -s - "${dest_dir}"
fi fi
echo "Creating a compressed archive ${package_name}" echo "Creating a compressed archive ${package_name}"


@ -26,7 +26,19 @@ int get_mode_dirmonitor();
static int f_check_dir_callback(int watch_id, const char* path, void* L) { static int f_check_dir_callback(int watch_id, const char* path, void* L) {
// using absolute indices from f_dirmonitor_check (2: callback, 3: error_callback) // using absolute indices from f_dirmonitor_check (2: callback, 3: error_callback, 4: watch_id notified table)
// Check if we already notified about this watch
lua_rawgeti(L, 4, watch_id);
bool skip = !lua_isnoneornil(L, -1);
lua_pop(L, 1);
if (skip) return 0;
// Set watch as notified
lua_pushboolean(L, true);
lua_rawseti(L, 4, watch_id);
// Prepare callback call
lua_pushvalue(L, 2); lua_pushvalue(L, 2);
if (path) if (path)
lua_pushlstring(L, path, watch_id); lua_pushlstring(L, path, watch_id);
@ -117,6 +129,9 @@ static int f_dirmonitor_check(lua_State* L) {
if (monitor->length < 0) if (monitor->length < 0)
lua_pushnil(L); lua_pushnil(L);
else if (monitor->length > 0) { else if (monitor->length > 0) {
// Create a table for keeping track of what watch ids were notified in this check,
// so that we avoid notifying multiple times.
lua_newtable(L);
if (translate_changes_dirmonitor(monitor->internal, monitor->buffer, monitor->length, f_check_dir_callback, L) == 0) if (translate_changes_dirmonitor(monitor->internal, monitor->buffer, monitor->length, f_check_dir_callback, L) == 0)
monitor->length = 0; monitor->length = 0;
lua_pushboolean(L, 1); lua_pushboolean(L, 1);
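The new table at stack index 4 is a per-check "already notified" set keyed by watch id: the first callback for a given id records it, and later events for the same id within the same check are skipped. The same bookkeeping expressed in Lua, with hypothetical names, only to illustrate the C logic above:

-- Lua rendering of the dedup bookkeeping done in f_check_dir_callback
local notified = {}                        -- the table created once per check

local function on_change(watch_id, callback, path)
  if notified[watch_id] then return end    -- already reported during this check
  notified[watch_id] = true                -- mark it before invoking the callback
  callback(path or watch_id)
end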


@ -12,6 +12,7 @@
// https://stackoverflow.com/questions/60645/overlapped-i-o-on-anonymous-pipe // https://stackoverflow.com/questions/60645/overlapped-i-o-on-anonymous-pipe
// https://docs.microsoft.com/en-us/windows/win32/procthread/creating-a-child-process-with-redirected-input-and-output // https://docs.microsoft.com/en-us/windows/win32/procthread/creating-a-child-process-with-redirected-input-and-output
#include <windows.h> #include <windows.h>
#include "../utfconv.h"
#else #else
#include <errno.h> #include <errno.h>
#include <unistd.h> #include <unistd.h>
@ -20,9 +21,8 @@
#include <sys/types.h> #include <sys/types.h>
#include <sys/wait.h> #include <sys/wait.h>
#endif #endif
#if defined(__amigaos4__)
#include "platform/amigaos4.h" #include "../arena_allocator.h"
#endif
#define READ_BUF_SIZE 2048 #define READ_BUF_SIZE 2048
#define PROCESS_TERM_TRIES 3 #define PROCESS_TERM_TRIES 3
@ -354,268 +354,35 @@ static bool signal_process(process_t* proc, signal_e sig) {
return true; return true;
} }
static UNUSED char *xstrdup(const char *str) {
char *result = str ? malloc(strlen(str) + 1) : NULL;
if (result) strcpy(result, str);
return result;
}
static int process_arglist_init(process_arglist_t *list, size_t *list_len, size_t nargs) {
*list_len = 0;
#ifdef _WIN32
memset(*list, 0, sizeof(process_arglist_t));
#else
*list = calloc(sizeof(char *), nargs + 1);
if (!*list) return ENOMEM;
#endif
return 0;
}
static int process_arglist_add(process_arglist_t *list, size_t *list_len, const char *arg, bool escape) {
size_t len = *list_len;
#ifdef _WIN32
int arg_len;
wchar_t *cmdline = *list;
wchar_t arg_w[32767];
// this length includes the null terminator!
if (!(arg_len = MultiByteToWideChar(CP_UTF8, 0, arg, -1, arg_w, 32767)))
return GetLastError();
if (arg_len + len > 32767)
return ERROR_NOT_ENOUGH_MEMORY;
if (!escape) {
// replace the current null terminator with a space
if (len > 0) cmdline[len-1] = ' ';
memcpy(cmdline + len, arg_w, arg_len * sizeof(wchar_t));
len += arg_len;
} else {
// if the string contains spaces, then we must quote it
bool quote = wcspbrk(arg_w, L" \t\v\r\n");
int backslash = 0, escaped_len = quote ? 2 : 0;
for (int i = 0; i < arg_len; i++) {
if (arg_w[i] == L'\\') {
backslash++;
} else if (arg_w[i] == L'"') {
escaped_len += backslash + 1;
backslash = 0;
} else {
backslash = 0;
}
escaped_len++;
}
// escape_len contains NUL terminator
if (escaped_len + len > 32767)
return ERROR_NOT_ENOUGH_MEMORY;
// replace our previous NUL terminator with space
if (len > 0) cmdline[len-1] = L' ';
if (quote) cmdline[len++] = L'"';
// we are not going to iterate over NUL terminator
for (int i = 0;arg_w[i]; i++) {
if (arg_w[i] == L'\\') {
backslash++;
} else if (arg_w[i] == L'"') {
// add backslash + 1 backslashes
for (int j = 0; j < backslash; j++)
cmdline[len++] = L'\\';
cmdline[len++] = L'\\';
backslash = 0;
} else {
backslash = 0;
}
cmdline[len++] = arg_w[i];
}
if (quote) cmdline[len++] = L'"';
cmdline[len++] = L'\0';
}
#else
char **cmd = *list;
cmd[len] = xstrdup(arg);
if (!cmd[len]) return ENOMEM;
len++;
#endif
*list_len = len;
return 0;
}
static void process_arglist_free(process_arglist_t *list) {
if (!*list) return;
#ifndef _WIN32
char **cmd = *list;
for (int i = 0; cmd[i]; i++)
free(cmd[i]);
free(cmd);
*list = NULL;
#endif
}
static int process_env_init(process_env_t *env_list, size_t *env_len, size_t nenv) {
*env_len = 0;
#ifdef _WIN32
*env_list = NULL;
#else
*env_list = calloc(sizeof(char *), nenv * 2);
if (!*env_list) return ENOMEM;
#endif
return 0;
}
#ifdef _WIN32
static int cmp_name(wchar_t *a, wchar_t *b) {
wchar_t _A[32767], _B[32767], *A = _A, *B = _B, *a_eq, *b_eq;
int na, nb, r;
a_eq = wcschr(a, L'=');
b_eq = wcschr(b, L'=');
assert(a_eq);
assert(b_eq);
na = a_eq - a;
nb = b_eq - b;
r = LCMapStringW(LOCALE_INVARIANT, LCMAP_UPPERCASE, a, na, A, na);
assert(r == na);
A[na] = L'\0';
r = LCMapStringW(LOCALE_INVARIANT, LCMAP_UPPERCASE, b, nb, B, nb);
assert(r == nb);
B[nb] = L'\0';
for (;;) {
wchar_t AA = *A++, BB = *B++;
if (AA > BB)
return 1;
else if (AA < BB)
return -1;
else if (!AA && !BB)
return 0;
}
}
static int process_env_add_variable(process_env_t *env_list, size_t *env_list_len, wchar_t *var, size_t var_len) {
wchar_t *list, *list_p;
size_t block_var_len, list_len;
list = list_p = *env_list;
list_len = *env_list_len;
if (list_len) {
// check if it is already in the block
while ((block_var_len = wcslen(list_p))) {
if (cmp_name(list_p, var) == 0)
return -1; // already installed
list_p += block_var_len + 1;
}
}
// allocate list + 1 characters for the block terminator
list = realloc(list, (list_len + var_len + 1) * sizeof(wchar_t));
if (!list) return ERROR_NOT_ENOUGH_MEMORY;
// copy the env variable to the block
memcpy(list + list_len, var, var_len * sizeof(wchar_t));
// terminate the block again
list[list_len + var_len] = L'\0';
*env_list = list;
*env_list_len = (list_len + var_len);
return 0;
}
static int process_env_add_system(process_env_t *env_list, size_t *env_list_len) {
int retval = 0;
wchar_t *proc_env_block, *proc_env_block_p;
int proc_env_len;
proc_env_block = proc_env_block_p = GetEnvironmentStringsW();
while ((proc_env_len = wcslen(proc_env_block_p))) {
// try to add it to the list
if ((retval = process_env_add_variable(env_list, env_list_len, proc_env_block_p, proc_env_len + 1)) > 0)
goto cleanup;
proc_env_block_p += proc_env_len + 1;
}
retval = 0;
cleanup:
if (proc_env_block) FreeEnvironmentStringsW(proc_env_block);
return retval;
}
#endif
static int process_env_add(process_env_t *env_list, size_t *env_len, const char *key, const char *value) {
#ifdef _WIN32
wchar_t env_var[32767];
int r, var_len = 0;
if (!(r = MultiByteToWideChar(CP_UTF8, 0, key, -1, env_var, 32767)))
return GetLastError();
var_len += r;
env_var[var_len-1] = L'=';
if (!(r = MultiByteToWideChar(CP_UTF8, 0, value, -1, env_var + var_len, 32767 - var_len)))
return GetLastError();
var_len += r;
return process_env_add_variable(env_list, env_len, env_var, var_len);
#else
(*env_list)[*env_len] = xstrdup(key);
if (!(*env_list)[*env_len])
return ENOMEM;
(*env_list)[*env_len + 1] = xstrdup(value);
if (!(*env_list)[*env_len + 1])
return ENOMEM;
*env_len += 2;
#endif
return 0;
}
static void process_env_free(process_env_t *list, size_t list_len) {
if (!*list) return;
#ifndef _WIN32
for (size_t i = 0; i < list_len; i++)
free((*list)[i]);
#endif
free(*list);
*list = NULL;
}
static int process_start(lua_State* L) { static int process_start(lua_State* L) {
int r, retval = 1; int retval = 1;
size_t env_len = 0, cmd_len = 0, arglist_len = 0, env_vars_len = 0;
process_t *self = NULL; process_t *self = NULL;
process_arglist_t arglist = PROCESS_ARGLIST_INITIALIZER; int deadline = 10, detach = false, new_fds[3] = { STDIN_FD, STDOUT_FD, STDERR_FD };
process_env_t env_vars = NULL;
const char *cwd = NULL;
bool detach = false, escape = true;
int deadline = 10, new_fds[3] = { STDIN_FD, STDOUT_FD, STDERR_FD };
if (lua_isstring(L, 1)) { #ifdef _WIN32
escape = false; wchar_t *commandline = NULL, *env = NULL, *cwd = NULL;
// create a table that contains the string as the value #else
lua_createtable(L, 1, 0); const char **cmd = NULL, *env = NULL, *cwd = NULL;
lua_pushvalue(L, 1); #endif
lua_rawseti(L, -2, 1);
lua_replace(L, 1);
}
lua_settop(L, 3);
lxl_arena *A = lxl_arena_init(L);
// copy command line arguments
#ifdef _WIN32
if ( !(commandline = utfconv_fromutf8(A, luaL_checkstring(L, 1))) )
return luaL_error(L, "%s", UTFCONV_ERROR_INVALID_CONVERSION);
#else
luaL_checktype(L, 1, LUA_TTABLE); luaL_checktype(L, 1, LUA_TTABLE);
#if LUA_VERSION_NUM > 501 int len = luaL_len(L, 1);
lua_len(L, 1); cmd = lxl_arena_zero(A, (len + 1) * sizeof(char *));
#else for (int i = 0; i < len; i++) {
lua_pushinteger(L, (int)lua_objlen(L, 1)); cmd[i] = lxl_arena_strdup(A, (lua_rawgeti(L, 1, i+1), luaL_checkstring(L, -1)));
#endif
cmd_len = luaL_checknumber(L, -1); lua_pop(L, 1);
if (!cmd_len)
return luaL_argerror(L, 1, "table cannot be empty");
// check if each arguments is a string
for (size_t i = 1; i <= cmd_len; ++i) {
lua_rawgeti(L, 1, i);
luaL_checkstring(L, -1);
lua_pop(L, 1);
} }
#endif
if (lua_istable(L, 2)) { if (lua_istable(L, 2)) {
lua_getfield(L, 2, "detach"); detach = lua_toboolean(L, -1); lua_getfield(L, 2, "detach"); detach = lua_toboolean(L, -1);
lua_getfield(L, 2, "timeout"); deadline = luaL_optnumber(L, -1, deadline); lua_getfield(L, 2, "timeout"); deadline = luaL_optnumber(L, -1, deadline);
lua_getfield(L, 2, "cwd"); cwd = luaL_optstring(L, -1, NULL);
lua_getfield(L, 2, "stdin"); new_fds[STDIN_FD] = luaL_optnumber(L, -1, STDIN_FD); lua_getfield(L, 2, "stdin"); new_fds[STDIN_FD] = luaL_optnumber(L, -1, STDIN_FD);
lua_getfield(L, 2, "stdout"); new_fds[STDOUT_FD] = luaL_optnumber(L, -1, STDOUT_FD); lua_getfield(L, 2, "stdout"); new_fds[STDOUT_FD] = luaL_optnumber(L, -1, STDOUT_FD);
lua_getfield(L, 2, "stderr"); new_fds[STDERR_FD] = luaL_optnumber(L, -1, STDERR_FD); lua_getfield(L, 2, "stderr"); new_fds[STDERR_FD] = luaL_optnumber(L, -1, STDERR_FD);
@ -623,52 +390,42 @@ static int process_start(lua_State* L) {
if (new_fds[stream] > STDERR_FD || new_fds[stream] < REDIRECT_PARENT) if (new_fds[stream] > STDERR_FD || new_fds[stream] < REDIRECT_PARENT)
return luaL_error(L, "error: redirect to handles, FILE* and paths are not supported"); return luaL_error(L, "error: redirect to handles, FILE* and paths are not supported");
} }
lua_pop(L, 6); // pop all the values above lua_pop(L, 5); // pop all the values above
luaL_getsubtable(L, 2, "env"); #ifdef _WIN32
// count environment variables if (lua_getfield(L, 2, "env") == LUA_TFUNCTION) {
lua_pushnil(L); lua_newtable(L);
while (lua_next(L, -2) != 0) { LPWCH system_env = GetEnvironmentStringsW(), envp = system_env;
luaL_checkstring(L, -2); while (wcslen(envp) > 0) {
luaL_checkstring(L, -1); const char *env = utfconv_fromwstr(A, envp), *eq = env ? strchr(env, '=') : NULL;
lua_pop(L, 1); if (!env) return (FreeEnvironmentStringsW(system_env), luaL_error(L, "%s", UTFCONV_ERROR_INVALID_CONVERSION));
env_len++; if (!eq) return (FreeEnvironmentStringsW(system_env), luaL_error(L, "invalid environment variable"));
} lua_pushlstring(L, env, eq - env); lua_pushstring(L, eq+1);
lxl_arena_free(A, (void *) env);
if (env_len) { lua_rawset(L, -3);
if ((r = process_env_init(&env_vars, &env_vars_len, env_len)) != 0) { envp += wcslen(envp) + 1;
retval = -1;
push_error(L, "cannot allocate environment list", r);
goto cleanup;
}
lua_pushnil(L);
while (lua_next(L, -2) != 0) {
if ((r = process_env_add(&env_vars, &env_vars_len, lua_tostring(L, -2), lua_tostring(L, -1))) != 0) {
retval = -1;
push_error(L, "cannot copy environment variable", r);
goto cleanup;
}
lua_pop(L, 1);
env_len++;
} }
FreeEnvironmentStringsW(system_env);
lua_call(L, 1, 1);
size_t len = 0; const char *env_mb = luaL_checklstring(L, -1, &len);
if (!(env = utfconv_fromlutf8(A, env_mb, len)))
return luaL_error(L, "%s", UTFCONV_ERROR_INVALID_CONVERSION);
} }
} if (lua_getfield(L, 2, "cwd"), luaL_optstring(L, -1, NULL)) {
if ( !(cwd = utfconv_fromutf8(A, lua_tostring(L, -1))) )
// allocate and copy commands return luaL_error(L, UTFCONV_ERROR_INVALID_CONVERSION);
if ((r = process_arglist_init(&arglist, &arglist_len, cmd_len)) != 0) {
retval = -1;
push_error(L, "cannot create argument list", r);
goto cleanup;
}
for (size_t i = 1; i <= cmd_len; i++) {
lua_rawgeti(L, 1, i);
if ((r = process_arglist_add(&arglist, &arglist_len, lua_tostring(L, -1), escape)) != 0) {
retval = -1;
push_error(L, "cannot add argument", r);
goto cleanup;
} }
lua_pop(L, 1); lua_pop(L, 2);
#else
if (lua_getfield(L, 2, "env") == LUA_TFUNCTION) {
lua_newtable(L);
lua_call(L, 1, 1);
size_t len = 0; env = lua_tolstring(L, -1, &len);
env = lxl_arena_copy(A, (void *) env, len+1);
}
cwd = lxl_arena_strdup(A, (lua_getfield(L, 2, "cwd"), luaL_optstring(L, -1, NULL)));
lua_pop(L, 2);
#endif
} }
self = lua_newuserdata(L, sizeof(process_t)); self = lua_newuserdata(L, sizeof(process_t));
@ -677,13 +434,6 @@ static int process_start(lua_State* L) {
self->deadline = deadline; self->deadline = deadline;
self->detached = detach; self->detached = detach;
#if _WIN32 #if _WIN32
if (env_vars) {
if ((r = process_env_add_system(&env_vars, &env_vars_len)) != 0) {
retval = -1;
push_error(L, "cannot add environment variable", r);
goto cleanup;
}
}
for (int i = 0; i < 3; ++i) { for (int i = 0; i < 3; ++i) {
switch (new_fds[i]) { switch (new_fds[i]) {
case REDIRECT_PARENT: case REDIRECT_PARENT:
@ -742,10 +492,7 @@ static int process_start(lua_State* L) {
siStartInfo.hStdInput = self->child_pipes[STDIN_FD][0]; siStartInfo.hStdInput = self->child_pipes[STDIN_FD][0];
siStartInfo.hStdOutput = self->child_pipes[STDOUT_FD][1]; siStartInfo.hStdOutput = self->child_pipes[STDOUT_FD][1];
siStartInfo.hStdError = self->child_pipes[STDERR_FD][1]; siStartInfo.hStdError = self->child_pipes[STDERR_FD][1];
wchar_t cwd_w[MAX_PATH]; if (!CreateProcessW(NULL, commandline, NULL, NULL, true, (detach ? DETACHED_PROCESS : CREATE_NO_WINDOW) | CREATE_UNICODE_ENVIRONMENT, env, cwd, &siStartInfo, &self->process_information)) {
if (cwd) // TODO: error handling
MultiByteToWideChar(CP_UTF8, 0, cwd, -1, cwd_w, MAX_PATH);
if (!CreateProcessW(NULL, arglist, NULL, NULL, true, (detach ? DETACHED_PROCESS : CREATE_NO_WINDOW) | CREATE_UNICODE_ENVIRONMENT, env_vars, cwd ? cwd_w : NULL, &siStartInfo, &self->process_information)) {
push_error(L, NULL, GetLastError()); push_error(L, NULL, GetLastError());
retval = -1; retval = -1;
goto cleanup; goto cleanup;
@ -792,10 +539,17 @@ static int process_start(lua_State* L) {
dup2(self->child_pipes[new_fds[stream]][new_fds[stream] == STDIN_FD ? 0 : 1], stream); dup2(self->child_pipes[new_fds[stream]][new_fds[stream] == STDIN_FD ? 0 : 1], stream);
close(self->child_pipes[stream][stream == STDIN_FD ? 1 : 0]); close(self->child_pipes[stream][stream == STDIN_FD ? 1 : 0]);
} }
size_t set; if (env) {
for (set = 0; set < env_vars_len && setenv(env_vars[set], env_vars[set+1], 1) == 0; set += 2); size_t len = 0;
if (set == env_vars_len && (!detach || setsid() != -1) && (!cwd || chdir(cwd) != -1)) while ((len = strlen(env)) != 0) {
execvp(arglist[0], (char** const)arglist); char *value = strchr(env, '=');
*value = '\0'; value++; // change the '=' into '\0', forming 2 strings side by side
setenv(env, value, 1);
env += len+1;
}
}
if ((!detach || setsid() != -1) && (!cwd || chdir(cwd) != -1))
execvp(cmd[0], (char** const) cmd);
write(control_pipe[1], &errno, sizeof(errno)); write(control_pipe[1], &errno, sizeof(errno));
_exit(-1); _exit(-1);
} }
@ -837,8 +591,6 @@ static int process_start(lua_State* L) {
} }
} }
} }
process_arglist_free(&arglist);
process_env_free(&env_vars, env_vars_len);
if (retval == -1) if (retval == -1)
return lua_error(L); return lua_error(L);
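In the rewritten process_start the `env` option is now a function: it is called with a table (pre-filled with the inherited environment on Windows) and must return the serialized environment as a single string whose KEY=VALUE entries are separated by NUL bytes, which is exactly what the child-side strlen/strchr loop above walks before calling setenv. A hedged Lua sketch of a serializer producing that layout; the callback contract is inferred from this diff, not from documented API:

-- hedged sketch: serialize { KEY = "VALUE", ... } into the NUL-separated block
-- that the forked child walks with strlen()/strchr() before calling setenv()
local function serialize_env(vars)
  local entries = {}
  for key, value in pairs(vars) do
    entries[#entries + 1] = key .. "=" .. value
  end
  -- each entry is terminated by "\0"; an empty entry (strlen == 0) ends the block
  return table.concat(entries, "\0") .. "\0"
end

-- e.g. serialize_env({ LANG = "C", TERM = "xterm" }) produces entries like "LANG=C\0TERM=xterm\0"
-- (entry order depends on pairs())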


@ -98,18 +98,24 @@ static int f_font_load(lua_State *L) {
} }
static bool font_retrieve(lua_State* L, RenFont** fonts, int idx) { static bool font_retrieve(lua_State* L, RenFont** fonts, int idx) {
bool is_table;
memset(fonts, 0, sizeof(RenFont*)*FONT_FALLBACK_MAX); memset(fonts, 0, sizeof(RenFont*)*FONT_FALLBACK_MAX);
if (lua_type(L, idx) != LUA_TTABLE) { if (lua_type(L, idx) != LUA_TTABLE) {
fonts[0] = *(RenFont**)luaL_checkudata(L, idx, API_TYPE_FONT); fonts[0] = *(RenFont**)luaL_checkudata(L, idx, API_TYPE_FONT);
return false; is_table = false;
} else {
is_table = true;
int len = luaL_len(L, idx); len = len > FONT_FALLBACK_MAX ? FONT_FALLBACK_MAX : len;
for (int i = 0; i < len; i++) {
lua_rawgeti(L, idx, i+1);
fonts[i] = *(RenFont**) luaL_checkudata(L, -1, API_TYPE_FONT);
lua_pop(L, 1);
}
} }
int len = luaL_len(L, idx); len = len > FONT_FALLBACK_MAX ? FONT_FALLBACK_MAX : len; #ifdef LITE_USE_SDL_RENDERER
for (int i = 0; i < len; i++) { update_font_scale(&window_renderer, fonts);
lua_rawgeti(L, idx, i+1); #endif
fonts[i] = *(RenFont**) luaL_checkudata(L, -1, API_TYPE_FONT); return is_table;
lua_pop(L, 1);
}
return true;
} }
static int f_font_copy(lua_State *L) { static int f_font_copy(lua_State *L) {
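font_retrieve now accepts either a single Font userdata or a table of them (a fallback group), reads at most FONT_FALLBACK_MAX entries, and refreshes their scale when the SDL renderer is in use. From Lua this means a plain table of loaded fonts is a valid font argument; a hedged sketch, where the emoji font path is illustrative and not shipped by this patch:

-- hedged sketch: a fallback group is just a Lua table of Font objects
local main_font  = renderer.font.load(DATADIR .. "/fonts/FiraSans-Regular.ttf", 15 * SCALE)
local emoji_font = renderer.font.load(USERDIR .. "/fonts/NotoEmoji-Regular.ttf", 15 * SCALE) -- illustrative path
local group = { main_font, emoji_font }   -- font_retrieve() reads up to FONT_FALLBACK_MAX entries
-- any API that goes through font_retrieve() can take either main_font or group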


@ -830,7 +830,14 @@ static int f_mkdir(lua_State *L) {
static int f_get_clipboard(lua_State *L) { static int f_get_clipboard(lua_State *L) {
char *text = SDL_GetClipboardText(); char *text = SDL_GetClipboardText();
if (!text) { return 0; } if (!text) { return 0; }
#ifdef _WIN32
// on windows, text-based clipboard formats must terminate with \r\n
// we need to convert it to \n for Lite XL to read them properly
// https://learn.microsoft.com/en-us/windows/win32/dataxchg/standard-clipboard-formats
luaL_gsub(L, text, "\r\n", "\n");
#else
lua_pushstring(L, text); lua_pushstring(L, text);
#endif
SDL_free(text); SDL_free(text);
return 1; return 1;
} }
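On Windows the clipboard hands back CRLF-terminated text, so the getter now normalizes `\r\n` to `\n` via `luaL_gsub` before the string reaches Lua. The equivalent transformation written in plain Lua, shown only to illustrate what the C call does:

-- equivalent of the luaL_gsub() call above
local function normalize_clipboard(text)
  -- plain find/replace: "\r\n" -> "\n"; the parentheses drop gsub's second return value
  return (text:gsub("\r\n", "\n"))
end

assert(normalize_clipboard("line one\r\nline two\r\n") == "line one\nline two\n")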


@ -196,8 +196,6 @@ int main(int argc, char **argv) {
SDL_SetHint("SDL_MOUSE_DOUBLE_CLICK_RADIUS", "4"); SDL_SetHint("SDL_MOUSE_DOUBLE_CLICK_RADIUS", "4");
#endif #endif
SDL_SetHint(SDL_HINT_RENDER_DRIVER, "software");
SDL_DisplayMode dm; SDL_DisplayMode dm;
SDL_GetCurrentDisplayMode(0, &dm); SDL_GetCurrentDisplayMode(0, &dm);
@ -270,8 +268,6 @@ init_lua:
set_macos_bundle_resources(L); set_macos_bundle_resources(L);
#endif #endif
#endif #endif
SDL_EventState(SDL_TEXTINPUT, SDL_ENABLE);
SDL_EventState(SDL_TEXTEDITING, SDL_ENABLE);
const char *init_lite_code = \ const char *init_lite_code = \
"local core\n" "local core\n"


@ -5,12 +5,14 @@ lite_sources = [
'api/system.c', 'api/system.c',
'api/process.c', 'api/process.c',
'api/utf8.c', 'api/utf8.c',
'arena_allocator.c',
'renderer.c', 'renderer.c',
'renwindow.c', 'renwindow.c',
'rencache.c', 'rencache.c',
'main.c', 'main.c',
] ]
lite_sources += 'api/dirmonitor.c'
# dirmonitor backend # dirmonitor backend
if get_option('dirmonitor_backend') == '' if get_option('dirmonitor_backend') == ''
if cc.has_function('inotify_init', prefix : '#include<sys/inotify.h>') if cc.has_function('inotify_init', prefix : '#include<sys/inotify.h>')
@ -19,6 +21,8 @@ if get_option('dirmonitor_backend') == ''
dirmonitor_backend = 'fsevents' dirmonitor_backend = 'fsevents'
elif cc.has_function('kqueue', prefix : '#include<sys/event.h>') elif cc.has_function('kqueue', prefix : '#include<sys/event.h>')
dirmonitor_backend = 'kqueue' dirmonitor_backend = 'kqueue'
elif cc.has_function('create_inode_watcher', prefix : '#include<fcntl.h>')
dirmonitor_backend = 'inodewatcher'
elif dependency('libkqueue', required : false).found() elif dependency('libkqueue', required : false).found()
dirmonitor_backend = 'kqueue' dirmonitor_backend = 'kqueue'
elif host_machine.system() == 'windows' elif host_machine.system() == 'windows'
@ -31,26 +35,32 @@ else
dirmonitor_backend = get_option('dirmonitor_backend') dirmonitor_backend = get_option('dirmonitor_backend')
endif endif
message('dirmonitor_backend: @0@'.format(dirmonitor_backend)) if dirmonitor_backend == 'inotify'
lite_sources += 'api' / 'dirmonitor' / 'inotify.c'
if dirmonitor_backend == 'kqueue' elif dirmonitor_backend == 'fsevents'
lite_sources += 'api' / 'dirmonitor' / 'fsevents.c'
elif dirmonitor_backend == 'kqueue'
lite_sources += 'api' / 'dirmonitor' / 'kqueue.c'
libkqueue_dep = dependency('libkqueue', required : false) libkqueue_dep = dependency('libkqueue', required : false)
if libkqueue_dep.found() if libkqueue_dep.found()
lite_deps += libkqueue_dep lite_deps += libkqueue_dep
endif endif
elif dirmonitor_backend == 'inodewatcher'
add_languages('cpp')
lite_sources += 'api' / 'dirmonitor' / 'inodewatcher.cpp'
elif dirmonitor_backend == 'win32'
lite_sources += 'api' / 'dirmonitor' / 'win32.c'
else
lite_sources += 'api' / 'dirmonitor' / 'dummy.c'
endif endif
lite_sources += [ message('dirmonitor_backend: @0@'.format(dirmonitor_backend))
'api/dirmonitor.c',
'api/dirmonitor/' + dirmonitor_backend + '.c',
]
lite_rc = [] lite_rc = []
if host_machine.system() == 'windows' if host_machine.system() == 'windows'
windows = import('windows') windows = import('windows')
lite_rc += windows.compile_resources('../resources/icons/icon.rc') lite_rc += windows.compile_resources('..' / 'resources' / 'icons' / 'icon.rc')
lite_rc += windows.compile_resources('../resources/windows/manifest.rc') lite_rc += windows.compile_resources('..' / 'resources' / 'windows' / 'manifest.rc')
elif host_machine.system() == 'darwin' elif host_machine.system() == 'darwin'
lite_sources += 'bundle_open.m' lite_sources += 'bundle_open.m'
endif endif


@ -9,28 +9,23 @@
#include FT_OUTLINE_H #include FT_OUTLINE_H
#include FT_SYSTEM_H #include FT_SYSTEM_H
#ifdef _WIN32
#include <windows.h>
#include "utfconv.h"
#endif
#include "renderer.h" #include "renderer.h"
#include "renwindow.h" #include "renwindow.h"
#define MAX_UNICODE 0x100000 // uncomment the line below for more debugging information through printf
#define GLYPHSET_SIZE 256 // #define RENDERER_DEBUG
#define MAX_LOADABLE_GLYPHSETS (MAX_UNICODE / GLYPHSET_SIZE)
#define SUBPIXEL_BITMAPS_CACHED 3
RenWindow window_renderer = {0}; RenWindow window_renderer = {0};
static FT_Library library; static FT_Library library;
// draw_rect_surface is used as a 1x1 surface to simplify ren_draw_rect with blending // draw_rect_surface is used as a 1x1 surface to simplify ren_draw_rect with blending
static SDL_Surface *draw_rect_surface; static SDL_Surface *draw_rect_surface = NULL;
static FT_Library library = NULL;
static void* check_alloc(void *ptr) { #define check_alloc(P) _check_alloc(P, __FILE__, __LINE__)
static void* _check_alloc(void *ptr, const char *const file, size_t ln) {
if (!ptr) { if (!ptr) {
fprintf(stderr, "Fatal error: memory allocation failed\n"); fprintf(stderr, "%s:%zu: memory allocation failed\n", file, ln);
exit(EXIT_FAILURE); exit(EXIT_FAILURE);
} }
return ptr; return ptr;
@ -38,31 +33,92 @@ static void* check_alloc(void *ptr) {
/************************* Fonts *************************/ /************************* Fonts *************************/
// approximate number of glyphs per atlas surface
#define GLYPHS_PER_ATLAS 256
// some padding to add to atlas surface to store more glyphs
#define FONT_HEIGHT_OVERFLOW_PX 6
#define FONT_WIDTH_OVERFLOW_PX 6
// maximum unicode codepoint supported (https://stackoverflow.com/a/52203901)
#define MAX_UNICODE 0x10FFFF
// number of rows and columns in the codepoint map
#define CHARMAP_ROW 128
#define CHARMAP_COL (MAX_UNICODE / CHARMAP_ROW)
// the maximum number of glyphs for OpenType
#define MAX_GLYPHS 65535
// number of rows and columns in the glyph map
#define GLYPHMAP_ROW 128
#define GLYPHMAP_COL (MAX_GLYPHS / GLYPHMAP_ROW)
// number of subpixel bitmaps
#define SUBPIXEL_BITMAPS_CACHED 3
typedef enum {
EGlyphNone = 0,
EGlyphLoaded = (1 << 0L),
EGlyphBitmap = (1 << 1L),
// currently no-op because blits are always assumed to be dual source
EGlyphDualSource = (1 << 2L),
} ERenGlyphFlags;
// metrics for a loaded glyph
typedef struct { typedef struct {
unsigned int x0, x1, y0, y1, loaded; unsigned short atlas_idx, surface_idx;
unsigned int x1, y0, y1, flags;
int bitmap_left, bitmap_top; int bitmap_left, bitmap_top;
float xadvance; float xadvance;
} GlyphMetric; } GlyphMetric;
// maps codepoints -> glyph IDs
typedef struct { typedef struct {
SDL_Surface* surface; unsigned int *rows[CHARMAP_ROW];
GlyphMetric metrics[GLYPHSET_SIZE]; } CharMap;
} GlyphSet;
// a bitmap atlas with a fixed width, each surface acting as a bump allocator
typedef struct {
SDL_Surface **surfaces;
unsigned int width, nsurface;
} GlyphAtlas;
// maps glyph IDs -> glyph metrics
typedef struct {
// accessed with metrics[bitmap_idx][glyph_id / nrow][glyph_id - (row * ncol)]
GlyphMetric *metrics[SUBPIXEL_BITMAPS_CACHED][GLYPHMAP_ROW];
// accessed with atlas[bitmap_idx][atlas_idx].surfaces[surface_idx]
GlyphAtlas *atlas[SUBPIXEL_BITMAPS_CACHED];
size_t natlas, bytesize;
} GlyphMap;
typedef struct RenFont { typedef struct RenFont {
FT_Face face; FT_Face face;
FT_StreamRec stream; CharMap charmap;
GlyphSet* sets[SUBPIXEL_BITMAPS_CACHED][MAX_LOADABLE_GLYPHSETS]; GlyphMap glyphs;
float size, space_advance, tab_advance; #ifdef LITE_USE_SDL_RENDERER
unsigned short max_height, baseline, height; int scale;
#endif
float size, space_advance;
unsigned short max_height, baseline, height, tab_size;
unsigned short underline_thickness;
ERenFontAntialiasing antialiasing; ERenFontAntialiasing antialiasing;
ERenFontHinting hinting; ERenFontHinting hinting;
unsigned char style; unsigned char style;
unsigned short underline_thickness;
char path[]; char path[];
} RenFont; } RenFont;
static const char* utf8_to_codepoint(const char *p, unsigned *dst) { #ifdef LITE_USE_SDL_RENDERER
void update_font_scale(RenWindow *window_renderer, RenFont **fonts) {
const int surface_scale = renwin_get_surface(window_renderer).scale;
for (int i = 0; i < FONT_FALLBACK_MAX && fonts[i]; ++i) {
if (fonts[i]->scale != surface_scale) {
ren_font_group_set_size(window_renderer, fonts, fonts[0]->size);
return;
}
}
}
#endif
static const char* utf8_to_codepoint(const char *p, const char *endp, unsigned *dst) {
const unsigned char *up = (unsigned char*)p; const unsigned char *up = (unsigned char*)p;
unsigned res, n; unsigned res, n;
switch (*p & 0xf0) { switch (*p & 0xf0) {
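The new CharMap stores codepoint-to-glyph-id mappings in lazily allocated rows: a codepoint is split into a row and a column index with the CHARMAP_ROW/CHARMAP_COL constants above, so only rows that are actually touched cost memory. The same index arithmetic in Lua, as a quick sanity check (constants copied from the defines above):

-- index arithmetic used by font_get_glyph_id(), transcribed for illustration
local MAX_UNICODE = 0x10FFFF
local CHARMAP_ROW = 128
local CHARMAP_COL = math.floor(MAX_UNICODE / CHARMAP_ROW)

local function charmap_index(codepoint)
  local row = math.floor(codepoint / CHARMAP_COL)
  local col = codepoint - row * CHARMAP_COL
  return row, col
end

-- e.g. charmap_index(0x4E2D) -> 2, 2607  (only the rows that are hit get allocated)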
@ -72,7 +128,7 @@ static const char* utf8_to_codepoint(const char *p, unsigned *dst) {
case 0xc0 : res = *up & 0x1f; n = 1; break; case 0xc0 : res = *up & 0x1f; n = 1; break;
default : res = *up; n = 0; break; default : res = *up; n = 0; break;
} }
while (n--) { while (up < (const unsigned char *)endp && n--) {
res = (res << 6) | (*(++up) & 0x3f); res = (res << 6) | (*(++up) & 0x3f);
} }
*dst = res; *dst = res;
@ -92,7 +148,7 @@ static int font_set_render_options(RenFont* font) {
if (font->antialiasing == FONT_ANTIALIASING_SUBPIXEL) { if (font->antialiasing == FONT_ANTIALIASING_SUBPIXEL) {
unsigned char weights[] = { 0x10, 0x40, 0x70, 0x40, 0x10 } ; unsigned char weights[] = { 0x10, 0x40, 0x70, 0x40, 0x10 } ;
switch (font->hinting) { switch (font->hinting) {
case FONT_HINTING_NONE: FT_Library_SetLcdFilter(library, FT_LCD_FILTER_NONE); break; case FONT_HINTING_NONE: FT_Library_SetLcdFilter(library, FT_LCD_FILTER_NONE); break;
case FONT_HINTING_SLIGHT: case FONT_HINTING_SLIGHT:
case FONT_HINTING_FULL: FT_Library_SetLcdFilterWeights(library, weights); break; case FONT_HINTING_FULL: FT_Library_SetLcdFilterWeights(library, weights); break;
} }
@ -120,97 +176,201 @@ static int font_set_style(FT_Outline* outline, int x_translation, unsigned char
return 0; return 0;
} }
static void font_load_glyphset(RenFont* font, int idx) { static unsigned int font_get_glyph_id(RenFont *font, unsigned int codepoint) {
unsigned int render_option = font_set_render_options(font), load_option = font_set_load_options(font); if (codepoint > MAX_UNICODE) return 0;
int bitmaps_cached = font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? SUBPIXEL_BITMAPS_CACHED : 1; size_t row = codepoint / CHARMAP_COL;
unsigned int byte_width = font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? 3 : 1; size_t col = codepoint - (row * CHARMAP_COL);
for (int j = 0, pen_x = 0; j < bitmaps_cached; ++j) { if (!font->charmap.rows[row]) font->charmap.rows[row] = check_alloc(calloc(sizeof(unsigned int), CHARMAP_COL));
GlyphSet* set = check_alloc(calloc(1, sizeof(GlyphSet))); if (font->charmap.rows[row][col] == 0) {
font->sets[j][idx] = set; unsigned int glyph_id = FT_Get_Char_Index(font->face, codepoint);
for (int i = 0; i < GLYPHSET_SIZE; ++i) { // use -1 as a sentinel value for "glyph not available", a bit risky, but OpenType
int glyph_index = FT_Get_Char_Index(font->face, i + idx * GLYPHSET_SIZE); // uses uint16 to store glyph IDs. In theory this cannot ever be reached
if (!glyph_index || FT_Load_Glyph(font->face, glyph_index, load_option | FT_LOAD_BITMAP_METRICS_ONLY) font->charmap.rows[row][col] = glyph_id ? glyph_id : (unsigned int) -1;
|| font_set_style(&font->face->glyph->outline, j * (64 / SUBPIXEL_BITMAPS_CACHED), font->style) || FT_Render_Glyph(font->face->glyph, render_option)) { }
continue; return font->charmap.rows[row][col] == (unsigned int) -1 ? 0 : font->charmap.rows[row][col];
} }
FT_GlyphSlot slot = font->face->glyph;
unsigned int glyph_width = slot->bitmap.width / byte_width; #define FONT_IS_SUBPIXEL(F) ((F)->antialiasing == FONT_ANTIALIASING_SUBPIXEL)
if (font->antialiasing == FONT_ANTIALIASING_NONE) #define FONT_BITMAP_COUNT(F) ((F)->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? SUBPIXEL_BITMAPS_CACHED : 1)
glyph_width *= 8;
set->metrics[i] = (GlyphMetric){ pen_x, pen_x + glyph_width, 0, slot->bitmap.rows, true, slot->bitmap_left, slot->bitmap_top, (slot->advance.x + slot->lsb_delta - slot->rsb_delta) / 64.0f}; static SDL_Surface *font_allocate_glyph_surface(RenFont *font, FT_GlyphSlot slot, int bitmap_idx, GlyphMetric *metric) {
pen_x += glyph_width; // get an atlas with the correct width
font->max_height = slot->bitmap.rows > font->max_height ? slot->bitmap.rows : font->max_height; int atlas_idx = -1;
// In order to fix issues with monospacing; we need the unhinted xadvance; as FreeType doesn't correctly report the hinted advance for spaces on monospace fonts (like RobotoMono). See #843. for (int i = 0; i < font->glyphs.natlas; i++) {
if (!glyph_index || FT_Load_Glyph(font->face, glyph_index, (load_option | FT_LOAD_BITMAP_METRICS_ONLY | FT_LOAD_NO_HINTING) & ~FT_LOAD_FORCE_AUTOHINT) if (font->glyphs.atlas[bitmap_idx][i].width >= metric->x1) {
|| font_set_style(&font->face->glyph->outline, j * (64 / SUBPIXEL_BITMAPS_CACHED), font->style) || FT_Render_Glyph(font->face->glyph, render_option)) { atlas_idx = i;
continue; break;
}
slot = font->face->glyph;
set->metrics[i].xadvance = slot->advance.x / 64.0f;
} }
if (pen_x == 0) }
if (atlas_idx < 0) {
// create a new atlas with the correct width, for each subpixel bitmap
for (int i = 0; i < FONT_BITMAP_COUNT(font); i++) {
font->glyphs.atlas[i] = check_alloc(realloc(font->glyphs.atlas[i], sizeof(GlyphAtlas) * (font->glyphs.natlas + 1)));
font->glyphs.atlas[i][font->glyphs.natlas] = (GlyphAtlas) {
.width = metric->x1 + FONT_WIDTH_OVERFLOW_PX, .nsurface = 0,
.surfaces = NULL,
};
}
font->glyphs.bytesize += sizeof(GlyphAtlas);
atlas_idx = font->glyphs.natlas++;
}
metric->atlas_idx = atlas_idx;
GlyphAtlas *atlas = &font->glyphs.atlas[bitmap_idx][atlas_idx];
  // best fit: find the surface with the least remaining vertical space that still fits the glyph (only the last 100 surfaces are checked)
int surface_idx = -1, max_surface_idx = (int) atlas->nsurface - 100, min_waste = INT_MAX;
for (int i = atlas->nsurface - 1; i >= 0 && i > max_surface_idx; i--) {
assert(atlas->surfaces[i]->userdata);
GlyphMetric *m = (GlyphMetric *) atlas->surfaces[i]->userdata;
int new_min_waste = (int) atlas->surfaces[i]->h - (int) m->y1;
if (new_min_waste >= metric->y1 && new_min_waste < min_waste) {
surface_idx = i;
min_waste = new_min_waste;
}
}
if (surface_idx < 0) {
// allocate a new surface array, and a surface
int h = FONT_HEIGHT_OVERFLOW_PX + (double) font->face->size->metrics.height / 64.0f;
if (h <= FONT_HEIGHT_OVERFLOW_PX) h += slot->bitmap.rows;
if (h <= FONT_HEIGHT_OVERFLOW_PX) h += font->size;
atlas->surfaces = check_alloc(realloc(atlas->surfaces, sizeof(SDL_Surface *) * (atlas->nsurface + 1)));
atlas->surfaces[atlas->nsurface] = check_alloc(SDL_CreateRGBSurface(
0, atlas->width, GLYPHS_PER_ATLAS * h, FONT_BITMAP_COUNT(font) * 8,
0, 0, 0, 0
));
atlas->surfaces[atlas->nsurface]->userdata = NULL;
surface_idx = atlas->nsurface++;
font->glyphs.bytesize += (sizeof(SDL_Surface *) + sizeof(SDL_Surface) + atlas->width * GLYPHS_PER_ATLAS * h * FONT_BITMAP_COUNT(font));
}
metric->surface_idx = surface_idx;
if (atlas->surfaces[surface_idx]->userdata) {
GlyphMetric *last_metric = (GlyphMetric *) atlas->surfaces[surface_idx]->userdata;
metric->y0 = last_metric->y1; metric->y1 += last_metric->y1;
}
atlas->surfaces[surface_idx]->userdata = (void *) metric;
return atlas->surfaces[surface_idx];
}
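Note: font_allocate_glyph_surface groups glyphs into atlases by width and stacks them top to bottom on tall surfaces, with each surface's userdata pointing at the metric of the last glyph placed on it. Worked through with illustrative numbers for a 13 px wide, 18 px tall glyph:

  1. pick the first atlas whose width is at least 13 px, or create a new one of width 13 px + FONT_WIDTH_OVERFLOW_PX;
  2. scan that atlas's most recent 100 surfaces; for each, the free height is the surface height minus the y1 of the last glyph placed on it;
  3. take the surface with the smallest free height that is still >= 18 px (best fit), or allocate a fresh surface roughly GLYPHS_PER_ATLAS line heights tall;
  4. the new glyph's y0 starts at the previous glyph's y1, and the surface's userdata is updated to point at the new metric.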
static void font_load_glyph(RenFont *font, unsigned int glyph_id) {
unsigned int render_option = font_set_render_options(font);
unsigned int load_option = font_set_load_options(font);
  // load the glyph without hinting to work around an issue with monospaced fonts,
  // where FreeType doesn't report the correct LSB and RSB deltas. Transformations & subpixel positioning
  // don't affect the xadvance, so we can save some time by doing this step only once
if (FT_Load_Glyph(font->face, glyph_id, (load_option | FT_LOAD_BITMAP_METRICS_ONLY | FT_LOAD_NO_HINTING) & ~FT_LOAD_FORCE_AUTOHINT) != 0)
return;
double unhinted_xadv = font->face->glyph->advance.x / 64.0f;
  // render the glyph for each cached bitmap (one per subpixel offset when subpixel antialiasing is used)
int bitmaps = FONT_BITMAP_COUNT(font);
int row = glyph_id / GLYPHMAP_COL, col = glyph_id - (row * GLYPHMAP_COL);
for (int bitmap_idx = 0; bitmap_idx < bitmaps; bitmap_idx++) {
FT_GlyphSlot slot = font->face->glyph;
if (FT_Load_Glyph(font->face, glyph_id, load_option | FT_LOAD_BITMAP_METRICS_ONLY) != 0
|| font_set_style(&slot->outline, bitmap_idx * (64 / SUBPIXEL_BITMAPS_CACHED), font->style) != 0
|| FT_Render_Glyph(slot, render_option) != 0)
return;
// save the metrics
if (!font->glyphs.metrics[bitmap_idx][row]) {
font->glyphs.metrics[bitmap_idx][row] = check_alloc(calloc(sizeof(GlyphMetric), GLYPHMAP_COL));
font->glyphs.bytesize += sizeof(GlyphMetric) * GLYPHMAP_COL;
}
GlyphMetric *metric = &font->glyphs.metrics[bitmap_idx][row][col];
metric->flags = EGlyphLoaded;
metric->xadvance = unhinted_xadv;
// if this bitmap is empty, or has a format we don't support, just store the xadvance
if (!slot->bitmap.width || !slot->bitmap.rows || !slot->bitmap.buffer ||
(slot->bitmap.pixel_mode != FT_PIXEL_MODE_MONO
&& slot->bitmap.pixel_mode != FT_PIXEL_MODE_GRAY
&& slot->bitmap.pixel_mode != FT_PIXEL_MODE_LCD))
continue; continue;
set->surface = check_alloc(SDL_CreateRGBSurface(0, pen_x, font->max_height, font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? 24 : 8, 0, 0, 0, 0));
uint8_t* pixels = set->surface->pixels; unsigned int glyph_width = slot->bitmap.width / bitmaps;
for (int i = 0; i < GLYPHSET_SIZE; ++i) { // FT_PIXEL_MODE_MONO uses a 1-bit-per-pixel packed bitmap
int glyph_index = FT_Get_Char_Index(font->face, i + idx * GLYPHSET_SIZE); if (slot->bitmap.pixel_mode == FT_PIXEL_MODE_MONO) glyph_width *= 8;
if (!glyph_index || FT_Load_Glyph(font->face, glyph_index, load_option))
continue; metric->x1 = glyph_width;
FT_GlyphSlot slot = font->face->glyph; metric->y1 = slot->bitmap.rows;
font_set_style(&slot->outline, (64 / bitmaps_cached) * j, font->style); metric->bitmap_left = slot->bitmap_left;
if (FT_Render_Glyph(slot, render_option)) metric->bitmap_top = slot->bitmap_top;
continue; metric->flags |= (EGlyphBitmap | EGlyphDualSource);
for (unsigned int line = 0; line < slot->bitmap.rows; ++line) {
int target_offset = set->surface->pitch * line + set->metrics[i].x0 * byte_width; // find the best surface to copy the glyph over, and copy it
int source_offset = line * slot->bitmap.pitch; SDL_Surface *surface = font_allocate_glyph_surface(font, slot, bitmap_idx, metric);
if (font->antialiasing == FONT_ANTIALIASING_NONE) { uint8_t* pixels = surface->pixels;
for (unsigned int column = 0; column < slot->bitmap.width; ++column) { for (unsigned int line = 0; line < slot->bitmap.rows; ++line) {
int current_source_offset = source_offset + (column / 8); int target_offset = surface->pitch * (line + metric->y0); // x0 is always assumed to be 0
int source_pixel = slot->bitmap.buffer[current_source_offset]; int source_offset = line * slot->bitmap.pitch;
pixels[++target_offset] = ((source_pixel >> (7 - (column % 8))) & 0x1) * 0xFF; if (font->antialiasing == FONT_ANTIALIASING_NONE) {
} for (unsigned int column = 0; column < slot->bitmap.width; ++column) {
} else int current_source_offset = source_offset + (column / 8);
memcpy(&pixels[target_offset], &slot->bitmap.buffer[source_offset], slot->bitmap.width); int source_pixel = slot->bitmap.buffer[current_source_offset];
pixels[++target_offset] = ((source_pixel >> (7 - (column % 8))) & 0x1) * 0xFF;
}
} else {
memcpy(&pixels[target_offset], &slot->bitmap.buffer[source_offset], slot->bitmap.width);
} }
} }
} }
} }
static GlyphSet* font_get_glyphset(RenFont* font, unsigned int codepoint, int subpixel_idx) { // https://en.wikipedia.org/wiki/Whitespace_character
int idx = (codepoint / GLYPHSET_SIZE) % MAX_LOADABLE_GLYPHSETS; static inline int is_whitespace(unsigned int codepoint) {
if (!font->sets[font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? subpixel_idx : 0][idx]) switch (codepoint) {
font_load_glyphset(font, idx); case 0x20: case 0x85: case 0xA0: case 0x1680: case 0x2028: case 0x2029: case 0x202F: case 0x205F: case 0x3000: return 1;
return font->sets[font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? subpixel_idx : 0][idx]; }
return (codepoint >= 0x9 && codepoint <= 0xD) || (codepoint >= 0x2000 && codepoint <= 0x200A);
} }
static RenFont* font_group_get_glyph(GlyphSet** set, GlyphMetric** metric, RenFont** fonts, unsigned int codepoint, int bitmap_index) { static inline GlyphMetric *font_get_glyph(RenFont *font, unsigned int glyph_id, int subpixel_idx) {
if (!metric) { int row = glyph_id / GLYPHMAP_COL, col = glyph_id - (row * GLYPHMAP_COL);
return NULL; return font->glyphs.metrics[subpixel_idx][row] ? &font->glyphs.metrics[subpixel_idx][row][col] : NULL;
}
static RenFont *font_group_get_glyph(RenFont **fonts, unsigned int codepoint, int subpixel_idx, SDL_Surface **surface, GlyphMetric **metric) {
if (subpixel_idx < 0) subpixel_idx += SUBPIXEL_BITMAPS_CACHED;
RenFont *font = NULL;
unsigned int glyph_id = 0;
for (int i = 0; i < FONT_FALLBACK_MAX && fonts[i]; i++) {
font = fonts[i]; glyph_id = font_get_glyph_id(fonts[i], codepoint);
// use the first font that provides a glyph for the codepoint; for whitespace, always use the first font
if (glyph_id || is_whitespace(codepoint)) break;
} }
if (bitmap_index < 0) // load the glyph if it is not loaded
bitmap_index += SUBPIXEL_BITMAPS_CACHED; subpixel_idx = FONT_IS_SUBPIXEL(font) ? subpixel_idx : 0;
for (int i = 0; i < FONT_FALLBACK_MAX && fonts[i]; ++i) { GlyphMetric *m = font_get_glyph(font, glyph_id, subpixel_idx);
*set = font_get_glyphset(fonts[i], codepoint, bitmap_index); if (!m || !(m->flags & EGlyphLoaded)) font_load_glyph(font, glyph_id);
*metric = &(*set)->metrics[codepoint % GLYPHSET_SIZE]; // if the glyph ID (possibly 0) is not available and we are not trying to load whitespace, try to load U+25A1 (box character)
if ((*metric)->loaded || codepoint < 0xFF) if ((!m || !(m->flags & EGlyphLoaded)) && codepoint != 0x25A1 && !is_whitespace(codepoint))
return fonts[i]; return font_group_get_glyph(fonts, 0x25A1, subpixel_idx, surface, metric);
} // fetch the glyph metrics again and save it
if (*metric && !(*metric)->loaded && codepoint > 0xFF && codepoint != 0x25A1) m = font_get_glyph(font, glyph_id, subpixel_idx);
return font_group_get_glyph(set, metric, fonts, 0x25A1, bitmap_index); if (metric && m) *metric = m;
return fonts[0]; if (surface && m && m->flags & EGlyphBitmap) *surface = font->glyphs.atlas[subpixel_idx][m->atlas_idx].surfaces[m->surface_idx];
return font;
} }
static void font_clear_glyph_cache(RenFont* font) { static void font_clear_glyph_cache(RenFont* font) {
for (int i = 0; i < SUBPIXEL_BITMAPS_CACHED; ++i) { int bitmaps = FONT_BITMAP_COUNT(font);
for (int j = 0; j < MAX_LOADABLE_GLYPHSETS; ++j) { for (int bitmap_idx = 0; bitmap_idx < bitmaps; bitmap_idx++) {
if (font->sets[i][j]) { for (int atlas_idx = 0; atlas_idx < font->glyphs.natlas; atlas_idx++) {
if (font->sets[i][j]->surface) GlyphAtlas *atlas = &font->glyphs.atlas[bitmap_idx][atlas_idx];
SDL_FreeSurface(font->sets[i][j]->surface); for (int surface_idx = 0; surface_idx < atlas->nsurface; surface_idx++) {
free(font->sets[i][j]); SDL_FreeSurface(atlas->surfaces[surface_idx]);
font->sets[i][j] = NULL;
} }
free(atlas->surfaces);
}
free(font->glyphs.atlas[bitmap_idx]);
font->glyphs.atlas[bitmap_idx] = NULL;
// clear glyph metrics
for (int glyphmap_row = 0; glyphmap_row < GLYPHMAP_ROW; glyphmap_row++) {
free(font->glyphs.metrics[bitmap_idx][glyphmap_row]);
font->glyphs.metrics[bitmap_idx][glyphmap_row] = NULL;
} }
} }
font->glyphs.bytesize = 0;
font->glyphs.natlas = 0;
} }
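Note: the rewrite replaces the fixed glyph sets with two lazily allocated, two-level tables: a codepoint to glyph-ID charmap, where 0 means "not looked up yet" and (unsigned int)-1 marks codepoints the face cannot render, plus a per-bitmap glyph-metric map indexed the same way. A standalone sketch of the charmap lookup follows; ROWS, COLS and the resolver callback are stand-ins, since the real MAX_UNICODE, CHARMAP_ROW and CHARMAP_COL constants are defined outside this hunk.

  #include <stdlib.h>

  #define COLS 256                       /* stands in for CHARMAP_COL */
  #define ROWS ((0x10FFFF / COLS) + 1)   /* stands in for CHARMAP_ROW, covers all of Unicode */

  typedef struct { unsigned int *rows[ROWS]; } CharMap;

  /* resolver is the face-specific lookup, e.g. a wrapper around FT_Get_Char_Index */
  static unsigned int charmap_get(CharMap *cm, unsigned int cp,
                                  unsigned int (*resolver)(unsigned int)) {
    if (cp > 0x10FFFF) return 0;
    size_t row = cp / COLS, col = cp % COLS;
    if (!cm->rows[row]) cm->rows[row] = calloc(COLS, sizeof(unsigned int));
    if (!cm->rows[row]) return 0;                 /* allocation failed: report "no glyph" */
    if (cm->rows[row][col] == 0) {                /* 0 doubles as "not looked up yet" */
      unsigned int id = resolver(cp);
      cm->rows[row][col] = id ? id : (unsigned int) -1;  /* -1 = face has no glyph */
    }
    return cm->rows[row][col] == (unsigned int) -1 ? 0 : cm->rows[row][col];
  }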
// based on https://github.com/libsdl-org/SDL_ttf/blob/2a094959055fba09f7deed6e1ffeb986188982ae/SDL_ttf.c#L1735 // based on https://github.com/libsdl-org/SDL_ttf/blob/2a094959055fba09f7deed6e1ffeb986188982ae/SDL_ttf.c#L1735
@ -227,66 +387,72 @@ static unsigned long font_file_read(FT_Stream stream, unsigned long offset, unsi
} }
static void font_file_close(FT_Stream stream) { static void font_file_close(FT_Stream stream) {
if (stream && stream->descriptor.pointer) { if (stream && stream->descriptor.pointer)
SDL_RWclose((SDL_RWops *) stream->descriptor.pointer); SDL_RWclose((SDL_RWops *) stream->descriptor.pointer);
stream->descriptor.pointer = NULL; free(stream);
}
} }
RenFont* ren_font_load(RenWindow *window_renderer, const char* path, float size, ERenFontAntialiasing antialiasing, ERenFontHinting hinting, unsigned char style) { static int font_set_face_metrics(RenFont *font, FT_Face face) {
RenFont *font = NULL; FT_Error err;
FT_Face face = NULL; if ((err = FT_Set_Pixel_Sizes(face, 0, (int) font->size)) != 0)
return err;
SDL_RWops *file = SDL_RWFromFile(path, "rb");
if (!file)
goto rwops_failure;
int len = strlen(path);
font = check_alloc(calloc(1, sizeof(RenFont) + len + 1));
font->stream.read = font_file_read;
font->stream.close = font_file_close;
font->stream.descriptor.pointer = file;
font->stream.pos = 0;
font->stream.size = (unsigned long) SDL_RWsize(file);
if (FT_Open_Face(library, &(FT_Open_Args){ .flags = FT_OPEN_STREAM, .stream = &font->stream }, 0, &face))
goto failure;
const int surface_scale = renwin_get_surface(window_renderer).scale;
if (FT_Set_Pixel_Sizes(face, 0, (int)(size*surface_scale)))
goto failure;
strcpy(font->path, path);
font->face = face; font->face = face;
font->size = size; if(FT_IS_SCALABLE(face)) {
font->height = (short)((face->height / (float)face->units_per_EM) * font->size); font->height = (short)((face->height / (float)face->units_per_EM) * font->size);
font->baseline = (short)((face->ascender / (float)face->units_per_EM) * font->size); font->baseline = (short)((face->ascender / (float)face->units_per_EM) * font->size);
font->antialiasing = antialiasing;
font->hinting = hinting;
font->style = style;
if(FT_IS_SCALABLE(face))
font->underline_thickness = (unsigned short)((face->underline_thickness / (float)face->units_per_EM) * font->size); font->underline_thickness = (unsigned short)((face->underline_thickness / (float)face->units_per_EM) * font->size);
} else {
font->height = (short) font->face->size->metrics.height / 64.0f;
font->baseline = (short) font->face->size->metrics.ascender / 64.0f;
}
if(!font->underline_thickness) if(!font->underline_thickness)
font->underline_thickness = ceil((double) font->height / 14.0); font->underline_thickness = ceil((double) font->height / 14.0);
if (FT_Load_Char(face, ' ', font_set_load_options(font))) if ((err = FT_Load_Char(face, ' ', (font_set_load_options(font) | FT_LOAD_BITMAP_METRICS_ONLY | FT_LOAD_NO_HINTING) & ~FT_LOAD_FORCE_AUTOHINT)) != 0)
goto failure; return err;
font->space_advance = face->glyph->advance.x / 64.0f; font->space_advance = face->glyph->advance.x / 64.0f;
font->tab_advance = font->space_advance * 2; return 0;
}
RenFont* ren_font_load(RenWindow *window_renderer, const char* path, float size, ERenFontAntialiasing antialiasing, ERenFontHinting hinting, unsigned char style) {
SDL_RWops *file = NULL; RenFont *font = NULL;
FT_Face face = NULL; FT_Stream stream = NULL;
file = SDL_RWFromFile(path, "rb");
if (!file) return NULL;
int len = strlen(path);
font = check_alloc(calloc(1, sizeof(RenFont) + len + 1));
strcpy(font->path, path);
font->size = size;
font->antialiasing = antialiasing;
font->hinting = hinting;
font->style = style;
font->tab_size = 2;
#ifdef LITE_USE_SDL_RENDERER
font->scale = 1;
#endif
stream = check_alloc(calloc(1, sizeof(FT_StreamRec)));
if (!stream) goto stream_failure;
stream->read = &font_file_read;
stream->close = &font_file_close;
stream->descriptor.pointer = file;
stream->pos = 0;
stream->size = (unsigned long) SDL_RWsize(file);
if (FT_Open_Face(library, &(FT_Open_Args) { .flags = FT_OPEN_STREAM, .stream = stream }, 0, &face) != 0)
goto failure;
if (font_set_face_metrics(font, face) != 0)
goto failure;
return font; return font;
stream_failure:
if (file) SDL_RWclose(file);
failure: failure:
if (face) if (face) FT_Done_Face(face);
FT_Done_Face(face); if (font) free(font);
if (font)
free(font);
return NULL;
rwops_failure:
if (file)
SDL_RWclose(file);
return NULL; return NULL;
} }
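Note: ren_font_load now hands FreeType a caller-allocated FT_StreamRec backed by an SDL_RWops instead of reading the whole file up front. FreeType invokes the stream's close callback when the face is destroyed and, for a caller-provided stream, leaves the FT_StreamRec itself to the caller, which is why font_file_close above both closes the SDL_RWops and frees the stream. The body of font_file_read falls outside the visible hunk; the following is a sketch of what the FreeType stream contract expects from it (a zero count means "just seek", and the return value is then an error code rather than a byte count).

  /* Sketch of an FT_Stream_IoFunc over SDL_RWops; the in-tree font_file_read may differ in detail. */
  static unsigned long rwops_stream_read(FT_Stream stream, unsigned long offset,
                                         unsigned char *buffer, unsigned long count) {
    SDL_RWops *file = (SDL_RWops *) stream->descriptor.pointer;
    if (SDL_RWseek(file, offset, RW_SEEK_SET) < 0)
      return count == 0 ? 1 : 0;   /* seek-only calls report failure with a non-zero value */
    if (count == 0)
      return 0;                    /* successful seek */
    return (unsigned long) SDL_RWread(file, buffer, 1, count);
  }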
@ -304,25 +470,22 @@ const char* ren_font_get_path(RenFont *font) {
void ren_font_free(RenFont* font) { void ren_font_free(RenFont* font) {
font_clear_glyph_cache(font); font_clear_glyph_cache(font);
// free codepoint cache as well
for (int i = 0; i < CHARMAP_ROW; i++) {
free(font->charmap.rows[i]);
}
FT_Done_Face(font->face); FT_Done_Face(font->face);
free(font); free(font);
} }
void ren_font_group_set_tab_size(RenFont **fonts, int n) { void ren_font_group_set_tab_size(RenFont **fonts, int n) {
unsigned int tab_index = '\t' % GLYPHSET_SIZE;
for (int j = 0; j < FONT_FALLBACK_MAX && fonts[j]; ++j) { for (int j = 0; j < FONT_FALLBACK_MAX && fonts[j]; ++j) {
for (int i = 0; i < (fonts[j]->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? SUBPIXEL_BITMAPS_CACHED : 1); ++i) fonts[j]->tab_size = n;
font_get_glyphset(fonts[j], '\t', i)->metrics[tab_index].xadvance = fonts[j]->space_advance * n;
} }
} }
int ren_font_group_get_tab_size(RenFont **fonts) { int ren_font_group_get_tab_size(RenFont **fonts) {
unsigned int tab_index = '\t' % GLYPHSET_SIZE; return fonts[0]->tab_size;
float advance = font_get_glyphset(fonts[0], '\t', 0)->metrics[tab_index].xadvance;
if (fonts[0]->space_advance) {
advance /= fonts[0]->space_advance;
}
return advance;
} }
float ren_font_group_get_size(RenFont **fonts) { float ren_font_group_get_size(RenFont **fonts) {
@ -333,14 +496,12 @@ void ren_font_group_set_size(RenWindow *window_renderer, RenFont **fonts, float
const int surface_scale = renwin_get_surface(window_renderer).scale; const int surface_scale = renwin_get_surface(window_renderer).scale;
for (int i = 0; i < FONT_FALLBACK_MAX && fonts[i]; ++i) { for (int i = 0; i < FONT_FALLBACK_MAX && fonts[i]; ++i) {
font_clear_glyph_cache(fonts[i]); font_clear_glyph_cache(fonts[i]);
FT_Face face = fonts[i]->face;
FT_Set_Pixel_Sizes(face, 0, (int)(size*surface_scale));
fonts[i]->size = size; fonts[i]->size = size;
fonts[i]->height = (short)((face->height / (float)face->units_per_EM) * size); fonts[i]->tab_size = 2;
fonts[i]->baseline = (short)((face->ascender / (float)face->units_per_EM) * size); #ifdef LITE_USE_SDL_RENDERER
FT_Load_Char(face, ' ', font_set_load_options(fonts[i])); fonts[i]->scale = surface_scale;
fonts[i]->space_advance = face->glyph->advance.x / 64.0f; #endif
fonts[i]->tab_advance = fonts[i]->space_advance * 2; font_set_face_metrics(fonts[i], fonts[i]->face);
} }
} }
@ -348,30 +509,51 @@ int ren_font_group_get_height(RenFont **fonts) {
return fonts[0]->height; return fonts[0]->height;
} }
// some fonts report an xadvance for whitespace characters (e.g. Unifont), which we need to ignore
#define FONT_GET_XADVANCE(F, C, M) (is_whitespace((C)) || !(M) || !(M)->xadvance \
? (F)->space_advance * (float) ((C) == '\t' ? (F)->tab_size : 1) \
: (M)->xadvance)
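Note: tab width is now resolved at measurement and draw time from the per-font tab_size instead of patching the cached '\t' metric, and whitespace always falls back to space_advance even when the face reports its own advance. With illustrative numbers (space_advance = 8 px, tab_size = 4):

  '\t'              ->  space_advance * tab_size = 8 px * 4 = 32 px
  ' ' and U+00A0    ->  space_advance = 8 px (any xadvance the face reports is ignored)
  other codepoints  ->  metric->xadvance, falling back to space_advance when no metric is cached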
double ren_font_group_get_width(RenWindow *window_renderer, RenFont **fonts, const char *text, size_t len, int *x_offset) { double ren_font_group_get_width(RenWindow *window_renderer, RenFont **fonts, const char *text, size_t len, int *x_offset) {
double width = 0; double width = 0;
const char* end = text + len; const char* end = text + len;
GlyphMetric* metric = NULL; GlyphSet* set = NULL;
bool set_x_offset = x_offset == NULL; bool set_x_offset = x_offset == NULL;
while (text < end) { while (text < end) {
unsigned int codepoint; unsigned int codepoint;
text = utf8_to_codepoint(text, &codepoint); text = utf8_to_codepoint(text, end, &codepoint);
RenFont* font = font_group_get_glyph(&set, &metric, fonts, codepoint, 0); GlyphMetric *metric = NULL;
if (!metric) font_group_get_glyph(fonts, codepoint, 0, NULL, &metric);
break; width += FONT_GET_XADVANCE(fonts[0], codepoint, metric);
width += (!font || metric->xadvance) ? metric->xadvance : fonts[0]->space_advance; if (!set_x_offset && metric) {
if (!set_x_offset) {
set_x_offset = true; set_x_offset = true;
*x_offset = metric->bitmap_left; // TODO: should this be scaled by the surface scale? *x_offset = metric->bitmap_left; // TODO: should this be scaled by the surface scale?
} }
} }
const int surface_scale = renwin_get_surface(window_renderer).scale; const int surface_scale = renwin_get_surface(window_renderer).scale;
if (!set_x_offset) { if (!set_x_offset)
*x_offset = 0; *x_offset = 0;
}
return width / surface_scale; return width / surface_scale;
} }
#ifdef RENDERER_DEBUG
// this function can be used to debug font atlases; it is not public
void ren_font_dump(RenFont *font) {
char filename[1024];
int bitmaps = FONT_BITMAP_COUNT(font);
for (int bitmap_idx = 0; bitmap_idx < bitmaps; bitmap_idx++) {
for (int atlas_idx = 0; atlas_idx < font->glyphs.natlas; atlas_idx++) {
GlyphAtlas *atlas = &font->glyphs.atlas[bitmap_idx][atlas_idx];
for (int surface_idx = 0; surface_idx < atlas->nsurface; surface_idx++) {
snprintf(filename, 1024, "%s-%d-%d-%d.bmp", font->face->family_name, bitmap_idx, atlas_idx, surface_idx);
SDL_SaveBMP(atlas->surfaces[surface_idx], filename);
}
}
}
fprintf(stderr, "%s: %zu bytes\n", font->face->family_name, font->glyphs.bytesize);
}
#endif
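A hypothetical call site for the debug helper above; it only exists when the build defines RENDERER_DEBUG and is deliberately absent from renderer.h, so it has to be declared locally:

  #ifdef RENDERER_DEBUG
  extern void ren_font_dump(RenFont *font);  /* not exported in renderer.h */
  ren_font_dump(fonts[0]);                   /* writes one BMP per atlas surface into the working directory */
  #endif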
double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t len, float x, int y, RenColor color) { double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t len, float x, int y, RenColor color) {
SDL_Surface *surface = rs->surface; SDL_Surface *surface = rs->surface;
SDL_Rect clip; SDL_Rect clip;
@ -380,7 +562,6 @@ double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t l
const int surface_scale = rs->scale; const int surface_scale = rs->scale;
double pen_x = x * surface_scale; double pen_x = x * surface_scale;
y *= surface_scale; y *= surface_scale;
int bytes_per_pixel = surface->format->BytesPerPixel;
const char* end = text + len; const char* end = text + len;
uint8_t* destination_pixels = surface->pixels; uint8_t* destination_pixels = surface->pixels;
int clip_end_x = clip.x + clip.w, clip_end_y = clip.y + clip.h; int clip_end_x = clip.x + clip.w, clip_end_y = clip.y + clip.h;
@ -392,20 +573,20 @@ double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t l
while (text < end) { while (text < end) {
unsigned int codepoint, r, g, b; unsigned int codepoint, r, g, b;
text = utf8_to_codepoint(text, &codepoint); text = utf8_to_codepoint(text, end, &codepoint);
GlyphSet* set = NULL; GlyphMetric* metric = NULL; SDL_Surface *font_surface = NULL; GlyphMetric *metric = NULL;
RenFont* font = font_group_get_glyph(&set, &metric, fonts, codepoint, (int)(fmod(pen_x, 1.0) * SUBPIXEL_BITMAPS_CACHED)); RenFont* font = font_group_get_glyph(fonts, codepoint, (int)(fmod(pen_x, 1.0) * SUBPIXEL_BITMAPS_CACHED), &font_surface, &metric);
if (!metric) if (!metric)
break; break;
int start_x = floor(pen_x) + metric->bitmap_left; int start_x = floor(pen_x) + metric->bitmap_left;
int end_x = (metric->x1 - metric->x0) + start_x; int end_x = metric->x1 + start_x; // x0 is assumed to be 0
int glyph_end = metric->x1, glyph_start = metric->x0; int glyph_end = metric->x1, glyph_start = 0;
if (!metric->loaded && codepoint > 0xFF) if (!font_surface && !is_whitespace(codepoint))
ren_draw_rect(rs, (RenRect){ start_x + 1, y, font->space_advance - 1, ren_font_group_get_height(fonts) }, color); ren_draw_rect(rs, (RenRect){ start_x + 1, y, font->space_advance - 1, ren_font_group_get_height(fonts) }, color);
if (set->surface && color.a > 0 && end_x >= clip.x && start_x < clip_end_x) { if (!is_whitespace(codepoint) && font_surface && color.a > 0 && end_x >= clip.x && start_x < clip_end_x) {
uint8_t* source_pixels = set->surface->pixels; uint8_t* source_pixels = font_surface->pixels;
for (int line = metric->y0; line < metric->y1; ++line) { for (int line = metric->y0; line < metric->y1; ++line) {
int target_y = line + y - metric->bitmap_top + fonts[0]->baseline * surface_scale; int target_y = line - metric->y0 + y - metric->bitmap_top + (fonts[0]->baseline * surface_scale);
if (target_y < clip.y) if (target_y < clip.y)
continue; continue;
if (target_y >= clip_end_y) if (target_y >= clip_end_y)
@ -417,8 +598,8 @@ double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t l
start_x += offset; start_x += offset;
glyph_start += offset; glyph_start += offset;
} }
uint32_t* destination_pixel = (uint32_t*)&(destination_pixels[surface->pitch * target_y + start_x * bytes_per_pixel]); uint32_t* destination_pixel = (uint32_t*)&(destination_pixels[surface->pitch * target_y + start_x * surface->format->BytesPerPixel ]);
uint8_t* source_pixel = &source_pixels[line * set->surface->pitch + glyph_start * (font->antialiasing == FONT_ANTIALIASING_SUBPIXEL ? 3 : 1)]; uint8_t* source_pixel = &source_pixels[line * font_surface->pitch + glyph_start * font_surface->format->BytesPerPixel];
for (int x = glyph_start; x < glyph_end; ++x) { for (int x = glyph_start; x < glyph_end; ++x) {
uint32_t destination_color = *destination_pixel; uint32_t destination_color = *destination_pixel;
// the standard way of doing this would be SDL_GetRGBA, but that introduces a performance regression. needs to be investigated // the standard way of doing this would be SDL_GetRGBA, but that introduces a performance regression. needs to be investigated
@ -441,12 +622,12 @@ double ren_draw_text(RenSurface *rs, RenFont **fonts, const char *text, size_t l
g = (color.g * src.g * color.a + dst.g * (65025 - src.g * color.a) + 32767) / 65025; g = (color.g * src.g * color.a + dst.g * (65025 - src.g * color.a) + 32767) / 65025;
b = (color.b * src.b * color.a + dst.b * (65025 - src.b * color.a) + 32767) / 65025; b = (color.b * src.b * color.a + dst.b * (65025 - src.b * color.a) + 32767) / 65025;
// the standard way of doing this would be SDL_GetRGBA, but that introduces a performance regression. needs to be investigated // the standard way of doing this would be SDL_GetRGBA, but that introduces a performance regression. needs to be investigated
*destination_pixel++ = dst.a << surface->format->Ashift | r << surface->format->Rshift | g << surface->format->Gshift | b << surface->format->Bshift; *destination_pixel++ = (unsigned int) dst.a << surface->format->Ashift | r << surface->format->Rshift | g << surface->format->Gshift | b << surface->format->Bshift;
} }
} }
} }
float adv = metric->xadvance ? metric->xadvance : font->space_advance; float adv = FONT_GET_XADVANCE(fonts[0], codepoint, metric);
if(!last) last = font; if(!last) last = font;
else if(font != last || text == end) { else if(font != last || text == end) {
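Note: the per-channel arithmetic in the blend loop above is plain source-over compositing in 8-bit fixed point: 65025 is 255 * 255 and 32767 is an approximate half-unit added so the integer division rounds instead of truncating. Worked through for one channel with illustrative values:

  out = (color * src * a + dst * (65025 - src * a) + 32767) / 65025

  e.g. color = 255, a = 255, src = 128 (glyph coverage), dst = 0 (background):
  out = (255 * 128 * 255 + 0 + 32767) / 65025 = (8323200 + 32767) / 65025 = 128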

View File

@ -33,6 +33,9 @@ int ren_font_group_get_tab_size(RenFont **font);
int ren_font_group_get_height(RenFont **font); int ren_font_group_get_height(RenFont **font);
float ren_font_group_get_size(RenFont **font); float ren_font_group_get_size(RenFont **font);
void ren_font_group_set_size(RenWindow *window_renderer, RenFont **font, float size); void ren_font_group_set_size(RenWindow *window_renderer, RenFont **font, float size);
#ifdef LITE_USE_SDL_RENDERER
void update_font_scale(RenWindow *window_renderer, RenFont **fonts);
#endif
void ren_font_group_set_tab_size(RenFont **font, int n); void ren_font_group_set_tab_size(RenFont **font, int n);
double ren_font_group_get_width(RenWindow *window_renderer, RenFont **font, const char *text, size_t len, int *x_offset); double ren_font_group_get_width(RenWindow *window_renderer, RenFont **font, const char *text, size_t len, int *x_offset);
double ren_draw_text(RenSurface *rs, RenFont **font, const char *text, size_t len, float x, int y, RenColor color); double ren_draw_text(RenSurface *rs, RenFont **font, const char *text, size_t len, float x, int y, RenColor color);

View File

@ -95,12 +95,15 @@ RenSurface renwin_get_surface(RenWindow *ren) {
#endif #endif
} }
void renwin_resize_surface(UNUSED RenWindow *ren) { void renwin_resize_surface(RenWindow *ren) {
#ifdef LITE_USE_SDL_RENDERER #ifdef LITE_USE_SDL_RENDERER
int new_w, new_h; int new_w, new_h, new_scale;
SDL_GL_GetDrawableSize(ren->window, &new_w, &new_h); SDL_GL_GetDrawableSize(ren->window, &new_w, &new_h);
new_scale = query_surface_scale(ren);
/* Note that (w, h) may differ from (new_w, new_h) on retina displays. */ /* Note that (w, h) may differ from (new_w, new_h) on retina displays. */
if (new_w != ren->rensurface.surface->w || new_h != ren->rensurface.surface->h) { if (new_scale != ren->rensurface.scale ||
new_w != ren->rensurface.surface->w ||
new_h != ren->rensurface.surface->h) {
renwin_init_surface(ren); renwin_init_surface(ren);
renwin_clip_to_surface(ren); renwin_clip_to_surface(ren);
setup_renderer(ren, new_w, new_h); setup_renderer(ren, new_w, new_h);
@ -141,11 +144,11 @@ void renwin_update_rects(RenWindow *ren, RenRect *rects, int count) {
} }
void renwin_free(RenWindow *ren) { void renwin_free(RenWindow *ren) {
SDL_DestroyWindow(ren->window);
ren->window = NULL;
#ifdef LITE_USE_SDL_RENDERER #ifdef LITE_USE_SDL_RENDERER
SDL_DestroyTexture(ren->texture); SDL_DestroyTexture(ren->texture);
SDL_DestroyRenderer(ren->renderer); SDL_DestroyRenderer(ren->renderer);
SDL_FreeSurface(ren->rensurface.surface); SDL_FreeSurface(ren->rensurface.surface);
#endif #endif
SDL_DestroyWindow(ren->window);
ren->window = NULL;
} }

View File

@ -12,8 +12,34 @@
#include <stdlib.h> #include <stdlib.h>
#include <windows.h> #include <windows.h>
#include "arena_allocator.h"
#define UTFCONV_ERROR_INVALID_CONVERSION "Input contains invalid byte sequences." #define UTFCONV_ERROR_INVALID_CONVERSION "Input contains invalid byte sequences."
#define utfconv_fromutf8(A, str) utfconv_fromlutf8(A, str, -1)
static UNUSED LPWSTR utfconv_fromlutf8(lxl_arena *A, const char *str, int len) {
int output_len = MultiByteToWideChar(CP_UTF8, 0, str, len, NULL, 0);
if (!output_len) return NULL;
LPWSTR output = lxl_arena_malloc(A, sizeof(WCHAR) * output_len);
if (!output) return NULL;
output_len = MultiByteToWideChar(CP_UTF8, 0, str, len, output, output_len);
if (!output_len) return (lxl_arena_free(A, output), NULL);
return output;
}
#define utfconv_fromwstr(A, str) utfconv_fromlwstr(A, str, -1)
static UNUSED const char *utfconv_fromlwstr(lxl_arena *A, LPWSTR str, int len) {
int output_len = WideCharToMultiByte(CP_UTF8, 0, str, len, NULL, 0, NULL, NULL);
if (!output_len) return NULL;
char *output = lxl_arena_malloc(A, sizeof(char) * output_len);
if (!output) return NULL;
output_len = WideCharToMultiByte(CP_UTF8, 0, str, len, output, output_len, NULL, NULL);
if (!output_len) return (lxl_arena_free(A, output), NULL);
return (const char *) output;
}
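Note: the arena-backed converters above return NULL instead of aborting on conversion failure and leave lifetime management to the caller's arena. A hypothetical helper showing the intended usage; the arena A is assumed to come from arena_allocator.h, whose API is not part of this diff.

  /* Hypothetical: open a file from a UTF-8 path using the conversion helpers above. */
  static HANDLE open_utf8_path(lxl_arena *A, const char *utf8_path) {
    LPWSTR wpath = utfconv_fromutf8(A, utf8_path);
    if (!wpath) return INVALID_HANDLE_VALUE;   /* invalid UTF-8 or arena exhausted */
    return CreateFileW(wpath, GENERIC_READ, FILE_SHARE_READ, NULL,
                       OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL);
  }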
static UNUSED LPWSTR utfconv_utf8towc(const char *str) { static UNUSED LPWSTR utfconv_utf8towc(const char *str) {
LPWSTR output; LPWSTR output;
int len; int len;

View File

@ -1,13 +1,13 @@
[wrap-file] [wrap-file]
directory = pcre2-10.42 directory = pcre2-10.44
source_url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.42/pcre2-10.42.tar.bz2 source_url = https://github.com/PCRE2Project/pcre2/releases/download/pcre2-10.44/pcre2-10.44.tar.bz2
source_filename = pcre2-10.42.tar.bz2 source_filename = pcre2-10.44.tar.bz2
source_hash = 8d36cd8cb6ea2a4c2bb358ff6411b0c788633a2a45dabbf1aeb4b701d1b5e840 source_hash = d34f02e113cf7193a1ebf2770d3ac527088d485d4e047ed10e5d217c6ef5de96
patch_filename = pcre2_10.42-5_patch.zip patch_filename = pcre2_10.44-2_patch.zip
patch_url = https://wrapdb.mesonbuild.com/v2/pcre2_10.42-5/get_patch patch_url = https://wrapdb.mesonbuild.com/v2/pcre2_10.44-2/get_patch
patch_hash = 7ba1730a3786c46f41735658a9884b09bc592af3840716e0ccc552e7ddf5630c patch_hash = 4336d422ee9043847e5e10dbbbd01940d4c9e5027f31ccdc33a7898a1ca94009
source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/pcre2_10.42-5/pcre2-10.42.tar.bz2 source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/pcre2_10.44-2/pcre2-10.44.tar.bz2
wrapdb_version = 10.42-5 wrapdb_version = 10.44-2
[provide] [provide]
libpcre2-8 = libpcre2_8 libpcre2-8 = libpcre2_8

View File

@ -1,13 +1,13 @@
[wrap-file] [wrap-file]
directory = SDL2-2.28.1 directory = SDL2-2.30.3
source_url = https://github.com/libsdl-org/SDL/releases/download/release-2.28.1/SDL2-2.28.1.tar.gz source_url = https://github.com/libsdl-org/SDL/releases/download/release-2.30.3/SDL2-2.30.3.tar.gz
source_filename = SDL2-2.28.1.tar.gz source_filename = SDL2-2.30.3.tar.gz
source_hash = 4977ceba5c0054dbe6c2f114641aced43ce3bf2b41ea64b6a372d6ba129cb15d source_hash = 820440072f8f5b50188c1dae104f2ad25984de268785be40c41a099a510f0aec
patch_filename = sdl2_2.28.1-2_patch.zip patch_filename = sdl2_2.30.3-2_patch.zip
patch_url = https://wrapdb.mesonbuild.com/v2/sdl2_2.28.1-2/get_patch patch_url = https://wrapdb.mesonbuild.com/v2/sdl2_2.30.3-2/get_patch
patch_hash = 2dd332226ba2a4373c6d4eb29fa915e9d5414cf7bb9fa2e4a5ef3b16a06e2736 patch_hash = 2c08bde67b3896db88e01481c379322625ea6c928cdb68ead91d0e3749863bc2
source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/sdl2_2.28.1-2/SDL2-2.28.1.tar.gz source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/sdl2_2.30.3-2/SDL2-2.30.3.tar.gz
wrapdb_version = 2.28.1-2 wrapdb_version = 2.30.3-2
[provide] [provide]
sdl2 = sdl2_dep sdl2 = sdl2_dep