From 9be5a46a22fd1fea116aa4300fbfcdbcb14ebc40 Mon Sep 17 00:00:00 2001
From: Guldoman
Date: Wed, 29 Nov 2023 17:11:46 +0100
Subject: [PATCH] Fix patterns starting with `^` in `tokenizer` (#1645)

Previously the "dirty" version of the pattern was used, which could
result in trying to match with multiple `^`, causing valid matches to
fail.
---
 data/core/tokenizer.lua | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/data/core/tokenizer.lua b/data/core/tokenizer.lua
index 6d99d9c4..02ddcb75 100644
--- a/data/core/tokenizer.lua
+++ b/data/core/tokenizer.lua
@@ -210,9 +210,11 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
         -- Remove '^' from the beginning of the pattern
         if type(target) == "table" then
           target[p_idx] = code:usub(2)
+          code = target[p_idx]
         else
           p.pattern = p.pattern and code:usub(2)
           p.regex = p.regex and code:usub(2)
+          code = p.pattern or p.regex
         end
       end
     end
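
Note (not part of the patch): a minimal Lua sketch of the failure mode the
message describes, using plain string.sub/string.find in place of the
tokenizer's usub/ufind helpers; the pattern and sample text below are
illustrative assumptions, not taken from the syntax files.

  -- An anchored pattern as it might appear in a syntax definition.
  local code = "^local"
  -- The '^' is stripped and written back to storage, as the existing code
  -- already does for target[p_idx] / p.pattern / p.regex.
  local stored = code:sub(2)                  -- "local"

  -- Before the fix: the local `code` still carries its leading '^', so
  -- re-anchoring it produces "^^local"; the second '^' is a literal
  -- character in Lua patterns, and a valid match fails.
  print(("local x = 1"):find("^" .. code))    --> nil

  -- After the fix: `code` is refreshed from the cleaned pattern first.
  code = stored
  print(("local x = 1"):find("^" .. code))    --> 1   5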