From 46b1499f4fb035f303412a39217f49210a490695 Mon Sep 17 00:00:00 2001
From: Guldoman
Date: Tue, 22 Oct 2024 18:33:01 +0200
Subject: [PATCH] Fix multi-type usage in delimited patterns (#1740)

In the "end" pattern we weren't considering the multiple types.
---
 data/core/tokenizer.lua | 10 ++++++++--
 1 file changed, 8 insertions(+), 2 deletions(-)

diff --git a/data/core/tokenizer.lua b/data/core/tokenizer.lua
index 2d8ae426..b8f0fe04 100644
--- a/data/core/tokenizer.lua
+++ b/data/core/tokenizer.lua
@@ -284,7 +284,8 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
     -- continue trying to match the end pattern of a pair if we have a state set
     if current_pattern_idx > 0 then
       local p = current_syntax.patterns[current_pattern_idx]
-      local s, e = find_text(text, p, i, false, true)
+      local find_results = { find_text(text, p, i, false, true) }
+      local s, e = find_results[1], find_results[2]
       -- Use the first token type specified in the type table for the "middle"
       -- part of the subsyntax.
       local token_type = type(p.type) == "table" and p.type[1] or p.type
@@ -309,7 +310,12 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
       -- continue on as normal.
       if cont then
         if s then
-          push_token(res, token_type, text:usub(i, e))
+          -- Push remaining token before the end delimiter
+          if s > i then
+            push_token(res, token_type, text:usub(i, s - 1))
+          end
+          -- Push the end delimiter
+          push_tokens(res, current_syntax, p, text, find_results)
           set_subsyntax_pattern_idx(0)
           i = e + 1
         else
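
For context, here is a minimal sketch of the kind of syntax definition this patch affects. It is illustrative only: the language name, file glob, and pattern below are made up, not taken from a shipped plugin. It shows a delimited ("pair") pattern whose start and end delimiters contain captures and whose `type` is a table; per the diff, the first type entry is used for the text between the delimiters, while the remaining entries are presumably resolved per capture by push_tokens().

  local syntax = require "core.syntax"

  syntax.add {
    name = "Example",           -- hypothetical language, for illustration only
    files = { "%.example$" },   -- hypothetical file extension
    patterns = {
      -- Delimited ("pair") pattern: both delimiters carry a capture and
      -- `type` is a table, so each capture can get its own token type.
      { pattern = { "<(%w+)>", "</(%w+)>" }, type = { "normal", "keyword" } },
    },
    symbols = {},
  }

With the fix, any remaining text before the end delimiter keeps the first type, and the end delimiter itself is pushed through push_tokens() with the full find_results, so capture-specific types in the table are no longer dropped.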