Fix multi-type usage in delimited patterns (#1740)

In the "end" pattern we weren't considering the multiple types.
Guldoman 2024-10-22 18:33:01 +02:00 committed by GitHub
parent d486058875
commit 46b1499f4f
1 changed file with 8 additions and 2 deletions
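
For context, a delimited pattern in a Lite XL syntax definition can specify "type" as a table instead of a single token type. Below is a rough, hypothetical example of such a pattern; the name, file glob, delimiters, and types are made up for illustration and are not taken from the commit:

-- Hypothetical syntax definition entry: a delimited pattern whose "type"
-- is a table, so the enclosed text and the captures of the matched
-- delimiter can be highlighted with different token types.
local syntax = require "core.syntax"

syntax.add {
  name = "Example",
  files = { "%.example$" },
  patterns = {
    {
      -- start delimiter, end delimiter (with a capture), escape character
      pattern = { "%[%[", "(%]%])", "\\" },
      -- the first entry is used for the "middle" of the pair; the table is
      -- also consulted when pushing the delimiter captures
      type = { "string", "operator" },
    },
  },
  symbols = {},
}

The fix below makes the end delimiter of such a pattern go through the multi-type path instead of being emitted as a single token of the "middle" type.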

@@ -284,7 +284,8 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
     -- continue trying to match the end pattern of a pair if we have a state set
     if current_pattern_idx > 0 then
       local p = current_syntax.patterns[current_pattern_idx]
-      local s, e = find_text(text, p, i, false, true)
+      local find_results = { find_text(text, p, i, false, true) }
+      local s, e = find_results[1], find_results[2]
       -- Use the first token type specified in the type table for the "middle"
       -- part of the subsyntax.
       local token_type = type(p.type) == "table" and p.type[1] or p.type
@@ -309,7 +310,12 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
       -- continue on as normal.
       if cont then
         if s then
-          push_token(res, token_type, text:usub(i, e))
+          -- Push remaining token before the end delimiter
+          if s > i then
+            push_token(res, token_type, text:usub(i, s - 1))
+          end
+          -- Push the end delimiter
+          push_tokens(res, current_syntax, p, text, find_results)
           set_subsyntax_pattern_idx(0)
           i = e + 1
         else
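
To make the new control flow concrete, here is a small self-contained Lua sketch. The helper names mirror those in the diff, but their bodies and signatures are simplified stand-ins (the real push_token and push_tokens in tokenizer.lua do more work, e.g. merging adjacent tokens and using UTF-8 aware usub), so treat it as an illustration of the idea rather than the actual implementation:

-- Simplified stand-ins for the tokenizer helpers, for illustration only.
local res = {}

local function push_token(res, token_type, text)
  -- The real helper also merges adjacent tokens of the same type.
  res[#res + 1] = { token_type, text }
end

local function push_tokens(res, p, text, find_results)
  -- Rough stand-in: when the matched end delimiter has captures, give each
  -- capture its own entry from the type table; otherwise fall back to the
  -- first (or only) type, as before.
  local s, e = find_results[1], find_results[2]
  if type(p.type) == "table" and #find_results > 2 then
    for idx = 3, #find_results do
      push_token(res, p.type[idx - 2] or p.type[#p.type], find_results[idx])
    end
  else
    push_token(res, type(p.type) == "table" and p.type[1] or p.type, text:sub(s, e))
  end
end

-- Pretend we are inside a delimited pattern whose end delimiter is "]]",
-- matched as two captures so each bracket can get its own type.
local text = "still inside]] after"
local p = { type = { "string", "operator" } }
local i = 1
local find_results = { text:find("(%])(%])", i) }
local s, e = find_results[1], find_results[2]

if s then
  -- Push the remaining "middle" text before the end delimiter...
  if s > i then
    push_token(res, p.type[1], text:sub(i, s - 1))
  end
  -- ...then push the end delimiter itself through the multi-type path.
  push_tokens(res, p, text, find_results)
  i = e + 1
end

for _, t in ipairs(res) do print(t[1], t[2]) end
-- string   still inside
-- string   ]
-- operator ]

The key point is that the text before the matched end delimiter still receives the first type from the table, while the delimiter itself is handed to push_tokens so each of its captures can receive its own type.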