diff --git a/data/core/tokenizer.lua b/data/core/tokenizer.lua
index 555d60b5..9231cce7 100644
--- a/data/core/tokenizer.lua
+++ b/data/core/tokenizer.lua
@@ -192,15 +192,21 @@ function tokenizer.tokenize(incoming_syntax, text, state)
         end
         res[1] = next
       end
-      if res[1] and close and target[3] then
+      if res[1] and target[3] then
+        -- Check to see if the escaped character is there,
+        -- and if it is not itself escaped.
         local count = 0
         for i = res[1] - 1, 1, -1 do
           if text:byte(i) ~= target[3]:byte() then break end
           count = count + 1
         end
-        -- Check to see if the escaped character is there,
-        -- and if it is not itself escaped.
-        if count % 2 == 0 then break end
+        if count % 2 == 0 then
+          -- The match is not escaped, so confirm it
+          break
+        elseif not close then
+          -- The *open* match is escaped, so avoid it
+          return
+        end
       end
     until not res[1] or not close or not target[3]
     return table.unpack(res)
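
Before this change, the escape check ran only for *closing* delimiters (`close`), so an escaped opening delimiter (e.g. a `\"` outside any string) could still start a string token. The patch runs the check for both directions: an unescaped match is confirmed as before, while an escaped *open* match now aborts the search. The even/odd rule it relies on is sketched below as a minimal standalone example; the `is_escaped` helper and its test strings are illustrative only, not part of tokenizer.lua.

```lua
-- Hypothetical helper illustrating the escape-counting rule used in the
-- patch (assumes a single-byte escape character). A delimiter at `pos` is
-- escaped only if preceded by an odd number of consecutive escape
-- characters, since each escape pair (e.g. "\\") escapes itself rather
-- than the delimiter.
local function is_escaped(text, pos, esc)
  local count = 0
  for i = pos - 1, 1, -1 do
    if text:byte(i) ~= esc:byte() then break end
    count = count + 1
  end
  return count % 2 == 1
end

print(is_escaped([[a\"]], 3, "\\"))   --> true:  one escape, the quote is escaped
print(is_escaped([[a\\"]], 4, "\\"))  --> false: the two escapes cancel out
```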