diff --git a/data/core/tokenizer.lua b/data/core/tokenizer.lua
index d32d7d70..6db47ff6 100644
--- a/data/core/tokenizer.lua
+++ b/data/core/tokenizer.lua
@@ -133,12 +133,12 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
   local res
   local i = 1
-  if #incoming_syntax.patterns == 0 then
-    return { "normal", text }
-  end
-
   state = state or string.char(0)
+  if #incoming_syntax.patterns == 0 then
+    return { "normal", text }, state
+  end
+
   if resume then
     res = resume.res
     -- Remove "incomplete" tokens