diff --git a/data/core/tokenizer.lua b/data/core/tokenizer.lua
index fe74ca6d..5f70fa20 100644
--- a/data/core/tokenizer.lua
+++ b/data/core/tokenizer.lua
@@ -133,12 +133,12 @@ function tokenizer.tokenize(incoming_syntax, text, state, resume)
   local res
   local i = 1
 
-  if #incoming_syntax.patterns == 0 then
-    return { "normal", text }
-  end
-
   state = state or string.char(0)
 
+  if #incoming_syntax.patterns == 0 then
+    return { "normal", text }, state
+  end
+
   if resume then
     res = resume.res
     -- Remove "incomplete" tokens