-- core.syntax: registry of syntax definitions used by the tokenizer to
-- highlight files, selected by filename pattern or first-line header.
local common = require "core.common"


local syntax = {}
|
|
syntax.items = {}
|
|
|
|
local plain_text_syntax = { name = "Plain Text", patterns = {}, symbols = {} }
|
|
|
|
|
|
function syntax.add(t)
|
|
if type(t.space_handling) ~= "boolean" then t.space_handling = true end
|
|
|
|
if t.patterns then
|
|
-- the rule %s+ gives us a performance gain for the tokenizer in lines with
|
|
-- long amounts of consecutive spaces, can be disabled by plugins where it
|
|
-- causes conflicts by declaring the table property: space_handling = false
|
|
if t.space_handling then
|
|
table.insert(t.patterns, { pattern = "%s+", type = "normal" })
|
|
end
|
|
|
|
-- this rule gives us additional performance gain by matching every word
|
|
-- that was not matched by the syntax patterns as a single token, preventing
|
|
-- the tokenizer from iterating over each character individually which is a
|
|
-- lot slower since iteration occurs in lua instead of C and adding to that
|
|
-- it will also try to match every pattern to a single char (same as spaces)
|
|
table.insert(t.patterns, { pattern = "%w+%f[%s]", type = "normal" })
|
|
end
|
|
|
|
table.insert(syntax.items, t)
|
|
end
|
|
|
|
|
|
local function find(string, field)
|
|
local best_match = 0
|
|
local best_syntax
|
|
for i = #syntax.items, 1, -1 do
|
|
local t = syntax.items[i]
|
|
local s, e = common.match_pattern(string, t[field] or {})
|
|
if s and e - s > best_match then
|
|
best_match = e - s
|
|
best_syntax = t
|
|
end
|
|
end
|
|
return best_syntax
|
|
end
|
|
|
|
function syntax.get(filename, header)
|
|
return (filename and find(filename, "files"))
|
|
or (header and find(header, "headers"))
|
|
or plain_text_syntax
|
|
end
|
|
|
|
|
|
return syntax