-- lite-xl/data/core/syntax.lua

local common = require "core.common"
local syntax = {}
syntax.items = {}
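-- Fallback syntax returned by syntax.get() when no registered syntax matches.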
local plain_text_syntax = { name = "Plain Text", patterns = {}, symbols = {} }
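-- Registers a new syntax definition: `t` typically carries `files`/`headers`
-- matching patterns, a list of tokenizer `patterns` and a `symbols` table.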
function syntax.add(t)
  if type(t.space_handling) ~= "boolean" then t.space_handling = true end

  if t.patterns then
    -- The rule %s+ gives the tokenizer a performance gain on lines with long
    -- runs of consecutive spaces. It can be disabled by plugins where it
    -- causes conflicts by declaring the table property: space_handling = false
    if t.space_handling then
      table.insert(t.patterns, { pattern = "%s+", type = "normal" })
    end
    -- This rule gives an additional performance gain by matching every word
    -- that was not matched by the syntax patterns as a single token. Without it
    -- the tokenizer would iterate over each character individually, which is a
    -- lot slower since the iteration occurs in Lua instead of C, and would also
    -- try to match every pattern against each single char (same as with spaces).
    table.insert(t.patterns, { pattern = "%w+%f[%s]", type = "normal" })
  end
  table.insert(syntax.items, t)
end
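-- Returns the registered syntax whose `field` patterns ("files" or "headers")
-- give the longest match against `string`; items are scanned newest-first, so
-- on equal match length the most recently added syntax wins.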
local function find(string, field)
  local best_match = 0
  local best_syntax
  for i = #syntax.items, 1, -1 do
    local t = syntax.items[i]
    local s, e = common.match_pattern(string, t[field] or {})
    if s and e - s > best_match then
      best_match = e - s
      best_syntax = t
    end
  end
  return best_syntax
end
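-- Resolves the syntax for a document: first by matching the filename, then by
-- matching the file header if given (e.g. a shebang line), and finally falls
-- back to the plain-text syntax.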
function syntax.get(filename, header)
  return (filename and find(filename, "files"))
    or (header and find(header, "headers"))
    or plain_text_syntax
end
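-- A minimal sketch of how a language plugin might register itself with this
-- module. The language name, file pattern, token types and symbols below are
-- illustrative only, not taken from an actual plugin:
--
--   syntax.add {
--     name = "My Language",
--     files = { "%.mylang$" },             -- filename patterns used by syntax.get()
--     patterns = {
--       { pattern = "#.*",         type = "comment" },
--       { pattern = "-?%d+",       type = "number"  },
--       { pattern = "[%a_][%w_]*", type = "symbol"  },
--     },
--     symbols = {
--       ["if"]  = "keyword",
--       ["end"] = "keyword",
--     },
--     -- space_handling = false,  -- opt out of the "%s+" rule if it conflicts
--   }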
return syntax