| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| |
|
| | local yield,wrap = coroutine.yield,coroutine.wrap |
| | local strfind = string.find |
| | local strsub = string.sub |
| | local append = table.insert |
| |
|
--- Validate an argument's type, raising the error at the caller's level.
-- @param idx position of the argument (used in the message)
-- @param val value to check
-- @param tp expected result of type(val)
local function assert_arg(idx, val, tp)
    if type(val) == tp then
        return
    end
    error("argument "..idx.." must be "..tp, 2)
end
| |
|
-- The module table.
local lexer = {}

-- Anchored token patterns.  NUMBER1/NUMBER2 accept a leading sign and are
-- used by the language-neutral scanner; NUMBER4/NUMBER5 are the unsigned
-- variants used for Lua/C++, where '+'/'-' must lex as operators.
local NUMBER1 = '^[%+%-]?%d+%.?%d*[eE][%+%-]?%d+' -- signed number with exponent
local NUMBER2 = '^[%+%-]?%d+%.?%d*'               -- signed integer or float
local NUMBER3 = '^0x[%da-fA-F]+'                  -- hexadecimal integer
local NUMBER4 = '^%d+%.?%d*[eE][%+%-]?%d+'        -- unsigned number with exponent
local NUMBER5 = '^%d+%.?%d*'                      -- unsigned integer or float
local IDEN = '^[%a_][%w_]*'                       -- identifier / keyword candidate
local WSPACE = '^%s+'                             -- run of whitespace
local STRING1 = "^(['\"])%1"                      -- empty string literal '' or ""
local STRING2 = [[^(['"])(\*)%2%1]]               -- string consisting only of backslashes
local STRING3 = [[^(['"]).-[^\](\*)%2%1]]         -- general quoted string (lazy; skips escaped quotes)
local CHAR1 = "^''"                               -- empty character literal
local CHAR2 = [[^'(\*)%1']]                       -- char literal consisting only of backslashes
local CHAR3 = [[^'.-[^\](\*)%1']]                 -- general character literal
local PREPRO = '^#.-[^\\]\n'                      -- preprocessor line, honouring '\' continuations

-- Lazily built match tables and keyword sets (populated by the functions below).
local plain_matches,lua_matches,cpp_matches,lua_keyword,cpp_keyword
| |
|
--- Emit a token whose type is its own text (operators, punctuation).
local function tdump(tok)
    return yield(tok, tok)
end
| |
|
--- Emit a 'number' token; options.number requests conversion to a Lua number.
local function ndump(tok, options)
    local convert = options and options.number
    if convert then
        tok = tonumber(tok)
    end
    return yield('number', tok)
end
| |
|
| | |
| | |
--- Emit a 'string' token; options.string strips the surrounding quotes.
local function sdump(tok, options)
    if options and options.string then
        tok = string.sub(tok, 2, -2) -- drop opening and closing quote
    end
    return yield('string', tok)
end
| |
|
| | |
--- Emit a long (bracketed) string token, optionally stripping the brackets.
-- findres[3], when present, is the '=' padding captured from the opening
-- bracket, so the delimiter length is 3 plus that padding.
local function sdump_l(tok, options, findres)
    if options and options.string then
        local eq = findres[3]
        local cut = 3
        if eq then
            cut = cut + #eq
        end
        tok = tok:sub(cut, -cut)
        -- a newline immediately after the opening bracket is not part of the value
        if tok:byte(1) == 10 then
            tok = tok:sub(2)
        end
    end
    return yield('string', tok)
end
| |
|
--- Emit a 'char' token (C/C++); options.string strips the surrounding quotes.
local function chdump(tok, options)
    if options and options.string then
        tok = string.sub(tok, 2, -2) -- drop the single quotes
    end
    return yield('char', tok)
end
| |
|
--- Emit a 'comment' token.
local function cdump(tok)
    return yield('comment', tok)
end
| |
|
--- Emit a 'space' token for a run of whitespace.
local function wsdump(tok)
    return yield('space', tok)
end
| |
|
--- Emit a 'prepro' token for a preprocessor directive line.
local function pdump(tok)
    return yield('prepro', tok)
end
| |
|
--- Emit an 'iden' token; the plain scanner has no keyword lookup.
local function plain_vdump(tok)
    return yield('iden', tok)
end
| |
|
--- Classify a Lua word as 'keyword' or 'iden' and emit it.
local function lua_vdump(tok)
    local tp = lua_keyword[tok] and 'keyword' or 'iden'
    return yield(tp, tok)
end
| |
|
--- Classify a C/C++ word as 'keyword' or 'iden' and emit it.
local function cpp_vdump(tok)
    local tp = cpp_keyword[tok] and 'keyword' or 'iden'
    return yield(tp, tok)
end
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
| | |
| | |
--- Create a lexical scanner over a string or a file-like object.
-- The result is a coroutine-backed iterator: each plain call returns the next
-- (token_type, token_value) pair; calling it WITH an argument makes a special
-- request (see handle_requests).
-- @param s a string, or an object with a read() method (consumed line by line)
-- @param matches array of {pattern, handler} pairs tried in order; when nil a
--   cached language-neutral match table is built and used
-- @param filter token types to drop from the stream (default {space=true})
-- @param options passed through to the handlers (default {number=true,string=true})
-- @return a token iterator function
function lexer.scan(s,matches,filter,options)
    -- any non-string input is treated as a file-like object
    local file = type(s) ~= 'string' and s
    filter = filter or {space=true}
    options = options or {number=true,string=true}
    if filter then
        -- translate the symbolic filter keys into the handler functions,
        -- which is what the match loop below actually checks against
        if filter.space then filter[wsdump] = true end
        if filter.comments then
            filter[cdump] = true
        end
    end
    if not matches then
        -- build the default match table once and cache it module-wide
        if not plain_matches then
            plain_matches = {
                {WSPACE,wsdump},
                {NUMBER3,ndump},        -- hex before identifiers/plain numbers
                {IDEN,plain_vdump},
                {NUMBER1,ndump},
                {NUMBER2,ndump},
                {STRING1,sdump},
                {STRING2,sdump},
                {STRING3,sdump},
                {'^.',tdump}            -- catch-all: any single character
            }
        end
        matches = plain_matches
    end
    local function lex(first_arg)
        local line_nr = 0
        local next_line = file and file:read()
        local sz = file and 0 or #s
        local idx = 1

        -- Serve special requests made by resuming the iterator with an argument:
        --   * table  -> a token list to splice into the stream (lexer.insert)
        --   * string -> a pattern: skip ahead to its match and return the text
        --   * other  -> report the current (line number, byte index)
        local function handle_requests(res)
            while res do
                local tp = type(res)
                -- insert a token list
                if tp == 'table' then
                    res = yield('','')
                    -- NOTE(review): this iterates the value returned by the
                    -- yield above, not the table that triggered this branch;
                    -- confirm this is the intended hand-off for lexer.insert
                    for _,t in ipairs(res) do
                        res = yield(t[1],t[2])
                    end
                elseif tp == 'string' then
                    -- skip up to a pattern match; only searches the current
                    -- buffer, which for file input is the current line
                    local i1,i2 = strfind(s,res,idx)
                    if i1 then
                        local tok = strsub(s,i1,i2)
                        idx = i2 + 1
                        res = yield('',tok)
                    else
                        -- no match: report nothing and exhaust the buffer
                        res = yield('','')
                        idx = sz + 1
                    end
                else
                    res = yield(line_nr,idx)
                end
            end
        end

        handle_requests(first_arg)
        if not file then line_nr = 1 end

        while true do
            if idx > sz then          -- current buffer exhausted
                if file then
                    if not next_line then return end   -- EOF: iterator finishes
                    s = next_line
                    line_nr = line_nr + 1
                    next_line = file:read()
                    if next_line then
                        s = s .. '\n'  -- restore the newline stripped by read()
                    end
                    idx, sz = 1, #s
                else
                    -- string input at EOS: keep serving requests forever
                    while true do
                        handle_requests(yield())
                    end
                end
            end

            -- try each pattern in order; the first hit consumes the token
            for _,m in ipairs(matches) do
                local pat = m[1]
                local fun = m[2]
                local findres = {strfind(s,pat,idx)}
                local i1, i2 = findres[1], findres[2]
                if i1 then
                    local tok = strsub(s,i1,i2)
                    idx = i2 + 1
                    local res
                    if not (filter and filter[fun]) then
                        lexer.finished = idx > sz  -- module-level end-of-buffer flag
                        res = fun(tok, options, findres)
                    end
                    if not file and tok:find("\n") then
                        -- count the newlines inside the token to keep line_nr current
                        local _, newlines = tok:gsub("\n", {})
                        line_nr = line_nr + newlines
                    end
                    handle_requests(res)
                    break
                end
            end
        end
    end
    return wrap(lex)
end
| |
|
--- True when the value is a string.
local function isstring(s)
    return type(s) == 'string'
end
| |
|
| | |
| | |
| | |
| | |
| | |
--- Insert tokens into the token stream.
-- The tokens may be given as a single type/value pair, as a getter function
-- whose iteration produces type/value pairs, or as a ready-made token list.
-- @param tok the token stream (as returned by lexer.scan)
-- @param a1 a token type, a function, or a list of {type,value} pairs
-- @param a2 the token value, when a1 is a type string
function lexer.insert (tok, a1, a2)
    if not a1 then return end
    local list
    if type(a1) == 'function' then
        -- drain the generator into a token list
        list = {}
        for t, v in a1() do
            list[#list + 1] = {t, v}
        end
    elseif isstring(a1) and isstring(a2) then
        list = { {a1, a2} }
    else
        list = a1
    end
    tok(list)
end
| |
|
| | |
| | |
| | |
--- Get the rest of the current line from the stream.
-- Consumes up to and including the next newline; the token type is discarded.
-- @param tok the token stream
-- @return the matched text
function lexer.getline (tok)
    local _, line = tok('.-\n')
    return line
end
| |
|
| | |
| | |
| | |
| | |
| | |
--- Ask the stream for its current position.
-- A non-string, non-table request makes the iterator report state.
-- @param tok the token stream
-- @return current line number and byte index
function lexer.lineno (tok)
    return tok(0)
end
| |
|
| | |
| | |
| | |
--- Consume and return everything remaining in the stream's buffer.
-- @param tok the token stream
-- @return the remaining text
function lexer.getrest (tok)
    local _, rest = tok('.+')
    return rest
end
| |
|
| | |
| | |
| | |
--- Get the set of Lua keywords, building it on first use.
-- @return a table mapping each Lua keyword to true
function lexer.get_keywords ()
    if not lua_keyword then
        lua_keyword = {}
        local words = [[and break do else elseif end false for function
            if in local nil not or repeat return then true until while]]
        for w in words:gmatch("%a+") do
            lua_keyword[w] = true
        end
    end
    return lua_keyword
end
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
--- Create a Lua token iterator from a string or file-like object.
-- @param s the string or file-like object
-- @param filter token types to exclude (default {space=true,comments=true})
-- @param options handler options (default {number=true,string=true})
-- @return a token iterator (see lexer.scan)
function lexer.lua(s,filter,options)
    filter = filter or {space=true,comments=true}
    lexer.get_keywords()  -- make sure lua_vdump has the keyword set available
    if not lua_matches then
        -- built once and cached; order matters: more specific patterns
        -- must come before the general ones that would also match
        lua_matches = {
            {WSPACE,wsdump},
            {NUMBER3,ndump},                  -- hex before identifiers/plain numbers
            {IDEN,lua_vdump},                 -- identifiers and keywords
            {NUMBER4,ndump},
            {NUMBER5,ndump},
            {STRING1,sdump},
            {STRING2,sdump},
            {STRING3,sdump},
            {'^%-%-%[(=*)%[.-%]%1%]',cdump},  -- block comment, matching '=' levels
            {'^%-%-.-\n',cdump},              -- line comment
            {'^%[(=*)%[.-%]%1%]',sdump_l},    -- long string, matching '=' levels
            {'^==',tdump},
            {'^~=',tdump},
            {'^<=',tdump},
            {'^>=',tdump},
            {'^%.%.%.',tdump},                -- varargs before concat
            {'^%.%.',tdump},
            {'^.',tdump}                      -- catch-all single character
        }
    end
    return lexer.scan(s,lua_matches,filter,options)
end
| |
|
| | |
| | |
| | |
| | |
| | |
| | |
--- Create a C/C++ token iterator from a string or file-like object.
-- @param s the string or file-like object
-- @param filter token types to exclude (default {space=true,comments=true})
-- @param options handler options (default {number=true,string=true})
-- @return a token iterator (see lexer.scan)
function lexer.cpp(s,filter,options)
    filter = filter or {space=true,comments=true}
    if not cpp_keyword then
        -- built once and cached for cpp_vdump
        cpp_keyword = {
            ["class"] = true, ["break"] = true, ["do"] = true, ["sizeof"] = true,
            ["else"] = true, ["continue"] = true, ["struct"] = true,
            ["false"] = true, ["for"] = true, ["public"] = true, ["void"] = true,
            ["private"] = true, ["protected"] = true, ["goto"] = true,
            ["if"] = true, ["static"] = true, ["const"] = true, ["typedef"] = true,
            ["enum"] = true, ["char"] = true, ["int"] = true, ["bool"] = true,
            ["long"] = true, ["float"] = true, ["true"] = true, ["delete"] = true,
            ["double"] = true, ["while"] = true, ["new"] = true,
            ["namespace"] = true, ["try"] = true, ["catch"] = true,
            ["switch"] = true, ["case"] = true, ["extern"] = true,
            ["return"] = true,["default"] = true,['unsigned'] = true,['signed'] = true,
            ["union"] = true, ["volatile"] = true, ["register"] = true,["short"] = true,
        }
    end
    if not cpp_matches then
        -- built once and cached; order matters: multi-character operators
        -- and specific literals must precede the catch-all patterns
        cpp_matches = {
            {WSPACE,wsdump},
            {PREPRO,pdump},        -- preprocessor directive lines
            {NUMBER3,ndump},       -- hex before identifiers/plain numbers
            {IDEN,cpp_vdump},      -- identifiers and keywords
            {NUMBER4,ndump},
            {NUMBER5,ndump},
            {CHAR1,chdump},        -- character literals
            {CHAR2,chdump},
            {CHAR3,chdump},
            {STRING1,sdump},       -- string literals
            {STRING2,sdump},
            {STRING3,sdump},
            {'^//.-\n',cdump},     -- line comment
            {'^/%*.-%*/',cdump},   -- block comment
            {'^==',tdump},
            {'^!=',tdump},
            {'^<=',tdump},
            {'^>=',tdump},
            {'^->',tdump},
            {'^&&',tdump},
            {'^||',tdump},
            {'^%+%+',tdump},
            {'^%-%-',tdump},
            {'^%+=',tdump},
            {'^%-=',tdump},
            {'^%*=',tdump},
            {'^/=',tdump},
            {'^|=',tdump},
            {'^%^=',tdump},
            {'^::',tdump},
            {'^.',tdump}           -- catch-all single character
        }
    end
    return lexer.scan(s,cpp_matches,filter,options)
end
| |
|
| | |
| | |
| | |
| | |
| | |
--- Get a delimiter-separated list of token lists from a stream.
-- Tracks '(' / ')' nesting so that delimiters inside parentheses do not split
-- the list.  The scan stops at endtoken (at nesting level 1) or when a ')'
-- closes the outermost level.
-- @param tok the token stream
-- @param endtoken terminating token type; default ')'.  A value of '\n'
--   terminates on a 'space' token containing a newline.
-- @param delim separator token type; default ','
-- @return a list of token lists, and the terminating {type,value} pair;
--   or nil,'EOS' if the stream ended first
function lexer.get_separated_list(tok, endtoken, delim)
    endtoken = endtoken or ')'
    delim = delim or ','
    local level = 1
    local groups, current = {}, {}

    -- append a {type,value} pair to the group being built
    local function push(t, v)
        current[#current + 1] = {t, v or t}
    end
    -- finish the current group and start a fresh one
    local function close_group()
        groups[#groups + 1] = current
        current = {}
    end

    -- end-of-line terminators arrive inside 'space' tokens, not as their own type
    local at_end
    if endtoken == '\n' then
        at_end = function(t, v)
            return t == 'space' and v:find '\n'
        end
    else
        at_end = function(t)
            return t == endtoken
        end
    end

    local t, v
    while true do
        t, v = tok()
        if not t then return nil, 'EOS' end
        if at_end(t, v) and level == 1 then
            close_group()
            break
        elseif t == '(' then
            level = level + 1
            push('(')
        elseif t == ')' then
            level = level - 1
            if level == 0 then
                close_group()
                break
            end
            push(')')
        elseif t == delim and level == 1 then
            close_group()   -- delimiter at top level splits the list
        else
            push(t, v)
        end
    end
    return groups, {t, v}
end
| |
|
| | |
| | |
--- Skip whitespace tokens and return the first non-space token.
-- @param tok the token stream
-- @return token type and value (nil at end of stream)
function lexer.skipws (tok)
    local t, v
    repeat
        t, v = tok()
    until t ~= 'space'
    return t, v
end
| |
|
-- Local alias for the whitespace skipper defined above.
local skipws = lexer.skipws
| |
|
| | |
| | |
| | |
| | |
| | |
--- Get the next token, which must be of the given type, and return its value.
-- Whitespace is skipped first unless no_skip_ws is set.
-- Raises (reported at the caller's level) when the type does not match.
-- @param tok the token stream (must be a function)
-- @param expected_type the required token type (must be a string)
-- @param no_skip_ws when true, do not skip 'space' tokens first
-- @return the token value
function lexer.expecting (tok, expected_type, no_skip_ws)
    assert_arg(1, tok, 'function')
    assert_arg(2, expected_type, 'string')
    local t, v
    if no_skip_ws then
        t, v = tok()
    else
        t, v = skipws(tok)
    end
    if t ~= expected_type then
        error("expecting "..expected_type, 2)
    end
    return v
end
| |
|
-- Export the module table.
return lexer
| |
|