|
|
import { chevrotain } from '../../../lib.js'; |
|
|
const { createToken, Lexer } = chevrotain; |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Names of the lexer modes. The values are the keys of Def.modes below and
// the strings chevrotain uses on its internal mode stack.
// NOTE(review): 'modifer' is a typo for 'modifier', but key and value are
// used consistently under that spelling throughout this file, so it is kept
// as-is; renaming would have to touch every reference at once.
const modes = {
    // Default mode: plain text outside of any macro.
    plaintext: 'plaintext_mode',
    // Inside '{{ ... }}': macro flags and the macro identifier.
    macro_def: 'macro_def_mode',
    // Transitional state entered right after a macro identifier.
    macro_identifier_end: 'macro_identifier_end_mode',
    // Macro arguments (after the identifier / '//' comment marker).
    macro_args: 'macro_args_mode',
    // After a '|' filter separator, expecting a filter identifier.
    macro_filter_modifer: 'macro_filter_modifer_mode',
    // Transitional state entered right after a filter identifier.
    macro_filter_modifier_end: 'macro_filter_modifier_end_mode',
};
|
|
|
|
|
|
|
|
// All token definitions, grouped by concern. Token objects are global in this
// file: the same token may be listed in several modes of Def below, but its
// push/pop behavior is attached once via enter()/exits().
const Tokens = {
    // A run of plain text: any character except a '{' that begins a '{{'
    // pair. line_breaks is set because the pattern can consume newlines.
    Plaintext: createToken({ name: 'Plaintext', pattern: /(?:[^{]|\{(?!\{))+/u, line_breaks: true }),
    // A single literal '{' that is immediately followed by '{{' — i.e. the
    // first brace of '{{{', emitted as text so the macro opener stays intact.
    PlaintextOpenBrace: createToken({ name: 'Plaintext.OpenBrace', pattern: /\{(?=\{\{)/ }),

    Macro: {
        // '{{' — opens a macro.
        Start: createToken({ name: 'Macro.Start', pattern: /\{\{/ }),
        // A single-character macro flag.
        // NOTE(review): the token name is singular 'Macro.Flag' while the key
        // is plural 'Flags' — kept as-is; consumers may match on the name.
        Flags: createToken({ name: 'Macro.Flag', pattern: /[!?#~/.$]/ }),
        // '//' — comment-style marker inside a macro.
        DoubleSlash: createToken({ name: 'Macro.DoubleSlash', pattern: /\/\// }),
        // Macro name: a letter followed by word characters, '-' or '_'.
        Identifier: createToken({ name: 'Macro.Identifier', pattern: /[a-zA-Z][\w-_]*/ }),
        // End of a macro identifier: whitespace, or zero-width before ':'
        // / '::' / '|' / '}'. Skipped (produces no token), used only to
        // drive a mode transition.
        EndOfIdentifier: createToken({ name: 'Macro.EndOfIdentifier', pattern: /(?:\s+|(?=:{1,2})|(?=[|}]))/, group: Lexer.SKIPPED }),
        // Zero-width lookahead just before '}}'; skipped, used to pop modes
        // without consuming the closing braces.
        BeforeEnd: createToken({ name: 'Macro.BeforeEnd', pattern: /(?=\}\})/, group: Lexer.SKIPPED }),
        // '}}' — closes a macro.
        End: createToken({ name: 'Macro.End', pattern: /\}\}/ }),
    },

    // Punctuation inside the macro-arguments mode.
    Args: {
        DoubleColon: createToken({ name: 'Args.DoubleColon', pattern: /::/ }),
        Colon: createToken({ name: 'Args.Colon', pattern: /:/ }),
        Equals: createToken({ name: 'Args.Equals', pattern: /=/ }),
        Quote: createToken({ name: 'Args.Quote', pattern: /"/ }),
    },

    // Filter ('|') handling inside macro arguments.
    Filter: {
        // '\|' — an escaped pipe that must NOT start a filter.
        EscapedPipe: createToken({ name: 'Filter.EscapedPipe', pattern: /\\\|/ }),
        // '|' — starts a filter expression.
        Pipe: createToken({ name: 'Filter.Pipe', pattern: /\|/ }),
        // Filter name: same shape as Macro.Identifier.
        Identifier: createToken({ name: 'Filter.Identifier', pattern: /[a-zA-Z][\w-_]*/ }),
        // End of a filter identifier — same shape as Macro.EndOfIdentifier.
        EndOfIdentifier: createToken({ name: 'Filter.EndOfIdentifier', pattern: /(?:\s+|(?=:{1,2})|(?=[|}]))/, group: Lexer.SKIPPED }),
    },

    // Generic identifier used for argument names/values.
    Identifier: createToken({ name: 'Identifier', pattern: /[a-zA-Z][\w-_]*/ }),
    // Whitespace, skipped wherever it is listed.
    WhiteSpace: createToken({ name: 'WhiteSpace', pattern: /\s+/, group: Lexer.SKIPPED }),

    // Catch-all inside macro arguments: any single character that is not the
    // start of a '}}' closer.
    Unknown: createToken({ name: 'Unknown', pattern: /([^}]|\}(?!\}))/ }),

    // Greedy text up to (but not including) the next '{{' or '}}'.
    // NOTE(review): not wired into any mode in Def below — verify it is
    // still needed.
    Text: createToken({ name: 'Text', pattern: /.+(?=\}\}|\{\{)/, line_breaks: true }),

    // Custom-pattern token that always matches the empty string, letting a
    // mode be popped without consuming input; skipped so it never surfaces.
    // NOTE(review): relies on chevrotain's custom-pattern API accepting an
    // always-matching zero-width result — confirm against the chevrotain docs.
    ModePopper: createToken({ name: 'ModePopper', pattern: () => [''], line_breaks: false, group: Lexer.SKIPPED }),
};
|
|
|
|
|
|
|
|
// Tracks which mode each token (keyed by token name) has been registered to
// enter via enter(). Used to reject conflicting registrations, since token
// definitions are global.
const enterModesMap = new Map();
|
|
|
|
|
// Lexer definition: wires the global tokens into chevrotain's mode table.
// Helper semantics (defined at the bottom of this file):
//   using(t)      - t is matched in this mode, no mode change
//   enter(t, m)   - t is matched and pushes mode m
//   exits(t, m)   - t is matched and pops the current mode; m names the mode
//                   being exited (informational — only its truthiness is used)
// The order of entries within a mode is significant: chevrotain tries
// patterns in listed order.
const Def = {
    modes: {
        // Plain text until a '{{' macro opener shows up.
        [modes.plaintext]: [
            using(Tokens.Plaintext),
            using(Tokens.PlaintextOpenBrace),
            enter(Tokens.Macro.Start, modes.macro_def),
        ],
        // Inside '{{': flags, then the macro identifier.
        [modes.macro_def]: [
            exits(Tokens.Macro.End, modes.macro_def),
            // '//' skips straight to argument parsing.
            enter(Tokens.Macro.DoubleSlash, modes.macro_args),
            using(Tokens.Macro.Flags),
            using(Tokens.WhiteSpace),
            enter(Tokens.Macro.Identifier, modes.macro_identifier_end),
            // Fallback: pop back out without consuming input.
            exits(Tokens.ModePopper, modes.macro_def),
        ],
        // Just after a macro identifier: decide whether the macro ends here
        // or arguments follow.
        [modes.macro_identifier_end]: [
            exits(Tokens.Macro.BeforeEnd, modes.macro_identifier_end),
            enter(Tokens.Macro.EndOfIdentifier, modes.macro_args, { andExits: modes.macro_identifier_end }),
        ],
        // Macro arguments: nested macros, filters, punctuation, identifiers.
        [modes.macro_args]: [
            // Nested macro.
            enter(Tokens.Macro.Start, modes.macro_def),
            // '\|' must win over '|'.
            using(Tokens.Filter.EscapedPipe),
            enter(Tokens.Filter.Pipe, modes.macro_filter_modifer),
            using(Tokens.Args.DoubleColon),
            using(Tokens.Args.Colon),
            using(Tokens.Args.Equals),
            using(Tokens.Args.Quote),
            using(Tokens.Identifier),
            using(Tokens.WhiteSpace),
            // Catch-all for any other single character.
            using(Tokens.Unknown),
            // Fallback: pop back out without consuming input.
            exits(Tokens.ModePopper, modes.macro_args),
        ],
        // After a '|': expect the filter identifier.
        [modes.macro_filter_modifer]: [
            using(Tokens.WhiteSpace),
            enter(Tokens.Filter.Identifier, modes.macro_filter_modifier_end, { andExits: modes.macro_filter_modifer }),
        ],
        // Just after a filter identifier.
        [modes.macro_filter_modifier_end]: [
            // Fixed mislabel: this exits macro_filter_modifier_end (the
            // current mode), not macro_identifier_end. Behavior is unchanged
            // — exits() only uses the argument's truthiness.
            exits(Tokens.Macro.BeforeEnd, modes.macro_filter_modifier_end),
            exits(Tokens.Filter.EndOfIdentifier, modes.macro_filter_modifer),
        ],
    },
    defaultMode: modes.plaintext,
};
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
// Exported as a live binding: `instance` is assigned after the class
// definition below, so importers of `MacroLexer` receive the singleton
// instance rather than the class itself.
let instance;

export { instance as MacroLexer };
|
|
|
|
|
/**
 * Singleton chevrotain lexer for the macro language defined by Def/Tokens.
 * Access it through the static `instance` getter; the exported `MacroLexer`
 * binding is that singleton.
 */
class MacroLexer extends Lexer {
    static #instance;

    /** Lazily creates and returns the one shared lexer instance. */
    static get instance() { return (MacroLexer.#instance ??= new MacroLexer()); }

    // Token and mode definitions, exposed both statically and per instance
    // for convenient access from consumers.
    static tokens = Tokens;
    static def = Def;
    tokens = Tokens;
    def = MacroLexer.def;

    constructor() {
        super(MacroLexer.def, {
            traceInitPerf: false,
        });
    }

    /**
     * Tokenizes the given input and returns an inspection-friendly result:
     * each token is flattened with a `type` string (the token type's name)
     * while keeping the original `tokenType` object.
     *
     * @param {string} input - Source text to tokenize.
     * @returns {{errors: object[], groups: object, tokens: object[]}}
     */
    test(input) {
        const { errors, groups, tokens } = this.tokenize(input);
        const flattened = tokens.map((token) => {
            const { tokenType, ...rest } = token;
            return { type: tokenType.name, ...rest, tokenType };
        });
        return { errors, groups, tokens: flattened };
    }
}
|
|
|
|
|
// Populate the exported live binding (see the `export` near the top of the
// module-level declarations) with the lazily created singleton.
instance = MacroLexer.instance;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Marks a token as entering (pushing) the given lexer mode when matched.
 *
 * Token definitions are global, so a token type can only ever push one mode;
 * registering the same token for a different mode throws. Registering the
 * same token/mode pair again is a no-op and allowed.
 *
 * @param {object} token - Token type created via createToken; mutated in place.
 * @param {string} mode - Name of the mode to push when the token matches.
 * @param {object} [options]
 * @param {string} [options.andExits] - If set, also mark the token as popping
 *   the named mode (see exits()).
 * @returns {object} The same token, for inline use in mode definitions.
 * @throws {Error} When token is missing, or already registered for a
 *   different mode.
 */
function enter(token, mode, { andExits = undefined } = {}) {
    if (!token) throw new Error('Token must not be undefined');

    const registered = enterModesMap.get(token.name);
    if (enterModesMap.has(token.name) && registered !== mode) {
        // Fixed grammar of the original message ("already is set", "The token
        // definition are global ... lead to different modes").
        throw new Error(`Token ${token.name} is already set to enter mode ${registered}. Token definitions are global, so they cannot be used to enter different modes.`);
    }

    if (andExits) exits(token, andExits);

    token.PUSH_MODE = mode;
    enterModesMap.set(token.name, mode);
    return token;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Flags a token as popping the current lexer mode when matched.
 *
 * The mode argument is informational: it names the mode being exited, and
 * any truthy value enables the pop.
 *
 * @param {object} token - Token type created via createToken; mutated in place.
 * @param {string} mode - Name of the mode this token exits.
 * @returns {object} The same token, for inline use in mode definitions.
 * @throws {Error} When token is missing.
 */
function exits(token, mode) {
    if (!token) {
        throw new Error('Token must not be undefined');
    }
    token.POP_MODE = Boolean(mode);
    return token;
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/**
 * Marks a token as usable in a mode without changing the mode stack.
 *
 * Acts as a guard: a token already registered to push a mode via enter()
 * cannot also be listed as a plain token, because token definitions are
 * global.
 *
 * @param {object} token - Token type created via createToken.
 * @returns {object} The same token, for inline use in mode definitions.
 * @throws {Error} When token is missing, or already registered to enter a mode.
 */
function using(token) {
    if (!token) throw new Error('Token must not be undefined');
    if (enterModesMap.has(token.name)) {
        // Fixed grammar of the original message ("The token definition are
        // global ... lead or stay differently").
        throw new Error(`Token ${token.name} is already marked to enter a mode (${enterModesMap.get(token.name)}). Token definitions are global, so they cannot both enter a mode and be used as a plain token.`);
    }
    return token;
}
|
|
|