2022-01-28 17:22:59 +00:00
|
|
|
|
2022-01-29 19:07:21 +00:00
|
|
|
import type { SourceLocation, SourceLine, WorkerError, SourceLocated } from "./workertypes";
|
2022-01-28 17:22:59 +00:00
|
|
|
|
|
|
|
export class CompileError extends Error {
|
|
|
|
$loc: SourceLocation;
|
|
|
|
constructor(msg: string, loc: SourceLocation) {
|
|
|
|
super(msg);
|
|
|
|
Object.setPrototypeOf(this, CompileError.prototype);
|
|
|
|
this.$loc = loc;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
export function mergeLocs(a: SourceLocation, b: SourceLocation): SourceLocation {
|
|
|
|
return {
|
|
|
|
line: Math.min(a.line, b.line),
|
|
|
|
start: Math.min(a.start, b.start),
|
|
|
|
end: Math.max(a.end, b.end),
|
|
|
|
label: a.label || b.label,
|
|
|
|
path: a.path || b.path,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// Token categories produced by the Tokenizer. Rule sets may also use
// their own string type values; these are the ones the core recognizes.
export enum TokenType {
    EOF = 'eof',            // synthetic end-of-input token
    EOL = 'eol',            // end of line
    Ident = 'ident',        // identifier
    Comment = 'comment',    // comment text (matched but not pushed to the token stream)
    Ignore = 'ignore',      // matched but discarded
    CatchAll = 'catch-all', // fallback for otherwise-unmatched input
}
|
|
|
|
|
|
|
|
// A single lexed token, tagged with its location in the source file.
export class Token implements SourceLocated {
    str: string;          // the matched text
    type: string;         // a TokenType value, or a rule-defined type string
    $loc: SourceLocation; // where the token appears in the source
}
|
|
|
|
|
|
|
|
// Pairs a token type with the regex that matches it. Rules are combined
// into a single alternation by TokenizerRuleSet, in priority order.
export class TokenRule {
    type: string;  // token type assigned to matches of this rule
    regex: RegExp; // pattern for this rule; its source is inlined into the combined regex
}
|
|
|
|
|
|
|
|
// Fallback rule appended to every rule set; the lazy `.+?` effectively
// consumes one character at a time, so the tokenizer never stalls on
// input that matches no other rule.
const CATCH_ALL_RULES: TokenRule[] = [
    { type: TokenType.CatchAll, regex: /.+?/ }
]
|
|
|
|
|
|
|
|
function re_escape(rule: TokenRule): string {
|
|
|
|
return `(${rule.regex.source})`;
|
|
|
|
}
|
|
|
|
|
2022-02-01 15:13:37 +00:00
|
|
|
export class TokenizerRuleSet {
|
2022-01-28 17:22:59 +00:00
|
|
|
rules: TokenRule[];
|
|
|
|
regex: RegExp;
|
2022-02-01 15:13:37 +00:00
|
|
|
constructor(rules: TokenRule[]) {
|
|
|
|
this.rules = rules.concat(CATCH_ALL_RULES);
|
|
|
|
var pattern = this.rules.map(re_escape).join('|');
|
|
|
|
this.regex = new RegExp(pattern, "gs"); // global, dotall
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Streaming tokenizer / parser base class. `tokenizeFile()` lexes an entire
 * file into `this.tokens`; the expect/consume/peek methods then support
 * recursive-descent parsing over that token list. Errors are both recorded
 * in `this.errors` and thrown as CompileError.
 */
export class Tokenizer {
    ruleset: TokenizerRuleSet;    // combined rules + regex used for lexing
    lineindex: number[];          // char offsets of line breaks; lineindex[0] == 0
    path: string;                 // path of the file being tokenized (for locations)
    lineno: number;               // current line number during lexing
    tokens: Token[];              // remaining (unconsumed) tokens
    lasttoken: Token;             // most recently consumed token (for error locations)
    errors: WorkerError[];        // accumulated errors
    curlabel: string;             // label attached to reported errors
    eof: Token;                   // synthetic EOF token, returned when tokens run out
    errorOnCatchAll = false;      // if true, catch-all matches raise a compile error
    deferred: (() => void)[] = []; // callbacks run later via runDeferred()

    constructor() {
        this.errors = [];
        this.lineno = 0;
        this.lineindex = [];
        this.tokens = [];
    }
    // Install an already-compiled rule set.
    setTokenRuleSet(ruleset: TokenizerRuleSet) {
        this.ruleset = ruleset;
    }
    // Compile a rule list and install it.
    setTokenRules(rules: TokenRule[]) {
        this.setTokenRuleSet(new TokenizerRuleSet(rules));
    }
    // Lex an entire file: record line-break offsets, tokenize the contents,
    // then append the synthetic EOF token.
    tokenizeFile(contents: string, path: string) {
        this.path = path;
        let m;
        let re = /\n|\r\n?/g;
        this.lineindex.push(0);
        while (m = re.exec(contents)) {
            this.lineindex.push(m.index);
        }
        this._tokenize(contents);
        this.eof = { type: TokenType.EOF, str: "", $loc: { path: this.path, line: this.lineno } };
        this.pushToken(this.eof);
    }
    // Core lexing loop: repeatedly match the combined regex and map the
    // matched capture group back to its rule.
    // NOTE(review): `this.ruleset.regex` is global and stateful (lastIndex);
    // a CompileError thrown mid-scan leaves it nonzero — confirm each
    // Tokenizer/ruleset is used for at most one tokenizeFile() call.
    _tokenize(text: string): void {
        // iterate over each token via re_toks regex
        let m: RegExpMatchArray;
        this.lineno = 0;
        while (m = this.ruleset.regex.exec(text)) {
            let found = false;
            // find line #
            while (m.index >= this.lineindex[this.lineno]) {
                this.lineno++;
            }
            // find out which capture group was matched, and thus token type
            let rules = this.ruleset.rules;
            for (let i = 0; i < rules.length; i++) {
                // capture group i+1 corresponds to rules[i]
                let s: string = m[i + 1];
                if (s != null) {
                    found = true;
                    // column relative to the start of the current line
                    let col = m.index - (this.lineindex[this.lineno-1] || -1) - 1;
                    let loc = { path: this.path, line: this.lineno, start: col, end: col + s.length };
                    let rule = rules[i];
                    // add token to list
                    // NB: the fall-through below is deliberate — CatchAll
                    // (when not erroring) falls into `default` and is pushed;
                    // Comment/Ignore sit after `default` so they match their
                    // own labels and are dropped without being pushed.
                    switch (rule.type) {
                        case TokenType.CatchAll:
                            if (this.errorOnCatchAll) {
                                this.compileError(`I didn't expect the character "${m[0]}" here.`, loc);
                            }
                        default:
                            this.pushToken({ str: s, type: rule.type, $loc: loc });
                        case TokenType.Comment:
                        case TokenType.Ignore:
                            break;
                    }
                    break;
                }
            }
            if (!found) {
                this.compileError(`Could not parse token: <<${m[0]}>>`)
            }
        }
    }
    // Append a token to the stream.
    pushToken(token: Token) {
        this.tokens.push(token);
    }
    // Record an error; when no location is given, use the last-consumed
    // (or next pending) token's location.
    addError(msg: string, loc?: SourceLocation) {
        let tok = this.lasttoken || this.peekToken();
        if (!loc) loc = tok.$loc;
        this.errors.push({ path: loc.path, line: loc.line, label: this.curlabel, start: loc.start, end: loc.end, msg: msg });
    }
    internalError() {
        this.compileError("Internal error.");
    }
    notImplementedError() {
        this.compileError("Not yet implemented.");
    }
    // Record the error AND abort by throwing CompileError.
    compileError(msg: string, loc?: SourceLocation, loc2?: SourceLocation) {
        this.addError(msg, loc);
        //if (loc2 != null) this.addError(`...`, loc2);
        throw new CompileError(msg, loc);
    }
    // Look ahead without consuming; returns the EOF token past the end.
    peekToken(lookahead?: number): Token {
        let tok = this.tokens[lookahead || 0];
        return tok ? tok : this.eof;
    }
    // Remove and return the next token (EOF token once exhausted).
    consumeToken(): Token {
        let tok = this.lasttoken = (this.tokens.shift() || this.eof);
        return tok;
    }
    // Consume and return the next token only if its text equals `match`.
    ifToken(match: string): Token | undefined {
        if (this.peekToken().str == match) return this.consumeToken();
    }
    // Consume the next token, erroring unless its text equals `str`.
    expectToken(str: string, msg?: string): Token {
        let tok = this.consumeToken();
        let tokstr = tok.str;
        if (str != tokstr) {
            this.compileError(msg || `There should be a "${str}" here.`);
        }
        return tok;
    }
    // Consume the next token, erroring unless its text is in `strlist`.
    expectTokens(strlist: readonly string[], msg?: string): Token {
        let tok = this.consumeToken();
        let tokstr = tok.str;
        if (!strlist.includes(tokstr)) {
            this.compileError(msg || `These keywords are valid here: ${strlist.join(', ')}`);
        }
        return tok;
    }
    // Consume a run of tokens drawn from `modifiers`, returning a set of
    // which ones were seen.
    // NOTE(review): if peekToken() ever returns undefined (eof unset before
    // tokenizeFile), the loop exits and this returns undefined despite the
    // declared return type — confirm callers only run after tokenizeFile.
    parseModifiers(modifiers: string[]): { [modifier: string]: boolean } {
        let result = {};
        do {
            var tok = this.peekToken();
            if (modifiers.indexOf(tok.str) < 0)
                return result;
            this.consumeToken();
            result[tok.str] = true;
        } while (tok != null);
    }
    // Consume the next token, erroring unless it is an identifier.
    expectIdent(msg?: string): Token {
        let tok = this.consumeToken();
        if (tok.type != TokenType.Ident)
            this.compileError(msg || `There should be an identifier here.`);
        return tok;
    }
    // Return a token to the front of the stream.
    pushbackToken(tok: Token) {
        this.tokens.unshift(tok);
    }
    isEOF() {
        return this.tokens.length == 0 || this.peekToken().type == 'eof'; // TODO?
    }
    // Consume the next token, erroring unless it ends the line.
    expectEOL(msg?: string) {
        let tok = this.consumeToken();
        if (tok.type != TokenType.EOL)
            this.compileError(msg || `There's too much stuff on this line.`);
    }
    skipBlankLines() {
        this.skipTokenTypes(['eol']);
    }
    // Consume tokens while their type is in `types`.
    skipTokenTypes(types: string[]) {
        while (types.includes(this.peekToken().type))
            this.consumeToken();
    }
    // Consume the next token, erroring unless its type is in `types`.
    expectTokenTypes(types: string[], msg?: string) {
        let tok = this.consumeToken();
        if (!types.includes(tok.type))
            this.compileError(msg || `There should be a ${types.map((s) => `"${s}"`).join(' or ')} here. not a "${tok.type}".`);
        return tok;
    }
    // Parse a `delim`-separated list of elements via `parseFunc`; the token
    // that terminates the list is pushed back for the caller.
    parseList<T>(parseFunc:()=>T, delim:string): T[] {
        var sep;
        var list = [];
        do {
            var el = parseFunc.bind(this)(); // call parse function
            if (el != null) list.push(el); // add parsed element to list
            sep = this.consumeToken(); // consume seperator token
        } while (sep.str == delim);
        this.pushbackToken(sep);
        return list;
    }
    // Run (and drain) all deferred callbacks in FIFO order.
    runDeferred() {
        while (this.deferred.length) {
            this.deferred.shift()();
        }
    }
}
|