// ebnf-parser.js (forked from zaach/ebnf-parser)
import bnf from "./parser";
import transform from "./ebnf-transform";
import jisonlex from "@gerhobbelt/lex-parser";
var version = '0.6.1-205'; // require('./package.json').version;

function parse(grammar) {
    return bnf.parser.parse(grammar);
}

// adds a declaration to the grammar
bnf.parser.yy.addDeclaration = function bnfAddDeclaration(grammar, decl) {
    if (decl.start) {
        grammar.start = decl.start;
    } else if (decl.lex) {
        grammar.lex = parseLex(decl.lex.text, decl.lex.position);
    } else if (decl.operator) {
        if (!grammar.operators) grammar.operators = [];
        grammar.operators.push(decl.operator);
    } else if (decl.token) {
        if (!grammar.extra_tokens) grammar.extra_tokens = [];
        grammar.extra_tokens.push(decl.token);
    } else if (decl.token_list) {
        if (!grammar.extra_tokens) grammar.extra_tokens = [];
        decl.token_list.forEach(function (tok) {
            grammar.extra_tokens.push(tok);
        });
    } else if (decl.parseParams) {
        if (!grammar.parseParams) grammar.parseParams = [];
        grammar.parseParams = grammar.parseParams.concat(decl.parseParams);
    } else if (decl.parserType) {
        if (!grammar.options) grammar.options = {};
        grammar.options.type = decl.parserType;
    } else if (decl.include) {
        if (!grammar.moduleInclude) grammar.moduleInclude = '';
        grammar.moduleInclude += decl.include;
    } else if (decl.options) {
        if (!grammar.options) grammar.options = {};
        // last occurrence of `%options` wins:
        for (var i = 0; i < decl.options.length; i++) {
            grammar.options[decl.options[i][0]] = decl.options[i][1];
        }
    } else if (decl.unknownDecl) {
        if (!grammar.unknownDecls) grammar.unknownDecls = [];
        grammar.unknownDecls.push(decl.unknownDecl);
    } else if (decl.imports) {
        if (!grammar.imports) grammar.imports = [];
        grammar.imports.push(decl.imports);
    } else if (decl.actionInclude) {
        if (!grammar.actionInclude) {
            grammar.actionInclude = '';
        }
        grammar.actionInclude += decl.actionInclude;
    } else if (decl.initCode) {
        if (!grammar.moduleInit) {
            grammar.moduleInit = [];
        }
        grammar.moduleInit.push(decl.initCode);     // {qualifier: <name>, include: <source code chunk>}
    }
};
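
// Illustrative note (not part of the original module): the shape of each `decl`
// object is inferred from the branches of bnfAddDeclaration above, and the
// option name used below is a made-up example. Repeated `%options` declarations
// merge into `grammar.options`, with the last occurrence of a key winning:
//
//     var grammar = {};
//     bnf.parser.yy.addDeclaration(grammar, { options: [['module-type', 'es']] });
//     bnf.parser.yy.addDeclaration(grammar, { options: [['module-type', 'commonjs']] });
//     // grammar.options['module-type'] === 'commonjs'
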
// parse an embedded lex section
function parseLex(text, position) {
    text = text.replace(/(?:^%lex)|(?:\/lex$)/g, '');
    // We want the lex input to start at the given 'position', if any,
    // so that error reports will produce a line number and character index
    // which matches the original input file:
    position = position || {};
    position.range = position.range || [];
    var l = position.first_line | 0;
    var c = position.range[0] | 0;

    var prelude = '';
    if (l > 1) {
        prelude += (new Array(l)).join('\n');
        c -= prelude.length;
    }
    if (c > 3) {
        prelude = '// ' + (new Array(c - 3)).join('.') + prelude;
    }
    return jisonlex.parse(prelude + text);
}
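
// Worked example (illustrative, with assumed numbers): if the embedded lex
// block started on line 3 of the grammar file at character offset 40, i.e.
// position = { first_line: 3, range: [40, ...] }, the code above builds a
// prelude of '// ' plus 34 dots (a 37-character comment line) followed by two
// newlines, 39 characters in total. jisonlex therefore sees the lex text at
// nearly the same line and character position as in the original grammar file,
// so its error reports line up with the source.
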
const ebnf_parser = {
    transform
};

export default {
    parse,
    transform,

    // assistant exports for debugging/testing:
    bnf_parser: bnf,
    ebnf_parser,
    bnf_lexer: jisonlex,
    version,
};
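
// Usage sketch (illustrative only; the import path and grammar text are
// assumptions, not taken from this repository's docs):
//
//     import ebnfParser from './ebnf-parser';
//
//     var grammar = ebnfParser.parse([
//         '%lex',
//         '%%',
//         '[ ]+       /* skip spaces */',
//         '[0-9]+     return "NUMBER";',
//         '/lex',
//         '',
//         '%%',
//         'expr : NUMBER ;',
//     ].join('\n'));
//
//     // `grammar` is the parsed grammar object; the embedded `%lex ... /lex`
//     // section has been run through parseLex() and stored on `grammar.lex`.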