res_parser.ml
module Scanner = Res_scanner
module Diagnostics = Res_diagnostics
module Token = Res_token
module Grammar = Res_grammar
module Reporting = Res_reporting
module Comment = Res_comment
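
(* Whether the resulting parsetree is consumed by the type checker or by other
 * tooling (e.g. printing). *)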
type mode = ParseForTypeChecker | Default
type region_status = Report | Silent
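
(* Mutable parser state: the scanner, the current token together with its
 * start/end positions, the grammar breadcrumbs, and the diagnostics and
 * comments collected so far. *)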
type t = {
mode: mode;
mutable scanner: Scanner.t;
mutable token: Token.t;
mutable start_pos: Lexing.position;
mutable end_pos: Lexing.position;
mutable prev_end_pos: Lexing.position;
mutable breadcrumbs: (Grammar.t * Lexing.position) list;
mutable errors: Reporting.parse_error list;
mutable diagnostics: Diagnostics.t list;
mutable comments: Comment.t list;
mutable regions: region_status ref list;
mutable uncurried_config: Config.uncurried;
}
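
(* Record a parse error as a diagnostic. Only the first error inside the
 * innermost [Report] region is kept; the region is then switched to [Silent]
 * so that follow-up errors in the same region are dropped. *)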
let err ?start_pos ?end_pos p error =
match p.regions with
| ({contents = Report} as region) :: _ ->
let d =
Diagnostics.make
~start_pos:
(match start_pos with
| Some pos -> pos
| None -> p.start_pos)
~end_pos:
(match end_pos with
| Some pos -> pos
| None -> p.end_pos)
error
in
p.diagnostics <- d :: p.diagnostics;
region := Silent
| _ -> ()
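
(* Error regions form a stack: [begin_region] pushes a fresh [Report] region,
 * [end_region] pops the innermost one. *)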
let begin_region p = p.regions <- ref Report :: p.regions
let end_region p =
match p.regions with
| [] -> ()
| _ :: rest -> p.regions <- rest
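
(* Doc comments and module comments are not stored in [p.comments]; they are
 * converted into dedicated tokens so the parser can handle them as
 * attributes. *)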
let doc_comment_to_attribute_token comment =
let txt = Comment.txt comment in
let loc = Comment.loc comment in
Token.DocComment (loc, txt)
let module_comment_to_attribute_token comment =
let txt = Comment.txt comment in
let loc = Comment.loc comment in
Token.ModuleComment (loc, txt)
(* Advance to the next non-comment token and store any encountered comment
* in the parser's state. Every comment contains the end position of its
 * previous token to facilitate comment interleaving. *)
let rec next ?prev_end_pos p =
if p.token = Eof then assert false;
let prev_end_pos =
match prev_end_pos with
| Some pos -> pos
| None -> p.end_pos
in
let start_pos, end_pos, token = Scanner.scan p.scanner in
match token with
| Comment c ->
if Comment.is_doc_comment c then (
p.token <- doc_comment_to_attribute_token c;
p.prev_end_pos <- prev_end_pos;
p.start_pos <- start_pos;
p.end_pos <- end_pos)
else if Comment.is_module_comment c then (
p.token <- module_comment_to_attribute_token c;
p.prev_end_pos <- prev_end_pos;
p.start_pos <- start_pos;
p.end_pos <- end_pos)
else (
Comment.set_prev_tok_end_pos c p.end_pos;
p.comments <- c :: p.comments;
p.prev_end_pos <- p.end_pos;
p.end_pos <- end_pos;
next ~prev_end_pos p)
| _ ->
p.token <- token;
p.prev_end_pos <- prev_end_pos;
p.start_pos <- start_pos;
p.end_pos <- end_pos
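
(* Like [next], but a no-op at [Eof] instead of asserting. *)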
let next_unsafe p = if p.token <> Eof then next p
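
(* Scan the next token using the scanner's template-literal rules, for use
 * inside template strings. *)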
let next_template_literal_token p =
let start_pos, end_pos, token =
Scanner.scan_template_literal_token p.scanner
in
p.token <- token;
p.prev_end_pos <- p.end_pos;
p.start_pos <- start_pos;
p.end_pos <- end_pos
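
(* Loop-protection helper: yields [Some result] only if the parser advanced
 * past [prev_end_pos], and [None] otherwise. *)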
let check_progress ~prev_end_pos ~result p =
if p.end_pos == prev_end_pos then None else Some result
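
(* Create a parser for [src]. The initial token is a placeholder [Semicolon]
 * with dummy positions; the scanner's error callback is wired into the
 * parser's diagnostics, and [next] is called once to load the first real
 * token. *)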
let make ?(mode = ParseForTypeChecker) src filename =
let scanner = Scanner.make ~filename src in
let parser_state =
{
mode;
scanner;
token = Token.Semicolon;
start_pos = Lexing.dummy_pos;
prev_end_pos = Lexing.dummy_pos;
end_pos = Lexing.dummy_pos;
breadcrumbs = [];
errors = [];
diagnostics = [];
comments = [];
regions = [ref Report];
uncurried_config = !Config.uncurried;
}
in
parser_state.scanner.err <-
(fun ~start_pos ~end_pos error ->
let diagnostic = Diagnostics.make ~start_pos ~end_pos error in
parser_state.diagnostics <- diagnostic :: parser_state.diagnostics);
next parser_state;
parser_state
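
(* Breadcrumbs track the stack of grammar rules currently being parsed,
 * together with the position where each one was entered. *)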
let leave_breadcrumb p circumstance =
let crumb = (circumstance, p.start_pos) in
p.breadcrumbs <- crumb :: p.breadcrumbs
let eat_breadcrumb p =
match p.breadcrumbs with
| [] -> ()
| _ :: crumbs -> p.breadcrumbs <- crumbs
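
(* Consume [token] if it is the current token; return whether it was
 * consumed. *)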
let optional p token =
if p.token = token then
let () = next p in
true
else false
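
(* Consume the expected [token], or record an "expected ..." diagnostic
 * anchored at the end of the previous token. *)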
let expect ?grammar token p =
if p.token = token then next p
else
let error = Diagnostics.expected ?grammar p.prev_end_pos token in
err ~start_pos:p.prev_end_pos p error
(* Don't use immutable copies here; that trashes certain heuristics
 * in the OCaml compiler, resulting in massive slowdowns of the parser. *)
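(* [lookahead p callback] runs [callback] speculatively: the mutable scanner
 * and parser state is saved field by field, the callback is evaluated, the
 * saved state is restored, and the callback's result is returned. *)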
let lookahead p callback =
let err = p.scanner.err in
let ch = p.scanner.ch in
let offset = p.scanner.offset in
let offset16 = p.scanner.offset16 in
let line_offset = p.scanner.line_offset in
let lnum = p.scanner.lnum in
let mode = p.scanner.mode in
let token = p.token in
let start_pos = p.start_pos in
let end_pos = p.end_pos in
let prev_end_pos = p.prev_end_pos in
let breadcrumbs = p.breadcrumbs in
let errors = p.errors in
let diagnostics = p.diagnostics in
let comments = p.comments in
let uncurried_config = p.uncurried_config in
let res = callback p in
p.scanner.err <- err;
p.scanner.ch <- ch;
p.scanner.offset <- offset;
p.scanner.offset16 <- offset16;
p.scanner.line_offset <- line_offset;
p.scanner.lnum <- lnum;
p.scanner.mode <- mode;
p.token <- token;
p.start_pos <- start_pos;
p.end_pos <- end_pos;
p.prev_end_pos <- prev_end_pos;
p.breadcrumbs <- breadcrumbs;
p.errors <- errors;
p.diagnostics <- diagnostics;
p.comments <- comments;
p.uncurried_config <- uncurried_config;
res
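
(* A minimal usage sketch (illustrative only; not part of this module):
 *
 *   let p = make "let answer = 42" "example.res" in
 *   (* [make] has already loaded the first token via [next]. *)
 *   while p.token <> Token.Eof do
 *     (* ... inspect p.token, p.start_pos, p.end_pos here ... *)
 *     next p
 *   done
 *)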