// except according to those terms.

use abi::{self, Abi};
- use ast::BareFnTy;
+ use ast::{AttrStyle, BareFnTy};
use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
use ast::Unsafety;
use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind};
@@ -46,21 +46,21 @@ use errors::{self, DiagnosticBuilder};
use parse::{self, classify, token};
use parse::common::SeqSep;
use parse::lexer::TokenAndSpan;
+ use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use parse::obsolete::ObsoleteSyntax;
use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
use util::parser::{AssocOp, Fixity};
use print::pprust;
use ptr::P;
use parse::PResult;
- use tokenstream::{Delimited, TokenTree};
+ use tokenstream::{self, Delimited, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use util::ThinVec;

use std::collections::HashSet;
- use std::mem;
+ use std::{cmp, mem, slice};
use std::path::{Path, PathBuf};
use std::rc::Rc;
- use std::slice;

bitflags! {
    flags Restrictions: u8 {
@@ -175,12 +175,108 @@ pub struct Parser<'a> {
    /// into modules, and sub-parsers have new values for this name.
    pub root_module_name: Option<String>,
    pub expected_tokens: Vec<TokenType>,
-     pub tts: Vec<(TokenTree, usize)>,
+     token_cursor: TokenCursor,
    pub desugar_doc_comments: bool,
    /// Whether we should configure out of line modules as we parse.
    pub cfg_mods: bool,
}

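+ /// A cursor over a stream of token trees that yields one flat `TokenAndSpan` at a time,
+ /// entering nested `Delimited` groups by pushing the enclosing frame onto `stack`.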
+ struct TokenCursor {
+     frame: TokenCursorFrame,
+     stack: Vec<TokenCursorFrame>,
+ }
+
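+ /// One level of nesting: the delimiter of the group being walked, its span, whether the
+ /// open/close delimiter tokens have been emitted yet, and a cursor over the inner trees.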
+ struct TokenCursorFrame {
+     delim: token::DelimToken,
+     span: Span,
+     open_delim: bool,
+     tree_cursor: tokenstream::Cursor,
+     close_delim: bool,
+ }
+
+ impl TokenCursorFrame {
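+     /// Builds a frame for `delimited`. A `NoDelim` group has no real open/close tokens,
+     /// so both flags start out as already emitted.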
+     fn new(sp: Span, delimited: &Delimited) -> Self {
+         TokenCursorFrame {
+             delim: delimited.delim,
+             span: sp,
+             open_delim: delimited.delim == token::NoDelim,
+             tree_cursor: delimited.tts.iter().cloned().collect::<TokenStream>().into_trees(),
+             close_delim: delimited.delim == token::NoDelim,
+         }
+     }
+ }
+
+ impl TokenCursor {
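+     /// Yields the next token: first the open delimiter of the current frame, then its
+     /// inner trees (pushing a new frame for each nested `Delimited` group), then the
+     /// close delimiter, and finally pops back to the parent frame, returning `Eof`
+     /// once the root frame is exhausted.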
+     fn next(&mut self) -> TokenAndSpan {
+         loop {
+             let tree = if !self.frame.open_delim {
+                 self.frame.open_delim = true;
+                 Delimited { delim: self.frame.delim, tts: Vec::new() }.open_tt(self.frame.span)
+             } else if let Some(tree) = self.frame.tree_cursor.next() {
+                 tree
+             } else if !self.frame.close_delim {
+                 self.frame.close_delim = true;
+                 Delimited { delim: self.frame.delim, tts: Vec::new() }.close_tt(self.frame.span)
+             } else if let Some(frame) = self.stack.pop() {
+                 self.frame = frame;
+                 continue
+             } else {
+                 return TokenAndSpan { tok: token::Eof, sp: self.frame.span }
+             };
+
+             match tree {
+                 TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+                 TokenTree::Delimited(sp, ref delimited) => {
+                     let frame = TokenCursorFrame::new(sp, delimited);
+                     self.stack.push(mem::replace(&mut self.frame, frame));
+                 }
+             }
+         }
+     }
+
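+     /// Like `next`, but a doc comment token is desugared into the token trees of the
+     /// equivalent `#[doc = "..."]` (or `#![doc = "..."]` for inner comments) attribute.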
+     fn next_desugared(&mut self) -> TokenAndSpan {
+         let (sp, name) = match self.next() {
+             TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+             tok @ _ => return tok,
+         };
+
+         let stripped = strip_doc_comment_decoration(&name.as_str());
+
+         // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+         // required to wrap the text.
+         let mut num_of_hashes = 0;
+         let mut count = 0;
+         for ch in stripped.chars() {
+             count = match ch {
+                 '"' => 1,
+                 '#' if count > 0 => count + 1,
+                 _ => 0,
+             };
+             num_of_hashes = cmp::max(num_of_hashes, count);
+         }
+
+         let body = TokenTree::Delimited(sp, Rc::new(Delimited {
+             delim: token::Bracket,
+             tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+                       TokenTree::Token(sp, token::Eq),
+                       TokenTree::Token(sp, token::Literal(
+                           token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
+         }));
+
+         self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+             delim: token::NoDelim,
+             tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                     .iter().cloned().collect()
+             } else {
+                 [TokenTree::Token(sp, token::Pound), body].iter().cloned().collect()
+             },
+         })));
+
+         self.next()
+     }
+ }
+
#[derive(PartialEq, Eq, Clone)]
pub enum TokenType {
    Token(token::Token),
@@ -313,10 +409,6 @@ impl<'a> Parser<'a> {
               directory: Option<Directory>,
               desugar_doc_comments: bool)
               -> Self {
-         let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
-             delim: token::NoDelim,
-             tts: tokens,
-         }));
        let mut parser = Parser {
            sess: sess,
            token: token::Underscore,
@@ -328,7 +420,13 @@ impl<'a> Parser<'a> {
            directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
            root_module_name: None,
            expected_tokens: Vec::new(),
-             tts: if tt.len() > 0 { vec![(tt, 0)] } else { Vec::new() },
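+             // The whole token stream is walked through an implicit `NoDelim` root frame.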
+             token_cursor: TokenCursor {
+                 frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+                     delim: token::NoDelim,
+                     tts: tokens,
+                 }),
+                 stack: Vec::new(),
+             },
            desugar_doc_comments: desugar_doc_comments,
            cfg_mods: true,
        };
@@ -346,28 +444,9 @@ impl<'a> Parser<'a> {
    }

    fn next_tok(&mut self) -> TokenAndSpan {
-         loop {
-             let tok = if let Some((tts, i)) = self.tts.pop() {
-                 let tt = tts.get_tt(i);
-                 if i + 1 < tts.len() {
-                     self.tts.push((tts, i + 1));
-                 }
-                 if let TokenTree::Token(sp, tok) = tt {
-                     TokenAndSpan { tok: tok, sp: sp }
-                 } else {
-                     self.tts.push((tt, 0));
-                     continue
-                 }
-             } else {
-                 TokenAndSpan { tok: token::Eof, sp: self.span }
-             };
-
-             match tok.tok {
-                 token::DocComment(name) if self.desugar_doc_comments => {
-                     self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
-                 }
-                 _ => return tok,
-             }
+         match self.desugar_doc_comments {
+             true => self.token_cursor.next_desugared(),
+             false => self.token_cursor.next(),
        }
    }

@@ -972,19 +1051,16 @@ impl<'a> Parser<'a> {
        F: FnOnce(&token::Token) -> R,
    {
        if dist == 0 {
-             return f(&self.token);
-         }
-         let mut tok = token::Eof;
-         if let Some(&(ref tts, mut i)) = self.tts.last() {
-             i += dist - 1;
-             if i < tts.len() {
-                 tok = match tts.get_tt(i) {
-                     TokenTree::Token(_, tok) => tok,
-                     TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
-                 };
-             }
+             return f(&self.token)
        }
-         f(&tok)
+
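+         // Look ahead only within the current frame; any position past its end is
+         // reported as the frame's closing delimiter.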
+         f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+             Some(tree) => match tree {
+                 TokenTree::Token(_, tok) => tok,
+                 TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+             },
+             None => token::CloseDelim(self.token_cursor.frame.delim),
+         })
    }
    pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
        self.sess.span_diagnostic.struct_span_fatal(self.span, m)
@@ -2569,10 +2645,14 @@ impl<'a> Parser<'a> {
    pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
        match self.token {
            token::OpenDelim(..) => {
-                 let tt = self.tts.pop().unwrap().0;
-                 self.span = tt.span();
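+                 // The open delimiter has already pushed a new frame, so restore the
+                 // parent frame and rebuild the whole `Delimited` tree from the
+                 // abandoned frame's original stream.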
+                 let frame = mem::replace(&mut self.token_cursor.frame,
+                                          self.token_cursor.stack.pop().unwrap());
+                 self.span = frame.span;
                self.bump();
-                 return Ok(tt);
+                 return Ok(TokenTree::Delimited(frame.span, Rc::new(Delimited {
+                     delim: frame.delim,
+                     tts: frame.tree_cursor.original_stream().trees().collect(),
+                 })));
            },
            token::CloseDelim(_) | token::Eof => unreachable!(),
            _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),