@@ -30,7 +30,7 @@ pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
30
30
31
31
#[ macro_use]
32
32
pub mod parser;
33
- use parser:: { emit_unclosed_delims , make_unclosed_delims_error, Parser } ;
33
+ use parser:: { make_unclosed_delims_error, Parser } ;
34
34
pub mod lexer;
35
35
pub mod validate_attr;
36
36
@@ -96,10 +96,7 @@ pub fn parse_stream_from_source_str(
96
96
sess : & ParseSess ,
97
97
override_span : Option < Span > ,
98
98
) -> TokenStream {
99
- let ( stream, mut errors) =
100
- source_file_to_stream ( sess, sess. source_map ( ) . new_source_file ( name, source) , override_span) ;
101
- emit_unclosed_delims ( & mut errors, & sess) ;
102
- stream
99
+ source_file_to_stream ( sess, sess. source_map ( ) . new_source_file ( name, source) , override_span)
103
100
}
104
101
105
102
/// Creates a new parser from a source string.
@@ -135,9 +132,8 @@ fn maybe_source_file_to_parser(
135
132
source_file : Lrc < SourceFile > ,
136
133
) -> Result < Parser < ' _ > , Vec < Diagnostic > > {
137
134
let end_pos = source_file. end_pos ;
138
- let ( stream, unclosed_delims ) = maybe_file_to_stream ( sess, source_file, None ) ?;
135
+ let stream = maybe_file_to_stream ( sess, source_file, None ) ?;
139
136
let mut parser = stream_to_parser ( sess, stream, None ) ;
140
- parser. unclosed_delims = unclosed_delims;
141
137
if parser. token == token:: Eof {
142
138
parser. token . span = Span :: new ( end_pos, end_pos, parser. token . span . ctxt ( ) , None ) ;
143
139
}
@@ -182,7 +178,7 @@ pub fn source_file_to_stream(
182
178
sess : & ParseSess ,
183
179
source_file : Lrc < SourceFile > ,
184
180
override_span : Option < Span > ,
185
- ) -> ( TokenStream , Vec < lexer :: UnmatchedDelim > ) {
181
+ ) -> TokenStream {
186
182
panictry_buffer ! ( & sess. span_diagnostic, maybe_file_to_stream( sess, source_file, override_span) )
187
183
}
188
184
@@ -192,7 +188,7 @@ pub fn maybe_file_to_stream(
192
188
sess : & ParseSess ,
193
189
source_file : Lrc < SourceFile > ,
194
190
override_span : Option < Span > ,
195
- ) -> Result < ( TokenStream , Vec < lexer :: UnmatchedDelim > ) , Vec < Diagnostic > > {
191
+ ) -> Result < TokenStream , Vec < Diagnostic > > {
196
192
let src = source_file. src . as_ref ( ) . unwrap_or_else ( || {
197
193
sess. span_diagnostic . bug ( & format ! (
198
194
"cannot lex `source_file` without source: {}" ,
@@ -204,7 +200,7 @@ pub fn maybe_file_to_stream(
204
200
lexer:: parse_token_trees ( sess, src. as_str ( ) , source_file. start_pos , override_span) ;
205
201
206
202
match token_trees {
207
- Ok ( stream) if unmatched_delims. is_empty ( ) => Ok ( ( stream, unmatched_delims ) ) ,
203
+ Ok ( stream) if unmatched_delims. is_empty ( ) => Ok ( stream) ,
208
204
_ => {
209
205
// Return error if there are unmatched delimiters or unclosing delimiters.
210
206
// We emit delimiter mismatch errors first, then emit the unclosing delimiter mismatch
0 commit comments