Skip to content

Commit 298c56f

Browse files
committed
Simplify LazyAttrTokenStream.
This commit does the following: - Changes it from `Lrc<Box<dyn ToAttrTokenStream>>` to `Lrc<LazyAttrTokenStreamInner>`. - Reworks `LazyAttrTokenStreamImpl` as `LazyAttrTokenStreamInner`, which is a two-variant enum. - Removes the `ToAttrTokenStream` trait and the two impls of it. The recursion limit must be increased in some crates; otherwise rustdoc aborts.
1 parent 28236ab commit 298c56f

File tree

12 files changed

+138
-118
lines changed

12 files changed

+138
-118
lines changed

compiler/rustc_ast/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@
2020
#![feature(never_type)]
2121
#![feature(rustdoc_internals)]
2222
#![feature(stmt_expr_attributes)]
23+
#![recursion_limit = "256"]
2324
// tidy-alphabetical-end
2425

2526
pub mod util {

compiler/rustc_ast/src/mut_visit.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -835,7 +835,7 @@ fn visit_lazy_tts_opt_mut<T: MutVisitor>(vis: &mut T, lazy_tts: Option<&mut Lazy
835835
if let Some(lazy_tts) = lazy_tts {
836836
let mut tts = lazy_tts.to_attr_token_stream();
837837
visit_attr_tts(vis, &mut tts);
838-
*lazy_tts = LazyAttrTokenStream::new(tts);
838+
*lazy_tts = LazyAttrTokenStream::new_direct(tts);
839839
}
840840
}
841841
}

compiler/rustc_ast/src/tokenstream.rs

+119-95
Original file line numberDiff line numberDiff line change
@@ -107,25 +107,30 @@ where
107107
}
108108
}
109109

110-
pub trait ToAttrTokenStream: sync::DynSend + sync::DynSync {
111-
fn to_attr_token_stream(&self) -> AttrTokenStream;
112-
}
113-
114-
impl ToAttrTokenStream for AttrTokenStream {
115-
fn to_attr_token_stream(&self) -> AttrTokenStream {
116-
self.clone()
117-
}
118-
}
119-
120-
/// A lazy version of [`TokenStream`], which defers creation
121-
/// of an actual `TokenStream` until it is needed.
122-
/// `Box` is here only to reduce the structure size.
110+
/// A lazy version of [`AttrTokenStream`], which defers creation of an actual
111+
/// `AttrTokenStream` until it is needed.
123112
#[derive(Clone)]
124-
pub struct LazyAttrTokenStream(Arc<Box<dyn ToAttrTokenStream>>);
113+
pub struct LazyAttrTokenStream(Arc<LazyAttrTokenStreamInner>);
125114

126115
impl LazyAttrTokenStream {
127-
pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
128-
LazyAttrTokenStream(Arc::new(Box::new(inner)))
116+
pub fn new_direct(stream: AttrTokenStream) -> LazyAttrTokenStream {
117+
LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Direct(stream)))
118+
}
119+
120+
pub fn new_pending(
121+
start_token: (Token, Spacing),
122+
cursor_snapshot: TokenCursor,
123+
num_calls: u32,
124+
break_last_token: u32,
125+
node_replacements: Box<[NodeReplacement]>,
126+
) -> LazyAttrTokenStream {
127+
LazyAttrTokenStream(Arc::new(LazyAttrTokenStreamInner::Pending {
128+
start_token,
129+
cursor_snapshot,
130+
num_calls,
131+
break_last_token,
132+
node_replacements,
133+
}))
129134
}
130135

131136
pub fn to_attr_token_stream(&self) -> AttrTokenStream {
@@ -208,91 +213,109 @@ impl NodeRange {
208213
}
209214
}
210215

211-
// From a value of this type we can reconstruct the `TokenStream` seen by the
212-
// `f` callback passed to a call to `Parser::collect_tokens`, by
213-
// replaying the getting of the tokens. This saves us producing a `TokenStream`
214-
// if it is never needed, e.g. a captured `macro_rules!` argument that is never
215-
// passed to a proc macro. In practice, token stream creation happens rarely
216-
// compared to calls to `collect_tokens` (see some statistics in #78736) so we
217-
// are doing as little up-front work as possible.
218-
//
219-
// This also makes `Parser` very cheap to clone, since
220-
// there is no intermediate collection buffer to clone.
221-
pub struct LazyAttrTokenStreamImpl {
222-
pub start_token: (Token, Spacing),
223-
pub cursor_snapshot: TokenCursor,
224-
pub num_calls: u32,
225-
pub break_last_token: u32,
226-
pub node_replacements: Box<[NodeReplacement]>,
216+
enum LazyAttrTokenStreamInner {
217+
// The token stream has already been produced.
218+
Direct(AttrTokenStream),
219+
220+
// From a value of this type we can reconstruct the `TokenStream` seen by
221+
// the `f` callback passed to a call to `Parser::collect_tokens`, by
222+
// replaying the getting of the tokens. This saves us producing a
223+
// `TokenStream` if it is never needed, e.g. a captured `macro_rules!`
224+
// argument that is never passed to a proc macro. In practice, token stream
225+
// creation happens rarely compared to calls to `collect_tokens` (see some
226+
// statistics in #78736) so we are doing as little up-front work as
227+
// possible.
228+
//
229+
// This also makes `Parser` very cheap to clone, since there is no
230+
// intermediate collection buffer to clone.
231+
Pending {
232+
start_token: (Token, Spacing),
233+
cursor_snapshot: TokenCursor,
234+
num_calls: u32,
235+
break_last_token: u32,
236+
node_replacements: Box<[NodeReplacement]>,
237+
},
227238
}
228239

229-
impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
240+
impl LazyAttrTokenStreamInner {
230241
fn to_attr_token_stream(&self) -> AttrTokenStream {
231-
// The token produced by the final call to `{,inlined_}next` was not
232-
// actually consumed by the callback. The combination of chaining the
233-
// initial token and using `take` produces the desired result - we
234-
// produce an empty `TokenStream` if no calls were made, and omit the
235-
// final token otherwise.
236-
let mut cursor_snapshot = self.cursor_snapshot.clone();
237-
let tokens = iter::once(FlatToken::Token(self.start_token))
238-
.chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
239-
.take(self.num_calls as usize);
240-
241-
if self.node_replacements.is_empty() {
242-
make_attr_token_stream(tokens, self.break_last_token)
243-
} else {
244-
let mut tokens: Vec<_> = tokens.collect();
245-
let mut node_replacements = self.node_replacements.to_vec();
246-
node_replacements.sort_by_key(|(range, _)| range.0.start);
242+
match self {
243+
LazyAttrTokenStreamInner::Direct(stream) => stream.clone(),
244+
LazyAttrTokenStreamInner::Pending {
245+
start_token,
246+
cursor_snapshot,
247+
num_calls,
248+
break_last_token,
249+
node_replacements,
250+
} => {
251+
// The token produced by the final call to `{,inlined_}next` was not
252+
// actually consumed by the callback. The combination of chaining the
253+
// initial token and using `take` produces the desired result - we
254+
// produce an empty `TokenStream` if no calls were made, and omit the
255+
// final token otherwise.
256+
let mut cursor_snapshot = cursor_snapshot.clone();
257+
let tokens = iter::once(FlatToken::Token(*start_token))
258+
.chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
259+
.take(*num_calls as usize);
260+
261+
if node_replacements.is_empty() {
262+
make_attr_token_stream(tokens, *break_last_token)
263+
} else {
264+
let mut tokens: Vec<_> = tokens.collect();
265+
let mut node_replacements = node_replacements.to_vec();
266+
node_replacements.sort_by_key(|(range, _)| range.0.start);
247267

248-
#[cfg(debug_assertions)]
249-
for [(node_range, tokens), (next_node_range, next_tokens)] in
250-
node_replacements.array_windows()
251-
{
252-
assert!(
253-
node_range.0.end <= next_node_range.0.start
254-
|| node_range.0.end >= next_node_range.0.end,
255-
"Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
256-
node_range,
257-
tokens,
258-
next_node_range,
259-
next_tokens,
260-
);
261-
}
268+
#[cfg(debug_assertions)]
269+
for [(node_range, tokens), (next_node_range, next_tokens)] in
270+
node_replacements.array_windows()
271+
{
272+
assert!(
273+
node_range.0.end <= next_node_range.0.start
274+
|| node_range.0.end >= next_node_range.0.end,
275+
"Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
276+
node_range,
277+
tokens,
278+
next_node_range,
279+
next_tokens,
280+
);
281+
}
262282

263-
// Process the replace ranges, starting from the highest start
264-
// position and working our way back. If have tokens like:
265-
//
266-
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
267-
//
268-
// Then we will generate replace ranges for both
269-
// the `#[cfg(FALSE)] field: bool` and the entire
270-
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
271-
//
272-
// By starting processing from the replace range with the greatest
273-
// start position, we ensure that any (outer) replace range which
274-
// encloses another (inner) replace range will fully overwrite the
275-
// inner range's replacement.
276-
for (node_range, target) in node_replacements.into_iter().rev() {
277-
assert!(
278-
!node_range.0.is_empty(),
279-
"Cannot replace an empty node range: {:?}",
280-
node_range.0
281-
);
282-
283-
// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
284-
// enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
285-
// total length of `tokens` constant throughout the replacement process, allowing
286-
// us to do all replacements without adjusting indices.
287-
let target_len = target.is_some() as usize;
288-
tokens.splice(
289-
(node_range.0.start as usize)..(node_range.0.end as usize),
290-
target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
291-
iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
292-
),
293-
);
283+
// Process the replace ranges, starting from the highest start
284+
// position and working our way back. If we have tokens like:
285+
//
286+
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
287+
//
288+
// Then we will generate replace ranges for both
289+
// the `#[cfg(FALSE)] field: bool` and the entire
290+
// `#[cfg(FALSE)] struct Foo { #[cfg(FALSE)] field: bool }`
291+
//
292+
// By starting processing from the replace range with the greatest
293+
// start position, we ensure that any (outer) replace range which
294+
// encloses another (inner) replace range will fully overwrite the
295+
// inner range's replacement.
296+
for (node_range, target) in node_replacements.into_iter().rev() {
297+
assert!(
298+
!node_range.0.is_empty(),
299+
"Cannot replace an empty node range: {:?}",
300+
node_range.0
301+
);
302+
303+
// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s,
304+
// plus enough `FlatToken::Empty`s to fill up the rest of the range. This
305+
// keeps the total length of `tokens` constant throughout the replacement
306+
// process, allowing us to do all replacements without adjusting indices.
307+
let target_len = target.is_some() as usize;
308+
tokens.splice(
309+
(node_range.0.start as usize)..(node_range.0.end as usize),
310+
target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
311+
iter::repeat(FlatToken::Empty)
312+
.take(node_range.0.len() - target_len),
313+
),
314+
);
315+
}
316+
make_attr_token_stream(tokens.into_iter(), *break_last_token)
317+
}
294318
}
295-
make_attr_token_stream(tokens.into_iter(), self.break_last_token)
296319
}
297320
}
298321
}
@@ -1011,6 +1034,7 @@ mod size_asserts {
10111034
static_assert_size!(AttrTokenStream, 8);
10121035
static_assert_size!(AttrTokenTree, 32);
10131036
static_assert_size!(LazyAttrTokenStream, 8);
1037+
static_assert_size!(LazyAttrTokenStreamInner, 96);
10141038
static_assert_size!(Option<LazyAttrTokenStream>, 8); // must be small, used in many AST nodes
10151039
static_assert_size!(TokenStream, 8);
10161040
static_assert_size!(TokenTree, 32);

compiler/rustc_attr_parsing/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -80,6 +80,7 @@
8080
#![cfg_attr(bootstrap, feature(let_chains))]
8181
#![doc(rust_logo)]
8282
#![feature(rustdoc_internals)]
83+
#![recursion_limit = "256"]
8384
// tidy-alphabetical-end
8485

8586
#[macro_use]

compiler/rustc_builtin_macros/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
#![feature(rustdoc_internals)]
1919
#![feature(string_from_utf8_lossy_owned)]
2020
#![feature(try_blocks)]
21+
#![recursion_limit = "256"]
2122
// tidy-alphabetical-end
2223

2324
extern crate proc_macro;

compiler/rustc_codegen_ssa/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
#![feature(string_from_utf8_lossy_owned)]
1515
#![feature(trait_alias)]
1616
#![feature(try_blocks)]
17+
#![recursion_limit = "256"]
1718
// tidy-alphabetical-end
1819

1920
//! This crate contains codegen code that is used by all codegen backends (LLVM and others).

compiler/rustc_expand/src/config.rs

+4-4
Original file line numberDiff line numberDiff line change
@@ -162,7 +162,7 @@ pub(crate) fn attr_into_trace(mut attr: Attribute, trace_name: Symbol) -> Attrib
162162
let NormalAttr { item, tokens } = &mut **normal;
163163
item.path.segments[0].ident.name = trace_name;
164164
// This makes the trace attributes unobservable to token-based proc macros.
165-
*tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::default()));
165+
*tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::default()));
166166
}
167167
AttrKind::DocComment(..) => unreachable!(),
168168
}
@@ -192,7 +192,7 @@ impl<'a> StripUnconfigured<'a> {
192192
if self.config_tokens {
193193
if let Some(Some(tokens)) = node.tokens_mut() {
194194
let attr_stream = tokens.to_attr_token_stream();
195-
*tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream));
195+
*tokens = LazyAttrTokenStream::new_direct(self.configure_tokens(&attr_stream));
196196
}
197197
}
198198
}
@@ -223,7 +223,7 @@ impl<'a> StripUnconfigured<'a> {
223223
target.attrs.flat_map_in_place(|attr| self.process_cfg_attr(&attr));
224224

225225
if self.in_cfg(&target.attrs) {
226-
target.tokens = LazyAttrTokenStream::new(
226+
target.tokens = LazyAttrTokenStream::new_direct(
227227
self.configure_tokens(&target.tokens.to_attr_token_stream()),
228228
);
229229
Some(AttrTokenTree::AttrsTarget(target))
@@ -361,7 +361,7 @@ impl<'a> StripUnconfigured<'a> {
361361
.to_attr_token_stream(),
362362
));
363363

364-
let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
364+
let tokens = Some(LazyAttrTokenStream::new_direct(AttrTokenStream::new(trees)));
365365
let attr = ast::attr::mk_attr_from_item(
366366
&self.sess.psess.attr_id_generator,
367367
item,

compiler/rustc_hir/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
#![feature(never_type)]
1515
#![feature(rustc_attrs)]
1616
#![feature(variant_count)]
17+
#![recursion_limit = "256"]
1718
// tidy-alphabetical-end
1819

1920
extern crate self as rustc_hir;

compiler/rustc_middle/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -61,6 +61,7 @@
6161
#![feature(try_trait_v2_yeet)]
6262
#![feature(type_alias_impl_trait)]
6363
#![feature(yeet_expr)]
64+
#![recursion_limit = "256"]
6465
// tidy-alphabetical-end
6566

6667
#[cfg(test)]

compiler/rustc_parse/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
#![feature(if_let_guard)]
1212
#![feature(iter_intersperse)]
1313
#![feature(string_from_utf8_lossy_owned)]
14+
#![recursion_limit = "256"]
1415
// tidy-alphabetical-end
1516

1617
use std::path::{Path, PathBuf};

compiler/rustc_parse/src/parser/attr_wrapper.rs

+6-18
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@ use std::mem;
33

44
use rustc_ast::token::Token;
55
use rustc_ast::tokenstream::{
6-
AttrsTarget, LazyAttrTokenStream, LazyAttrTokenStreamImpl, NodeRange, ParserRange, Spacing,
7-
TokenCursor,
6+
AttrsTarget, LazyAttrTokenStream, NodeRange, ParserRange, Spacing, TokenCursor,
87
};
98
use rustc_ast::{self as ast, AttrVec, Attribute, HasAttrs, HasTokens};
109
use rustc_data_structures::fx::FxHashSet;
@@ -337,13 +336,13 @@ impl<'a> Parser<'a> {
337336
// - `attrs`: includes the outer and the inner attr.
338337
// - `tokens`: lazy tokens for `g` (with its inner attr deleted).
339338

340-
let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
341-
start_token: collect_pos.start_token,
342-
cursor_snapshot: collect_pos.cursor_snapshot,
339+
let tokens = LazyAttrTokenStream::new_pending(
340+
collect_pos.start_token,
341+
collect_pos.cursor_snapshot,
343342
num_calls,
344-
break_last_token: self.break_last_token,
343+
self.break_last_token,
345344
node_replacements,
346-
});
345+
);
347346
let mut tokens_used = false;
348347

349348
// If in "definite capture mode" we need to register a replace range
@@ -405,14 +404,3 @@ fn needs_tokens(attrs: &[ast::Attribute]) -> bool {
405404
}
406405
})
407406
}
408-
409-
// Some types are used a lot. Make sure they don't unintentionally get bigger.
410-
#[cfg(target_pointer_width = "64")]
411-
mod size_asserts {
412-
use rustc_data_structures::static_assert_size;
413-
414-
use super::*;
415-
// tidy-alphabetical-start
416-
static_assert_size!(LazyAttrTokenStreamImpl, 96);
417-
// tidy-alphabetical-end
418-
}

compiler/rustc_resolve/src/lib.rs

+1
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@
1919
#![feature(iter_intersperse)]
2020
#![feature(rustc_attrs)]
2121
#![feature(rustdoc_internals)]
22+
#![recursion_limit = "256"]
2223
// tidy-alphabetical-end
2324

2425
use std::cell::{Cell, RefCell};

0 commit comments

Comments
 (0)