
Commit be304af

Auto merge of #40202 - jseyfried:integrate_tokenstream, r=nrc

syntax: integrate `TokenStream`

Use `TokenStream` instead of `Vec<TokenTree>` in `TokenTree::Delimited` and elsewhere.

r? @nrc

2 parents 8c6c0f8 + 0d55413, commit be304af

32 files changed, +555 -535 lines

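For orientation before the per-file hunks: the pattern this commit applies throughout is replacing owned `Vec<TokenTree>` payloads with `TokenStream`. A minimal before/after sketch, lifted from the qquote.rs hunk below (in-tree libsyntax identifiers, not a standalone program):

    // Before: a delimited group carried an Rc<Delimited> whose `tts` was a Vec<TokenTree>.
    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
        delim: delim,
        tts: stream.trees().cloned().collect(),
    })).into()

    // After: Delimited holds the stream directly (`tts: stream.into()`),
    // so the Rc wrapper and the collect() into a Vec disappear.
    TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()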

src/libproc_macro/lib.rs (+3 -4)

@@ -101,7 +101,7 @@ pub mod __internal {
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_parse_sess(move |sess| {
-            let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -177,9 +177,8 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = parse::parse_tts_from_source_str(name, src, sess);
-
-            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
+            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            Ok(__internal::token_stream_wrap(stream))
         })
     }
 }
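A hedged sketch of how the two renamed entry points above compose (same names and argument order as in the hunk; in-tree libsyntax context, not standalone):

    // Lex/parse source text straight into a TokenStream, then drive a Parser from it.
    let stream = parse::parse_stream_from_source_str(name, src, sess); // was parse_tts_from_source_str
    let mut parser = parse::stream_to_parser(sess, stream);            // was new_parser_from_ts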

src/libproc_macro_plugin/qquote.rs (+10 -20)

@@ -17,7 +17,7 @@ use syntax::symbol::Symbol;
 use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
 use syntax_pos::DUMMY_SP;
 
-use std::rc::Rc;
+use std::iter;
 
 pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
     stream.quote()
@@ -49,10 +49,7 @@
 }
 
 fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
-    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
-        delim: delim,
-        tts: stream.trees().cloned().collect(),
-    })).into()
+    TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
 }
 
 macro_rules! quote {
@@ -75,9 +72,9 @@ impl Quote for TokenStream {
             return quote!(::syntax::tokenstream::TokenStream::empty());
         }
 
-        struct Quote<'a>(tokenstream::Cursor<'a>);
+        struct Quote(iter::Peekable<tokenstream::Cursor>);
 
-        impl<'a> Iterator for Quote<'a> {
+        impl Iterator for Quote {
             type Item = TokenStream;
 
             fn next(&mut self) -> Option<TokenStream> {
@@ -89,25 +86,18 @@ impl Quote for TokenStream {
                     _ => false,
                };
 
-                self.0.next().cloned().map(|tree| {
+                self.0.next().map(|tree| {
                     let quoted_tree = if is_unquote { tree.into() } else { tree.quote() };
                     quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
                })
            }
        }
 
-        let quoted = Quote(self.trees()).collect::<TokenStream>();
+        let quoted = Quote(self.trees().peekable()).collect::<TokenStream>();
        quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
    }
 }
 
-impl Quote for Vec<TokenTree> {
-    fn quote(&self) -> TokenStream {
-        let stream = self.iter().cloned().collect::<TokenStream>();
-        quote!((quote stream).trees().cloned().collect::<::std::vec::Vec<_> >())
-    }
-}
-
 impl Quote for TokenTree {
     fn quote(&self) -> TokenStream {
         match *self {
@@ -123,12 +113,12 @@ impl Quote for TokenTree {
     }
 }
 
-impl Quote for Rc<Delimited> {
+impl Quote for Delimited {
     fn quote(&self) -> TokenStream {
-        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+        quote!(::syntax::tokenstream::Delimited {
            delim: (quote self.delim),
-            tts: (quote self.tts),
-        }))
+            tts: (quote self.stream()).into(),
+        })
     }
 }
 
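The `.cloned()` calls and the `'a` lifetime drop out above because `tokenstream::Cursor` now appears to own its stream and yield `TokenTree` by value. A small usage sketch under that assumption (in-tree libsyntax context):

    // trees() hands back an owning Cursor; wrap it in Peekable when lookahead is needed.
    let mut cursor = stream.trees().peekable();
    while let Some(tree) = cursor.next() {
        // `tree` is an owned TokenTree here, so no .cloned() is required.
        let _ = tree;
    }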

src/librustc/hir/mod.rs (+2 -2)

@@ -40,7 +40,7 @@ use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::TokenStream;
 use syntax::util::ThinVec;
 
 use std::collections::BTreeMap;
@@ -471,7 +471,7 @@ pub struct MacroDef {
     pub attrs: HirVec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: HirVec<TokenTree>,
+    pub body: TokenStream,
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]

src/librustc_incremental/calculate_svh/svh_visitor.rs (+5 -11)

@@ -866,8 +866,8 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
-        for tt in &macro_def.body {
-            self.hash_token_tree(tt);
+        for tt in macro_def.body.trees() {
+            self.hash_token_tree(&tt);
         }
         visit::walk_macro_def(self, macro_def)
     }
@@ -1033,15 +1033,9 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             }
             tokenstream::TokenTree::Delimited(span, ref delimited) => {
                 hash_span!(self, span);
-                let tokenstream::Delimited {
-                    ref delim,
-                    ref tts,
-                } = **delimited;
-
-                delim.hash(self.st);
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
+                delimited.delim.hash(self.st);
+                for sub_tt in delimited.stream().trees() {
+                    self.hash_token_tree(&sub_tt);
                 }
             }
         }

src/librustc_metadata/cstore_impl.rs (+3 -3)

@@ -34,7 +34,7 @@ use std::rc::Rc;
 
 use syntax::ast;
 use syntax::attr;
-use syntax::parse::filemap_to_tts;
+use syntax::parse::filemap_to_stream;
 use syntax::symbol::Symbol;
 use syntax_pos::{mk_sp, Span};
 use rustc::hir::svh::Svh;
@@ -401,7 +401,7 @@ impl CrateStore for cstore::CStore {
 
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
         let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
-        let body = filemap_to_tts(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index);
@@ -419,7 +419,7 @@
             id: ast::DUMMY_NODE_ID,
             span: local_span,
             attrs: attrs,
-            body: body,
+            body: body.into(),
         })
     }
 

src/librustc_metadata/encoder.rs (+2 -1)

@@ -853,9 +853,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 
     /// Serialize the text of exported macros
     fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+        use syntax::print::pprust;
         Entry {
             kind: EntryKind::MacroDef(self.lazy(&MacroDef {
-                body: ::syntax::print::pprust::tts_to_string(&macro_def.body)
+                body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
             })),
             visibility: self.lazy(&ty::Visibility::Public),
             span: self.lazy(&macro_def.span),

src/librustc_resolve/build_reduced_graph.rs (+1 -1)

@@ -516,7 +516,7 @@ impl<'a> Resolver<'a> {
             expansion: Cell::new(LegacyScope::Empty),
         });
         self.invocations.insert(mark, invocation);
-        macro_rules.body = mark_tts(&macro_rules.body, mark);
+        macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
         let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
         self.macro_map.insert(def_id, ext.clone());
         ext

src/librustc_resolve/macros.rs (+2 -2)

@@ -545,7 +545,7 @@ impl<'a> Resolver<'a> {
 
     pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
         let tts = match item.node {
-            ast::ItemKind::Mac(ref mac) => &mac.node.tts,
+            ast::ItemKind::Mac(ref mac) => mac.node.stream(),
             _ => unreachable!(),
         };
 
@@ -562,7 +562,7 @@
             attrs: item.attrs.clone(),
             id: ast::DUMMY_NODE_ID,
             span: item.span,
-            body: mark_tts(tts, mark),
+            body: mark_tts(tts, mark).into(),
         };
 
         *legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {

src/librustc_save_analysis/span_utils.rs (+1 -1)

@@ -284,7 +284,7 @@ impl<'a> SpanUtils<'a> {
     pub fn signature_string_for_span(&self, span: Span) -> String {
         let mut toks = self.retokenise_span(span);
         toks.real_token();
-        let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
+        let mut toks = toks.parse_all_token_trees().unwrap().trees();
         let mut prev = toks.next().unwrap();
 
         let first_span = prev.span();

src/librustdoc/visit_ast.rs (+4 -2)

@@ -211,7 +211,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         };
 
         // FIXME(jseyfried) merge with `self.visit_macro()`
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let tts = def.stream().trees().collect::<Vec<_>>();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
         om.macros.push(Macro {
             def_id: def_id,
             attrs: def.attrs.clone().into(),
@@ -520,8 +521,9 @@
 
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
+        let tts = def.body.trees().collect::<Vec<_>>();
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
 
         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
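Both rustdoc sites now collect the trees into a `Vec` first because `trees()` yields an iterator while `.chunks(4)` is a slice method; each `macro_rules!` arm spans roughly four trees (matcher, `=>`, body, `;`), so `arm[0]` is the matcher whose span is kept. A sketch of the pattern (identifiers as in the hunks above):

    let tts = def.body.trees().collect::<Vec<_>>();
    let matchers: Vec<_> = tts.chunks(4).map(|arm| arm[0].span()).collect();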

src/libsyntax/ast.rs (+15 -3)

@@ -24,7 +24,7 @@ use ext::hygiene::SyntaxContext;
 use print::pprust;
 use ptr::P;
 use symbol::{Symbol, keywords};
-use tokenstream::{TokenTree};
+use tokenstream::{ThinTokenStream, TokenStream};
 
 use std::collections::HashSet;
 use std::fmt;
@@ -1033,7 +1033,13 @@ pub type Mac = Spanned<Mac_>;
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Mac_ {
     pub path: Path,
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
+}
+
+impl Mac_ {
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -1915,7 +1921,13 @@ pub struct MacroDef {
     pub attrs: Vec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: Vec<TokenTree>,
+    pub body: ThinTokenStream,
+}
+
+impl MacroDef {
+    pub fn stream(&self) -> TokenStream {
+        self.body.clone().into()
+    }
 }
 
 #[cfg(test)]
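`ThinTokenStream` converts to and from `TokenStream` (these `From`/`Into` conversions are what the `.into()` calls elsewhere in this commit rely on), and the new `stream()` accessors clone the thin handle before converting. A hedged sketch of how call sites adapt (in-tree libsyntax context):

    // Reading: the old `&mac.node.tts` (a &Vec<TokenTree>) becomes an owned stream.
    let stream: TokenStream = mac.node.stream();

    // Writing: a TokenStream converts back into the thin field type,
    // as in `body: mark_tts(tts, mark).into()` in librustc_resolve above.
    let thin: ThinTokenStream = stream.into();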

src/libsyntax/ext/base.rs (+6 -14)

@@ -188,10 +188,7 @@ impl<F> AttrProcMacro for F
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx>;
 }
 
@@ -200,15 +197,11 @@ pub type MacroExpanderFn =
                    -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
-    -> Box<MacResult+'cx>
+    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
 {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, token_tree)
+        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
     }
 }
 
@@ -654,9 +647,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-                               -> parser::Parser<'a> {
-        parse::tts_to_parser(self.parse_sess, tts.to_vec())
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
     }
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
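`TTMacroExpander::expand` now receives the macro invocation as a `TokenStream`, while the blanket impl keeps slice-based closures and functions working by collecting the stream into a `Vec`. A hedged sketch of a function-style expander that still satisfies that bound (`expand_my_macro` is a hypothetical name; in-tree libsyntax context):

    fn expand_my_macro<'cx>(cx: &'cx mut ExtCtxt,
                            sp: Span,
                            tts: &[tokenstream::TokenTree])
                            -> Box<MacResult + 'cx> {
        // `tts` is the slice collected from the TokenStream by the blanket impl above.
        let _ = (cx, tts);
        DummyResult::any(sp)
    }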
