
Commit f6eaaf3

Integrate TokenStream.
1 parent 8cd0c08 commit f6eaaf3

27 files changed: +276 -319 lines


src/libproc_macro/lib.rs (+3 -4)

@@ -101,7 +101,7 @@ pub mod __internal {
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_parse_sess(move |sess| {
-            let mut parser = parse::new_parser_from_ts(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -177,9 +177,8 @@ impl FromStr for TokenStream {
         __internal::with_parse_sess(|sess| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let tts = parse::parse_tts_from_source_str(name, src, sess);
-
-            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
+            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            Ok(__internal::token_stream_wrap(stream))
         })
     }
 }
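The FromStr path above is what a procedural macro hits when it calls .parse() on a source string; a minimal sketch of such a call site, assuming only the stable proc_macro API (the derive name and generated item are illustrative, not part of this commit):

    // In a proc-macro crate; `parse()` now lexes straight into a TokenStream
    // via parse_stream_from_source_str, with no intermediate Vec<TokenTree>.
    extern crate proc_macro;
    use proc_macro::TokenStream;

    #[proc_macro_derive(Answer)]
    pub fn derive_answer(_input: TokenStream) -> TokenStream {
        "fn answer() -> u32 { 42 }".parse().unwrap()
    }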

src/libproc_macro_plugin/qquote.rs (+5 -16)

@@ -18,7 +18,6 @@ use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
 use syntax_pos::DUMMY_SP;
 
 use std::iter;
-use std::rc::Rc;
 
 pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
     stream.quote()
@@ -50,10 +49,7 @@ macro_rules! quote_tree {
 }
 
 fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
-    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
-        delim: delim,
-        tts: stream.into_trees().collect(),
-    })).into()
+    TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
 }
 
 macro_rules! quote {
@@ -102,13 +98,6 @@ impl Quote for TokenStream {
     }
 }
 
-impl Quote for Vec<TokenTree> {
-    fn quote(&self) -> TokenStream {
-        let stream = self.iter().cloned().collect::<TokenStream>();
-        quote!((quote stream).into_trees().collect::<::std::vec::Vec<_> >())
-    }
-}
-
 impl Quote for TokenTree {
     fn quote(&self) -> TokenStream {
         match *self {
@@ -124,12 +113,12 @@ impl Quote for TokenTree {
         }
     }
 }
 
-impl Quote for Rc<Delimited> {
+impl Quote for Delimited {
     fn quote(&self) -> TokenStream {
-        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+        quote!(::syntax::tokenstream::Delimited {
             delim: (quote self.delim),
-            tts: (quote self.tts),
-        }))
+            tts: (quote self.stream()).into(),
+        })
     }
 }
 
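With Delimited no longer behind an Rc, wrapping a stream in delimiters is a plain struct literal; the tts field takes a ThinTokenStream, so a TokenStream is converted with .into(). A sketch of the pattern the new delimit helper uses, compiled against the in-tree syntax crates:

    use syntax::parse::token::DelimToken;
    use syntax::tokenstream::{Delimited, TokenStream, TokenTree};
    use syntax_pos::DUMMY_SP;

    // Wrap `stream` in a pair of delimiters, e.g. `{ ... }` for DelimToken::Brace.
    fn delimit(delim: DelimToken, stream: TokenStream) -> TokenStream {
        // First `.into()`: TokenStream -> ThinTokenStream for the struct field;
        // second `.into()`: the single TokenTree back into a TokenStream.
        TokenTree::Delimited(DUMMY_SP, Delimited { delim: delim, tts: stream.into() }).into()
    }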

src/librustc/hir/mod.rs (+2 -2)

@@ -40,7 +40,7 @@ use syntax::ast::{Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::TokenTree;
+use syntax::tokenstream::TokenStream;
 use syntax::util::ThinVec;
 
 use std::collections::BTreeMap;
@@ -466,7 +466,7 @@ pub struct MacroDef {
     pub attrs: HirVec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: HirVec<TokenTree>,
+    pub body: TokenStream,
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]

src/librustc_incremental/calculate_svh/svh_visitor.rs (+5 -11)

@@ -866,8 +866,8 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'hash, 'tcx> {
         debug!("visit_macro_def: st={:?}", self.st);
         SawMacroDef.hash(self.st);
         hash_attrs!(self, &macro_def.attrs);
-        for tt in &macro_def.body {
-            self.hash_token_tree(tt);
+        for tt in macro_def.body.trees() {
+            self.hash_token_tree(&tt);
         }
         visit::walk_macro_def(self, macro_def)
     }
@@ -1033,15 +1033,9 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
             }
             tokenstream::TokenTree::Delimited(span, ref delimited) => {
                 hash_span!(self, span);
-                let tokenstream::Delimited {
-                    ref delim,
-                    ref tts,
-                } = **delimited;
-
-                delim.hash(self.st);
-                tts.len().hash(self.st);
-                for sub_tt in tts {
-                    self.hash_token_tree(sub_tt);
+                delimited.delim.hash(self.st);
+                for sub_tt in delimited.stream().trees() {
+                    self.hash_token_tree(&sub_tt);
                 }
             }
         }
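The same trees()/stream() pair supports any recursive walk over a stream; a minimal sketch, assuming only the TokenStream, TokenTree and Delimited types touched by this commit:

    use syntax::tokenstream::{TokenStream, TokenTree};

    // Count every token tree in a stream, descending into delimited groups.
    fn count_trees(stream: &TokenStream) -> usize {
        let mut n = 0;
        for tt in stream.trees() {
            n += 1;
            if let TokenTree::Delimited(_, ref delimited) = tt {
                // `stream()` rebuilds a TokenStream from the stored ThinTokenStream.
                n += count_trees(&delimited.stream());
            }
        }
        n
    }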

src/librustc_metadata/cstore_impl.rs (+3 -3)

@@ -34,7 +34,7 @@ use std::rc::Rc;
 
 use syntax::ast;
 use syntax::attr;
-use syntax::parse::filemap_to_tts;
+use syntax::parse::filemap_to_stream;
 use syntax::symbol::Symbol;
 use syntax_pos::{mk_sp, Span};
 use rustc::hir::svh::Svh;
@@ -397,7 +397,7 @@ impl CrateStore for cstore::CStore {
 
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
         let local_span = mk_sp(filemap.start_pos, filemap.end_pos);
-        let body = filemap_to_tts(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index);
@@ -415,7 +415,7 @@ impl CrateStore for cstore::CStore {
             id: ast::DUMMY_NODE_ID,
             span: local_span,
             attrs: attrs,
-            body: body,
+            body: body.into(),
         })
     }
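filemap_to_stream replaces filemap_to_tts as the way to re-lex stored source text; a small sketch of the same flow in isolation (the helper name relex is illustrative):

    use syntax::parse::{filemap_to_stream, ParseSess};
    use syntax::tokenstream::TokenStream;

    // Register `src` with the codemap and lex it directly into a TokenStream.
    fn relex(sess: &ParseSess, name: String, src: String) -> TokenStream {
        let filemap = sess.codemap().new_filemap(name, None, src);
        filemap_to_stream(sess, filemap)
    }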

src/librustc_metadata/encoder.rs (+2 -1)

@@ -853,9 +853,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
 
     /// Serialize the text of exported macros
     fn encode_info_for_macro_def(&mut self, macro_def: &hir::MacroDef) -> Entry<'tcx> {
+        use syntax::print::pprust;
         Entry {
             kind: EntryKind::MacroDef(self.lazy(&MacroDef {
-                body: ::syntax::print::pprust::tts_to_string(&macro_def.body)
+                body: pprust::tts_to_string(&macro_def.body.trees().collect::<Vec<_>>()),
             })),
             visibility: self.lazy(&ty::Visibility::Public),
             span: self.lazy(&macro_def.span),
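pprust::tts_to_string still takes a slice of token trees, so a TokenStream is pretty-printed by collecting its trees first; a minimal sketch, assuming the in-tree syntax crate:

    use syntax::print::pprust;
    use syntax::tokenstream::TokenStream;

    // Render a TokenStream back into source text, as the metadata encoder does.
    fn stream_to_string(stream: &TokenStream) -> String {
        let tts = stream.trees().collect::<Vec<_>>();
        pprust::tts_to_string(&tts)
    }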

src/librustc_resolve/build_reduced_graph.rs (+1 -1)

@@ -516,7 +516,7 @@ impl<'a> Resolver<'a> {
             expansion: Cell::new(LegacyScope::Empty),
         });
         self.invocations.insert(mark, invocation);
-        macro_rules.body = mark_tts(&macro_rules.body, mark);
+        macro_rules.body = mark_tts(macro_rules.stream(), mark).into();
         let ext = Rc::new(macro_rules::compile(&self.session.parse_sess, &macro_rules));
         self.macro_map.insert(def_id, ext.clone());
         ext

src/librustc_resolve/macros.rs (+2 -2)

@@ -545,7 +545,7 @@ impl<'a> Resolver<'a> {
 
     pub fn define_macro(&mut self, item: &ast::Item, legacy_scope: &mut LegacyScope<'a>) {
         let tts = match item.node {
-            ast::ItemKind::Mac(ref mac) => &mac.node.tts,
+            ast::ItemKind::Mac(ref mac) => mac.node.stream(),
             _ => unreachable!(),
         };
 
@@ -562,7 +562,7 @@ impl<'a> Resolver<'a> {
             attrs: item.attrs.clone(),
             id: ast::DUMMY_NODE_ID,
             span: item.span,
-            body: mark_tts(tts, mark),
+            body: mark_tts(tts, mark).into(),
         };
 
         *legacy_scope = LegacyScope::Binding(self.arenas.alloc_legacy_binding(LegacyBinding {

src/librustc_save_analysis/span_utils.rs (+1 -1)

@@ -284,7 +284,7 @@ impl<'a> SpanUtils<'a> {
     pub fn signature_string_for_span(&self, span: Span) -> String {
         let mut toks = self.retokenise_span(span);
         toks.real_token();
-        let mut toks = toks.parse_all_token_trees().unwrap().into_iter();
+        let mut toks = toks.parse_all_token_trees().unwrap().trees();
         let mut prev = toks.next().unwrap();
 
         let first_span = prev.span();

src/librustdoc/visit_ast.rs (+4 -2)

@@ -211,7 +211,8 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
         };
 
         // FIXME(jseyfried) merge with `self.visit_macro()`
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let tts = def.stream().trees().collect::<Vec<_>>();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
         om.macros.push(Macro {
             def_id: def_id,
             attrs: def.attrs.clone().into(),
@@ -520,8 +521,9 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> {
 
     // convert each exported_macro into a doc item
     fn visit_local_macro(&self, def: &hir::MacroDef) -> Macro {
+        let tts = def.body.trees().collect::<Vec<_>>();
         // Extract the spans of all matchers. They represent the "interface" of the macro.
-        let matchers = def.body.chunks(4).map(|arm| arm[0].span()).collect();
+        let matchers = tts.chunks(4).map(|arm| arm[0].span()).collect();
 
         Macro {
             def_id: self.cx.tcx.hir.local_def_id(def.id),
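A TokenStream has no chunks method, so rustdoc collects the trees into a Vec first; each macro_rules arm is four trees (matcher, `=>`, body, `;`), hence the chunk size. A sketch of that extraction on its own, assuming the hir::MacroDef body type shown above:

    use syntax::tokenstream::TokenStream;
    use syntax_pos::Span;

    // Span of each arm's matcher, i.e. the "interface" of the macro.
    fn matcher_spans(body: &TokenStream) -> Vec<Span> {
        let tts = body.trees().collect::<Vec<_>>();
        tts.chunks(4).map(|arm| arm[0].span()).collect()
    }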

src/libsyntax/ast.rs (+15 -3)

@@ -24,7 +24,7 @@ use ext::hygiene::SyntaxContext;
 use print::pprust;
 use ptr::P;
 use symbol::{Symbol, keywords};
-use tokenstream::{TokenTree};
+use tokenstream::{ThinTokenStream, TokenStream};
 
 use std::collections::HashSet;
 use std::fmt;
@@ -1033,7 +1033,13 @@ pub type Mac = Spanned<Mac_>;
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
 pub struct Mac_ {
     pub path: Path,
-    pub tts: Vec<TokenTree>,
+    pub tts: ThinTokenStream,
+}
+
+impl Mac_ {
+    pub fn stream(&self) -> TokenStream {
+        self.tts.clone().into()
+    }
 }
 
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
@@ -1915,7 +1921,13 @@ pub struct MacroDef {
     pub attrs: Vec<Attribute>,
     pub id: NodeId,
     pub span: Span,
-    pub body: Vec<TokenTree>,
+    pub body: ThinTokenStream,
+}
+
+impl MacroDef {
+    pub fn stream(&self) -> TokenStream {
+        self.body.clone().into()
+    }
 }
 
 #[cfg(test)]
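ThinTokenStream is the one-pointer form stored in the AST; both directions of the conversion appear in this commit (`stream.into()` to store, `.clone().into()` to read back). A tiny sketch of the round trip:

    use syntax::tokenstream::{ThinTokenStream, TokenStream};

    // Store a stream in its thin form, as Mac_ and MacroDef now do, then recover it.
    fn round_trip(stream: TokenStream) -> TokenStream {
        let thin: ThinTokenStream = stream.into(); // TokenStream -> ThinTokenStream
        thin.into()                                // ThinTokenStream -> TokenStream
    }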

src/libsyntax/ext/base.rs (+6 -14)

@@ -188,10 +188,7 @@ impl<F> AttrProcMacro for F
 
 /// Represents a thing that maps token trees to Macro Results
 pub trait TTMacroExpander {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx>;
 }
 
@@ -200,15 +197,11 @@ pub type MacroExpanderFn =
                    -> Box<MacResult+'cx>;
 
 impl<F> TTMacroExpander for F
-    where F : for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree])
-              -> Box<MacResult+'cx>
+    where F: for<'cx> Fn(&'cx mut ExtCtxt, Span, &[tokenstream::TokenTree]) -> Box<MacResult+'cx>
 {
-    fn expand<'cx>(&self,
-                   ecx: &'cx mut ExtCtxt,
-                   span: Span,
-                   token_tree: &[tokenstream::TokenTree])
+    fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, token_tree)
+        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
     }
 }
 
@@ -654,9 +647,8 @@ impl<'a> ExtCtxt<'a> {
         expand::MacroExpander::new(self, true)
     }
 
-    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree])
-                               -> parser::Parser<'a> {
-        parse::tts_to_parser(self.parse_sess, tts.to_vec())
+    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
     }
     pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
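Under the new trait signature an expander receives the whole TokenStream and only materializes a Vec<TokenTree> when it needs one, as the blanket Fn impl above does for compatibility. A minimal direct impl, where the NoopExpander type is purely illustrative and not part of this commit:

    use syntax::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
    use syntax::tokenstream::TokenStream;
    use syntax_pos::Span;

    // Hypothetical expander that ignores its input and expands to nothing.
    struct NoopExpander;

    impl TTMacroExpander for NoopExpander {
        fn expand<'cx>(&self, _ecx: &'cx mut ExtCtxt, span: Span, _input: TokenStream)
                       -> Box<MacResult + 'cx> {
            DummyResult::any(span)
        }
    }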
