
Commit 7d41674

Clean up tokenstream::Cursor and proc_macro.
1 parent d4488b7 commit 7d41674

2 files changed: +38 additions, −44 deletions


src/libproc_macro/lib.rs

Lines changed: 11 additions & 17 deletions
@@ -48,7 +48,7 @@ use std::str::FromStr;
 
 use syntax::errors::DiagnosticBuilder;
 use syntax::parse;
-use syntax::tokenstream::TokenStream as TokenStream_;
+use syntax::tokenstream;
 
 /// The main type provided by this crate, representing an abstract stream of
 /// tokens.
@@ -60,9 +60,7 @@ use syntax::tokenstream::TokenStream as TokenStream_;
 /// The API of this type is intentionally bare-bones, but it'll be expanded over
 /// time!
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
-pub struct TokenStream {
-    inner: TokenStream_,
-}
+pub struct TokenStream(tokenstream::TokenStream);
 
 /// Error returned from `TokenStream::from_str`.
 #[derive(Debug)]
@@ -91,26 +89,22 @@ pub mod __internal {
     use syntax::ext::hygiene::Mark;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
-    use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
+    use syntax::tokenstream;
 
     use super::{TokenStream, LexError};
 
     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream {
-            inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
-                .into()
-        }
+        let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
+        TokenStream(tokenstream::TokenTree::Token(span, token).into())
     }
 
-    pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
-        TokenStream {
-            inner: inner
-        }
+    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
+        TokenStream(inner)
     }
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_sess(move |(sess, _)| {
-            let mut parser = parse::stream_to_parser(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.0);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -121,8 +115,8 @@ pub mod __internal {
         })
     }
 
-    pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
-        stream.inner
+    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
+        stream.0
    }
 
     pub trait Registry {
@@ -197,6 +191,6 @@ impl FromStr for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.inner.fmt(f)
+        self.0.fmt(f)
     }
 }
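
The change above turns `TokenStream` into a one-field tuple struct wrapping `tokenstream::TokenStream`, so the `__internal` helpers construct it with `TokenStream(...)` and reach the wrapped stream through `.0`. As a rough standalone sketch of that newtype pattern only (hypothetical `Inner`/`Wrapper` names, not the rustc API):

use std::fmt;

// Stand-in for the wrapped internal type.
struct Inner(String);

impl fmt::Display for Inner {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

// One-field tuple struct ("newtype") instead of a struct with a named `inner` field.
struct Wrapper(Inner);

impl fmt::Display for Wrapper {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Delegate to the wrapped value through `.0`, as the Display impl in the diff does.
        self.0.fmt(f)
    }
}

// Simplified analogues of the `token_stream_wrap` / `token_stream_inner` helpers.
fn wrap(inner: Inner) -> Wrapper {
    Wrapper(inner)
}

fn into_inner(w: Wrapper) -> Inner {
    w.0
}

fn main() {
    let w = wrap(Inner("some tokens".to_string()));
    println!("{}", w);
    let _inner = into_inner(w);
}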

src/libsyntax/tokenstream.rs

Lines changed: 27 additions & 27 deletions
@@ -199,7 +199,7 @@ impl TokenStream {
     pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
-            1 => TokenStream::from(streams.pop().unwrap()),
+            1 => streams.pop().unwrap(),
             _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
         }
     }
@@ -244,44 +244,44 @@ struct StreamCursor {
     stack: Vec<(RcSlice<TokenStream>, usize)>,
 }
 
-impl Iterator for Cursor {
-    type Item = TokenTree;
-
+impl StreamCursor {
     fn next(&mut self) -> Option<TokenTree> {
-        let cursor = match self.0 {
-            CursorKind::Stream(ref mut cursor) => cursor,
-            CursorKind::Tree(ref tree, ref mut consumed @ false) => {
-                *consumed = true;
-                return Some(tree.clone());
-            }
-            _ => return None,
-        };
-
         loop {
-            if cursor.index < cursor.stream.len() {
-                match cursor.stream[cursor.index].kind.clone() {
-                    TokenStreamKind::Tree(tree) => {
-                        cursor.index += 1;
-                        return Some(tree);
-                    }
+            if self.index < self.stream.len() {
+                self.index += 1;
+                match self.stream[self.index - 1].kind.clone() {
+                    TokenStreamKind::Tree(tree) => return Some(tree),
                     TokenStreamKind::Stream(stream) => {
-                        cursor.stack.push((mem::replace(&mut cursor.stream, stream),
-                                           mem::replace(&mut cursor.index, 0) + 1));
-                    }
-                    TokenStreamKind::Empty => {
-                        cursor.index += 1;
+                        self.stack.push((mem::replace(&mut self.stream, stream),
+                                         mem::replace(&mut self.index, 0)));
                     }
+                    TokenStreamKind::Empty => {}
                 }
-            } else if let Some((stream, index)) = cursor.stack.pop() {
-                cursor.stream = stream;
-                cursor.index = index;
+            } else if let Some((stream, index)) = self.stack.pop() {
+                self.stream = stream;
+                self.index = index;
             } else {
                 return None;
             }
         }
     }
 }
 
+impl Iterator for Cursor {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        let (tree, consumed) = match self.0 {
+            CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
+            CursorKind::Stream(ref mut cursor) => return cursor.next(),
+            _ => return None,
+        };
+
+        *consumed = true;
+        Some(tree.clone())
+    }
+}
+
 impl Cursor {
     fn new(stream: TokenStream) -> Self {
         Cursor(match stream.kind {
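
The refactor above moves the stream-walking loop into an inherent `StreamCursor::next`, leaving `Cursor`'s `Iterator` impl to handle the single-tree case and delegate to the stream cursor otherwise. The loop itself is a depth-first walk over nested streams with an explicit stack of (parent stream, resume index) frames. Below is a minimal standalone sketch of that strategy using hypothetical stand-in types (`Node`, `FlatCursor`), not the rustc ones:

// Simplified stand-ins for TokenStreamKind and StreamCursor.
#[derive(Clone)]
enum Node {
    Leaf(char),        // analogous to TokenStreamKind::Tree
    Nested(Vec<Node>), // analogous to TokenStreamKind::Stream
    Empty,             // analogous to TokenStreamKind::Empty
}

struct FlatCursor {
    stream: Vec<Node>,
    index: usize,
    stack: Vec<(Vec<Node>, usize)>, // (parent stream, resume index)
}

impl Iterator for FlatCursor {
    type Item = char;

    fn next(&mut self) -> Option<char> {
        loop {
            if self.index < self.stream.len() {
                // Advance first, then inspect the element we just passed,
                // mirroring the `self.index += 1; self.stream[self.index - 1]`
                // pattern in the diff.
                self.index += 1;
                match self.stream[self.index - 1].clone() {
                    Node::Leaf(c) => return Some(c),
                    Node::Nested(inner) => {
                        // Descend: save the parent and the already-advanced
                        // index so iteration resumes after the nested stream.
                        let parent = std::mem::replace(&mut self.stream, inner);
                        let resume = std::mem::replace(&mut self.index, 0);
                        self.stack.push((parent, resume));
                    }
                    Node::Empty => {} // skip empty streams
                }
            } else if let Some((stream, index)) = self.stack.pop() {
                // Finished a nested stream: pop back up to the parent frame.
                self.stream = stream;
                self.index = index;
            } else {
                return None;
            }
        }
    }
}

fn main() {
    let cursor = FlatCursor {
        stream: vec![
            Node::Leaf('a'),
            Node::Nested(vec![Node::Leaf('b'), Node::Empty, Node::Leaf('c')]),
            Node::Leaf('d'),
        ],
        index: 0,
        stack: Vec::new(),
    };
    assert_eq!(cursor.collect::<String>(), "abcd");
}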
