Commit 0d55413

Fix fallout in unit tests.

1 parent: a02c18a

7 files changed: +45 -53 lines changed


src/libsyntax/parse/mod.rs

+29 -29

@@ -598,7 +598,6 @@ pub fn integer_lit(s: &str, suffix: Option<Symbol>, sd: &Handler, sp: Span) -> a
 #[cfg(test)]
 mod tests {
     use super::*;
-    use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
     use ast::{self, Ident, PatKind};
@@ -609,7 +608,7 @@ mod tests {
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
-    use util::parser_testing::{string_to_tts, string_to_parser};
+    use util::parser_testing::{string_to_stream, string_to_parser};
     use util::parser_testing::{string_to_expr, string_to_item, string_to_stmt};
     use util::ThinVec;

@@ -654,7 +653,8 @@ mod tests {
     // check the token-tree-ization of macros
     #[test]
     fn string_to_tts_macro () {
-        let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
+        let tts: Vec<_> =
+            string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
         let tts: &[TokenTree] = &tts[..];

         match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) {
@@ -667,7 +667,7 @@ mod tests {
             )
             if name_macro_rules.name == "macro_rules"
             && name_zip.name == "zip" => {
-                let tts = &macro_delimed.tts[..];
+                let tts = &macro_delimed.stream().trees().collect::<Vec<_>>();
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
                         3,
@@ -676,17 +676,17 @@ mod tests {
                         Some(&TokenTree::Delimited(_, ref second_delimed)),
                     )
                     if macro_delimed.delim == token::Paren => {
-                        let tts = &first_delimed.tts[..];
+                        let tts = &first_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
                                 Some(&TokenTree::Token(_, token::Dollar)),
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if first_delimed.delim == token::Paren && ident.name == "a" => {},
-                            _ => panic!("value 3: {:?}", **first_delimed),
+                            _ => panic!("value 3: {:?}", *first_delimed),
                         }
-                        let tts = &second_delimed.tts[..];
+                        let tts = &second_delimed.stream().trees().collect::<Vec<_>>();
                         match (tts.len(), tts.get(0), tts.get(1)) {
                             (
                                 2,
@@ -695,10 +695,10 @@ mod tests {
                             )
                             if second_delimed.delim == token::Paren
                             && ident.name == "a" => {},
-                            _ => panic!("value 4: {:?}", **second_delimed),
+                            _ => panic!("value 4: {:?}", *second_delimed),
                         }
                     },
-                    _ => panic!("value 2: {:?}", **macro_delimed),
+                    _ => panic!("value 2: {:?}", *macro_delimed),
                 }
             },
             _ => panic!("value: {:?}",tts),
@@ -707,31 +707,31 @@ mod tests {

     #[test]
     fn string_to_tts_1() {
-        let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
+        let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

-        let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
+        let expected = TokenStream::concat(vec![
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))).into(),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))).into(),
             TokenTree::Delimited(
                 sp(5, 14),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
-                    tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
-                    ],
-                })),
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(8, 9), token::Colon).into(),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))).into(),
+                    ]).into(),
+                }).into(),
             TokenTree::Delimited(
                 sp(15, 21),
-                Rc::new(tokenstream::Delimited {
+                tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
-                    tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
-                        TokenTree::Token(sp(18, 19), token::Semi),
-                    ],
-                }))
-        ];
+                    tts: TokenStream::concat(vec![
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))).into(),
+                        TokenTree::Token(sp(18, 19), token::Semi).into(),
+                    ]).into(),
+                }).into()
+        ]);

         assert_eq!(tts, expected);
     }
@@ -974,8 +974,8 @@ mod tests {
         let expr = parse::parse_expr_from_source_str("foo".to_string(),
             "foo!( fn main() { body } )".to_string(), &sess).unwrap();

-        let tts = match expr.node {
-            ast::ExprKind::Mac(ref mac) => mac.node.tts.clone(),
+        let tts: Vec<_> = match expr.node {
+            ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
            _ => panic!("not a macro"),
        };

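The pattern behind these test fixes: where a test previously got a Vec<TokenTree> straight from string_to_tts, it now lexes to a TokenStream and collects the trees only where a slice is actually needed. A minimal sketch of that shape, assuming the in-tree libsyntax test helpers exactly as they appear in this commit (string_to_stream, TokenTree, trees()); this is not a standalone example:

    #[test]
    fn stream_to_trees_sketch() {
        // Lex the source to a TokenStream, then materialize the top-level trees.
        let stream = string_to_stream("fn a (b : i32) { b; }".to_string());
        let tts: Vec<TokenTree> = stream.trees().collect();
        // Top level: `fn`, `a`, the parenthesized args, the braced body.
        assert_eq!(tts.len(), 4);
    }
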
src/libsyntax/tokenstream.rs

+3 -11

@@ -409,10 +409,10 @@ mod tests {
     use syntax::ast::Ident;
     use syntax_pos::{Span, BytePos, NO_EXPANSION};
     use parse::token::Token;
-    use util::parser_testing::string_to_tts;
+    use util::parser_testing::string_to_stream;

     fn string_to_ts(string: &str) -> TokenStream {
-        string_to_tts(string.to_owned()).into_iter().collect()
+        string_to_stream(string.to_owned())
     }

     fn sp(a: u32, b: u32) -> Span {
@@ -428,20 +428,12 @@ mod tests {
         let test_res = string_to_ts("foo::bar::baz");
         let test_fst = string_to_ts("foo::bar");
         let test_snd = string_to_ts("::baz");
-        let eq_res = TokenStream::concat([test_fst, test_snd].iter().cloned());
+        let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
         assert_eq!(test_res.trees().count(), 5);
         assert_eq!(eq_res.trees().count(), 5);
         assert_eq!(test_res.eq_unspanned(&eq_res), true);
     }

-    #[test]
-    fn test_from_to_bijection() {
-        let test_start = string_to_tts("foo::bar(baz)".to_string());
-        let ts = test_start.iter().cloned().collect::<TokenStream>();
-        let test_end: Vec<TokenTree> = ts.trees().collect();
-        assert_eq!(test_start, test_end)
-    }
-
     #[test]
     fn test_to_from_bijection() {
         let test_start = string_to_ts("foo::bar(baz)");

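Per the updated concat test, TokenStream::concat now takes ownership of a Vec<TokenStream> instead of an iterator over borrowed streams. A hedged sketch of the same check, using only the helpers defined in this test module:

    #[test]
    fn concat_sketch() {
        // `string_to_ts` is the local helper above; spans differ between the two
        // streams, so compare with eq_unspanned rather than assert_eq.
        let whole = string_to_ts("foo::bar::baz");
        let parts = TokenStream::concat(vec![string_to_ts("foo::bar"), string_to_ts("::baz")]);
        assert_eq!(whole.trees().count(), 5); // foo, ::, bar, ::, baz
        assert!(whole.eq_unspanned(&parts));
    }
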
src/libsyntax/util/parser_testing.rs

+4 -4

@@ -9,17 +9,17 @@
 // except according to those terms.

 use ast::{self, Ident};
-use parse::{ParseSess,PResult,filemap_to_tts};
+use parse::{ParseSess, PResult, filemap_to_stream};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
 use ptr::P;
-use tokenstream;
+use tokenstream::TokenStream;
 use std::iter::Peekable;

 /// Map a string to tts, using a made-up filename:
-pub fn string_to_tts(source_str: String) -> Vec<tokenstream::TokenTree> {
+pub fn string_to_stream(source_str: String) -> TokenStream {
     let ps = ParseSess::new();
-    filemap_to_tts(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
+    filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
 }

 /// Map string to parser (via tts)

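For callers, the renamed helper is a drop-in source of a TokenStream; tests that still want individual token trees collect them from the stream. A small usage sketch under the same assumptions as above (a hypothetical test, not code from this commit):

    #[test]
    fn string_to_stream_usage_sketch() {
        // Lex a made-up snippet and count its top-level token trees.
        let ts = string_to_stream("foo::bar".to_string());
        assert_eq!(ts.trees().count(), 3); // foo, ::, bar
    }
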
src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs

+1 -1

@@ -18,7 +18,7 @@ use syntax::ast::*;
 use syntax::attr::*;
 use syntax::ast;
 use syntax::parse;
-use syntax::parse::{ParseSess,filemap_to_tts, PResult};
+use syntax::parse::{ParseSess, PResult};
 use syntax::parse::new_parser_from_source_str;
 use syntax::parse::parser::Parser;
 use syntax::parse::token;

src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs

+4 -4

@@ -32,13 +32,13 @@ pub fn plugin_registrar(reg: &mut Registry) {

 fn cond(input: TokenStream) -> TokenStream {
     let mut conds = Vec::new();
-    let mut input = input.trees();
+    let mut input = input.trees().peekable();
     while let Some(tree) = input.next() {
-        let cond: TokenStream = match *tree {
-            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
+        let mut cond = match tree {
+            TokenTree::Delimited(_, ref delimited) => delimited.stream(),
             _ => panic!("Invalid input"),
         };
-        let mut trees = cond.trees().cloned();
+        let mut trees = cond.trees();
         let test = trees.next();
         let rhs = trees.collect::<TokenStream>();
         if rhs.is_empty() {

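Two API shifts show up in this plugin: TokenStream::trees() now yields owned TokenTrees (so the .cloned() adapter goes away and the match is on `tree` rather than `*tree`), and a delimited group's contents are read via delimited.stream() instead of the old `tts` field. A hedged sketch of walking a stream under those assumptions (the `walk` function is illustrative, not part of the commit):

    // Sketch: split each delimited group of `input` into a head token and a tail stream.
    fn walk(input: TokenStream) {
        for tree in input.trees() {                       // owned TokenTrees
            if let TokenTree::Delimited(_, ref delimited) = tree {
                let inner = delimited.stream();           // group contents as a TokenStream
                let mut trees = inner.trees();
                let _head = trees.next();                 // first token of the group
                let _tail: TokenStream = trees.collect(); // the rest, re-collected
            }
        }
    }
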
src/test/run-pass-fulldeps/auxiliary/plugin_args.rs

+2 -2

@@ -26,7 +26,7 @@ use syntax::print::pprust;
 use syntax::ptr::P;
 use syntax::symbol::Symbol;
 use syntax_pos::Span;
-use syntax::tokenstream;
+use syntax::tokenstream::TokenStream;
 use rustc_plugin::Registry;

 struct Expander {
@@ -37,7 +37,7 @@ impl TTMacroExpander for Expander {
     fn expand<'cx>(&self,
                    ecx: &'cx mut ExtCtxt,
                    sp: Span,
-                   _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
+                   _: TokenStream) -> Box<MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
                        .collect::<Vec<_>>().join(", ");
         MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))

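The signature change here is the visible end of the same migration: TTMacroExpander::expand now receives the macro input as an owned TokenStream rather than a &[TokenTree] slice. A sketch of an expander that actually inspects its input, assuming only the trait shape shown in this diff; `CountingExpander` is a hypothetical type, not the Expander defined in this test:

    struct CountingExpander;

    impl TTMacroExpander for CountingExpander {
        fn expand<'cx>(&self,
                       ecx: &'cx mut ExtCtxt,
                       sp: Span,
                       input: TokenStream) -> Box<MacResult+'cx> {
            // The input arrives as a TokenStream; count its top-level trees
            // and expand to a string literal holding that count.
            let n = input.trees().count();
            MacEager::expr(ecx.expr_str(sp, Symbol::intern(&n.to_string())))
        }
    }
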
src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs

+2 -2

@@ -35,8 +35,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                       -> Box<MacResult + 'static> {

     let mbe_matcher = quote_tokens!(cx, $$matched:expr, $$($$pat:pat)|+);
-    let mbe_matcher = quoted::parse(&mbe_matcher, true, cx.parse_sess);
-    let map = match TokenTree::parse(cx, &mbe_matcher, args) {
+    let mbe_matcher = quoted::parse(mbe_matcher.into_iter().collect(), true, cx.parse_sess);
+    let map = match TokenTree::parse(cx, &mbe_matcher, args.iter().cloned().collect()) {
         Success(map) => map,
         Failure(_, tok) => {
             panic!("expected Success, but got Failure: {}", parse_failure_msg(tok));
