
Commit ec1a8f0

proc_macro: Tweak doc comments and negative literals
This commit tweaks the tokenization of a doc comment to use `#[doc = "..."]` like `macro_rules!` does (instead of treating it as a single `Literal` token). Additionally it fixes the treatment of negative literals in the compiler, for example `Literal::i32(-1)`. The fix is a bit of a hack around the current compiler implementation, applied at the proc-macro layer rather than the libsyntax layer.
1 parent 6960761 commit ec1a8f0
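
After this change, a doc comment on an item handed to a procedural macro is no longer surfaced as a single `Literal` token: it arrives as a `#` Op followed by a bracketed `[doc = "..."]` group, matching what `macro_rules!` matchers see. Below is a minimal sketch of an attribute macro that checks for this shape, written against the unstable proc_macro API of this era (same crate setup as the tests in this commit); the macro name `expect_doc` is purely illustrative.

#![feature(proc_macro)]
#![crate_type = "proc-macro"]

extern crate proc_macro;

use proc_macro::{Delimiter, Spacing, TokenNode, TokenStream, TokenTree};

#[proc_macro_attribute]
pub fn expect_doc(_attr: TokenStream, input: TokenStream) -> TokenStream {
    let tokens: Vec<TokenTree> = input.clone().into_iter().collect();

    // A leading `/// ...` on the annotated item now shows up as a `#` op...
    match tokens[0].kind {
        TokenNode::Op('#', Spacing::Alone) => {}
        _ => panic!("expected `#` produced by the doc comment"),
    }
    // ...followed by a bracketed group containing `doc = "..."`.
    match tokens[1].kind {
        TokenNode::Group(Delimiter::Bracket, _) => {}
        _ => panic!("expected a `[doc = \"...\"]` group"),
    }

    input
}

Attaching `#[expect_doc]` to a documented item then compiles cleanly, whereas before this commit the first token would have been a bare doc-comment literal.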

4 files changed (+164, -54 lines)

src/libproc_macro/lib.rs

+80-51
@@ -40,7 +40,6 @@
 #![feature(lang_items)]
 #![feature(optin_builtin_traits)]
 
-#[macro_use]
 extern crate syntax;
 extern crate syntax_pos;
 extern crate rustc_errors;
@@ -156,7 +155,7 @@ impl IntoIterator for TokenStream {
     type IntoIter = TokenTreeIter;
 
     fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter { cursor: self.0.trees(), next: None }
+        TokenTreeIter { cursor: self.0.trees(), stack: Vec::new() }
     }
 }
 
@@ -554,7 +553,7 @@ impl Literal {
 #[unstable(feature = "proc_macro", issue = "38356")]
 pub struct TokenTreeIter {
     cursor: tokenstream::Cursor,
-    next: Option<tokenstream::TokenStream>,
+    stack: Vec<TokenTree>,
 }
 
 #[unstable(feature = "proc_macro", issue = "38356")]
@@ -563,9 +562,10 @@ impl Iterator for TokenTreeIter {
 
     fn next(&mut self) -> Option<TokenTree> {
         loop {
-            let next =
-                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
-            let tree = TokenTree::from_internal(next, &mut self.next);
+            let tree = self.stack.pop().or_else(|| {
+                let next = self.cursor.next_as_stream()?;
+                Some(TokenTree::from_internal(next, &mut self.stack))
+            })?;
             if tree.span.0 == DUMMY_SP {
                 if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
                     self.cursor.insert(stream.0);
@@ -598,12 +598,12 @@ impl Delimiter {
 }
 
 impl TokenTree {
-    fn from_internal(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
+    fn from_internal(stream: tokenstream::TokenStream, stack: &mut Vec<TokenTree>)
         -> TokenTree {
         use syntax::parse::token::*;
 
         let (tree, is_joint) = stream.as_tree();
-        let (mut span, token) = match tree {
+        let (span, token) = match tree {
             tokenstream::TokenTree::Token(span, token) => (span, token),
             tokenstream::TokenTree::Delimited(span, delimed) => {
                 let delimiter = Delimiter::from_internal(delimed.delim);
@@ -615,34 +615,32 @@ impl TokenTree {
         };
 
         let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
-        macro_rules! op {
-            ($op:expr) => { TokenNode::Op($op, op_kind) }
-        }
-
-        macro_rules! joint {
-            ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) }
+        macro_rules! tt {
+            ($e:expr) => (TokenTree { span: Span(span), kind: $e })
         }
-
-        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
-                 next: &mut Option<tokenstream::TokenStream>)
-                 -> TokenNode {
-            let (first_span, rest_span) = (*span, *span);
-            *span = first_span;
-            let tree = tokenstream::TokenTree::Token(rest_span, rest);
-            *next = Some(if is_joint { tree.joint() } else { tree.into() });
-            TokenNode::Op(first, Spacing::Joint)
+        macro_rules! op {
+            ($a:expr) => (TokenNode::Op($a, op_kind));
+            ($a:expr, $b:expr) => ({
+                stack.push(tt!(TokenNode::Op($b, op_kind).into()));
+                TokenNode::Op($a, Spacing::Joint)
+            });
+            ($a:expr, $b:expr, $c:expr) => ({
+                stack.push(tt!(TokenNode::Op($c, op_kind)));
+                stack.push(tt!(TokenNode::Op($b, Spacing::Joint)));
+                TokenNode::Op($a, Spacing::Joint)
+            })
         }
 
         let kind = match token {
             Eq => op!('='),
             Lt => op!('<'),
-            Le => joint!('<', Eq),
-            EqEq => joint!('=', Eq),
-            Ne => joint!('!', Eq),
-            Ge => joint!('>', Eq),
+            Le => op!('<', '='),
+            EqEq => op!('=', '='),
+            Ne => op!('!', '='),
+            Ge => op!('>', '='),
             Gt => op!('>'),
-            AndAnd => joint!('&', BinOp(And)),
-            OrOr => joint!('|', BinOp(Or)),
+            AndAnd => op!('&', '&'),
+            OrOr => op!('|', '|'),
             Not => op!('!'),
             Tilde => op!('~'),
             BinOp(Plus) => op!('+'),
@@ -653,37 +651,46 @@ impl TokenTree {
             BinOp(Caret) => op!('^'),
             BinOp(And) => op!('&'),
             BinOp(Or) => op!('|'),
-            BinOp(Shl) => joint!('<', Lt),
-            BinOp(Shr) => joint!('>', Gt),
-            BinOpEq(Plus) => joint!('+', Eq),
-            BinOpEq(Minus) => joint!('-', Eq),
-            BinOpEq(Star) => joint!('*', Eq),
-            BinOpEq(Slash) => joint!('/', Eq),
-            BinOpEq(Percent) => joint!('%', Eq),
-            BinOpEq(Caret) => joint!('^', Eq),
-            BinOpEq(And) => joint!('&', Eq),
-            BinOpEq(Or) => joint!('|', Eq),
-            BinOpEq(Shl) => joint!('<', Le),
-            BinOpEq(Shr) => joint!('>', Ge),
+            BinOp(Shl) => op!('<', '<'),
+            BinOp(Shr) => op!('>', '>'),
+            BinOpEq(Plus) => op!('+', '='),
+            BinOpEq(Minus) => op!('-', '='),
+            BinOpEq(Star) => op!('*', '='),
+            BinOpEq(Slash) => op!('/', '='),
+            BinOpEq(Percent) => op!('%', '='),
+            BinOpEq(Caret) => op!('^', '='),
+            BinOpEq(And) => op!('&', '='),
+            BinOpEq(Or) => op!('|', '='),
+            BinOpEq(Shl) => op!('<', '<', '='),
+            BinOpEq(Shr) => op!('>', '>', '='),
             At => op!('@'),
             Dot => op!('.'),
-            DotDot => joint!('.', Dot),
-            DotDotDot => joint!('.', DotDot),
-            DotDotEq => joint!('.', DotEq),
+            DotDot => op!('.', '.'),
+            DotDotDot => op!('.', '.', '.'),
+            DotDotEq => op!('.', '.', '='),
             Comma => op!(','),
             Semi => op!(';'),
             Colon => op!(':'),
-            ModSep => joint!(':', Colon),
-            RArrow => joint!('-', Gt),
-            LArrow => joint!('<', BinOp(Minus)),
-            FatArrow => joint!('=', Gt),
+            ModSep => op!(':', ':'),
+            RArrow => op!('-', '>'),
+            LArrow => op!('<', '-'),
+            FatArrow => op!('=', '>'),
             Pound => op!('#'),
             Dollar => op!('$'),
             Question => op!('?'),
 
             Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
             Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
-            Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),
+            Literal(..) => TokenNode::Literal(self::Literal(token)),
+            DocComment(c) => {
+                let stream = vec![
+                    tt!(TokenNode::Term(Term::intern("doc"))),
+                    tt!(op!('=')),
+                    tt!(TokenNode::Literal(self::Literal(Literal(Lit::Str_(c), None)))),
+                ].into_iter().collect();
+                stack.push(tt!(TokenNode::Group(Delimiter::Bracket, stream)));
+                op!('#')
+            }
 
             Interpolated(_) => {
                 __internal::with_sess(|(sess, _)| {
@@ -692,7 +699,7 @@ impl TokenTree {
                 })
             }
 
-            DotEq => joint!('.', Eq),
+            DotEq => op!('.', '='),
             OpenDelim(..) | CloseDelim(..) => unreachable!(),
             Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
         };
@@ -724,7 +731,29 @@ impl TokenTree {
                 } else { Ident(ident, false) };
                 return TokenTree::Token(self.span.0, token).into();
             }
-            TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
+            TokenNode::Literal(self::Literal(Literal(Lit::Integer(ref a), b)))
+                if a.as_str().starts_with("-") =>
+            {
+                let minus = BinOp(BinOpToken::Minus);
+                let integer = Symbol::intern(&a.as_str()[1..]);
+                let integer = Literal(Lit::Integer(integer), b);
+                let a = TokenTree::Token(self.span.0, minus);
+                let b = TokenTree::Token(self.span.0, integer);
+                return vec![a, b].into_iter().collect()
+            }
+            TokenNode::Literal(self::Literal(Literal(Lit::Float(ref a), b)))
+                if a.as_str().starts_with("-") =>
+            {
+                let minus = BinOp(BinOpToken::Minus);
+                let float = Symbol::intern(&a.as_str()[1..]);
+                let float = Literal(Lit::Float(float), b);
+                let a = TokenTree::Token(self.span.0, minus);
+                let b = TokenTree::Token(self.span.0, float);
+                return vec![a, b].into_iter().collect()
+            }
+            TokenNode::Literal(token) => {
+                return TokenTree::Token(self.span.0, token.0).into()
+            }
         };
 
         let token = match op {

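The key change above is that `TokenTreeIter` now carries a stack of already-split tokens instead of a single pending stream: when `from_internal` meets a compound token such as `<<=`, the trailing characters are pushed onto the stack and the leading character is returned as a Joint op, so later calls to `next` drain the stack before touching the cursor again (in the real code the final character inherits the original token's spacing). A self-contained toy model of that splitting strategy follows; plain chars and strings stand in for the libsyntax token types, and all names here are illustrative, not part of the real crate.

// Toy model of the stack-based operator splitting used by TokenTreeIter.
#[derive(Debug)]
enum Spacing { Alone, Joint }

#[derive(Debug)]
struct Op(char, Spacing);

struct Splitter {
    input: std::vec::IntoIter<&'static str>, // stands in for tokenstream::Cursor
    stack: Vec<Op>,                          // trailing chars of a split compound token
}

impl Iterator for Splitter {
    type Item = Op;

    fn next(&mut self) -> Option<Op> {
        // Drain previously split characters before consuming new input.
        if let Some(op) = self.stack.pop() {
            return Some(op);
        }
        let tok = self.input.next()?;
        let chars: Vec<char> = tok.chars().collect();
        // Push everything after the first char in reverse so it pops back out
        // in source order; in this model only the final char is Alone.
        for i in (1..chars.len()).rev() {
            let spacing = if i + 1 == chars.len() { Spacing::Alone } else { Spacing::Joint };
            self.stack.push(Op(chars[i], spacing));
        }
        let first = if chars.len() > 1 { Spacing::Joint } else { Spacing::Alone };
        Some(Op(chars[0], first))
    }
}

fn main() {
    let splitter = Splitter { input: vec!["<<=", "+"].into_iter(), stack: Vec::new() };
    // Prints: [Op('<', Joint), Op('<', Joint), Op('=', Alone), Op('+', Alone)]
    println!("{:?}", splitter.collect::<Vec<_>>());
}
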
src/test/compile-fail-fulldeps/proc-macro/auxiliary/attributes-included.rs

+27-3
@@ -16,7 +16,7 @@
 
 extern crate proc_macro;
 
-use proc_macro::{TokenStream, TokenTree, TokenNode, Delimiter, Literal};
+use proc_macro::{TokenStream, TokenTree, TokenNode, Delimiter, Literal, Spacing};
 
 #[proc_macro_attribute]
 pub fn foo(attr: TokenStream, input: TokenStream) -> TokenStream {
@@ -65,10 +65,34 @@ fn assert_inline(slice: &mut &[TokenTree]) {
 
 fn assert_doc(slice: &mut &[TokenTree]) {
     match slice[0].kind {
+        TokenNode::Op('#', Spacing::Alone) => {}
+        _ => panic!("expected #"),
+    }
+    let inner = match slice[1].kind {
+        TokenNode::Group(Delimiter::Bracket, ref s) => s.clone(),
+        _ => panic!("expected brackets"),
+    };
+    let tokens = inner.into_iter().collect::<Vec<_>>();
+    let tokens = &tokens[..];
+
+    if tokens.len() != 3 {
+        panic!("expected three tokens in doc")
+    }
+
+    match tokens[0].kind {
+        TokenNode::Term(ref t) => assert_eq!("doc", t.as_str()),
+        _ => panic!("expected `doc`"),
+    }
+    match tokens[1].kind {
+        TokenNode::Op('=', Spacing::Alone) => {}
+        _ => panic!("expected equals"),
+    }
+    match tokens[2].kind {
         TokenNode::Literal(_) => {}
-        _ => panic!("expected literal doc comment got other"),
+        _ => panic!("expected literal"),
     }
-    *slice = &slice[1..];
+
+    *slice = &slice[2..];
 }
 
 fn assert_invoc(slice: &mut &[TokenTree]) {

negative-token.rs (new auxiliary proc-macro crate)

+34-0
@@ -0,0 +1,34 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(proc_macro)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::*;
+
+#[proc_macro]
+pub fn neg_one(_input: TokenStream) -> TokenStream {
+    TokenTree {
+        span: Span::call_site(),
+        kind: TokenNode::Literal(Literal::i32(-1)),
+    }.into()
+}
+
+#[proc_macro]
+pub fn neg_one_float(_input: TokenStream) -> TokenStream {
+    TokenTree {
+        span: Span::call_site(),
+        kind: TokenNode::Literal(Literal::f32(-1.0)),
+    }.into()
+}

(new test file, uses aux-build:negative-token.rs)

+23-0
@@ -0,0 +1,23 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:negative-token.rs
+// ignore-stage1
+
+#![feature(proc_macro)]
+
+extern crate negative_token;
+
+use negative_token::*;
+
+fn main() {
+    assert_eq!(-1, neg_one!());
+    assert_eq!(-1.0, neg_one_float!());
+}

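Because libsyntax has no negative literal token, the `to_internal` hack above works at the string level: when the literal's symbol text begins with "-", it is re-emitted as a `-` token followed by the positive remainder, which is why `neg_one!()` expands to something that parses as the expression -1. A tiny standalone illustration of that splitting rule, with plain &str in place of Symbol and a function name made up for this sketch:

// Illustrative model of how a negative literal is re-emitted as two tokens.
fn split_negative(lit: &str) -> Vec<&str> {
    // Mirrors the check in to_internal: a leading '-' becomes its own token.
    if lit.starts_with("-") {
        vec!["-", &lit[1..]]
    } else {
        vec![lit]
    }
}

fn main() {
    assert_eq!(split_negative("-1i32"), vec!["-", "1i32"]);
    assert_eq!(split_negative("-1.0f32"), vec!["-", "1.0f32"]);
    assert_eq!(split_negative("42"), vec!["42"]);
}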