Commit 75fd391
Introduce TtHandle and use it in TokenSet.
This removes the last use of `<mbe::TokenTree as Clone>`. It also removes two trivial methods on `Delimited`.
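
The pattern this introduces is a small borrow-or-own handle: most entries refer to token trees that already exist in the matcher, while implicitly created tokens (opening/closing delimiters, sequence repetition ops) are stored owned. Below is a minimal standalone sketch of that pattern, using simplified stand-in types rather than the real `mbe::TokenTree` and `Token`; it is illustrative only, not the compiler's code.

// Minimal sketch of the borrow-or-own handle pattern (stand-in types).
#[derive(Debug, PartialEq)]
enum TokenTree {
    Token(String),
    #[allow(dead_code)] // the real type has several more variants
    Delimited(Vec<TokenTree>),
}

// Most handles borrow a tree that already exists in the matcher; implicit
// tokens (e.g. delimiters) are stored owned, so no `TokenTree: Clone` bound
// is ever needed for the borrowed case.
#[derive(Debug)]
enum TtHandle<'tt> {
    TtRef(&'tt TokenTree),
    Token(TokenTree), // must only ever hold `TokenTree::Token`
}

impl<'tt> TtHandle<'tt> {
    // Both variants hand out a plain `&TokenTree`.
    fn get(&'tt self) -> &'tt TokenTree {
        match self {
            TtHandle::TtRef(tt) => tt,
            TtHandle::Token(tt) => tt,
        }
    }
}

// Handles compare by the tree they designate, not by which variant holds it.
impl<'tt> PartialEq for TtHandle<'tt> {
    fn eq(&self, other: &Self) -> bool {
        self.get() == other.get()
    }
}

fn main() {
    let matcher = vec![TokenTree::Token("$e:expr".to_string())];
    let borrowed = TtHandle::TtRef(&matcher[0]);                        // pre-existing tree
    let implicit = TtHandle::Token(TokenTree::Token("$e:expr".into())); // implicitly created
    assert_eq!(borrowed, implicit); // equal through `get()`
}
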
1 parent 2657d8f · commit 75fd391

3 files changed: +111 -53 lines changed

compiler/rustc_expand/src/mbe.rs

-12

@@ -26,18 +26,6 @@ struct Delimited {
     tts: Vec<TokenTree>,
 }
 
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    fn open_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::OpenDelim(self.delim), span.open)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    fn close_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::CloseDelim(self.delim), span.close)
-    }
-}
-
 #[derive(PartialEq, Encodable, Decodable, Debug)]
 struct SequenceRepetition {
     /// The sequence of token trees

compiler/rustc_expand/src/mbe/macro_parser.rs

+5 -2

@@ -142,10 +142,13 @@ pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<Match
             locs.push(MatcherLoc::Token { token: token.clone() });
         }
         TokenTree::Delimited(span, delimited) => {
+            let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
+            let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
+
             locs.push(MatcherLoc::Delimited);
-            inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth);
+            locs.push(MatcherLoc::Token { token: open_token });
             inner(sess, &delimited.tts, locs, next_metavar, seq_depth);
-            inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth);
+            locs.push(MatcherLoc::Token { token: close_token });
         }
         TokenTree::Sequence(_, seq) => {
             // We can't determine `idx_first_after` and construct the final

compiler/rustc_expand/src/mbe/macro_rules.rs

+106 -39

@@ -8,7 +8,7 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
@@ -658,18 +658,18 @@ fn check_matcher(
 // that do not try to inject artificial span information. My plan is
 // to try to catch such cases ahead of time and not include them in
 // the precomputed mapping.)
-struct FirstSets {
+struct FirstSets<'tt> {
     // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
     // span in the original matcher to the First set for the inner sequence `tt ...`.
     //
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
 }
 
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+impl<'tt> FirstSets<'tt> {
+    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
@@ -679,19 +679,22 @@ impl FirstSets {
         // walks backward over `tts`, returning the FIRST for `tts`
         // and updating `sets` at the same time for all sequence
         // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
                     TokenTree::Token(..)
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
-                        first.replace_with(tt.clone());
+                        first.replace_with(TtHandle::TtRef(tt));
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(TtHandle::from_token_kind(
+                            token::OpenDelim(delimited.delim),
+                            span.open,
+                        ));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
@@ -715,7 +718,7 @@ impl FirstSets {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -741,7 +744,7 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
    // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         use mbe::TokenTree;
 
         let mut first = TokenSet::empty();
@@ -752,11 +755,14 @@ impl FirstSets {
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
                 | TokenTree::MetaVarExpr(..) => {
-                    first.add_one(tt.clone());
+                    first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(TtHandle::from_token_kind(
+                        token::OpenDelim(delimited.delim),
+                        span.open,
+                    ));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -775,7 +781,7 @@ impl FirstSets {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
                     }
 
                     assert!(first.maybe_empty);
@@ -803,6 +809,62 @@ impl FirstSets {
     }
 }
 
+// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// implicitly, such as opening/closing delimiters and sequence repetition ops.
+// This type encapsulates both kinds. It implements `Clone` while avoiding the
+// need for `mbe::TokenTree` to implement `Clone`.
+#[derive(Debug)]
+enum TtHandle<'tt> {
+    /// This is used in most cases.
+    TtRef(&'tt mbe::TokenTree),
+
+    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
+    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
+    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
+    /// `&mbe::TokenTree`.
+    Token(mbe::TokenTree),
+}
+
+impl<'tt> TtHandle<'tt> {
+    fn from_token(tok: Token) -> Self {
+        TtHandle::Token(mbe::TokenTree::Token(tok))
+    }
+
+    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
+        TtHandle::from_token(Token::new(kind, span))
+    }
+
+    // Get a reference to a token tree.
+    fn get(&'tt self) -> &'tt mbe::TokenTree {
+        match self {
+            TtHandle::TtRef(tt) => tt,
+            TtHandle::Token(token_tt) => &token_tt,
+        }
+    }
+}
+
+impl<'tt> PartialEq for TtHandle<'tt> {
+    fn eq(&self, other: &TtHandle<'tt>) -> bool {
+        self.get() == other.get()
+    }
+}
+
+impl<'tt> Clone for TtHandle<'tt> {
+    fn clone(&self) -> Self {
+        match self {
+            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
+
+            // This variant *must* contain a `mbe::TokenTree::Token`, and not
+            // any other variant of `mbe::TokenTree`.
+            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
+                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+            }
+
+            _ => unreachable!(),
+        }
+    }
+}
+
 // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
@@ -814,28 +876,28 @@ impl FirstSets {
 //
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
+struct TokenSet<'tt> {
+    tokens: Vec<TtHandle<'tt>>,
     maybe_empty: bool,
 }
 
-impl TokenSet {
+impl<'tt> TokenSet<'tt> {
     // Returns a set for the empty sequence.
     fn empty() -> Self {
         TokenSet { tokens: Vec::new(), maybe_empty: true }
     }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+    fn singleton(tt: TtHandle<'tt>) -> Self {
+        TokenSet { tokens: vec![tt], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
+    fn replace_with(&mut self, tt: TtHandle<'tt>) {
         self.tokens.clear();
-        self.tokens.push(tok);
+        self.tokens.push(tt);
         self.maybe_empty = false;
     }
 
@@ -848,17 +910,17 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
         self.maybe_empty = false;
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
     }
 
@@ -870,9 +932,9 @@ impl TokenSet {
     // setting of the empty flag of `self`. If `other` is guaranteed
     // non-empty, then `self` is marked non-empty.
     fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
+        for tt in &other.tokens {
+            if !self.tokens.contains(tt) {
+                self.tokens.push(tt.clone());
             }
         }
         if !other.maybe_empty {
@@ -892,14 +954,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
+fn check_matcher_core<'tt>(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+    first_sets: &FirstSets<'tt>,
+    matcher: &'tt [mbe::TokenTree],
+    follow: &TokenSet<'tt>,
+) -> TokenSet<'tt> {
     use mbe::TokenTree;
 
     let mut last = TokenSet::empty();
@@ -938,12 +1000,15 @@ fn check_matcher_core(
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with(token.clone());
+                    last.replace_with(TtHandle::TtRef(token));
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
+                    token::CloseDelim(d.delim),
+                    span.close,
+                ));
                 check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -967,7 +1032,7 @@ fn check_matcher_core(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -994,9 +1059,11 @@ fn check_matcher_core(
 
         // Now `last` holds the complete set of NT tokens that could
        // end the sequence before SUFFIX. Check that every one works with `suffix`.
-        for token in &last.tokens {
-            if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token {
+        for tt in &last.tokens {
+            if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
                 for next_token in &suffix_first.tokens {
+                    let next_token = next_token.get();
+
                     // Check if the old pat is used and the next token is `|`
                     // to warn about incompatibility with Rust 2021.
                     // We only emit this lint if we're parsing the original

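Downstream of this change, `TokenSet` stores these handles instead of cloned token trees. The following minimal sketch shows how such a set can deduplicate through the handle's `PartialEq` and be cloned without the tree type itself implementing `Clone`; the `Tree`, `Handle`, and `Set` names are illustrative stand-ins, not the real compiler types.

// Stand-in types mirroring the shape of `TtHandle` / `TokenSet`.
#[derive(Debug, PartialEq)]
struct Tree(String); // deliberately *not* Clone

#[derive(Debug)]
enum Handle<'tt> {
    Ref(&'tt Tree), // borrows a pre-existing tree
    Owned(Tree),    // owns an implicitly created one
}

impl<'tt> Handle<'tt> {
    fn get(&'tt self) -> &'tt Tree {
        match self {
            Handle::Ref(t) => t,
            Handle::Owned(t) => t,
        }
    }
}

impl<'tt> PartialEq for Handle<'tt> {
    fn eq(&self, other: &Self) -> bool {
        self.get() == other.get()
    }
}

impl<'tt> Clone for Handle<'tt> {
    fn clone(&self) -> Self {
        match self {
            Handle::Ref(t) => Handle::Ref(*t),                    // copy the reference
            Handle::Owned(t) => Handle::Owned(Tree(t.0.clone())), // clone only the owned token
        }
    }
}

// A set that deduplicates through the handles' `PartialEq`, so a token added
// once by reference and once as an implicit owned token is stored only once.
struct Set<'tt> {
    tokens: Vec<Handle<'tt>>,
}

impl<'tt> Set<'tt> {
    fn add_one(&mut self, h: Handle<'tt>) {
        if !self.tokens.contains(&h) {
            self.tokens.push(h);
        }
    }
}

fn main() {
    let matcher = vec![Tree("(".to_string())];
    let mut set = Set { tokens: Vec::new() };
    set.add_one(Handle::Ref(&matcher[0]));
    set.add_one(Handle::Owned(Tree("(".to_string()))); // deduplicated
    assert_eq!(set.tokens.len(), 1);

    // Cloning a handle copies the reference or clones only the owned token;
    // `Tree` itself never needs to implement `Clone`.
    let copy = set.tokens[0].clone();
    assert_eq!(copy, set.tokens[0]);
}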