@@ -8,7 +8,7 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
@@ -658,18 +658,18 @@ fn check_matcher(
 // that do not try to inject artificial span information. My plan is
 // to try to catch such cases ahead of time and not include them in
 // the precomputed mapping.)
-struct FirstSets {
+struct FirstSets<'tt> {
     // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
     // span in the original matcher to the First set for the inner sequence `tt ...`.
     //
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
 }
 
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+impl<'tt> FirstSets<'tt> {
+    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
@@ -679,19 +679,22 @@ impl FirstSets {
         // walks backward over `tts`, returning the FIRST for `tts`
         // and updating `sets` at the same time for all sequence
         // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
                     TokenTree::Token(..)
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
-                        first.replace_with(tt.clone());
+                        first.replace_with(TtHandle::TtRef(tt));
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(TtHandle::from_token_kind(
+                            token::OpenDelim(delimited.delim),
+                            span.open,
+                        ));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
@@ -715,7 +718,7 @@ impl FirstSets {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -741,7 +744,7 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         use mbe::TokenTree;
 
         let mut first = TokenSet::empty();
@@ -752,11 +755,14 @@ impl FirstSets {
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
                 | TokenTree::MetaVarExpr(..) => {
-                    first.add_one(tt.clone());
+                    first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(TtHandle::from_token_kind(
+                        token::OpenDelim(delimited.delim),
+                        span.open,
+                    ));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -775,7 +781,7 @@ impl FirstSets {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
                     }
 
                     assert!(first.maybe_empty);
@@ -803,6 +809,62 @@ impl FirstSets {
     }
 }
 
+// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// implicitly, such as opening/closing delimiters and sequence repetition ops.
+// This type encapsulates both kinds. It implements `Clone` while avoiding the
+// need for `mbe::TokenTree` to implement `Clone`.
+#[derive(Debug)]
+enum TtHandle<'tt> {
+    /// This is used in most cases.
+    TtRef(&'tt mbe::TokenTree),
+
+    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
+    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
+    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
+    /// `&mbe::TokenTree`.
+    Token(mbe::TokenTree),
+}
+
+impl<'tt> TtHandle<'tt> {
+    fn from_token(tok: Token) -> Self {
+        TtHandle::Token(mbe::TokenTree::Token(tok))
+    }
+
+    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
+        TtHandle::from_token(Token::new(kind, span))
+    }
+
+    // Get a reference to a token tree.
+    fn get(&'tt self) -> &'tt mbe::TokenTree {
+        match self {
+            TtHandle::TtRef(tt) => tt,
+            TtHandle::Token(token_tt) => &token_tt,
+        }
+    }
+}
+
+impl<'tt> PartialEq for TtHandle<'tt> {
+    fn eq(&self, other: &TtHandle<'tt>) -> bool {
+        self.get() == other.get()
+    }
+}
+
+impl<'tt> Clone for TtHandle<'tt> {
+    fn clone(&self) -> Self {
+        match self {
+            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
+
+            // This variant *must* contain a `mbe::TokenTree::Token`, and not
+            // any other variant of `mbe::TokenTree`.
+            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
+                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+            }
+
+            _ => unreachable!(),
+        }
+    }
+}
+
 // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
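The new `TtHandle<'tt>` type added above is the heart of this change: set entries now borrow token trees that already exist in the matcher, and only implicitly synthesized tokens (open/close delimiters, separators) are owned. For readers outside rustc, here is a minimal, self-contained sketch of that borrow-or-own pattern under invented names (`Node` and `Handle` are illustrative stand-ins, not rustc APIs); it behaves like a hand-rolled `Cow` that still works when the borrowed type does not implement `Clone`.

```rust
// Minimal sketch (not rustc code): `Node` stands in for `mbe::TokenTree`,
// `Handle` for `TtHandle<'tt>`.
#[derive(Debug, PartialEq)]
enum Node {
    Leaf(String),
    Group(Vec<Node>),
}

#[derive(Debug)]
enum Handle<'a> {
    /// Borrows a node that already exists in the input (the common case).
    Ref(&'a Node),
    /// Owns a synthesized leaf (e.g. an implicit open/close delimiter).
    /// By convention this must only ever hold a `Node::Leaf`.
    Owned(Node),
}

impl<'a> Handle<'a> {
    /// Both variants can hand out a plain `&Node`.
    fn get(&self) -> &Node {
        match self {
            Handle::Ref(n) => n,
            Handle::Owned(n) => n,
        }
    }
}

/// Equality compares the underlying nodes, mirroring `TtHandle`'s `PartialEq`.
impl<'a> PartialEq for Handle<'a> {
    fn eq(&self, other: &Self) -> bool {
        self.get() == other.get()
    }
}

/// Cloning a `Ref` copies the reference; only a synthesized leaf is deep-cloned.
impl<'a> Clone for Handle<'a> {
    fn clone(&self) -> Self {
        match self {
            Handle::Ref(n) => Handle::Ref(*n),
            Handle::Owned(Node::Leaf(s)) => Handle::Owned(Node::Leaf(s.clone())),
            Handle::Owned(Node::Group(_)) => unreachable!("only leaves are owned"),
        }
    }
}

fn main() {
    let matcher = Node::Group(vec![Node::Leaf("$e:expr".into()), Node::Leaf(",".into())]);

    // A FIRST-set analogue: one borrowed entry, one synthesized delimiter.
    let first = vec![Handle::Ref(&matcher), Handle::Owned(Node::Leaf("(".into()))];

    let copy = first.clone(); // cheap: `matcher` is never deep-cloned
    assert_eq!(copy, first);
}
```

Cloning a `Handle::Ref` copies only the reference, so copying a set never deep-clones the borrowed tree, which is what the diff achieves for `mbe::TokenTree`.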
@@ -814,28 +876,28 @@ impl FirstSets {
 //
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
+struct TokenSet<'tt> {
+    tokens: Vec<TtHandle<'tt>>,
     maybe_empty: bool,
 }
 
-impl TokenSet {
+impl<'tt> TokenSet<'tt> {
     // Returns a set for the empty sequence.
     fn empty() -> Self {
         TokenSet { tokens: Vec::new(), maybe_empty: true }
     }
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+    fn singleton(tt: TtHandle<'tt>) -> Self {
+        TokenSet { tokens: vec![tt], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
+    fn replace_with(&mut self, tt: TtHandle<'tt>) {
         self.tokens.clear();
-        self.tokens.push(tok);
+        self.tokens.push(tt);
         self.maybe_empty = false;
     }
 
@@ -848,17 +910,17 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
         self.maybe_empty = false;
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
     }
 
@@ -870,9 +932,9 @@ impl TokenSet {
     // setting of the empty flag of `self`. If `other` is guaranteed
     // non-empty, then `self` is marked non-empty.
     fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
+        for tt in &other.tokens {
+            if !self.tokens.contains(tt) {
+                self.tokens.push(tt.clone());
             }
         }
         if !other.maybe_empty {
@@ -892,14 +954,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
+fn check_matcher_core<'tt>(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+    first_sets: &FirstSets<'tt>,
+    matcher: &'tt [mbe::TokenTree],
+    follow: &TokenSet<'tt>,
+) -> TokenSet<'tt> {
     use mbe::TokenTree;
 
     let mut last = TokenSet::empty();
@@ -938,12 +1000,15 @@ fn check_matcher_core(
                     // followed by anything against SUFFIX.
                     continue 'each_token;
                 } else {
-                    last.replace_with(token.clone());
+                    last.replace_with(TtHandle::TtRef(token));
                     suffix_first = build_suffix_first();
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
+                    token::CloseDelim(d.delim),
+                    span.close,
+                ));
                 check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -967,7 +1032,7 @@ fn check_matcher_core(
                 let mut new;
                 let my_suffix = if let Some(sep) = &seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sep.clone()));
+                    new.add_one_maybe(TtHandle::from_token(sep.clone()));
                     &new
                 } else {
                     &suffix_first
@@ -994,9 +1059,11 @@ fn check_matcher_core(
 
         // Now `last` holds the complete set of NT tokens that could
         // end the sequence before SUFFIX. Check that every one works with `suffix`.
-        for token in &last.tokens {
-            if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token {
+        for tt in &last.tokens {
+            if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
                 for next_token in &suffix_first.tokens {
+                    let next_token = next_token.get();
+
                     // Check if the old pat is used and the next token is `|`
                     // to warn about incompatibility with Rust 2021.
                     // We only emit this lint if we're parsing the original
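The remaining hunks are mostly mechanical fallout of that change: once `TokenSet` stores borrowed handles, every type and function that builds or returns one (`FirstSets`, `build_recur`, `first`, `check_matcher_core`) has to carry the matcher's lifetime. A tiny sketch of why, again with invented stand-in names rather than the real rustc types:

```rust
// Hypothetical stand-ins, not rustc types: this only illustrates why
// `FirstSets`, `TokenSet`, `build_recur`, and `check_matcher_core` all gain a
// `'tt` parameter in this diff. A set that stores borrows into the matcher
// must carry the matcher's lifetime, and so must everything that returns one.
struct Set<'a> {
    items: Vec<&'a str>, // analogous to `tokens: Vec<TtHandle<'tt>>`
}

// Analogous to `fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt>`:
// the lifetime ties the returned set to the slice it borrows from.
fn first<'a>(tts: &'a [String]) -> Set<'a> {
    Set { items: tts.iter().map(|s| s.as_str()).collect() }
}

fn main() {
    let matcher: Vec<String> = vec!["$e:expr".into(), ",".into()];
    let set = first(&matcher); // `set` cannot outlive `matcher`
    assert_eq!(set.items, ["$e:expr", ","]);
}
```

The borrow checker then guarantees that no FIRST/FOLLOW set outlives the matcher it points into, which is what makes storing `&'tt` references instead of clones sound.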