Skip to content

Commit 00676c8

Browse files
committed
Add ast::SequenceRepetition
1 parent 964191a commit 00676c8

File tree

7 files changed

+160
-93
lines changed

7 files changed

+160
-93
lines changed

src/libsyntax/ast.rs

+63-32
Original file line numberDiff line numberDiff line change
@@ -627,6 +627,19 @@ impl Delimited {
627627
}
628628
}
629629

630+
/// A sequence of token trees
631+
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
632+
pub struct SequenceRepetition {
633+
/// The sequence of token trees
634+
pub tts: Vec<TokenTree>,
635+
/// The optional separator
636+
pub separator: Option<token::Token>,
637+
/// Whether the sequence can be repeated zero (*), or one or more times (+)
638+
pub op: KleeneOp,
639+
/// The number of `MatchNt`s that appear in the sequence (and subsequences)
640+
pub num_captures: uint,
641+
}
642+
630643
/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
631644
/// for token sequences.
632645
#[deriving(Clone, PartialEq, Eq, Encodable, Decodable, Hash, Show)]
@@ -657,58 +670,76 @@ pub enum TokenTree {
657670

658671
// This only makes sense in MBE macros.
659672

660-
/// A kleene-style repetition sequence with a span, a TT forest,
661-
/// an optional separator, and a boolean where true indicates
662-
/// zero or more (..), and false indicates one or more (+).
663-
/// The last member denotes the number of `MATCH_NONTERMINAL`s
664-
/// in the forest.
665-
// FIXME(eddyb) #12938 Use Rc<[TokenTree]> after DST.
666-
TtSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, KleeneOp, uint),
673+
/// A kleene-style repetition sequence with a span
674+
// FIXME(eddyb) #12938 Use DST.
675+
TtSequence(Span, Rc<SequenceRepetition>),
667676
}
668677

669678
impl TokenTree {
670-
/// For unrolling some tokens or token trees into equivalent sequences.
671-
pub fn expand_into_tts(self) -> Rc<Vec<TokenTree>> {
672-
match self {
673-
TtToken(sp, token::DocComment(name)) => {
679+
pub fn len(&self) -> uint {
680+
match *self {
681+
TtToken(_, token::DocComment(_)) => 2,
682+
TtToken(_, token::SubstNt(..)) => 2,
683+
TtToken(_, token::MatchNt(..)) => 3,
684+
TtDelimited(_, ref delimed) => {
685+
delimed.tts.len() + 2
686+
}
687+
TtSequence(_, ref seq) => {
688+
seq.tts.len()
689+
}
690+
TtToken(..) => 0
691+
}
692+
}
693+
694+
pub fn get_tt(&self, index: uint) -> TokenTree {
695+
match (self, index) {
696+
(&TtToken(sp, token::DocComment(_)), 0) => {
697+
TtToken(sp, token::Pound)
698+
}
699+
(&TtToken(sp, token::DocComment(name)), 1) => {
674700
let doc = MetaNameValue(token::intern_and_get_ident("doc"),
675701
respan(sp, LitStr(token::get_name(name), CookedStr)));
676702
let doc = token::NtMeta(P(respan(sp, doc)));
677-
let delimed = Delimited {
703+
TtDelimited(sp, Rc::new(Delimited {
678704
delim: token::Bracket,
679705
open_span: sp,
680706
tts: vec![TtToken(sp, token::Interpolated(doc))],
681707
close_span: sp,
682-
};
683-
Rc::new(vec![TtToken(sp, token::Pound),
684-
TtDelimited(sp, Rc::new(delimed))])
708+
}))
685709
}
686-
TtDelimited(_, ref delimed) => {
687-
let mut tts = Vec::with_capacity(1 + delimed.tts.len() + 1);
688-
tts.push(delimed.open_tt());
689-
tts.extend(delimed.tts.iter().map(|tt| tt.clone()));
690-
tts.push(delimed.close_tt());
691-
Rc::new(tts)
710+
(&TtDelimited(_, ref delimed), _) => {
711+
if index == 0 {
712+
return delimed.open_tt();
713+
}
714+
if index == delimed.tts.len() + 1 {
715+
return delimed.close_tt();
716+
}
717+
delimed.tts[index - 1].clone()
718+
}
719+
(&TtToken(sp, token::SubstNt(name, name_st)), _) => {
720+
let v = [TtToken(sp, token::Dollar),
721+
TtToken(sp, token::Ident(name, name_st))];
722+
v[index]
692723
}
693-
TtToken(sp, token::SubstNt(name, namep)) => {
694-
Rc::new(vec![TtToken(sp, token::Dollar),
695-
TtToken(sp, token::Ident(name, namep))])
724+
(&TtToken(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => {
725+
let v = [TtToken(sp, token::SubstNt(name, name_st)),
726+
TtToken(sp, token::Colon),
727+
TtToken(sp, token::Ident(kind, kind_st))];
728+
v[index]
696729
}
697-
TtToken(sp, token::MatchNt(name, kind, namep, kindp)) => {
698-
Rc::new(vec![TtToken(sp, token::SubstNt(name, namep)),
699-
TtToken(sp, token::Colon),
700-
TtToken(sp, token::Ident(kind, kindp))])
730+
(&TtSequence(_, ref seq), _) => {
731+
seq.tts[index].clone()
701732
}
702-
_ => panic!("Cannot expand a token")
733+
_ => panic!("Cannot expand a token tree")
703734
}
704735
}
705736

706737
/// Returns the `Span` corresponding to this token tree.
707738
pub fn get_span(&self) -> Span {
708739
match *self {
709-
TtToken(span, _) => span,
710-
TtDelimited(span, _) => span,
711-
TtSequence(span, _, _, _, _) => span,
740+
TtToken(span, _) => span,
741+
TtDelimited(span, _) => span,
742+
TtSequence(span, _) => span,
712743
}
713744
}
714745
}

src/libsyntax/ext/tt/macro_parser.rs

+46-26
Original file line numberDiff line numberDiff line change
@@ -100,17 +100,39 @@ use std::collections::hash_map::{Vacant, Occupied};
100100
// To avoid costly uniqueness checks, we require that `MatchSeq` always has
101101
// a nonempty body.
102102

103+
#[deriving(Clone)]
104+
enum TokenTreeOrTokenTreeVec {
105+
Tt(ast::TokenTree),
106+
TtSeq(Rc<Vec<ast::TokenTree>>),
107+
}
108+
109+
impl TokenTreeOrTokenTreeVec {
110+
fn len(&self) -> uint {
111+
match self {
112+
&TtSeq(ref v) => v.len(),
113+
&Tt(ref tt) => tt.len(),
114+
}
115+
}
116+
117+
fn get_tt(&self, index: uint) -> TokenTree {
118+
match self {
119+
&TtSeq(ref v) => v[index].clone(),
120+
&Tt(ref tt) => tt.get_tt(index),
121+
}
122+
}
123+
}
124+
103125
/// an unzipping of `TokenTree`s
104126
#[deriving(Clone)]
105127
struct MatcherTtFrame {
106-
elts: Rc<Vec<ast::TokenTree>>,
128+
elts: TokenTreeOrTokenTreeVec,
107129
idx: uint,
108130
}
109131

110132
#[deriving(Clone)]
111133
pub struct MatcherPos {
112134
stack: Vec<MatcherTtFrame>,
113-
elts: Rc<Vec<ast::TokenTree>>,
135+
top_elts: TokenTreeOrTokenTreeVec,
114136
sep: Option<Token>,
115137
idx: uint,
116138
up: Option<Box<MatcherPos>>,
@@ -124,8 +146,8 @@ pub struct MatcherPos {
124146
pub fn count_names(ms: &[TokenTree]) -> uint {
125147
ms.iter().fold(0, |count, elt| {
126148
count + match elt {
127-
&TtSequence(_, _, _, _, advance_by) => {
128-
advance_by
149+
&TtSequence(_, ref seq) => {
150+
seq.num_captures
129151
}
130152
&TtDelimited(_, ref delim) => {
131153
count_names(delim.tts.as_slice())
@@ -144,7 +166,7 @@ pub fn initial_matcher_pos(ms: Rc<Vec<TokenTree>>, sep: Option<Token>, lo: ByteP
144166
let matches = Vec::from_fn(match_idx_hi, |_i| Vec::new());
145167
box MatcherPos {
146168
stack: vec![],
147-
elts: ms,
169+
top_elts: TtSeq(ms),
148170
sep: sep,
149171
idx: 0u,
150172
up: None,
@@ -183,8 +205,8 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
183205
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
184206
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut uint) {
185207
match m {
186-
&TtSequence(_, ref more_ms, _, _, _) => {
187-
for next_m in more_ms.iter() {
208+
&TtSequence(_, ref seq) => {
209+
for next_m in seq.tts.iter() {
188210
n_rec(p_s, next_m, res, ret_val, idx)
189211
}
190212
}
@@ -278,18 +300,18 @@ pub fn parse(sess: &ParseSess,
278300
};
279301

280302
// When unzipped trees end, remove them
281-
while ei.idx >= ei.elts.len() {
303+
while ei.idx >= ei.top_elts.len() {
282304
match ei.stack.pop() {
283305
Some(MatcherTtFrame { elts, idx }) => {
284-
ei.elts = elts;
306+
ei.top_elts = elts;
285307
ei.idx = idx + 1;
286308
}
287309
None => break
288310
}
289311
}
290312

291313
let idx = ei.idx;
292-
let len = ei.elts.len();
314+
let len = ei.top_elts.len();
293315

294316
/* at end of sequence */
295317
if idx >= len {
@@ -352,17 +374,16 @@ pub fn parse(sess: &ParseSess,
352374
eof_eis.push(ei);
353375
}
354376
} else {
355-
match (*ei.elts)[idx].clone() {
377+
match ei.top_elts.get_tt(idx) {
356378
/* need to descend into sequence */
357-
TtSequence(_, ref matchers, ref sep, kleene_op, match_num) => {
358-
if kleene_op == ast::ZeroOrMore {
379+
TtSequence(sp, seq) => {
380+
if seq.op == ast::ZeroOrMore {
359381
let mut new_ei = ei.clone();
360-
new_ei.match_cur += match_num;
382+
new_ei.match_cur += seq.num_captures;
361383
new_ei.idx += 1u;
362384
//we specifically matched zero repeats.
363-
for idx in range(ei.match_cur, ei.match_cur + match_num) {
364-
new_ei.matches[idx]
365-
.push(Rc::new(MatchedSeq(Vec::new(), sp)));
385+
for idx in range(ei.match_cur, ei.match_cur + seq.num_captures) {
386+
new_ei.matches[idx].push(Rc::new(MatchedSeq(Vec::new(), sp)));
366387
}
367388

368389
cur_eis.push(new_ei);
@@ -372,15 +393,15 @@ pub fn parse(sess: &ParseSess,
372393
let ei_t = ei;
373394
cur_eis.push(box MatcherPos {
374395
stack: vec![],
375-
elts: matchers.clone(),
376-
sep: (*sep).clone(),
396+
sep: seq.separator.clone(),
377397
idx: 0u,
378398
matches: matches,
379399
match_lo: ei_t.match_cur,
380400
match_cur: ei_t.match_cur,
381-
match_hi: ei_t.match_cur + match_num,
401+
match_hi: ei_t.match_cur + seq.num_captures,
382402
up: Some(ei_t),
383-
sp_lo: sp.lo
403+
sp_lo: sp.lo,
404+
top_elts: Tt(TtSequence(sp, seq)),
384405
});
385406
}
386407
TtToken(_, MatchNt(..)) => {
@@ -395,11 +416,10 @@ pub fn parse(sess: &ParseSess,
395416
return Error(sp, "Cannot transcribe in macro LHS".into_string())
396417
}
397418
seq @ TtDelimited(..) | seq @ TtToken(_, DocComment(..)) => {
398-
let tts = seq.expand_into_tts();
399-
let elts = mem::replace(&mut ei.elts, tts);
419+
let lower_elts = mem::replace(&mut ei.top_elts, Tt(seq));
400420
let idx = ei.idx;
401421
ei.stack.push(MatcherTtFrame {
402-
elts: elts,
422+
elts: lower_elts,
403423
idx: idx,
404424
});
405425
ei.idx = 0;
@@ -433,7 +453,7 @@ pub fn parse(sess: &ParseSess,
433453
if (bb_eis.len() > 0u && next_eis.len() > 0u)
434454
|| bb_eis.len() > 1u {
435455
let nts = bb_eis.iter().map(|ei| {
436-
match (*ei.elts)[ei.idx] {
456+
match ei.top_elts.get_tt(ei.idx) {
437457
TtToken(_, MatchNt(bind, name, _, _)) => {
438458
(format!("{} ('{}')",
439459
token::get_ident(name),
@@ -458,7 +478,7 @@ pub fn parse(sess: &ParseSess,
458478
let mut rust_parser = Parser::new(sess, cfg.clone(), box rdr.clone());
459479

460480
let mut ei = bb_eis.pop().unwrap();
461-
match (*ei.elts)[ei.idx] {
481+
match ei.top_elts.get_tt(ei.idx) {
462482
TtToken(_, MatchNt(_, name, _, _)) => {
463483
let name_string = token::get_ident(name);
464484
let match_cur = ei.match_cur;

src/libsyntax/ext/tt/macro_rules.rs

+15-11
Original file line numberDiff line numberDiff line change
@@ -233,20 +233,24 @@ pub fn add_new_extension<'cx>(cx: &'cx mut ExtCtxt,
233233
let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
234234
let argument_gram = vec!(
235235
TtSequence(DUMMY_SP,
236-
Rc::new(vec![
237-
TtToken(DUMMY_SP, match_lhs),
238-
TtToken(DUMMY_SP, token::FatArrow),
239-
TtToken(DUMMY_SP, match_rhs)]),
240-
Some(token::Semi),
241-
ast::OneOrMore,
242-
2),
236+
Rc::new(ast::SequenceRepetition {
237+
tts: vec![
238+
TtToken(DUMMY_SP, match_lhs_tok),
239+
TtToken(DUMMY_SP, token::FatArrow),
240+
TtToken(DUMMY_SP, match_rhs_tok)],
241+
separator: Some(token::Semi),
242+
op: ast::OneOrMore,
243+
num_captures: 2
244+
})),
243245
//to phase into semicolon-termination instead of
244246
//semicolon-separation
245247
TtSequence(DUMMY_SP,
246-
Rc::new(vec![TtToken(DUMMY_SP, token::Semi)]),
247-
None,
248-
ast::ZeroOrMore,
249-
0));
248+
Rc::new(ast::SequenceRepetition {
249+
tts: vec![TtToken(DUMMY_SP, token::Semi)],
250+
separator: None,
251+
op: ast::ZeroOrMore,
252+
num_captures: 0
253+
})));
250254

251255

252256
// Parse the macro_rules! invocation (`none` is for no interpolations):

0 commit comments

Comments
 (0)