Commit 6a1efbd

Merge branch 'master' into E0403-update-error-format
2 parents: f0ff2d3 + 76fa587

151 files changed: +4440 -4278 lines


mk/crates.mk (+5 -3)

@@ -60,7 +60,7 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_
                rustc_data_structures rustc_platform_intrinsics rustc_errors \
                rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
                rustc_const_eval rustc_const_math rustc_incremental
-HOST_CRATES := syntax syntax_ext syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \
+HOST_CRATES := syntax syntax_ext proc_macro syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \
               flate arena graphviz rbml log serialize
TOOLS := compiletest rustdoc rustc rustbook error_index_generator

@@ -100,6 +100,7 @@ DEPS_test := std getopts term native:rust_test_helpers

DEPS_syntax := std term serialize log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos
DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros
+DEPS_proc_macro := syntax syntax_pos rustc_plugin log
DEPS_syntax_pos := serialize

DEPS_rustc_const_math := std syntax log serialize
@@ -114,8 +115,9 @@ DEPS_rustc_borrowck := rustc log graphviz syntax syntax_pos rustc_errors rustc_m
DEPS_rustc_data_structures := std log serialize
DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \
                     rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \
-                    rustc_trans rustc_privacy rustc_lint rustc_plugin \
-                    rustc_metadata syntax_ext rustc_passes rustc_save_analysis rustc_const_eval \
+                    rustc_trans rustc_privacy rustc_lint rustc_plugin \
+                    rustc_metadata syntax_ext proc_macro \
+                    rustc_passes rustc_save_analysis rustc_const_eval \
                     rustc_incremental syntax_pos rustc_errors
DEPS_rustc_errors := log libc serialize syntax_pos
DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval

src/libcollections/vec.rs (+9 -6)

@@ -1453,10 +1453,11 @@ impl<T> IntoIterator for Vec<T> {
            } else {
                begin.offset(self.len() as isize) as *const T
            };
-           let buf = ptr::read(&self.buf);
+           let cap = self.buf.cap();
            mem::forget(self);
            IntoIter {
-               _buf: buf,
+               buf: Shared::new(begin),
+               cap: cap,
                ptr: begin,
                end: end,
            }
@@ -1708,8 +1709,9 @@ impl<'a, T> FromIterator<T> for Cow<'a, [T]> where T: Clone {
/// [`IntoIterator`]: ../../std/iter/trait.IntoIterator.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> {
-   _buf: RawVec<T>,
-   ptr: *mut T,
+   buf: Shared<T>,
+   cap: usize,
+   ptr: *const T,
    end: *const T,
}

@@ -1750,7 +1752,7 @@ impl<T> IntoIter<T> {
    #[unstable(feature = "vec_into_iter_as_slice", issue = "35601")]
    pub fn as_mut_slice(&self) -> &mut [T] {
        unsafe {
-           slice::from_raw_parts_mut(self.ptr, self.len())
+           slice::from_raw_parts_mut(self.ptr as *mut T, self.len())
        }
    }
}
@@ -1846,9 +1848,10 @@ impl<T> Drop for IntoIter<T> {
    #[unsafe_destructor_blind_to_params]
    fn drop(&mut self) {
        // destroy the remaining elements
-       for _x in self {}
+       for _x in self.by_ref() {}

        // RawVec handles deallocation
+       let _ = unsafe { RawVec::from_raw_parts(*self.buf, self.cap) };
    }
}
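
The change above stops storing a `RawVec<T>` in `IntoIter` and instead keeps a raw pointer plus the original capacity, reassembling the owning buffer in `Drop` so the allocation is still freed exactly once. Below is a minimal sketch of that ownership pattern using only stable `Vec` and raw-pointer APIs; the `RawIntoIter` type and `into_raw_iter` helper are hypothetical stand-ins (not the libcollections code), and zero-sized element types are ignored for simplicity.

use std::mem;
use std::ptr;

/// Hypothetical stand-in for vec::IntoIter: owns the allocation through a raw
/// pointer plus capacity instead of a RawVec, mirroring the shape of the change.
struct RawIntoIter<T> {
    buf: *mut T,   // start of the original allocation
    cap: usize,    // capacity of the original allocation
    ptr: *const T, // next element to yield
    end: *const T, // one past the last initialized element
}

fn into_raw_iter<T>(mut v: Vec<T>) -> RawIntoIter<T> {
    let buf = v.as_mut_ptr();
    let len = v.len();
    let cap = v.capacity();
    mem::forget(v); // the iterator now owns the buffer and its elements
    RawIntoIter {
        buf: buf,
        cap: cap,
        ptr: buf as *const T,
        end: unsafe { buf.offset(len as isize) as *const T },
    }
}

impl<T> Iterator for RawIntoIter<T> {
    type Item = T;
    fn next(&mut self) -> Option<T> {
        if self.ptr == self.end {
            None
        } else {
            unsafe {
                let item = ptr::read(self.ptr); // move the element out of the buffer
                self.ptr = self.ptr.offset(1);
                Some(item)
            }
        }
    }
}

impl<T> Drop for RawIntoIter<T> {
    fn drop(&mut self) {
        // Destroy any elements that were never yielded...
        for _x in self.by_ref() {}
        // ...then rebuild an owning Vec with length 0, so only the buffer is freed.
        // This plays the role of RawVec::from_raw_parts in the real code.
        let _ = unsafe { Vec::from_raw_parts(self.buf, 0, self.cap) };
    }
}

fn main() {
    let mut it = into_raw_iter(vec![String::from("a"), String::from("b"), String::from("c")]);
    assert_eq!(it.next(), Some(String::from("a")));
    // Dropping `it` here must free the two remaining Strings and the backing
    // buffer, without leaking or double-freeing anything.
}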

src/libcollectionstest/slice.rs (+18)

@@ -645,6 +645,24 @@ fn test_iter_size_hints() {
    assert_eq!(xs.iter_mut().size_hint(), (5, Some(5)));
}

+#[test]
+fn test_iter_as_slice() {
+    let xs = [1, 2, 5, 10, 11];
+    let mut iter = xs.iter();
+    assert_eq!(iter.as_slice(), &[1, 2, 5, 10, 11]);
+    iter.next();
+    assert_eq!(iter.as_slice(), &[2, 5, 10, 11]);
+}
+
+#[test]
+fn test_iter_as_ref() {
+    let xs = [1, 2, 5, 10, 11];
+    let mut iter = xs.iter();
+    assert_eq!(iter.as_ref(), &[1, 2, 5, 10, 11]);
+    iter.next();
+    assert_eq!(iter.as_ref(), &[2, 5, 10, 11]);
+}
+
#[test]
fn test_iter_clone() {
    let xs = [1, 2, 5];

src/libcollectionstest/vec.rs (+2 -1)

@@ -11,7 +11,7 @@
use std::borrow::Cow;
use std::iter::{FromIterator, repeat};
use std::mem::size_of;
-use std::vec::Drain;
+use std::vec::{Drain, IntoIter};

use test::Bencher;

@@ -537,6 +537,7 @@ fn test_cow_from() {
#[allow(dead_code)]
fn assert_covariance() {
    fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d }
+    fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> { i }
}

#[bench]

src/libcore/slice.rs (+8)

@@ -37,6 +37,7 @@ use clone::Clone;
use cmp::{Ordering, PartialEq, PartialOrd, Eq, Ord};
use cmp::Ordering::{Less, Equal, Greater};
use cmp;
+use convert::AsRef;
use default::Default;
use fmt;
use intrinsics::assume;
@@ -996,6 +997,13 @@ impl<'a, T> Clone for Iter<'a, T> {
    fn clone(&self) -> Iter<'a, T> { Iter { ptr: self.ptr, end: self.end, _marker: self._marker } }
}

+#[stable(feature = "slice_iter_as_ref", since = "1.12.0")]
+impl<'a, T> AsRef<[T]> for Iter<'a, T> {
+    fn as_ref(&self) -> &[T] {
+        self.as_slice()
+    }
+}
+
/// Mutable slice iterator.
///
/// This struct is created by the [`iter_mut`] method on [slices].
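
As a quick illustration of what the new `AsRef<[T]>` impl enables, the sketch below passes a partially consumed slice iterator to a function that accepts any `AsRef<[i32]>`; the `sum_remaining` helper is purely illustrative, not part of the standard library, and the generic call relies on the impl added in this diff.

// Illustrative helper: any AsRef<[i32]> works, including slice::Iter after this change.
fn sum_remaining<S: AsRef<[i32]>>(s: S) -> i32 {
    s.as_ref().iter().sum()
}

fn main() {
    let xs = [1, 2, 3, 4];
    let mut it = xs.iter();
    it.next(); // consume the first element
    assert_eq!(it.as_ref(), &[2, 3, 4]); // AsRef sees only the elements not yet yielded
    assert_eq!(sum_remaining(it), 9);
}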

src/libproc_macro/Cargo.toml (+15)

@@ -0,0 +1,15 @@
+[package]
+authors = ["The Rust Project Developers"]
+name = "proc_macro"
+version = "0.0.0"
+
+[lib]
+name = "proc_macro"
+path = "lib.rs"
+crate-type = ["dylib"]
+
+[dependencies]
+log = { path = "../liblog" }
+rustc_plugin = { path = "../librustc_plugin" }
+syntax = { path = "../libsyntax" }
+syntax_pos = { path = "../libsyntax_pos" }

src/libproc_macro/build.rs (+89)

@@ -0,0 +1,89 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+extern crate syntax;
+extern crate syntax_pos;
+
+use syntax::ast::Ident;
+use syntax::codemap::DUMMY_SP;
+use syntax::parse::token::{self, Token, keywords, str_to_ident};
+use syntax::tokenstream::{self, TokenTree, TokenStream};
+use std::rc::Rc;
+
+/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and
+/// provide TokenStream concatenation as a generic operator.
+pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream {
+    TokenStream::concat(ts1, ts2)
+}
+
+/// Checks if two identifiers have the same name, disregarding context. This allows us to
+/// fake 'reserved' keywords.
+// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). von Tander 2007 is
+// probably the easiest way to do that.
+pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
+    let tid = match *tident {
+        TokenTree::Token(_, Token::Ident(ref id)) => id,
+        _ => {
+            return false;
+        }
+    };
+
+    tid.name == id.name
+}
+
+// ____________________________________________________________________________________________
+// Conversion operators
+
+/// Convert a `&str` into a Token.
+pub fn str_to_token_ident(s: &str) -> Token {
+    Token::Ident(str_to_ident(s))
+}
+
+/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
+/// corresponds to it.
+pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
+    Token::Ident(str_to_ident(&kw.name().as_str()[..]))
+}
+
+// ____________________________________________________________________________________________
+// Build Procedures
+
+/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified
+/// delimiter.
+pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream {
+    let tts = ts.to_tts();
+    TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP,
+                               Rc::new(tokenstream::Delimited {
+                                   delim: delim,
+                                   open_span: DUMMY_SP,
+                                   tts: tts,
+                                   close_span: DUMMY_SP,
+                               }))])
+}
+
+/// Takes `ts` and returns `[ts]`.
+pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream {
+    build_delimited(ts, token::DelimToken::Bracket)
+}
+
+/// Takes `ts` and returns `{ts}`.
+pub fn build_brace_delimited(ts: TokenStream) -> TokenStream {
+    build_delimited(ts, token::DelimToken::Brace)
+}
+
+/// Takes `ts` and returns `(ts)`.
+pub fn build_paren_delimited(ts: TokenStream) -> TokenStream {
+    build_delimited(ts, token::DelimToken::Paren)
+}
+
+/// Constructs `()`.
+pub fn build_empty_args() -> TokenStream {
+    build_paren_delimited(TokenStream::mk_empty())
+}
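
To make the `build_*_delimited` helpers above easier to picture without the unstable `syntax` crate, here is a toy model using hypothetical stand-in types: wrapping a token sequence in a delimiter turns the whole sequence into a single delimited tree node, so the resulting stream indexes as one term.

// All types and names here are illustrative stand-ins, not the syntax crate's API.
#[derive(Debug, PartialEq, Clone)]
enum Delim {
    Paren,
    Bracket,
    Brace,
}

#[derive(Debug, PartialEq, Clone)]
enum TokenTree {
    Token(String),                    // a single token such as "a" or "+"
    Delimited(Delim, Vec<TokenTree>), // e.g. "(a + c)" held as one tree
}

type TokenStream = Vec<TokenTree>;

/// Wrap an entire stream in one delimited node, as build_delimited does above.
fn build_delimited(ts: TokenStream, delim: Delim) -> TokenStream {
    vec![TokenTree::Delimited(delim, ts)]
}

fn build_paren_delimited(ts: TokenStream) -> TokenStream {
    build_delimited(ts, Delim::Paren)
}

/// `()` is just a Paren-delimited empty stream, as in build_empty_args.
fn build_empty_args() -> TokenStream {
    build_paren_delimited(Vec::new())
}

fn main() {
    // `a + c` as three tokens...
    let ts: TokenStream = ["a", "+", "c"]
        .iter()
        .map(|s| TokenTree::Token(s.to_string()))
        .collect();
    // ...becomes `(a + c)`: a stream of length one whose only item is delimited.
    let wrapped = build_paren_delimited(ts);
    assert_eq!(wrapped.len(), 1);
    assert_eq!(build_empty_args(), vec![TokenTree::Delimited(Delim::Paren, vec![])]);
}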

src/libproc_macro/lib.rs (+137)

@@ -0,0 +1,137 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! # Proc_Macro
+//!
+//! A library for procedural macro writers.
+//!
+//! ## Usage
+//! This package provides the `qquote!` macro for syntax creation, and the prelude
+//! (at libproc_macro::prelude) provides a number of operations:
+//! - `concat`, for concatenating two TokenStreams.
+//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context.
+//! - `str_to_token_ident`, for converting an `&str` into a Token.
+//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a
+//!   Token.
+//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter
+//!   by wrapping the TokenStream in the delimiter.
+//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for
+//!   easing the above.
+//! - `build_empty_args`, which returns a TokenStream containing `()`.
+//! - `lex`, which takes an `&str` and returns the TokenStream it represents.
+//!
+//! The `qquote!` macro also imports `syntax::ext::proc_macro_shim::prelude::*`, so you
+//! will need to `extern crate syntax` for usage. (This is a temporary solution until more
+//! of the external API in libproc_macro is stabilized to support the token construction
+//! operations that the quasiquoter relies on.) The shim file also provides additional
+//! operations, such as `build_block_emitter` (as used in the `cond` example below).
+//!
+//! ## TokenStreams
+//!
+//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of
//! TokenTrees, where indexing treats delimited values as a single term. That is, the term
+//! `even(a+c) && even(b)` will be indexably encoded as `even | (a+c) | even | (b)` where,
+//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`.
+//!
+//! If a user has a TokenStream that is a single, delimited value, they can use
+//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream
+//! as:
+//! ```
+//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)
+//! ```
+//!
+//! Check the TokenStream documentation for more information; the structure also provides
+//! cheap concatenation and slicing.
+//!
+//! ## Quasiquotation
+//!
+//! The quasiquoter creates output that, when run, constructs the tokenstream specified as
+//! input. For example, `qquote!(5 + 5)` will produce a program that, when run, will
+//! construct the TokenStream `5 | + | 5`.
+//!
+//! ### Unquoting
+//!
+//! Unquoting is currently done as `unquote`, and works by taking the single next
+//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works
+//! fine, but `unquote foo` is also supported.
+//!
+//! A simple example might be:
+//!
+//! ```
+//! fn double(tmp: TokenStream) -> TokenStream {
+//!     qquote!(unquote(tmp) * 2)
+//! }
+//! ```
+//!
+//! ### Large Example: Implementing Scheme's `cond`
+//!
+//! Below is the full implementation of Scheme's `cond` operator.
+//!
+//! ```
+//! fn cond_rec(input: TokenStream) -> TokenStream {
+//!     if input.is_empty() { return quote!(); }
+//!
+//!     let next = input.slice(0..1);
+//!     let rest = input.slice_from(1..);
+//!
+//!     let clause : TokenStream = match next.maybe_delimited() {
+//!         Some(ts) => ts,
+//!         _ => panic!("Invalid input"),
+//!     };
+//!
+//!     // clause is ([test]) [rhs]
+//!     if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
+//!
+//!     let test: TokenStream = clause.slice(0..1);
+//!     let rhs: TokenStream = clause.slice_from(1..);
+//!
+//!     if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
+//!         quote!({unquote(rhs)})
+//!     } else {
+//!         quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
+//!     }
+//! }
+//! ```
+//!
+
+#![crate_name = "proc_macro"]
+#![unstable(feature = "rustc_private", issue = "27812")]
+#![feature(plugin_registrar)]
+#![crate_type = "dylib"]
+#![crate_type = "rlib"]
+#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
+       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
+       html_root_url = "https://doc.rust-lang.org/nightly/")]
+#![cfg_attr(not(stage0), deny(warnings))]
+
+#![feature(staged_api)]
+#![feature(rustc_diagnostic_macros)]
+#![feature(rustc_private)]
+
+extern crate rustc_plugin;
+extern crate syntax;
+extern crate syntax_pos;
+#[macro_use] extern crate log;
+
+mod qquote;
+pub mod build;
+pub mod parse;
+pub mod prelude;
+use qquote::qquote;
+
+use rustc_plugin::Registry;
+
+// ____________________________________________________________________________________________
+// Main macro definition
+
+#[plugin_registrar]
+pub fn plugin_registrar(reg: &mut Registry) {
+    reg.register_macro("qquote", qquote);
+}
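
For intuition about the `cond` example in the module docs above, this is the shape of code its recursion is meant to produce, written out by hand; the `cond!` invocation shown in the comment is hypothetical, and only the nesting pattern is the point.

// Hand-expanded picture of
//     cond!( (x < 0  "negative") (x == 0 "zero") (else "positive") )
// following the recursion in cond_rec: each clause becomes one if/else layer,
// and the `else` clause terminates the nesting.
fn classify(x: i32) -> &'static str {
    if x < 0 {
        "negative"
    } else {
        if x == 0 {
            "zero"
        } else {
            "positive"
        }
    }
}

fn main() {
    assert_eq!(classify(-3), "negative");
    assert_eq!(classify(0), "zero");
    assert_eq!(classify(7), "positive");
}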
