Skip to content

Commit ce49944

Browse files
authored
Rollup merge of #73569 - Aaron1011:fix/macro-rules-group, r=petrochenkov
Handle `macro_rules!` tokens consistently across crates When we serialize a `macro_rules!` macro, we used a 'lowered' `TokenStream` for its body, which has all `Nonterminal`s expanded in-place via `nt_to_tokenstream`. This matters when an 'outer' `macro_rules!` macro expands to an 'inner' `macro_rules!` macro - the inner macro may use tokens captured from the 'outer' macro in its definition. This means that invoking a foreign `macro_rules!` macro may use a different body `TokenStream` than when the same `macro_rules!` macro is invoked in the same crate. This difference is observable by proc-macros invoked by a `macro_rules!` macro - a `None`-delimited group will be seen in the same-crate case (inserted when converting `Nonterminal`s to the `proc_macro` crate's structs), but no `None`-delimited group in the cross-crate case. To fix this inconsistency, we now insert `None`-delimited groups when 'lowering' a `Nonterminal` `macro_rules!` body, just as we do in `proc_macro_server`. Additionally, we no longer print extra spaces for `None`-delimited groups - as far as pretty-printing is concerned, they don't exist (only their contents do). This ensures that `Display` output of a `TokenStream` does not depend on which crate a `macro_rules!` macro was invoked from. This PR is necessary in order to patch the `solana-genesis-programs` for the upcoming hygiene serialization breakage (#72121 (comment)). The `solana-genesis-programs` crate will need to use a proc macro to re-span certain tokens in a nested `macro_rules!`, which requires us to consistently use a `None`-delimited group. See `src/test/ui/proc-macro/nested-macro-rules.rs` for an example of the kind of nested `macro_rules!` affected by this change.
2 parents 6b57050 + 1ded7a5 commit ce49944

File tree

17 files changed

+395
-56
lines changed

17 files changed

+395
-56
lines changed

src/librustc_ast/attr/mod.rs

+14-4
Original file line numberDiff line numberDiff line change
@@ -560,6 +560,9 @@ impl MetaItemKind {
560560
tokens: &mut impl Iterator<Item = TokenTree>,
561561
) -> Option<MetaItemKind> {
562562
match tokens.next() {
563+
Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
564+
MetaItemKind::name_value_from_tokens(&mut inner_tokens.trees())
565+
}
563566
Some(TokenTree::Token(token)) => {
564567
Lit::from_token(&token).ok().map(MetaItemKind::NameValue)
565568
}
@@ -619,13 +622,20 @@ impl NestedMetaItem {
619622
where
620623
I: Iterator<Item = TokenTree>,
621624
{
622-
if let Some(TokenTree::Token(token)) = tokens.peek() {
623-
if let Ok(lit) = Lit::from_token(token) {
625+
match tokens.peek() {
626+
Some(TokenTree::Token(token)) => {
627+
if let Ok(lit) = Lit::from_token(token) {
628+
tokens.next();
629+
return Some(NestedMetaItem::Literal(lit));
630+
}
631+
}
632+
Some(TokenTree::Delimited(_, token::NoDelim, inner_tokens)) => {
633+
let inner_tokens = inner_tokens.clone();
624634
tokens.next();
625-
return Some(NestedMetaItem::Literal(lit));
635+
return NestedMetaItem::from_tokens(&mut inner_tokens.into_trees().peekable());
626636
}
637+
_ => {}
627638
}
628-
629639
MetaItem::from_tokens(tokens).map(NestedMetaItem::MetaItem)
630640
}
631641
}

src/librustc_ast_lowering/lib.rs

+8-3
Original file line numberDiff line numberDiff line change
@@ -39,8 +39,8 @@ use rustc_ast::ast;
3939
use rustc_ast::ast::*;
4040
use rustc_ast::attr;
4141
use rustc_ast::node_id::NodeMap;
42-
use rustc_ast::token::{self, Nonterminal, Token};
43-
use rustc_ast::tokenstream::{TokenStream, TokenTree};
42+
use rustc_ast::token::{self, DelimToken, Nonterminal, Token};
43+
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
4444
use rustc_ast::visit::{self, AssocCtxt, Visitor};
4545
use rustc_ast::walk_list;
4646
use rustc_ast_pretty::pprust;
@@ -1029,7 +1029,12 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
10291029
match token.kind {
10301030
token::Interpolated(nt) => {
10311031
let tts = (self.nt_to_tokenstream)(&nt, &self.sess.parse_sess, token.span);
1032-
self.lower_token_stream(tts)
1032+
TokenTree::Delimited(
1033+
DelimSpan::from_single(token.span),
1034+
DelimToken::NoDelim,
1035+
self.lower_token_stream(tts),
1036+
)
1037+
.into()
10331038
}
10341039
_ => TokenTree::Token(token).into(),
10351040
}

src/librustc_ast_pretty/pprust.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -257,7 +257,7 @@ fn token_kind_to_string_ext(tok: &TokenKind, convert_dollar_crate: Option<Span>)
257257
token::CloseDelim(token::Bracket) => "]".to_string(),
258258
token::OpenDelim(token::Brace) => "{".to_string(),
259259
token::CloseDelim(token::Brace) => "}".to_string(),
260-
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => " ".to_string(),
260+
token::OpenDelim(token::NoDelim) | token::CloseDelim(token::NoDelim) => "".to_string(),
261261
token::Pound => "#".to_string(),
262262
token::Dollar => "$".to_string(),
263263
token::Question => "?".to_string(),

src/librustc_expand/mbe/macro_rules.rs

+1
Original file line numberDiff line numberDiff line change
@@ -387,6 +387,7 @@ pub fn compile_declarative_macro(
387387
def: &ast::Item,
388388
edition: Edition,
389389
) -> SyntaxExtension {
390+
debug!("compile_declarative_macro: {:?}", def);
390391
let mk_syn_ext = |expander| {
391392
SyntaxExtension::new(
392393
sess,

src/librustc_expand/mbe/quoted.rs

+63-47
Original file line numberDiff line numberDiff line change
@@ -90,72 +90,88 @@ pub(super) fn parse(
9090
/// # Parameters
9191
///
9292
/// - `tree`: the tree we wish to convert.
93-
/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish
93+
/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to finish
9494
/// converting `tree`
9595
/// - `expect_matchers`: same as for `parse` (see above).
9696
/// - `sess`: the parsing session. Any errors will be emitted to this session.
9797
/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
9898
/// unstable features or not.
9999
fn parse_tree(
100100
tree: tokenstream::TokenTree,
101-
trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
101+
outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
102102
expect_matchers: bool,
103103
sess: &ParseSess,
104104
node_id: NodeId,
105105
) -> TokenTree {
106106
// Depending on what `tree` is, we could be parsing different parts of a macro
107107
match tree {
108108
// `tree` is a `$` token. Look at the next token in `trees`
109-
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => match trees.next() {
110-
// `tree` is followed by a delimited set of token trees. This indicates the beginning
111-
// of a repetition sequence in the macro (e.g. `$(pat)*`).
112-
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
113-
// Must have `(` not `{` or `[`
114-
if delim != token::Paren {
115-
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
116-
let msg = format!("expected `(`, found `{}`", tok);
117-
sess.span_diagnostic.span_err(span.entire(), &msg);
118-
}
119-
// Parse the contents of the sequence itself
120-
let sequence = parse(tts, expect_matchers, sess, node_id);
121-
// Get the Kleene operator and optional separator
122-
let (separator, kleene) = parse_sep_and_kleene_op(trees, span.entire(), sess);
123-
// Count the number of captured "names" (i.e., named metavars)
124-
let name_captures = macro_parser::count_names(&sequence);
125-
TokenTree::Sequence(
126-
span,
127-
Lrc::new(SequenceRepetition {
128-
tts: sequence,
129-
separator,
130-
kleene,
131-
num_captures: name_captures,
132-
}),
133-
)
109+
tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
110+
// FIXME: Handle `None`-delimited groups in a more systematic way
111+
// during parsing.
112+
let mut next = outer_trees.next();
113+
let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
114+
if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
115+
trees = Box::new(tts.into_trees());
116+
next = trees.next();
117+
} else {
118+
trees = Box::new(outer_trees);
134119
}
135120

136-
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
137-
// metavariable that names the crate of the invocation.
138-
Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
139-
let (ident, is_raw) = token.ident().unwrap();
140-
let span = ident.span.with_lo(span.lo());
141-
if ident.name == kw::Crate && !is_raw {
142-
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
143-
} else {
144-
TokenTree::MetaVar(span, ident)
121+
match next {
122+
// `tree` is followed by a delimited set of token trees. This indicates the beginning
123+
// of a repetition sequence in the macro (e.g. `$(pat)*`).
124+
Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
125+
// Must have `(` not `{` or `[`
126+
if delim != token::Paren {
127+
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
128+
let msg = format!("expected `(`, found `{}`", tok);
129+
sess.span_diagnostic.span_err(span.entire(), &msg);
130+
}
131+
// Parse the contents of the sequence itself
132+
let sequence = parse(tts, expect_matchers, sess, node_id);
133+
// Get the Kleene operator and optional separator
134+
let (separator, kleene) =
135+
parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
136+
// Count the number of captured "names" (i.e., named metavars)
137+
let name_captures = macro_parser::count_names(&sequence);
138+
TokenTree::Sequence(
139+
span,
140+
Lrc::new(SequenceRepetition {
141+
tts: sequence,
142+
separator,
143+
kleene,
144+
num_captures: name_captures,
145+
}),
146+
)
145147
}
146-
}
147148

148-
// `tree` is followed by a random token. This is an error.
149-
Some(tokenstream::TokenTree::Token(token)) => {
150-
let msg =
151-
format!("expected identifier, found `{}`", pprust::token_to_string(&token),);
152-
sess.span_diagnostic.span_err(token.span, &msg);
153-
TokenTree::MetaVar(token.span, Ident::invalid())
154-
}
149+
// `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special
150+
// metavariable that names the crate of the invocation.
151+
Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
152+
let (ident, is_raw) = token.ident().unwrap();
153+
let span = ident.span.with_lo(span.lo());
154+
if ident.name == kw::Crate && !is_raw {
155+
TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
156+
} else {
157+
TokenTree::MetaVar(span, ident)
158+
}
159+
}
155160

156-
// There are no more tokens. Just return the `$` we already have.
157-
None => TokenTree::token(token::Dollar, span),
158-
},
161+
// `tree` is followed by a random token. This is an error.
162+
Some(tokenstream::TokenTree::Token(token)) => {
163+
let msg = format!(
164+
"expected identifier, found `{}`",
165+
pprust::token_to_string(&token),
166+
);
167+
sess.span_diagnostic.span_err(token.span, &msg);
168+
TokenTree::MetaVar(token.span, Ident::invalid())
169+
}
170+
171+
// There are no more tokens. Just return the `$` we already have.
172+
None => TokenTree::token(token::Dollar, span),
173+
}
174+
}
159175

160176
// `tree` is an arbitrary token. Keep it.
161177
tokenstream::TokenTree::Token(token) => TokenTree::Token(token),

src/librustc_middle/ty/context.rs

+1
Original file line numberDiff line numberDiff line change
@@ -1049,6 +1049,7 @@ impl<'tcx> TyCtxt<'tcx> {
10491049
Some(attr) => attr,
10501050
None => return Bound::Unbounded,
10511051
};
1052+
debug!("layout_scalar_valid_range: attr={:?}", attr);
10521053
for meta in attr.meta_item_list().expect("rustc_layout_scalar_valid_range takes args") {
10531054
match meta.literal().expect("attribute takes lit").kind {
10541055
ast::LitKind::Int(a, _) => return Bound::Included(a),

src/test/ui/macros/doc-comment.rs

+25
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
// check-pass
2+
// Tests that we properly handle a nested macro expansion
3+
// involving a `#[doc]` attribute
4+
#![deny(missing_docs)]
5+
//! Crate docs
6+
7+
macro_rules! doc_comment {
8+
($x:expr, $($tt:tt)*) => {
9+
#[doc = $x]
10+
$($tt)*
11+
}
12+
}
13+
14+
macro_rules! make_comment {
15+
() => {
16+
doc_comment!("Function docs",
17+
pub fn bar() {}
18+
);
19+
}
20+
}
21+
22+
23+
make_comment!();
24+
25+
fn main() {}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
macro_rules! produce_it {
2+
($dollar_one:tt $foo:ident $my_name:ident) => {
3+
#[macro_export]
4+
macro_rules! meta_delim {
5+
($dollar_one ($dollar_one $my_name:ident)*) => {
6+
stringify!($dollar_one ($dollar_one $my_name)*)
7+
}
8+
}
9+
}
10+
}
11+
12+
produce_it!($my_name name);
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
pub struct FirstStruct;
2+
3+
#[macro_export]
4+
macro_rules! outer_macro {
5+
($name:ident) => {
6+
#[macro_export]
7+
macro_rules! inner_macro {
8+
($wrapper:ident) => {
9+
$wrapper!($name)
10+
}
11+
}
12+
}
13+
}
14+
15+
outer_macro!(FirstStruct);

src/test/ui/proc-macro/auxiliary/test-macros.rs

+6
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,12 @@ pub fn print_bang(input: TokenStream) -> TokenStream {
101101
print_helper(input, "BANG")
102102
}
103103

104+
#[proc_macro]
105+
pub fn print_bang_consume(input: TokenStream) -> TokenStream {
106+
print_helper(input, "BANG");
107+
TokenStream::new()
108+
}
109+
104110
#[proc_macro_attribute]
105111
pub fn print_attr(_: TokenStream, input: TokenStream) -> TokenStream {
106112
print_helper(input, "ATTR")

src/test/ui/proc-macro/input-interpolated.stdout

-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,4 @@
11
PRINT-BANG INPUT (DISPLAY): A
2-
PRINT-BANG RE-COLLECTED (DISPLAY): A
32
PRINT-BANG INPUT (DEBUG): TokenStream [
43
Group {
54
delimiter: None,

src/test/ui/proc-macro/meta-delim.rs

+12
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
// aux-build:meta-delim.rs
2+
// edition:2018
3+
// run-pass
4+
5+
// Tests that we can properly deserialize a macro with strange delimiters
6+
// See https://github.com/rust-lang/rust/pull/73569#issuecomment-650860457
7+
8+
extern crate meta_delim;
9+
10+
fn main() {
11+
assert_eq!("a bunch of idents", meta_delim::meta_delim!(a bunch of idents));
12+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
// run-pass
2+
// aux-build:nested-macro-rules.rs
3+
// aux-build:test-macros.rs
4+
// compile-flags: -Z span-debug
5+
// edition:2018
6+
7+
extern crate nested_macro_rules;
8+
extern crate test_macros;
9+
10+
use test_macros::print_bang;
11+
12+
use nested_macro_rules::FirstStruct;
13+
struct SecondStruct;
14+
15+
fn main() {
16+
nested_macro_rules::inner_macro!(print_bang);
17+
18+
nested_macro_rules::outer_macro!(SecondStruct);
19+
inner_macro!(print_bang);
20+
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,26 @@
1+
PRINT-BANG INPUT (DISPLAY): FirstStruct
2+
PRINT-BANG INPUT (DEBUG): TokenStream [
3+
Group {
4+
delimiter: None,
5+
stream: TokenStream [
6+
Ident {
7+
ident: "FirstStruct",
8+
span: $DIR/auxiliary/nested-macro-rules.rs:15:14: 15:25 (#3),
9+
},
10+
],
11+
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#3),
12+
},
13+
]
14+
PRINT-BANG INPUT (DISPLAY): SecondStruct
15+
PRINT-BANG INPUT (DEBUG): TokenStream [
16+
Group {
17+
delimiter: None,
18+
stream: TokenStream [
19+
Ident {
20+
ident: "SecondStruct",
21+
span: $DIR/nested-macro-rules.rs:18:38: 18:50 (#9),
22+
},
23+
],
24+
span: $DIR/auxiliary/nested-macro-rules.rs:9:27: 9:32 (#8),
25+
},
26+
]
+19
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
// run-pass
2+
// aux-build:test-macros.rs
3+
// compile-flags: -Z span-debug
4+
// edition:2018
5+
//
6+
// Tests the pretty-printing behavior of inserting `NoDelim` groups
7+
8+
extern crate test_macros;
9+
use test_macros::print_bang_consume;
10+
11+
macro_rules! expand_it {
12+
(($val1:expr) ($val2:expr)) => { expand_it!($val1 + $val2) };
13+
($val:expr) => { print_bang_consume!("hi" $val (1 + 1)) };
14+
}
15+
16+
fn main() {
17+
expand_it!(1 + (25) + 1);
18+
expand_it!(("hello".len()) ("world".len()));
19+
}

0 commit comments

Comments
 (0)