@@ -3,12 +3,11 @@ use std::collections::hash_map::Entry;
 use std::{mem, slice};
 
 use ast::token::IdentIsRaw;
-use rustc_ast as ast;
 use rustc_ast::token::NtPatKind::*;
 use rustc_ast::token::TokenKind::*;
 use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
-use rustc_ast::{DUMMY_NODE_ID, NodeId};
+use rustc_ast::{self as ast, DUMMY_NODE_ID, NodeId};
 use rustc_ast_pretty::pprust;
 use rustc_attr::{self as attr, TransparencyError};
 use rustc_data_structures::fx::{FxHashMap, FxIndexMap};
@@ -370,34 +369,32 @@ pub(super) fn try_match_macro<'matcher, T: Tracker<'matcher>>(
 pub fn compile_declarative_macro(
     sess: &Session,
     features: &Features,
-    def: &ast::Item,
+    macro_def: &ast::MacroDef,
+    ident: Ident,
+    attrs: &[ast::Attribute],
+    span: Span,
+    node_id: NodeId,
     edition: Edition,
 ) -> (SyntaxExtension, Vec<(usize, Span)>) {
-    debug!("compile_declarative_macro: {:?}", def);
     let mk_syn_ext = |expander| {
         SyntaxExtension::new(
             sess,
             features,
             SyntaxExtensionKind::LegacyBang(expander),
-            def.span,
+            span,
             Vec::new(),
             edition,
-            def.ident.name,
-            &def.attrs,
-            def.id != DUMMY_NODE_ID,
+            ident.name,
+            attrs,
+            node_id != DUMMY_NODE_ID,
         )
     };
     let dummy_syn_ext = |guar| (mk_syn_ext(Box::new(DummyExpander(guar))), Vec::new());
 
     let dcx = sess.dcx();
-    let lhs_nm = Ident::new(sym::lhs, def.span);
-    let rhs_nm = Ident::new(sym::rhs, def.span);
+    let lhs_nm = Ident::new(sym::lhs, span);
+    let rhs_nm = Ident::new(sym::rhs, span);
     let tt_spec = Some(NonterminalKind::TT);
-
-    let macro_def = match &def.kind {
-        ast::ItemKind::MacroDef(def) => def,
-        _ => unreachable!(),
-    };
     let macro_rules = macro_def.macro_rules;
 
     // Parse the macro_rules! invocation
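
Note on the new signature: callers that used to hand over the whole `ast::Item` must now destructure it themselves. A minimal sketch of an adapted call site, assuming we hold an `item: &ast::Item` known to be a macro definition (the surrounding match and variable names are illustrative, not the actual caller):

```rust
// Hypothetical call site; it mirrors the `ItemKind::MacroDef` match
// that this change removes from inside `compile_declarative_macro`.
if let ast::ItemKind::MacroDef(macro_def) = &item.kind {
    let (ext, rule_spans) = compile_declarative_macro(
        sess,
        features,
        macro_def,   // &ast::MacroDef instead of the whole item
        item.ident,  // Ident
        &item.attrs, // &[ast::Attribute]
        item.span,   // Span
        item.id,     // NodeId; DUMMY_NODE_ID marks foreign macros
        edition,
    );
}
```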
@@ -410,25 +407,22 @@ pub fn compile_declarative_macro(
     let argument_gram = vec![
         mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
             tts: vec![
-                mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-                mbe::TokenTree::token(token::FatArrow, def.span),
-                mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+                mbe::TokenTree::MetaVarDecl(span, lhs_nm, tt_spec),
+                mbe::TokenTree::token(token::FatArrow, span),
+                mbe::TokenTree::MetaVarDecl(span, rhs_nm, tt_spec),
             ],
-            separator: Some(Token::new(
-                if macro_rules { token::Semi } else { token::Comma },
-                def.span,
-            )),
-            kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
+            separator: Some(Token::new(if macro_rules { token::Semi } else { token::Comma }, span)),
+            kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, span),
             num_captures: 2,
         }),
         // to phase into semicolon-termination instead of semicolon-separation
         mbe::TokenTree::Sequence(DelimSpan::dummy(), mbe::SequenceRepetition {
             tts: vec![mbe::TokenTree::token(
                 if macro_rules { token::Semi } else { token::Comma },
-                def.span,
+                span,
             )],
             separator: None,
-            kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
+            kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, span),
             num_captures: 0,
         }),
     ];
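
For context, `argument_gram` is the hand-built matcher used to parse the body of the `macro_rules!` invocation itself. Expressed in surface matcher syntax, the two sequences above encode roughly the following (approximate, for illustration only):

```rust
// What the two sequences above accept, approximately:
//
//     $( $lhs:tt => $rhs:tt );+ $(;)*   // `;` separator when macro_rules
//     $( $lhs:tt => $rhs:tt ),+ $(,)*   // `,` separator otherwise
//
// so a definition like this yields two (lhs, rhs) captures:
macro_rules! example {
    ($x:expr) => { $x + 1 };
    () => { 0 };
}
```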
@@ -460,15 +454,15 @@ pub fn compile_declarative_macro(
             };
 
             let s = parse_failure_msg(&token, track.get_expected_token());
-            let sp = token.span.substitute_dummy(def.span);
+            let sp = token.span.substitute_dummy(span);
             let mut err = sess.dcx().struct_span_err(sp, s);
             err.span_label(sp, msg);
             annotate_doc_comment(&mut err, sess.source_map(), sp);
             let guar = err.emit();
             return dummy_syn_ext(guar);
         }
         Error(sp, msg) => {
-            let guar = sess.dcx().span_err(sp.substitute_dummy(def.span), msg);
+            let guar = sess.dcx().span_err(sp.substitute_dummy(span), msg);
             return dummy_syn_ext(guar);
         }
         ErrorReported(guar) => {
@@ -489,21 +483,21 @@ pub fn compile_declarative_macro(
                         &TokenStream::new(vec![tt.clone()]),
                         true,
                         sess,
-                        def.id,
+                        node_id,
                         features,
                         edition,
                     )
                     .pop()
                     .unwrap();
                     // We don't handle errors here, the driver will abort
                     // after parsing/expansion. We can report every error in every macro this way.
-                    check_emission(check_lhs_nt_follows(sess, def, &tt));
+                    check_emission(check_lhs_nt_follows(sess, node_id, &tt));
                     return tt;
                 }
-                sess.dcx().span_bug(def.span, "wrong-structured lhs")
+                sess.dcx().span_bug(span, "wrong-structured lhs")
             })
             .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.dcx().span_bug(def.span, "wrong-structured lhs"),
+        _ => sess.dcx().span_bug(span, "wrong-structured lhs"),
     };
 
     let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
@@ -515,17 +509,17 @@ pub fn compile_declarative_macro(
                         &TokenStream::new(vec![tt.clone()]),
                         false,
                         sess,
-                        def.id,
+                        node_id,
                         features,
                         edition,
                     )
                     .pop()
                     .unwrap();
                 }
-                sess.dcx().span_bug(def.span, "wrong-structured rhs")
+                sess.dcx().span_bug(span, "wrong-structured rhs")
             })
             .collect::<Vec<mbe::TokenTree>>(),
-        _ => sess.dcx().span_bug(def.span, "wrong-structured rhs"),
+        _ => sess.dcx().span_bug(span, "wrong-structured rhs"),
     };
 
     for rhs in &rhses {
@@ -537,15 +531,9 @@ pub fn compile_declarative_macro(
         check_emission(check_lhs_no_empty_seq(sess, slice::from_ref(lhs)));
     }
 
-    check_emission(macro_check::check_meta_variables(
-        &sess.psess,
-        def.id,
-        def.span,
-        &lhses,
-        &rhses,
-    ));
+    check_emission(macro_check::check_meta_variables(&sess.psess, node_id, span, &lhses, &rhses));
 
-    let (transparency, transparency_error) = attr::find_transparency(&def.attrs, macro_rules);
+    let (transparency, transparency_error) = attr::find_transparency(attrs, macro_rules);
     match transparency_error {
         Some(TransparencyError::UnknownTransparency(value, span)) => {
             dcx.span_err(span, format!("unknown macro transparency: `{value}`"));
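
The transparency lookup now takes the attribute slice directly. For reference, the attribute it inspects is the unstable `rustc_macro_transparency`; a sketch of input that exercises this code path (the exact feature gating is an assumption and may differ across versions):

```rust
// Illustrative only. With the unstable `rustc_attrs` feature, the
// recognized values are "transparent", "semitransparent" and "opaque";
// anything else should reach the UnknownTransparency error path above.
#![feature(rustc_attrs)]

#[rustc_macro_transparency = "transparent"]
macro_rules! hygienic {
    () => {};
}
```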
@@ -564,7 +552,7 @@ pub fn compile_declarative_macro(
 
     // Compute the spans of the macro rules for unused rule linting.
    // Also, we are only interested in non-foreign macros.
-    let rule_spans = if def.id != DUMMY_NODE_ID {
+    let rule_spans = if node_id != DUMMY_NODE_ID {
         lhses
             .iter()
             .zip(rhses.iter())
@@ -590,15 +578,15 @@ pub fn compile_declarative_macro(
                 mbe::TokenTree::Delimited(.., delimited) => {
                     mbe::macro_parser::compute_locs(&delimited.tts)
                 }
-                _ => sess.dcx().span_bug(def.span, "malformed macro lhs"),
+                _ => sess.dcx().span_bug(span, "malformed macro lhs"),
             }
         })
         .collect();
 
     let expander = Box::new(MacroRulesMacroExpander {
-        name: def.ident,
-        span: def.span,
-        node_id: def.id,
+        name: ident,
+        span,
+        node_id,
         transparency,
         lhses,
         rhses,
@@ -608,13 +596,13 @@ pub fn compile_declarative_macro(
 
 fn check_lhs_nt_follows(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     lhs: &mbe::TokenTree,
 ) -> Result<(), ErrorGuaranteed> {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
     if let mbe::TokenTree::Delimited(.., delimited) = lhs {
-        check_matcher(sess, def, &delimited.tts)
+        check_matcher(sess, node_id, &delimited.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
         Err(sess.dcx().span_err(lhs.span(), msg))
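
For reference, the `else` branch fires when a rule's left-hand side is captured as a single non-delimited token tree; a minimal reproducer:

```rust
// Each lhs is captured as one `tt`; if that tt is not a delimited
// group, check_lhs_nt_follows rejects it:
macro_rules! broken {
    x => {}; // error: invalid macro matcher; matchers must be
             // contained in balanced delimiters
}
```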
@@ -686,12 +674,12 @@ fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> Result<(), ErrorGuaranteed
 
 fn check_matcher(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     matcher: &[mbe::TokenTree],
 ) -> Result<(), ErrorGuaranteed> {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
-    check_matcher_core(sess, def, &first_sets, matcher, &empty_suffix)?;
+    check_matcher_core(sess, node_id, &first_sets, matcher, &empty_suffix)?;
     Ok(())
 }
 
@@ -1028,7 +1016,7 @@ impl<'tt> TokenSet<'tt> {
 // see `FirstSets::new`.
 fn check_matcher_core<'tt>(
     sess: &Session,
-    def: &ast::Item,
+    node_id: NodeId,
     first_sets: &FirstSets<'tt>,
     matcher: &'tt [mbe::TokenTree],
     follow: &TokenSet<'tt>,
@@ -1082,7 +1070,7 @@ fn check_matcher_core<'tt>(
                     token::CloseDelim(d.delim),
                     span.close,
                 ));
-                check_matcher_core(sess, def, first_sets, &d.tts, &my_suffix)?;
+                check_matcher_core(sess, node_id, first_sets, &d.tts, &my_suffix)?;
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
 
@@ -1114,7 +1102,7 @@ fn check_matcher_core<'tt>(
                 // At this point, `suffix_first` is built, and
                 // `my_suffix` is some TokenSet that we can use
                 // for checking the interior of `seq_rep`.
-                let next = check_matcher_core(sess, def, first_sets, &seq_rep.tts, my_suffix)?;
+                let next = check_matcher_core(sess, node_id, first_sets, &seq_rep.tts, my_suffix)?;
                 if next.maybe_empty {
                     last.add_all(&next);
                 } else {
@@ -1144,7 +1132,7 @@ fn check_matcher_core<'tt>(
                         // macro. (See #86567.)
                         // Macros defined in the current crate have a real node id,
                         // whereas macros from an external crate have a dummy id.
-                        if def.id != DUMMY_NODE_ID
+                        if node_id != DUMMY_NODE_ID
                             && matches!(kind, NonterminalKind::Pat(PatParam { inferred: true }))
                             && matches!(
                                 next_token,
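
The `node_id != DUMMY_NODE_ID` guard restricts this edition-compatibility check to macros defined in the current crate. Roughly the shape it looks for, as a sketch of the #86567 situation (the exact lint and its wording are not shown here):

```rust
// In pre-2021-edition macros, `$p:pat` is inferred to behave like
// `pat_param`, so `|` may legally follow it; `:pat` followed by `|`
// is what the check above flags for locally-defined macros.
macro_rules! matcher {
    ($p:pat | $q:pat) => {};
}
```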