@@ -17,13 +17,13 @@ use hir::def::DefKind;
 use rustc_ast::Mutability;
 use rustc_data_structures::fx::{FxHashSet, FxIndexMap};
 use rustc_hir as hir;
+use rustc_hir::definitions::{DefPathData, DisambiguatorState};
 use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs;
 use rustc_middle::mir::interpret::{ConstAllocation, CtfeProvenance, InterpResult};
 use rustc_middle::query::TyCtxtAt;
 use rustc_middle::span_bug;
 use rustc_middle::ty::layout::TyAndLayout;
 use rustc_span::def_id::LocalDefId;
-use rustc_span::sym;
 use tracing::{instrument, trace};

 use super::{
@@ -66,6 +66,7 @@ fn intern_shallow<'tcx, T, M: CompileTimeMachine<'tcx, T>>(
     ecx: &mut InterpCx<'tcx, M>,
     alloc_id: AllocId,
     mutability: Mutability,
+    disambiguator: Option<&mut DisambiguatorState>,
 ) -> Result<impl Iterator<Item = CtfeProvenance> + 'tcx, ()> {
     trace!("intern_shallow {:?}", alloc_id);
     // remove allocation
@@ -88,7 +89,13 @@ fn intern_shallow<'tcx, T, M: CompileTimeMachine<'tcx, T>>(
     // link the alloc id to the actual allocation
     let alloc = ecx.tcx.mk_const_alloc(alloc);
     if let Some(static_id) = ecx.machine.static_def_id() {
-        intern_as_new_static(ecx.tcx, static_id, alloc_id, alloc);
+        intern_as_new_static(
+            ecx.tcx,
+            static_id,
+            alloc_id,
+            alloc,
+            disambiguator.expect("disambiguator needed"),
+        );
     } else {
         ecx.tcx.set_alloc_id_memory(alloc_id, alloc);
     }
@@ -102,11 +109,18 @@ fn intern_as_new_static<'tcx>(
     static_id: LocalDefId,
     alloc_id: AllocId,
     alloc: ConstAllocation<'tcx>,
+    disambiguator: &mut DisambiguatorState,
 ) {
+    // `intern_const_alloc_recursive` is called once per static and it contains the `DisambiguatorState`.
+    // The `<static_id>::{{nested}}` path is thus unique to `intern_const_alloc_recursive` and the
+    // `DisambiguatorState` ensures the generated path is unique for this call as we generate
+    // `<static_id>::{{nested#n}}` where `n` is the `n`th `intern_as_new_static` call.
     let feed = tcx.create_def(
         static_id,
-        Some(sym::nested),
+        None,
         DefKind::Static { safety: hir::Safety::Safe, mutability: alloc.0.mutability, nested: true },
+        Some(DefPathData::NestedStatic),
+        disambiguator,
     );
     tcx.set_nested_alloc_id_static(alloc_id, feed.def_id());

@@ -154,6 +168,8 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval
     intern_kind: InternKind,
     ret: &MPlaceTy<'tcx>,
 ) -> Result<(), InternResult> {
+    let mut disambiguator = DisambiguatorState::new();
+
     // We are interning recursively, and for mutability we are distinguishing the "root" allocation
     // that we are starting in, and all other allocations that we are encountering recursively.
     let (base_mutability, inner_mutability, is_static) = match intern_kind {
@@ -197,7 +213,9 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval
         alloc.1.mutability = base_mutability;
         alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect()
     } else {
-        intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect()
+        intern_shallow(ecx, base_alloc_id, base_mutability, Some(&mut disambiguator))
+            .unwrap()
+            .collect()
     };
     // We need to distinguish "has just been interned" from "was already in `tcx`",
     // so we track this in a separate set.
@@ -291,7 +309,7 @@ pub fn intern_const_alloc_recursive<'tcx, M: CompileTimeMachine<'tcx, const_eval
         // okay with losing some potential for immutability here. This can anyway only affect
         // `static mut`.
         just_interned.insert(alloc_id);
-        match intern_shallow(ecx, alloc_id, inner_mutability) {
+        match intern_shallow(ecx, alloc_id, inner_mutability, Some(&mut disambiguator)) {
            Ok(nested) => todo.extend(nested),
            Err(()) => {
                ecx.tcx.dcx().delayed_bug("found dangling pointer during const interning");
@@ -313,8 +331,9 @@ pub fn intern_const_alloc_for_constprop<'tcx, T, M: CompileTimeMachine<'tcx, T>>
         return interp_ok(());
     }
     // Move allocation to `tcx`.
-    if let Some(_) =
-        (intern_shallow(ecx, alloc_id, Mutability::Not).map_err(|()| err_ub!(DeadLocal))?).next()
+    if let Some(_) = intern_shallow(ecx, alloc_id, Mutability::Not, None)
+        .map_err(|()| err_ub!(DeadLocal))?
+        .next()
     {
         // We are not doing recursive interning, so we don't currently support provenance.
         // (If this assertion ever triggers, we should just implement a
@@ -340,7 +359,7 @@ impl<'tcx> InterpCx<'tcx, DummyMachine> {
         let dest = self.allocate(layout, MemoryKind::Stack)?;
         f(self, &dest.clone().into())?;
         let alloc_id = dest.ptr().provenance.unwrap().alloc_id(); // this was just allocated, it must have provenance
-        for prov in intern_shallow(self, alloc_id, Mutability::Not).unwrap() {
+        for prov in intern_shallow(self, alloc_id, Mutability::Not, None).unwrap() {
            // We are not doing recursive interning, so we don't currently support provenance.
            // (If this assertion ever triggers, we should just implement a
            // proper recursive interning loop -- or just call `intern_const_alloc_recursive`.
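
The new comment in `intern_as_new_static` is the key to the change: one `DisambiguatorState` lives for the whole `intern_const_alloc_recursive` call, and each nested static created under the same parent gets the next free `{{nested#n}}` index, so repeated calls cannot collide. The following standalone sketch only illustrates that counting idea; it does not use rustc's real `DisambiguatorState` or `create_def` APIs, and the `Disambiguator` type and `nested_path` helper are hypothetical names introduced for the example.

```rust
use std::collections::HashMap;

/// Hypothetical stand-in for rustc's `DisambiguatorState` (illustration only).
struct Disambiguator {
    /// Next free disambiguator index per parent path.
    next: HashMap<String, u32>,
}

impl Disambiguator {
    fn new() -> Self {
        Disambiguator { next: HashMap::new() }
    }

    /// Produce a unique `<parent>::{{nested#n}}` path, bumping the counter for `parent`.
    fn nested_path(&mut self, parent: &str) -> String {
        let slot = self.next.entry(parent.to_string()).or_insert(0);
        let path = format!("{}::{{{{nested#{}}}}}", parent, slot);
        *slot += 1;
        path
    }
}

fn main() {
    let mut d = Disambiguator::new();
    // Two nested statics under the same parent get distinct indices.
    assert_eq!(d.nested_path("FOO"), "FOO::{{nested#0}}");
    assert_eq!(d.nested_path("FOO"), "FOO::{{nested#1}}");
    // A different parent starts its own count.
    assert_eq!(d.nested_path("BAR"), "BAR::{{nested#0}}");
}
```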