@@ -164,75 +164,6 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
         &self.ecx
     }

-    fn visit_aggregate(
-        &mut self,
-        mplace: &MPlaceTy<'tcx>,
-        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
-    ) -> InterpResult<'tcx> {
-        // We want to walk the aggregate to look for references to intern. While doing that we
-        // also need to take special care of interior mutability.
-        //
-        // As an optimization, however, if the allocation does not contain any references: we don't
-        // need to do the walk. It can be costly for big arrays for example (e.g. issue #93215).
-        let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
-            // ZSTs cannot contain pointers, we can avoid the interning walk.
-            if mplace.layout.is_zst() {
-                return Ok(false);
-            }
-
-            // Now, check whether this allocation could contain references.
-            //
-            // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
-            // to avoid could be expensive: on the potentially larger types, arrays and slices,
-            // rather than on all aggregates unconditionally.
-            if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
-                let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
-                    // We do the walk if we can't determine the size of the mplace: we may be
-                    // dealing with extern types here in the future.
-                    return Ok(true);
-                };
-
-                // If there is no provenance in this allocation, it does not contain references
-                // that point to another allocation, and we can avoid the interning walk.
-                if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
-                    if !alloc.has_provenance() {
-                        return Ok(false);
-                    }
-                } else {
-                    // We're encountering a ZST here, and can avoid the walk as well.
-                    return Ok(false);
-                }
-            }
-
-            // In the general case, we do the walk.
-            Ok(true)
-        };
-
-        // If this allocation contains no references to intern, we avoid the potentially costly
-        // walk.
-        //
-        // We can do this before the checks for interior mutability below, because only references
-        // are relevant in that situation, and we're checking if there are any here.
-        if !is_walk_needed(mplace)? {
-            return Ok(());
-        }
-
-        if let Some(def) = mplace.layout.ty.ty_adt_def() {
-            if def.is_unsafe_cell() {
-                // We are crossing over an `UnsafeCell`, we can mutate again. This means that
-                // References we encounter inside here are interned as pointing to mutable
-                // allocations.
-                // Remember the `old` value to handle nested `UnsafeCell`.
-                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
-                let walked = self.walk_aggregate(mplace, fields);
-                self.inside_unsafe_cell = old;
-                return walked;
-            }
-        }
-
-        self.walk_aggregate(mplace, fields)
-    }
-
     fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
         // Handle Reference types, as these are the only types with provenance supported by const eval.
         // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
@@ -315,7 +246,63 @@ impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::Memory
             }
             Ok(())
         } else {
-            // Not a reference -- proceed recursively.
+            // Not a reference. Check if we want to recurse.
+            let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
+                // ZSTs cannot contain pointers, we can avoid the interning walk.
+                if mplace.layout.is_zst() {
+                    return Ok(false);
+                }
+
+                // Now, check whether this allocation could contain references.
+                //
+                // Note, this check may sometimes not be cheap, so we only do it when the walk we'd like
+                // to avoid could be expensive: on the potentially larger types, arrays and slices,
+                // rather than on all aggregates unconditionally.
+                if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
+                    let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
+                        // We do the walk if we can't determine the size of the mplace: we may be
+                        // dealing with extern types here in the future.
+                        return Ok(true);
+                    };
+
+                    // If there is no provenance in this allocation, it does not contain references
+                    // that point to another allocation, and we can avoid the interning walk.
+                    if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
+                        if !alloc.has_provenance() {
+                            return Ok(false);
+                        }
+                    } else {
+                        // We're encountering a ZST here, and can avoid the walk as well.
+                        return Ok(false);
+                    }
+                }
+
+                // In the general case, we do the walk.
+                Ok(true)
+            };
+
+            // If this allocation contains no references to intern, we avoid the potentially costly
+            // walk.
+            //
+            // We can do this before the checks for interior mutability below, because only references
+            // are relevant in that situation, and we're checking if there are any here.
+            if !is_walk_needed(mplace)? {
+                return Ok(());
+            }
+
+            if let Some(def) = mplace.layout.ty.ty_adt_def() {
+                if def.is_unsafe_cell() {
+                    // We are crossing over an `UnsafeCell`, we can mutate again. This means that
+                    // References we encounter inside here are interned as pointing to mutable
+                    // allocations.
+                    // Remember the `old` value to handle nested `UnsafeCell`.
+                    let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
+                    let walked = self.walk_value(mplace);
+                    self.inside_unsafe_cell = old;
+                    return walked;
+                }
+            }
+
             self.walk_value(mplace)
         }
     }
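
A minimal sketch of the shape of constant this fast path is aimed at, per the comments above referencing issue #93215 (the names here are made up for illustration, not part of the diff): a big array with no references has no provenance in its backing allocation, so `is_walk_needed` returns `false` and the per-element interning walk is skipped.

// Hypothetical illustration: a large array constant containing no references.
// Its allocation carries no provenance, so interning can avoid walking each element.
const BIG_LUT: [u8; 1 << 20] = [0u8; 1 << 20];

fn main() {
    // Force the constant to be const-evaluated and interned.
    assert_eq!(BIG_LUT[1024], 0);
}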