diff --git a/compiler/rustc_const_eval/src/interpret/memory.rs b/compiler/rustc_const_eval/src/interpret/memory.rs
index 32b739f9335a0..9ae50d0df80d7 100644
--- a/compiler/rustc_const_eval/src/interpret/memory.rs
+++ b/compiler/rustc_const_eval/src/interpret/memory.rs
@@ -1058,7 +1058,7 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> {
             // operating system this can avoid physically allocating the page.
             dest_alloc
                 .write_uninit(&tcx, dest_range)
-                .map_err(|e| e.to_interp_error(dest_alloc_id))?; // `Size` multiplication
+                .map_err(|e| e.to_interp_error(dest_alloc_id))?;
             // We can forget about the relocations, this is all not initialized anyway.
             return Ok(());
         }
diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs
index e106f9efc1870..ad1ea1a6d39ca 100644
--- a/compiler/rustc_middle/src/mir/interpret/allocation.rs
+++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs
@@ -269,7 +269,7 @@ impl Allocation {
     /// `get_bytes_with_uninit_and_ptr` instead,
     ///
     /// This function also guarantees that the resulting pointer will remain stable
-    /// even when new allocations are pushed to the `HashMap`. `copy_repeatedly` relies
+    /// even when new allocations are pushed to the `HashMap`. `mem_copy_repeatedly` relies
     /// on that.
     ///
     /// It is the caller's responsibility to check bounds and alignment beforehand.
@@ -605,6 +605,9 @@ impl Allocation {
     /// Applies a relocation copy.
     /// The affected range, as defined in the parameters to `prepare_relocation_copy` is expected
     /// to be clear of relocations.
+    ///
+    /// This is dangerous to use as it can violate internal `Allocation` invariants!
+    /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
     pub fn mark_relocation_range(&mut self, relocations: AllocationRelocations) {
         self.relocations.0.insert_presorted(relocations.relative_relocations);
     }
@@ -1124,6 +1127,9 @@ impl Allocation {
     }
 
     /// Applies multiple instances of the run-length encoding to the initialization mask.
+    ///
+    /// This is dangerous to use as it can violate internal `Allocation` invariants!
+    /// It only exists to support an efficient implementation of `mem_copy_repeatedly`.
     pub fn mark_compressed_init_range(
         &mut self,
         defined: &InitMaskCompressed,
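
For context on why these `mark_*` methods get the "dangerous to use" warning: `mem_copy_repeatedly` compresses the source's initialization mask into a run-length encoding once, then replays that encoding over every destination copy, so the apply-side methods deliberately skip the usual invariant checks and trust the caller (e.g. that the target range is already clear of relocations). Below is a minimal, self-contained sketch of that prepare-once/apply-N-times idea. It is not rustc's actual API: it models the mask as a plain `bool` per byte, and the field layout of `InitMaskCompressed` plus the helpers `compress` and `apply_repeatedly` are invented for illustration.

```rust
/// Illustrative run-length encoding of an initialization mask:
/// `initial` is the value of the first run, `runs` holds each run's
/// length, with the value alternating from run to run.
/// (Hypothetical layout; rustc's real `InitMaskCompressed` differs.)
struct InitMaskCompressed {
    initial: bool,
    runs: Vec<usize>,
}

/// Walks the source mask once and records it as alternating runs.
fn compress(mask: &[bool]) -> InitMaskCompressed {
    let initial = mask.first().copied().unwrap_or(false);
    let mut runs = Vec::new();
    let mut current = initial;
    let mut len = 0;
    for &bit in mask {
        if bit == current {
            len += 1;
        } else {
            runs.push(len);
            current = bit;
            len = 1;
        }
    }
    runs.push(len);
    InitMaskCompressed { initial, runs }
}

/// Replays the compressed mask `repeat` times into `dest`, starting at
/// `start`. Each repetition only touches the runs, so repeating the copy
/// N times does not recompute the per-byte mask N times.
fn apply_repeatedly(
    dest: &mut [bool],
    start: usize,
    compressed: &InitMaskCompressed,
    repeat: usize,
) {
    let mut pos = start;
    for _ in 0..repeat {
        let mut value = compressed.initial;
        for &len in &compressed.runs {
            dest[pos..pos + len].fill(value);
            pos += len;
            value = !value;
        }
    }
}

fn main() {
    // Source range: 3 initialized bytes followed by 2 uninitialized ones.
    let src = [true, true, true, false, false];
    let compressed = compress(&src);

    // Copy the mask 3 times back-to-back, as a repeated memcpy would.
    let mut dest = vec![false; src.len() * 3];
    apply_repeatedly(&mut dest, 0, &compressed, 3);

    assert_eq!(dest.iter().filter(|&&b| b).count(), 9);
    println!("{dest:?}");
}
```

The payoff mirrored here is that the linear walk over the mask happens once at prepare time, while each repetition is proportional to the number of runs; the danger the new doc comments call out is the flip side, since the apply step writes through without re-validating the destination's state.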