diff --git a/compiler/rustc_codegen_gcc/src/builder.rs b/compiler/rustc_codegen_gcc/src/builder.rs index 3cffd862b9b98..e677f8a68d75a 100644 --- a/compiler/rustc_codegen_gcc/src/builder.rs +++ b/compiler/rustc_codegen_gcc/src/builder.rs @@ -562,6 +562,14 @@ impl<'a, 'gcc, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'gcc, 'tcx> { self.llbb().end_with_conditional(self.location, cond, then_block, else_block) } + fn phi( + &mut self, + _typ: Type<'gcc>, + _cases: impl ExactSizeIterator, + ) -> Self::Value { + unimplemented!() + } + fn switch( &mut self, value: RValue<'gcc>, diff --git a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs index 2a7c88afe17dd..18412e94ad8ec 100644 --- a/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs +++ b/compiler/rustc_codegen_gcc/src/intrinsic/mod.rs @@ -8,7 +8,6 @@ use std::iter; use gccjit::Type; use gccjit::{ComparisonOp, Function, FunctionType, RValue, ToRValue, UnaryOp}; use rustc_abi::{BackendRepr, HasDataLayout, WrappingRange}; -use rustc_codegen_ssa::MemFlags; use rustc_codegen_ssa::base::wants_msvc_seh; use rustc_codegen_ssa::common::IntPredicate; use rustc_codegen_ssa::errors::InvalidMonomorphization; @@ -20,6 +19,7 @@ use rustc_codegen_ssa::traits::{ ArgAbiBuilderMethods, BaseTypeCodegenMethods, BuilderMethods, ConstCodegenMethods, IntrinsicCallBuilderMethods, LayoutTypeCodegenMethods, }; +use rustc_codegen_ssa::{MemFlags, RetagInfo}; use rustc_data_structures::fx::FxHashSet; #[cfg(feature = "master")] use rustc_middle::ty::layout::FnAbiOf; @@ -729,6 +729,14 @@ impl<'a, 'gcc, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tc // FIXME(antoyo): implement. 
self.context.new_rvalue_from_int(self.int_type, 0) } + + fn retag_reg(&mut self, _ptr: Self::Value, _info: RetagInfo) -> Self::Value { + unimplemented!(); + } + + fn retag_mem(&mut self, _place: Self::Value, _info: RetagInfo) { + unimplemented!(); + } } impl<'a, 'gcc, 'tcx> ArgAbiBuilderMethods<'tcx> for Builder<'a, 'gcc, 'tcx> { diff --git a/compiler/rustc_codegen_llvm/src/builder.rs b/compiler/rustc_codegen_llvm/src/builder.rs index 056a0763087a2..b6b9f793baba7 100644 --- a/compiler/rustc_codegen_llvm/src/builder.rs +++ b/compiler/rustc_codegen_llvm/src/builder.rs @@ -347,6 +347,18 @@ impl<'a, 'll, 'tcx> BuilderMethods<'a, 'tcx> for Builder<'a, 'll, 'tcx> { } } + fn phi( + &mut self, + ty: &'ll Type, + cases: impl ExactSizeIterator, + ) -> Self::Value { + let phi = unsafe { llvm::LLVMBuildPhi(self.llbuilder, ty, UNNAMED) }; + for (bb, value) in cases { + self.add_incoming_to_phi(phi, value, bb); + } + phi + } + fn switch( &mut self, v: &'ll Value, diff --git a/compiler/rustc_codegen_llvm/src/context.rs b/compiler/rustc_codegen_llvm/src/context.rs index 5b730b820b84a..8a71688e0e959 100644 --- a/compiler/rustc_codegen_llvm/src/context.rs +++ b/compiler/rustc_codegen_llvm/src/context.rs @@ -962,19 +962,36 @@ impl<'ll> CodegenCx<'ll, '_> { // This isn't an "LLVM intrinsic", but LLVM's optimization passes // recognize it like one (including turning it into `bcmp` sometimes) // and we use it to implement intrinsics like `raw_eq` and `compare_bytes` - if base_name == "memcmp" { - let fn_ty = self - .type_func(&[self.type_ptr(), self.type_ptr(), self.type_isize()], self.type_int()); - let f = self.declare_cfn("memcmp", llvm::UnnamedAddr::No, fn_ty); - - return (fn_ty, f); + match base_name { + "memcmp" => { + let fn_ty = self.type_func( + &[self.type_ptr(), self.type_ptr(), self.type_isize()], + self.type_int(), + ); + let f = self.declare_cfn("memcmp", llvm::UnnamedAddr::No, fn_ty); + (fn_ty, f) + } + "__rust_retag_reg" => { + let fn_ty = self.type_func(type_params, 
self.type_ptr()); + let llfn = self.declare_cfn(base_name, llvm::UnnamedAddr::No, fn_ty); + let nounwind = llvm::AttributeKind::NoUnwind.create_attr(self.llcx); + attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[nounwind]); + (fn_ty, llfn) + } + "__rust_retag_mem" => { + let fn_ty = self.type_func(type_params, self.type_void()); + let llfn = self.declare_cfn(base_name, llvm::UnnamedAddr::No, fn_ty); + let nounwind = llvm::AttributeKind::NoUnwind.create_attr(self.llcx); + attributes::apply_to_llfn(llfn, llvm::AttributePlace::Function, &[nounwind]); + (fn_ty, llfn) + } + _ => { + let intrinsic = llvm::Intrinsic::lookup(base_name.as_bytes()) + .unwrap_or_else(|| bug!("Unknown intrinsic: `{base_name}`")); + let f = intrinsic.get_declaration(self.llmod, &type_params); + (self.get_type_of_global(f), f) + } } - - let intrinsic = llvm::Intrinsic::lookup(base_name.as_bytes()) - .unwrap_or_else(|| bug!("Unknown intrinsic: `{base_name}`")); - let f = intrinsic.get_declaration(self.llmod, &type_params); - - (self.get_type_of_global(f), f) } pub(crate) fn eh_catch_typeinfo(&self) -> &'ll Value { diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 94bd4a6ef76ef..2a85f466d5800 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -6,6 +6,7 @@ use rustc_abi::{ AddressSpace, Align, BackendRepr, Float, HasDataLayout, Integer, NumScalableVectors, Primitive, Size, WrappingRange, }; +use rustc_codegen_ssa::RetagInfo; use rustc_codegen_ssa::base::{compare_simd_types, wants_msvc_seh, wants_wasm_eh}; use rustc_codegen_ssa::common::{IntPredicate, TypeKind}; use rustc_codegen_ssa::errors::{ExpectedPointerMutability, InvalidMonomorphization}; @@ -1015,6 +1016,29 @@ impl<'ll, 'tcx> IntrinsicCallBuilderMethods<'tcx> for Builder<'_, 'll, 'tcx> { fn va_end(&mut self, va_list: &'ll Value) -> &'ll Value { self.call_intrinsic("llvm.va_end", &[self.val_ty(va_list)], 
&[va_list]) } + + fn retag_reg(&mut self, ptr: Self::Value, info: RetagInfo) -> Self::Value { + codegen_retag_inner(self, "__rust_retag_reg", ptr, info) + } + + fn retag_mem(&mut self, ptr: Self::Value, info: RetagInfo) { + codegen_retag_inner(self, "__rust_retag_mem", ptr, info); + } +} + +fn codegen_retag_inner<'ll, 'tcx>( + bx: &mut Builder<'_, 'll, 'tcx>, + name: &'static str, + ptr: &'ll Value, + info: RetagInfo<&'ll Value>, +) -> &'ll Value { + let size = bx.const_usize(info.size.bytes()); + let perms = bx.const_u8(info.flags.bits()); + bx.call_intrinsic( + name, + &[bx.type_ptr(), bx.val_ty(size), bx.type_i8(), bx.type_ptr(), bx.type_ptr()], + &[ptr, size, perms, info.im_layout, info.pin_layout], + ) } fn llvm_arch_for(rust_arch: &Arch) -> Option<&'static str> { diff --git a/compiler/rustc_codegen_ssa/src/lib.rs b/compiler/rustc_codegen_ssa/src/lib.rs index 1c266382d0279..d36f4f417862a 100644 --- a/compiler/rustc_codegen_ssa/src/lib.rs +++ b/compiler/rustc_codegen_ssa/src/lib.rs @@ -17,6 +17,7 @@ use std::io; use std::path::{Path, PathBuf}; use std::sync::Arc; +use rustc_abi::Size; use rustc_data_structures::fx::{FxHashSet, FxIndexMap}; use rustc_data_structures::unord::UnordMap; use rustc_hir::CRATE_HIR_ID; @@ -172,6 +173,37 @@ bitflags::bitflags! { } } +#[derive(Debug, Copy, Clone)] +pub struct RetagInfo { + /// The size of the initial range within the allocation that is + /// associated with the permission created by the retag. + pub size: Size, + /// Encoded type information used to determine the kind of permission + /// created by the retag. + pub flags: RetagFlags, + /// A constant array of (offset, size) pairs describing + /// the ranges covered by `UnsafeCell` within the pointee type. + pub im_layout: V, + /// A constant array of (offset, size) pairs describing + /// the ranges covered by `UnsafePinned` within the pointee type. + pub pin_layout: V, +} + +bitflags::bitflags! 
{ + #[repr(C)] + #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + pub struct RetagFlags: u8 { + /// If this is a function-entry retag. + const IS_PROTECTED = 1 << 0; + /// If this is a mutable reference or a `Box`. + const IS_MUTABLE = 1 << 1; + /// If this is a `Box`. + const IS_BOX = 1 << 2; + /// If the pointee type is `Freeze` + const IS_FREEZE = 1 << 3; + } +} + // This is the same as `rustc_session::cstore::NativeLib`, except: // - (important) the `foreign_module` field is missing, because it contains a `DefId`, which can't // be encoded with `FileEncoder`. diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 06c81662d6018..0a1cbaa790df6 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -264,6 +264,12 @@ impl<'a, 'tcx> TerminatorCodegenHelper<'tcx> { bx.lifetime_end(tmp, size); } fx.store_return(bx, ret_dest, &fn_abi.ret, invokeret); + + // If the return value has variants that needed to be retagged, + // then we might be in a different basic block now. + // Update the cached block for `target` to point to this new + // block, where codegen will continue. + fx.cached_llbbs[target] = CachedLlbb::Some(bx.llbb()); } MergingSucc::False } else { @@ -1055,21 +1061,23 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let result_layout = self.cx.layout_of(self.monomorphized_place_ty(destination.as_ref())); + let needs_retag = !destination.is_indirect_first_projection(); + let return_dest = if result_layout.is_zst() { ReturnDest::Nothing } else if let Some(index) = destination.as_local() { match self.locals[index] { - LocalRef::Place(dest) => ReturnDest::Store(dest), + LocalRef::Place(dest) => ReturnDest::Store(dest, needs_retag), LocalRef::UnsizedPlace(_) => bug!("return type must be sized"), LocalRef::PendingOperand => { // Handle temporary places, specifically `Operand` ones, as // they don't have `alloca`s. 
- ReturnDest::DirectOperand(index) + ReturnDest::DirectOperand(index, needs_retag) } LocalRef::Operand(_) => bug!("place local already assigned to"), } } else { - ReturnDest::Store(self.codegen_place(bx, destination.as_ref())) + ReturnDest::Store(self.codegen_place(bx, destination.as_ref()), needs_retag) }; let args = @@ -2039,6 +2047,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if fn_ret.is_ignore() { return ReturnDest::Nothing; } + let needs_retag = !dest.is_indirect_first_projection(); let dest = if let Some(index) = dest.as_local() { match self.locals[index] { LocalRef::Place(dest) => dest, @@ -2052,9 +2061,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let tmp = PlaceRef::alloca(bx, fn_ret.layout); tmp.storage_live(bx); llargs.push(tmp.val.llval); - ReturnDest::IndirectOperand(tmp, index) + ReturnDest::IndirectOperand(tmp, index, needs_retag) } else { - ReturnDest::DirectOperand(index) + ReturnDest::DirectOperand(index, needs_retag) }; } LocalRef::Operand(_) => { @@ -2077,7 +2086,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { llargs.push(dest.val.llval); ReturnDest::Nothing } else { - ReturnDest::Store(dest) + ReturnDest::Store(dest, needs_retag) } } @@ -2090,19 +2099,27 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { llval: Bx::Value, ) { use self::ReturnDest::*; - + let retags_enabled = bx.tcx().sess.opts.unstable_opts.codegen_emit_retag.is_some(); match dest { Nothing => (), - Store(dst) => bx.store_arg(ret_abi, llval, dst), - IndirectOperand(tmp, index) => { - let op = bx.load_operand(tmp); + Store(dst, needs_retag) => { + bx.store_arg(ret_abi, llval, dst); + if retags_enabled && needs_retag { + self.codegen_retag_place(bx, dst, false); + } + } + IndirectOperand(tmp, index, needs_retag) => { + let mut op = bx.load_operand(tmp); + if retags_enabled && needs_retag { + op = self.codegen_retag_operand(bx, op, false); + } tmp.storage_dead(bx); 
self.overwrite_local(index, LocalRef::Operand(op)); self.debug_introduce_local(bx, index); } - DirectOperand(index) => { + DirectOperand(index, needs_retag) => { // If there is a cast, we have to store and reload. - let op = if let PassMode::Cast { .. } = ret_abi.mode { + let mut op = if let PassMode::Cast { .. } = ret_abi.mode { let tmp = PlaceRef::alloca(bx, ret_abi.layout); tmp.storage_live(bx); bx.store_arg(ret_abi, llval, tmp); @@ -2112,6 +2129,9 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { } else { OperandRef::from_immediate_or_packed_pair(bx, llval, ret_abi.layout) }; + if retags_enabled && needs_retag { + op = self.codegen_retag_operand(bx, op, false); + } self.overwrite_local(index, LocalRef::Operand(op)); self.debug_introduce_local(bx, index); } @@ -2123,11 +2143,11 @@ enum ReturnDest<'tcx, V> { /// Do nothing; the return value is indirect or ignored. Nothing, /// Store the return value to the pointer. - Store(PlaceRef<'tcx, V>), + Store(PlaceRef<'tcx, V>, bool), /// Store an indirect return value to an operand local place. - IndirectOperand(PlaceRef<'tcx, V>, mir::Local), + IndirectOperand(PlaceRef<'tcx, V>, mir::Local, bool), /// Store a direct return value to an operand local place. 
- DirectOperand(mir::Local), + DirectOperand(mir::Local, bool), } fn load_cast<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 93da12107bab0..1117108f3858b 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -12,7 +12,6 @@ use tracing::{debug, instrument}; use crate::base; use crate::traits::*; - mod analyze; mod block; mod constant; @@ -23,6 +22,7 @@ mod locals; pub mod naked_asm; pub mod operand; pub mod place; +mod retag; mod rvalue; mod statement; @@ -294,6 +294,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( .chain(mir.vars_and_temps_iter().map(allocate_local)) .collect() }; + fx.initialize_locals(local_values); // Apply debuginfo to the newly allocated locals. @@ -395,7 +396,7 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( return vec![]; } - let args = mir + let mut args = mir .args_iter() .enumerate() .map(|(arg_index, local)| { @@ -530,6 +531,41 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( }) .collect::>(); + if bx.tcx().sess.opts.unstable_opts.codegen_emit_retag.is_some() { + if let ty::InstanceKind::DropGlue(_, _) | ty::InstanceKind::AsyncDropGlue(_, _) = + fx.instance.def + { + // We need to special-case drop glue for now. The first argument is + // a raw pointer, but it needs to be treated as if it were `&mut _`. 
+ args[0] = dropee_emit_retag(bx, fx, &args[0]); + } else { + args = args + .iter() + .map(|arg| match arg { + &LocalRef::Place(place_ref) => { + fx.codegen_retag_place(bx, place_ref, true); + LocalRef::Place(place_ref) + } + &LocalRef::UnsizedPlace(place_ref) => { + let operand = bx.load_operand(place_ref); + let retagged = fx.codegen_retag_operand(bx, operand, true); + assert!(matches!(retagged.val, OperandValue::Pair(_, _))); + retagged.val.store(bx, place_ref); + LocalRef::UnsizedPlace(place_ref) + } + &LocalRef::Operand(operand_ref) => { + let retagged = fx.codegen_retag_operand(bx, operand_ref, true); + LocalRef::Operand(retagged) + } + LocalRef::PendingOperand => LocalRef::PendingOperand, + }) + .collect::>(); + } + // If we branched during retagging, then we need to update the + // start block to the new location. + fx.cached_llbbs[mir::START_BLOCK] = CachedLlbb::Some(bx.llbb()); + } + if fx.instance.def.requires_caller_location(bx.tcx()) { let mir_args = if let Some(num_untupled) = num_untupled { // Subtract off the tupled argument that gets 'expanded' @@ -560,6 +596,34 @@ fn arg_local_refs<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( args } +fn dropee_emit_retag<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( + bx: &mut Bx, + fx: &mut FunctionCx<'a, 'tcx, Bx>, + local: &LocalRef<'tcx, Bx::Value>, +) -> LocalRef<'tcx, Bx::Value> { + // We have `*mut _` as our first argument + if let &LocalRef::Operand(OperandRef { val, layout, move_annotation }) = local { + if layout.ty.is_raw_ptr() + && let Some(deref_ty) = layout.ty.builtin_deref(true) + { + // Create `&mut _` + let lifetime = bx.tcx().lifetimes.re_erased; + let subst_ty_kind = ty::Ref(lifetime, deref_ty, ty::Mutability::Mut); + let subst_ty = bx.tcx().mk_ty_from_kind(subst_ty_kind); + let subst_layout = bx.layout_of(subst_ty); + + // We want the same operand value, but use the reference type for it. 
+ let operand_ref = OperandRef { val, layout: subst_layout, move_annotation }; + + let retagged = fx.codegen_retag_operand(bx, operand_ref, true); + + // Return the retagged parameter, but use the original layout now. + return LocalRef::Operand(OperandRef { val: retagged.val, layout, move_annotation }); + } + } + bug!("dropee isn't a raw pointer") +} + fn find_cold_blocks<'tcx>( tcx: TyCtxt<'tcx>, mir: &mir::Body<'tcx>, diff --git a/compiler/rustc_codegen_ssa/src/mir/operand.rs b/compiler/rustc_codegen_ssa/src/mir/operand.rs index e1d1ef858c017..b4fef8e59de9e 100644 --- a/compiler/rustc_codegen_ssa/src/mir/operand.rs +++ b/compiler/rustc_codegen_ssa/src/mir/operand.rs @@ -701,6 +701,34 @@ impl<'a, 'tcx, V: CodegenObject> OperandRefBuilder<'tcx, V> { OperandRefBuilder { val, layout } } + /// Creates an initialized builder for an existing `operand`. + /// + /// ICEs for [`BackendRepr::Memory`] types (other than ZSTs), which use + /// [`OperandValue::Ref`]. In this case, updates should be + /// performed by writing into the place. + pub(super) fn from_existing(operand: OperandRef<'tcx, V>) -> Self { + let layout = operand.layout; + let val = match (operand.val, layout.backend_repr) { + (OperandValue::ZeroSized, _) => OperandValueBuilder::ZeroSized, + (OperandValue::Immediate(v), BackendRepr::Scalar(_)) => { + OperandValueBuilder::Immediate(Either::Left(v)) + } + (OperandValue::Immediate(v), BackendRepr::SimdVector { .. }) => { + OperandValueBuilder::Vector(Either::Left(v)) + } + (OperandValue::Pair(a, b), BackendRepr::ScalarPair(_, _)) => { + OperandValueBuilder::Pair(Either::Left(a), Either::Left(b)) + } + (_, BackendRepr::Memory { ..
}) => { + bug!("Cannot use non-ZST Memory-ABI type in operand builder: {layout:?}"); + } + _ => { + bug!("Operand cannot be used with `from_existing`: {operand:?}") + } + }; + OperandRefBuilder { val, layout } + } + pub(super) fn insert_field>( &mut self, bx: &mut Bx, @@ -812,6 +840,27 @@ impl<'a, 'tcx, V: CodegenObject> OperandRefBuilder<'tcx, V> { } } + /// Replaces the current immediate value at the offset `offset` + /// with the value `imm`. A value must already be present. + /// + /// This is used along with [`Self::from_existing`] to perform in-place updates + /// of any operand. + pub(super) fn update_imm(&mut self, offset: Size, imm: V) { + let is_zero_offset = offset == Size::ZERO; + match &mut self.val { + OperandValueBuilder::Immediate(val @ Either::Left(_)) if is_zero_offset => { + *val = Either::Left(imm); + } + OperandValueBuilder::Pair(fst @ Either::Left(_), _) if is_zero_offset => { + *fst = Either::Left(imm); + } + OperandValueBuilder::Pair(_, snd @ Either::Left(_)) if !is_zero_offset => { + *snd = Either::Left(imm); + } + _ => bug!("Tried to update {imm:?} at offset {offset:?} of {self:?}"), + } + } + /// After having set all necessary fields, this converts the builder back + /// to the normal `OperandRef`. + /// diff --git a/compiler/rustc_codegen_ssa/src/mir/retag.rs b/compiler/rustc_codegen_ssa/src/mir/retag.rs new file mode 100644 index 0000000000000..0480bab755ac2 --- /dev/null +++ b/compiler/rustc_codegen_ssa/src/mir/retag.rs @@ -0,0 +1,757 @@ +//! Support for emitting retags as function calls. +//! +//! Both Stacked and Tree Borrows rely on retag operations to create +//! and update the permissions associated with pointers. This module provides support +//! for emitting retags as function calls, making it possible to find aliasing violations +//! in lower-level representations of Rust programs. The underlying functions do not +//! actually exist; they are just a vehicle for lowering type and aliasing information. +//! +//! 
There are two kinds of retag intrinsics. The first variant, `__rust_retag_reg`, +//! is used to retag a pointer that has already been loaded into a register. Its first +//! argument is the pointer being retagged, and it returns an alias with the same address, +//! but different provenance. The second variant, `__rust_retag_mem`, is used to retag a +//! pointer stored within a place. It receives a pointer to the place. If we used the `reg` +//! variant instead, then we would need to load the pointer from the place and store the +//! retagged result back to reflect that its provenance had changed. If the place has LLVM's +//! `readonly` attribute or equivalent, then this additional store is undefined behavior. +//! The `mem` variant communicates this level of indirection without having to insert an +//! explicit store. The remaining arguments are the same for each variant. +//! +//! * Size (`i64`) - The size of the permission created by the retag. +//! * Permissions (`i8`) - A set of flags encoding the type of permission (see [`RetagFlags`]) +//! * Interior Mutable Ranges (`ptr`) - A pointer to a global array of the ranges covered by `UnsafeCell`. +//! * Pinned Ranges (`ptr`) - A pointer to a global array of the ranges covered by `UnsafePinned`. +//! +//! We attempt to retag every argument and return value of a function, and every rvalue +//! of an assignment. The first step to retagging is to generate a [`RetagPlan`], which +//! describes which pointers within the place or operand can be retagged. We traverse +//! the [`RetagPlan`] to codegen each call, as needed. Traversal is made easier by [`Retagable`]. +//! Both [`PlaceRef`] and [`OperandRef`] implement this trait, allowing us to use the same visitor +//! pattern for each case. 
+ +use std::vec; + +use rustc_abi::{BackendRepr, FieldIdx, FieldsShape, Size, VariantIdx, Variants}; +use rustc_middle::mir::interpret::Allocation; +use rustc_middle::ty::Mutability; +use rustc_middle::ty::data_structures::IndexMap; +use rustc_middle::ty::layout::HasTypingEnv; +use rustc_middle::{bug, ty}; + +use super::{BuilderMethods, FunctionCx}; +use crate::mir::operand::{OperandRef, OperandRefBuilder, OperandValue}; +use crate::mir::place::PlaceRef; +use crate::mir::{Ty, TyAndLayout}; +use crate::traits::{BaseTypeCodegenMethods, CodegenMethods}; +use crate::{RetagFlags, RetagInfo}; + +/// A description of the pointers within a type that are affected by a retag. +#[derive(Debug)] +enum RetagPlan { + /// Indicates that a pointer should be retagged. + EmitRetag(RetagInfo), + + /// Indicates that one or more fields or variants of this type + /// contain pointers that need to be retagged. + Recurse { + fields: IndexMap>, + variants: IndexMap>, + }, +} + +impl RetagPlan { + /// A helper function to move a [`RetagPlan`] into a particular field. + fn for_field(plan: RetagPlan, idx: FieldIdx) -> Self { + let (mut fields, variants) = (IndexMap::default(), IndexMap::default()); + fields.insert(idx, plan); + RetagPlan::Recurse { fields, variants } + } +} + +impl<'a, 'tcx, V> RetagPlan { + /// Attempts to create a [`RetagPlan`] for a place or operand with the given layout. + fn build>( + bx: &mut Bx, + fx: &mut FunctionCx<'a, 'tcx, Bx>, + layout: TyAndLayout<'tcx>, + is_fn_entry: bool, + ) -> Option> { + // If the value being retagged is smaller than a pointer, then it can't contain any + // pointers we need to retag, so we can stop recursion early. This optimization is crucial + // for ZSTs, because they can contain way more fields than we can ever visit. 
+ if layout.is_sized() && layout.size < bx.tcx().data_layout.pointer_size() { + return None; + } + // SIMD vectors may only contain raw pointers, integers, and floating point values, + // which do not need to be retagged. + if matches!(layout.backend_repr, BackendRepr::SimdVector { .. }) { + return None; + } + + // Check the type of this value to see what to do with it (retag, or recurse). + match layout.ty.kind() { + &ty::Ref(_, pointee, mt) => { + let pointee_layout = bx.layout_of(pointee); + Self::emit_retag(bx, pointee_layout, Some(mt), is_fn_entry) + } + &ty::RawPtr(_, _) => None, + // `Box` needs special handling, since the innermost pointer is what gets retagged, + // though the outermost `Box` is what determines the permission that gets created. + ty::Adt(adt, _) if adt.is_box() => Self::visit_box(bx, fx, layout, is_fn_entry), + + _ => Self::walk_value(bx, fx, layout, is_fn_entry), + } + } + + /// Recurses through the fields and variants of a value in memory order to create a [`RetagPlan`]. + fn walk_value>( + bx: &mut Bx, + fx: &mut FunctionCx<'a, 'tcx, Bx>, + layout: TyAndLayout<'tcx>, + is_fn_entry: bool, + ) -> Option> { + let indices: Vec = match &layout.fields { + FieldsShape::Union(_) | FieldsShape::Primitive => vec![], + FieldsShape::Arbitrary { in_memory_order, .. } => { + in_memory_order.iter().copied().collect() + } + FieldsShape::Array { .. } => { + layout.fields.index_by_increasing_offset().map(FieldIdx::from_usize).collect() + } + }; + + let fields: Vec<(FieldIdx, RetagPlan)> = indices + .iter() + .filter_map(|idx| { + let field_layout = layout.field(bx, idx.as_usize()); + Self::build(bx, fx, field_layout, is_fn_entry).map(|plan| (*idx, plan)) + }) + .collect(); + + let variants: Vec<(VariantIdx, RetagPlan)> = match &layout.variants { + Variants::Multiple { variants, ..
} => variants + .indices() + .filter_map(|vidx| { + let variant_layout = layout.for_variant(bx, vidx); + Self::build(bx, fx, variant_layout, is_fn_entry).map(|plan| (vidx, plan)) + }) + .collect(), + Variants::Single { .. } | Variants::Empty => vec![], + }; + + (!fields.is_empty() || !variants.is_empty()).then(|| RetagPlan::Recurse { + fields: fields.into_iter().collect(), + variants: variants.into_iter().collect(), + }) + } + + /// Emits a retag for a `Box`. + fn visit_box>( + bx: &mut Bx, + fx: &mut FunctionCx<'a, 'tcx, Bx>, + ptr_layout: TyAndLayout<'tcx>, + is_fn_entry: bool, + ) -> Option> { + assert!(ptr_layout.ty.is_box()); + assert_eq!(ptr_layout.fields.count(), 2, "`Box` must have exactly 2 fields"); + let mut fields = vec![]; + + // Only retag the inner pointer of a `Box` if it came from the global allocator. + // We need special handling here because we are retagging a raw pointer, which would + // usually be skipped. + if ptr_layout.ty.is_box_global(bx.tcx()) { + let boxed_ty = ptr_layout.ty.expect_boxed_ty(); + let boxed_layout = bx.layout_of(boxed_ty); + if let Some(mut plan) = Self::emit_retag(bx, boxed_layout, None, is_fn_entry) { + // `Unique` + let unique = ptr_layout.field(bx, 0); + plan = RetagPlan::for_field(plan, FieldIdx::ZERO); + + // `NonNull` + let nonnull = unique.field(bx, 0); + plan = RetagPlan::for_field(plan, FieldIdx::ZERO); + + // `pattern_type!(*mut T + ..)` + let pattern = nonnull.field(bx, 0); + plan = RetagPlan::for_field(plan, FieldIdx::ZERO); + + // `*mut T` + let ptr = pattern.field(bx, 0); + assert_eq!(ptr.ty.builtin_deref(true), Some(boxed_ty)); + fields.push((FieldIdx::ZERO, plan)); + } + } + + // We always try to retag the second field (the allocator) + let field_layout = ptr_layout.field(bx, 1); + if let Some(plan) = Self::build(bx, fx, field_layout, is_fn_entry) { + fields.push((FieldIdx::ONE, plan)); + } + + (!fields.is_empty()).then(|| RetagPlan::Recurse { + fields: fields.into_iter().collect(), + variants: 
IndexMap::default(), + }) + } + + /// Attempts to retag a pointer to a type with the given layout. + /// Returns `None` for mutable pointers to types that are entirely + /// covered by `UnsafePinned`, for which retags are a noop. + fn emit_retag>( + bx: &mut Bx, + pointee_layout: TyAndLayout<'tcx>, + ptr_kind: Option, + is_fn_entry: bool, + ) -> Option> { + let opts = bx.tcx().sess.opts.unstable_opts.codegen_emit_retag.unwrap_or_default(); + + let pointee_ty = pointee_layout.ty; + let pin_ranges = UnsafePinnedRanges::collect(bx, pointee_layout, opts.no_precise_pin); + + let is_mutable = matches!(ptr_kind, Some(Mutability::Mut) | None); + let is_unpin = UnsafePinnedRanges::excludes(bx, pointee_ty); + let is_freeze = UnsafeCellRanges::excludes(bx, pointee_ty); + let is_box = ptr_kind.is_none(); + + // `&mut !Unpin` is not protected + let is_protected = is_fn_entry && (!is_mutable || is_unpin); + + if is_mutable { + // Everything is covered by `UnsafePinned`. + let all_pinned = matches!( + pin_ranges.as_slice(), + [[Size::ZERO, size]] if *size == pointee_layout.size, + ); + // We can't find any `UnsafePinned`, but the type is still + // `!Unpin` or `!UnsafeUnpin`. + let implicitly_pinned = pin_ranges.is_empty() && !is_unpin; + + if all_pinned || implicitly_pinned { + return None; + } + }; + + let im_ranges = UnsafeCellRanges::collect(bx, pointee_layout, opts.no_precise_im); + + let mut flags = RetagFlags::empty(); + flags.set(RetagFlags::IS_PROTECTED, is_protected); + flags.set(RetagFlags::IS_MUTABLE, is_mutable); + flags.set(RetagFlags::IS_BOX, is_box); + + // We need to track `Freeze` separately from `UnsafeCellRanges` so that we can + // handle ZSTs, which still need to be treated as interior mutable (e.g. `UnsafeCell<()>`). 
+ flags.set(RetagFlags::IS_FREEZE, is_freeze); + + Some(RetagPlan::EmitRetag(RetagInfo { + size: pointee_layout.size, + im_layout: Self::alloc_ranges(bx, im_ranges), + pin_layout: Self::alloc_ranges(bx, pin_ranges), + flags, + })) + } + + /// Creates a pointer to a global static allocation containing adjacent pairs of `usize` bytes, + /// which indicate the offset and width of a range within the layout of a type. Returns a null + /// pointer if the list of ranges is empty. + fn alloc_ranges>( + bx: &mut Bx, + ranges: Vec<[Size; 2]>, + ) -> Bx::Value { + let tcx = bx.tcx(); + if ranges.is_empty() { + return bx.const_null(bx.type_ptr()); + } + + let bytes: Vec = + ranges.iter().flatten().flat_map(|u| u.bytes_usize().to_ne_bytes()).collect(); + + let align = tcx.data_layout.ptr_sized_integer().align(&tcx.data_layout).abi; + + let alloc = Allocation::from_bytes(&bytes, align, Mutability::Not, ()); + let const_alloc = tcx.mk_const_alloc(alloc); + + // Different IDs are produced, but identical range lists + // will resolve to the same allocation. + let alloc_id = tcx.reserve_and_set_memory_alloc(const_alloc); + + let global_alloc = tcx.global_alloc(alloc_id); + let global_mem = global_alloc.unwrap_memory(); + bx.cx().static_addr_of(global_mem, None) + } +} + +/// A value containing pointers that can be retagged (this is either an [`OperandRef`] or a [`PlaceRef`]). +trait Retagable<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>: Copy { + /// When we are retagging an [`OperandRef`], we use an [`OperandRefBuilder`] + /// to replace retagged pointers (e.g `%new_ptr = __rust_retag_reg(%old_ptr, ..)`). + /// This becomes the "context" for the current retagging operation. No context is + /// needed for [`PlaceRef`], so this and all related operations become noops. + type Cx: RetagCx<'a, 'tcx, Bx, Self>; + + /// Creates a new context object that tracks updates to the current value. + fn retag_cx(&self) -> Self::Cx; + + /// Projects to the given variant of the value being retagged. 
+ fn project_downcast(self, bx: &mut Bx, idx: VariantIdx) -> Self; + + /// Projects to the given field of the value being retagged. + fn project_field(self, bx: &mut Bx, fx: &mut FunctionCx<'a, 'tcx, Bx>, idx: FieldIdx) -> Self; + + /// Returns the layout of the value being retagged. + fn layout(&self) -> TyAndLayout<'tcx>; + + /// Obtains an [`OperandRef`] from the current value being retagged. + fn load_operand(self, bx: &mut Bx) -> OperandRef<'tcx, Bx::Value>; + + /// Emits a retag and returns the retagged value. + fn retag(&self, bx: &mut Bx, info: RetagInfo) -> Self; +} + +/// A context used to collect the updates to a [`Retagable`] value. +trait RetagCx<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>, R: Retagable<'a, 'tcx, Bx>>: + Sized + Clone +{ + /// Unifies multiple retagged variants of a value. + fn phi(&mut self, bx: &mut Bx, branches: Vec<(Bx::BasicBlock, Self)>); + + /// Applies the updates that have been collected during traversal to the initial + /// "base" value being retagged. + fn resolve(&self, bx: &mut Bx) -> R; + + /// Updates the value stored at the given index with a new value produced + /// by a retag. 
+ fn retag(&mut self, cursor: Size, value: R); +} + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> RetagCx<'a, 'tcx, Bx, PlaceRef<'tcx, Bx::Value>> + for PlaceRef<'tcx, Bx::Value> +{ + #[inline] + fn phi(&mut self, _bx: &mut Bx, _branches: Vec<(Bx::BasicBlock, Self)>) {} + + #[inline] + fn resolve(&self, _bx: &mut Bx) -> PlaceRef<'tcx, Bx::Value> { + *self + } + + #[inline] + fn retag(&mut self, _cursor: Size, _value: PlaceRef<'tcx, Bx::Value>) {} +} + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> RetagCx<'a, 'tcx, Bx, OperandRef<'tcx, Bx::Value>> + for OperandRefBuilder<'tcx, Bx::Value> +{ + fn phi(&mut self, bx: &mut Bx, branches: Vec<(Bx::BasicBlock, Self)>) { + let operand_values = |val: OperandValue| -> Vec { + match val { + OperandValue::ZeroSized => vec![], + OperandValue::Ref(_) => { + bug!("Unresolved reference to place within operand: {val:?}") + } + OperandValue::Immediate(v) => vec![v], + OperandValue::Pair(a, b) => vec![a, b], + } + }; + + let mut incoming_values = [vec![], vec![]]; + + // We want to avoid emitting duplicate phi nodes, since not every component of an operand + // may have been affected by the retag. For each component, we track whether or not we have + // seen more than one value. + let mut found_different = [false, false]; + let mut sentinel = [None, None]; + + for (block, cursor) in branches.iter() { + let op = cursor.build(bx.cx()); + for (idx, val) in operand_values(op.val).drain(..).enumerate() { + // If we have already visited a value, see if its different than this one + if let Some(to_compare) = sentinel[idx] { + found_different[idx] |= to_compare != val; + } else { + // If this is the first value that we've seen, then + // store it for comparison on the next iteration. 
+ sentinel[idx] = Some(val); + } + incoming_values[idx].push((*block, val)) + } + } + + for (idx, incoming) in incoming_values.iter_mut().enumerate() { + if found_different[idx] { + if let Some((_, val)) = incoming.first() { + let phi_val = bx.phi(bx.cx().val_ty(*val), incoming.drain(..)); + let offset = Size::from_bytes(idx); + // A zero-offset resolves to the first field, while a + // nonzero offset resolves to the second field. + self.update_imm(offset, phi_val); + } + } + } + } + + fn resolve(&self, bx: &mut Bx) -> OperandRef<'tcx, Bx::Value> { + self.build(bx.cx()) + } + + fn retag(&mut self, cursor: Size, op: OperandRef<'tcx, Bx::Value>) { + let (pointer, _) = op.val.pointer_parts(); + // A zero-offset resolves to the first field, while a + // nonzero offset resolves to the second field. + self.update_imm(cursor, pointer); + } +} + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Retagable<'a, 'tcx, Bx> + for OperandRef<'tcx, Bx::Value> +{ + type Cx = OperandRefBuilder<'tcx, Bx::Value>; + + fn project_downcast(self, bx: &mut Bx, idx: VariantIdx) -> Self { + let mut operand = self; + operand.layout = operand.layout.for_variant(bx, idx); + operand + } + + fn project_field(self, bx: &mut Bx, fx: &mut FunctionCx<'a, 'tcx, Bx>, idx: FieldIdx) -> Self { + self.extract_field(fx, bx, idx.as_usize()) + } + + #[inline] + fn load_operand(self, _bx: &mut Bx) -> OperandRef<'tcx, Bx::Value> { + self + } + + #[inline] + fn layout(&self) -> TyAndLayout<'tcx> { + self.layout + } + + fn retag(&self, bx: &mut Bx, info: RetagInfo) -> OperandRef<'tcx, Bx::Value> { + let OperandRef { layout, val, move_annotation } = *self; + let (pointer, metadata) = val.pointer_parts(); + let retagged_val = bx.retag_reg(pointer, info); + let retagged_val = if let Some(metadata) = metadata { + OperandValue::Pair(retagged_val, metadata) + } else { + OperandValue::Immediate(retagged_val) + }; + OperandRef { layout, val: retagged_val, move_annotation } + } + + fn retag_cx(&self) -> 
OperandRefBuilder<'tcx, Bx::Value> { + OperandRefBuilder::from_existing(*self) + } +} + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> Retagable<'a, 'tcx, Bx> for PlaceRef<'tcx, Bx::Value> { + type Cx = Self; + + fn project_downcast(self, bx: &mut Bx, idx: VariantIdx) -> Self { + let mut place = self; + place.layout = place.layout.for_variant(bx, idx); + place + } + + fn project_field(self, bx: &mut Bx, _fx: &mut FunctionCx<'a, 'tcx, Bx>, idx: FieldIdx) -> Self { + self.project_field(bx, idx.as_usize()) + } + + fn load_operand(self, bx: &mut Bx) -> OperandRef<'tcx, Bx::Value> { + bx.load_operand(self) + } + + fn layout(&self) -> TyAndLayout<'tcx> { + self.layout + } + + fn retag(&self, bx: &mut Bx, info: RetagInfo) -> PlaceRef<'tcx, Bx::Value> { + bx.retag_mem(self.val.llval, info); + *self + } + + #[inline] + fn retag_cx(&self) -> Self::Cx { + *self + } +} + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { + /// Retags the pointers within an [`OperandRef`]. + pub(crate) fn codegen_retag_operand( + &mut self, + bx: &mut Bx, + op: OperandRef<'tcx, Bx::Value>, + is_fn_entry: bool, + ) -> OperandRef<'tcx, Bx::Value> { + if let OperandValue::Ref(place_ref) = op.val { + let place_ref = place_ref.with_type(op.layout); + self.codegen_retag_place(bx, place_ref, is_fn_entry); + } else if let Some(plan) = RetagPlan::::build(bx, self, op.layout, is_fn_entry) { + return self.retag(bx, plan, op); + } + op + } + + /// Retags the pointers within a [`PlaceRef`]. + pub(crate) fn codegen_retag_place( + &mut self, + bx: &mut Bx, + place_ref: PlaceRef<'tcx, Bx::Value>, + is_fn_entry: bool, + ) { + if let Some(plan) = RetagPlan::::build(bx, self, place_ref.layout, is_fn_entry) { + self.retag(bx, plan, place_ref); + } + } + + fn retag>( + &mut self, + bx: &mut Bx, + plan: RetagPlan, + value: R, + ) -> R { + // Create a context for the retag. + let mut retag_cx = value.retag_cx(); + // Traverse through the retagable value, storing updates within the context. 
+ self.retag_inner(bx, &mut retag_cx, &plan, value, Size::ZERO); + // Apply updates from the context, producing the final value. + retag_cx.resolve(bx) + } + + fn retag_inner>( + &mut self, + bx: &mut Bx, + retag_cx: &mut R::Cx, + plan: &RetagPlan, + value: R, + cursor: Size, + ) { + match plan { + RetagPlan::EmitRetag(info) => { + let retagged_value = value.retag(bx, *info); + retag_cx.retag(cursor, retagged_value) + } + RetagPlan::Recurse { fields, variants } => { + for (ix, field_plan) in fields.iter() { + let field_cursor = value.layout().fields.offset((*ix).as_usize()) + cursor; + let field_value = value.project_field(bx, self, *ix); + self.retag_inner(bx, retag_cx, field_plan, field_value, field_cursor); + } + + if !variants.is_empty() { + let operand = value.load_operand(bx); + let discr_ty = value.layout().ty.discriminant_ty(bx.tcx()); + let discr_val = operand.codegen_get_discr(self, bx, discr_ty); + + // If the discriminant is a constant, then we can just downcast and avoid branching. + if let Some(val) = bx.const_to_opt_u128(discr_val, false) { + let ix = VariantIdx::from_usize(val as usize); + let variant_value = value.project_downcast(bx, ix); + if let Some(variant_plan) = variants.get(&ix) { + self.retag_inner(bx, retag_cx, variant_plan, variant_value, cursor); + } + } else { + // Otherwise, we need a block for each variant. + let root_block = bx.llbb(); + let mut variant_edges: Vec<(u128, Bx::BasicBlock)> = vec![]; + + // Each variant's block should arrive at the same terminator. + let terminator_block = bx.append_sibling_block("v_t"); + + // Each variant may update the current value in different ways. We collect a value context + // for each block, and then merge these contexts in the terminator, producing one or more + // phi nodes for operands. 
+ let mut updates: Vec<(Bx::BasicBlock, R::Cx)> = + vec![(root_block, (*retag_cx).clone())]; + + for (ix, variant_plan) in variants.iter() { + let variant_discr_val = value + .layout() + .ty + .discriminant_for_variant(bx.tcx(), *ix) + .expect("Invalid variant.") + .val; + + let variant_block = bx.append_sibling_block("v"); + bx.switch_to_block(variant_block); + + let variant_value = value.project_downcast(bx, *ix); + let mut variant_cx = (*retag_cx).clone(); + + self.retag_inner( + bx, + &mut variant_cx, + variant_plan, + variant_value, + cursor, + ); + // If the variant contains another variant, then the current block + // will be different than the one that we created above. We want this block to jump + // to the terminator block. + updates.push((bx.llbb(), variant_cx)); + bx.br(terminator_block); + + // We need to record the new variant block that we created so that we can switch + // to it from the root block. + variant_edges.push((variant_discr_val, variant_block)) + } + + bx.switch_to_block(root_block); + bx.switch(discr_val, terminator_block, variant_edges.drain(..)); + bx.switch_to_block(terminator_block); + retag_cx.phi(bx, updates); + } + } + } + } + } +} + +/// A visitor trait for collecting the ranges within a layout that satisfy a given predicate. +trait PerByteTracking<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> { + /// Indicates that we can exclude the range of bytes that contains this type. + /// This tells us that [`PerByteTracking::contains`] is false for every + /// field or variant without having to recurse any further into the layout of the type. + fn excludes(bx: &mut Bx, ty: Ty<'tcx>) -> bool; + + /// Indicates that we should include the range containing this type. 
+ fn contains(bx: &mut Bx, ty: Ty<'tcx>) -> bool; + + fn visit_layout( + bx: &mut Bx, + collector: &mut RangeCollector, + layout: TyAndLayout<'tcx>, + base_offset: Size, + imprecise: bool, + ) { + if Self::excludes(bx, layout.ty) { + return; + } + + // Optionally, we can treat a type that contains the type we are looking for + // as being equivalent to that type. For example, we would treat an entire type + // as interior mutable if it contains an `UnsafeCell` at any offset. + if imprecise { + return collector.extend(layout.size); + } + + let union_or_primitive = + matches!(layout.fields, FieldsShape::Union(..) | FieldsShape::Primitive); + let has_multiple_variants = matches!(layout.variants, Variants::Multiple { .. }); + + if Self::contains(bx, layout.ty) || union_or_primitive || has_multiple_variants { + collector.extend(layout.size); + } else { + let indices: Vec = match &layout.fields { + FieldsShape::Union(_) | FieldsShape::Primitive => vec![], + FieldsShape::Arbitrary { in_memory_order, .. } => { + in_memory_order.iter().copied().collect() + } + FieldsShape::Array { .. } => { + layout.fields.index_by_increasing_offset().map(FieldIdx::from_usize).collect() + } + }; + for idx in indices { + // We need to find the offset for this field relative + // to the entire type, not just the current aggregate + // that we are visiting here. + let field_offset = layout.fields.offset(idx.as_usize()); + let layout_offset = field_offset + base_offset; + collector.advance(layout_offset); + + let field = layout.field(bx, idx.as_usize()); + Self::visit_layout(bx, collector, field, layout_offset, imprecise); + } + } + } + /// Collects the ranges within a type that satisfy the given predicate. A range is a + /// pair of [`Size`], representing the offset and width, respectively. 
+ fn collect(bx: &mut Bx, layout: TyAndLayout<'tcx>, imprecise: bool) -> Vec<[Size; 2]> { + let mut collector = RangeCollector::default(); + Self::visit_layout(bx, &mut collector, layout, Size::ZERO, imprecise); + collector.collect() + } +} + +/// Collects the ranges within a type that are covered by `UnsafeCell`. +struct UnsafeCellRanges; + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> PerByteTracking<'a, 'tcx, Bx> for UnsafeCellRanges { + fn excludes(bx: &mut Bx, ty: Ty<'tcx>) -> bool { + ty.is_freeze(bx.tcx(), bx.cx().typing_env()) + } + + fn contains(bx: &mut Bx, ty: Ty<'tcx>) -> bool { + let tcx = bx.tcx(); + match ty.kind() { + ty::Adt(adt, _) => Some(adt.did()) == tcx.lang_items().unsafe_cell_type(), + _ => false, + } + } +} + +/// Collects the ranges within a type that are covered by `UnsafePinned`. +struct UnsafePinnedRanges; + +impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> PerByteTracking<'a, 'tcx, Bx> for UnsafePinnedRanges { + fn excludes(bx: &mut Bx, ty: Ty<'tcx>) -> bool { + ty.is_unpin(bx.tcx(), bx.typing_env()) && ty.is_unsafe_unpin(bx.tcx(), bx.typing_env()) + } + + fn contains(bx: &mut Bx, ty: Ty<'tcx>) -> bool { + let tcx = bx.tcx(); + match ty.kind() { + ty::Adt(adt, _) => Some(adt.did()) == tcx.lang_items().unsafe_pinned_type(), + _ => false, + } + } +} + +/// Helper for collecting a list of ranges within the size of a type, +/// such that adjacent ranges are merged. +struct RangeCollector { + /// The start of the currently accumulating + /// range that satisfies the predicate. + cursor: Size, + + /// The size of the currently accumulating range + /// that satisfies the predicate. + acc_offset: Size, + + /// A list of accumulated ranges. + ranges: Vec<[Size; 2]>, +} + +impl Default for RangeCollector { + fn default() -> Self { + Self { cursor: Size::ZERO, acc_offset: Size::ZERO, ranges: vec![] } + } +} + +impl RangeCollector { + /// Extend the current range. 
+ fn extend(&mut self, size: Size) { + self.acc_offset += size; + } + + /// Move the collector forward to the given offset, recording the + /// current range if this leaves a gap. + fn advance(&mut self, next_cursor: Size) { + assert!(next_cursor >= self.cursor + self.acc_offset); + if self.cursor + self.acc_offset != next_cursor { + if self.acc_offset > Size::ZERO { + self.ranges.push([self.cursor, self.acc_offset]); + self.acc_offset = Size::ZERO; + } + self.cursor = next_cursor; + } + } + + /// Consumes the collector, returning all recorded ranges. + fn collect(mut self) -> Vec<[Size; 2]> { + if self.acc_offset > Size::ZERO { + self.ranges.push([self.cursor, self.acc_offset]); + } + self.ranges + } +} diff --git a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs index f9e4a6a352bac..43897ba9c152b 100644 --- a/compiler/rustc_codegen_ssa/src/mir/rvalue.rs +++ b/compiler/rustc_codegen_ssa/src/mir/rvalue.rs @@ -515,7 +515,12 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let mk_ref = move |tcx: TyCtxt<'tcx>, ty: Ty<'tcx>| { Ty::new_ref(tcx, tcx.lifetimes.re_erased, ty, bk.to_mutbl_lossy()) }; - self.codegen_place_to_pointer(bx, place, mk_ref) + let mut op = self.codegen_place_to_pointer(bx, place, mk_ref); + + if self.cx.tcx().sess.opts.unstable_opts.codegen_emit_retag.is_some() { + op = self.codegen_retag_operand(bx, op, false); + }; + op } mir::Rvalue::RawPtr(kind, place) => { diff --git a/compiler/rustc_codegen_ssa/src/mir/statement.rs b/compiler/rustc_codegen_ssa/src/mir/statement.rs index bc3ffa24d5289..2002d5c449b0f 100644 --- a/compiler/rustc_codegen_ssa/src/mir/statement.rs +++ b/compiler/rustc_codegen_ssa/src/mir/statement.rs @@ -1,4 +1,4 @@ -use rustc_middle::mir::{self, NonDivergingIntrinsic, StmtDebugInfo}; +use rustc_middle::mir::{self, NonDivergingIntrinsic, Rvalue, StmtDebugInfo}; use rustc_middle::{bug, span_bug, ty}; use tracing::instrument; @@ -10,11 +10,22 @@ impl<'a, 'tcx, Bx: 
BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { pub(crate) fn codegen_statement(&mut self, bx: &mut Bx, statement: &mir::Statement<'tcx>) { self.codegen_stmt_debuginfos(bx, &statement.debuginfos); self.set_debug_loc(bx, statement.source_info); + match statement.kind { mir::StatementKind::Assign(box (ref place, ref rvalue)) => { + let needs_retag = bx.tcx().sess.opts.unstable_opts.codegen_emit_retag.is_some() + // We're not really interested in stores to "outside" locations + && !(place.is_indirect_first_projection() + // `Ref` has its own internal retagging + || matches!(rvalue, Rvalue::Ref(..))); if let Some(index) = place.as_local() { match self.locals[index] { - LocalRef::Place(cg_dest) => self.codegen_rvalue(bx, cg_dest, rvalue), + LocalRef::Place(cg_dest) => { + self.codegen_rvalue(bx, cg_dest, rvalue); + if needs_retag { + self.codegen_retag_place(bx, cg_dest, false); + } + } LocalRef::UnsizedPlace(cg_indirect_dest) => { let ty = cg_indirect_dest.layout.ty; span_bug!( @@ -24,7 +35,10 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { ); } LocalRef::PendingOperand => { - let operand = self.codegen_rvalue_operand(bx, rvalue); + let mut operand = self.codegen_rvalue_operand(bx, rvalue); + if needs_retag { + operand = self.codegen_retag_operand(bx, operand, false); + } self.overwrite_local(index, LocalRef::Operand(operand)); self.debug_introduce_local(bx, index); } @@ -36,15 +50,21 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { rvalue ); } - // If the type is zero-sized, it's already been set here, // but we still need to make sure we codegen the operand - self.codegen_rvalue_operand(bx, rvalue); + // and emit a retag. 
+ let operand = self.codegen_rvalue_operand(bx, rvalue); + if needs_retag { + self.codegen_retag_operand(bx, operand, false); + } } } } else { let cg_dest = self.codegen_place(bx, place.as_ref()); self.codegen_rvalue(bx, cg_dest, rvalue); + if needs_retag { + self.codegen_retag_place(bx, cg_dest, false); + } } } mir::StatementKind::SetDiscriminant { box ref place, variant_index } => { diff --git a/compiler/rustc_codegen_ssa/src/traits/builder.rs b/compiler/rustc_codegen_ssa/src/traits/builder.rs index c222aef4574bf..85f16c5df9ff5 100644 --- a/compiler/rustc_codegen_ssa/src/traits/builder.rs +++ b/compiler/rustc_codegen_ssa/src/traits/builder.rs @@ -103,6 +103,12 @@ pub trait BuilderMethods<'a, 'tcx>: self.cond_br(cond, then_llbb, else_llbb) } + fn phi( + &mut self, + ty: Self::Type, + cases: impl ExactSizeIterator, + ) -> Self::Value; + fn switch( &mut self, v: Self::Value, diff --git a/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs b/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs index d1e6436f6b1eb..809ad6a36a6a3 100644 --- a/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs +++ b/compiler/rustc_codegen_ssa/src/traits/intrinsic.rs @@ -2,6 +2,7 @@ use rustc_middle::ty; use rustc_span::Span; use super::BackendTypes; +use crate::RetagInfo; use crate::mir::operand::OperandRef; use crate::mir::place::PlaceRef; @@ -49,4 +50,8 @@ pub trait IntrinsicCallBuilderMethods<'tcx>: BackendTypes { /// Trait method used to inject `va_end` on the "spoofed" `VaList` before /// Rust defined C-variadic functions return. fn va_end(&mut self, val: Self::Value) -> Self::Value; + /// Trait method used to retag a pointer stored within the given place. + fn retag_mem(&mut self, place: Self::Value, info: RetagInfo); + /// Trait method used to retag a pointer that has been loaded into a (virtual) register. 
+ fn retag_reg(&mut self, ptr: Self::Value, info: RetagInfo) -> Self::Value; } diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index e54f68b6391e9..cdee5abc15c76 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -775,6 +775,7 @@ fn test_unstable_options_tracking_hash() { }) ); tracked!(codegen_backend, Some("abc".to_string())); + tracked!(codegen_emit_retag, Some(CodegenRetagOptions::default())); tracked!( coverage_options, CoverageOptions { @@ -815,6 +816,7 @@ fn test_unstable_options_tracking_hash() { tracked!(link_directives, false); tracked!(link_only, true); tracked!(lint_llvm_ir, true); + tracked!(llvm_emit_lifetime_markers, true); tracked!(llvm_module_flag, vec![("bar".to_string(), 123, "max".to_string())]); tracked!(llvm_plugins, vec![String::from("plugin_name")]); tracked!(llvm_writable, true); diff --git a/compiler/rustc_session/src/config.rs b/compiler/rustc_session/src/config.rs index 63806cbc701e6..82b06c885aed5 100644 --- a/compiler/rustc_session/src/config.rs +++ b/compiler/rustc_session/src/config.rs @@ -201,12 +201,20 @@ pub enum Offload { Test, } +/// The different settings that the `-Z codegen-emit-retag` flag can have. +#[derive(Copy, Clone, PartialEq, Hash, Debug, Encodable, Decodable, Default)] +pub struct CodegenRetagOptions { + /// Track interior mutable data on the level of references, instead of on the byte level. + pub no_precise_im: bool, + /// Track `UnsafePinned` data on the level of references, instead of on the byte level. + pub no_precise_pin: bool, +} + /// The different settings that the `-Z autodiff` flag can have. 
#[derive(Clone, PartialEq, Hash, Debug, Encodable, Decodable)] pub enum AutoDiff { /// Enable the autodiff opt pipeline Enable, - /// Print TypeAnalysis information PrintTA, /// Print TypeAnalysis information for a specific function @@ -3068,12 +3076,13 @@ pub(crate) mod dep_tracking { }; use super::{ - AnnotateMoves, AutoDiff, BranchProtection, CFGuard, CFProtection, CoverageOptions, - CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, FunctionReturn, - InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, LocationDetail, - LtoCli, MirStripDebugInfo, NextSolverConfig, Offload, OptLevel, OutFileName, OutputType, - OutputTypes, PatchableFunctionEntry, Polonius, ResolveDocLinks, SourceFileHashAlgorithm, - SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, WasiExecModel, + AnnotateMoves, AutoDiff, BranchProtection, CFGuard, CFProtection, CodegenRetagOptions, + CoverageOptions, CrateType, DebugInfo, DebugInfoCompression, ErrorOutputType, FmtDebug, + FunctionReturn, InliningThreshold, InstrumentCoverage, InstrumentXRay, LinkerPluginLto, + LocationDetail, LtoCli, MirStripDebugInfo, NextSolverConfig, Offload, OptLevel, + OutFileName, OutputType, OutputTypes, PatchableFunctionEntry, Polonius, ResolveDocLinks, + SourceFileHashAlgorithm, SplitDwarfKind, SwitchWithOptPath, SymbolManglingVersion, + WasiExecModel, }; use crate::lint; use crate::utils::NativeLib; @@ -3177,6 +3186,7 @@ pub(crate) mod dep_tracking { InliningThreshold, FunctionReturn, Align, + CodegenRetagOptions ); impl DepTrackingHash for (T1, T2) diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index a4e9a89a78c7a..c181e5ba9859f 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -781,6 +781,8 @@ mod desc { pub(crate) const parse_linker_flavor: &str = ::rustc_target::spec::LinkerFlavorCli::one_of(); pub(crate) const parse_dump_mono_stats: &str = "`markdown` (default) or `json`"; 
pub(crate) const parse_instrument_coverage: &str = parse_bool; + pub(crate) const parse_codegen_retag_options: &str = + "either no value or a comma-separated list of settings: `no-precise-im`, `no-precise-pin`"; pub(crate) const parse_coverage_options: &str = "`block` | `branch` | `condition`"; pub(crate) const parse_instrument_xray: &str = "either a boolean (`yes`, `no`, `on`, `off`, etc), or a comma separated list of settings: `always` or `never` (mutually exclusive), `ignore-loops`, `instruction-threshold=N`, `skip-entry`, `skip-exit`"; pub(crate) const parse_unpretty: &str = "`string` or `string=string`"; @@ -1297,6 +1299,7 @@ pub mod parse { None | Some("none") => CFProtection::None, Some("branch") => CFProtection::Branch, Some("return") => CFProtection::Return, + Some("full") => CFProtection::Full, Some(_) => return false, }; @@ -1521,6 +1524,26 @@ pub mod parse { true } + pub(crate) fn parse_codegen_retag_options( + slot: &mut Option, + v: Option<&str>, + ) -> bool { + let mut opts = CodegenRetagOptions::default(); + for option in v.into_iter().flat_map(|v| v.split(',')) { + match option { + "no-precise-im" => { + opts.no_precise_im = true; + } + "no-precise-pin" => { + opts.no_precise_pin = true; + } + _ => return false, + } + } + *slot = Some(opts); + true + } + pub(crate) fn parse_coverage_options(slot: &mut CoverageOptions, v: Option<&str>) -> bool { let Some(v) = v else { return true }; @@ -2242,6 +2265,8 @@ options! 
{ "hash algorithm of source files used to check freshness in cargo (`blake3` or `sha256`)"), codegen_backend: Option = (None, parse_opt_string, [TRACKED], "the backend to use"), + codegen_emit_retag: Option = (None, parse_codegen_retag_options, [TRACKED], + "emit experimental retag intrinsic calls in generated code (default: no)"), codegen_source_order: bool = (false, parse_bool, [UNTRACKED], "emit mono items in the order of spans in source files (default: no)"), contract_checks: Option = (None, parse_opt_bool, [TRACKED], diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index a9e7f1503b9ca..8c0fb0165bcd3 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -539,6 +539,8 @@ impl Session { // HWAddressSanitizer and KernelHWAddressSanitizer will use lifetimes to detect use after // scope bugs in the future. || self.sanitizers().intersects(SanitizerSet::ADDRESS | SanitizerSet::KERNELADDRESS | SanitizerSet::MEMORY | SanitizerSet::HWADDRESS | SanitizerSet::KERNELHWADDRESS) + // Lifetimes are necessary for retagging semantics. + || self.opts.unstable_opts.codegen_emit_retag.is_some() } pub fn diagnostic_width(&self) -> usize { diff --git a/tests/codegen-llvm/retags/retags.rs b/tests/codegen-llvm/retags/retags.rs new file mode 100644 index 0000000000000..ce17203f76e6e --- /dev/null +++ b/tests/codegen-llvm/retags/retags.rs @@ -0,0 +1,205 @@ +// Verifies that retag intrinsics show up as expected with `-Zcodegen-emit-retag`. 
+//@ compile-flags: -Zcodegen-emit-retag -Copt-level=0 + +#![crate_type = "lib"] +#![feature(rustc_attrs)] +#![feature(allocator_api)] + +use std::marker::PhantomPinned; +pub struct NotUnpin { + _field: i32, + _marker: PhantomPinned, +} + +pub struct UnsafeInner { + _field: std::cell::UnsafeCell, +} + +// CHECK-LABEL: @readonly_borrow(ptr align {{.*}} %0) +#[no_mangle] +pub fn readonly_borrow(_: &i32) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @mutable_borrow(ptr align {{.*}} %0) +#[no_mangle] +pub fn mutable_borrow(_: &mut i32) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @option_borrow(ptr align {{.*}} %0) +#[no_mangle] +pub fn option_borrow(_x: Option<&i32>) { + // CHECK: start: + // CHECK: switch i64 %{{.+}}, label %[[V_T:.+]] [ + // CHECK-NEXT: i64 1, label %[[V:.+]] + // CHECK-NEXT: ] + // CHECK: [[V_T]]: + // CHECK: phi ptr [ %0, %start ], [ %[[R:.+]], %[[V]] ] + // CHECK: [[V]]: + // CHECK-NEXT: %[[R]] = call ptr @__rust_retag_reg(ptr %0 + // CHECK: br label %[[V_T]] +} + +// Retagging is a no-op for all `!Unpin`. 
+// CHECK-LABEL: @readonly_notunpin_borrow(ptr align {{.*}} %0 +#[no_mangle] +pub fn readonly_notunpin_borrow(_: &NotUnpin) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @mutable_notunpin_borrow +#[no_mangle] +pub fn mutable_notunpin_borrow(_: &mut NotUnpin) { + // CHECK-NOT: call {{ptr|void}} @__rust_retag +} + +enum E { + A(&'static i8), + B(&'static i32), + C(&'static i64), +} + +// CHECK-LABEL: @multiple_variants(i64 %_x.0, ptr %0 +#[no_mangle] +pub fn multiple_variants(_x: E) { + // CHECK: start: + // CHECK-NEXT: switch i64 %_x.0, label %[[V_T:.+]] [ + // CHECK-NEXT: i64 0, label %[[V0:.+]] + // CHECK-NEXT: i64 1, label %[[V1:.+]] + // CHECK-NEXT: i64 2, label %[[V2:.+]] + // CHECK-NEXT: ] + // CHECK: [[V_T]]: + // CHECK-NEXT: phi ptr [ %0, %start ], [ %[[R0:.+]], %[[V0]] ], [ %[[R1:.+]], %[[V1]] ], [ %[[R2:.+]], %[[V2]] ] + // CHECK: [[V0]]: + // CHECK-NEXT: %[[R0]] = call ptr @__rust_retag_reg(ptr %0, i64 1 + // CHECK: [[V1]]: + // CHECK-NEXT: %[[R1]] = call ptr @__rust_retag_reg(ptr %0, i64 4 + // CHECK: [[V2]]: + // CHECK-NEXT: %[[R2]] = call ptr @__rust_retag_reg(ptr %0, i64 8 +} + +// CHECK-LABEL: @_box(ptr align {{.*}} %0 +#[no_mangle] +pub fn _box(x: Box) -> Box { + // CHECK: start: + // CHECK-NEXT: %[[R1:.+]] = call ptr @__rust_retag_reg(ptr %0 + // CHECK-NEXT: %[[R2:.+]] = call ptr @__rust_retag_reg(ptr %[[R1]] + // CHECK-NEXT: ret ptr %[[R2]] + x +} +// If a `Box` comes from the global allocator, then its innermost pointer +// should not be retagged, but we still want to retag the allocator. 
+// CHECK-LABEL: @_box_custom(ptr align {{.*}} %x.0, ptr %0) +#[no_mangle] +pub fn _box_custom(x: Box) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 + drop(x) +} + +// CHECK-LABEL: @slice(ptr %0 +#[no_mangle] +pub fn slice(_: &[u8]) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @mutable_slice(ptr %0 +#[no_mangle] +pub fn mutable_slice(_: &mut [u8]) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @unsafe_slice(ptr align {{.*}} %0 +#[no_mangle] +pub fn unsafe_slice(_: &[UnsafeInner]) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @str(ptr %0, i64 %_1.1) +#[no_mangle] +pub fn str(_: &[u8]) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @return_slice(ptr align {{.*}} %0, i64 %x.1) +#[no_mangle] +pub fn return_slice(x: &[u16]) -> &[u16] { + // CHECK: start: + // CHECK-NEXT: %[[R1:.+]] = call ptr @__rust_retag_reg(ptr %0 + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %[[R1]] + x +} + +// CHECK-LABEL: @trait_borrow(ptr %0, ptr align {{.+}} %_1.1) +#[no_mangle] +pub fn trait_borrow(_: &dyn Drop) { + // CHECK: start: + // CHECK-NEXT: call ptr @__rust_retag_reg(ptr %0 +} + +// CHECK-LABEL: @trait_mutable_borrow +#[no_mangle] +pub fn trait_mutable_borrow(_: &mut dyn Drop) { + // CHECK-NOT: call {{ptr|void}} @__rust_retag +} + +// CHECK-LABEL: @option_trait_borrow(ptr %0, ptr %x.1) +#[no_mangle] +pub fn option_trait_borrow(x: Option<&dyn Drop>) { + // CHECK: start: + // CHECK: switch i64 %{{.+}}, label %v_t [ + // CHECK-NEXT: i64 1, label %v + // CHECK-NEXT: ] + // CHECK: v_t: + // CHECK-NEXT: phi ptr [ %0, %start ], [ %[[R:.+]], %v ] + // CHECK: v: + // CHECK-NEXT: %[[R]] = call ptr @__rust_retag_reg(ptr %0 + // CHECK: br label %v_t +} + +//CHECK-LABEL: @retag_mixed +#[no_mangle] +fn retag_mixed() { + // CHECK: %{{.+}} = call ptr @__rust_retag_reg(ptr %{{.+}}, i64 
4 + // CHECK: call void @__rust_retag_mem(ptr %target_alias, i64 4 + // CHECK-NEXT: %{{.+}} = call ptr @__rust_retag_reg(ptr %target_alias, i64 8 + let target = &mut 42; + let mut target_alias = &42; + retarget(&mut target_alias); + + #[no_mangle] + fn retarget(_: &mut &u32) {} +} + +//CHECK-LABEL: @option_trait_borrow_mut +#[no_mangle] +pub fn option_trait_borrow_mut(_: Option<&mut dyn Drop>) { + // CHECK-NOT: call {{ptr|void}} @__rust_retag +} + +//CHECK-LABEL: @trait_box +#[no_mangle] +pub fn trait_box(_: Box) { + // CHECK-NOT: call {{ptr|void}} @__rust_retag +} + +//CHECK-LABEL: @trait_mutref +#[no_mangle] +pub fn trait_mutref(_: &mut (dyn Drop + Unpin)) { + // CHECK-NOT: call {{ptr|void}} @__rust_retag +} + +//CHECK-LABEL: @trait_option +#[no_mangle] +pub fn trait_option(x: Option>) -> Option> { + // CHECK-NOT: call {{ptr|void}} @__rust_retag + x +}