diff --git a/Cargo.lock b/Cargo.lock index 8ac10d7d4fb53..cd925bca4a79c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4372,6 +4372,7 @@ dependencies = [ "rustc_errors", "rustc_hir", "rustc_index", + "rustc_lint_defs", "rustc_macros", "rustc_middle", "rustc_session", @@ -4379,6 +4380,7 @@ dependencies = [ "rustc_target", "serde", "serde_json", + "smallvec", "tracing", ] diff --git a/compiler/rustc_borrowck/src/diagnostics/region_name.rs b/compiler/rustc_borrowck/src/diagnostics/region_name.rs index 963f902b71fcb..c9c5bc086b45b 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_name.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_name.rs @@ -730,6 +730,10 @@ impl<'tcx> MirBorrowckCtxt<'_, '_, 'tcx> { // to search anything here. } + (GenericArgKind::Outlives(_), _) => { + // Outlives args are metadata-only; no lifetime to find here. + } + ( GenericArgKind::Lifetime(_) | GenericArgKind::Type(_) diff --git a/compiler/rustc_borrowck/src/lib.rs b/compiler/rustc_borrowck/src/lib.rs index 822fe1e58ebba..f9a94e80c70a6 100644 --- a/compiler/rustc_borrowck/src/lib.rs +++ b/compiler/rustc_borrowck/src/lib.rs @@ -86,6 +86,7 @@ mod places_conflict; mod polonius; mod prefixes; mod region_infer; +mod region_summary; mod renumber; mod root_cx; mod session_diagnostics; @@ -104,38 +105,88 @@ impl<'tcx> TyCtxtConsts<'tcx> { } pub fn provide(providers: &mut Providers) { - *providers = Providers { mir_borrowck, ..*providers }; + *providers = Providers { mir_borrowck, borrowck_result, borrowck_region_summary, ..*providers }; } -/// Provider for `query mir_borrowck`. Unlike `typeck`, this must -/// only be called for typeck roots which *similar* to `typeck` will -/// then borrowck all nested bodies as well. -fn mir_borrowck( - tcx: TyCtxt<'_>, - def: LocalDefId, -) -> Result<&FxIndexMap>, ErrorGuaranteed> { +/// Shared core query: runs borrowck and collects both hidden types +/// and region summaries into a single `BorrowckResult`. 
+fn borrowck_result<'tcx>(tcx: TyCtxt<'tcx>, def: LocalDefId) -> &'tcx BorrowckResult<'tcx> { assert!(!tcx.is_typeck_child(def.to_def_id())); let (input_body, _) = tcx.mir_promoted(def); - debug!("run query mir_borrowck: {}", tcx.def_path_str(def)); + debug!("run query borrowck_result: {}", tcx.def_path_str(def)); - // We should eagerly check stalled coroutine obligations from HIR typeck. + // Eagerly check stalled coroutine obligations from HIR typeck. // Not doing so leads to silent normalization failures later, which will // fail to register opaque types in the next solver. - tcx.ensure_result().check_coroutine_obligations(def)?; + // + // This must come after `mir_promoted` so that query-cycle detection + // follows the same dependency path as the original `mir_borrowck`. + if let Err(guar) = tcx.ensure_result().check_coroutine_obligations(def) { + return tcx.arena.alloc(BorrowckResult { + hidden_types: Err(guar), + region_summaries: Default::default(), + }); + } let input_body: &Body<'_> = &input_body.borrow(); if let Some(guar) = input_body.tainted_by_errors { debug!("Skipping borrowck because of tainted body"); - Err(guar) - } else if input_body.should_skip() { + return tcx.arena.alloc(BorrowckResult { + hidden_types: Err(guar), + region_summaries: Default::default(), + }); + } + if input_body.should_skip() { debug!("Skipping borrowck because of injected body"); - let opaque_types = Default::default(); - Ok(tcx.arena.alloc(opaque_types)) - } else { - let mut root_cx = BorrowCheckRootCtxt::new(tcx, def, None); - root_cx.do_mir_borrowck(); - root_cx.finalize() + return tcx.arena.alloc(BorrowckResult { + hidden_types: Ok(Default::default()), + region_summaries: Default::default(), + }); + } + + let mut root_cx = BorrowCheckRootCtxt::new(tcx, def, None); + root_cx.do_mir_borrowck(); + root_cx.finalize() +} + +/// Provider for `query mir_borrowck`. Extracts hidden types from +/// the shared `borrowck_result`. 
+fn mir_borrowck( + tcx: TyCtxt<'_>, + def: LocalDefId, +) -> Result<&FxIndexMap>, ErrorGuaranteed> { + assert!(!tcx.is_typeck_child(def.to_def_id())); + debug!("run query mir_borrowck: {}", tcx.def_path_str(def)); + + let result = tcx.borrowck_result(def); + match &result.hidden_types { + Ok(t) => Ok(t), + Err(g) => Err(*g), + } +} + +/// Provider for `query borrowck_region_summary`. Extracts region summary +/// for a specific def_id from the shared `borrowck_result`. +/// +/// Returns a default (empty) summary for items without borrowck (shims, +/// constructors, trait method declarations). For shims, the collector +/// handles the transparent-forwarder case in `compose_all_through_chain`. +fn borrowck_region_summary<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> BorrowckRegionSummary { + let root = tcx.typeck_root_def_id(def_id.to_def_id()).expect_local(); + // Constructors have synthetic MIR, trivial consts have no MIR — skip them. + // Also bail for items without bodies (e.g. trait method declarations like + // FnOnce::call_once) which cannot be borrowck'd. + if matches!(tcx.def_kind(root), rustc_hir::def::DefKind::Ctor(..)) || tcx.is_trivial_const(root) + { + return BorrowckRegionSummary::default(); } + + if tcx.hir_node(tcx.local_def_id_to_hir_id(root)).body_id().is_none() { + return BorrowckRegionSummary::default(); + } + + let result = tcx.borrowck_result(root); + result.region_summaries.get(&def_id).cloned().unwrap_or_default() } /// Data propagated to the typeck parent by nested items. @@ -436,6 +487,10 @@ fn borrowck_check_region_constraints<'tcx>( polonius_context, ); + // Compute region summary for this function body. + let region_summary = region_summary::compute_region_summary(®ioncx, body, tcx); + root_cx.add_region_summary(def, region_summary); + // Dump MIR results into a file, if that is enabled. This lets us // write unit-tests, as well as helping with debugging. 
nll::dump_nll_mir(&infcx, body, ®ioncx, &opt_closure_req, &borrow_set); @@ -908,6 +963,7 @@ impl<'a, 'tcx> ResultsVisitor<'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<'a, unwind: _, call_source: _, fn_span: _, + call_id: _, } => { self.consume_operand(loc, (func, span), state); for arg in args { @@ -915,7 +971,7 @@ impl<'a, 'tcx> ResultsVisitor<'tcx, Borrowck<'a, 'tcx>> for MirBorrowckCtxt<'a, } self.mutate_place(loc, (*destination, span), Deep, state); } - TerminatorKind::TailCall { func, args, fn_span: _ } => { + TerminatorKind::TailCall { func, args, fn_span: _, call_id: _ } => { self.consume_operand(loc, (func, span), state); for arg in args { self.consume_operand(loc, (&arg.node, arg.span), state); diff --git a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs index 439aa1a91e068..22cb8250da282 100644 --- a/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs +++ b/compiler/rustc_borrowck/src/polonius/legacy/loan_invalidations.rs @@ -125,6 +125,7 @@ impl<'a, 'tcx> Visitor<'tcx> for LoanInvalidationsGenerator<'a, 'tcx> { unwind: _, call_source: _, fn_span: _, + call_id: _, } => { self.consume_operand(location, func); for arg in args { diff --git a/compiler/rustc_borrowck/src/region_summary.rs b/compiler/rustc_borrowck/src/region_summary.rs new file mode 100644 index 0000000000000..15bccd59c9cbd --- /dev/null +++ b/compiler/rustc_borrowck/src/region_summary.rs @@ -0,0 +1,440 @@ +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; +use rustc_data_structures::unord::UnordMap; +use rustc_hir::def_id::DefId; +use rustc_index::Idx; +use rustc_index::bit_set::{DenseBitSet, SparseBitMatrix}; +use rustc_middle::mir::*; +use rustc_middle::ty::{self, RegionVid, TyCtxt, TypeVisitable, TypeVisitor}; + +use crate::constraints::ConstraintSccIndex; +use crate::region_infer::RegionInferenceContext; + +/// TypeVisitor that walks a generic arg tree in 
depth-first order, assigning a
+/// walk-order index to every region encountered. Only `ReVar` regions produce
+/// entries in the output mapping; other region kinds (e.g. `ReBound` from
+/// higher-ranked types) advance the index without producing entries, keeping
+/// positions aligned with a consumer's walk over the same structure after
+/// monomorphization.
+struct RegionIndexCollector<'a, 'tcx> {
+    /// Next walk-order index to assign; bumped on every region visited.
+    walk_index: usize,
+    /// Walk-order index -> `RegionVid` (both as raw `u32`s).
+    mappings: &'a mut UnordMap<u32, u32>,
+    /// Every vid seen so far, in first-seen order.
+    relevant_vids: &'a mut FxIndexSet<RegionVid>,
+    /// Reverse lookup from an externally-named region to its universal vid.
+    external_region_to_vid: &'a FxHashMap<ty::Region<'tcx>, RegionVid>,
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for RegionIndexCollector<'_, 'tcx> {
+    fn visit_region(&mut self, r: ty::Region<'tcx>) {
+        let idx = self.walk_index;
+        self.walk_index += 1;
+        let maybe_vid = match r.kind() {
+            ty::ReVar(vid) => Some(vid),
+            _ => self.external_region_to_vid.get(&r).copied(),
+        };
+        if let Some(vid) = maybe_vid {
+            // The insert must happen unconditionally: keep the side effect
+            // outside `debug_assert!`, whose argument is not evaluated when
+            // debug assertions are disabled (i.e. in release builds), which
+            // would silently drop the mapping.
+            let prev = self.mappings.insert(idx as u32, vid.as_u32());
+            debug_assert!(prev.is_none());
+            self.relevant_vids.insert(vid);
+        }
+        // Non-mapped regions (ReBound, ReErased, etc.) still consume
+        // an index so that walk-order positions stay stable across
+        // different region representations of the same type structure. 
+ } +} + +struct InputSlotCollector<'a> { + arg_ordinal: u32, + offset_within_arg: u32, + slots: &'a mut FxHashMap, +} + +impl<'tcx> TypeVisitor> for InputSlotCollector<'_> { + fn visit_region(&mut self, r: ty::Region<'tcx>) { + let slot = + InputSlot { arg_ordinal: self.arg_ordinal, offset_within_arg: self.offset_within_arg }; + self.offset_within_arg += 1; + if let ty::ReEarlyParam(ep) = r.kind() { + self.slots.entry(ep.index).or_insert(slot); + } + } +} + +fn build_param_pos_to_input_slot<'tcx>( + tcx: TyCtxt<'tcx>, + def_id: DefId, +) -> FxHashMap { + let mut slots = FxHashMap::default(); + let identity_args = ty::GenericArgs::identity_for_item(tcx, def_id); + for (arg_ordinal, arg) in identity_args.iter().enumerate() { + let mut collector = InputSlotCollector { + arg_ordinal: arg_ordinal as u32, + offset_within_arg: 0, + slots: &mut slots, + }; + arg.visit_with(&mut collector); + } + slots +} + +fn input_slot_for_param_pos( + param_pos: u32, + param_pos_to_slot: &FxHashMap, +) -> InputSlot { + param_pos_to_slot.get(¶m_pos).copied().unwrap_or_else(|| { + // Some region kinds (for example named late params) do not expose a + // finer structural slot through the identity-arg walk. Fall back to + // the param position so provenance still distinguishes input-sourced + // regions from LocalOnly ones. + InputSlot { arg_ordinal: param_pos, offset_within_arg: 0 } + }) +} + +pub(crate) fn compute_region_summary<'tcx>( + regioncx: &RegionInferenceContext<'tcx>, + body: &Body<'tcx>, + tcx: TyCtxt<'tcx>, +) -> BorrowckRegionSummary { + let external_region_to_vid: FxHashMap, RegionVid> = regioncx + .definitions + .iter_enumerated() + .filter_map(|(vid, def)| def.external_name.map(|region| (region, vid))) + .collect(); + + // ── Phase 1: Extract call-site region mappings ───────────────── + // + // Walk every terminator. 
For each Call whose callee resolves to + // an FnDef, walk the callee's generic args with a TypeVisitor to + // collect every region in depth-first visitation order. Each + // region encountered gets a walk-order index (0, 1, 2, ...), + // regardless of region kind. Only ReVar regions (inference + // variables) produce entries — but non-ReVar regions (e.g. + // ReBound from higher-ranked types) still advance the index, so + // that positions stay aligned with the consumer's walk over the + // same type structure after monomorphization. + + let mut call_site_mappings = UnordMap::default(); + let mut relevant_vids: FxIndexSet = FxIndexSet::default(); + + for bb_data in body.basic_blocks.iter() { + let (func, call_id) = match bb_data.terminator().kind { + TerminatorKind::Call { ref func, call_id, .. } + | TerminatorKind::TailCall { ref func, call_id, .. } => (func, call_id), + _ => continue, + }; + // Borrowck runs before inlining, so all call chains must be + // single-element at this point. + debug_assert_eq!( + call_id.len(), + 1, + "expected single-element call chain in pre-inlining MIR, got {:?}", + call_id, + ); + let &(_, local_id, _) = &call_id[0]; + let func_ty = func.ty(&body.local_decls, tcx); + if let ty::FnDef(_, args) = func_ty.kind() { + let mut region_mappings = UnordMap::default(); + let mut collector = RegionIndexCollector { + walk_index: 0, + mappings: &mut region_mappings, + relevant_vids: &mut relevant_vids, + external_region_to_vid: &external_region_to_vid, + }; + for arg in args.iter() { + arg.visit_with(&mut collector); + } + if !region_mappings.is_empty() { + call_site_mappings + .insert(local_id, CallSiteRegionMapping { call_id: local_id, region_mappings }); + } + } + } + + // Early exit: no call sites with lifetime params → empty summary. 
+ if call_site_mappings.is_empty() { + return BorrowckRegionSummary::default(); + } + + // ── Phase 2: Project the SCC DAG ────────────────────────────── + // + // The full RegionInferenceContext partitions all RegionVids into + // SCCs and stores a DAG over them. We project onto the relevant + // set: + // + // - Each original SCC containing ≥1 relevant vid gets a + // projected SCC index. Relevant vids in the same original SCC + // share a projected SCC (they are mutually outliving). + // + // - A projected edge A → B exists iff original SCC A can reach + // original SCC B, possibly through intermediate SCCs that + // contain no relevant vids. This preserves transitive + // reachability while eliding invisible intermediaries. + + let constraint_sccs = regioncx.constraint_sccs(); + + // Include universal region vids in the relevant set so that + // inference variables and universal regions in the same SCC share + // a projected SCC index. Without this, consumers cannot resolve + // inference vids (from call-site generic args) to their + // corresponding universal region param positions. + for (vid, def) in regioncx.definitions.iter_enumerated() { + if def.external_name.is_some() { + relevant_vids.insert(vid); + } + } + + // Assign projected SCC indices. + let mut orig_to_proj: FxIndexMap = FxIndexMap::default(); + let mut next_proj: u32 = 0; + let mut scc_of = UnordMap::default(); + + for &vid in &relevant_vids { + let orig_scc = constraint_sccs.scc(vid); + let proj_idx = *orig_to_proj.entry(orig_scc).or_insert_with(|| { + let idx = next_proj; + next_proj += 1; + idx + }); + scc_of.insert(vid.as_u32(), proj_idx); + } + + let num_proj = next_proj as usize; + + // DFS from each projected SCC through the original DAG. + // + // When we reach another projected SCC, record a projected edge + // and stop — that SCC's own DFS handles onward reachability. + // When we reach a non-projected SCC, continue through it (it is + // an invisible intermediate). 
+ let mut scc_successors: Vec> = vec![Vec::new(); num_proj]; + + for (&orig_scc, &proj_idx) in &orig_to_proj { + let mut visited: FxHashSet = FxHashSet::default(); + visited.insert(orig_scc); + let mut stack: Vec = constraint_sccs.successors(orig_scc).to_vec(); + + while let Some(scc) = stack.pop() { + if !visited.insert(scc) { + continue; + } + if let Some(&target_proj) = orig_to_proj.get(&scc) { + // Reached another projected SCC — record edge, stop. + scc_successors[proj_idx as usize].push(target_proj); + } else { + // Non-projected SCC — continue through it. + for &succ in constraint_sccs.successors(scc) { + stack.push(succ); + } + } + } + + scc_successors[proj_idx as usize].sort(); + scc_successors[proj_idx as usize].dedup(); + } + + let outlives_graph = ProjectedOutlivesGraph { scc_of, scc_successors }; + + // ── Phase 3: Universal region identity → param positions ───── + // + // Map every universal RegionVid to a param position: + // ReEarlyParam → ep.index + // ReLateParam (Named) → vid itself (identity) + // ReStatic → STATIC_PARAM_POS sentinel + // + // This covers all universal regions, not just those in the + // relevant set — consumers may need to identify a vid as + // 'static even if it did not appear in any call-site generic + // args. + + let vid_to_param_pos: Vec<(u32, u32)> = regioncx + .definitions + .iter_enumerated() + .filter_map(|(vid, def)| { + let region = def.external_name?; + let param_pos = match region.kind() { + ty::ReEarlyParam(ep) => ep.index, + ty::ReLateParam(lp) => match lp.kind { + ty::LateParamRegionKind::Named(_) => vid.as_u32(), + _ => return None, + }, + ty::ReStatic => STATIC_PARAM_POS, + _ => return None, + }; + Some((vid.as_u32(), param_pos)) + }) + .collect(); + + // ── Phase 4: Resolve call-site inference vids to params ──────── + // + // For each call-site inference vid, check if it shares an SCC with + // a non-static universal region in the FULL (unprojected) SCC graph. 
+ // SCC equality captures invariant subtyping (dyn types are invariant + // in their lifetime parameters), which creates mutual constraints + // between the call's inference vars and the corresponding universal + // regions. This is more precise than reachability or raw-constraint + // approaches, which incorrectly map vids that are merely outlived + // by a universal region. + + // Build universal SCC → param_pos mapping. + let mut univ_scc_to_param: FxIndexMap = FxIndexMap::default(); + for &(vid, param_pos) in &vid_to_param_pos { + if param_pos == STATIC_PARAM_POS { + continue; + } + let scc = constraint_sccs.scc(RegionVid::from_u32(vid)); + univ_scc_to_param.insert(scc, param_pos); + } + + let mut vid_to_resolved_param_map: FxHashMap = FxHashMap::default(); + for &vid in &relevant_vids { + if regioncx.definitions[vid].external_name.is_some() { + continue; + } + let vid_scc = constraint_sccs.scc(vid); + if let Some(¶m_pos) = univ_scc_to_param.get(&vid_scc) { + vid_to_resolved_param_map.insert(vid.as_u32(), param_pos); + } + } + + // ── Phase 4b: Compute "bounded-by" resolution ──────────────────── + // + // For inference vids that are NOT in a universal SCC (due to + // covariant dyn-type constraints generating only forward edges), + // check if the vid's SCC is reachable from exactly one non-static + // universal SCC. Post-borrowck, the concrete lifetime through the + // unsizing edge IS that universal — the covariance permission + // doesn't change the runtime value. + + // Precompute transitive reachability over the SCC DAG as two + // bit matrices: + // `forward[scc]` = descendants of `scc` ∪ {scc} + // `reverse[scc]` = ancestors of `scc` ∪ {scc} + // Both are built with a single fold. `all_sccs()` yields post-order + // (successors before predecessors), so `forward` reads from + // already-complete rows when processing an SCC. 
`reverse` needs the + // opposite order — iterate `.rev()` so every scc's ancestors are + // finalized before being propagated into its successors. + let num_sccs = constraint_sccs.num_sccs(); + let mut forward: SparseBitMatrix = + SparseBitMatrix::new(num_sccs); + for scc in constraint_sccs.all_sccs() { + forward.insert(scc, scc); + for &succ in constraint_sccs.successors(scc) { + // Edge scc -> succ: scc's row absorbs succ's (completed) row. + forward.union_rows(succ, scc); + } + } + let mut reverse: SparseBitMatrix = + SparseBitMatrix::new(num_sccs); + // `all_sccs()` only exposes `impl Iterator`, not `DoubleEndedIterator`, + // so reverse-post-order is expressed directly over the index range. + for scc in (0..num_sccs).rev().map(ConstraintSccIndex::new) { + reverse.insert(scc, scc); + for &succ in constraint_sccs.successors(scc) { + // Edge scc -> succ: push scc's (completed) ancestors into succ. + reverse.union_rows(scc, succ); + } + } + + // Record which universal-SCC param positions transitively reach + // each descendant SCC. A universal is not listed among its own + // predecessors. + let mut scc_to_univ_predecessors: FxHashMap> = + FxHashMap::default(); + for (&univ_scc, ¶m_pos) in &univ_scc_to_param { + for reached in forward.iter(univ_scc) { + if reached == univ_scc { + continue; + } + scc_to_univ_predecessors.entry(reached).or_default().push(param_pos); + } + } + + // Compute the full set of SCCs reachable from 'static, used to + // identify non-static predecessors. 
+ let static_vid = RegionVid::from_u32(0); + let static_scc = constraint_sccs.scc(static_vid); + let mut reachable_from_static = FxHashSet::default(); + { + let mut stack = vec![static_scc]; + reachable_from_static.insert(static_scc); + while let Some(scc) = stack.pop() { + for &succ in constraint_sccs.successors(scc) { + if reachable_from_static.insert(succ) { + stack.push(succ); + } + } + } + } + + let vid_to_param_pos_map: FxHashMap = vid_to_param_pos.iter().copied().collect(); + let input_slot_by_param_pos = build_param_pos_to_input_slot(tcx, body.source.def_id()); + + // Scratch reused across vids for the domination check below. + let mut scratch: DenseBitSet = DenseBitSet::new_empty(num_sccs); + let mut vid_provenance = UnordMap::default(); + for &vid in &relevant_vids { + let vid_u32 = vid.as_u32(); + let provenance = if let Some(¶m_pos) = vid_to_param_pos_map.get(&vid_u32) { + if param_pos == STATIC_PARAM_POS { + VidProvenance::Static + } else { + VidProvenance::Input(input_slot_for_param_pos(param_pos, &input_slot_by_param_pos)) + } + } else if let Some(¶m_pos) = vid_to_resolved_param_map.get(&vid_u32) { + VidProvenance::Input(input_slot_for_param_pos(param_pos, &input_slot_by_param_pos)) + } else { + // Check bounded-by: does exactly one non-static universal + // SCC U reach this vid's SCC S, and does U dominate S + // (every predecessor of S is also reachable from U)? + // + // The domination check ensures that no local-only lifetime + // feeds into S from a path that doesn't go through U. + // Without it, an SCC merged through trait bounds (e.g., + // Sub4<'a>: Super4<'a,'a>) would be classified as bounded + // by 'a even when some vids represent a local lifetime. + let vid_scc = constraint_sccs.scc(vid); + match scc_to_univ_predecessors.get(&vid_scc) { + Some(preds) if preds.len() == 1 => { + // Find the universal SCC for this param_pos. 
+ let univ_scc = univ_scc_to_param + .iter() + .find(|&(_, &pp)| pp == preds[0]) + .map(|(&scc, _)| scc); + // Domination: every non-static, non-U ancestor of + // `vid_scc` must also be reachable from `u_scc`. + // Expressed as: ancestors(vid_scc) \ {static, u_scc} + // ⊆ descendants(u_scc) ∪ {u_scc}. + // `vid_scc` itself is in `ancestors(vid_scc)` but also + // in `forward.row(u_scc)` — since we only reach this + // arm when `u_scc` reaches `vid_scc` — so it cancels. + let dominated = univ_scc.is_some_and(|u_scc| { + match (reverse.row(vid_scc), forward.row(u_scc)) { + (Some(ancestors), Some(fwd_u)) => { + scratch.clone_from(ancestors); + scratch.remove(static_scc); + scratch.remove(u_scc); + scratch.subtract(fwd_u); + scratch.is_empty() + } + // `vid_scc` has no ancestors — trivially dominated. + (None, _) => true, + // Defensive: `forward` self-insertion guarantees + // `u_scc` has a row, so this arm is unreachable. + (Some(_), None) => false, + } + }); + if dominated { + VidProvenance::BoundedByUniversal(input_slot_for_param_pos( + preds[0], + &input_slot_by_param_pos, + )) + } else { + VidProvenance::LocalOnly + } + } + _ => VidProvenance::LocalOnly, + } + }; + vid_provenance.insert(vid_u32, provenance); + } + + BorrowckRegionSummary { call_site_mappings, outlives_graph, vid_provenance, vid_to_param_pos } +} diff --git a/compiler/rustc_borrowck/src/root_cx.rs b/compiler/rustc_borrowck/src/root_cx.rs index a082aba35b8a7..a4824d78d7ee3 100644 --- a/compiler/rustc_borrowck/src/root_cx.rs +++ b/compiler/rustc_borrowck/src/root_cx.rs @@ -4,7 +4,7 @@ use std::rc::Rc; use rustc_abi::FieldIdx; use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; use rustc_hir::def_id::LocalDefId; -use rustc_middle::mir::ConstraintCategory; +use rustc_middle::mir::{BorrowckRegionSummary, BorrowckResult, ConstraintCategory}; use rustc_middle::ty::{self, TyCtxt}; use rustc_span::ErrorGuaranteed; use smallvec::SmallVec; @@ -39,6 +39,9 @@ pub(super) struct BorrowCheckRootCtxt<'tcx> { 
collect_region_constraints_results: FxIndexMap>, propagated_borrowck_results: FxHashMap>, + /// Region summaries collected during borrowck of the root and all its + /// nested bodies, keyed by their `LocalDefId`. + region_summaries: FxIndexMap, tainted_by_errors: Option, /// This should be `None` during normal compilation. See [`crate::consumers`] for more /// information on how this is used. @@ -58,6 +61,7 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { unconstrained_hidden_type_errors: Default::default(), collect_region_constraints_results: Default::default(), propagated_borrowck_results: Default::default(), + region_summaries: Default::default(), tainted_by_errors: None, consumer, } @@ -78,15 +82,20 @@ impl<'tcx> BorrowCheckRootCtxt<'tcx> { &self.propagated_borrowck_results[&nested_body_def_id].used_mut_upvars } - pub(super) fn finalize( - self, - ) -> Result<&'tcx FxIndexMap>, ErrorGuaranteed> - { - if let Some(guar) = self.tainted_by_errors { - Err(guar) - } else { - Ok(self.tcx.arena.alloc(self.hidden_types)) - } + pub(super) fn add_region_summary( + &mut self, + def_id: LocalDefId, + summary: BorrowckRegionSummary, + ) { + self.region_summaries.insert(def_id, summary); + } + + pub(super) fn finalize(self) -> &'tcx BorrowckResult<'tcx> { + let hidden_types = + if let Some(guar) = self.tainted_by_errors { Err(guar) } else { Ok(self.hidden_types) }; + self.tcx + .arena + .alloc(BorrowckResult { hidden_types, region_summaries: self.region_summaries }) } fn handle_opaque_type_uses(&mut self) { diff --git a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs index 868c6f11b68dc..6bdb4377b26e9 100644 --- a/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs +++ b/compiler/rustc_borrowck/src/type_check/constraint_conversion.rs @@ -209,6 +209,10 @@ impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> { } GenericArgKind::Const(_) => unreachable!(), + + GenericArgKind::Outlives(_) => { + // 
Outlives args are pure metadata; nothing to constrain. + } } } diff --git a/compiler/rustc_codegen_cranelift/src/base.rs b/compiler/rustc_codegen_cranelift/src/base.rs index 4f483cdc5d6c7..8ef0978f3c01c 100644 --- a/compiler/rustc_codegen_cranelift/src/base.rs +++ b/compiler/rustc_codegen_cranelift/src/base.rs @@ -44,7 +44,7 @@ pub(crate) fn codegen_fn<'tcx>( let symbol_name = tcx.symbol_name(instance).name.to_string(); let _timer = tcx.prof.generic_activity_with_arg("codegen fn", &*symbol_name); - let mir = tcx.instance_mir(instance.def); + let mir = tcx.codegen_mir(instance); let _mir_guard = crate::PrintOnPanic(|| { let mut buf = Vec::new(); with_no_trimmed_paths!({ @@ -502,6 +502,7 @@ fn codegen_fn_body(fx: &mut FunctionCx<'_, '_, '_>, start_block: Block) { fn_span, unwind, call_source: _, + call_id: _, } => { fx.tcx.prof.generic_activity("codegen call").run(|| { crate::abi::codegen_terminator_call( diff --git a/compiler/rustc_codegen_cranelift/src/constant.rs b/compiler/rustc_codegen_cranelift/src/constant.rs index ff8e6744bd32c..a660b381266c8 100644 --- a/compiler/rustc_codegen_cranelift/src/constant.rs +++ b/compiler/rustc_codegen_cranelift/src/constant.rs @@ -385,6 +385,12 @@ fn define_all_allocs(tcx: TyCtxt<'_>, module: &mut dyn Module, cx: &mut Constant unreachable!() } }; + if alloc.inner().address_significant { + debug_assert!( + alloc.inner().mutability.is_not(), + "address_significant on mutable allocation" + ); + } // FIXME: should we have a cache so we don't do this multiple times for the same `ConstAllocation`? 
let data_id = *cx.anon_allocs.entry(alloc_id).or_insert_with(|| { module.declare_anonymous_data(alloc.inner().mutability.is_mut(), false).unwrap() diff --git a/compiler/rustc_codegen_cranelift/src/driver/jit.rs b/compiler/rustc_codegen_cranelift/src/driver/jit.rs index 9bbc338a8e07c..3903e6ea3b1da 100644 --- a/compiler/rustc_codegen_cranelift/src/driver/jit.rs +++ b/compiler/rustc_codegen_cranelift/src/driver/jit.rs @@ -7,7 +7,7 @@ use std::os::raw::{c_char, c_int}; use cranelift_jit::{JITBuilder, JITModule}; use rustc_codegen_ssa::CrateInfo; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use rustc_middle::mir::mono::MonoItem; +use rustc_middle::mono::MonoItem; use rustc_session::Session; use rustc_session::config::OutputFilenames; use rustc_span::sym; diff --git a/compiler/rustc_codegen_gcc/src/common.rs b/compiler/rustc_codegen_gcc/src/common.rs index dd0064d34bc4a..2825afb36f875 100644 --- a/compiler/rustc_codegen_gcc/src/common.rs +++ b/compiler/rustc_codegen_gcc/src/common.rs @@ -265,6 +265,12 @@ impl<'gcc, 'tcx> ConstCodegenMethods for CodegenCx<'gcc, 'tcx> { }; } + if alloc.inner().address_significant { + debug_assert!( + alloc.inner().mutability.is_not(), + "address_significant on mutable allocation" + ); + } let value = match alloc.inner().mutability { Mutability::Mut => self.static_addr_of_mut( const_alloc_to_gcc(self, alloc), diff --git a/compiler/rustc_codegen_llvm/src/common.rs b/compiler/rustc_codegen_llvm/src/common.rs index dadf8e9e7d5fa..7f91273de7ada 100644 --- a/compiler/rustc_codegen_llvm/src/common.rs +++ b/compiler/rustc_codegen_llvm/src/common.rs @@ -304,6 +304,9 @@ impl<'ll, 'tcx> ConstCodegenMethods for CodegenCx<'ll, 'tcx> { Mutability::Mut => self.static_addr_of_mut(init, alloc.align, None), _ => self.static_addr_of_impl(init, alloc.align, None), }; + if alloc.address_significant { + llvm::set_unnamed_address(value, llvm::UnnamedAddr::No); + } if !self.sess().fewer_names() && llvm::get_value_name(value).is_empty() { let 
hash = self.tcx.with_stable_hashing_context(|mut hcx| { diff --git a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs index 2460bf18b13d1..3f0e1285bb182 100644 --- a/compiler/rustc_codegen_ssa/src/back/symbol_export.rs +++ b/compiler/rustc_codegen_ssa/src/back/symbol_export.rs @@ -107,11 +107,7 @@ fn reachable_non_generics_provider(tcx: TyCtxt<'_>, _: LocalCrate) -> DefIdMap> FunctionCx<'a, 'tcx, Bx> { unwind, call_source: _, fn_span, + call_id: _, } => self.codegen_call_terminator( helper, bx, @@ -1599,7 +1600,7 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { CallKind::Normal, mergeable_succ(), ), - mir::TerminatorKind::TailCall { ref func, ref args, fn_span } => self + mir::TerminatorKind::TailCall { ref func, ref args, fn_span, call_id: _ } => self .codegen_call_terminator( helper, bx, diff --git a/compiler/rustc_codegen_ssa/src/mir/mod.rs b/compiler/rustc_codegen_ssa/src/mir/mod.rs index 93da12107bab0..756deab33582e 100644 --- a/compiler/rustc_codegen_ssa/src/mir/mod.rs +++ b/compiler/rustc_codegen_ssa/src/mir/mod.rs @@ -179,7 +179,7 @@ pub fn codegen_mir<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>>( let tcx = cx.tcx(); let llfn = cx.get_fn(instance); - let mut mir = tcx.instance_mir(instance.def); + let mut mir = tcx.codegen_mir(instance); // Note that the ABI logic has deduced facts about the functions' parameters based on the MIR we // got here (`deduce_param_attrs`). That means we can *not* apply arbitrary further MIR // transforms as that may invalidate those deduced facts! 
diff --git a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs index bdefacefd20b9..012506dead3ff 100644 --- a/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs +++ b/compiler/rustc_codegen_ssa/src/mir/naked_asm.rs @@ -26,7 +26,7 @@ pub fn codegen_naked_asm< item_data: MonoItemData, ) { assert!(!instance.args.has_infer()); - let mir = cx.tcx().instance_mir(instance.def); + let mir = cx.tcx().codegen_mir(instance); let rustc_middle::mir::TerminatorKind::InlineAsm { asm_macro: _, diff --git a/compiler/rustc_const_eval/src/const_eval/type_info/adt.rs b/compiler/rustc_const_eval/src/const_eval/type_info/adt.rs index 2143313bbbada..cb1c3ba52eb34 100644 --- a/compiler/rustc_const_eval/src/const_eval/type_info/adt.rs +++ b/compiler/rustc_const_eval/src/const_eval/type_info/adt.rs @@ -210,6 +210,10 @@ impl<'tcx> InterpCx<'tcx, CompileTimeMachine<'tcx>> { GenericArgKind::Lifetime(region) => this.write_generic_lifetime(region, place), GenericArgKind::Type(ty) => this.write_generic_type(ty, place), GenericArgKind::Const(c) => this.write_generic_const(c, place), + GenericArgKind::Outlives(_) => { + // Outlives args are metadata-only; skip. 
+ interp_ok(()) + } } }) } diff --git a/compiler/rustc_const_eval/src/interpret/step.rs b/compiler/rustc_const_eval/src/interpret/step.rs index f9ab86e9888d7..ed7b5f6099342 100644 --- a/compiler/rustc_const_eval/src/interpret/step.rs +++ b/compiler/rustc_const_eval/src/interpret/step.rs @@ -532,6 +532,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { unwind, call_source: _, fn_span: _, + call_id: _, } => { let old_stack = self.frame_idx(); let old_loc = self.frame().loc; @@ -559,7 +560,7 @@ impl<'tcx, M: Machine<'tcx>> InterpCx<'tcx, M> { } } - TailCall { ref func, ref args, fn_span: _ } => { + TailCall { ref func, ref args, fn_span: _, call_id: _ } => { let old_frame_idx = self.frame_idx(); let EvaluatedCalleeAndArgs { callee, args, fn_sig, fn_abi, with_caller_location } = diff --git a/compiler/rustc_hir/src/lang_items.rs b/compiler/rustc_hir/src/lang_items.rs index bdf63abc7a859..e6422a345a6e9 100644 --- a/compiler/rustc_hir/src/lang_items.rs +++ b/compiler/rustc_hir/src/lang_items.rs @@ -447,6 +447,9 @@ language_item_table! 
{ // Used to fallback `{float}` to `f32` when `f32: From<{float}>` From, sym::From, from_trait, Target::Trait, GenericRequirement::Exact(1); + + // Trait Casting + TraitMetadataTable, sym::trait_metadata_table, trait_metadata_table_trait, Target::Trait, GenericRequirement::Exact(1); } /// The requirement imposed on the generics of a lang item diff --git a/compiler/rustc_hir_analysis/src/check/intrinsic.rs b/compiler/rustc_hir_analysis/src/check/intrinsic.rs index 4b0225e858818..0d125b93f1d1b 100644 --- a/compiler/rustc_hir_analysis/src/check/intrinsic.rs +++ b/compiler/rustc_hir_analysis/src/check/intrinsic.rs @@ -5,7 +5,7 @@ use rustc_hir::{self as hir, LangItem}; use rustc_middle::traits::{ObligationCause, ObligationCauseCode}; use rustc_middle::ty::{self, Const, Ty, TyCtxt}; use rustc_span::def_id::LocalDefId; -use rustc_span::{Span, Symbol, sym}; +use rustc_span::{DUMMY_SP, Span, Symbol, sym}; use crate::check::check_function_signature; use crate::errors::{UnrecognizedIntrinsicFunction, WrongNumberOfGenericArgumentsToIntrinsic}; @@ -801,6 +801,47 @@ pub(crate) fn check_intrinsic_type( | sym::atomic_xor => (2, 1, vec![Ty::new_mut_ptr(tcx, param(0)), param(1)], param(0)), sym::atomic_fence | sym::atomic_singlethreadfence => (0, 1, Vec::new(), tcx.types.unit), + sym::trait_metadata_index => ( + 2, + 0, + vec![], + Ty::new_tup( + tcx, + &[Ty::new_imm_ref(tcx, tcx.lifetimes.re_static, tcx.types.u8), tcx.types.usize], + ), + ), + sym::trait_metadata_table => { + // Build ptr::NonNull>> + let non_null_def_id = tcx.get_diagnostic_item(sym::NonNull).unwrap(); + let non_null_adt = tcx.adt_def(non_null_def_id); + let option_def_id = tcx.require_lang_item(LangItem::Option, DUMMY_SP); + let option_adt = tcx.adt_def(option_def_id); + + // Build NonNull<()> + let inner_non_null_args = tcx.mk_args(&[tcx.types.unit.into()]); + let inner_non_null_ty = Ty::new_adt(tcx, non_null_adt, inner_non_null_args); + + // Build Option> + let option_args = 
tcx.mk_args(&[inner_non_null_ty.into()]); + let option_ty = Ty::new_adt(tcx, option_adt, option_args); + + // Build NonNull>> + let outer_non_null_args = tcx.mk_args(&[option_ty.into()]); + let table_ptr = Ty::new_adt(tcx, non_null_adt, outer_non_null_args); + + ( + 2, + 0, + vec![], + Ty::new_tup( + tcx, + &[Ty::new_imm_ref(tcx, tcx.lifetimes.re_static, tcx.types.u8), table_ptr], + ), + ) + } + sym::trait_metadata_table_len => (1, 0, vec![], tcx.types.usize), + sym::trait_cast_is_lifetime_erasure_safe => (2, 0, vec![], tcx.types.bool), + other => { tcx.dcx().emit_err(UnrecognizedIntrinsicFunction { span, name: other }); return; diff --git a/compiler/rustc_hir_analysis/src/check/mod.rs b/compiler/rustc_hir_analysis/src/check/mod.rs index 6862b6fe863f8..6ca69816b683a 100644 --- a/compiler/rustc_hir_analysis/src/check/mod.rs +++ b/compiler/rustc_hir_analysis/src/check/mod.rs @@ -413,6 +413,7 @@ fn bounds_from_generic_predicates<'tcx>( region.to_string() } } + ty::GenericArgKind::Outlives(_) => String::new(), }) .collect::>(); for (ty, bounds) in types.into_iter() { diff --git a/compiler/rustc_hir_analysis/src/check/wfcheck.rs b/compiler/rustc_hir_analysis/src/check/wfcheck.rs index 07ad2a79dc7ad..e00bb442d0031 100644 --- a/compiler/rustc_hir_analysis/src/check/wfcheck.rs +++ b/compiler/rustc_hir_analysis/src/check/wfcheck.rs @@ -1190,10 +1190,221 @@ fn check_trait(tcx: TyCtxt<'_>, item: &hir::Item<'_>) -> Result<(), ErrorGuarant // Only check traits, don't check trait aliases if let hir::ItemKind::Trait(..) = item.kind { check_gat_where_clauses(tcx, item.owner_id.def_id); + let _ = check_trait_metadata_table_bounds_well_formed(tcx, def_id); + let _ = check_region_closure_for_cast_graph(tcx, def_id); } res } +/// Bounded intertrait casting: reject malformed `TraitMetadataTable` +/// supertrait bounds at trait-definition time. +/// +/// Two shapes are always wrong and emitted eagerly: +/// +/// 1. `TraitMetadataTable` where `T` is not a `dyn Trait` type. 
The +/// blanket impl requires `T: Pointee>`, which +/// holds only for trait objects — other `T` are uninhabitable and never +/// what the author intended. +/// +/// 2. `TraitMetadataTable` where `dyn X` is neither `dyn Self` +/// (declaring this trait as a cast root) nor `dyn R` for a transitive +/// supertrait `R` that is itself a cast root. Such a bound is satisfiable +/// via the blanket impl but places the trait in no reachable cast graph. +fn check_trait_metadata_table_bounds_well_formed( + tcx: TyCtxt<'_>, + trait_def_id: LocalDefId, +) -> Result<(), rustc_span::ErrorGuaranteed> { + let Some(tmt) = tcx.lang_items().trait_metadata_table_trait() else { + return Ok(()); + }; + let self_def_id = trait_def_id.to_def_id(); + + // A trait is a "root" if it carries `TraitMetadataTable` as a + // supertrait bound. + let is_root_trait = |candidate: DefId| { + tcx.explicit_super_predicates_of(candidate).skip_binder().iter().any(|(pred, _)| { + matches!( + pred.kind().skip_binder(), + ty::ClauseKind::Trait(tp) if tp.def_id() == tmt + ) + }) + }; + + // Collect the def_ids of all transitive supertraits of `self_def_id`, + // EXCLUDING `TraitMetadataTable` itself (which is always present via the + // bounds we're validating). Used to verify that a `TraitMetadataTable` + // bound's X refers to a trait in this trait's own supertrait chain. + let trait_name = tcx.item_name(self_def_id); + let mut result: Result<(), rustc_span::ErrorGuaranteed> = Ok(()); + + for (pred, span) in tcx.explicit_super_predicates_of(self_def_id).skip_binder().iter().copied() + { + let ty::ClauseKind::Trait(trait_pred) = pred.kind().skip_binder() else { + continue; + }; + if trait_pred.def_id() != tmt { + continue; + } + let arg_ty = trait_pred.trait_ref.args.type_at(1); + + // Diagnostic A: argument is not a `dyn Trait` type. + let ty::Dynamic(preds, ..) 
= arg_ty.kind() else { + let guar = tcx.dcx().emit_err(errors::TmtArgMustBeDyn { span, arg_ty, trait_name }); + result = Err(guar); + continue; + }; + + // Diagnostic B: argument is `dyn X` but X is not a valid root-ish + // target from this trait's perspective. Valid shapes: + // (a) X == Self — declaring this trait as a cast root. + // (b) X is a transitive supertrait of Self AND X is itself a cast + // root (carries `TraitMetadataTable`). + let Some(principal) = preds.principal() else { + continue; + }; + let arg_did = principal.skip_binder().def_id; + if arg_did == self_def_id { + continue; // (a) — root opt-in + } + + // Walk transitive supertrait def-ids of Self (via supertrait_def_ids, + // which walks `explicit_super_predicates_of` iteratively). + let supertrait_dids: FxHashSet = + rustc_middle::ty::elaborate::supertrait_def_ids(tcx, self_def_id).collect(); + let is_valid = supertrait_dids.contains(&arg_did) && is_root_trait(arg_did); + if !is_valid { + let guar = tcx.dcx().emit_err(errors::TmtArgMismatch { + span, + arg_trait: tcx.item_name(arg_did), + trait_name, + }); + result = Err(guar); + } + } + result +} + +/// Bounded intertrait casting: eagerly reject traits whose lifetime +/// parameters are not expressible through the root supertrait of any +/// trait-cast graph they participate in. +/// +/// A trait `T` participates in a graph with root `R` when `R` is in `T`'s +/// transitive supertrait chain AND `R` carries `TraitMetadataTable` +/// as a supertrait bound (the root opt-in). For every such root `R`, every +/// early-bound lifetime parameter of `T` must appear as a region in the +/// trait ref `Self: R<...>` reached via the supertrait chain; otherwise +/// that lifetime is erased when unsizing `T` to `R` and could be +/// manufactured at downcast time — which is unsound. 
+/// +/// This is a conservative, structural check: it considers only lifetime +/// parameters reachable directly through the elaborated supertrait chain +/// (no region outlives where-clause propagation). Any lifetime parameter +/// of `T` that does not appear in any reached root's trait ref args is +/// reported once per parameter. +fn check_region_closure_for_cast_graph( + tcx: TyCtxt<'_>, + trait_def_id: LocalDefId, +) -> Result<(), rustc_span::ErrorGuaranteed> { + let Some(tmt) = tcx.lang_items().trait_metadata_table_trait() else { + return Ok(()); + }; + let self_def_id = trait_def_id.to_def_id(); + + // A trait is a "root" if it carries `TraitMetadataTable` as a + // supertrait bound. + let is_root_trait = |candidate: DefId| { + tcx.explicit_super_predicates_of(candidate).skip_binder().iter().any(|(pred, _)| { + matches!( + pred.kind().skip_binder(), + ty::ClauseKind::Trait(tp) if tp.def_id() == tmt + ) + }) + }; + + let self_generics = tcx.generics_of(self_def_id); + + // BFS over the supertrait chain. At each node we hold a `TraitRef` whose + // args are expressed in terms of `self_def_id`'s early-bound parameters + // (via identity substitution at the start; each hop uses + // `instantiate_supertrait` to compose the parent's binder with the + // current node's args). + let mut covered: FxHashSet = FxHashSet::default(); + let mut reached_root: Option = None; + let mut seen: FxHashSet = FxHashSet::default(); + let mut queue: Vec> = Vec::new(); + queue.push(ty::TraitRef::identity(tcx, self_def_id)); + seen.insert(self_def_id); + + while let Some(current) = queue.pop() { + // `current` must not contain escaping bound vars for Binder::dummy + // below to be sound. We only ever push trait refs from + // `no_bound_vars`-unwrapped parents, so this holds by construction. 
+ let current_binder = ty::Binder::dummy(current); + for (clause, _) in tcx + .explicit_super_predicates_of(current.def_id) + .iter_identity_copied() + .map(ty::Unnormalized::skip_norm_wip) + { + let Some(parent_trait_pred) = + clause.instantiate_supertrait(tcx, current_binder).as_trait_clause() + else { + continue; + }; + // Higher-ranked supertraits (e.g. `Self: for<'de> Deserialize<'de>`) + // have late-bound regions that escape when stripped from the binder. + // They cannot contribute to the coverage check because their vars + // are not early-bound params of `self_def_id`, so we skip them + // entirely. + let Some(parent_trait_pred) = parent_trait_pred.no_bound_vars() else { + continue; + }; + let parent_trait_ref = parent_trait_pred.trait_ref; + let parent_did = parent_trait_ref.def_id; + if parent_did == self_def_id { + continue; + } + if is_root_trait(parent_did) { + reached_root = Some(parent_did); + for arg in parent_trait_ref.args.iter() { + if let ty::GenericArgKind::Lifetime(r) = arg.kind() + && let ty::ReEarlyParam(ep) = r.kind() + { + covered.insert(ep.index); + } + } + } + if seen.insert(parent_did) { + queue.push(parent_trait_ref); + } + } + } + + // No root reached — trait is not part of any cast graph, nothing to check. + let Some(root_did) = reached_root else { + return Ok(()); + }; + + // One error per offending early-bound lifetime parameter. + let mut result: Result<(), rustc_span::ErrorGuaranteed> = Ok(()); + for param in &self_generics.own_params { + if !matches!(param.kind, GenericParamDefKind::Lifetime) { + continue; + } + if covered.contains(¶m.index) { + continue; + } + let guar = tcx.dcx().emit_err(errors::TraitGraphNotDowncastSafe { + span: tcx.def_span(param.def_id), + root_span: tcx.def_span(root_did), + trait_name: tcx.item_name(self_def_id), + root: tcx.item_name(root_did), + lt_name: param.name, + }); + result = Err(guar); + } + result +} + /// Checks all associated type defaults of trait `trait_def_id`. 
/// /// Assuming the defaults are used, check that all predicates (bounds on the diff --git a/compiler/rustc_hir_analysis/src/collect.rs b/compiler/rustc_hir_analysis/src/collect.rs index 69a3e519bb2ab..920b1cdde04e1 100644 --- a/compiler/rustc_hir_analysis/src/collect.rs +++ b/compiler/rustc_hir_analysis/src/collect.rs @@ -252,6 +252,26 @@ impl<'tcx> ItemCtxt<'tcx> { self.tcx.local_def_id_to_hir_id(self.item_def_id) } + /// Returns `true` when `self.item_def_id` is a trait whose HIR + /// supertraits include a `TraitMetadataTable<…>` bound. Used to + /// detect the self-referential pattern + /// `trait T: TraitMetadataTable` and break the query cycle + /// that would otherwise occur when lowering `dyn T`. + fn has_trait_metadata_table_supertrait(&self) -> bool { + let Some(tmt_did) = self.tcx.lang_items().trait_metadata_table_trait() else { + return false; + }; + let hir::Node::Item(item) = self.tcx.hir_node_by_def_id(self.item_def_id) else { + return false; + }; + let hir::ItemKind::Trait(.., supertraits, _) = item.kind else { + return false; + }; + supertraits + .iter() + .any(|bound| bound.trait_ref().and_then(|tr| tr.trait_def_id()) == Some(tmt_did)) + } + pub(crate) fn node(&self) -> hir::Node<'tcx> { self.tcx.hir_node(self.hir_id()) } @@ -570,6 +590,20 @@ impl<'tcx> HirTyLowerer<'tcx> for ItemCtxt<'tcx> { } fn dyn_compatibility_violations(&self, trait_def_id: DefId) -> Vec { + // When computing super-predicates of a trait T that has + // `TraitMetadataTable` as a supertrait, lowering `dyn T` + // re-enters `explicit_super_predicates_of(T)` through the + // dyn-compatibility check, causing a query cycle. Skip the + // check when the `dyn` type's principal trait is T and T has a + // `TraitMetadataTable` supertrait bound (detected by matching + // the item currently being collected). The full + // `dyn_compatibility_violations` query runs later anyway. 
+ if trait_def_id == self.item_def_id.to_def_id() + && self.tcx.def_kind(self.item_def_id) == DefKind::Trait + && self.has_trait_metadata_table_supertrait() + { + return vec![]; + } hir_ty_lowering_dyn_compatibility_violations(self.tcx, trait_def_id) } } diff --git a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs index 4409f2c068eb8..9a81e6e802701 100644 --- a/compiler/rustc_hir_analysis/src/collect/item_bounds.rs +++ b/compiler/rustc_hir_analysis/src/collect/item_bounds.rs @@ -212,6 +212,10 @@ fn remap_gat_vars_and_recurse_into_nested_projections<'tcx>( }; mapping.insert(bv.var, tcx.mk_param_from_def(param)) } + ty::GenericArgKind::Outlives(_) => { + // Outlives metadata args don't participate in GAT arg mapping. + continue; + } }; if existing.is_some() { diff --git a/compiler/rustc_hir_analysis/src/errors.rs b/compiler/rustc_hir_analysis/src/errors.rs index f353ace0b3886..f8f5f148c9ccd 100644 --- a/compiler/rustc_hir_analysis/src/errors.rs +++ b/compiler/rustc_hir_analysis/src/errors.rs @@ -1958,3 +1958,73 @@ pub(crate) struct EiiDefkindMismatchStaticSafety { pub span: Span, pub eii_name: Symbol, } + +/// Emitted eagerly at trait-definition time when a trait that is part of a +/// bounded intertrait casting graph (i.e. has some supertrait ancestor +/// carrying `TraitMetadataTable`) introduces a lifetime parameter +/// that is not expressible through any lifetime parameter of the root +/// supertrait. Such a lifetime would be erased during unsizing to +/// `dyn Root` and could be manufactured at downcast time, which is unsound. 
+#[derive(Diagnostic)] +#[diag("trait graph rooted at `{$root}` is not downcast-safe")] +#[note( + "downcasting to `dyn {$trait_name}` could manufacture the lifetime `{$lt_name}` \ + which was erased when unsizing to `dyn {$root}`" +)] +#[help("add a lifetime parameter to `{$root}` that `{$lt_name}` can be bounded by")] +pub(crate) struct TraitGraphNotDowncastSafe { + #[primary_span] + #[label("lifetime `{$lt_name}` is not bounded by any lifetime on `{$root}`")] + pub span: Span, + #[label("root supertrait defined here")] + pub root_span: Span, + pub trait_name: Symbol, + pub root: Symbol, + pub lt_name: Symbol, +} + +/// Emitted when a trait declaration names `TraitMetadataTable` as a +/// supertrait where `T` is not a `dyn Trait` type. The `TraitMetadataTable` +/// machinery's blanket impl requires `T: Pointee>`, +/// which holds only for trait objects. +#[derive(Diagnostic)] +#[diag("`TraitMetadataTable` type argument must be a trait object")] +#[note( + "`TraitMetadataTable` requires `T: Pointee>`, \ + which holds only for trait objects" +)] +#[help( + "use `dyn Self` to declare `{$trait_name}` as a cast root, or `dyn R` for a \ + cast-root supertrait `R` of `{$trait_name}`" +)] +pub(crate) struct TmtArgMustBeDyn<'tcx> { + #[primary_span] + #[label("`{$arg_ty}` is not a `dyn Trait` type")] + pub span: Span, + pub arg_ty: Ty<'tcx>, + pub trait_name: Symbol, +} + +/// Emitted when a trait declaration names `TraitMetadataTable` as a +/// supertrait where `dyn X` is neither `dyn Self` (declaring this trait as +/// a cast root) nor `dyn R` for a transitive supertrait `R` that is itself a +/// cast root. Such a bound is satisfiable but places the trait in no +/// reachable cast graph. 
+#[derive(Diagnostic)] +#[diag("`TraitMetadataTable` type argument does not match a cast root")] +#[note( + "on a trait `Tr`, a `TraitMetadataTable` supertrait bound requires \ + `X = Self` (declaring `Tr` as a cast root) or `X = R` for some transitive \ + supertrait `R` of `Tr` that is itself a cast root" +)] +#[help( + "subtraits inherit `TraitMetadataTable` from their root — \ + the explicit bound is usually unnecessary" +)] +pub(crate) struct TmtArgMismatch { + #[primary_span] + #[label("`dyn {$arg_trait}` is not a (transitive) supertrait of `{$trait_name}`")] + pub span: Span, + pub arg_trait: Symbol, + pub trait_name: Symbol, +} diff --git a/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_trait.rs b/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_trait.rs index a498e97403881..ef69b3006d728 100644 --- a/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_trait.rs +++ b/compiler/rustc_hir_analysis/src/hir_ty_lowering/dyn_trait.rs @@ -56,6 +56,31 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { } } + // When lowering `dyn T` during super-predicate computation of trait T + // whose supertraits include `TraitMetadataTable`, elaborating + // T's supertraits, checking dyn-compatibility, or computing object + // lifetime bounds would re-enter `explicit_super_predicates_of(T)` or + // `explicit_implied_predicates_of(T)`, causing query cycles. + // We detect this pattern early and skip only those specific operations. + // The basic lowering (trait ref + projection bindings) is safe. + let skip_elaboration = hir_bounds.first().is_some_and(|principal_hir| { + principal_hir.trait_ref.trait_def_id().is_some_and(|principal_did| { + principal_did == self.item_def_id().to_def_id() + && tcx.def_kind(self.item_def_id()) == DefKind::Trait + && tcx.lang_items().trait_metadata_table_trait().is_some_and(|tmt| { + let node = tcx.hir_node_by_def_id(self.item_def_id()); + matches!( + node, + hir::Node::Item(hir::Item { + kind: hir::ItemKind::Trait(.., bounds, _), .. 
+ }) if bounds.iter().any(|b| { + b.trait_ref().and_then(|tr| tr.trait_def_id()) == Some(tmt) + }) + ) + }) + }) + }); + let mut user_written_bounds = Vec::new(); let mut potential_assoc_items = Vec::new(); for poly_trait_ref in hir_bounds.iter() { @@ -126,19 +151,23 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // Check that there are no gross dyn-compatibility violations; // most importantly, that the supertraits don't contain `Self`, // to avoid ICEs. - for (clause, span) in user_written_bounds { - if let Some(trait_pred) = clause.as_trait_clause() { - let violations = self.dyn_compatibility_violations(trait_pred.def_id()); - if !violations.is_empty() { - let reported = report_dyn_incompatibility( - tcx, - span, - Some(hir_id), - trait_pred.def_id(), - &violations, - ) - .emit(); - return Ty::new_error(tcx, reported); + // Skip when breaking the TraitMetadataTable cycle, since + // dyn_compatibility_violations elaborates supertraits. + if !skip_elaboration { + for (clause, span) in user_written_bounds { + if let Some(trait_pred) = clause.as_trait_clause() { + let violations = self.dyn_compatibility_violations(trait_pred.def_id()); + if !violations.is_empty() { + let reported = report_dyn_incompatibility( + tcx, + span, + Some(hir_id), + trait_pred.def_id(), + &violations, + ) + .emit(); + return Ty::new_error(tcx, reported); + } } } } @@ -204,7 +233,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // We achieve a stable ordering by walking over the unsubstituted principal trait ref. 
let mut ordered_associated_items = vec![]; - if let Some((principal_trait, ref spans)) = principal_trait { + if let Some((principal_trait, ref spans)) = principal_trait + && !skip_elaboration + { let principal_trait = principal_trait.map_bound(|trait_pred| { assert_eq!(trait_pred.polarity, ty::PredicatePolarity::Positive); trait_pred.trait_ref @@ -284,21 +315,43 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { _ => (), } } + } else if skip_elaboration { + // When breaking the TraitMetadataTable cycle, we can't + // elaborate supertraits. Include only the principal trait's own + // associated items so user-written projections are preserved. + if let Some((principal_trait, _)) = &principal_trait { + let principal_def_id = principal_trait.def_id(); + let trait_ref = tcx.anonymize_bound_vars(principal_trait.map_bound(|tp| { + assert_eq!(tp.polarity, ty::PredicatePolarity::Positive); + tp.trait_ref + })); + ordered_associated_items.extend( + tcx.associated_items(principal_def_id) + .in_definition_order() + .filter(|item| item.is_type() || item.is_type_const()) + .filter(|item| !item.is_impl_trait_in_trait()) + .map(|item| (item.def_id, trait_ref)), + ); + } } // Flag assoc item bindings that didn't really need to be specified. - for &(projection_bound, span) in projection_bounds.values() { - let def_id = projection_bound.item_def_id(); - if tcx.generics_require_sized_self(def_id) { - // FIXME(mgca): Ideally we would generalize the name of this lint to sth. like - // `unused_associated_item_bindings` since this can now also trigger on *const* - // projections / assoc *const* bindings. - tcx.emit_node_span_lint( - UNUSED_ASSOCIATED_TYPE_BOUNDS, - hir_id, - span, - crate::errors::UnusedAssociatedTypeBounds { span }, - ); + // Skip when breaking the TraitMetadataTable cycle, since + // generics_require_sized_self needs predicates_of which would cycle. 
+ if !skip_elaboration { + for &(projection_bound, span) in projection_bounds.values() { + let def_id = projection_bound.item_def_id(); + if tcx.generics_require_sized_self(def_id) { + // FIXME(mgca): Ideally we would generalize the name of this lint to sth. like + // `unused_associated_item_bindings` since this can now also trigger on *const* + // projections / assoc *const* bindings. + tcx.emit_node_span_lint( + UNUSED_ASSOCIATED_TYPE_BOUNDS, + hir_id, + span, + crate::errors::UnusedAssociatedTypeBounds { span }, + ); + } } } @@ -320,7 +373,9 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { if let Some(&assoc) = projection_bounds.get(&key) { return Some(assoc); } - if !tcx.generics_require_sized_self(def_id) { + // Skip generics_require_sized_self when breaking the + // TraitMetadataTable cycle to avoid query cycles. + if !skip_elaboration && !tcx.generics_require_sized_self(def_id) { missing_assoc_items.insert(key); } None @@ -445,6 +500,13 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { // Use explicitly-specified region bound, unless the bound is missing. let region_bound = if !lifetime.is_elided() { self.lower_lifetime(lifetime, RegionInferReason::ExplicitObjectLifetime) + } else if skip_elaboration { + // When breaking the `TraitMetadataTable` cycle we must + // avoid `compute_object_lifetime_bound`, which elaborates + // supertraits via `explicit_implied_predicates_of`. Use + // 'static as the default, matching RFC 599 defaults for + // trait object types in supertrait position. + tcx.lifetimes.re_static } else { self.compute_object_lifetime_bound(span, existential_predicates).unwrap_or_else(|| { // Curiously, we prefer object lifetime default for `+ '_`... 
@@ -463,7 +525,14 @@ impl<'tcx> dyn HirTyLowerer<'tcx> + '_ { }; debug!(?region_bound); - Ty::new_dynamic(tcx, existential_predicates, region_bound) + if skip_elaboration { + // When breaking the TraitMetadataTable cycle, bypass + // the debug assertion in Ty::new_dynamic which calls + // elaborate::supertraits and would cycle. + tcx.mk_ty_from_kind(ty::Dynamic(existential_predicates, region_bound)) + } else { + Ty::new_dynamic(tcx, existential_predicates, region_bound) + } } /// Check that elaborating the principal of a trait ref doesn't lead to projections diff --git a/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs b/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs index 23e6b2281b370..ad41d86d29192 100644 --- a/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs +++ b/compiler/rustc_hir_analysis/src/outlives/implicit_infer.rs @@ -125,7 +125,9 @@ fn insert_required_predicates_to_be_wf<'tcx>( // No predicates from lifetimes or constants, except potentially // constants' types, but `walk` will get to them as well. - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue, + GenericArgKind::Lifetime(_) + | GenericArgKind::Const(_) + | GenericArgKind::Outlives(_) => continue, }; match *leaf_ty.kind() { diff --git a/compiler/rustc_hir_analysis/src/outlives/mod.rs b/compiler/rustc_hir_analysis/src/outlives/mod.rs index d155f4f98ad79..d85fe8a36d42e 100644 --- a/compiler/rustc_hir_analysis/src/outlives/mod.rs +++ b/compiler/rustc_hir_analysis/src/outlives/mod.rs @@ -85,6 +85,10 @@ pub(super) fn inferred_outlives_crate(tcx: TyCtxt<'_>, (): ()) -> CratePredicate // Generic consts don't impose any constraints. None } + GenericArgKind::Outlives(_) => { + // Outlives metadata doesn't impose any constraints. 
+ None + } } }, )); diff --git a/compiler/rustc_hir_analysis/src/outlives/utils.rs b/compiler/rustc_hir_analysis/src/outlives/utils.rs index 99a633e2b7d1f..b4d7e3a1ef20a 100644 --- a/compiler/rustc_hir_analysis/src/outlives/utils.rs +++ b/compiler/rustc_hir_analysis/src/outlives/utils.rs @@ -140,6 +140,10 @@ pub(crate) fn insert_outlives_predicate<'tcx>( GenericArgKind::Const(_) => { // Generic consts don't impose any constraints. } + + GenericArgKind::Outlives(_) => { + // Outlives metadata doesn't impose any constraints. + } } } diff --git a/compiler/rustc_hir_analysis/src/variance/constraints.rs b/compiler/rustc_hir_analysis/src/variance/constraints.rs index a45856937a8e0..9519aac3fb3df 100644 --- a/compiler/rustc_hir_analysis/src/variance/constraints.rs +++ b/compiler/rustc_hir_analysis/src/variance/constraints.rs @@ -209,6 +209,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { GenericArgKind::Const(val) => { self.add_constraints_from_const(current, val, variance_i) } + GenericArgKind::Outlives(_) => {} } } } @@ -402,6 +403,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { GenericArgKind::Const(val) => { self.add_constraints_from_const(current, val, variance) } + GenericArgKind::Outlives(_) => {} } } } diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index ce5fcc558e694..23b06e7141356 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -2633,6 +2633,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } GenericArgKind::Type(_) => self.next_ty_var(DUMMY_SP).into(), GenericArgKind::Const(_) => self.next_const_var(DUMMY_SP).into(), + // Outlives args are metadata; pass through unchanged. 
+ GenericArgKind::Outlives(_) => arg, } } else { arg diff --git a/compiler/rustc_index/src/bit_set.rs b/compiler/rustc_index/src/bit_set.rs index 2c5668e22f389..b486aaa302e39 100644 --- a/compiler/rustc_index/src/bit_set.rs +++ b/compiler/rustc_index/src/bit_set.rs @@ -1,3 +1,4 @@ +use std::hash::{Hash, Hasher}; use std::marker::PhantomData; #[cfg(not(feature = "nightly"))] use std::mem; @@ -1530,6 +1531,67 @@ impl BitMatrix { let (start, end) = self.range(row); count_ones(&self.words[start..end]) } + + /// Returns a [`BitRowRef`] for `row` whose `Eq`/`Hash` semantics + /// correctly ignore excess bits in the final word. + pub fn row_ref(&self, row: R) -> BitRowRef<'_> { + assert!(row.index() < self.num_rows); + let (start, end) = self.range(row); + BitRowRef::new(&self.words[start..end], self.num_columns) + } +} + +/// Borrowed view of a single [`BitMatrix`] row. `Eq`/`Hash` ignore +/// excess bits in the final word, so two rows that set the same +/// columns always compare equal regardless of trailing garbage. +#[derive(Clone, Copy, Debug)] +pub struct BitRowRef<'a> { + words: &'a [Word], + /// Bitmask for the final word: only the low `num_columns % 64` + /// bits are significant. `!0` when `num_columns` is a multiple + /// of 64 (no excess bits). 
+ final_mask: Word, +} + +impl<'a> BitRowRef<'a> { + #[inline] + pub fn new(words: &'a [Word], num_columns: usize) -> Self { + let remainder = num_columns % WORD_BITS; + let final_mask = if remainder == 0 { !0 } else { (1u64 << remainder) - 1 }; + BitRowRef { words, final_mask } + } + + #[inline] + fn masked_final_word(&self) -> Word { + self.words.last().map_or(0, |w| w & self.final_mask) + } +} + +impl PartialEq for BitRowRef<'_> { + fn eq(&self, other: &Self) -> bool { + if self.words.len() != other.words.len() || self.final_mask != other.final_mask { + return false; + } + let n = self.words.len(); + if n == 0 { + return true; + } + self.words[..n - 1] == other.words[..n - 1] + && self.masked_final_word() == other.masked_final_word() + } +} + +impl Eq for BitRowRef<'_> {} + +impl Hash for BitRowRef<'_> { + fn hash(&self, state: &mut H) { + let n = self.words.len(); + if n == 0 { + return; + } + self.words[..n - 1].hash(state); + self.masked_final_word().hash(state); + } } impl fmt::Debug for BitMatrix { diff --git a/compiler/rustc_infer/src/infer/canonical/query_response.rs b/compiler/rustc_infer/src/infer/canonical/query_response.rs index 5203b4ee5d99f..04c202f470a09 100644 --- a/compiler/rustc_infer/src/infer/canonical/query_response.rs +++ b/compiler/rustc_infer/src/infer/canonical/query_response.rs @@ -470,6 +470,8 @@ impl<'tcx> InferCtxt<'tcx> { opt_values[b.var] = Some(*original_value); } } + // Outlives args are metadata, not canonical variables. 
+ GenericArgKind::Outlives(_) => {} } } diff --git a/compiler/rustc_infer/src/infer/context.rs b/compiler/rustc_infer/src/infer/context.rs index fada30ff30633..cc055155fccd6 100644 --- a/compiler/rustc_infer/src/infer/context.rs +++ b/compiler/rustc_infer/src/infer/context.rs @@ -95,8 +95,8 @@ impl<'tcx> rustc_type_ir::InferCtxtLike for InferCtxt<'tcx> { fn is_changed_arg(&self, arg: ty::GenericArg<'tcx>) -> bool { match arg.kind() { - ty::GenericArgKind::Lifetime(_) => { - // Lifetimes should not change affect trait selection. + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => { + // Lifetimes/outlives args should not affect trait selection. false } ty::GenericArgKind::Type(ty) => { diff --git a/compiler/rustc_infer/src/infer/mod.rs b/compiler/rustc_infer/src/infer/mod.rs index 26c03066c7e40..bdfbfc38b7b8d 100644 --- a/compiler/rustc_infer/src/infer/mod.rs +++ b/compiler/rustc_infer/src/infer/mod.rs @@ -1533,7 +1533,7 @@ impl<'tcx> TyOrConstInferVar { match arg.kind() { GenericArgKind::Type(ty) => Self::maybe_from_ty(ty), GenericArgKind::Const(ct) => Self::maybe_from_const(ct), - GenericArgKind::Lifetime(_) => None, + GenericArgKind::Lifetime(_) | GenericArgKind::Outlives(_) => None, } } diff --git a/compiler/rustc_infer/src/infer/outlives/obligations.rs b/compiler/rustc_infer/src/infer/outlives/obligations.rs index 28e1b07182457..3ccc95f7b4e71 100644 --- a/compiler/rustc_infer/src/infer/outlives/obligations.rs +++ b/compiler/rustc_infer/src/infer/outlives/obligations.rs @@ -94,7 +94,7 @@ impl<'tcx> InferCtxt<'tcx> { ty::GenericArgKind::Type(ty1) => { self.register_type_outlives_constraint(ty1, r2, cause); } - ty::GenericArgKind::Const(_) => unreachable!(), + ty::GenericArgKind::Const(_) | ty::GenericArgKind::Outlives(_) => unreachable!(), } } @@ -561,8 +561,8 @@ where GenericArgKind::Type(ty) => { self.type_must_outlive(origin.clone(), ty, region, constraint); } - GenericArgKind::Const(_) => { - // Const parameters don't impose constraints. 
+ GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => { + // Const parameters and outlives args don't impose constraints. } } } diff --git a/compiler/rustc_interface/src/tests.rs b/compiler/rustc_interface/src/tests.rs index e54f68b6391e9..5d232ba0a5530 100644 --- a/compiler/rustc_interface/src/tests.rs +++ b/compiler/rustc_interface/src/tests.rs @@ -698,6 +698,11 @@ fn test_unstable_options_tracking_hash() { untracked!(dump_mir_graphviz, true); untracked!(dump_mono_stats, SwitchWithOptPath::Enabled(Some("mono-items-dir/".into()))); untracked!(dump_mono_stats_format, DumpMonoStatsFormat::Json); + untracked!(dump_trait_cast_augmentation, Some("all".to_string())); + untracked!(dump_trait_cast_canonicalization, true); + untracked!(dump_trait_cast_chain_composition, Some("all".to_string())); + untracked!(dump_trait_cast_erasure_safety, Some("all".to_string())); + untracked!(dump_trait_cast_sensitivity, Some("all".to_string())); untracked!(dylib_lto, true); untracked!(emit_stack_sizes, true); untracked!(future_incompat_test, true); @@ -723,6 +728,7 @@ fn test_unstable_options_tracking_hash() { untracked!(print_codegen_stats, true); untracked!(print_llvm_passes, true); untracked!(print_mono_items, true); + untracked!(print_trait_cast_stats, true); untracked!(print_type_sizes, true); untracked!(proc_macro_backtrace, true); untracked!(proc_macro_execution_strategy, ProcMacroExecutionStrategy::CrossThread); @@ -803,6 +809,7 @@ fn test_unstable_options_tracking_hash() { tracked!(force_unstable_if_unmarked, true); tracked!(function_return, FunctionReturn::ThunkExtern); tracked!(function_sections, Some(false)); + tracked!(global_crate, Some(true)); tracked!(hint_mostly_unused, true); tracked!(human_readable_cgu_names, true); tracked!(incremental_ignore_spans, true); diff --git a/compiler/rustc_lint/src/impl_trait_overcaptures.rs b/compiler/rustc_lint/src/impl_trait_overcaptures.rs index 65dfa8b93de78..124e73122ae4f 100644 --- 
a/compiler/rustc_lint/src/impl_trait_overcaptures.rs +++ b/compiler/rustc_lint/src/impl_trait_overcaptures.rs @@ -498,6 +498,9 @@ fn extract_def_id_from_arg<'tcx>( }; generics.const_param(param_ct, tcx).def_id } + ty::GenericArgKind::Outlives(_) => { + bug!("Outlives args have no associated DefId"); + } } } diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index b027872dd99cc..792188c9d2d77 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -139,6 +139,7 @@ declare_lint_pass! { UNUSED_ASSIGNMENTS, UNUSED_ASSOCIATED_TYPE_BOUNDS, UNUSED_ATTRIBUTES, + UNUSED_CAST_TARGET, UNUSED_CRATE_DEPENDENCIES, UNUSED_EXTERN_CRATES, UNUSED_FEATURES, @@ -812,6 +813,57 @@ declare_lint! { "detects attributes that were not used by the compiler" } +declare_lint! { + /// The `unused_cast_target` lint detects `cast!` target traits that + /// have no concrete type in the final binary that implements them. + /// + /// ### Example + /// + /// ```rust,ignore (requires-unstable-trait-cast-in-a-binary-crate) + /// #![feature(trait_cast)] + /// #![deny(unused_cast_target)] + /// + /// use core::marker::TraitMetadataTable; + /// + /// trait Root: TraitMetadataTable {} + /// trait Unused: Root {} + /// + /// struct S; + /// impl Root for S {} + /// + /// fn main() { + /// let s: &dyn Root = &S; + /// // `Unused` has no concrete implementor — this cast always returns `Err`. + /// let _ = core::cast!(in dyn Root, s => dyn Unused); + /// } + /// ``` + /// + /// This will produce: + /// + /// ```text + /// error: cast target `dyn Unused` is unreachable in the trait graph of `dyn Root` + /// | + /// = note: no type implementing `dyn Root` also implements `dyn Unused` + /// = note: this cast will always return `Err` at runtime + /// ``` + /// + /// ### Explanation + /// + /// A cast to a target trait that no concrete type in the final binary + /// implements will always fail at runtime. 
This is typically a sign that + /// the target trait was renamed, removed, or is unreachable in the + /// current build — e.g. the impl lives behind a `cfg` that is not + /// enabled. + /// + /// This lint is emitted during the global codegen phase (which runs + /// only in a binary, staticlib, or cdylib crate) and is therefore + /// `allow` by default: library crates cannot observe it, and the + /// condition depends on the final crate graph. + pub UNUSED_CAST_TARGET, + Allow, + "cast target has no satisfying concrete type in this binary" +} + declare_lint! { /// The `unreachable_code` lint detects unreachable code paths. /// diff --git a/compiler/rustc_metadata/src/rmeta/decoder.rs b/compiler/rustc_metadata/src/rmeta/decoder.rs index 280b3b12e5c85..c6c5c7357b15d 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder.rs @@ -1484,6 +1484,14 @@ impl<'a> CrateMetadataRef<'a> { self.root.foreign_modules.decode((self, tcx)) } + fn get_cast_relevant_lifetimes<'tcx>( + self, + tcx: TyCtxt<'tcx>, + ) -> impl Iterator, rustc_middle::mono::CastRelevantLifetimes<'tcx>)> + { + self.root.cast_relevant_lifetimes.decode((self, tcx)) + } + fn get_dylib_dependency_formats<'tcx>( self, tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs index f9f746188f239..65866d012f730 100644 --- a/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs +++ b/compiler/rustc_metadata/src/rmeta/decoder/cstore_impl.rs @@ -268,6 +268,15 @@ provide! 
{ tcx, def_id, other, cdata, .map(|lazy| lazy.decode((cdata, tcx))) .process_decoded(tcx, || panic!("{def_id:?} does not have coerce_unsized_info"))) } mir_const_qualif => { table } + borrowck_region_summary => { + cdata + .root + .tables + .borrowck_region_summary + .get((cdata, tcx), def_id.index) + .map(|lazy| lazy.decode((cdata, tcx))) + .unwrap_or_default() + } rendered_const => { table } rendered_precise_capturing_args => { table } asyncness => { table_direct } @@ -379,6 +388,14 @@ provide! { tcx, def_id, other, cdata, traits => { tcx.arena.alloc_from_iter(cdata.get_traits(tcx)) } trait_impls_in_crate => { tcx.arena.alloc_from_iter(cdata.get_trait_impls(tcx)) } + delayed_codegen_requests => { + tcx.arena.alloc_from_iter(cdata.root.delayed_codegen_requests.decode((cdata, tcx))) + } + crate_cast_relevant_lifetimes => { + let map = cdata.get_cast_relevant_lifetimes(tcx) + .collect::>(); + tcx.arena.alloc(map) + } implementations_of_trait => { cdata.get_implementations_of_trait(tcx, other) } crate_incoherent_impls => { cdata.get_incoherent_impls(tcx, other) } @@ -418,6 +435,7 @@ provide! { tcx, def_id, other, cdata, } anon_const_kind => { table } const_of_item => { table } + has_trait_cast_intrinsics => { table_direct } } pub(in crate::rmeta) fn provide(providers: &mut Providers) { diff --git a/compiler/rustc_metadata/src/rmeta/encoder.rs b/compiler/rustc_metadata/src/rmeta/encoder.rs index ece9dc52c292c..124ee9d82f67e 100644 --- a/compiler/rustc_metadata/src/rmeta/encoder.rs +++ b/compiler/rustc_metadata/src/rmeta/encoder.rs @@ -601,6 +601,34 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { adapted.encode(&mut self.opaque) } + fn encode_delayed_codegen_requests( + &mut self, + ) -> LazyArray> { + empty_proc_macro!(self); + // These queries trigger monomorphization collection, which requires + // optimized MIR from upstream crates. In check mode (metadata-only), + // upstream crates don't have optimized MIR, so skip encoding. 
+ if !self.tcx.sess.opts.output_types.should_codegen() { + return LazyArray::default(); + } + self.lazy_array(self.tcx.delayed_codegen_requests(LOCAL_CRATE).iter().cloned()) + } + + fn encode_cast_relevant_lifetimes( + &mut self, + ) -> LazyArray<(ty::Instance<'static>, rustc_middle::mono::CastRelevantLifetimes<'static>)> + { + empty_proc_macro!(self); + if !self.tcx.sess.opts.output_types.should_codegen() { + return LazyArray::default(); + } + let parts = self.tcx.collect_and_partition_mono_items(()); + let sorted = self + .tcx + .with_stable_hashing_context(|mut hcx| parts.sensitivity_map.to_sorted(&mut hcx, true)); + self.lazy_array(sorted.into_iter().map(|(inst, crl)| (*inst, crl.clone()))) + } + fn encode_crate_root(&mut self) -> LazyValue { let tcx = self.tcx; let mut stats: Vec<(&'static str, usize)> = Vec::with_capacity(32); @@ -682,6 +710,13 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { // encode_def_path_table. let proc_macro_data = stat!("proc-macro-data", || self.encode_proc_macros()); + // Writes into `tables.delayed_codegen_requests` (keyed by DefId). 
+ let delayed_codegen_requests = + stat!("delayed-codegen-requests", || self.encode_delayed_codegen_requests()); + + let cast_relevant_lifetimes = + stat!("cast-relevant-lifetimes", || self.encode_cast_relevant_lifetimes()); + let tables = stat!("tables", || self.tables.encode(&mut self.opaque)); let debugger_visualizers = @@ -768,6 +803,8 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { stable_order_of_exportable_impls, exported_non_generic_symbols, exported_generic_symbols, + delayed_codegen_requests, + cast_relevant_lifetimes, interpret_alloc_index, tables, syntax_contexts, @@ -1115,11 +1152,26 @@ fn should_encode_mir( DefKind::SyntheticCoroutineBody => (false, true), // Full-fledged functions + closures DefKind::AssocFn | DefKind::Fn | DefKind::Closure => { + // Transitively-delayed trait-cast instances must ship their + // MIR even when the def is non-generic and non-inline — the + // downstream global crate's `cascade_canonicalize` phase + // patches and re-emits their bodies, which requires + // `optimized_mir` to be available from rmeta. The set is + // computed by the mono collector; we only check def-id + // membership here. + // + // The `local_def_ids_backing_delayed_instances` call is + // inlined into the `&&` chain (not pre-computed) so the + // `should_codegen()` gate short-circuits it in + // metadata-only builds (rustdoc), where forcing + // `collect_local_mono_items` is invalid — it demands + // optimized MIR from upstream crates that don't have it. let opt = tcx.sess.opts.unstable_opts.always_encode_mir || (tcx.sess.opts.output_types.should_codegen() && reachable_set.contains(&def_id) && (tcx.generics_of(def_id).requires_monomorphization(tcx) - || tcx.cross_crate_inlinable(def_id))); + || tcx.cross_crate_inlinable(def_id) + || tcx.local_def_ids_backing_delayed_instances(()).contains(&def_id))); // The function has a `const` modifier or is in a `const trait`. 
let is_const_fn = tcx.is_const_fn(def_id.to_def_id()); (is_const_fn, opt) @@ -1828,6 +1880,10 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { self.tables .cross_crate_inlinable .set(def_id.to_def_id().index, self.tcx.cross_crate_inlinable(def_id)); + self.tables.has_trait_cast_intrinsics.set( + def_id.to_def_id().index, + self.tcx.has_trait_cast_intrinsics(def_id.to_def_id()), + ); record!(self.tables.closure_saved_names_of_captured_variables[def_id.to_def_id()] <- tcx.closure_saved_names_of_captured_variables(def_id)); @@ -1874,6 +1930,17 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> { } } + // Encode borrowck region summaries for polymorphic functions that may be + // instantiated cross-crate during mono collection. + for &def_id in tcx.mir_keys(()) { + if tcx.generics_of(def_id).requires_monomorphization(tcx) { + let summary = tcx.borrowck_region_summary(def_id.to_def_id()); + if !summary.call_site_mappings.is_empty() { + record!(self.tables.borrowck_region_summary[def_id.to_def_id()] <- summary); + } + } + } + // Encode all the deduced parameter attributes for everything that has MIR, even for items // that can't be inlined. But don't if we aren't optimizing in non-incremental mode, to // save the query traffic. diff --git a/compiler/rustc_metadata/src/rmeta/mod.rs b/compiler/rustc_metadata/src/rmeta/mod.rs index 781d3c6d18372..151ed38439ea6 100644 --- a/compiler/rustc_metadata/src/rmeta/mod.rs +++ b/compiler/rustc_metadata/src/rmeta/mod.rs @@ -271,6 +271,10 @@ pub(crate) struct CrateRoot { interpret_alloc_index: LazyArray, proc_macro_data: Option, + delayed_codegen_requests: LazyArray>, + cast_relevant_lifetimes: + LazyArray<(ty::Instance<'static>, rustc_middle::mono::CastRelevantLifetimes<'static>)>, + tables: LazyTables, debugger_visualizers: LazyArray, @@ -384,6 +388,7 @@ define_tables! { intrinsic: Table>>, is_macro_rules: Table, type_alias_is_lazy: Table, + has_trait_cast_intrinsics: Table, attr_flags: Table, // The u64 is the crate-local part of the DefPathHash. 
All hashes in this crate have the same // StableCrateId, so we omit encoding those into the table. @@ -446,6 +451,7 @@ define_tables! { // FIXME(eddyb) perhaps compute this on the fly if cheap enough? coerce_unsized_info: Table>, mir_const_qualif: Table>, + borrowck_region_summary: Table>, rendered_const: Table>, rendered_precise_capturing_args: Table>>, fn_arg_idents: Table>>, diff --git a/compiler/rustc_metadata/src/rmeta/parameterized.rs b/compiler/rustc_metadata/src/rmeta/parameterized.rs index 1531584e99788..a472b1ea83c24 100644 --- a/compiler/rustc_metadata/src/rmeta/parameterized.rs +++ b/compiler/rustc_metadata/src/rmeta/parameterized.rs @@ -104,6 +104,7 @@ trivially_parameterized_over_tcx! { rustc_middle::middle::exported_symbols::SymbolExportInfo, rustc_middle::middle::lib_features::FeatureStability, rustc_middle::middle::resolve_bound_vars::ObjectLifetimeDefault, + rustc_middle::mir::BorrowckRegionSummary, rustc_middle::mir::ConstQualifs, rustc_middle::mir::ConstValue, rustc_middle::ty::AnonConstKind, @@ -154,12 +155,18 @@ parameterized_over_tcx! { rustc_middle::mir::Body, rustc_middle::mir::CoroutineLayout, rustc_middle::mir::interpret::ConstAllocation, + rustc_middle::mono::CastRelevantLifetimes, + rustc_middle::mono::DelayedInstance, + rustc_middle::mono::LifetimeBVToParamMapping, rustc_middle::ty::Clause, rustc_middle::ty::Const, rustc_middle::ty::ConstConditions, rustc_middle::ty::FnSig, + rustc_middle::ty::GenericArg, + rustc_middle::ty::GenericArgsRef, rustc_middle::ty::GenericPredicates, rustc_middle::ty::ImplTraitHeader, + rustc_middle::ty::Instance, rustc_middle::ty::TraitRef, rustc_middle::ty::Ty, // tidy-alphabetical-end diff --git a/compiler/rustc_middle/src/arena.rs b/compiler/rustc_middle/src/arena.rs index de6a105ee2b7b..c71f19e8ddaa8 100644 --- a/compiler/rustc_middle/src/arena.rs +++ b/compiler/rustc_middle/src/arena.rs @@ -27,10 +27,12 @@ macro_rules! 
arena_types { rustc_middle::mir::Body<'tcx> >, [decode] typeck_results: rustc_middle::ty::TypeckResults<'tcx>, - [decode] borrowck_result: rustc_data_structures::fx::FxIndexMap< + [] borrowck_result: rustc_middle::mir::BorrowckResult<'tcx>, + [decode] borrowck_hidden_types: rustc_data_structures::fx::FxIndexMap< rustc_hir::def_id::LocalDefId, rustc_middle::ty::DefinitionSiteHiddenType<'tcx>, >, + [decode] borrowck_region_summary: rustc_middle::mir::BorrowckRegionSummary, [] resolver: rustc_data_structures::steal::Steal<( rustc_middle::ty::ResolverAstLowering<'tcx>, std::sync::Arc, @@ -88,6 +90,12 @@ macro_rules! arena_types { [] upvars_mentioned: rustc_data_structures::fx::FxIndexMap, [] dyn_compatibility_violations: rustc_middle::traits::DynCompatibilityViolation, [] codegen_unit: rustc_middle::mono::CodegenUnit<'tcx>, + [] usage_map: rustc_middle::mono::UsageMap<'tcx>, + [] cast_relevant_lifetimes: rustc_middle::mono::CastRelevantLifetimes<'tcx>, + [] cast_relevant_lifetimes_map: rustc_data_structures::unord::UnordMap< + rustc_middle::ty::Instance<'tcx>, + rustc_middle::mono::CastRelevantLifetimes<'tcx>, + >, [decode] attribute: rustc_hir::Attribute, [] name_set: rustc_data_structures::unord::UnordSet, [] autodiff_item: rustc_hir::attrs::AutoDiffItem, diff --git a/compiler/rustc_middle/src/hooks/mod.rs b/compiler/rustc_middle/src/hooks/mod.rs index c70ceef1d47e9..27129f97cb4f6 100644 --- a/compiler/rustc_middle/src/hooks/mod.rs +++ b/compiler/rustc_middle/src/hooks/mod.rs @@ -89,6 +89,12 @@ declare_hooks! { /// of a normal function because external tools might want to override it. hook should_codegen_locally(instance: crate::ty::Instance<'tcx>) -> bool; + /// Returns `true` if codegen of `instance` must be deferred to the global + /// crate's delayed-codegen pass (e.g. trait-cast intrinsic callees whose + /// resolution needs the crate-wide table layout). Hooked so consumers can + /// observe the decision without pulling in the monomorphize crate. 
+ hook must_delay_codegen(instance: crate::ty::Instance<'tcx>) -> bool; + hook alloc_self_profile_query_strings() -> (); /// Saves and writes the DepGraph to the file system. diff --git a/compiler/rustc_middle/src/mir/interpret/allocation.rs b/compiler/rustc_middle/src/mir/interpret/allocation.rs index c0dfc18689ba8..0db72e6c0f6e1 100644 --- a/compiler/rustc_middle/src/mir/interpret/allocation.rs +++ b/compiler/rustc_middle/src/mir/interpret/allocation.rs @@ -112,17 +112,24 @@ pub struct Allocation Encodable for AllocFlags { @@ -141,6 +148,8 @@ impl Encodable for AllocFlags { }; flags |= (self.all_zero as u8) << 7; flags.encode(encoder); + // Encode address_significant as an additional byte. + self.address_significant.encode(encoder); } } @@ -157,8 +166,9 @@ impl Decodable for AllocFlags { _ => Mutability::Mut, }; let all_zero = all_zero > 0; + let address_significant: bool = Decodable::decode(decoder); - AllocFlags { align, mutability, all_zero } + AllocFlags { align, mutability, all_zero, address_significant } } } @@ -193,7 +203,13 @@ where { fn encode(&self, encoder: &mut E) { let all_zero = all_zero(&self.bytes); - AllocFlags { align: self.align, mutability: self.mutability, all_zero }.encode(encoder); + AllocFlags { + align: self.align, + mutability: self.mutability, + all_zero, + address_significant: self.address_significant, + } + .encode(encoder); encoder.emit_usize(self.bytes.len()); if !all_zero { @@ -211,7 +227,8 @@ where Extra: Decodable, { fn decode(decoder: &mut D) -> Self { - let AllocFlags { align, mutability, all_zero } = Decodable::decode(decoder); + let AllocFlags { align, mutability, all_zero, address_significant } = + Decodable::decode(decoder); let len = decoder.read_usize(); let bytes = if all_zero { vec![0u8; len] } else { decoder.read_raw_bytes(len).to_vec() }; @@ -221,7 +238,7 @@ where let init_mask = Decodable::decode(decoder); let extra = Decodable::decode(decoder); - Self { bytes, provenance, init_mask, align, mutability, extra } + Self 
{ bytes, provenance, init_mask, align, mutability, address_significant, extra } } } @@ -248,6 +265,7 @@ impl hash::Hash for Allocation { init_mask, align, mutability, + address_significant, extra: (), // don't bother hashing () } = self; @@ -270,6 +288,7 @@ impl hash::Hash for Allocation { init_mask.hash(state); align.hash(state); mutability.hash(state); + address_significant.hash(state); } } @@ -412,6 +431,7 @@ impl Allocation { init_mask: InitMask::new(size, true), align, mutability, + address_significant: false, extra: (), } } @@ -450,6 +470,7 @@ impl Allocation { ), align, mutability: Mutability::Mut, + address_significant: false, extra: (), }) } @@ -504,6 +525,7 @@ impl Allocation { init_mask: self.init_mask, align: self.align, mutability: self.mutability, + address_significant: self.address_significant, extra, } } @@ -540,6 +562,7 @@ impl Allocation { init_mask: self.init_mask.clone(), align: self.align, mutability: self.mutability, + address_significant: self.address_significant, extra: self.extra, }) } diff --git a/compiler/rustc_middle/src/mir/mod.rs b/compiler/rustc_middle/src/mir/mod.rs index 36752bba9f722..3c4ae888e8453 100644 --- a/compiler/rustc_middle/src/mir/mod.rs +++ b/compiler/rustc_middle/src/mir/mod.rs @@ -310,6 +310,16 @@ pub struct Body<'tcx> { pub tainted_by_errors: Option, + /// Body-local counter for allocating the `u32` component of call-site + /// identifiers. Each `TerminatorKind::Call` and `TerminatorKind::TailCall` + /// carries a `call_id: &'tcx List<(DefId, u32, GenericArgsRef<'tcx>)>` + /// chain. During MIR building, this counter allocates fresh `u32` values + /// for the body's own calls; those are paired with the body's `DefId` and + /// the edge-local callee args to form single-element chains. The counter is + /// preserved across optimizations so that synthetic calls added by later + /// passes get fresh IDs. 
+ pub next_call_id: u32, + /// Coverage information collected from THIR/MIR during MIR building, /// to be used by the `InstrumentCoverage` pass. /// @@ -369,6 +379,7 @@ impl<'tcx> Body<'tcx> { is_polymorphic: false, injection_phase: None, tainted_by_errors, + next_call_id: 0, coverage_info_hi: None, function_coverage_info: None, }; @@ -400,6 +411,7 @@ impl<'tcx> Body<'tcx> { is_polymorphic: false, injection_phase: None, tainted_by_errors: None, + next_call_id: 0, coverage_info_hi: None, function_coverage_info: None, }; @@ -407,6 +419,14 @@ impl<'tcx> Body<'tcx> { body } + /// Allocate a fresh stable call-site identifier for a new `Call` or + /// `TailCall` terminator. + pub fn next_call_id(&mut self) -> u32 { + let id = self.next_call_id; + self.next_call_id += 1; + id + } + #[inline] pub fn basic_blocks_mut(&mut self) -> &mut IndexVec> { self.basic_blocks.as_mut() @@ -1692,11 +1712,11 @@ mod size_asserts { use super::*; // tidy-alphabetical-start - static_assert_size!(BasicBlockData<'_>, 152); + static_assert_size!(BasicBlockData<'_>, 160); static_assert_size!(LocalDecl<'_>, 40); static_assert_size!(SourceScopeData<'_>, 64); static_assert_size!(Statement<'_>, 56); - static_assert_size!(Terminator<'_>, 96); + static_assert_size!(Terminator<'_>, 104); static_assert_size!(VarDebugInfo<'_>, 88); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/mir/pretty.rs b/compiler/rustc_middle/src/mir/pretty.rs index d160aada80a83..c868cc1753ce0 100644 --- a/compiler/rustc_middle/src/mir/pretty.rs +++ b/compiler/rustc_middle/src/mir/pretty.rs @@ -368,6 +368,57 @@ pub fn write_mir_pretty<'tcx>( Ok(()) } +/// If `-Zdump-post-mono-mir` is enabled, write the instance-specific MIR body +/// to `/.post-mono.mir` (or stdout if no path is given). 
+pub fn dump_post_mono_mir<'tcx>( + tcx: TyCtxt<'tcx>, + instance: ty::Instance<'tcx>, + body: &Body<'tcx>, +) { + use rustc_session::config::SwitchWithOptPath; + + let path = match &tcx.sess.opts.unstable_opts.dump_post_mono_mir { + SwitchWithOptPath::Enabled(p) => p.clone(), + SwitchWithOptPath::Disabled => return, + }; + + let writer = MirWriter::new(tcx); + let symbol = tcx.symbol_name(instance); + + // Sanitize symbol name for use as a filename. + let sanitized: String = symbol + .name + .chars() + .map(|c| match c { + '/' | '\\' | '\0' => '_', + c => c, + }) + .collect(); + + let write_body = |w: &mut dyn io::Write| -> io::Result<()> { + writeln!(w, "// post-mono MIR for instance `{instance}`")?; + writeln!(w, "// symbol: {}", symbol.name)?; + writeln!(w)?; + writer.write_mir_fn(body, w) + }; + + if let Some(ref dir) = path { + let file_path = dir.join(format!("{sanitized}.post-mono.mir")); + let _: io::Result<()> = try { + fs::create_dir_all(dir).map_err(|e| { + io::Error::new(e.kind(), format!("creating post-mono MIR dump dir: {dir:?}; {e}")) + })?; + let mut file = fs::File::create_buffered(&file_path).map_err(|e| { + io::Error::new(e.kind(), format!("creating post-mono MIR file: {file_path:?}; {e}")) + })?; + write_body(&mut file)?; + }; + } else { + let mut stdout = io::stdout().lock(); + let _ = write_body(&mut stdout); + } +} + /// Does the writing of MIR to output, e.g. a file. 
pub struct MirWriter<'a, 'tcx> { tcx: TyCtxt<'tcx>, diff --git a/compiler/rustc_middle/src/mir/query.rs b/compiler/rustc_middle/src/mir/query.rs index d7d5d63d45a73..e276e6d515cd7 100644 --- a/compiler/rustc_middle/src/mir/query.rs +++ b/compiler/rustc_middle/src/mir/query.rs @@ -3,7 +3,10 @@ use std::fmt::{self, Debug}; use rustc_abi::{FieldIdx, VariantIdx}; +use rustc_data_structures::fx::FxIndexMap; +use rustc_data_structures::unord::UnordMap; use rustc_errors::ErrorGuaranteed; +use rustc_hir::def_id::LocalDefId; use rustc_index::IndexVec; use rustc_index::bit_set::BitMatrix; use rustc_macros::{HashStable, TyDecodable, TyEncodable, TypeFoldable, TypeVisitable}; @@ -175,3 +178,162 @@ pub struct DestructuredConstant<'tcx> { pub variant: Option, pub fields: &'tcx [(ConstValue, Ty<'tcx>)], } + +/// Projected outlives graph over a subset of SCCs. +/// Only SCCs containing regions involved in call-site mappings are +/// represented; intermediate SCCs are collapsed into direct edges. +#[derive(Clone, Debug, Default, TyEncodable, TyDecodable, HashStable)] +pub struct ProjectedOutlivesGraph { + /// Maps RegionVid index to projected SCC index for each relevant region. + /// Stored as an `UnordMap` for O(1) lookup by `RegionVid`. + pub scc_of: UnordMap, + /// Adjacency list over projected SCCs: `scc_successors[i]` lists the + /// projected SCC indices that SCC `i` outlives. + pub scc_successors: Vec>, +} + +impl ProjectedOutlivesGraph { + /// Return the projected SCC for a RegionVid, if present. + pub fn scc_of_vid(&self, vid: u32) -> Option { + self.scc_of.get(&vid).copied() + } + + /// Check whether `to` is a direct (1-hop) successor of `from` in the + /// projected SCC graph. This captures immediate type-level outlives + /// constraints without transitive closure, which is important for + /// correctly distinguishing "derived from this param" vs "merely + /// outlived by this param". 
+ pub fn scc_directly_reaches(&self, from: u32, to: u32) -> bool { + if from == to { + return true; + } + if let Some(succs) = self.scc_successors.get(from as usize) { + succs.contains(&to) + } else { + false + } + } + + /// Check whether `from` can reach `to` in the SCC successors DAG + /// (i.e., `from` outlives `to`). Uses an iterative depth-first search + /// (explicit stack). + pub fn scc_reaches(&self, from: u32, to: u32) -> bool { + if from == to { + return true; + } + let mut visited = rustc_data_structures::fx::FxHashSet::default(); + let mut stack = vec![from]; + while let Some(current) = stack.pop() { + if current == to { + return true; + } + if !visited.insert(current) { + continue; + } + if let Some(succs) = self.scc_successors.get(current as usize) { + stack.extend(succs.iter().copied()); + } + } + false + } +} + +/// Region mappings for a single call site. +#[derive(Clone, Debug, TyEncodable, TyDecodable, HashStable)] +pub struct CallSiteRegionMapping { + /// Stable call-site identifier from the `call_id` field of + /// `TerminatorKind::Call` / `TerminatorKind::TailCall`. This ID is + /// assigned during MIR construction and is preserved across all MIR + /// optimizations, allowing correlation between the borrowck region + /// summary and the optimized MIR that the monomorphization collector + /// walks. + pub call_id: u32, + /// Map from walk-order index to local RegionVid index. + /// + /// The walk-order index is assigned by a `TypeVisitor` depth-first walk + /// over the callee's generic args, counting every region encountered + /// in visitation order. Stored as an `UnordMap` for O(1) lookup by + /// walk position. + pub region_mappings: UnordMap, +} + +impl CallSiteRegionMapping { + /// Return the local RegionVid stored at a given walk-order index. + pub fn vid_for_walk_pos(&self, walk_pos: u32) -> Option { + self.region_mappings.get(&walk_pos).copied() + } +} + +/// A structural slot within a monomorphized generic-arg list. 
+/// +/// `arg_ordinal` identifies the outer generic argument in the list and +/// `offset_within_arg` identifies the region slot within that argument's +/// TypeVisitor DFS walk. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(TyEncodable, TyDecodable, HashStable)] +pub struct InputSlot { + pub arg_ordinal: u32, + pub offset_within_arg: u32, +} + +/// Provenance for a RegionVid in a borrowck region summary. +/// +/// `Input` means the vid originates from one of the body's instantiated +/// input slots. `LocalOnly` means the vid is body-local and should not be +/// transported across body boundaries. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +#[derive(TyEncodable, TyDecodable, HashStable)] +pub enum VidProvenance { + Static, + Input(InputSlot), + /// The vid is bounded from above by a single non-static universal + /// region (the unsizing edge's "lifetime GCD"). Post-borrowck, the + /// concrete lifetime through the coercion is this universal, even + /// though the NLL constraint graph only records the forward direction + /// (`'universal: vid`) due to dyn-type covariance. + BoundedByUniversal(InputSlot), + LocalOnly, +} + +/// Sentinel param position representing `'static` in +/// [`BorrowckRegionSummary::vid_to_param_pos`]. +pub const STATIC_PARAM_POS: u32 = u32::MAX; + +/// Compact summary of borrowck's solved region constraints. +/// Provides cross-crate access to outlives relationships that +/// only exist transiently during borrowck's region inference. +#[derive(Clone, Debug, Default, TyEncodable, TyDecodable, HashStable)] +pub struct BorrowckRegionSummary { + /// Call-site region mappings: for each call site in the MIR body, + /// records which local RegionVid instantiates each of the callee's + /// generic lifetime parameters. + pub call_site_mappings: UnordMap, + /// Projected outlives graph over the SCCs of regions involved in + /// call-site mappings. 
+ pub outlives_graph: ProjectedOutlivesGraph, + /// Per-vid provenance for relevant regions consumed by the current + /// body. Used by call-chain composition to tell whether a region is + /// input-sourced or genuinely local. + pub vid_provenance: UnordMap, + /// Maps each universal RegionVid to its param position. + /// + /// For `ReEarlyParam`: the param's `index`. + /// For named `ReLateParam`: the RegionVid itself (identity). + /// For `ReStatic`: [`STATIC_PARAM_POS`]. + /// + /// Consumers that need the reverse mapping (param position → RegionVid) + /// can invert this cheaply — it is a small vec (one entry per universal + /// region). + pub vid_to_param_pos: Vec<(u32, u32)>, +} + +/// Combined borrowck result containing both hidden types (for opaque type +/// inference) and region summaries (for cross-function outlives propagation). +/// +/// This is the shared core computation that both `mir_borrowck` and +/// `borrowck_region_summary` delegate to. +#[derive(Debug, HashStable)] +pub struct BorrowckResult<'tcx> { + pub hidden_types: + Result>, ErrorGuaranteed>, + pub region_summaries: FxIndexMap, +} diff --git a/compiler/rustc_middle/src/mir/syntax.rs b/compiler/rustc_middle/src/mir/syntax.rs index 455089f285d17..8a4ce8204585b 100644 --- a/compiler/rustc_middle/src/mir/syntax.rs +++ b/compiler/rustc_middle/src/mir/syntax.rs @@ -828,6 +828,31 @@ pub enum TerminatorKind<'tcx> { /// This `Span` is the span of the function, without the dot and receiver /// e.g. `foo(a, b)` in `x.foo(a, b)` fn_span: Span, + /// Stable call-site identifier chain, recording the full inlining path. 
+ /// + /// Each element is `(DefId, u32, GenericArgsRef<'tcx>)` where the + /// `DefId` identifies the function body in which the call was originally + /// constructed during MIR building, the `u32` is a body-local counter + /// unique among all call/tail-call terminators within that body, and the + /// `GenericArgsRef<'tcx>` stores the callee's edge-local generic-arg + /// template in that source body's own generic space. + /// + /// Before inlining, this is a single-element list + /// `[(origin, id, callee_args)]`. + /// When the inliner merges callee MIR into a caller, it prepends the + /// caller's call-chain to each inlined call's chain, producing a + /// multi-element list that records the full inlining path from outermost + /// caller to innermost call site. + /// + /// The monomorphization collector walks this chain to compose + /// `BorrowckRegionSummary` lookups transitively: each + /// `(DefId, u32, GenericArgsRef<'tcx>)` in the chain identifies which + /// summary to query (by `DefId`), which `CallSiteRegionMapping` to use + /// (by the `u32`), and which edge-local generic args to concretize when + /// transporting walk positions across body boundaries. + #[type_foldable(identity)] + #[type_visitable(ignore)] + call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, }, /// Tail call. @@ -858,6 +883,10 @@ pub enum TerminatorKind<'tcx> { /// This `Span` is the span of the function, without the dot and receiver /// (e.g. `foo(a, b)` in `x.foo(a, b)` fn_span: Span, + /// Stable call-site identifier chain. See [`TerminatorKind::Call::call_id`]. + #[type_foldable(identity)] + #[type_visitable(ignore)] + call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, }, /// Evaluates the operand, which must have type `bool`. 
If it is not equal to `expected`, @@ -1743,6 +1772,6 @@ mod size_asserts { static_assert_size!(PlaceElem<'_>, 24); static_assert_size!(Rvalue<'_>, 40); static_assert_size!(StatementKind<'_>, 16); - static_assert_size!(TerminatorKind<'_>, 80); + static_assert_size!(TerminatorKind<'_>, 88); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/mir/terminator.rs b/compiler/rustc_middle/src/mir/terminator.rs index 7931c80bed6c1..a451473aa9e34 100644 --- a/compiler/rustc_middle/src/mir/terminator.rs +++ b/compiler/rustc_middle/src/mir/terminator.rs @@ -771,6 +771,7 @@ impl<'tcx> TerminatorKind<'tcx> { args: _, fn_span: _, call_source: _, + call_id: _, } => TerminatorEdges::AssignOnReturn { return_: target.as_ref().map(slice::from_ref).unwrap_or_default(), cleanup: unwind.cleanup_block(), diff --git a/compiler/rustc_middle/src/mir/visit.rs b/compiler/rustc_middle/src/mir/visit.rs index 16a8743a6d67b..373e1f9445496 100644 --- a/compiler/rustc_middle/src/mir/visit.rs +++ b/compiler/rustc_middle/src/mir/visit.rs @@ -591,6 +591,7 @@ macro_rules! make_mir_visitor { unwind: _, call_source: _, fn_span, + call_id: _, } => { self.visit_span($(& $mutability)? *fn_span); self.visit_operand(func, location); @@ -604,7 +605,7 @@ macro_rules! make_mir_visitor { ); } - TerminatorKind::TailCall { func, args, fn_span } => { + TerminatorKind::TailCall { func, args, fn_span, call_id: _ } => { self.visit_span($(& $mutability)? 
*fn_span); self.visit_operand(func, location); for arg in args { diff --git a/compiler/rustc_middle/src/mono.rs b/compiler/rustc_middle/src/mono.rs index 3909db8bfce6e..c26d58642fd87 100644 --- a/compiler/rustc_middle/src/mono.rs +++ b/compiler/rustc_middle/src/mono.rs @@ -6,14 +6,14 @@ use rustc_data_structures::base_n::{BaseNString, CASE_INSENSITIVE, ToBaseN}; use rustc_data_structures::fingerprint::Fingerprint; use rustc_data_structures::fx::FxIndexMap; use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey}; -use rustc_data_structures::unord::UnordMap; +use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hashes::Hash128; use rustc_hir::ItemId; use rustc_hir::attrs::{InlineAttr, Linkage}; use rustc_hir::def_id::{CrateNum, DefId, DefIdSet, LOCAL_CRATE}; use rustc_macros::{HashStable, TyDecodable, TyEncodable}; use rustc_session::config::OptLevel; -use rustc_span::{Span, Symbol}; +use rustc_span::{Span, Spanned, Symbol}; use rustc_target::spec::SymbolVisibility; use tracing::debug; @@ -21,7 +21,7 @@ use crate::dep_graph::dep_node::{make_compile_codegen_unit, make_compile_mono_it use crate::dep_graph::{DepNode, WorkProduct, WorkProductId}; use crate::ich::StableHashingContext; use crate::middle::codegen_fn_attrs::CodegenFnAttrFlags; -use crate::ty::{self, GenericArgs, Instance, InstanceKind, SymbolName, Ty, TyCtxt}; +use crate::ty::{self, GenericArgs, Instance, InstanceKind, List, SymbolName, Ty, TyCtxt}; /// Describes how a monomorphization will be instantiated in object files. #[derive(PartialEq)] @@ -51,6 +51,78 @@ pub enum InstantiationMode { #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash, HashStable, TyEncodable, TyDecodable)] pub struct NormalizationErrorInMono; +/// Interned bv_to_param mapping. Index = dyn bound-var index, +/// value = walk-order position of the corresponding region in the +/// function's generic args (None = non-generic / concrete region). 
+/// Interned via `tcx.mk_lifetime_bv_to_param_mapping(iter)`. +/// Pointer-based Hash/Eq via ty::List interning. +#[derive(Clone, Copy, Debug, Hash, PartialEq, Eq, HashStable, TyEncodable, TyDecodable)] +#[repr(transparent)] +pub struct LifetimeBVToParamMapping<'tcx>(pub &'tcx ty::List>); + +impl ToStableHashKey> for LifetimeBVToParamMapping<'_> { + type KeyType = Fingerprint; + + fn to_stable_hash_key(&self, hcx: &mut StableHashingContext<'_>) -> Self::KeyType { + let mut hasher = StableHasher::new(); + self.hash_stable(hcx, &mut hasher); + hasher.finish() + } +} + +/// Per-Instance cast-relevant lifetimes, used for both direct and transitive +/// sensitivity. Records how dyn bound vars map through the call chain to the +/// function's own walk-order positions. Each element is an interned +/// `LifetimeBVToParamMapping` — the lattice domain is the set of distinct +/// interned mappings. +/// +/// Returned by the `cast_relevant_lifetimes` and `crate_cast_relevant_lifetimes` +/// queries. +#[derive(Clone, Debug, TyEncodable, TyDecodable)] +pub struct CastRelevantLifetimes<'tcx> { + pub mappings: UnordSet>, +} + +impl<'tcx> CastRelevantLifetimes<'tcx> { + /// Return the maximum walk-order position referenced across all mappings. + /// Used to size the composed_mapping vector in augment_callee. + pub fn max_walk_order_position(&self) -> usize { + self.mappings + .items() + .flat_map(|m| m.0.iter().filter_map(|b| b)) + .max() + .map(|m| m + 1) + .unwrap_or(0) + } + + /// Construct from a slice of direct mappings (as returned by + /// `derive_direct_sensitivity`). 
+ pub fn from_direct_mappings(mappings: &[LifetimeBVToParamMapping<'tcx>]) -> Self { + CastRelevantLifetimes { mappings: UnordSet::from_iter(mappings.iter().copied()) } + } +} + +impl HashStable> for CastRelevantLifetimes<'_> { + fn hash_stable(&self, hcx: &mut StableHashingContext<'_>, hasher: &mut StableHasher) { + self.mappings.hash_stable(hcx, hasher); + } +} + +/// Return type of the `items_of_instance` query. +#[derive(Clone, Copy, Debug, HashStable, TyEncodable, TyDecodable)] +pub struct ItemsOfInstance<'tcx> { + pub used_items: &'tcx [Spanned>], + pub mentioned_items: &'tcx [Spanned>], + /// For each Call/TailCall terminator, the resolved callee Instance + /// and its call-site identifier chain. + pub call_sites: + &'tcx [(&'tcx List<(DefId, u32, crate::ty::GenericArgsRef<'tcx>)>, Instance<'tcx>)], + /// Direct sensitivity of this Instance, derived from walking the + /// post-inlining MIR for trait-cast intrinsic calls. Each element is an + /// interned `LifetimeBVToParamMapping`. Empty if not sensitive. + pub direct_sensitivity: &'tcx [LifetimeBVToParamMapping<'tcx>], +} + #[derive(PartialEq, Eq, Clone, Copy, Debug, Hash, HashStable, TyEncodable, TyDecodable)] pub enum MonoItem<'tcx> { Fn(Instance<'tcx>), @@ -335,10 +407,137 @@ impl ToStableHashKey> for MonoItem<'_> { } } +impl ToStableHashKey> for crate::ty::Instance<'_> { + type KeyType = Fingerprint; + + fn to_stable_hash_key(&self, hcx: &mut StableHashingContext<'_>) -> Self::KeyType { + let mut hasher = StableHasher::new(); + self.hash_stable(hcx, &mut hasher); + hasher.finish() + } +} + +/// A single delayed codegen request. Covers both directly sensitive +/// Instances (which contain intrinsic calls needing resolution) and +/// transitively sensitive ones (which need MIR patching with canonical +/// callee references). 
+#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash, HashStable, TyEncodable, TyDecodable)] +pub struct DelayedInstance<'tcx> { + pub instance: crate::ty::Instance<'tcx>, + /// Call sites where a sensitive callee must be substituted with + /// its augmented version. Empty for purely directly-sensitive + /// Instances (leaf functions that only contain intrinsic calls). + /// The augmented callees recorded here are pre-canonicalization; + /// the global phase rewrites them through the condensation map + /// before patching MIR. + pub callee_substitutions: &'tcx [( + &'tcx crate::ty::List<(DefId, u32, crate::ty::GenericArgsRef<'tcx>)>, + crate::ty::Instance<'tcx>, + )], + /// All augmented intrinsic callee Instances for this caller. + /// Used by the global phase's condensation pipeline. + pub intrinsic_callees: &'tcx [crate::ty::Instance<'tcx>], +} +impl ToStableHashKey> for DelayedInstance<'_> { + type KeyType = Fingerprint; + + fn to_stable_hash_key(&self, hcx: &mut StableHashingContext<'_>) -> Self::KeyType { + let mut hasher = StableHasher::new(); + self.hash_stable(hcx, &mut hasher); + hasher.finish() + } +} + +/// Result of mono item collection (before global trait-cast resolution +/// and partitioning). Returned by the `collect_local_mono_items` query. +#[derive(Debug, HashStable, Copy, Clone)] +pub struct LocalMonoItemCollection<'tcx> { + pub mono_items: &'tcx [MonoItem<'tcx>], + pub usage_map: &'tcx UsageMap<'tcx>, + pub delayed_codegen: &'tcx [DelayedInstance<'tcx>], + pub sensitivity_map: &'tcx UnordMap, CastRelevantLifetimes<'tcx>>, +} + #[derive(Debug, HashStable, Copy, Clone)] pub struct MonoItemPartitions<'tcx> { pub codegen_units: &'tcx [CodegenUnit<'tcx>], pub all_mono_items: &'tcx DefIdSet, + /// Full delayed codegen data with callee substitution metadata, + /// consumed by the global phase. The `delayed_codegen_requests` + /// query projects bare Instances from this. 
+ pub delayed_codegen: &'tcx [DelayedInstance<'tcx>], + /// Per-Instance cast-relevant lifetimes. The + /// `crate_cast_relevant_lifetimes` query returns this directly. + pub sensitivity_map: &'tcx UnordMap, CastRelevantLifetimes<'tcx>>, +} + +/// Maps mono items to their usage relationships. Tracks which items +/// use which other items, and the reverse mapping. +#[derive(Debug)] +pub struct UsageMap<'tcx> { + /// Maps every mono item to the mono items used by it. + pub used_map: UnordMap, Vec>>, + + /// Maps each mono item with users to the mono items that use it. + /// Subset of `used_map`: unused items are absent. + user_map: UnordMap, Vec>>, +} + +impl<'tcx> UsageMap<'tcx> { + pub fn new() -> UsageMap<'tcx> { + UsageMap { used_map: Default::default(), user_map: Default::default() } + } + + pub fn record_used( + &mut self, + user_item: MonoItem<'tcx>, + used_items: impl Iterator>, + ) { + let used: Vec<_> = used_items.collect(); + for &used_item in &used { + self.user_map.entry(used_item).or_default().push(user_item); + } + assert!(self.used_map.insert(user_item, used).is_none()); + } + + pub fn get_user_items(&self, item: MonoItem<'tcx>) -> &[MonoItem<'tcx>] { + self.user_map.get(&item).map(|items| items.as_slice()).unwrap_or(&[]) + } + + /// Remove a mono item from both the used_map and user_map. + pub fn remove(&mut self, item: MonoItem<'tcx>) { + if let Some(used_items) = self.used_map.remove(&item) { + for used_item in used_items { + if let Some(users) = self.user_map.get_mut(&used_item) { + users.retain(|u| *u != item); + } + } + } + self.user_map.remove(&item); + } + + /// Internally iterate over all inlined items used by `item`. + /// Items not in the usage map (e.g., vtable methods added during + /// trait cast resolution) are treated as having no inlined children. 
+ pub fn for_each_inlined_used_item(&self, tcx: TyCtxt<'tcx>, item: MonoItem<'tcx>, mut f: F) + where + F: FnMut(MonoItem<'tcx>), + { + let Some(used_items) = self.used_map.get(&item) else { return }; + for used_item in used_items.iter() { + let is_inlined = used_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy; + if is_inlined { + f(*used_item); + } + } + } +} + +// Manual HashStable: only hash used_map since user_map is derived from it. +impl<'a, 'tcx> HashStable> for UsageMap<'tcx> { + fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) { + self.used_map.hash_stable(hcx, hasher); + } } #[derive(Debug, HashStable)] diff --git a/compiler/rustc_middle/src/queries.rs b/compiler/rustc_middle/src/queries.rs index 7c6ab642b2736..3b97042a2a65e 100644 --- a/compiler/rustc_middle/src/queries.rs +++ b/compiler/rustc_middle/src/queries.rs @@ -79,7 +79,7 @@ use rustc_session::cstore::{ }; use rustc_session::lint::LintExpectationId; use rustc_span::def_id::LOCAL_CRATE; -use rustc_span::{DUMMY_SP, LocalExpnId, Span, Spanned, Symbol}; +use rustc_span::{DUMMY_SP, LocalExpnId, Span, Symbol}; use rustc_target::spec::PanicStrategy; use crate::hir::Crate; @@ -99,7 +99,8 @@ use crate::mir::interpret::{ EvalToValTreeResult, GlobalId, }; use crate::mono::{ - CodegenUnit, CollectionMode, MonoItem, MonoItemPartitions, NormalizationErrorInMono, + CodegenUnit, CollectionMode, ItemsOfInstance, LocalMonoItemCollection, MonoItemPartitions, + NormalizationErrorInMono, }; use crate::query::describe_as_module; use crate::query::plumbing::{define_callbacks, maybe_into_query_key}; @@ -1243,6 +1244,13 @@ rustc_queries! { desc { "coherence checking all impls of trait `{}`", tcx.def_path_str(def_id) } } + /// Shared core computation for borrow-checking a typeck root. + /// Returns both hidden types and region summaries. + /// Both `mir_borrowck` and `borrowck_region_summary` delegate to this. 
+ query borrowck_result(key: LocalDefId) -> &'tcx mir::BorrowckResult<'tcx> { + desc { "borrow-checking (with region summaries) `{}`", tcx.def_path_str(key) } + } + /// Borrow-checks the given typeck root, e.g. functions, const/static items, /// and its children, e.g. closures, inline consts. query mir_borrowck(key: LocalDefId) -> Result< @@ -1252,6 +1260,16 @@ rustc_queries! { desc { "borrow-checking `{}`", tcx.def_path_str(key) } } + /// Compact summary of borrowck's solved region constraints for a function. + /// Provides cross-crate access to outlives relationships that only exist + /// transiently during borrowck's region inference. + query borrowck_region_summary(key: DefId) -> &'tcx mir::BorrowckRegionSummary { + desc { "computing borrowck region summary for `{}`", tcx.def_path_str(key) } + arena_cache + cache_on_disk + separate_provide_extern + } + /// Gets a complete map from all types to their inherent impls. /// ///
@@ -2395,6 +2413,16 @@ rustc_queries! { separate_provide_extern } + /// Collects mono items for the local crate (including sensitivity + /// analysis and augmentation) but does NOT perform global trait-cast + /// resolution or partitioning. This query exists so that + /// `delayed_codegen_requests` can depend on it without creating a + /// cycle through `collect_and_partition_mono_items`. + query collect_local_mono_items(_: ()) -> LocalMonoItemCollection<'tcx> { + eval_always + desc { "collect_local_mono_items" } + } + query collect_and_partition_mono_items(_: ()) -> MonoItemPartitions<'tcx> { eval_always desc { "collect_and_partition_mono_items" } @@ -2719,7 +2747,7 @@ rustc_queries! { desc { "functions to skip for move-size check" } } - query items_of_instance(key: (ty::Instance<'tcx>, CollectionMode)) -> Result<(&'tcx [Spanned>], &'tcx [Spanned>]), NormalizationErrorInMono> { + query items_of_instance(key: (ty::Instance<'tcx>, CollectionMode)) -> Result, NormalizationErrorInMono> { desc { "collecting items used by `{}`", key.0 } cache_on_disk } @@ -2729,6 +2757,15 @@ rustc_queries! { cache_on_disk } + /// Returns the MIR body to use for codegen of the given Instance. + /// Defaults to `instance_mir`, but may be overridden by the + /// monomorphization collector for outlives-sensitive instances + /// that need patched MIR with augmented callee references. + query codegen_mir(key: ty::Instance<'tcx>) -> &'tcx mir::Body<'tcx> { + desc { "getting codegen MIR for `{}`", key } + feedable + } + query anon_const_kind(def_id: DefId) -> ty::AnonConstKind { desc { "looking up anon const kind of `{}`", tcx.def_path_str(def_id) } separate_provide_extern @@ -2762,6 +2799,170 @@ rustc_queries! 
{ separate_provide_extern } + query has_trait_cast_intrinsics(def_id: DefId) -> bool { + desc { "checking for trait cast intrinsics in `{}`", + tcx.def_path_str(def_id) } + cache_on_disk + separate_provide_extern + } + + /// Per-crate sensitivity map: Instance → cast-relevant lifetime + /// mappings. Local provider proxies `collect_and_partition_mono_items`; + /// extern provider decodes from metadata. + query crate_cast_relevant_lifetimes(key: CrateNum) -> &'tcx UnordMap, crate::mono::CastRelevantLifetimes<'tcx>> { + desc { "computing cast-relevant lifetimes for crate `{}`", tcx.crate_name(key) } + separate_provide_extern + } + + /// Per-Instance cast-relevant lifetimes: the sensitivity mappings + /// recording how dyn bound vars map through call chains. Returns + /// `None` for non-sensitive Instances. Thin lookup into + /// `crate_cast_relevant_lifetimes` for the Instance's defining crate. + query cast_relevant_lifetimes(key: ty::Instance<'tcx>) -> Option<&'tcx crate::mono::CastRelevantLifetimes<'tcx>> { + desc { "looking up cast-relevant lifetimes for `{}`", key } + } + + /// Per-call-site outlives entries derived by composing the `call_id` + /// chain through the caller's outlives environment. + /// + /// MIR-backed callees receive entries in their binder-variable space. + /// MIR-less intrinsic callees may receive transport/origin walk-position + /// entries that the intrinsic resolver must lower into its own native + /// interpretation space before consuming. Returns the Outlives + /// `GenericArg` entries (sentinel-stripped). + query augmented_outlives_for_call( + key: ( + ty::Instance<'tcx>, + &'tcx ty::List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, + ty::Instance<'tcx>, + ) + ) -> &'tcx [ty::GenericArg<'tcx>] { + desc { "computing augmented outlives for call site" } + } + + /// Tracks which MIR bodies contain calls to trait casting intrinsics, + /// signaling that their codegen must be delayed until the global crate. 
+ /// For the local crate, proxies into `collect_and_partition_mono_items`. + /// For upstream crates, decoded from metadata. + query delayed_codegen_requests(key: CrateNum) -> &'tcx [crate::mono::DelayedInstance<'tcx>] { + separate_provide_extern + desc { "tracking MIR bodies for delayed codegen" } + } + + /// Local def-ids that back at least one instance in the local crate's + /// `delayed_codegen_requests`. The encoder's `should_encode_mir` consults + /// this to ensure MIR for transitively-delayed fns (not just direct + /// intrinsic callers) is available to the downstream global crate's + /// cascade-canonicalize phase. + query local_def_ids_backing_delayed_instances(_: ()) -> &'tcx LocalDefIdSet { + desc { "collecting local def-ids backing delayed-codegen instances" } + } + + /// Whether an Instance is transitively reached by a trait-cast intrinsic + /// — i.e. it appears in the `delayed_codegen_requests` set of the local + /// crate or any upstream crate and must be codegen'd by the global + /// crate. Used by the symbol mangler (to drop the instantiating-crate + /// suffix for delayed instances, so upstream vtable refs and downstream + /// bodies share a name) and by the visibility override in + /// `mono_item_visibility`. Compares on the strip-outlives form so that + /// augmented (outlives-annotated) and base instances collapse + /// identically — the v0 mangler's impl-path does not emit Outlives + /// args in the symbol, so both forms share a name. + /// + /// The provider consults `delayed_codegen_stripped_set(cnum)` (a + /// `FxHashSet` precomputed per-crate from + /// `delayed_codegen_requests(cnum)`) so per-call cost is O(crates) + /// rather than O(total-delayed-instances). 
+ query is_transitively_delayed_instance(key: ty::Instance<'tcx>) -> bool { + desc { "checking whether `{}` is a transitively-delayed instance", key } + } + + /// `delayed_codegen_requests` for the local crate and every upstream + /// crate, re-projected as a single flat set of the strip-outlives + /// canonical Instance forms. Used by + /// `is_transitively_delayed_instance` for a single O(1) membership + /// test rather than one per crate. + query delayed_codegen_stripped_set(_: ()) -> &'tcx UnordSet> { + arena_cache + desc { "building global stripped-instance set for all crates' delayed-codegen" } + } + + /// Gathers and classifies all trait-cast intrinsic callees from + /// delayed-codegen Instances into grouped requests by kind. + query gather_trait_cast_requests(_: ()) -> &'tcx ty::trait_cast::TraitCastRequests<'tcx> { + arena_cache + desc { "gathering trait cast requests" } + } + + /// Builds the trait-cast graph for a single root supertrait, + /// partitioning gathered requests into sub-trait/outlives-class + /// mappings and concrete-type sets. + query trait_cast_graph(root: Ty<'tcx>) -> &'tcx ty::trait_cast::TraitGraph<'tcx> { + arena_cache + desc { "building trait cast graph for root supertrait" } + } + + /// Computes the reflexive-transitive closure of outlives relationships + /// over a `dim`-dimensional index space via Floyd-Warshall. + /// Shared across layout, population, and erasure-safe checks. + query outlives_reachability(key: (&'tcx [ty::GenericArg<'tcx>], usize)) -> &'tcx rustc_index::bit_set::BitMatrix { + arena_cache + desc { "computing outlives reachability matrix" } + } + + /// Whether an impl is universally admissible — admissible under every + /// outlives class for every dyn binder structure. Fast-path check for + /// layout condensation. 
+ query impl_universally_admissible(impl_def_id: DefId) -> bool { + desc { "checking universal admissibility of `{}`", tcx.def_path_str(impl_def_id) } + } + + /// Assigns table slot indices for all (sub_trait, outlives_class) pairs + /// in the trait cast graph for a root supertrait. + query trait_cast_layout(root: Ty<'tcx>) -> &'tcx ty::trait_cast::TableLayout<'tcx> { + arena_cache + desc { "computing trait cast table layout for root supertrait" } + } + + /// Populates the trait cast table for a (root supertrait, concrete type) + /// pair. For each table slot, determines whether the concrete type + /// implements the sub-trait under that slot's outlives class, and if so, + /// records the vtable AllocId. + query trait_cast_table(key: (Ty<'tcx>, Ty<'tcx>)) -> &'tcx [Option] { + desc { "populating trait cast table for concrete type" } + } + + /// Returns the AllocId of the metadata table static for a (root + /// supertrait, concrete type) pair. The allocation is an immutable + /// array of pointer-sized entries: vtable pointers for admissible + /// slots, null for non-admissible ones. + query trait_cast_table_alloc(key: (Ty<'tcx>, Ty<'tcx>)) -> mir::interpret::AllocId { + desc { "emitting trait cast metadata table static" } + } + + /// Returns the `AllocId` of a unique per-global-crate `u8` static + /// whose **address** serves as the global crate identifier for + /// cross-crate trait-cast safety checks. The value is unspecified; + /// only the address matters. + query global_crate_id_alloc(_: ()) -> mir::interpret::AllocId { + desc { "creating global crate ID static for trait casting" } + } + + /// Determines whether casting to `target_trait` within the graph + /// rooted at `super_trait` is safe w.r.t. lifetime erasure. + /// + /// Key: (super_trait, target_trait, origin_positions, + /// call_site_outlives). + /// + /// `origin_positions` and `call_site_outlives` are in walk-position + /// space from the CRL composition pipeline. 
`root_transport_slots` + /// is derived from `super_trait` inside the provider. + query is_lifetime_erasure_safe( + key: (Ty<'tcx>, Ty<'tcx>, &'tcx [Option], &'tcx [ty::GenericArg<'tcx>]) + ) -> bool { + desc { "checking lifetime erasure safety for trait cast" } + } + //----------------------------------------------------------------------------- // "Non-queries" are special dep kinds that are not queries. //----------------------------------------------------------------------------- diff --git a/compiler/rustc_middle/src/query/erase.rs b/compiler/rustc_middle/src/query/erase.rs index a6ff238ad6f0b..af96236866dd2 100644 --- a/compiler/rustc_middle/src/query/erase.rs +++ b/compiler/rustc_middle/src/query/erase.rs @@ -13,9 +13,9 @@ use std::mem::MaybeUninit; use rustc_ast::tokenstream::TokenStream; use rustc_data_structures::steal::Steal; use rustc_data_structures::sync::{DynSend, DynSync}; -use rustc_span::{ErrorGuaranteed, Spanned}; +use rustc_span::ErrorGuaranteed; -use crate::mono::{MonoItem, NormalizationErrorInMono}; +use crate::mono::{ItemsOfInstance, NormalizationErrorInMono}; use crate::ty::{self, Ty, TyCtxt}; use crate::{mir, thir, traits}; @@ -135,6 +135,10 @@ impl Erasable for Result<&'_ T, ErrorGuaranteed> { type Storage = [u8; size_of::>()]; } +impl Erasable for Result, NormalizationErrorInMono> { + type Storage = [u8; size_of::, NormalizationErrorInMono>>()]; +} + impl Erasable for Option<&'_ T> { type Storage = [u8; size_of::>()]; } @@ -193,7 +197,6 @@ impl_erasable_for_types_with_no_type_params! { Result<&'_ traits::ImplSource<'_, ()>, traits::CodegenObligationError>, Result<&'_ ty::List>, ty::util::AlwaysRequiresDrop>, Result<(&'_ Steal>, thir::ExprId), ErrorGuaranteed>, - Result<(&'_ [Spanned>], &'_ [Spanned>]), NormalizationErrorInMono>, Result<(), ErrorGuaranteed>, Result>>, ErrorGuaranteed>, Result>, ErrorGuaranteed>, @@ -221,6 +224,7 @@ impl_erasable_for_types_with_no_type_params! 
{ rustc_middle::mir::interpret::AllocId, rustc_middle::mir::interpret::EvalStaticInitializerRawResult<'_>, rustc_middle::mir::interpret::EvalToValTreeResult<'_>, + rustc_middle::mono::LocalMonoItemCollection<'_>, rustc_middle::mono::MonoItemPartitions<'_>, rustc_middle::traits::query::MethodAutoderefStepsResult<'_>, rustc_middle::ty::AdtDef<'_>, diff --git a/compiler/rustc_middle/src/query/keys.rs b/compiler/rustc_middle/src/query/keys.rs index ad101cf34da3b..4ec87c36e35ab 100644 --- a/compiler/rustc_middle/src/query/keys.rs +++ b/compiler/rustc_middle/src/query/keys.rs @@ -361,3 +361,33 @@ impl<'tcx> QueryKey for (ty::Instance<'tcx>, CollectionMode) { self.0.default_span(tcx) } } + +impl<'tcx> QueryKey + for ( + ty::Instance<'tcx>, + &'tcx ty::List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, + ty::Instance<'tcx>, + ) +{ + fn default_span(&self, tcx: TyCtxt<'_>) -> Span { + self.0.default_span(tcx) + } +} + +impl<'tcx> QueryKey for (&'tcx [GenericArg<'tcx>], usize) { + fn default_span(&self, _: TyCtxt<'_>) -> Span { + DUMMY_SP + } +} + +impl<'tcx> QueryKey for (Ty<'tcx>, Ty<'tcx>, &'tcx [GenericArg<'tcx>]) { + fn default_span(&self, _: TyCtxt<'_>) -> Span { + DUMMY_SP + } +} + +impl<'tcx> QueryKey for (Ty<'tcx>, Ty<'tcx>, &'tcx [Option], &'tcx [GenericArg<'tcx>]) { + fn default_span(&self, _: TyCtxt<'_>) -> Span { + DUMMY_SP + } +} diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index df2c8d8b98f97..93457a471ed71 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -772,6 +772,9 @@ impl_ref_decoder! 
{<'tcx> rustc_span::def_id::LocalDefId, (rustc_middle::middle::exported_symbols::ExportedSymbol<'tcx>, rustc_middle::middle::exported_symbols::SymbolExportInfo), rustc_middle::middle::deduced_param_attrs::DeducedParamAttrs, + Option, + (u32, ty::Instance<'tcx>), + rustc_middle::mono::LifetimeBVToParamMapping<'tcx>, } //- ENCODING ------------------------------------------------------------------- diff --git a/compiler/rustc_middle/src/ty/codec.rs b/compiler/rustc_middle/src/ty/codec.rs index 28bdeabf34dc1..49536f831c7c9 100644 --- a/compiler/rustc_middle/src/ty/codec.rs +++ b/compiler/rustc_middle/src/ty/codec.rs @@ -12,7 +12,7 @@ use std::marker::{DiscriminantKind, PointeeSized}; use rustc_abi::FieldIdx; use rustc_data_structures::fx::FxHashMap; -use rustc_hir::def_id::LocalDefId; +use rustc_hir::def_id::{DefId, LocalDefId}; use rustc_middle::ty::Const; use rustc_serialize::{Decodable, Encodable}; use rustc_span::{Span, SpanDecoder, SpanEncoder, Spanned}; @@ -164,6 +164,13 @@ impl<'tcx, E: TyEncoder<'tcx>> Encodable for ty::Region<'tcx> { } } +impl<'tcx, E: TyEncoder<'tcx>> Encodable for ty::OutlivesArg<'tcx> { + fn encode(&self, e: &mut E) { + self.longer().encode(e); + self.shorter().encode(e); + } +} + impl<'tcx, E: TyEncoder<'tcx>> Encodable for ty::Const<'tcx> { fn encode(&self, e: &mut E) { self.0.0.encode(e); @@ -303,6 +310,14 @@ impl<'tcx, D: TyDecoder<'tcx>> Decodable for ty::Region<'tcx> { } } +impl<'tcx, D: TyDecoder<'tcx>> Decodable for ty::OutlivesArg<'tcx> { + fn decode(decoder: &mut D) -> Self { + let longer: usize = Decodable::decode(decoder); + let shorter: usize = Decodable::decode(decoder); + decoder.interner().mk_outlives_arg(longer, shorter) + } +} + impl<'tcx, D: TyDecoder<'tcx>> Decodable for CanonicalVarKinds<'tcx> { fn decode(decoder: &mut D) -> Self { let len = decoder.read_usize(); @@ -367,6 +382,14 @@ impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> ) } } +impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List> { + fn 
decode(decoder: &mut D) -> &'tcx Self { + let len = decoder.read_usize(); + decoder.interner().mk_outlives_from_iter( + (0..len).map::, _>(|_| Decodable::decode(decoder)), + ) + } +} impl<'tcx, D: TyDecoder<'tcx>> Decodable for ty::Const<'tcx> { fn decode(decoder: &mut D) -> Self { @@ -488,6 +511,38 @@ impl<'tcx, D: TyDecoder<'tcx>> Decodable for &'tcx ty::List { } } +impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> + for ty::List<(DefId, u32, GenericArgsRef<'tcx>)> +{ + fn decode(decoder: &mut D) -> &'tcx Self { + let len = decoder.read_usize(); + decoder.interner().mk_call_chain_from_iter( + (0..len).map::<(DefId, u32, GenericArgsRef<'tcx>), _>(|_| Decodable::decode(decoder)), + ) + } +} + +impl<'tcx, D: TyDecoder<'tcx>> Decodable for &'tcx ty::List<(DefId, u32, GenericArgsRef<'tcx>)> { + fn decode(d: &mut D) -> Self { + RefDecodable::decode(d) + } +} + +impl<'tcx, D: TyDecoder<'tcx>> RefDecodable<'tcx, D> for ty::List> { + fn decode(decoder: &mut D) -> &'tcx Self { + let len = decoder.read_usize(); + decoder.interner().mk_lifetime_bv_to_param_mapping_from_iter( + (0..len).map::, _>(|_| Decodable::decode(decoder)), + ) + } +} + +impl<'tcx, D: TyDecoder<'tcx>> Decodable for &'tcx ty::List> { + fn decode(d: &mut D) -> Self { + RefDecodable::decode(d) + } +} + impl_decodable_via_ref! { &'tcx ty::TypeckResults<'tcx>, &'tcx ty::List>, @@ -498,6 +553,7 @@ impl_decodable_via_ref! { &'tcx ty::List>, &'tcx ty::ListWithCachedTypeInfo>, &'tcx ty::List>, + &'tcx ty::List>, } #[macro_export] @@ -569,6 +625,18 @@ impl_arena_copy_decoder! 
{<'tcx> rustc_span::def_id::LocalDefId, (rustc_middle::middle::exported_symbols::ExportedSymbol<'tcx>, rustc_middle::middle::exported_symbols::SymbolExportInfo), rustc_middle::middle::deduced_param_attrs::DeducedParamAttrs, + Option, + ty::Instance<'tcx>, + (u32, ty::Instance<'tcx>), + ( + &'tcx ty::List<( + rustc_span::def_id::DefId, + u32, + rustc_middle::ty::GenericArgsRef<'tcx>, + )>, + ty::Instance<'tcx>, + ), + rustc_middle::mono::LifetimeBVToParamMapping<'tcx>, } #[macro_export] diff --git a/compiler/rustc_middle/src/ty/consts.rs b/compiler/rustc_middle/src/ty/consts.rs index d342e4ba5efae..5eadd4058e35b 100644 --- a/compiler/rustc_middle/src/ty/consts.rs +++ b/compiler/rustc_middle/src/ty/consts.rs @@ -46,6 +46,10 @@ impl<'tcx> rustc_type_ir::Flags for Const<'tcx> { fn outer_exclusive_binder(&self) -> rustc_type_ir::DebruijnIndex { self.0.outer_exclusive_binder } + + fn region_slots(&self) -> u32 { + self.0.region_slots + } } impl<'tcx> Const<'tcx> { diff --git a/compiler/rustc_middle/src/ty/context.rs b/compiler/rustc_middle/src/ty/context.rs index 44cd6499fb088..849c230cb6c49 100644 --- a/compiler/rustc_middle/src/ty/context.rs +++ b/compiler/rustc_middle/src/ty/context.rs @@ -70,10 +70,10 @@ use crate::traits::solve::{ExternalConstraints, ExternalConstraintsData, Predefi use crate::ty::predicate::ExistentialPredicateStableCmpExt as _; use crate::ty::{ self, AdtDef, AdtDefData, AdtKind, Binder, Clause, Clauses, Const, GenericArg, GenericArgs, - GenericArgsRef, GenericParamDefKind, List, ListWithCachedTypeInfo, ParamConst, Pattern, - PatternKind, PolyExistentialPredicate, PolyFnSig, Predicate, PredicateKind, PredicatePolarity, - Region, RegionKind, ReprOptions, TraitObjectVisitor, Ty, TyKind, TyVid, ValTree, ValTreeKind, - Visibility, + GenericArgsRef, GenericParamDefKind, List, ListWithCachedTypeInfo, OutlivesArg, + OutlivesArgData, ParamConst, Pattern, PatternKind, PolyExistentialPredicate, PolyFnSig, + Predicate, PredicateKind, PredicatePolarity, 
Region, RegionKind, ReprOptions, + TraitObjectVisitor, Ty, TyKind, TyVid, ValTree, ValTreeKind, Visibility, }; impl<'tcx> rustc_type_ir::inherent::DefId> for DefId { @@ -204,6 +204,9 @@ pub struct CtxtInterners<'tcx> { valtree: InternedSet<'tcx, ty::ValTreeKind>>, patterns: InternedSet<'tcx, List>>, outlives: InternedSet<'tcx, List>>, + outlives_arg: InternedSet<'tcx, ty::OutlivesArgData>, + call_chains: InternedSet<'tcx, List<(rustc_span::def_id::DefId, u32, GenericArgsRef<'tcx>)>>, + bv_to_param_mappings: InternedSet<'tcx, List>>, } impl<'tcx> CtxtInterners<'tcx> { @@ -241,6 +244,9 @@ impl<'tcx> CtxtInterners<'tcx> { valtree: InternedSet::with_capacity(N), patterns: InternedSet::with_capacity(N), outlives: InternedSet::with_capacity(N), + outlives_arg: InternedSet::with_capacity(N / 4), + call_chains: InternedSet::with_capacity(N), + bv_to_param_mappings: InternedSet::with_capacity(N), } } @@ -253,12 +259,12 @@ impl<'tcx> CtxtInterners<'tcx> { .intern(kind, |kind| { let flags = ty::FlagComputation::>::for_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); - InternedInSet(self.arena.alloc(WithCachedTypeInfo { internee: kind, stable_hash, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, + region_slots: flags.region_slots, })) }) .0, @@ -279,12 +285,12 @@ impl<'tcx> CtxtInterners<'tcx> { .intern(kind, |kind: ty::ConstKind<'_>| { let flags = ty::FlagComputation::>::for_const_kind(&kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); - InternedInSet(self.arena.alloc(WithCachedTypeInfo { internee: kind, stable_hash, flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, + region_slots: flags.region_slots, })) }) .0, @@ -324,12 +330,12 @@ impl<'tcx> CtxtInterners<'tcx> { let flags = ty::FlagComputation::>::for_predicate(kind); let stable_hash = self.stable_hash(&flags, sess, untracked, &kind); - InternedInSet(self.arena.alloc(WithCachedTypeInfo { internee: kind, stable_hash, 
flags: flags.flags, outer_exclusive_binder: flags.outer_exclusive_binder, + region_slots: flags.region_slots, })) }) .0, @@ -690,6 +696,11 @@ impl<'tcx> TyCtxt<'tcx> { TyCtxtFeed { tcx: self, key } } + pub fn feed_codegen_mir(self, instance: ty::Instance<'tcx>, body: &'tcx Body<'tcx>) { + crate::mir::pretty::dump_post_mono_mir(self, instance, body); + TyCtxtFeed { tcx: self, key: instance }.codegen_mir(body) + } + /// In order to break cycles involving `AnonConst`, we need to set the expected type by side /// effect. However, we do not want this as a general capability, so this interface restricts /// to the only allowed case. @@ -1247,6 +1258,18 @@ impl<'tcx> TyCtxt<'tcx> { &self.crate_types } + /// Whether this crate is a "global crate" — the final link product + /// (binary, staticlib, or cdylib) responsible for resolving trait-cast + /// globals. Overridable via `-Z global_crate=yes|no`. + pub fn is_global_crate(self) -> bool { + if let Some(explicit) = self.sess.opts.unstable_opts.global_crate { + return explicit; + } + self.crate_types().iter().any(|ct| { + matches!(ct, CrateType::Executable | CrateType::StaticLib | CrateType::Cdylib) + }) + } + pub fn needs_metadata(self) -> bool { self.crate_types().iter().any(|ty| match *ty { CrateType::Executable @@ -1833,6 +1856,7 @@ nop_lift! { predicate; Predicate<'a> => Predicate<'tcx> } nop_lift! { predicate; Clause<'a> => Clause<'tcx> } nop_lift! { layout; Layout<'a> => Layout<'tcx> } nop_lift! { valtree; ValTree<'a> => ValTree<'tcx> } +nop_lift! { outlives_arg; ty::OutlivesArg<'a> => ty::OutlivesArg<'tcx> } nop_list_lift! { type_lists; Ty<'a> => Ty<'tcx> } nop_list_lift! { @@ -2096,6 +2120,14 @@ direct_interners! 
{ adt_def: pub mk_adt_def_from_data(AdtDefData): AdtDef -> AdtDef<'tcx>, external_constraints: pub mk_external_constraints(ExternalConstraintsData>): ExternalConstraints -> ExternalConstraints<'tcx>, + outlives_arg: pub(crate) intern_outlives_arg(OutlivesArgData): OutlivesArg -> OutlivesArg<'tcx>, +} + +impl<'tcx> TyCtxt<'tcx> { + /// Create an interned `OutlivesArg` from `(longer, shorter)` indices. + pub fn mk_outlives_arg(self, longer: usize, shorter: usize) -> ty::OutlivesArg<'tcx> { + self.intern_outlives_arg(ty::OutlivesArgData { longer, shorter }) + } } macro_rules! slice_interners { @@ -2132,6 +2164,8 @@ slice_interners!( patterns: pub mk_patterns(Pattern<'tcx>), outlives: pub mk_outlives(ty::ArgOutlivesPredicate<'tcx>), predefined_opaques_in_body: pub mk_predefined_opaques_in_body((ty::OpaqueTypeKey<'tcx>, Ty<'tcx>)), + call_chains: pub mk_call_chain((rustc_span::def_id::DefId, u32, GenericArgsRef<'tcx>)), + bv_to_param_mappings: pub mk_lifetime_bv_to_param_mapping(Option), ); impl<'tcx> TyCtxt<'tcx> { @@ -2466,6 +2500,25 @@ impl<'tcx> TyCtxt<'tcx> { T::collect_and_apply(iter, |xs| self.mk_const_list(xs)) } + pub fn mk_call_chain_from_iter(self, iter: I) -> T::Output + where + I: Iterator, + T: CollectAndApply< + (rustc_span::def_id::DefId, u32, GenericArgsRef<'tcx>), + &'tcx List<(rustc_span::def_id::DefId, u32, GenericArgsRef<'tcx>)>, + >, + { + T::collect_and_apply(iter, |xs| self.mk_call_chain(xs)) + } + + pub fn mk_lifetime_bv_to_param_mapping_from_iter(self, iter: I) -> T::Output + where + I: Iterator, + T: CollectAndApply, &'tcx List>>, + { + T::collect_and_apply(iter, |xs| self.mk_lifetime_bv_to_param_mapping(xs)) + } + // Unlike various other `mk_*_from_iter` functions, this one uses `I: // IntoIterator` instead of `I: Iterator`, and it doesn't have a slice // variant, because of the need to combine `inputs` and `output`. 
This diff --git a/compiler/rustc_middle/src/ty/context/impl_interner.rs b/compiler/rustc_middle/src/ty/context/impl_interner.rs index 733985c606e22..62c52b4282069 100644 --- a/compiler/rustc_middle/src/ty/context/impl_interner.rs +++ b/compiler/rustc_middle/src/ty/context/impl_interner.rs @@ -108,6 +108,8 @@ impl<'tcx> Interner for TyCtxt<'tcx> { type EarlyParamRegion = ty::EarlyParamRegion; type LateParamRegion = ty::LateParamRegion; + type OutlivesArg = ty::OutlivesArg<'tcx>; + type RegionAssumptions = &'tcx ty::List>; type ParamEnv = ty::ParamEnv<'tcx>; @@ -817,6 +819,7 @@ bidirectional_lang_item_map! { PointeeSized, PointeeTrait, Sized, + TraitMetadataTable, TransmuteTrait, TrivialClone, Tuple, diff --git a/compiler/rustc_middle/src/ty/generic_args.rs b/compiler/rustc_middle/src/ty/generic_args.rs index daeabf24d749f..e29e07ff77916 100644 --- a/compiler/rustc_middle/src/ty/generic_args.rs +++ b/compiler/rustc_middle/src/ty/generic_args.rs @@ -10,6 +10,7 @@ use rustc_errors::{DiagArgValue, IntoDiagArg}; use rustc_hir::def_id::DefId; use rustc_macros::{HashStable, TyDecodable, TyEncodable, extension}; use rustc_serialize::{Decodable, Encodable}; +pub(super) use rustc_type_ir::OutlivesArgData; use rustc_type_ir::WithCachedTypeInfo; use rustc_type_ir::walk::TypeWalker; use smallvec::SmallVec; @@ -20,22 +21,76 @@ use crate::ty::{ Lift, List, Ty, TyCtxt, TypeFoldable, TypeFolder, TypeVisitable, TypeVisitor, VisitorResult, walk_visitable_list, }; - pub type GenericArgKind<'tcx> = rustc_type_ir::GenericArgKind>; pub type TermKind<'tcx> = rustc_type_ir::TermKind>; +/// Interned outlives argument for trait-cast specialization. +/// Wraps `OutlivesArgData` (two region position indices) in an +/// interned pointer for pointer-based equality and hashing. 
+#[derive(Copy, Clone, PartialEq, Eq, Hash, HashStable)] +pub struct OutlivesArg<'tcx>(pub Interned<'tcx, OutlivesArgData>); + +impl<'tcx> std::fmt::Debug for OutlivesArg<'tcx> { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "Outlives({}, {})", self.0.longer, self.0.shorter) + } +} + +impl<'tcx> OutlivesArg<'tcx> { + pub fn longer(self) -> usize { + self.0.longer + } + + pub fn shorter(self) -> usize { + self.0.shorter + } + + pub fn data(self) -> OutlivesArgData { + *self.0.0 + } +} + +impl<'tcx> rustc_type_ir::inherent::OutlivesArg> for OutlivesArg<'tcx> { + fn new(tcx: TyCtxt<'tcx>, longer: usize, shorter: usize) -> Self { + tcx.mk_outlives_arg(longer, shorter) + } + + fn data(self) -> OutlivesArgData { + self.data() + } +} + +impl<'tcx> TypeVisitable> for OutlivesArg<'tcx> { + fn visit_with>>(&self, _visitor: &mut V) -> V::Result { + V::Result::output() + } +} + +impl<'tcx> TypeFoldable> for OutlivesArg<'tcx> { + fn try_fold_with>>( + self, + _folder: &mut F, + ) -> Result { + Ok(self) + } + + fn fold_with>>(self, _folder: &mut F) -> Self { + self + } +} + /// An entity in the Rust type system, which can be one of -/// several kinds (types, lifetimes, and consts). +/// several kinds (types, lifetimes, consts, or outlives entries). /// To reduce memory usage, a `GenericArg` is an interned pointer, /// with the lowest 2 bits being reserved for a tag to -/// indicate the type (`Ty`, `Region`, or `Const`) it points to. +/// indicate the type (`Ty`, `Region`, `Const`, or `OutlivesArg`) it points to. /// /// Note: the `PartialEq`, `Eq` and `Hash` derives are only valid because `Ty`, -/// `Region` and `Const` are all interned. +/// `Region`, `Const`, and `OutlivesArg` are all interned. 
#[derive(Copy, Clone, PartialEq, Eq, Hash)] pub struct GenericArg<'tcx> { ptr: NonNull<()>, - marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>, ty::Const<'tcx>)>, + marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>, ty::Const<'tcx>, OutlivesArg<'tcx>)>, } impl<'tcx> rustc_type_ir::inherent::GenericArg> for GenericArg<'tcx> {} @@ -162,6 +217,7 @@ const TAG_MASK: usize = 0b11; const TYPE_TAG: usize = 0b00; const REGION_TAG: usize = 0b01; const CONST_TAG: usize = 0b10; +const OUTLIVES_TAG: usize = 0b11; #[extension(trait GenericArgPackExt<'tcx>)] impl<'tcx> GenericArgKind<'tcx> { @@ -183,6 +239,11 @@ impl<'tcx> GenericArgKind<'tcx> { assert_eq!(align_of_val(&*ct.0.0) & TAG_MASK, 0); (CONST_TAG, NonNull::from(ct.0.0).cast()) } + GenericArgKind::Outlives(arg) => { + // Ensure we can use the tag bits. + assert_eq!(align_of_val(&*arg.0.0) & TAG_MASK, 0); + (OUTLIVES_TAG, NonNull::from(arg.0.0).cast()) + } }; GenericArg { ptr: ptr.map_addr(|addr| addr | tag), marker: PhantomData } @@ -219,6 +280,13 @@ impl<'tcx> From> for GenericArg<'tcx> { } } +impl<'tcx> From> for GenericArg<'tcx> { + #[inline] + fn from(arg: OutlivesArg<'tcx>) -> GenericArg<'tcx> { + GenericArgKind::Outlives(arg).pack() + } +} + impl<'tcx> GenericArg<'tcx> { #[inline] pub fn kind(self) -> GenericArgKind<'tcx> { @@ -238,6 +306,9 @@ impl<'tcx> GenericArg<'tcx> { CONST_TAG => GenericArgKind::Const(ty::Const(Interned::new_unchecked( ptr.cast::>>().as_ref(), ))), + OUTLIVES_TAG => GenericArgKind::Outlives(OutlivesArg(Interned::new_unchecked( + ptr.cast::().as_ref(), + ))), _ => intrinsics::unreachable(), } } @@ -270,12 +341,21 @@ impl<'tcx> GenericArg<'tcx> { #[inline] pub fn as_term(self) -> Option> { match self.kind() { - GenericArgKind::Lifetime(_) => None, + GenericArgKind::Lifetime(_) | GenericArgKind::Outlives(..) => None, GenericArgKind::Type(ty) => Some(ty.into()), GenericArgKind::Const(ct) => Some(ct.into()), } } + /// Unpack the `GenericArg` as an `OutlivesArg`. 
+ #[inline] + pub fn as_outlives(self) -> Option> { + match self.kind() { + GenericArgKind::Outlives(arg) => Some(arg), + _ => None, + } + } + /// Unpack the `GenericArg` as a region when it is known certainly to be a region. pub fn expect_region(self) -> ty::Region<'tcx> { self.as_region().unwrap_or_else(|| bug!("expected a region, but found another kind")) @@ -295,7 +375,7 @@ impl<'tcx> GenericArg<'tcx> { pub fn is_non_region_infer(self) -> bool { match self.kind() { - GenericArgKind::Lifetime(_) => false, + GenericArgKind::Lifetime(_) | GenericArgKind::Outlives(..) => false, // FIXME: This shouldn't return numerical/float. GenericArgKind::Type(ty) => ty.is_ty_or_numeric_infer(), GenericArgKind::Const(ct) => ct.is_ct_infer(), @@ -325,6 +405,9 @@ impl<'a, 'tcx> Lift> for GenericArg<'a> { GenericArgKind::Lifetime(lt) => tcx.lift(lt).map(|lt| lt.into()), GenericArgKind::Type(ty) => tcx.lift(ty).map(|ty| ty.into()), GenericArgKind::Const(ct) => tcx.lift(ct).map(|ct| ct.into()), + GenericArgKind::Outlives(arg) => { + Some(tcx.mk_outlives_arg(arg.longer(), arg.shorter()).into()) + } } } } @@ -338,6 +421,7 @@ impl<'tcx> TypeFoldable> for GenericArg<'tcx> { GenericArgKind::Lifetime(lt) => lt.try_fold_with(folder).map(Into::into), GenericArgKind::Type(ty) => ty.try_fold_with(folder).map(Into::into), GenericArgKind::Const(ct) => ct.try_fold_with(folder).map(Into::into), + GenericArgKind::Outlives(..) => Ok(self), } } @@ -346,6 +430,7 @@ impl<'tcx> TypeFoldable> for GenericArg<'tcx> { GenericArgKind::Lifetime(lt) => lt.fold_with(folder).into(), GenericArgKind::Type(ty) => ty.fold_with(folder).into(), GenericArgKind::Const(ct) => ct.fold_with(folder).into(), + GenericArgKind::Outlives(..) => self, } } } @@ -356,6 +441,7 @@ impl<'tcx> TypeVisitable> for GenericArg<'tcx> { GenericArgKind::Lifetime(lt) => lt.visit_with(visitor), GenericArgKind::Type(ty) => ty.visit_with(visitor), GenericArgKind::Const(ct) => ct.visit_with(visitor), + GenericArgKind::Outlives(..) 
=> V::Result::output(), } } } @@ -518,11 +604,11 @@ impl<'tcx> GenericArgs<'tcx> { self.iter().filter_map(|k| k.as_const()) } - /// Returns generic arguments that are not lifetimes. + /// Returns generic arguments that are not lifetimes or outlives entries. #[inline] pub fn non_erasable_generics(&self) -> impl DoubleEndedIterator> { self.iter().filter_map(|arg| match arg.kind() { - ty::GenericArgKind::Lifetime(_) => None, + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(..) => None, generic => Some(generic), }) } diff --git a/compiler/rustc_middle/src/ty/instance.rs b/compiler/rustc_middle/src/ty/instance.rs index 408edf19dbf23..0a3656a3e4a7c 100644 --- a/compiler/rustc_middle/src/ty/instance.rs +++ b/compiler/rustc_middle/src/ty/instance.rs @@ -909,8 +909,79 @@ impl<'tcx> Instance<'tcx> { tcx.try_normalize_erasing_regions(typing_env, v.instantiate_identity()) } } + + // --- Outlives specialization helpers --- + + /// Return a new Instance with the sentinel prepended, then `outlives` + /// entries appended to `self.args`. The entries must be sorted and + /// deduplicated. Always produces a structurally distinct Instance from + /// `self`, even when `outlives` is empty — the sentinel guarantees this. + pub fn with_outlives(self, tcx: TyCtxt<'tcx>, outlives: &[(usize, usize)]) -> Instance<'tcx> { + debug_assert!( + !outlives.contains(&OUTLIVES_SENTINEL), + "sentinel must not appear in caller-supplied outlives list" + ); + let sentinel = + std::iter::once(tcx.mk_outlives_arg(OUTLIVES_SENTINEL.0, OUTLIVES_SENTINEL.1).into()); + let new_args = tcx.mk_args_from_iter( + self.args + .iter() + .chain(sentinel) + .chain(outlives.iter().map(|&(l, s)| tcx.mk_outlives_arg(l, s).into())), + ); + Instance { def: self.def, args: new_args } + } + + /// Return the tail slice of `GenericArg`s that are `Outlives` entries, + /// including the sentinel. This is the structural-layer helper used for + /// Instance identity (hashing, mangling, `Eq`). 
+ pub fn outlives_entries(self) -> &'tcx [ty::GenericArg<'tcx>] { + let start = + self.args.iter().position(|a| matches!(a.kind(), ty::GenericArgKind::Outlives(_))); + match start { + Some(i) => &self.args[i..], + None => &[], + } + } + + /// Iterate the semantic `(longer, shorter)` index pairs from the Outlives + /// tail, skipping the sentinel (always first). Panics via `bug!()` if any + /// entry after the sentinel is not `Outlives`. + pub fn outlives_indices_iter(self) -> impl Iterator + 'tcx { + self.outlives_entries().iter().skip(1).map(|a| match a.kind() { + ty::GenericArgKind::Outlives(o) => (o.longer(), o.shorter()), + _ => bug!("non-Outlives entry in outlives tail"), + }) + } + + /// Returns `true` if the Instance has been augmented (carries at least the + /// sentinel). + pub fn has_outlives_entries(self) -> bool { + !self.outlives_entries().is_empty() + } + + /// Return the base Instance with all Outlives entries (including sentinel) + /// stripped. + pub fn strip_outlives(self, tcx: TyCtxt<'tcx>) -> Instance<'tcx> { + if !self.has_outlives_entries() { + return self; + } + let outlives_len = self + .args + .iter() + .rev() + .take_while(|a| matches!(a.kind(), ty::GenericArgKind::Outlives(..))) + .count(); + let base_args = + tcx.mk_args_from_iter(self.args.iter().take(self.args.len() - outlives_len)); + Instance { def: self.def, args: base_args } + } } +/// Sentinel value prepended to every augmented Instance's Outlives tail. +/// Distinguishes "augmented, empty outlives class" from "un-augmented base". 
+pub const OUTLIVES_SENTINEL: (usize, usize) = (usize::MAX, usize::MAX); + fn needs_fn_once_adapter_shim( actual_closure_kind: ty::ClosureKind, trait_closure_kind: ty::ClosureKind, diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index ed5a48b094f24..10ae801aae305 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -292,6 +292,11 @@ impl ListWithCachedTypeInfo { pub fn outer_exclusive_binder(&self) -> DebruijnIndex { self.skel.header.outer_exclusive_binder } + + #[inline(always)] + pub fn region_slots(&self) -> u32 { + self.skel.header.region_slots + } } impl_list_empty!(TypeInfo, TypeInfo::empty()); @@ -302,11 +307,16 @@ impl_list_empty!(TypeInfo, TypeInfo::empty()); pub struct TypeInfo { flags: TypeFlags, outer_exclusive_binder: DebruijnIndex, + region_slots: u32, } impl TypeInfo { const fn empty() -> Self { - Self { flags: TypeFlags::empty(), outer_exclusive_binder: super::INNERMOST } + Self { + flags: TypeFlags::empty(), + outer_exclusive_binder: super::INNERMOST, + region_slots: 0, + } } } @@ -315,6 +325,7 @@ impl<'tcx> From>> for TypeInfo { TypeInfo { flags: computation.flags, outer_exclusive_binder: computation.outer_exclusive_binder, + region_slots: computation.region_slots, } } } diff --git a/compiler/rustc_middle/src/ty/mod.rs b/compiler/rustc_middle/src/ty/mod.rs index 56a7abfac6635..06d6b1294c156 100644 --- a/compiler/rustc_middle/src/ty/mod.rs +++ b/compiler/rustc_middle/src/ty/mod.rs @@ -143,6 +143,7 @@ pub mod pattern; pub mod print; pub mod relate; pub mod significant_drop_order; +pub mod trait_cast; pub mod trait_def; pub mod util; pub mod vtable; @@ -452,6 +453,10 @@ impl<'tcx> rustc_type_ir::Flags for Ty<'tcx> { fn outer_exclusive_binder(&self) -> DebruijnIndex { self.0.outer_exclusive_binder } + + fn region_slots(&self) -> u32 { + self.0.region_slots + } } /// The crate outlives map is computed during typeck and contains the @@ -913,6 +918,10 @@ impl<'tcx> 
rustc_type_ir::Flags for Clauses<'tcx> { fn outer_exclusive_binder(&self) -> DebruijnIndex { (**self).outer_exclusive_binder() } + + fn region_slots(&self) -> u32 { + (**self).region_slots() + } } /// When interacting with the type system we must provide information about the diff --git a/compiler/rustc_middle/src/ty/pattern.rs b/compiler/rustc_middle/src/ty/pattern.rs index 935e5da68e823..60a65c78915af 100644 --- a/compiler/rustc_middle/src/ty/pattern.rs +++ b/compiler/rustc_middle/src/ty/pattern.rs @@ -47,6 +47,22 @@ impl<'tcx> Flags for Pattern<'tcx> { ty::PatternKind::NotNull => rustc_type_ir::INNERMOST, } } + + fn region_slots(&self) -> u32 { + match &**self { + ty::PatternKind::Range { start, end } => { + start.region_slots().saturating_add(end.region_slots()) + } + ty::PatternKind::Or(pats) => { + let mut n: u32 = 0; + for pat in pats.iter() { + n = n.saturating_add(pat.region_slots()); + } + n + } + ty::PatternKind::NotNull => 0, + } + } } impl<'tcx> std::ops::Deref for Pattern<'tcx> { diff --git a/compiler/rustc_middle/src/ty/predicate.rs b/compiler/rustc_middle/src/ty/predicate.rs index 47175f404cf34..18ebffdc87c2f 100644 --- a/compiler/rustc_middle/src/ty/predicate.rs +++ b/compiler/rustc_middle/src/ty/predicate.rs @@ -69,6 +69,10 @@ impl<'tcx> rustc_type_ir::Flags for Predicate<'tcx> { fn outer_exclusive_binder(&self) -> ty::DebruijnIndex { self.0.outer_exclusive_binder } + + fn region_slots(&self) -> u32 { + self.0.region_slots + } } impl<'tcx> Predicate<'tcx> { @@ -657,6 +661,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(PredicateKind<'_>, 32); - static_assert_size!(WithCachedTypeInfo>, 56); + static_assert_size!(WithCachedTypeInfo>, 64); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/print/pretty.rs b/compiler/rustc_middle/src/ty/print/pretty.rs index d3132d3f65780..54304470d8478 100644 --- a/compiler/rustc_middle/src/ty/print/pretty.rs +++ b/compiler/rustc_middle/src/ty/print/pretty.rs @@ 
-3325,6 +3325,10 @@ define_print! { define_print_and_forward_display! { (self, p): + ty::OutlivesArg<'tcx> { + write!(p, "outlives({}, {})", self.longer(), self.shorter())?; + } + &'tcx ty::List> { write!(p, "{{")?; p.comma_sep(self.iter())?; @@ -3409,6 +3413,7 @@ define_print_and_forward_display! { GenericArgKind::Lifetime(lt) => lt.print(p)?, GenericArgKind::Type(ty) => ty.print(p)?, GenericArgKind::Const(ct) => ct.print(p)?, + GenericArgKind::Outlives(o) => o.print(p)?, } } } diff --git a/compiler/rustc_middle/src/ty/region.rs b/compiler/rustc_middle/src/ty/region.rs index 798b98c5def5c..38623f39b11a2 100644 --- a/compiler/rustc_middle/src/ty/region.rs +++ b/compiler/rustc_middle/src/ty/region.rs @@ -35,6 +35,10 @@ impl<'tcx> rustc_type_ir::Flags for Region<'tcx> { _ => ty::INNERMOST, } } + + fn region_slots(&self) -> u32 { + 1 + } } impl<'tcx> Region<'tcx> { diff --git a/compiler/rustc_middle/src/ty/structural_impls.rs b/compiler/rustc_middle/src/ty/structural_impls.rs index f6d5d226683b3..987b9ab216dfd 100644 --- a/compiler/rustc_middle/src/ty/structural_impls.rs +++ b/compiler/rustc_middle/src/ty/structural_impls.rs @@ -165,6 +165,7 @@ impl<'tcx> fmt::Debug for GenericArg<'tcx> { GenericArgKind::Lifetime(lt) => lt.fmt(f), GenericArgKind::Type(ty) => ty.fmt(f), GenericArgKind::Const(ct) => ct.fmt(f), + GenericArgKind::Outlives(o) => o.fmt(f), } } } diff --git a/compiler/rustc_middle/src/ty/sty.rs b/compiler/rustc_middle/src/ty/sty.rs index 295acc5350831..166fac93ab975 100644 --- a/compiler/rustc_middle/src/ty/sty.rs +++ b/compiler/rustc_middle/src/ty/sty.rs @@ -2173,6 +2173,6 @@ mod size_asserts { use super::*; // tidy-alphabetical-start static_assert_size!(TyKind<'_>, 32); - static_assert_size!(ty::WithCachedTypeInfo>, 56); + static_assert_size!(ty::WithCachedTypeInfo>, 64); // tidy-alphabetical-end } diff --git a/compiler/rustc_middle/src/ty/trait_cast.rs b/compiler/rustc_middle/src/ty/trait_cast.rs new file mode 100644 index 0000000000000..95764d419e78b --- 
/dev/null +++ b/compiler/rustc_middle/src/ty/trait_cast.rs @@ -0,0 +1,464 @@ +use std::borrow::Borrow; +use std::cmp::Ordering; +use std::hash::{Hash, Hasher}; +use std::ops::Deref; + +use rustc_data_structures::fingerprint::Fingerprint; +use rustc_data_structures::fx::FxHashSet; +use rustc_data_structures::stable_hasher::{HashStable, StableCompare, StableHasher, StableOrd}; +use rustc_data_structures::unord::{UnordMap, UnordSet}; +use rustc_macros::HashStable; + +use crate::ich::StableHashingContext; +use crate::mir::interpret::AllocId; +use crate::ty::{self, GenericArg, Instance, Ty, TyCtxt}; + +/// A `Ty<'tcx>` paired with its pre-computed stable `Fingerprint`, +/// enabling deterministic ordering via `StableCompare`. +/// +/// **Identity**: `Eq`/`Hash` delegate to the inner `Ty` (interned +/// pointer identity), so `FingerprintedTy` behaves identically to +/// `Ty` in hash-based collections. Only `StableCompare` uses the +/// fingerprint. +/// +/// **Caching**: `Ty`'s `HashStable` impl short-circuits through +/// `WithCachedTypeInfo::stable_hash` when incremental compilation +/// is enabled (the common case), so construction cost is dominated +/// by hashing a `Fingerprint` (two `u64`s), not traversing the +/// full `TyKind`. In non-incremental builds the full hash is +/// computed once at construction and never recomputed. +/// +/// **Deref**: `Deref>` allows transparent use +/// wherever a `&Ty<'tcx>` is expected (pattern matching, method +/// calls, query arguments). +/// +/// **Borrow**: `Borrow>` enables `UnordMap::get(&plain_ty)` +/// lookups without wrapping the key — safe because `Eq`/`Hash` on +/// `FingerprintedTy` agree with `Eq`/`Hash` on `Ty`. 
+#[derive(Copy, Clone, Debug)] +pub struct FingerprintedTy<'tcx> { + ty: Ty<'tcx>, + fingerprint: Fingerprint, +} + +impl<'tcx> FingerprintedTy<'tcx> { + pub fn new(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> Self { + tcx.with_stable_hashing_context(|mut hcx| { + let mut hasher = StableHasher::new(); + ty.hash_stable(&mut hcx, &mut hasher); + FingerprintedTy { ty, fingerprint: hasher.finish() } + }) + } + + /// Batch-construct from an iterator, sharing a single + /// `StableHashingContext` across all elements. + pub fn from_iter( + tcx: TyCtxt<'tcx>, + iter: impl IntoIterator>, + ) -> UnordSet { + tcx.with_stable_hashing_context(|mut hcx| { + iter.into_iter() + .map(|ty| { + let mut hasher = StableHasher::new(); + ty.hash_stable(&mut hcx, &mut hasher); + FingerprintedTy { ty, fingerprint: hasher.finish() } + }) + .collect() + }) + } + + pub fn ty(&self) -> Ty<'tcx> { + self.ty + } +} + +// --- Identity: delegate to Ty --- + +impl PartialEq for FingerprintedTy<'_> { + fn eq(&self, other: &Self) -> bool { + self.ty == other.ty + } +} +impl Eq for FingerprintedTy<'_> {} + +impl Hash for FingerprintedTy<'_> { + fn hash(&self, state: &mut H) { + self.ty.hash(state); + } +} + +// --- Transparent access --- + +impl<'tcx> Deref for FingerprintedTy<'tcx> { + type Target = Ty<'tcx>; + fn deref(&self) -> &Ty<'tcx> { + &self.ty + } +} + +impl<'tcx> Borrow> for FingerprintedTy<'tcx> { + fn borrow(&self) -> &Ty<'tcx> { + &self.ty + } +} + +// --- Deterministic ordering --- + +impl StableCompare for FingerprintedTy<'_> { + const CAN_USE_UNSTABLE_SORT: bool = true; + + fn stable_cmp(&self, other: &Self) -> Ordering { + self.fingerprint.cmp(&other.fingerprint) + } +} + +// --- HashStable: delegate to inner Ty --- + +impl<'__ctx> HashStable> for FingerprintedTy<'_> { + fn hash_stable(&self, hcx: &mut StableHashingContext<'__ctx>, hasher: &mut StableHasher) { + self.ty.hash_stable(hcx, hasher); + } +} + +/// Classifies an augmented intrinsic Instance by kind. 
+pub enum IntrinsicSiteKind<'tcx> { + Index { + super_trait: Ty<'tcx>, + sub_trait: Ty<'tcx>, + // OutlivesClass derived from Instance's Outlives tail — not stored here. + }, + Table { + super_trait: Ty<'tcx>, + concrete_type: Ty<'tcx>, + }, + TableLen { + super_trait: Ty<'tcx>, + }, + ErasureSafe { + super_trait: Ty<'tcx>, + target_trait: Ty<'tcx>, + }, +} + +/// Groups classified intrinsic Instances by kind. +#[derive(Debug, Default, HashStable)] +pub struct TraitCastRequests<'tcx> { + /// (Super, Sub) → augmented intrinsic Instance. + /// Multiple Instances for same (Super, Sub) with different Outlives. + pub index_requests: Vec>, + + /// (Super, Concrete) → augmented intrinsic Instance. + pub table_requests: Vec>, + + /// Super → augmented intrinsic Instance. + pub table_len_requests: Vec>, + + /// (Super, Tgt) → augmented intrinsic Instance. + pub erasure_safe_requests: Vec>, +} + +#[derive(Debug, HashStable)] +pub struct IndexRequest<'tcx> { + pub instance: Instance<'tcx>, + pub super_trait: Ty<'tcx>, + pub sub_trait: Ty<'tcx>, +} + +#[derive(Debug, HashStable)] +pub struct TableRequest<'tcx> { + pub instance: Instance<'tcx>, + pub super_trait: Ty<'tcx>, + pub concrete_type: Ty<'tcx>, +} + +#[derive(Debug, HashStable)] +pub struct TableLenRequest<'tcx> { + pub instance: Instance<'tcx>, + pub super_trait: Ty<'tcx>, +} + +#[derive(Debug, HashStable)] +pub struct ErasureSafeRequest<'tcx> { + pub instance: Instance<'tcx>, + pub super_trait: Ty<'tcx>, + pub target_trait: Ty<'tcx>, +} + +impl<'tcx> TraitCastRequests<'tcx> { + pub fn is_empty(&self) -> bool { + self.index_requests.is_empty() + && self.table_requests.is_empty() + && self.table_len_requests.is_empty() + && self.erasure_safe_requests.is_empty() + } + + /// Extract distinct root supertraits from all request kinds. 
+    pub fn root_traits(&self) -> UnordSet<Ty<'tcx>> {
+        let mut roots = UnordSet::default();
+        for req in &self.index_requests {
+            roots.insert(req.super_trait);
+        }
+        for req in &self.table_requests {
+            roots.insert(req.super_trait);
+        }
+        for req in &self.table_len_requests {
+            roots.insert(req.super_trait);
+        }
+        roots
+    }
+
+    /// Route a pre-classified intrinsic Instance into the appropriate
+    /// per-intrinsic list.
+    pub fn add(&mut self, site: IntrinsicSiteKind<'tcx>, instance: Instance<'tcx>) {
+        match site {
+            IntrinsicSiteKind::Index { super_trait, sub_trait } => {
+                self.index_requests.push(IndexRequest { instance, super_trait, sub_trait });
+            }
+            IntrinsicSiteKind::Table { super_trait, concrete_type } => {
+                self.table_requests.push(TableRequest { instance, super_trait, concrete_type });
+            }
+            IntrinsicSiteKind::TableLen { super_trait } => {
+                self.table_len_requests.push(TableLenRequest { instance, super_trait });
+            }
+            IntrinsicSiteKind::ErasureSafe { super_trait, target_trait } => {
+                self.erasure_safe_requests.push(ErasureSafeRequest {
+                    instance,
+                    super_trait,
+                    target_trait,
+                });
+            }
+        }
+    }
+}
+
+/// The trait graph for a single root supertrait.
+///
+/// Collects all sub-traits that are cast targets and all concrete types
+/// that participate via `trait_metadata_table` requests. Built by the
+/// `trait_cast_graph` query.
+///
+/// Keys are [`FingerprintedTy`] wrappers around `Ty<'tcx>`, enabling
+/// deterministic materialization via `.into_sorted_stable_ord()`.
+/// Lookups via plain `Ty<'tcx>` work through the `Borrow` impl.
+#[derive(Debug, HashStable)]
+pub struct TraitGraph<'tcx> {
+    /// The root supertrait (e.g., `dyn SuperTrait` or `dyn SuperTrait<T>`).
+    pub root: Ty<'tcx>,
+
+    /// All sub-traits that appear as cast targets in `trait_metadata_index`
+    /// requests. Each with its set of observed outlives classes.
+    /// `UnordMap`: call `.into_sorted_stable_ord()`
+    /// to materialize in deterministic order for index assignment.
+    /// Lookups via plain `Ty<'tcx>` work through the `Borrow` impl.
+    pub sub_traits: UnordMap<FingerprintedTy<'tcx>, SubTraitInfo<'tcx>>,
+
+    /// All concrete types that appear in `trait_metadata_table` requests.
+    /// `UnordSet`: call `.into_sorted_stable_ord()`
+    /// to materialize in deterministic order.
+    pub concrete_types: UnordSet<FingerprintedTy<'tcx>>,
+}
+
+/// Per-sub-trait information within a [`TraitGraph`].
+#[derive(Debug, HashStable)]
+pub struct SubTraitInfo<'tcx> {
+    /// The distinct outlives classes observed from all index requests
+    /// targeting this sub-trait.
+    /// `UnordSet`: must be materialized via `.into_sorted_stable_ord()`
+    /// before use in condensation or index assignment.
+    pub outlives_classes: UnordSet<OutlivesClass<'tcx>>,
+}
+
+/// An outlives class borrows the interned `&'tcx [GenericArg<'tcx>]`
+/// subslice from `Instance::outlives_entries()[1..]` (skipping the
+/// sentinel). The slice is already sorted and deduplicated by
+/// `augment_callee` / `with_outlives`.
+///
+/// Because `GenericArg` is interned (pointer-based `Eq`/`Hash`),
+/// `PartialEq` on the slice is pointer comparison per element —
+/// fast and allocation-free.
+#[derive(Clone, Copy, Debug, HashStable)]
+pub struct OutlivesClass<'tcx> {
+    /// `instance.outlives_entries()[1..]` — the semantic pairs,
+    /// skipping the sentinel at position 0.
+    pub entries: &'tcx [GenericArg<'tcx>],
+}
+
+impl<'tcx> PartialEq for OutlivesClass<'tcx> {
+    fn eq(&self, other: &Self) -> bool {
+        // Fast path: same subslice pointer and length.
+        (std::ptr::eq(self.entries.as_ptr(), other.entries.as_ptr())
+            && self.entries.len() == other.entries.len())
+            // Fallback: element-wise (still pointer comparisons on interned GenericArg).
+            || self.entries == other.entries
+    }
+}
+
+impl<'tcx> Eq for OutlivesClass<'tcx> {}
+
+impl<'tcx> Hash for OutlivesClass<'tcx> {
+    fn hash<H: Hasher>(&self, state: &mut H) {
+        self.entries.hash(state);
+    }
+}
+
+/// `StableOrd` enables deterministic materialization of `UnordSet`
+/// via `into_sorted_stable_ord()`. The ordering compares length first,
+/// then lexicographically on the `(longer, shorter)` pairs — stable
+/// across runs because the indices are
+/// semantic (binder variable positions), not pointer-derived.
+///
+/// Safety: the `Ord` is a valid `StableOrd` because `OutlivesArgData`
+/// fields `longer` and `shorter` are plain `usize` indices into the dyn
+/// type's binder, which are deterministic (TypeVisitor DFS order over
+/// interned, canonicalized existential predicates).
+impl<'tcx> StableOrd for OutlivesClass<'tcx> {
+    const CAN_USE_UNSTABLE_SORT: bool = true;
+    const THIS_IMPLEMENTATION_HAS_BEEN_TRIPLE_CHECKED: () = ();
+}
+
+impl<'tcx> Ord for OutlivesClass<'tcx> {
+    fn cmp(&self, other: &Self) -> Ordering {
+        // Length first, then lexicographic on (longer, shorter) pairs.
+        self.entries.len().cmp(&other.entries.len()).then_with(|| {
+            for (a, b) in self.entries.iter().zip(other.entries.iter()) {
+                let (al, as_) = match a.kind() {
+                    ty::GenericArgKind::Outlives(o) => (o.longer(), o.shorter()),
+                    _ => bug!("non-Outlives entry in OutlivesClass"),
+                };
+                let (bl, bs) = match b.kind() {
+                    ty::GenericArgKind::Outlives(o) => (o.longer(), o.shorter()),
+                    _ => bug!("non-Outlives entry in OutlivesClass"),
+                };
+                let ord = al.cmp(&bl).then(as_.cmp(&bs));
+                if ord != Ordering::Equal {
+                    return ord;
+                }
+            }
+            Ordering::Equal
+        })
+    }
+}
+
+impl<'tcx> PartialOrd for OutlivesClass<'tcx> {
+    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+        Some(self.cmp(other))
+    }
+}
+
+impl<'tcx> OutlivesClass<'tcx> {
+    /// Build from pre-computed per-call-site outlives entries.
+    ///
+    /// The caller is responsible for ensuring the slice is already in
+    /// the native binder-variable space expected by the consumer. For
+    /// MIR-less intrinsic lowerings, that means remapping the transport
+    /// slice returned by `augmented_outlives_for_call` before calling
+    /// this constructor.
+    pub fn from_entries(entries: &'tcx [GenericArg<'tcx>]) -> Self {
+        OutlivesClass { entries }
+    }
+
+    /// Build directly from an augmented Instance's own Outlives entries.
+    /// Callers must ensure the entries are already in the correct index
+    /// space (e.g., single-intrinsic bodies with no inlining); in the
+    /// general case use `from_entries` with an explicit slice resolved
+    /// through `augmented_outlives_for_call`. Panics if the Instance
+    /// has no outlives entries.
+    pub fn from_instance(instance: Instance<'tcx>) -> Self {
+        let all = instance.outlives_entries();
+        debug_assert!(!all.is_empty(), "expected augmented Instance with sentinel");
+        OutlivesClass { entries: &all[1..] }
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.entries.is_empty()
+    }
+
+    pub fn len(&self) -> usize {
+        self.entries.len()
+    }
+
+    /// Iterate the `(longer, shorter)` index pairs.
+    pub fn iter(&self) -> impl Iterator<Item = (usize, usize)> + 'tcx {
+        self.entries.iter().map(|a| match a.kind() {
+            ty::GenericArgKind::Outlives(o) => (o.longer(), o.shorter()),
+            _ => bug!("non-Outlives entry in OutlivesClass"),
+        })
+    }
+
+    /// Returns the `(longer, shorter)` pair at position `idx`.
+    pub fn get(&self, idx: usize) -> (usize, usize) {
+        match self.entries[idx].kind() {
+            ty::GenericArgKind::Outlives(o) => (o.longer(), o.shorter()),
+            _ => bug!("non-Outlives entry in OutlivesClass"),
+        }
+    }
+}
+
+/// Per-root-supertrait table layout: maps (sub_trait, outlives_class) pairs
+/// to flat table slot indices. Produced by the `trait_cast_layout` query.
+#[derive(Debug, HashStable)]
+pub struct TableLayout<'tcx> {
+    /// The root supertrait this layout is for.
+    pub root: Ty<'tcx>,
+    /// Total number of slots in the flat table.
+    pub table_length: usize,
+    /// (sub_trait, outlives_class) → table slot index.
+    pub index_map: UnordMap<(Ty<'tcx>, OutlivesClass<'tcx>), usize>,
+    /// Per-slot metadata needed by the population query.
+    pub slot_info: Vec<SlotInfo<'tcx>>,
+}
+
+impl<'tcx> TableLayout<'tcx> {
+    /// Distinct sub-traits that have at least one slot in the table.
+    pub fn sub_traits(&self) -> impl Iterator<Item = Ty<'tcx>> + '_ {
+        let mut seen = FxHashSet::default();
+        self.slot_info
+            .iter()
+            .filter_map(move |si| seen.insert(si.sub_trait).then_some(si.sub_trait))
+    }
+
+    /// All (slot_index, &SlotInfo) pairs for a given sub-trait.
+    pub fn slots_for_sub_trait(
+        &self,
+        sub_trait: Ty<'tcx>,
+    ) -> impl Iterator<Item = (usize, &SlotInfo<'tcx>)> {
+        self.slot_info.iter().enumerate().filter(move |(_, si)| si.sub_trait == sub_trait)
+    }
+}
+
+/// Per-slot metadata within a [`TableLayout`].
+#[derive(Debug, HashStable)]
+pub struct SlotInfo<'tcx> {
+    /// The sub-trait this slot corresponds to.
+    pub sub_trait: Ty<'tcx>,
+    /// The representative outlives class for this slot.
+    pub outlives_class: OutlivesClass<'tcx>,
+    /// Number of bound variables in the sub-trait's dyn binder.
+    pub num_bvs: usize,
+}
+
+/// Lookup table mapping each table-dependent intrinsic to its resolved
+/// constant value. Built by `build_intrinsic_resolutions` from the query
+/// results of `trait_cast_layout` and `trait_cast_table_alloc`.
+///
+/// Consumed by `cascade_canonicalize` to patch intrinsic calls
+/// in MIR bodies. `trait_cast_is_lifetime_erasure_safe` is **not** stored
+/// here — it is resolved lazily per call site via the
+/// `augmented_outlives_for_call` and `is_lifetime_erasure_safe` queries.
+#[derive(Debug)]
+pub struct IntrinsicResolutions<'tcx> {
+    /// The global crate-id `AllocId`, shared across all index/table resolutions.
+    pub global_crate_id: AllocId,
+    /// `trait_metadata_index` resolved table indices, keyed by
+    /// `(sub_trait_dyn, outlives_class)`. Point-lookup only.
+    pub indices: UnordMap<(Ty<'tcx>, OutlivesClass<'tcx>), usize>,
+    /// `trait_metadata_table` resolved static allocations, keyed by
+    /// `(super_trait_dyn, concrete_ty)`. Point-lookup only.
+    pub tables: UnordMap<(Ty<'tcx>, Ty<'tcx>), AllocId>,
+    /// `trait_metadata_table_len` resolved values, keyed by
+    /// `super_trait_dyn`. Point-lookup only.
+    pub table_lens: UnordMap<Ty<'tcx>, usize>,
+    /// Deduplicated list of all table static `AllocId`s for vtable
+    /// method collection. These are the same values stored in `tables`,
+    /// collected into a `Vec` for deterministic iteration.
+    pub table_alloc_ids: Vec<AllocId>,
+}
diff --git a/compiler/rustc_middle/src/ty/typeck_results.rs b/compiler/rustc_middle/src/ty/typeck_results.rs
index b8399215cf810..7a83bd0514bbe 100644
--- a/compiler/rustc_middle/src/ty/typeck_results.rs
+++ b/compiler/rustc_middle/src/ty/typeck_results.rs
@@ -819,6 +819,8 @@ impl<'tcx> IsIdentity for CanonicalUserType<'tcx> {
                 }
                 _ => false,
             },
+            // Outlives args are metadata, not identity-relevant.
+            GenericArgKind::Outlives(_) => true,
         }
     })
 }
diff --git a/compiler/rustc_middle/src/ty/util.rs b/compiler/rustc_middle/src/ty/util.rs
index 07baf5b49d659..8c57185479695 100644
--- a/compiler/rustc_middle/src/ty/util.rs
+++ b/compiler/rustc_middle/src/ty/util.rs
@@ -550,6 +550,8 @@ impl<'tcx> TyCtxt<'tcx> {
                     // Error: not a const param
                     _ => false,
                 },
+                // Outlives args are metadata, not relevant for drop.
+                GenericArgKind::Outlives(_) => false,
             }
         })
         .map(|(item_param, _)| item_param)
@@ -600,6 +602,8 @@ impl<'tcx> TyCtxt<'tcx> {
                     }
                     _ => return Err(NotUniqueParam::NotParam(c.into())),
                 },
+                // Outlives args are metadata, skip.
+                GenericArgKind::Outlives(_) => {}
             }
         }
diff --git a/compiler/rustc_mir_build/src/builder/custom/mod.rs b/compiler/rustc_mir_build/src/builder/custom/mod.rs
index 1005dd30d73f4..c0ab01f15ca06 100644
--- a/compiler/rustc_mir_build/src/builder/custom/mod.rs
+++ b/compiler/rustc_mir_build/src/builder/custom/mod.rs
@@ -17,6 +17,8 @@
 //! terminators, and everything below can be found in the `parse::instruction` submodule.
 //!
+use std::cell::Cell; + use rustc_data_structures::fx::FxHashMap; use rustc_hir::def_id::DefId; use rustc_hir::{HirId, attrs}; @@ -60,6 +62,7 @@ pub(super) fn build_custom_mir<'tcx>( tainted_by_errors: None, injection_phase: None, pass_count: 0, + next_call_id: 0, coverage_info_hi: None, function_coverage_info: None, }; @@ -83,12 +86,16 @@ pub(super) fn build_custom_mir<'tcx>( body: &mut body, local_map: FxHashMap::default(), block_map: FxHashMap::default(), + next_call_id: Cell::new(0), }; - let res = try { + let res: Result<(), ParseError> = try { pctxt.parse_args(params)?; pctxt.parse_body(expr)?; }; + let final_call_id = pctxt.next_call_id.get(); + drop(pctxt); + body.next_call_id = final_call_id; if let Err(err) = res { tcx.dcx().span_fatal( err.span, @@ -141,6 +148,7 @@ struct ParseCtxt<'a, 'tcx> { body: &'a mut Body<'tcx>, local_map: FxHashMap, block_map: FxHashMap, + next_call_id: Cell, } struct ParseError { @@ -150,6 +158,12 @@ struct ParseError { } impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { + fn next_call_id(&self) -> u32 { + let id = self.next_call_id.get(); + self.next_call_id.set(id + 1); + id + } + fn expr_error(&self, expr: ExprId, expected: &'static str) -> ParseError { let expr = &self.thir[expr]; ParseError { diff --git a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs index a47a2e08c9f4b..e6d0c6dd23614 100644 --- a/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs +++ b/compiler/rustc_mir_build/src/builder/custom/parse/instruction.rs @@ -169,6 +169,10 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { parse_by_kind!(self, call, _, "function call", ExprKind::Call { fun, args, from_hir_call, fn_span, .. 
} => { let fun = self.parse_operand(*fun)?; + let call_id_args = fun + .const_fn_def() + .map(|(_, args)| args) + .unwrap_or_else(|| self.tcx.mk_args(&[])); let args = args .iter() .map(|arg| @@ -185,6 +189,11 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { CallSource::OverloadedOperator }, fn_span: *fn_span, + call_id: self.tcx.mk_call_chain(&[( + self.body.source.def_id(), + self.next_call_id(), + call_id_args, + )]), }) }, ) @@ -194,6 +203,10 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { parse_by_kind!(self, args[0], _, "tail call", ExprKind::Call { fun, args, fn_span, .. } => { let fun = self.parse_operand(*fun)?; + let call_id_args = fun + .const_fn_def() + .map(|(_, args)| args) + .unwrap_or_else(|| self.tcx.mk_args(&[])); let args = args .iter() .map(|arg| @@ -204,6 +217,11 @@ impl<'a, 'tcx> ParseCtxt<'a, 'tcx> { func: fun, args, fn_span: *fn_span, + call_id: self.tcx.mk_call_chain(&[( + self.body.source.def_id(), + self.next_call_id(), + call_id_args, + )]), }) }, ) diff --git a/compiler/rustc_mir_build/src/builder/expr/into.rs b/compiler/rustc_mir_build/src/builder/expr/into.rs index 446b2939e3705..3c2831afb8138 100644 --- a/compiler/rustc_mir_build/src/builder/expr/into.rs +++ b/compiler/rustc_mir_build/src/builder/expr/into.rs @@ -464,6 +464,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("expr_into_dest: fn_span={:?}", fn_span); + let call_id = this.mk_call_id(&fun); this.cfg.terminate( block, source_info, @@ -479,6 +480,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { CallSource::OverloadedOperator }, fn_span, + call_id, }, ); this.diverge_from(block); @@ -510,6 +512,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ref_place, Rvalue::Ref(this.tcx.lifetimes.re_erased, BorrowKind::Shared, place), ); + let call_id = this.mk_call_id(&func); this.cfg.terminate( block, source_info, @@ -522,6 +525,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Unreachable, call_source: CallSource::Use, fn_span: expr_span, + call_id, }, ); success.unit() diff --git 
a/compiler/rustc_mir_build/src/builder/expr/stmt.rs b/compiler/rustc_mir_build/src/builder/expr/stmt.rs index 99e16d182a97d..82d41dd6aa543 100644 --- a/compiler/rustc_mir_build/src/builder/expr/stmt.rs +++ b/compiler/rustc_mir_build/src/builder/expr/stmt.rs @@ -132,10 +132,11 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unpack!(block = this.break_for_tail_call(block, &args, source_info)); + let call_id = this.mk_call_id(&fun); this.cfg.terminate( block, source_info, - TerminatorKind::TailCall { func: fun, args, fn_span }, + TerminatorKind::TailCall { func: fun, args, fn_span, call_id }, ); this.cfg.start_new_block().unit() diff --git a/compiler/rustc_mir_build/src/builder/matches/test.rs b/compiler/rustc_mir_build/src/builder/matches/test.rs index 9b7b6f574fe3f..9b168338f08f9 100644 --- a/compiler/rustc_mir_build/src/builder/matches/test.rs +++ b/compiler/rustc_mir_build/src/builder/matches/test.rs @@ -355,21 +355,21 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { ); // `let temp = ::deref(ref_src);` // or `let temp = ::deref_mut(ref_src);` + let func = + Operand::Constant(Box::new(ConstOperand { span, user_ty: None, const_: method })); + let call_id = self.mk_call_id(&func); self.cfg.terminate( block, source_info, TerminatorKind::Call { - func: Operand::Constant(Box::new(ConstOperand { - span, - user_ty: None, - const_: method, - })), + func, args: [Spanned { node: Operand::Move(ref_src), span }].into(), destination: temp, target: Some(target_block), unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: source_info.span, + call_id, }, ); } @@ -421,21 +421,23 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let bool_ty = self.tcx.types.bool; let eq_result = self.temp(bool_ty, source_info.span); let eq_block = self.cfg.start_new_block(); + let func = Operand::Constant(Box::new(ConstOperand { + span: source_info.span, + + // FIXME(#54571): This constant comes from user input (a + // constant in a pattern). Are there forms where users can add + // type annotations here? 
For example, an associated constant? + // Need to experiment. + user_ty: None, + + const_: method, + })); + let call_id = self.mk_call_id(&func); self.cfg.terminate( block, source_info, TerminatorKind::Call { - func: Operand::Constant(Box::new(ConstOperand { - span: source_info.span, - - // FIXME(#54571): This constant comes from user input (a - // constant in a pattern). Are there forms where users can add - // type annotations here? For example, an associated constant? - // Need to experiment. - user_ty: None, - - const_: method, - })), + func, args: [ Spanned { node: val, span: DUMMY_SP }, Spanned { node: expect, span: DUMMY_SP }, @@ -446,6 +448,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { unwind: UnwindAction::Continue, call_source: CallSource::MatchCmp, fn_span: source_info.span, + call_id, }, ); self.diverge_from(block); diff --git a/compiler/rustc_mir_build/src/builder/mod.rs b/compiler/rustc_mir_build/src/builder/mod.rs index 8e51ab7d4edb1..63a2a7acea706 100644 --- a/compiler/rustc_mir_build/src/builder/mod.rs +++ b/compiler/rustc_mir_build/src/builder/mod.rs @@ -249,6 +249,16 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.infcx.typing_env(self.param_env) } + fn mk_call_id( + &mut self, + func: &Operand<'tcx>, + ) -> &'tcx ty::List<(DefId, u32, ty::GenericArgsRef<'tcx>)> { + let id = self.cfg.next_call_id(); + let callee_args = + func.const_fn_def().map(|(_, args)| args).unwrap_or_else(|| self.tcx.mk_args(&[])); + self.tcx.mk_call_chain(&[(self.def_id.to_def_id(), id, callee_args)]) + } + fn is_bound_var_in_guard(&self, id: LocalVarId) -> bool { self.guard_context.iter().any(|frame| frame.locals.iter().any(|local| local.id == id)) } @@ -391,6 +401,15 @@ impl LocalsForNode { struct CFG<'tcx> { basic_blocks: IndexVec>, + next_call_id: u32, +} + +impl<'tcx> CFG<'tcx> { + fn next_call_id(&mut self) -> u32 { + let id = self.next_call_id; + self.next_call_id += 1; + id + } } rustc_index::newtype_index! 
{ @@ -711,7 +730,7 @@ fn construct_error(tcx: TyCtxt<'_>, def_id: LocalDefId, guar: ErrorGuaranteed) - let local_decls = IndexVec::from_iter( [output].iter().chain(&inputs).map(|ty| LocalDecl::with_source_info(*ty, source_info)), ); - let mut cfg = CFG { basic_blocks: IndexVec::new() }; + let mut cfg = CFG { basic_blocks: IndexVec::new(), next_call_id: 0 }; let mut source_scopes = IndexVec::new(); cfg.start_new_block(); @@ -776,7 +795,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { hir_id, parent_module: tcx.parent_module(hir_id).to_def_id(), check_overflow, - cfg: CFG { basic_blocks: IndexVec::new() }, + cfg: CFG { basic_blocks: IndexVec::new(), next_call_id: 0 }, fn_span: span, arg_count, coroutine, @@ -948,6 +967,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { self.coroutine, None, ); + body.next_call_id = self.cfg.next_call_id; body.coverage_info_hi = self.coverage_info.map(|b| b.into_done()); let writer = pretty::MirWriter::new(self.tcx); diff --git a/compiler/rustc_mir_dataflow/src/framework/tests.rs b/compiler/rustc_mir_dataflow/src/framework/tests.rs index 85f23b8332a1c..4dfa8e1117061 100644 --- a/compiler/rustc_mir_dataflow/src/framework/tests.rs +++ b/compiler/rustc_mir_dataflow/src/framework/tests.rs @@ -16,7 +16,7 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> { let source_info = mir::SourceInfo::outermost(DUMMY_SP); let mut blocks = IndexVec::new(); - let mut block = |n, kind| { + let block = |n, kind, blocks: &mut IndexVec<_, _>| { let nop = mir::Statement::new(source_info, mir::StatementKind::Nop); blocks.push(mir::BasicBlockData::new_stmts( @@ -28,8 +28,8 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> { let dummy_place = mir::Place { local: mir::RETURN_PLACE, projection: ty::List::empty() }; - block(4, mir::TerminatorKind::Return); - block(1, mir::TerminatorKind::Return); + block(4, mir::TerminatorKind::Return, &mut blocks); + block(1, mir::TerminatorKind::Return, &mut blocks); block( 2, mir::TerminatorKind::Call { @@ -40,10 +40,12 @@ fn mock_body<'tcx>() -> 
mir::Body<'tcx> { unwind: mir::UnwindAction::Continue, call_source: mir::CallSource::Misc, fn_span: DUMMY_SP, + call_id: ty::List::empty(), }, + &mut blocks, ); - block(3, mir::TerminatorKind::Return); - block(0, mir::TerminatorKind::Return); + block(3, mir::TerminatorKind::Return, &mut blocks); + block(0, mir::TerminatorKind::Return, &mut blocks); block( 4, mir::TerminatorKind::Call { @@ -54,7 +56,9 @@ fn mock_body<'tcx>() -> mir::Body<'tcx> { unwind: mir::UnwindAction::Continue, call_source: mir::CallSource::Misc, fn_span: DUMMY_SP, + call_id: ty::List::empty(), }, + &mut blocks, ); mir::Body::new_cfg_only(blocks) diff --git a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs index 561836561b692..408f62766125c 100644 --- a/compiler/rustc_mir_dataflow/src/move_paths/builder.rs +++ b/compiler/rustc_mir_dataflow/src/move_paths/builder.rs @@ -481,6 +481,7 @@ impl<'a, 'tcx, F: Fn(Ty<'tcx>) -> bool> MoveDataBuilder<'a, 'tcx, F> { unwind: _, call_source: _, fn_span: _, + call_id: _, } => { self.gather_operand(func); for arg in args { diff --git a/compiler/rustc_mir_transform/src/coroutine.rs b/compiler/rustc_mir_transform/src/coroutine.rs index 652fd00d54d02..7bfc0ae31b12d 100644 --- a/compiler/rustc_mir_transform/src/coroutine.rs +++ b/compiler/rustc_mir_transform/src/coroutine.rs @@ -1778,6 +1778,7 @@ impl<'tcx> Visitor<'tcx> for EnsureCoroutineFieldAssignmentsNeverAlias<'_> { unwind: _, call_source: _, fn_span: _, + call_id: _, } => { self.check_assigned_place(*destination, |this| { this.visit_operand(func, location); diff --git a/compiler/rustc_mir_transform/src/coroutine/drop.rs b/compiler/rustc_mir_transform/src/coroutine/drop.rs index 2699a051a8fea..586c78e681faf 100644 --- a/compiler/rustc_mir_transform/src/coroutine/drop.rs +++ b/compiler/rustc_mir_transform/src/coroutine/drop.rs @@ -61,6 +61,11 @@ fn build_poll_call<'tcx>( unwind, call_source: CallSource::Misc, fn_span: DUMMY_SP, + call_id: 
tcx.mk_call_chain(&[( + body.source.def_id(), + body.next_call_id(), + tcx.mk_args(&[fut_ty.into()]), + )]), }; insert_term_block(body, call) } @@ -102,6 +107,11 @@ fn build_pin_fut<'tcx>( ); // call Pin::new_unchecked(&mut fut) + let call_id = tcx.mk_call_chain(&[( + body.source.def_id(), + body.next_call_id(), + tcx.mk_args(&[fut_ref_ty.into()]), + )]); let pin_fut_bb = body.basic_blocks_mut().push(BasicBlockData::new_stmts( [storage_live, fut_ref_assign].to_vec(), Some(Terminator { @@ -114,6 +124,7 @@ fn build_pin_fut<'tcx>( unwind, call_source: CallSource::Misc, fn_span: span, + call_id, }, }), false, diff --git a/compiler/rustc_mir_transform/src/coverage/tests.rs b/compiler/rustc_mir_transform/src/coverage/tests.rs index b0fc5e90f07bd..505c4fe879df2 100644 --- a/compiler/rustc_mir_transform/src/coverage/tests.rs +++ b/compiler/rustc_mir_transform/src/coverage/tests.rs @@ -138,6 +138,7 @@ impl<'tcx> MockBlocks<'tcx> { unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: DUMMY_SP, + call_id: ty::List::empty(), }, ) } diff --git a/compiler/rustc_mir_transform/src/elaborate_drop.rs b/compiler/rustc_mir_transform/src/elaborate_drop.rs index 7193a0245d121..a00f5a68b60e3 100644 --- a/compiler/rustc_mir_transform/src/elaborate_drop.rs +++ b/compiler/rustc_mir_transform/src/elaborate_drop.rs @@ -375,6 +375,8 @@ where call_statements .push(Statement::new(self.source_info, StatementKind::StorageLive(fut.local))); + let tcx_ = self.tcx(); + let call_id = self.elaborator.patch().next_call_id(tcx_, trait_args); let call_drop_bb = self.new_block_with_statements( unwind, call_statements, @@ -386,6 +388,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: self.source_info.span, + call_id, }, ); // StorageDead(fut) in unwind block (at the begin) @@ -404,6 +407,9 @@ where } // #1:pin_obj_bb >>> call Pin::new_unchecked(&mut obj) + let tcx_ = self.tcx(); + let call_id = + self.elaborator.patch().next_call_id(tcx_, 
tcx_.mk_args(&[obj_ref_ty.into()])); self.elaborator.patch().patch_terminator( pin_obj_bb, TerminatorKind::Call { @@ -414,6 +420,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: span, + call_id, }, ); pin_obj_bb @@ -1012,6 +1019,8 @@ where let ref_place = self.new_temp(ref_ty); let unit_temp = Place::from(self.new_temp(tcx.types.unit)); + let tcx_ = self.tcx(); + let call_id = self.elaborator.patch().next_call_id(tcx_, tcx_.mk_args(&[ty.into()])); let result = BasicBlockData::new_stmts( vec![self.assign( Place::from(ref_place), @@ -1036,6 +1045,7 @@ where unwind: unwind.into_action(), call_source: CallSource::Misc, fn_span: self.source_info.span, + call_id, }, source_info: self.source_info, }), diff --git a/compiler/rustc_mir_transform/src/function_item_references.rs b/compiler/rustc_mir_transform/src/function_item_references.rs index 71c9b79d682df..63cefb55f1394 100644 --- a/compiler/rustc_mir_transform/src/function_item_references.rs +++ b/compiler/rustc_mir_transform/src/function_item_references.rs @@ -36,6 +36,7 @@ impl<'tcx> Visitor<'tcx> for FunctionItemRefChecker<'_, 'tcx> { unwind: _, call_source: _, fn_span: _, + call_id: _, } = &terminator.kind { let source_info = *self.body.source_info(location); diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index db0eb56188255..1b0c2ab72cf53 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -14,7 +14,7 @@ use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrs; use rustc_middle::mir::visit::*; use rustc_middle::mir::*; use rustc_middle::ty::{ - self, Instance, InstanceKind, Ty, TyCtxt, TypeFlags, TypeVisitableExt, Unnormalized, + self, Instance, InstanceKind, List, Ty, TyCtxt, TypeFlags, TypeVisitableExt, Unnormalized, }; use rustc_session::config::{DebugInfo, OptLevel}; use rustc_span::Spanned; @@ -860,7 +860,15 @@ fn inline_call<'tcx, I: Inliner<'tcx>>( ) { let tcx = 
inliner.tcx(); let terminator = caller_body[callsite.block].terminator.take().unwrap(); - let TerminatorKind::Call { func, args, destination, unwind, target, .. } = terminator.kind + let TerminatorKind::Call { + func, + args, + destination, + unwind, + target, + call_id: caller_call_chain, + .. + } = terminator.kind else { bug!("unexpected terminator kind {:?}", terminator.kind); }; @@ -941,6 +949,7 @@ fn inline_call<'tcx, I: Inliner<'tcx>>( return_block, tcx, always_live_locals: UsedInStmtLocals::new(&callee_body).locals, + caller_call_chain, }; // Map all `Local`s, `SourceScope`s and `BasicBlock`s to new ones @@ -1185,6 +1194,10 @@ struct Integrator<'a, 'tcx> { return_block: Option, tcx: TyCtxt<'tcx>, always_live_locals: DenseBitSet, + /// The call-chain of the call site being inlined. When integrating + /// callee MIR, this is prepended to each inlined call's chain to + /// record the full inlining path. + caller_call_chain: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, } impl Integrator<'_, '_> { @@ -1321,11 +1334,17 @@ impl<'tcx> MutVisitor<'tcx> for Integrator<'_, 'tcx> { // check_mir_body forbids tail calls unreachable!() } - TerminatorKind::Call { ref mut target, ref mut unwind, .. } => { + TerminatorKind::Call { ref mut target, ref mut unwind, ref mut call_id, .. } => { if let Some(ref mut tgt) = *target { *tgt = self.map_block(*tgt); } *unwind = self.map_unwind(*unwind); + // Prepend the caller's call chain to record the full inlining path. + let mut chain: smallvec::SmallVec<[(DefId, u32, ty::GenericArgsRef<'tcx>); 4]> = + smallvec::SmallVec::new(); + chain.extend(self.caller_call_chain.iter()); + chain.extend(call_id.iter()); + *call_id = self.tcx.mk_call_chain(&chain); } TerminatorKind::Assert { ref mut target, ref mut unwind, .. 
} => { *target = self.map_block(*target); diff --git a/compiler/rustc_mir_transform/src/lib.rs b/compiler/rustc_mir_transform/src/lib.rs index 9273c2103d442..ce7efeb8fb744 100644 --- a/compiler/rustc_mir_transform/src/lib.rs +++ b/compiler/rustc_mir_transform/src/lib.rs @@ -252,6 +252,7 @@ fn remap_mir_for_const_eval_select<'tcx>( target, unwind, fn_span, + call_id, .. } if let ty::FnDef(def_id, _) = *const_.ty().kind() && tcx.is_intrinsic(def_id, sym::const_eval_select) => @@ -301,6 +302,7 @@ fn remap_mir_for_const_eval_select<'tcx>( unwind, call_source: CallSource::Misc, fn_span, + call_id, }; } _ => {} diff --git a/compiler/rustc_mir_transform/src/patch.rs b/compiler/rustc_mir_transform/src/patch.rs index 015bae56cf57e..0bc035cff69db 100644 --- a/compiler/rustc_mir_transform/src/patch.rs +++ b/compiler/rustc_mir_transform/src/patch.rs @@ -1,7 +1,8 @@ -use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::fx::FxIndexMap; +use rustc_hir::def_id::DefId; use rustc_index::Idx; use rustc_middle::mir::*; -use rustc_middle::ty::Ty; +use rustc_middle::ty::{self, List, Ty, TyCtxt}; use rustc_span::Span; use tracing::debug; @@ -10,7 +11,7 @@ use tracing::debug; /// and replacement of terminators, and then apply the queued changes all at /// once with `apply`. This is useful for MIR transformation passes. pub(crate) struct MirPatch<'tcx> { - term_patch_map: FxHashMap>, + term_patch_map: FxIndexMap>, /// Set of statements that should be replaced by `Nop`. nop_statements: Vec, new_blocks: Vec>, @@ -30,6 +31,13 @@ pub(crate) struct MirPatch<'tcx> { /// The number of blocks at the start of the transformation. New blocks /// get appended at the end. next_block: usize, + /// The DefId of the body being patched, used as the first element of + /// `(DefId, u32, callee_args)` call-site identifiers for new Call/TailCall + /// terminators. 
+ body_def_id: DefId, + /// Counter for allocating unique call-site identifiers for new + /// Call/TailCall terminators created during transformation. + next_call_id: u32, } impl<'tcx> MirPatch<'tcx> { @@ -43,6 +51,8 @@ impl<'tcx> MirPatch<'tcx> { new_locals: vec![], next_local: body.local_decls.len(), next_block: body.basic_blocks.len(), + body_def_id: body.source.def_id(), + next_call_id: body.next_call_id, resume_block: None, unreachable_cleanup_block: None, unreachable_no_cleanup_block: None, @@ -200,6 +210,18 @@ impl<'tcx> MirPatch<'tcx> { self.new_locals[new_local_idx].ty } + /// Allocates a fresh call-site identifier chain for a new Call/TailCall + /// terminator being created during this transformation. + pub(crate) fn next_call_id( + &mut self, + tcx: TyCtxt<'tcx>, + callee_args: ty::GenericArgsRef<'tcx>, + ) -> &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)> { + let id = self.next_call_id; + self.next_call_id += 1; + tcx.mk_call_chain(&[(self.body_def_id, id, callee_args)]) + } + /// Queues the addition of a new basic block. pub(crate) fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { let block = BasicBlock::from_usize(self.next_block + self.new_blocks.len()); @@ -268,6 +290,7 @@ impl<'tcx> MirPatch<'tcx> { }; bbs.extend(self.new_blocks); body.local_decls.extend(self.new_locals); + body.next_call_id = self.next_call_id; for loc in self.nop_statements { bbs[loc.block].statements[loc.statement_index].make_nop(true); @@ -295,8 +318,6 @@ impl<'tcx> MirPatch<'tcx> { delta += 1; } - // The order in which we patch terminators does not change the result. 
- #[allow(rustc::potential_query_instability)] for (src, patch) in self.term_patch_map { debug!("MirPatch: patching block {:?}", src); let bb = &mut bbs[src]; diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index a47b3ce64ed23..b18e4ddc8735e 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -825,7 +825,12 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { match terminator.kind { TerminatorKind::Call { - mut func, mut args, call_source: desugar, fn_span, .. + mut func, + mut args, + call_source: desugar, + fn_span, + call_id, + .. } => { // This promoted involves a function call, so it may fail to evaluate. Let's // make sure it is added to `required_consts` so that failure cannot get lost. @@ -848,6 +853,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { target: Some(new_target), call_source: desugar, fn_span, + call_id, }, source_info: SourceInfo::outermost(terminator.source_info.span), ..terminator diff --git a/compiler/rustc_mir_transform/src/shim.rs b/compiler/rustc_mir_transform/src/shim.rs index 89423bf885c84..6ca285df2d9ee 100644 --- a/compiler/rustc_mir_transform/src/shim.rs +++ b/compiler/rustc_mir_transform/src/shim.rs @@ -564,6 +564,7 @@ struct CloneShimBuilder<'tcx> { blocks: IndexVec>, span: Span, sig: ty::FnSig<'tcx>, + next_call_id: u32, } impl<'tcx> CloneShimBuilder<'tcx> { @@ -582,6 +583,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { blocks: IndexVec::new(), span, sig, + next_call_id: 0, } } @@ -669,6 +671,9 @@ impl<'tcx> CloneShimBuilder<'tcx> { )))); // `let loc = Clone::clone(ref_loc);` + let call_id = + tcx.mk_call_chain(&[(self.def_id, self.next_call_id, tcx.mk_args(&[ty.into()]))]); + self.next_call_id += 1; self.block( vec![statement], TerminatorKind::Call { @@ -679,6 +684,7 @@ impl<'tcx> CloneShimBuilder<'tcx> { unwind: UnwindAction::Cleanup(cleanup), call_source: CallSource::Normal, fn_span: self.span, + call_id, }, false, ); 
@@ -950,6 +956,7 @@ fn build_call_shim<'tcx>( let n_blocks = if let Some(Adjustment::RefMut) = rcvr_adjustment { 5 } else { 2 }; let mut blocks = IndexVec::with_capacity(n_blocks); + let shim_def_id = instance.def_id(); let block = |blocks: &mut IndexVec<_, _>, statements, kind, is_cleanup| { blocks.push(BasicBlockData::new_stmts( statements, @@ -960,6 +967,11 @@ fn build_call_shim<'tcx>( // BB #0 let args = args.into_iter().map(|a| Spanned { node: a, span: DUMMY_SP }).collect(); + let call_id = tcx.mk_call_chain(&[( + shim_def_id, + 0, + callee.const_fn_def().map(|(_, args)| args).unwrap_or_else(|| tcx.mk_args(&[])), + )]); block( &mut blocks, statements, @@ -975,6 +987,7 @@ fn build_call_shim<'tcx>( }, call_source: CallSource::Misc, fn_span: span, + call_id, }, false, ); diff --git a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs index 5038afffcd8d2..a3f90b7e0ebff 100644 --- a/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs +++ b/compiler/rustc_mir_transform/src/shim/async_destructor_ctor.rs @@ -362,6 +362,8 @@ fn build_adrop_for_adrop_shim<'tcx>( let cor_pin_place = Place::from(locals.push(LocalDecl::new(cor_pin_ty, span))); let pin_fn = tcx.require_lang_item(LangItem::PinNewUnchecked, span); + let shim_def_id = instance.def_id(); + let mut next_call_id: u32 = 0; // call Pin::new_unchecked(&mut impl_cor) blocks.push(BasicBlockData::new_stmts( statements, @@ -375,6 +377,15 @@ fn build_adrop_for_adrop_shim<'tcx>( unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: span, + call_id: tcx.mk_call_chain(&[( + shim_def_id, + { + let id = next_call_id; + next_call_id += 1; + id + }, + tcx.mk_args(&[cor_ref.into()]), + )]), }, }), false, @@ -398,6 +409,15 @@ fn build_adrop_for_adrop_shim<'tcx>( unwind: UnwindAction::Continue, call_source: CallSource::Misc, fn_span: span, + call_id: tcx.mk_call_chain(&[( + shim_def_id, + { + let id = next_call_id; + 
next_call_id += 1; + id + }, + tcx.mk_args(&[impl_ty.into()]), + )]), }, }), false, @@ -409,6 +429,7 @@ fn build_adrop_for_adrop_shim<'tcx>( let source = MirSource::from_instance(instance); let mut body = new_body(source, blocks, locals, sig.inputs().len(), span); + body.next_call_id = next_call_id; body.phase = MirPhase::Runtime(RuntimePhase::Initial); return body; } diff --git a/compiler/rustc_monomorphize/Cargo.toml b/compiler/rustc_monomorphize/Cargo.toml index 552c092ef7c46..24fc89870ba50 100644 --- a/compiler/rustc_monomorphize/Cargo.toml +++ b/compiler/rustc_monomorphize/Cargo.toml @@ -10,6 +10,7 @@ rustc_data_structures = { path = "../rustc_data_structures" } rustc_errors = { path = "../rustc_errors" } rustc_hir = { path = "../rustc_hir" } rustc_index = { path = "../rustc_index" } +rustc_lint_defs = { path = "../rustc_lint_defs" } rustc_macros = { path = "../rustc_macros" } rustc_middle = { path = "../rustc_middle" } rustc_session = { path = "../rustc_session" } @@ -17,5 +18,6 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } serde = "1" serde_json = "1" +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" # tidy-alphabetical-end diff --git a/compiler/rustc_monomorphize/src/cast_sensitivity.rs b/compiler/rustc_monomorphize/src/cast_sensitivity.rs new file mode 100644 index 0000000000000..287aa0eb1f682 --- /dev/null +++ b/compiler/rustc_monomorphize/src/cast_sensitivity.rs @@ -0,0 +1,1499 @@ +//! SCC-based batch computation of cast-relevant lifetimes and related +//! query providers. Composes per-Instance direct sensitivity with callee +//! sensitivity over the call graph; the SCC pass is order-independent so +//! back-edges propagate correctly. 
+ +use std::collections::VecDeque; + +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::graph::scc::Sccs; +use rustc_data_structures::graph::vec_graph::VecGraph; +use rustc_data_structures::sync::{Lock, par_map}; +use rustc_data_structures::unord::{ExtendUnord, UnordMap, UnordSet}; +use rustc_hir::def_id::DefId; +use rustc_index::bit_set::{BitMatrix, DenseBitSet}; +use rustc_middle::bug; +use rustc_middle::mir::{self, BorrowckRegionSummary, InputSlot, VidProvenance}; +use rustc_middle::mono::{ + CastRelevantLifetimes, CollectionMode, LifetimeBVToParamMapping, MonoItem, +}; +use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_middle::ty::{self, Instance, List, TyCtxt}; +use rustc_span::{DUMMY_SP, sym}; +use smallvec::SmallVec; + +use crate::erasure_safe::{region_slots_of_arg, region_slots_of_args}; + +// ── Types ───────────────────────────────────────────────────────────────── + +/// Per-Instance sensitivity metadata, keyed on base Instance +/// (no Outlives entries). Populated by the SCC batch computation and +/// consumed when augmenting sensitive subgraphs. +pub(crate) struct InstanceSensitivity<'tcx> { + /// Transitive sensitivity: composed from direct + callee sensitivity. + /// `None` → this Instance is not sensitive. + pub(crate) sensitivity: Option>, + /// The base (un-augmented) sensitive callees at each call site. + /// Stored so augmentation can re-run with a different `CallerOutlivesEnv`. + /// Empty for directly sensitive functions (they are the leaves). + pub(crate) sensitive_call_sites: + &'tcx [(&'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, Instance<'tcx>)], + /// For ground-level callers: pre-computed augmented callee Instances. + /// Empty for generic callers, whose augmented callees depend on + /// Outlives entries computed later when a concrete caller is known. 
+ pub(crate) augmented_callees: + &'tcx [(&'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, Instance<'tcx>)], +} + +/// Outlives oracle for a caller. Pre-computes a Floyd-Warshall +/// reachability matrix (via the `outlives_reachability` query) at +/// construction time for O(1) `outlives()` lookups. Two cases: +/// - Augmented callers: built from Instance Outlives entries. +/// - Ground-level callers: keys are walk positions from the origin call +/// site, translated through the call-site region mapping before lookup. +#[derive(Debug)] +pub(crate) struct CallerOutlivesEnv<'tcx> { + reach: &'tcx BitMatrix, + dim: usize, + /// Maps caller-space keys (walk positions or binder indices) to + /// matrix indices. `None` when the caller-space keys ARE matrix + /// indices (the `FromOutlivesEntries` path). + key_to_idx: Option>, +} + +impl<'tcx> CallerOutlivesEnv<'tcx> { + /// Build from a pre-computed reachability matrix where caller-space + /// keys are direct matrix indices (no remapping). + pub(crate) fn from_raw(reach: &'tcx BitMatrix, dim: usize) -> Self { + CallerOutlivesEnv { reach, dim, key_to_idx: None } + } + + pub(crate) fn from_outlives_entries(tcx: TyCtxt<'tcx>, instance: &Instance<'tcx>) -> Self { + // Single pass: intern the outlives args directly into the arena + // while tracking the largest live index. `max_idx` stays 0 when + // the iterator is empty; `interned.is_empty()` covers the empty + // case below. + let mut max_idx = 0; + let interned = tcx.arena.alloc_from_iter(instance.outlives_indices_iter().map(|(l, s)| { + for v in [l, s] { + if v != usize::MAX && v > max_idx { + max_idx = v; + } + } + tcx.mk_outlives_arg(l, s).into() + })); + let dim = if interned.is_empty() { 1 } else { max_idx + 2 }; + let reach = tcx.outlives_reachability((interned, dim)); + CallerOutlivesEnv { reach, dim, key_to_idx: None } + } + + /// Build an env where keys are walk positions from a specific call site. 
+ /// Converts the SCC graph edges into outlives entries and pre-computes + /// the reachability matrix. + pub(crate) fn from_region_summary_walk_pos( + tcx: TyCtxt<'tcx>, + summary: &'tcx BorrowckRegionSummary, + call_site_mapping: &mir::CallSiteRegionMapping, + ) -> Self { + let num_sccs = summary.outlives_graph.scc_successors.len(); + let dim = num_sccs + 1; // +1 for 'static slot + let static_idx = dim - 1; + + let interned = tcx.arena.alloc_from_iter( + summary.outlives_graph.scc_successors.iter().enumerate().flat_map( + |(from_scc, successors)| { + successors + .iter() + .map(move |&to_scc| tcx.mk_outlives_arg(from_scc, to_scc as usize).into()) + }, + ), + ); + let reach = tcx.outlives_reachability((interned, dim)); + + let key_to_idx: FxHashMap = call_site_mapping + .region_mappings + .items() + .map(|(&walk_pos, &vid)| { + let scc = summary.outlives_graph.scc_of_vid(vid).unwrap_or_else(|| { + bug!("missing SCC mapping for region vid {vid} in {:?}", summary) + }); + (walk_pos as usize, scc as usize) + }) + .into_sorted_stable_ord() + .into_iter() + .collect(); + + // Check if any walk position maps to the 'static SCC. The + // projected graph doesn't have an explicit 'static SCC — it's + // implicit. Walk positions mapping to vids whose SCC reaches + // everything are already handled by the reachability matrix. + // The static_idx slot is added by outlives_reachability. + let _ = static_idx; + + CallerOutlivesEnv { reach, dim, key_to_idx: Some(key_to_idx) } + } + + /// Matrix index reserved for `'static` (always `dim - 1`). + pub(crate) fn static_idx(&self) -> usize { + self.dim - 1 + } + + /// Resolve a caller-space key to a matrix index, or `None` if the key + /// is absent from the environment. `usize::MAX` always maps to + /// `static_idx`. 
+ pub(crate) fn resolve(&self, key: usize) -> Option { + if key == usize::MAX { + return Some(self.static_idx()); + } + match &self.key_to_idx { + None => (key < self.dim).then_some(key), + Some(map) => map.get(&key).copied(), + } + } + + /// Iterate all matrix indices that `idx` reaches (i.e. all `shorter` + /// such that `idx` outlives `shorter`). + pub(crate) fn reach_row(&self, idx: usize) -> impl Iterator + '_ { + self.reach.iter(idx) + } + + /// Query whether `longer` outlives `shorter` in the caller's environment. + pub(crate) fn outlives(&self, longer: usize, shorter: usize) -> bool { + if longer == shorter { + return true; + } + let Some(l) = self.resolve(longer) else { return false }; + let Some(s) = self.resolve(shorter) else { return false }; + self.reach.contains(l, s) + } +} + +#[derive(Clone)] +struct CallEdge<'tcx> { + call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, + callee: Instance<'tcx>, +} + +#[derive(Clone, Copy)] +struct ConcretizedChainEdge<'tcx> { + body_args: ty::GenericArgsRef<'tcx>, + concrete_edge_args: ty::GenericArgsRef<'tcx>, +} + +// ── Composition helpers ─────────────────────────────────────────────────── + +struct InputDecomposition { + to_walk_pos: FxHashMap, + #[allow(dead_code)] + from_walk_pos: FxHashMap, +} + +fn build_input_decomposition<'tcx>(concrete_args: ty::GenericArgsRef<'tcx>) -> InputDecomposition { + let mut to_walk_pos = FxHashMap::default(); + let mut from_walk_pos = FxHashMap::default(); + let mut walk_pos = 0usize; + + for (arg_ordinal, arg) in concrete_args.iter().enumerate() { + let slots = region_slots_of_arg(arg); + for offset in 0..slots { + let wp = walk_pos + offset; + let slot = + InputSlot { arg_ordinal: arg_ordinal as u32, offset_within_arg: offset as u32 }; + to_walk_pos.insert(slot, wp); + from_walk_pos.insert(wp, slot); + } + walk_pos += slots; + } + + InputDecomposition { to_walk_pos, from_walk_pos } +} + +fn resolve_vid_provenance(vid: u32, summary: &BorrowckRegionSummary) -> 
VidProvenance { + summary + .vid_provenance + .get(&vid) + .copied() + .unwrap_or_else(|| bug!("relevant vid {vid} missing provenance in {:?}", summary)) +} + +fn instance_from_edge_args_or_bug<'tcx>( + tcx: TyCtxt<'tcx>, + next_body_def_id: DefId, + concrete_edge_args: ty::GenericArgsRef<'tcx>, +) -> Instance<'tcx> { + let instance = Instance::expect_resolve( + tcx, + ty::TypingEnv::fully_monomorphized(), + next_body_def_id, + concrete_edge_args, + DUMMY_SP, + ); + + if instance.def_id() != next_body_def_id { + bug!( + "call_id chain/body mismatch: resolved {:?} for next body {:?} with args {:?}", + instance, + next_body_def_id, + concrete_edge_args, + ); + } + + match instance.def { + ty::InstanceKind::Virtual(..) | ty::InstanceKind::Intrinsic(..) => { + bug!( + "next call_id body {:?} resolved to non-MIR instance {:?}", + next_body_def_id, + instance, + ); + } + _ => instance, + } +} + +fn concretize_chain_args<'tcx>( + tcx: TyCtxt<'tcx>, + caller: Instance<'tcx>, + call_id: &[(DefId, u32, ty::GenericArgsRef<'tcx>)], +) -> Vec> { + let mut result = Vec::with_capacity(call_id.len()); + let mut current_instance = caller; + + for (i, &(body_def_id, _local_id, edge_args_template)) in call_id.iter().enumerate() { + if current_instance.def_id() != body_def_id { + bug!( + "call_id chain out of sync: expected body {:?}, got instance {:?}", + body_def_id, + current_instance, + ); + } + + let concrete_args = current_instance.instantiate_mir_and_normalize_erasing_regions( + tcx, + ty::TypingEnv::fully_monomorphized(), + ty::EarlyBinder::bind(edge_args_template), + ); + result.push(ConcretizedChainEdge { + body_args: current_instance.args, + concrete_edge_args: concrete_args, + }); + + if let Some(&(next_body_def_id, _next_local_id, _)) = call_id.get(i + 1) { + current_instance = instance_from_edge_args_or_bug(tcx, next_body_def_id, concrete_args); + } + } + + result +} + +fn build_template_input_slot_map<'tcx>( + body_args: ty::GenericArgsRef<'tcx>, + edge_args_template: 
ty::GenericArgsRef<'tcx>, + concrete_edge_args: ty::GenericArgsRef<'tcx>, +) -> FxHashMap { + let mut walk_pos_to_slot = FxHashMap::default(); + let mut walk_pos = 0usize; + + for (template_arg, concrete_arg) in edge_args_template.iter().zip(concrete_edge_args.iter()) { + // Determine which of the caller's generic params this template + // arg forwards. Non-forwarding args (concrete types, literal + // consts, etc.) are skipped — their walk positions get no + // InputSlot mapping, which downstream treats as unresolvable. + let source_arg_ordinal = match template_arg.kind() { + ty::GenericArgKind::Type(ty) => match ty.kind() { + ty::Param(param_ty) => Some(param_ty.index as usize), + _ => None, + }, + ty::GenericArgKind::Lifetime(region) => match region.kind() { + ty::ReEarlyParam(ep) => Some(ep.index as usize), + _ => None, + }, + ty::GenericArgKind::Const(ct) => match ct.kind() { + ty::ConstKind::Param(param_ct) => Some(param_ct.index as usize), + _ => None, + }, + ty::GenericArgKind::Outlives(_) => None, + }; + + let slots = region_slots_of_arg(concrete_arg); + if let Some(source_ordinal) = source_arg_ordinal { + debug_assert!( + body_args.get(source_ordinal).is_some(), + "template arg referenced missing body arg {} in {:?}", + source_ordinal, + body_args + ); + for offset in 0..slots { + walk_pos_to_slot.insert( + walk_pos + offset, + InputSlot { + arg_ordinal: source_ordinal as u32, + offset_within_arg: offset as u32, + }, + ); + } + } + walk_pos += slots; + } + + walk_pos_to_slot +} + +/// Compose all walk-order positions through a call_id chain in one pass. +/// +/// The output stays in the origin call site's walk-position space. This +/// lets callers compare the transported positions directly against the +/// caller's own outlives environment without collapsing them to SCC ids. 
+pub(crate) fn compose_all_through_chain<'tcx>( + tcx: TyCtxt<'tcx>, + caller: Instance<'tcx>, + call_id: &[(DefId, u32, ty::GenericArgsRef<'tcx>)], + n_positions: usize, +) -> Vec> { + let mut positions: Vec> = (0..n_positions).map(Some).collect(); + if call_id.is_empty() || n_positions == 0 { + return positions; + } + + let dump_filter = tcx.sess.opts.unstable_opts.dump_trait_cast_chain_composition.as_deref(); + let dump = match dump_filter { + Some(f) if f == "all" => true, + Some(f) => with_no_trimmed_paths!(caller.to_string()).contains(f), + None => false, + }; + + if dump { + let caller_name = with_no_trimmed_paths!(caller.to_string()); + eprintln!( + "=== Chain Composition: {caller_name} ({} link(s), max_walk_pos={n_positions}) ===", + call_id.len(), + ); + } + + let links: Vec<_> = call_id.iter().rev().copied().collect(); + let concrete_args_per_edge = concretize_chain_args(tcx, caller, call_id); + + for (i, &(body_def_id, local_id, _)) in links.iter().enumerate() { + let edge_index = call_id.len() - 1 - i; + let edge_info = concrete_args_per_edge[edge_index]; + let concrete_edge_args = edge_info.concrete_edge_args; + let summary = tcx.borrowck_region_summary(body_def_id); + let is_outermost = i + 1 == links.len(); + let outer_decomp = if is_outermost { + None + } else { + let outer_edge_index = call_id.len() - (i + 2); + Some(build_input_decomposition( + concrete_args_per_edge[outer_edge_index].concrete_edge_args, + )) + }; + + if dump { + let body_path = with_no_trimmed_paths!(tcx.def_path_str(body_def_id)); + let template_args = with_no_trimmed_paths!(format!("{:?}", call_id[edge_index].2)); + let body_args_s = with_no_trimmed_paths!(format!("{:?}", edge_info.body_args)); + let concrete_args_s = with_no_trimmed_paths!(format!("{:?}", concrete_edge_args)); + eprintln!(" Link {i}: body={body_path} local_id={local_id}"); + eprintln!(" body_args (before this link's edge): {body_args_s}"); + eprintln!(" edge args (template): {template_args}"); + eprintln!(" 
edge args (concretized): {concrete_args_s}"); + + let template_map = build_template_input_slot_map( + edge_info.body_args, + call_id[edge_index].2, + concrete_edge_args, + ); + if template_map.is_empty() { + eprintln!(" Template walk_pos -> InputSlot: (empty)"); + } else { + eprintln!(" Template walk_pos -> InputSlot:"); + #[allow(rustc::potential_query_instability)] + // Collecting map entries into a Vec that we subsequently sort by + // the usize walk_pos key for deterministic diagnostic output. + let mut entries: Vec<(usize, InputSlot)> = + template_map.iter().map(|(&k, &v)| (k, v)).collect(); + entries.sort_by_key(|&(k, _)| k); + for (wp, slot) in entries { + eprintln!( + " walk_pos={wp} -> InputSlot {{ arg_ordinal={}, offset_within_arg={} }}", + slot.arg_ordinal, slot.offset_within_arg, + ); + } + } + } + + let Some(mapping) = summary.call_site_mappings.get(&local_id) else { + if dump { + eprintln!(" Region-summary resolutions: (no call_site_mapping for local_id)"); + } + let edge_slots: usize = region_slots_of_args(concrete_edge_args); + if edge_slots == 0 { + if dump { + eprintln!( + " edge has 0 region slots after monomorphization; dropping all positions" + ); + } + positions.fill(None); + continue; + } + + // No call-site mapping but regions exist after monomorphization + // (e.g. U = dyn Trait<'lt>). Trace through the template args. + let local_walk_pos_to_slot = build_template_input_slot_map( + edge_info.body_args, + call_id[edge_index].2, + concrete_edge_args, + ); + + // At the outermost link there is no outer_decomp — use the + // caller's own args as the target coordinate space. 
+ let caller_decomp; + let target_decomp = if is_outermost { + caller_decomp = build_input_decomposition(edge_info.body_args); + &caller_decomp + } else { + outer_decomp.as_ref().expect("non-outermost link must have an outer decomposition") + }; + + for pos in positions.iter_mut() { + let Some(wp) = *pos else { + continue; + }; + let Some(slot) = local_walk_pos_to_slot.get(&wp).copied() else { + *pos = None; + continue; + }; + let Some(&outer_wp) = target_decomp.to_walk_pos.get(&slot) else { + *pos = None; + continue; + }; + *pos = Some(outer_wp); + } + continue; + }; + + if dump { + let entries: Vec<(u32, u32)> = mapping + .region_mappings + .items() + .map(|(&walk_pos, &vid)| (walk_pos, vid)) + .into_sorted_stable_ord(); + if entries.is_empty() { + eprintln!(" Region-summary resolutions: (call_site_mapping is empty)"); + } else { + eprintln!(" Region-summary resolutions:"); + for (wp, vid) in entries { + let prov = resolve_vid_provenance(vid, &summary); + eprintln!(" walk_pos={wp} -> vid={vid} -> {prov:?}"); + } + } + } + + for pos in positions.iter_mut() { + let Some(wp) = *pos else { + continue; + }; + + let Some(vid) = mapping.vid_for_walk_pos(wp as u32) else { + *pos = None; + continue; + }; + + if is_outermost { + if matches!(resolve_vid_provenance(vid, &summary), VidProvenance::Static) { + *pos = None; + } else { + *pos = Some(wp); + } + continue; + } + + match resolve_vid_provenance(vid, &summary) { + VidProvenance::Static | VidProvenance::LocalOnly => { + *pos = None; + } + VidProvenance::Input(slot) | VidProvenance::BoundedByUniversal(slot) => { + let Some(&outer_wp) = outer_decomp + .as_ref() + .expect("non-outermost link must have an outer decomposition") + .to_walk_pos + .get(&slot) + else { + *pos = None; + continue; + }; + *pos = Some(outer_wp); + } + } + } + } + + if dump { + eprintln!(" Final mapping (callee walk_pos -> origin walk_pos):"); + for (i, entry) in positions.iter().enumerate() { + match entry { + Some(n) if *n == usize::MAX => 
eprintln!(" [{i}] -> 'static"), + Some(n) => eprintln!(" [{i}] -> {n}"), + None => eprintln!(" [{i}] -> (none)"), + } + } + eprintln!(); + } + + positions +} + +fn caller_env_for_call_id<'tcx>( + tcx: TyCtxt<'tcx>, + caller: Instance<'tcx>, + call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, +) -> CallerOutlivesEnv<'tcx> { + if call_id.is_empty() { + bug!("empty call_id chain for caller {:?}", caller); + } + if caller.has_outlives_entries() { + return CallerOutlivesEnv::from_outlives_entries(tcx, &caller); + } + + let origin_def_id = call_id[0].0; + debug_assert_eq!(origin_def_id, caller.def_id()); + let origin_local_id = call_id[0].1; + let summary = tcx.borrowck_region_summary(origin_def_id); + let Some(mapping) = summary.call_site_mappings.get(&origin_local_id) else { + // No call-site mapping: the origin function is generic and its + // intrinsic args are type params that only acquire regions after + // monomorphization. Return an empty env — outlives evidence will + // come from the augmented Instance's Outlives entries when the + // sensitivity system processes augmented callees. + let empty: &'tcx [ty::GenericArg<'tcx>] = &[]; + return CallerOutlivesEnv::from_raw(tcx.outlives_reachability((empty, 1)), 1); + }; + CallerOutlivesEnv::from_region_summary_walk_pos(tcx, summary, mapping) +} + +// ── CastRelevantLifetimes helpers ───────────────────────────────────────── + +fn input_identity_sensitivity_for_call_site<'tcx>( + tcx: TyCtxt<'tcx>, + summary: &'tcx BorrowckRegionSummary, + mapping: &mir::CallSiteRegionMapping, +) -> Option> { + // Allocate `bv_to_param` lazily: if no walk-pos survives the provenance + // filter, the whole function returns None and we never pay for the vec. 
+ let mut bv_to_param: Option>> = None; + + for (walk_pos, region_vid) in mapping + .region_mappings + .items() + .map(|(&walk_pos, ®ion_vid)| (walk_pos, region_vid)) + .into_sorted_stable_ord() + { + if matches!( + summary.vid_provenance.get(®ion_vid), + Some( + VidProvenance::Input(_) + | VidProvenance::BoundedByUniversal(_) + | VidProvenance::LocalOnly + ) + ) { + let slots = bv_to_param.get_or_insert_with(|| { + let max_walk_pos = + mapping.region_mappings.items().map(|(&wp, _)| wp as usize).max().unwrap_or(0); + vec![None; max_walk_pos + 1] + }); + slots[walk_pos as usize] = Some(walk_pos as usize); + } + } + + let bv_to_param = bv_to_param?; + let list = tcx.mk_lifetime_bv_to_param_mapping_from_iter(bv_to_param.into_iter()); + let mappings = [LifetimeBVToParamMapping(list)]; + Some(CastRelevantLifetimes::from_direct_mappings(&mappings)) +} + +// ── augment_callee ──────────────────────────────────────────────────────── + +/// Compute the augmented callee Instance given the caller's outlives +/// environment and a pre-composed mapping from callee walk-order positions +/// to caller param positions. +pub(crate) fn augment_callee<'tcx>( + tcx: TyCtxt<'tcx>, + caller_instance: Instance<'tcx>, + callee_instance: Instance<'tcx>, + callee_sensitivity: &CastRelevantLifetimes<'tcx>, + caller_env: &CallerOutlivesEnv<'tcx>, + composed_mapping: Option<&[Option]>, +) -> Instance<'tcx> { + // Determinism is established by `into_sorted_stable_ord` below, so we + // can traverse the unord mappings directly without an up-front sort. + let mut nodes: Vec<(usize, usize)> = callee_sensitivity + .mappings + .items() + .flat_map(|mapping| { + mapping.0.iter().enumerate().filter_map(|(bv_idx, bv)| { + let callee_pos = bv?; + // When composed_mapping is None, use identity (pass + // CRL values through directly as caller param keys). 
+ let caller_param_pos = match composed_mapping { + Some(cm) => match cm.get(callee_pos) { + Some(&Some(pos)) => pos, + _ => return None, + }, + None => callee_pos, + }; + Some((bv_idx, caller_param_pos)) + }) + }) + .into_sorted_stable_ord(); + + if nodes.is_empty() { + let result = callee_instance.with_outlives(tcx, &[]); + maybe_dump_augmentation( + tcx, + caller_instance, + callee_instance, + caller_env, + composed_mapping, + &nodes, + &[], + result, + ); + return result; + } + + // `into_sorted_stable_ord` above sorted lexicographically by + // `(bv_idx, caller_param_pos)`, so same-`bv_idx` entries are already + // adjacent and the dedup below drops duplicates per `bv_idx`. + nodes.dedup_by_key(|n| n.0); + + // Resolve every node's caller-space key to a matrix index once, then + // build the outlives pairs by iterating each row of the pre-computed + // reachability matrix. This is O(N · dim) in the number of sensitive + // binder variables and the matrix dimension (typically ≤ 10), rather + // than the O(N²) pairwise `outlives` probes it replaces. + let static_idx = caller_env.static_idx(); + let resolved: Vec<(usize, usize)> = nodes + .iter() + .filter_map(|&(bv, key)| caller_env.resolve(key).map(|idx| (bv, idx))) + .collect(); + + // Reverse index: matrix idx → bv(s) at that idx. `dim` is tiny, so this + // map stays small; SmallVec inline-4 covers the typical aliasing case. + let mut idx_to_bvs: FxHashMap> = FxHashMap::default(); + for &(bv, idx) in &resolved { + idx_to_bvs.entry(idx).or_default().push(bv); + } + + let mut outlives_pairs: Vec<(usize, usize)> = Vec::new(); + for &(bv_i, idx_i) in &resolved { + for idx_j in caller_env.reach_row(idx_i) { + // `'static` successor contributes the `(bv_i, usize::MAX)` + // sentinel. Also fall through so any bv whose key was + // `usize::MAX` (and thus resolved to `static_idx`) still + // gets a pair emitted against it. 
+ if idx_j == static_idx { + outlives_pairs.push((bv_i, usize::MAX)); + } + // When multiple bvs alias onto the same caller-space key, + // reflexivity (`reach.contains(idx, idx) == true`) still + // relates them, so we do not skip `idx_j == idx_i`: the + // `bv_i != bv_j` filter below rejects only true self-pairs. + if let Some(bvs) = idx_to_bvs.get(&idx_j) { + for &bv_j in bvs { + if bv_i != bv_j { + outlives_pairs.push((bv_i, bv_j)); + } + } + } + } + } + + outlives_pairs.sort(); + outlives_pairs.dedup(); + + let result = callee_instance.with_outlives(tcx, &outlives_pairs); + maybe_dump_augmentation( + tcx, + caller_instance, + callee_instance, + caller_env, + composed_mapping, + &nodes, + &outlives_pairs, + result, + ); + result +} + +/// Emit the `-Z dump-trait-cast-augmentation` diagnostic block for a single +/// caller -> callee augmentation, when the flag is set and the filter matches +/// the caller's printed name. Pure instrumentation — does not affect the +/// augmentation result. 
+fn maybe_dump_augmentation<'tcx>( + tcx: TyCtxt<'tcx>, + caller_instance: Instance<'tcx>, + callee_instance: Instance<'tcx>, + caller_env: &CallerOutlivesEnv<'tcx>, + composed_mapping: Option<&[Option]>, + nodes: &[(usize, usize)], + outlives_pairs: &[(usize, usize)], + result: Instance<'tcx>, +) { + let Some(ref filter) = tcx.sess.opts.unstable_opts.dump_trait_cast_augmentation else { + return; + }; + + let caller_name = with_no_trimmed_paths!(caller_instance.to_string()); + if filter != "all" && !caller_name.contains(filter.as_str()) { + return; + } + + let callee_name = with_no_trimmed_paths!(callee_instance.to_string()); + let result_name = with_no_trimmed_paths!(result.to_string()); + + eprintln!("=== Augmentation: {caller_name} ->"); + eprintln!(" {callee_name} ==="); + + eprintln!(" Caller outlives env:"); + { + let dim = caller_env.dim; + let static_idx = dim - 1; + let has_remap = caller_env.key_to_idx.is_some(); + eprintln!(" CallerOutlivesEnv (dim={dim}, remapped={has_remap}):"); + if let Some(ref key_map) = caller_env.key_to_idx { + #[allow(rustc::potential_query_instability)] + let mut pairs: Vec<(usize, usize)> = key_map.iter().map(|(&k, &v)| (k, v)).collect(); + pairs.sort_by_key(|&(k, _)| k); + for (walk_pos, idx) in pairs { + eprintln!(" walk_pos={walk_pos} -> idx={idx}"); + } + } + for i in 0..dim { + for j in 0..dim { + if i != j && caller_env.reach.contains(i, j) { + let i_s = if i == static_idx { "'static".to_string() } else { i.to_string() }; + let j_s = if j == static_idx { "'static".to_string() } else { j.to_string() }; + eprintln!(" {i_s} outlives {j_s}"); + } + } + } + } + + eprintln!(" Composed mapping:"); + match composed_mapping { + None => eprintln!(" (identity)"), + Some(cm) => { + for (callee_walk_pos, entry) in cm.iter().enumerate() { + match entry { + Some(pos) => { + eprintln!( + " callee_walk_pos={callee_walk_pos} -> caller_param_pos={pos}" + ); + } + None => { + eprintln!(" callee_walk_pos={callee_walk_pos} -> (none)"); + } + } 
+ } + } + } + + eprintln!(" BV nodes ({}):", nodes.len()); + for &(bv_idx, caller_param_pos) in nodes { + let key = if caller_param_pos == usize::MAX { + "'static".to_string() + } else { + caller_param_pos.to_string() + }; + eprintln!(" bv{bv_idx} -> caller_param_pos={key}"); + } + + eprintln!(" Outlives pairs emitted ({}):", outlives_pairs.len()); + for &(a, b) in outlives_pairs { + let b_s = if b == usize::MAX { "'static".to_string() } else { format!("bv{b}") }; + eprintln!(" (bv{a}, {b_s})"); + } + + eprintln!(" Augmented callee: {result_name}"); + eprintln!(); +} + +// ── SCC-based batch computation ─────────────────────────────────────────── + +/// Compute the crate-wide cast-relevant-lifetimes map using SCC-based +/// fixed-point iteration. +/// +/// Runs after the collector DFS. The resulting map is stored in +/// `instance_sensitivity` for later augmentation and proxied via the +/// `cast_relevant_lifetimes` query. +/// +/// # Algorithm +/// +/// 1. For each collected Instance, query `items_of_instance` to obtain +/// `direct_sensitivity` and `call_sites`. Record call-graph edges. +/// 2. Compute the reverse-reachable set from directly sensitive Instances +/// to identify the sensitive subgraph. +/// 3. Compute SCCs of the sensitive subgraph (Tarjan's algorithm). +/// 4. Process SCCs in reverse topological order: +/// - Singleton SCCs (acyclic): single-pass composition. +/// - Non-trivial SCCs (cycles): iterate until fixed-point. +/// 5. Store results in `instance_sensitivity`. +pub(crate) fn compute_cast_relevant_lifetimes<'tcx>( + tcx: TyCtxt<'tcx>, + instance_sensitivity: &Lock, InstanceSensitivity<'tcx>>>, + visited: &UnordSet>, +) { + // Extract Fn Instances from the visited set deterministically. + // MonoItem implements ToStableHashKey; Instance doesn't, so we + // sort as MonoItem first, then filter to Fn instances. 
+ let collected_instances: Vec> = tcx.with_stable_hashing_context(|mut hcx| { + visited + .items() + .filter_map(|item| match item { + MonoItem::Fn(instance) => Some(instance), + _ => None, + }) + .copied() + .into_sorted(&mut hcx) + }); + + // ── Direct sensitivity + call-graph construction ────────────────── + + let mut direct_sensitivity: FxHashMap, CastRelevantLifetimes<'tcx>> = + FxHashMap::default(); + + let mut call_graph: FxHashMap, Vec>> = FxHashMap::default(); + + let idx_to_inst = &collected_instances[..]; + let inst_to_idx: FxHashMap, u32> = collected_instances + .iter() + .enumerate() + .map(|(idx, &instance)| (instance, idx as u32)) + .collect(); + + // Per-instance queries in parallel: `items_of_instance` is a cacheable + // query, and the edge/direct-sensitivity construction is read-only + // with respect to shared state. Merge into the serial maps in input + // order (`par_map` preserves it). + let per_instance: Vec<(Option>, Vec>)> = + par_map(collected_instances.iter().copied(), |instance| { + let Ok(items) = tcx.items_of_instance((instance, CollectionMode::UsedItems)) else { + return (None, Vec::new()); + }; + let direct = (!items.direct_sensitivity.is_empty()) + .then(|| CastRelevantLifetimes::from_direct_mappings(items.direct_sensitivity)); + let edges: Vec> = items + .call_sites + .iter() + .filter(|(_, callee)| visited.contains(&MonoItem::Fn(*callee))) + .map(|&(call_id, callee)| CallEdge { call_id, callee }) + .collect(); + (direct, edges) + }); + + for (&instance, (direct, edges)) in collected_instances.iter().zip(per_instance) { + if let Some(crl) = direct { + direct_sensitivity.insert(instance, crl); + } + if !edges.is_empty() { + call_graph.insert(instance, edges); + } + } + + let total = idx_to_inst.len(); + + // ── Sensitive subgraph (reverse reachability) ──────────────────── + + let mut reverse_adj: Vec> = vec![SmallVec::new(); total]; + #[allow(rustc::potential_query_instability)] + // Iteration order doesn't matter: we're 
populating an adjacency list + // indexed by pre-assigned node IDs. + for (&caller, edges) in &call_graph { + let caller_idx = inst_to_idx[&caller]; + for edge in edges { + if let Some(&callee_idx) = inst_to_idx.get(&edge.callee) { + reverse_adj[callee_idx as usize].push(caller_idx); + } + } + } + + let mut sensitive_set = DenseBitSet::::new_empty(total); + let mut queue: VecDeque = VecDeque::new(); + + #[allow(rustc::potential_query_instability)] + // Seeding a bitset by pre-assigned index; iteration order is irrelevant. + for &inst in direct_sensitivity.keys() { + let idx = inst_to_idx[&inst]; + if sensitive_set.insert(idx) { + queue.push_back(idx); + } + } + + while let Some(idx) = queue.pop_front() { + for &caller_idx in &reverse_adj[idx as usize] { + if sensitive_set.insert(caller_idx) { + queue.push_back(caller_idx); + } + } + } + + if sensitive_set.is_empty() { + return; + } + + let n = sensitive_set.count(); + let mut full_to_sens: Vec = vec![u32::MAX; total]; + let mut sens_to_full: Vec = Vec::with_capacity(n); + for full_idx in sensitive_set.iter() { + let sens_idx = sens_to_full.len() as u32; + full_to_sens[full_idx as usize] = sens_idx; + sens_to_full.push(full_idx); + } + + rustc_index::newtype_index! { + #[orderable] + struct SensIdx {} + } + rustc_index::newtype_index! 
{
+        #[orderable]
+        struct SccIdx {}
+    }
+
+    let mut edge_pairs: Vec<(SensIdx, SensIdx)> = Vec::new();
+    for (sens_i, &full_i) in sens_to_full.iter().enumerate() {
+        let full_i = full_i as usize;
+        let inst = idx_to_inst[full_i];
+        if let Some(edges) = call_graph.get(&inst) {
+            for edge in edges {
+                if let Some(&full_j) = inst_to_idx.get(&edge.callee) {
+                    let sens_j = full_to_sens[full_j as usize];
+                    if sens_j != u32::MAX {
+                        edge_pairs.push((
+                            SensIdx::from_usize(sens_i),
+                            SensIdx::from_usize(sens_j as usize),
+                        ));
+                    }
+                }
+            }
+        }
+    }
+
+    // ── SCC decomposition ─────────────────────────────────────────────
+
+    let graph = VecGraph::<SensIdx, true>::new(n, edge_pairs);
+    let sccs = Sccs::<SensIdx, SccIdx>::new(&graph);
+
+    let mut scc_members: Vec<Vec<SensIdx>> = vec![Vec::new(); sccs.num_sccs()];
+    for sens_i in 0..n {
+        let node = SensIdx::from_usize(sens_i);
+        scc_members[sccs.scc(node).index()].push(node);
+    }
+
+    // ── Pre-compute chain compositions for sensitive edges ────────────
+
+    #[allow(rustc::potential_query_instability)]
+    // Iteration order is irrelevant: populating a lookup table keyed by
+    // (Instance, call_id) identity.
+    let composition_cache: FxHashMap<
+        (Instance<'tcx>, &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>),
+        Vec<Option<usize>>,
+    > = {
+        let mut cache = FxHashMap::default();
+        for &full_i in &sens_to_full {
+            let inst = idx_to_inst[full_i as usize];
+            if let Some(edges) = call_graph.get(&inst) {
+                for edge in edges {
+                    let Some(&callee_full) = inst_to_idx.get(&edge.callee) else {
+                        continue;
+                    };
+                    if !sensitive_set.contains(callee_full) {
+                        continue;
+                    }
+                    // Upper bound on walk positions: total region slots
+                    // in the callee's args. Positions map independently,
+                    // so computing with a larger n_positions is safe. 
+ let upper_bound: usize = region_slots_of_args(edge.callee.args); + let composed = compose_all_through_chain(tcx, inst, edge.call_id, upper_bound); + cache.insert((inst, edge.call_id), composed); + } + } + } + cache + }; + + // ── Process SCCs in reverse topological order ──────────────────── + + let mut resolved: FxHashMap, CastRelevantLifetimes<'tcx>> = FxHashMap::default(); + + #[allow(rustc::potential_query_instability)] + // Seeding from pre-computed direct sensitivity; results are keyed by + // Instance identity, not iteration order. + for (inst, crl) in &direct_sensitivity { + let full_idx = inst_to_idx[inst]; + if sensitive_set.contains(full_idx) { + resolved.insert(*inst, crl.clone()); + } + } + + for scc in sccs.all_sccs() { + let scc: SccIdx = scc; + let members = &scc_members[scc.index()]; + + if members.len() == 1 { + let sens_node = members[0]; + let has_self_edge = graph.successors(sens_node).contains(&sens_node); + if !has_self_edge { + // Singleton SCC (no self-edge): single pass. + let instance = idx_to_inst[sens_to_full[sens_node.index()] as usize]; + let result = compute_instance_sensitivity( + tcx, + instance, + &call_graph, + &resolved, + &composition_cache, + ); + if let Some(crl) = result { + resolved.insert(instance, crl); + } + continue; + } + } + + // Non-trivial SCC (or singleton with self-edge): iterate + // until fixed-point. 
+ let member_set = { + let mut set = DenseBitSet::::new_empty(n); + for &node in members { + set.insert(node); + } + set + }; + let mut dirty = member_set.clone(); + + loop { + let mut next_dirty = DenseBitSet::::new_empty(n); + let mut changed = false; + + for sens_node in dirty.iter() { + let instance = idx_to_inst[sens_to_full[sens_node.index()] as usize]; + let new_crl = compute_instance_sensitivity( + tcx, + instance, + &call_graph, + &resolved, + &composition_cache, + ); + + if let Some(new_crl) = new_crl { + let (merged, changed_here) = match resolved.remove(&instance) { + Some(existing) => crl_join(existing, new_crl), + None => (new_crl, true), + }; + resolved.insert(instance, merged); + + if changed_here { + changed = true; + for &caller_sens in graph.predecessors(sens_node) { + if member_set.contains(caller_sens) { + next_dirty.insert(caller_sens); + } + } + } + } + } + if !changed { + break; + } + dirty = next_dirty; + } + } + + // ── Feed results into instance_sensitivity ─────────────────────── + // + // Compute per-instance entries in parallel (augment_callee + arena + // allocation are thread-safe), then insert serially into the + // FxIndexMap. `par_map` preserves input order, so insertion order + // remains deterministic. 
+
+    let entries: Vec<Option<(Instance<'tcx>, InstanceSensitivity<'tcx>)>> = par_map(
+        collected_instances.iter().copied(),
+        |instance| -> Option<(Instance<'tcx>, InstanceSensitivity<'tcx>)> {
+            let sensitivity = resolved.get(&instance).cloned();
+            if sensitivity.is_none() && !direct_sensitivity.contains_key(&instance) {
+                return None;
+            }
+
+            let items = tcx.items_of_instance((instance, CollectionMode::UsedItems)).ok()?;
+
+            let mut sensitive_call_sites_vec = Vec::new();
+            let mut augmented_callees_vec = Vec::new();
+
+            if sensitivity.is_some() {
+                for &(call_id, callee) in items.call_sites {
+                    if resolved.contains_key(&callee) {
+                        sensitive_call_sites_vec.push((call_id, callee));
+                    }
+                }
+
+                // For ground-level callers, pre-compute augmented callee Instances.
+                if !instance.has_outlives_entries() && !sensitive_call_sites_vec.is_empty() {
+                    for &(call_id, callee_instance) in &sensitive_call_sites_vec {
+                        let Some(callee_sensitivity) = resolved.get(&callee_instance) else {
+                            continue;
+                        };
+
+                        let composed_mapping = composition_cache
+                            .get(&(instance, call_id))
+                            .expect("composition cache miss during result feed-in")
+                            .as_slice();
+                        let caller_env = caller_env_for_call_id(tcx, instance, call_id);
+
+                        let augmented = augment_callee(
+                            tcx,
+                            instance,
+                            callee_instance,
+                            callee_sensitivity,
+                            &caller_env,
+                            Some(composed_mapping),
+                        );
+                        augmented_callees_vec.push((call_id, augmented));
+                    }
+                }
+            }
+
+            Some((
+                instance,
+                InstanceSensitivity {
+                    sensitivity,
+                    sensitive_call_sites: tcx.arena.alloc_from_iter(sensitive_call_sites_vec),
+                    augmented_callees: tcx.arena.alloc_from_iter(augmented_callees_vec),
+                },
+            ))
+        },
+    );
+
+    let mut sensitivity_map = instance_sensitivity.lock();
+    for (instance, sens) in entries.into_iter().flatten() {
+        sensitivity_map.insert(instance, sens);
+    }
+    drop(sensitivity_map);
+
+    dump_trait_cast_sensitivity(
+        tcx,
+        instance_sensitivity,
+        &direct_sensitivity,
+        &collected_instances,
+    );
+}
+
+/// Dump per-instance sensitivity metadata to stderr, 
gated on
+/// `-Z dump-trait-cast-sensitivity=<filter>`. Emits one block per matching
+/// Instance in a deterministic order. Fast-paths when the flag is absent.
+fn dump_trait_cast_sensitivity<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    instance_sensitivity: &Lock<FxIndexMap<Instance<'tcx>, InstanceSensitivity<'tcx>>>,
+    direct_sensitivity: &FxHashMap<Instance<'tcx>, CastRelevantLifetimes<'tcx>>,
+    _collected_instances: &[Instance<'tcx>],
+) {
+    let Some(ref filter) = tcx.sess.opts.unstable_opts.dump_trait_cast_sensitivity else {
+        return;
+    };
+
+    let map = instance_sensitivity.lock();
+
+    // Collect entries and sort via stable fingerprint of the Instance so the
+    // output order is deterministic across runs. `Instance` is `!Ord`, so we
+    // use the `with_stable_hashing_context` + `StableHasher` pattern
+    // established in `cascade_canonicalize` (see partitioning.rs).
+    #[allow(rustc::potential_query_instability)]
+    // Collecting entries from an FxIndexMap into a Vec that we subsequently
+    // sort by stable fingerprint; final iteration order is deterministic. 
+    let mut entries: Vec<Instance<'tcx>> = map.keys().copied().collect();
+    tcx.with_stable_hashing_context(|mut hcx| {
+        entries.sort_by_cached_key(|instance| {
+            use rustc_data_structures::fingerprint::Fingerprint;
+            use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+            let mut hasher = StableHasher::new();
+            instance.hash_stable(&mut hcx, &mut hasher);
+            hasher.finish::<Fingerprint>()
+        });
+    });
+
+    for instance in entries {
+        let entry = map.get(&instance).expect("entry present");
+        let name = with_no_trimmed_paths!(instance.to_string());
+
+        if filter != "all" && !name.contains(filter.as_str()) {
+            continue;
+        }
+
+        let is_direct = direct_sensitivity.contains_key(&instance);
+        let is_transitive = entry.sensitivity.is_some();
+        if !is_direct && !is_transitive {
+            continue;
+        }
+
+        eprintln!("=== Sensitivity: {name} ===");
+
+        let direct_count = direct_sensitivity.get(&instance).map(|d| d.mappings.len()).unwrap_or(0);
+        if is_direct {
+            eprintln!(" direct: yes (has {direct_count} direct mapping(s))");
+        } else {
+            eprintln!(" direct: no");
+        }
+
+        let transitive_count = entry.sensitivity.as_ref().map(|s| s.mappings.len()).unwrap_or(0);
+        if is_transitive {
+            eprintln!(" transitive: yes (has {transitive_count} composed mapping(s))");
+        } else {
+            eprintln!(" transitive: no");
+        }
+
+        if let Some(ref sens) = entry.sensitivity {
+            let sorted_mappings =
+                tcx.with_stable_hashing_context(|mut hcx| sens.mappings.to_sorted(&mut hcx, true));
+            eprintln!(" Mappings ({}):", sorted_mappings.len());
+            for mapping in &sorted_mappings {
+                let parts: Vec<String> = mapping
+                    .0
+                    .iter()
+                    .enumerate()
+                    .map(|(i, entry)| match entry {
+                        Some(n) if n == usize::MAX => format!("bv{i} -> static"),
+                        Some(n) => format!("bv{i} -> walk_pos={n}"),
+                        None => format!("bv{i} -> (none)"),
+                    })
+                    .collect();
+                eprintln!(" [{}]", parts.join(", "));
+            }
+        }
+
+        eprintln!(" Sensitive call sites ({}):", entry.sensitive_call_sites.len());
+        for &(call_id, callee) in entry.sensitive_call_sites {
+            let summary = 
format_call_id_summary(tcx, call_id);
+            let callee_str = with_no_trimmed_paths!(callee.to_string());
+            eprintln!(" {summary} -> {callee_str}");
+        }
+
+        if !entry.augmented_callees.is_empty() {
+            eprintln!(" Augmented callees ({}):", entry.augmented_callees.len());
+            for &(call_id, callee) in entry.augmented_callees {
+                let summary = format_call_id_summary(tcx, call_id);
+                let callee_str = with_no_trimmed_paths!(callee.to_string());
+                eprintln!(" {summary} -> {callee_str}");
+            }
+        }
+
+        eprintln!();
+    }
+}
+
+/// Render a `call_id` chain as `call#<local_id> in <def_path>` using the
+/// first element (origin call site). If the chain is inlined (length > 1),
+/// annotate with `(+N more)`.
+pub(crate) fn format_call_id_summary<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>,
+) -> String {
+    let Some((origin_def_id, local_id, _)) = call_id.iter().next() else {
+        return "call#(empty-chain)".to_string();
+    };
+    let def_path = tcx.def_path_str(origin_def_id);
+    let extra = call_id.len().saturating_sub(1);
+    if extra == 0 {
+        format!("call#{local_id} in {def_path}")
+    } else {
+        format!("call#{local_id} in {def_path} (+{extra} more)")
+    }
+}
+
+/// Compute transitive sensitivity for a single Instance by examining
+/// its callees' current sensitivity state. 
+fn compute_instance_sensitivity<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    instance: Instance<'tcx>,
+    call_graph: &FxHashMap<Instance<'tcx>, Vec<CallEdge<'tcx>>>,
+    resolved: &FxHashMap<Instance<'tcx>, CastRelevantLifetimes<'tcx>>,
+    composition_cache: &FxHashMap<
+        (Instance<'tcx>, &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>),
+        Vec<Option<usize>>,
+    >,
+) -> Option<CastRelevantLifetimes<'tcx>> {
+    let edges = match call_graph.get(&instance) {
+        Some(e) => e,
+        None => return resolved.get(&instance).cloned(),
+    };
+
+    let mut composed_mappings: UnordSet<LifetimeBVToParamMapping<'tcx>> = UnordSet::new();
+
+    for edge in edges {
+        let Some(callee_sensitivity) = resolved.get(&edge.callee) else {
+            continue;
+        };
+
+        let composed_positions = composition_cache
+            .get(&(instance, edge.call_id))
+            .expect("composition cache miss in compute_instance_sensitivity")
+            .as_slice();
+        composed_mappings.extend_unord(callee_sensitivity.mappings.items().filter_map(|mapping| {
+            let composed_bv = mapping.0.iter().map(|entry| {
+                entry.and_then(|walk_pos| composed_positions.get(walk_pos).copied().flatten())
+            });
+            if composed_bv.clone().any(|b| b.is_some()) {
+                let list = tcx.mk_lifetime_bv_to_param_mapping_from_iter(composed_bv);
+                Some(LifetimeBVToParamMapping(list))
+            } else {
+                None
+            }
+        }));
+    }
+
+    // Merge with direct sensitivity if present.
+    if let Some(direct) = resolved.get(&instance) {
+        if composed_mappings.is_empty() {
+            return Some(direct.clone());
+        }
+        composed_mappings.extend_unord(direct.mappings.items().copied());
+        return Some(CastRelevantLifetimes { mappings: composed_mappings });
+    }
+
+    if composed_mappings.is_empty() {
+        return None;
+    }
+    Some(CastRelevantLifetimes { mappings: composed_mappings })
+}
+
+/// Monotone join: merges mappings from `b` into `a`. Returns
+/// `(merged, changed)` where `changed` is true iff at least one new
+/// mapping was inserted. Deduplication is O(1) per element via
+/// `UnordSet`; change is detected by comparing set sizes before and
+/// after the `extend_unord` merge. 
+fn crl_join<'tcx>( + mut a: CastRelevantLifetimes<'tcx>, + b: CastRelevantLifetimes<'tcx>, +) -> (CastRelevantLifetimes<'tcx>, bool) { + let before = a.mappings.len(); + a.mappings.extend_unord(b.mappings.into_items()); + let changed = a.mappings.len() > before; + (a, changed) +} + +// ── augmented_outlives_for_call query provider ──────────────────────────── + +/// Query provider for `augmented_outlives_for_call`. +/// +/// For MIR-backed callees, computes per-call-site outlives entries in the +/// callee's binder-variable index space by composing the callee's +/// `CastRelevantLifetimes` through the `call_id` chain and consulting the +/// caller's outlives environment. +/// +/// For MIR-less intrinsic callees, the fallback path returns entries in the +/// origin/transport walk-position space induced by the caller's own CRL; +/// intrinsic-specific resolvers remap those entries into their native +/// consumer space before interpreting them. +pub(crate) fn augmented_outlives_for_call<'tcx>( + tcx: TyCtxt<'tcx>, + (caller, call_id, callee): ( + Instance<'tcx>, + &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, + Instance<'tcx>, + ), +) -> &'tcx [ty::GenericArg<'tcx>] { + // 1. Get callee's cast-relevant lifetimes. + let Some(callee_sensitivity) = tcx.cast_relevant_lifetimes(callee) else { + // Intrinsic callees (trait_metadata_index, + // trait_cast_is_lifetime_erasure_safe, etc.) are never in the + // sensitivity map — they have no MIR body. For these, use the + // caller's direct-sensitivity mapping if the caller is already + // augmented, or fall back to the intrinsic call site's identity + // walk-position mapping for true ground-level callers. 
+ let callee_def_id = callee.def_id(); + if tcx.is_intrinsic(callee_def_id, sym::trait_metadata_index) + || tcx.is_intrinsic(callee_def_id, sym::trait_cast_is_lifetime_erasure_safe) + || tcx.is_intrinsic(callee_def_id, sym::trait_metadata_table) + || tcx.is_intrinsic(callee_def_id, sym::trait_metadata_table_len) + { + // Sentinel-only augmented instances (only the sentinel, + // no real outlives pairs) have no outlives environment — + // fall through to the ground-level path that uses the + // borrowck region summary. + let has_real_outlives = caller.outlives_entries().len() > 1; + let augmented = if has_real_outlives { + let caller_base = caller.strip_outlives(tcx); + let Ok(items) = tcx.items_of_instance((caller_base, CollectionMode::UsedItems)) + else { + return &[]; + }; + if items.direct_sensitivity.is_empty() { + return &[]; + } + let caller_direct = + CastRelevantLifetimes::from_direct_mappings(items.direct_sensitivity); + let caller_env = caller_env_for_call_id(tcx, caller, call_id); + augment_callee(tcx, caller, callee, &caller_direct, &caller_env, None) + } else { + let origin_def_id = call_id[0].0; + let origin_local_id = call_id[0].1; + let summary = tcx.borrowck_region_summary(origin_def_id); + let Some(mapping) = summary.call_site_mappings.get(&origin_local_id) else { + return &[]; + }; + let Some(identity_sensitivity) = + input_identity_sensitivity_for_call_site(tcx, summary, mapping) + else { + return &[]; + }; + let caller_env = + CallerOutlivesEnv::from_region_summary_walk_pos(tcx, summary, mapping); + augment_callee(tcx, caller, callee, &identity_sensitivity, &caller_env, None) + }; + let all = augmented.outlives_entries(); + if all.len() > 1 { + return tcx.arena.alloc_slice(&all[1..]); + } + } + return &[]; + }; + + // 2. Build composed_mapping: maps callee walk-order positions to + // the origin caller's walk-position space by composing through + // the full call_id chain in one pass. 
+ let max_pos = callee_sensitivity.max_walk_order_position(); + let composed_mapping = compose_all_through_chain(tcx, caller, call_id, max_pos); + + // 3. Build CallerOutlivesEnv from the caller Instance. + let caller_env = caller_env_for_call_id(tcx, caller, call_id); + // 4. Execute augment_callee. + let augmented = augment_callee( + tcx, + caller, + callee, + callee_sensitivity, + &caller_env, + Some(&composed_mapping), + ); + + // 5. Return the outlives entries, stripping the sentinel (always at + // position 0 of outlives_entries). + let all = augmented.outlives_entries(); + if all.len() <= 1 { + // Empty or only the sentinel — no meaningful outlives entries. + return &[]; + } + // Skip sentinel at index 0. + tcx.arena.alloc_slice(&all[1..]) +} + +// ── Build sensitivity_map for MonoItemPartitions ────────────────────────── + +/// Convert the internal `instance_sensitivity` map into the `UnordMap` +/// used by `MonoItemPartitions::sensitivity_map` and the +/// `crate_cast_relevant_lifetimes` query. 
+pub(crate) fn build_sensitivity_map<'tcx>( + tcx: TyCtxt<'tcx>, + instance_sensitivity: FxIndexMap, InstanceSensitivity<'tcx>>, +) -> &'tcx UnordMap, CastRelevantLifetimes<'tcx>> { + let map: UnordMap<_, _> = instance_sensitivity + .into_iter() + .filter_map(|(inst, entry)| entry.sensitivity.map(|crl| (inst, crl))) + .collect(); + tcx.arena.alloc(map) +} diff --git a/compiler/rustc_monomorphize/src/collector.rs b/compiler/rustc_monomorphize/src/collector.rs index f819ee42441a7..4518a7a8a9cd6 100644 --- a/compiler/rustc_monomorphize/src/collector.rs +++ b/compiler/rustc_monomorphize/src/collector.rs @@ -208,9 +208,9 @@ use std::cell::OnceCell; use std::ops::ControlFlow; -use rustc_data_structures::fx::FxIndexMap; +use rustc_data_structures::fx::{FxHashMap, FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::{Lock, par_for_each_in}; -use rustc_data_structures::unord::{UnordMap, UnordSet}; +use rustc_data_structures::unord::{ExtendUnord, UnordMap, UnordSet}; use rustc_hir as hir; use rustc_hir::attrs::InlineAttr; use rustc_hir::def::DefKind; @@ -220,21 +220,28 @@ use rustc_hir::limit::Limit; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::mir::interpret::{AllocId, ErrorHandled, GlobalAlloc, Scalar}; use rustc_middle::mir::visit::Visitor as MirVisitor; -use rustc_middle::mir::{self, Body, Location, MentionedItem, traversal}; -use rustc_middle::mono::{CollectionMode, InstantiationMode, MonoItem, NormalizationErrorInMono}; +use rustc_middle::mir::{self, Body, InputSlot, Location, MentionedItem, VidProvenance, traversal}; +use rustc_middle::mono::{ + CastRelevantLifetimes, CollectionMode, DelayedInstance, ItemsOfInstance, + LifetimeBVToParamMapping, MonoItem, NormalizationErrorInMono, UsageMap, +}; use rustc_middle::query::TyCtxtAt; use rustc_middle::ty::adjustment::{CustomCoerceUnsized, PointerCoercion}; use rustc_middle::ty::layout::ValidityRequirement; use rustc_middle::ty::{ - self, GenericArgs, GenericParamDefKind, Instance, 
InstanceKind, Ty, TyCtxt, TypeFoldable, + self, GenericArgs, GenericParamDefKind, Instance, InstanceKind, List, Ty, TyCtxt, TypeFoldable, TypeVisitable, TypeVisitableExt, TypeVisitor, Unnormalized, VtblEntry, }; use rustc_middle::util::Providers; use rustc_middle::{bug, span_bug}; use rustc_session::config::{DebugInfo, EntryFnType}; -use rustc_span::{DUMMY_SP, Span, Spanned, dummy_spanned, respan}; +use rustc_span::{DUMMY_SP, Span, Spanned, dummy_spanned, respan, sym}; use tracing::{debug, instrument, trace}; +use crate::cast_sensitivity::{ + self, CallerOutlivesEnv, InstanceSensitivity, augment_callee, compose_all_through_chain, +}; +use crate::erasure_safe::{region_slots_of_arg, region_slots_of_args}; use crate::errors::{ self, EncounteredErrorWhileInstantiating, EncounteredErrorWhileInstantiatingGlobalAsm, NoOptimizedMir, RecursionLimit, @@ -255,54 +262,15 @@ struct SharedState<'tcx> { mentioned: Lock>>, /// Which items are being used where, for better errors. usage_map: Lock>, -} - -pub(crate) struct UsageMap<'tcx> { - // Maps every mono item to the mono items used by it. - pub used_map: UnordMap, Vec>>, - - // Maps each mono item with users to the mono items that use it. - // Be careful: subsets `used_map`, so unused items are vacant. - user_map: UnordMap, Vec>>, -} - -impl<'tcx> UsageMap<'tcx> { - fn new() -> UsageMap<'tcx> { - UsageMap { used_map: Default::default(), user_map: Default::default() } - } - - fn record_used<'a>(&mut self, user_item: MonoItem<'tcx>, used_items: &'a MonoItems<'tcx>) - where - 'tcx: 'a, - { - for used_item in used_items.items() { - self.user_map.entry(used_item).or_default().push(user_item); - } - - assert!(self.used_map.insert(user_item, used_items.items().collect()).is_none()); - } - - pub(crate) fn get_user_items(&self, item: MonoItem<'tcx>) -> &[MonoItem<'tcx>] { - self.user_map.get(&item).map(|items| items.as_slice()).unwrap_or(&[]) - } - - /// Internally iterate over all inlined items used by `item`. 
- pub(crate) fn for_each_inlined_used_item( - &self, - tcx: TyCtxt<'tcx>, - item: MonoItem<'tcx>, - mut f: F, - ) where - F: FnMut(MonoItem<'tcx>), - { - let used_items = self.used_map.get(&item).unwrap(); - for used_item in used_items.iter() { - let is_inlined = used_item.instantiation_mode(tcx) == InstantiationMode::LocalCopy; - if is_inlined { - f(*used_item); - } - } - } + /// Delayed codegen requests: augmented Instances whose codegen must be + /// handled by the global phase. Includes both directly sensitive + /// (intrinsic-containing) and transitively sensitive (callee-patching) + /// Instances, each with their callee substitution metadata. + delayed_codegen: Lock>>, + /// Per-Instance sensitivity metadata, keyed on base Instance + /// (no Outlives entries). Populated by `collect_items_rec` + /// post-processing and consumed by sensitive-subgraph augmentation. + instance_sensitivity: Lock, InstanceSensitivity<'tcx>>>, } struct MonoItems<'tcx> { @@ -471,8 +439,13 @@ fn collect_items_rec<'tcx>( recursion_limit, )); + // `must_delay_codegen` instances are NOT pushed to + // `delayed_codegen` here. `augment_sensitive_subgraphs` handles + // all delayed-codegen recording — both for sensitive and + // non-sensitive instances — after collection finishes. + rustc_data_structures::stack::ensure_sufficient_stack(|| { - let Ok((used, mentioned)) = tcx.items_of_instance((instance, mode)) else { + let Ok(result) = tcx.items_of_instance((instance, mode)) else { // Normalization errors here are usually due to trait solving overflow. // FIXME: I assume that there are few type errors at post-analysis stage, but not // entirely sure. 
@@ -488,8 +461,8 @@ fn collect_items_rec<'tcx>( def_path_str, }); }; - used_items.extend(used.into_iter().copied()); - mentioned_items.extend(mentioned.into_iter().copied()); + used_items.extend(result.used_items.iter().copied()); + mentioned_items.extend(result.mentioned_items.iter().copied()); }); } MonoItem::GlobalAsm(item_id) => { @@ -566,7 +539,7 @@ fn collect_items_rec<'tcx>( // This is part of the output of collection and hence only relevant for "used" items. // ("Mentioned" items are only considered internally during collection.) if mode == CollectionMode::UsedItems { - state.usage_map.lock().record_used(starting_item.node, &used_items); + state.usage_map.lock().record_used(starting_item.node, used_items.items()); } { @@ -686,6 +659,9 @@ struct MirUsedCollector<'a, 'tcx> { /// Note that this contains *not-monomorphized* items! used_mentioned_items: &'a mut UnordSet>, instance: Instance<'tcx>, + /// Records (call_chain, callee_instance) for each resolved Call/TailCall. + /// Used for transitive sensitivity composition in `collect_items_rec`. + call_sites: &'a mut Vec<(&'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, Instance<'tcx>)>, } impl<'a, 'tcx> MirUsedCollector<'a, 'tcx> { @@ -853,13 +829,18 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirUsedCollector<'a, 'tcx> { false }; - visit_fn_use( - self.tcx, - callee_ty, - !force_indirect_call, - source, - &mut self.used_items, - ) + if let Some(instance) = + visit_fn_use(self.tcx, callee_ty, !force_indirect_call, source, self.used_items) + { + // Record call site for sensitivity composition. + // Use the stable call_id from the terminator, not the basic block index. + let call_id = match terminator.kind { + mir::TerminatorKind::Call { call_id, .. } => call_id, + mir::TerminatorKind::TailCall { call_id, .. } => call_id, + _ => unreachable!(), + }; + self.call_sites.push((call_id, instance)); + } } mir::TerminatorKind::Drop { ref place, .. 
} => { let ty = place.ty(self.body, self.tcx).ty; @@ -947,7 +928,7 @@ fn visit_fn_use<'tcx>( is_direct_call: bool, source: Span, output: &mut MonoItems<'tcx>, -) { +) -> Option> { if let ty::FnDef(def_id, args) = *ty.kind() { let instance = if is_direct_call { ty::Instance::expect_resolve( @@ -969,6 +950,9 @@ fn visit_fn_use<'tcx>( } }; visit_instance_use(tcx, instance, is_direct_call, source, output); + Some(instance) + } else { + None } } @@ -1090,6 +1074,36 @@ fn should_codegen_locally<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> true } +fn has_trait_cast_intrinsics<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> bool { + let def_id = def_id.to_def_id(); + if !tcx.is_mir_available(def_id) { + return false; + } + let body = tcx.optimized_mir(def_id); + for bb in body.basic_blocks.iter() { + if let mir::TerminatorKind::Call { ref func, .. } = bb.terminator().kind { + let func_ty = func.ty(body, tcx); + if let ty::FnDef(callee, _) = *func_ty.kind() { + if tcx.is_intrinsic(callee, sym::trait_metadata_index) + || tcx.is_intrinsic(callee, sym::trait_metadata_table) + || tcx.is_intrinsic(callee, sym::trait_metadata_table_len) + || tcx.is_intrinsic(callee, sym::trait_cast_is_lifetime_erasure_safe) + { + return true; + } + } + } + } + false +} + +fn must_delay_codegen<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> bool { + match instance.def { + InstanceKind::Item(def_id) => tcx.has_trait_cast_intrinsics(def_id), + _ => false, + } +} + /// For a given pair of source and target type that occur in an unsizing coercion, /// this function finds the pair of types that determines the vtable linking /// them. @@ -1299,6 +1313,49 @@ fn collect_alloc<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId, output: &mut MonoIt } } +/// Collect mono items for vtable methods of a (sub_trait, concrete_type) +/// pair from a trait cast table. Uses the same vtable_entries path as the +/// normal collector to ensure Instance consistency. 
+/// +/// Called from `resolve_trait_cast_globals` because these vtables are +/// created after the normal mono item collection phase. +pub(crate) fn collect_vtable_methods_for_trait_cast<'tcx>( + tcx: TyCtxt<'tcx>, + sub_trait: Ty<'tcx>, + concrete_type: Ty<'tcx>, + output: &mut Vec>, +) { + let mut items = MonoItems::new(); + create_mono_items_for_vtable_methods(tcx, sub_trait, concrete_type, DUMMY_SP, &mut items); + output.extend(items.items()); +} + +/// Collect mono items reachable from a trait-cast metadata allocation. +/// +/// Walking the allocation provenance reaches the table's referenced vtables, +/// and from there the actual method instances stored in those vtables. +pub(crate) fn collect_alloc_items_for_trait_cast<'tcx>( + tcx: TyCtxt<'tcx>, + alloc_id: AllocId, + output: &mut Vec>, +) { + let mut items = MonoItems::new(); + collect_alloc(tcx, alloc_id, &mut items); + output.extend(items.items()); +} + +struct CollectedItems<'tcx> { + used_items: MonoItems<'tcx>, + mentioned_items: MonoItems<'tcx>, + /// For each Call/TailCall terminator, the resolved callee Instance + /// and its call-site identifier `(DefId, u32, callee_args)`. Used by + /// `collect_items_rec` to compute transitive sensitivity. + call_sites: Vec<(&'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, Instance<'tcx>)>, + /// Direct sensitivity of this Instance, derived from walking the + /// post-inlining MIR for trait-cast intrinsic calls. Empty if not sensitive. + direct_sensitivity: &'tcx [LifetimeBVToParamMapping<'tcx>], +} + /// Scans the MIR in order to find function calls, closures, and drop-glue. /// /// Anything that's found is added to `output`. Furthermore the "mentioned items" of the MIR are returned. 
@@ -1307,7 +1364,7 @@ fn collect_items_of_instance<'tcx>(
     tcx: TyCtxt<'tcx>,
     instance: Instance<'tcx>,
     mode: CollectionMode,
-) -> Result<(MonoItems<'tcx>, MonoItems<'tcx>), NormalizationErrorInMono> {
+) -> Result<CollectedItems<'tcx>, NormalizationErrorInMono> {
     // This item is getting monomorphized, do mono-time checks.
     let body = tcx.instance_mir(instance.def);
     // Plenty of code paths later assume that everything can be normalized. So we have to check
@@ -1329,12 +1386,14 @@ fn collect_items_of_instance<'tcx>(
     let mut used_items = MonoItems::new();
     let mut mentioned_items = MonoItems::new();
     let mut used_mentioned_items = Default::default();
+    let mut call_sites = Vec::new();
     let mut collector = MirUsedCollector {
         tcx,
         body,
         used_items: &mut used_items,
         used_mentioned_items: &mut used_mentioned_items,
         instance,
+        call_sites: &mut call_sites,
     };
 
     if mode == CollectionMode::UsedItems {
@@ -1365,22 +1424,25 @@ fn collect_items_of_instance<'tcx>(
         }
     }
 
-    Ok((used_items, mentioned_items))
+    // Derive per-DefId direct sensitivity from borrowck_region_summary.
+    let direct_sensitivity = derive_direct_sensitivity(tcx, instance);
+
+    Ok(CollectedItems { used_items, mentioned_items, call_sites, direct_sensitivity })
 }
 
 fn items_of_instance<'tcx>(
     tcx: TyCtxt<'tcx>,
     (instance, mode): (Instance<'tcx>, CollectionMode),
-) -> Result<
-    (&'tcx [Spanned<MonoItem<'tcx>>], &'tcx [Spanned<MonoItem<'tcx>>]),
-    NormalizationErrorInMono,
-> {
-    let (used_items, mentioned_items) = collect_items_of_instance(tcx, instance, mode)?;
-
-    let used_items = tcx.arena.alloc_from_iter(used_items);
-    let mentioned_items = tcx.arena.alloc_from_iter(mentioned_items);
-
-    Ok((used_items, mentioned_items))
+) -> Result<ItemsOfInstance<'tcx>, NormalizationErrorInMono> {
+    let CollectedItems { used_items, mentioned_items, call_sites, direct_sensitivity } =
+        collect_items_of_instance(tcx, instance, mode)?;
+
+    Ok(ItemsOfInstance {
+        used_items: tcx.arena.alloc_from_iter(used_items),
+        mentioned_items: tcx.arena.alloc_from_iter(mentioned_items),
+        call_sites: tcx.arena.alloc_from_iter(call_sites),
+        direct_sensitivity,
+    })
 }
 
 /// `item` must be already monomorphized.
@@ -1803,15 +1865,560 @@ fn create_mono_items_for_default_impls<'tcx>(
     }
 }
 
+//=-----------------------------------------------------------------------------
+// Two-phase collection
+//=-----------------------------------------------------------------------------
+
+/// Derive per-Instance direct sensitivity by walking the post-inlining
+/// (optimized) MIR for trait-cast intrinsic calls. Handles both non-inlined
+/// (single-element call_id chain) and inlined (multi-element chain) intrinsic
+/// calls via call_id chain composition.
+///
+/// Returns a slice of interned `LifetimeBVToParamMapping`s — one per
+/// intrinsic call site whose target dyn type references universal region
+/// params. Empty for the vast majority of functions.
+fn derive_direct_sensitivity<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    instance: Instance<'tcx>,
+) -> &'tcx [LifetimeBVToParamMapping<'tcx>] {
+    let def_id = instance.def_id();
+
+    // Fast path: most functions don't contain trait cast intrinsics at all.
+    if !tcx.has_trait_cast_intrinsics(def_id) {
+        return &[];
+    }
+
+    // Walk the post-inlining MIR to find all trait-cast intrinsic calls
+    // (both non-inlined and inlined).
+    let body = tcx.instance_mir(instance.def);
+    let mut mappings: Vec<LifetimeBVToParamMapping<'tcx>> = Vec::new();
+    let mut input_slot_to_walk_pos: FxHashMap<InputSlot, usize> = FxHashMap::default();
+    let mut input_walk_pos = 0usize;
+    for (arg_ordinal, arg) in instance.args.iter().enumerate() {
+        let slots = region_slots_of_arg(arg);
+        for offset in 0..slots {
+            input_slot_to_walk_pos.insert(
+                InputSlot { arg_ordinal: arg_ordinal as u32, offset_within_arg: offset as u32 },
+                input_walk_pos + offset,
+            );
+        }
+        input_walk_pos += slots;
+    }
+
+    for bb_data in body.basic_blocks.iter() {
+        let mir::TerminatorKind::Call { ref func, call_id, .. } = bb_data.terminator().kind else {
+            continue;
+        };
+
+        // Check if this is a trait-cast intrinsic call.
+        let func_ty = func.ty(body, tcx);
+        let ty::FnDef(callee, _) = *func_ty.kind() else {
+            continue;
+        };
+        if !tcx.is_intrinsic(callee, sym::trait_metadata_index)
+            && !tcx.is_intrinsic(callee, sym::trait_cast_is_lifetime_erasure_safe)
+        {
+            continue;
+        }
+
+        if call_id.is_empty() {
+            bug!("call-site must be annotated with a unique id");
+        }
+
+        // The first element of the chain identifies the original function
+        // containing the intrinsic call.
+        let &(origin_def_id, origin_local_id, edge_args_template) = &call_id[0];
+
+        // Fetch the borrowck region summary for the origin function.
+        let origin_summary = tcx.borrowck_region_summary(origin_def_id);
+
+        // Build the bv_to_param mapping. Two paths:
+        // 1. Borrowck has a call-site mapping → use vid provenance.
+        // 2. No mapping (regions only appear after monomorphization,
+        //    e.g.
U = dyn Trait<'lt>) → trace through template args.
+        let (bv_to_param, has_entry) =
+            if let Some(origin_mapping) = origin_summary.call_site_mappings.get(&origin_local_id) {
+                // Path 1: borrowck saw regions at this call site.
+                let graph = &origin_summary.outlives_graph;
+
+                let max_walk_pos = origin_mapping
+                    .region_mappings
+                    .items()
+                    .map(|(&walk_pos, _)| walk_pos as usize)
+                    .max()
+                    .unwrap_or(0);
+
+                let mut bv_to_param: Vec<Option<usize>> = vec![None; max_walk_pos + 1];
+                let mut has_entry = false;
+
+                // Check which vids are in the 'static SCC so we can exclude them.
+                let static_scc = origin_summary
+                    .vid_to_param_pos
+                    .iter()
+                    .find(|&&(_, pp)| pp == mir::STATIC_PARAM_POS)
+                    .and_then(|&(vid, _)| graph.scc_of_vid(vid));
+
+                for (walk_pos, region_vid) in origin_mapping
+                    .region_mappings
+                    .items()
+                    .map(|(&walk_pos, &region_vid)| (walk_pos, region_vid))
+                    .into_sorted_stable_ord()
+                {
+                    if let Some(scc) = graph.scc_of_vid(region_vid) {
+                        if Some(scc) != static_scc {
+                            match origin_summary.vid_provenance.get(&region_vid).copied() {
+                                Some(
+                                    VidProvenance::Input(slot)
+                                    | VidProvenance::BoundedByUniversal(slot),
+                                ) => {
+                                    let input_wp = input_slot_to_walk_pos
+                                        .get(&slot)
+                                        .copied()
+                                        .unwrap_or(walk_pos as usize);
+                                    bv_to_param[walk_pos as usize] = Some(input_wp);
+                                    has_entry = true;
+                                }
+                                Some(VidProvenance::Static | VidProvenance::LocalOnly) => {}
+                                None => bug!(
+                                    "missing vid provenance for region vid {region_vid} in {:?}",
+                                    origin_summary
+                                ),
+                            }
+                        }
+                    }
+                }
+
+                (bv_to_param, has_entry)
+            } else {
+                // Path 2: no call-site mapping. Regions inside the callee's
+                // type args were invisible to borrowck because they were
+                // hidden inside type parameters (e.g. U = dyn Trait<'lt>).
+                // Trace the regions through the edge-args template to the
+                // caller's input walk-position space.
+                let concrete_edge_args = instance.instantiate_mir_and_normalize_erasing_regions(
+                    tcx,
+                    ty::TypingEnv::fully_monomorphized(),
+                    ty::EarlyBinder::bind(edge_args_template),
+                );
+                let total_slots: usize = region_slots_of_args(concrete_edge_args);
+                if total_slots == 0 {
+                    continue;
+                }
+
+                let mut bv_to_param: Vec<Option<usize>> = vec![None; total_slots];
+                let mut has_entry = false;
+                let mut walk_pos = 0usize;
+
+                for (template_arg, concrete_arg) in
+                    edge_args_template.iter().zip(concrete_edge_args.iter())
+                {
+                    let source_arg_ordinal = match template_arg.kind() {
+                        ty::GenericArgKind::Type(ty) => match ty.kind() {
+                            ty::Param(param_ty) => Some(param_ty.index as usize),
+                            _ => None,
+                        },
+                        ty::GenericArgKind::Lifetime(region) => match region.kind() {
+                            ty::ReEarlyParam(ep) => Some(ep.index as usize),
+                            _ => None,
+                        },
+                        ty::GenericArgKind::Const(ct) => match ct.kind() {
+                            ty::ConstKind::Param(param_ct) => Some(param_ct.index as usize),
+                            _ => None,
+                        },
+                        ty::GenericArgKind::Outlives(_) => None,
+                    };
+
+                    let slots = region_slots_of_arg(concrete_arg);
+                    if let Some(source_ordinal) = source_arg_ordinal {
+                        for offset in 0..slots {
+                            let slot = InputSlot {
+                                arg_ordinal: source_ordinal as u32,
+                                offset_within_arg: offset as u32,
+                            };
+                            if let Some(&input_wp) = input_slot_to_walk_pos.get(&slot) {
+                                bv_to_param[walk_pos + offset] = Some(input_wp);
+                                has_entry = true;
+                            }
+                        }
+                    }
+                    walk_pos += slots;
+                }
+
+                (bv_to_param, has_entry)
+            };
+
+        if !has_entry {
+            continue;
+        }
+
+        let list = tcx.mk_lifetime_bv_to_param_mapping_from_iter(bv_to_param.into_iter());
+        mappings.push(LifetimeBVToParamMapping(list));
+    }
+
+    if mappings.is_empty() {
+        return &[];
+    }
+
+    tcx.arena.alloc_from_iter(mappings)
+}
+
+/// Create augmented Instances and record delayed codegen requests.
+/// Runs after the main `collect_items_rec` DFS completes.
+///
+/// This function does NOT patch MIR or feed `codegen_mir`. All MIR patching
+/// is performed exclusively in the global crate's global phase.
This
+/// function is responsible for:
+/// - Creating augmented Instances via `augment_callee`
+/// - Recording ALL `DelayedInstance` entries in `delayed_codegen`
+///   (both sensitive and non-sensitive `must_delay_codegen` instances)
+/// - Populating `intrinsic_callees` for intrinsic-containing functions
+///   so that `gather_trait_cast_requests` discovers all intrinsic sites
+/// - Cleaning up superseded base Instances that are replaced by augmented
+///   variants.
+///
+/// The collector itself does NOT push to `delayed_codegen` during the DFS —
+/// this function handles all delayed-codegen recording, ensuring
+/// `intrinsic_callees` is always populated when trait-cast intrinsic
+/// calls are present.
+fn augment_sensitive_subgraphs<'tcx>(tcx: TyCtxt<'tcx>, state: &mut SharedState<'tcx>) {
+    // Runs after the parallel collection phase has finished, so `state` is
+    // exclusively owned here. Take direct `&mut` views of each MTLock-guarded
+    // field rather than re-acquiring the locks per access.
+    let SharedState { visited, mentioned, usage_map, delayed_codegen, instance_sensitivity } =
+        state;
+    let visited = visited.get_mut();
+    let mentioned = mentioned.get_mut();
+    let usage_map = usage_map.get_mut();
+    let delayed_codegen = delayed_codegen.get_mut();
+    let instance_sensitivity = instance_sensitivity.get_mut();
+
+    // Worklist: (instance, base_instance_for_metadata_lookup).
+    // For ground-level transitioning bodies: instance == base_instance (no Outlives).
+    // For augmented Instances: instance has Outlives, base_instance is stripped.
+    let mut worklist: Vec<(Instance<'tcx>, Instance<'tcx>)> = Vec::new();
+    let mut replaced_bases: FxIndexSet<Instance<'tcx>> = FxIndexSet::default();
+
+    // Seed: ALL sensitive base Instances — both transitively sensitive
+    // (non-empty augmented_callees) AND directly sensitive (empty
+    // augmented_callees but sensitivity.is_some()).
This ensures root + // directly sensitive functions get their intrinsic_callees populated + // for gather_trait_cast_requests. + for (instance, entry) in instance_sensitivity.iter() { + if entry.sensitivity.is_some() && !instance.has_outlives_entries() { + worklist.push((*instance, *instance)); + } + } + + while let Some((instance, base_instance)) = worklist.pop() { + let Some(entry) = instance_sensitivity.get(&base_instance) else { continue }; + let augmented_callees = entry.augmented_callees; + let has_augmented_callees = !augmented_callees.is_empty(); + let intrinsic_callees = collect_intrinsic_callees(tcx, instance); + + if has_augmented_callees { + // Transitively sensitive: has augmented callees to substitute. + // Collect the instance's OWN intrinsic callees (if any) from + // its MIR. For purely transitive callers (like main), this is + // empty — their intrinsics are covered by each callee's own + // DelayedInstance entry (created when the worklist processes + // the augmented callee below). + + delayed_codegen.insert(DelayedInstance { + instance, + callee_substitutions: augmented_callees, + intrinsic_callees, + }); + + // Add augmented callees as new MonoItems and continue down + // the sensitive sub-graph. + for &(_call_id, augmented_callee) in augmented_callees { + let callee_base = augmented_callee.strip_outlives(tcx); + visited.insert(MonoItem::Fn(augmented_callee)); + replaced_bases.insert(callee_base); + + // If this augmented callee is itself transitively sensitive, + // compute ITS augmented callees and add to worklist. + let Some(callee_entry) = instance_sensitivity.get(&callee_base) else { + continue; + }; + if callee_entry.sensitivity.is_none() { + continue; + } + + let callee_call_sites = callee_entry.sensitive_call_sites; + let callee_sensitivity = callee_entry.sensitivity.clone().unwrap(); + + // Build CallerOutlivesEnv from the augmented Instance's + // Outlives entries. 
+ let caller_env = CallerOutlivesEnv::from_outlives_entries(tcx, &augmented_callee); + + // Compute augmented sub-callees for the augmented callee. + // Use the batched walk-position transport so each chain + // link is fetched once per call site. + let mut sub_augmented = Vec::new(); + for &(sub_call_id, sub_callee) in callee_call_sites { + let Some(sub_entry) = instance_sensitivity.get(&sub_callee) else { + continue; + }; + let Some(sub_sens) = &sub_entry.sensitivity else { + continue; + }; + + // Build composed mapping through the full call_id chain. + let max_walk_pos = sub_sens.max_walk_order_position(); + let composed_mapping = + compose_all_through_chain(tcx, augmented_callee, sub_call_id, max_walk_pos); + + let sub_augmented_callee = augment_callee( + tcx, + augmented_callee, + sub_callee, + sub_sens, + &caller_env, + Some(&composed_mapping), + ); + sub_augmented.push((sub_call_id, sub_augmented_callee)); + } + + instance_sensitivity.insert( + augmented_callee, + InstanceSensitivity { + sensitivity: Some(callee_sensitivity), + sensitive_call_sites: callee_call_sites, + augmented_callees: tcx.arena.alloc_from_iter(sub_augmented), + }, + ); + + worklist.push((augmented_callee, augmented_callee)); + } + } else { + // Directly sensitive root function: contains intrinsic calls + // but has no sensitive callees. Collect sentinel-augmented + // intrinsic callee Instances from the MIR so that + // gather_trait_cast_requests discovers all intrinsic sites. + delayed_codegen.insert(DelayedInstance { + instance, + callee_substitutions: &[], + intrinsic_callees, + }); + } + } + + // Post-worklist pass: handle must_delay_codegen instances that are + // NOT in the sensitivity_map (non-generic functions with intrinsic + // calls where all binder variables map to existential/static regions). + // These need DelayedInstance entries with sentinel-augmented + // intrinsic_callees so gather_trait_cast_requests sees them. 
+ { + let delayed_instances: UnordSet> = + delayed_codegen.items().map(|d| d.instance).collect(); + + delayed_codegen.extend_unord(visited.items().filter_map(|&item| { + let MonoItem::Fn(instance) = item else { + return None; + }; + if instance.has_outlives_entries() { + return None; + } + if !tcx.must_delay_codegen(instance) { + return None; + } + if delayed_instances.contains(&instance) { + return None; + } + + let intrinsic_callees = collect_intrinsic_callees(tcx, instance); + Some(DelayedInstance { instance, callee_substitutions: &[], intrinsic_callees }) + })); + } + + // Cleanup: remove base Instances superseded by augmented versions + // from visited, mentioned, usage_map, AND delayed_codegen. + // A base Instance can end up in delayed_codegen when it is processed + // before its caller augments it (e.g., check_a is delayed first as + // non-augmented, then main augments it → augmented_check_a is also + // delayed). The base must be removed so only the augmented version + // remains. + // + // When removing a base's usage_map entry, transfer its used-items + // list to the augmented replacement so the partitioner can still + // discover LocalCopy dependencies via `get_reachable_inlined_items`. + if !replaced_bases.is_empty() { + // Build base → augmented Instance mapping for usage_map transfer. + let base_to_augmented: FxIndexMap, Instance<'tcx>> = replaced_bases + .iter() + .filter_map(|base| { + // Find the augmented version: an Instance in visited + // whose strip_outlives == base and has outlives entries. + // The augmented callees for base's callers contain this. + // Look through all sensitivity entries' augmented_callees. 
+                for (_inst, entry) in instance_sensitivity.iter() {
+                    for &(_call_id, aug) in entry.augmented_callees {
+                        if aug.strip_outlives(tcx) == *base && aug.has_outlives_entries() {
+                            return Some((*base, aug));
+                        }
+                    }
+                }
+                None
+            })
+            .collect();
+
+        for base in &replaced_bases {
+            let base_mono = MonoItem::Fn(*base);
+            // Transfer usage_map entry to augmented version before removing.
+            if let Some(augmented) = base_to_augmented.get(base) {
+                let aug_mono = MonoItem::Fn(*augmented);
+                if let Some(used_items) = usage_map.used_map.get(&base_mono).cloned() {
+                    usage_map.used_map.insert(aug_mono, used_items);
+                }
+            }
+            visited.remove(&base_mono);
+            mentioned.remove(&base_mono);
+            usage_map.remove(base_mono);
+        }
+
+        let filtered: UnordSet<DelayedInstance<'tcx>> = delayed_codegen
+            .items()
+            .filter(|d| !replaced_bases.contains(&d.instance))
+            .copied()
+            .collect();
+        *delayed_codegen = filtered;
+    }
+
+    // Remove ALL delayed codegen Instances from visited. These will be
+    // re-added by cascade_canonicalize with patched bodies (intrinsic
+    // calls resolved). Without this, delayed instances appear twice in
+    // mono_items — once from visited and once from cascade_canonicalize.
+    for d in tcx.with_stable_hashing_context(|mut hcx| {
+        delayed_codegen.items().copied().collect_sorted::<_, Vec<_>>(&mut hcx, false)
+    }) {
+        visited.remove(&MonoItem::Fn(d.instance));
+    }
+}
+
+/// Collect sentinel-augmented intrinsic callee Instances from a function's
+/// MIR. Walks the basic blocks looking for Call terminators that invoke
+/// trait-cast intrinsics (`trait_metadata_index`, `trait_metadata_table`,
+/// `trait_metadata_table_len`, `trait_cast_is_lifetime_erasure_safe`).
+/// Each found intrinsic callee is sentinel-augmented via `with_outlives(tcx, &[])`,
+/// indicating the (conservative) empty outlives class. The global phase
+/// computes the actual per-call-site outlives class via
+/// `augmented_outlives_for_call`.
+fn collect_intrinsic_callees<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    instance: Instance<'tcx>,
+) -> &'tcx [Instance<'tcx>] {
+    let body = tcx.instance_mir(instance.def);
+    let mut callees: Vec<Instance<'tcx>> = Vec::new();
+
+    for bb_data in body.basic_blocks.iter() {
+        let mir::TerminatorKind::Call { ref func, .. } = bb_data.terminator().kind else {
+            continue;
+        };
+
+        let func_ty = func.ty(body, tcx);
+        // The body from instance_mir is polymorphic — apply the caller
+        // instance's substitutions to get fully monomorphized types.
+        let func_ty = instance.instantiate_mir_and_normalize_erasing_regions(
+            tcx,
+            ty::TypingEnv::fully_monomorphized(),
+            ty::EarlyBinder::bind(func_ty),
+        );
+        let ty::FnDef(callee_def_id, callee_args) = *func_ty.kind() else {
+            continue;
+        };
+
+        if !tcx.is_intrinsic(callee_def_id, sym::trait_metadata_index)
+            && !tcx.is_intrinsic(callee_def_id, sym::trait_cast_is_lifetime_erasure_safe)
+            && !tcx.is_intrinsic(callee_def_id, sym::trait_metadata_table)
+            && !tcx.is_intrinsic(callee_def_id, sym::trait_metadata_table_len)
+        {
+            continue;
+        }
+
+        let callee_instance = Instance::expect_resolve(
+            tcx,
+            ty::TypingEnv::fully_monomorphized(),
+            callee_def_id,
+            callee_args,
+            body.span,
+        );
+
+        // Sentinel-augment: adds the Outlives sentinel (empty outlives
+        // class). The global phase's augmented_outlives_for_call query
+        // computes the actual per-call-site outlives class at resolution
+        // time.
+        let augmented = callee_instance.with_outlives(tcx, &[]);
+        callees.push(augmented);
+    }
+
+    if callees.is_empty() {
+        return &[];
+    }
+    tcx.arena.alloc_from_iter(callees)
+}
+
+/// Patch a Call/TailCall terminator in the MIR body to reference an augmented
+/// callee Instance. Finds the terminator with matching call-chain and modifies
+/// its func operand to point to the augmented callee's FnDef type.
+/// Used by the global phase's cascading canonicalization.
+pub(crate) fn patch_call_terminator<'tcx>(
+    body: &mut Body<'tcx>,
+    target_call_id: &'tcx List<(DefId, u32, ty::GenericArgsRef<'tcx>)>,
+    augmented_callee: Instance<'tcx>,
+    tcx: TyCtxt<'tcx>,
+) {
+    // Search all basic blocks for the terminator with matching call_id chain.
+    for bb_data in body.basic_blocks_mut().iter_mut() {
+        let terminator = bb_data.terminator_mut();
+        let (func, matched) = match &mut terminator.kind {
+            mir::TerminatorKind::Call { func, call_id, .. }
+                if std::ptr::eq(*call_id, target_call_id) =>
+            {
+                (func, true)
+            }
+            mir::TerminatorKind::TailCall { func, call_id, .. }
+                if std::ptr::eq(*call_id, target_call_id) =>
+            {
+                (func, true)
+            }
+            _ => continue,
+        };
+
+        if matched {
+            // Build the augmented callee's FnDef type.
+            let fn_ty = augmented_callee.ty(tcx, ty::TypingEnv::fully_monomorphized());
+            let span = terminator.source_info.span;
+            *func = mir::Operand::Constant(Box::new(mir::ConstOperand {
+                span,
+                user_ty: None,
+                const_: mir::Const::zero_sized(fn_ty),
+            }));
+            return; // call_id chain is unique within a body
+        }
+    }
+}
+
 //=-----------------------------------------------------------------------------
 // Top-level entry point, tying it all together
 //=-----------------------------------------------------------------------------
 
+/// Results of mono collection, including delayed codegen data for the
+/// global phase.
+pub(crate) struct CollectionResult<'tcx> {
+    pub mono_items: Vec<MonoItem<'tcx>>,
+    pub usage_map: UsageMap<'tcx>,
+    /// Full delayed codegen data with callee substitution metadata.
+    pub delayed_codegen: &'tcx [DelayedInstance<'tcx>],
+    /// Per-Instance cast-relevant lifetimes sensitivity map.
+    pub sensitivity_map: &'tcx UnordMap<Instance<'tcx>, CastRelevantLifetimes<'tcx>>,
+}
+
 #[instrument(skip(tcx, strategy), level = "debug")]
 pub(crate) fn collect_crate_mono_items<'tcx>(
     tcx: TyCtxt<'tcx>,
     strategy: MonoItemCollectionStrategy,
-) -> (Vec<MonoItem<'tcx>>, UsageMap<'tcx>) {
+) -> CollectionResult<'tcx> {
     let _prof_timer = tcx.prof.generic_activity("monomorphization_collector");
 
     let roots = tcx
@@ -1820,29 +2427,83 @@ pub(crate) fn collect_crate_mono_items<'tcx>(
 
     debug!("building mono item graph, beginning at roots");
 
-    let state = SharedState {
+    let mut state = SharedState {
         visited: Lock::new(UnordSet::default()),
         mentioned: Lock::new(UnordSet::default()),
         usage_map: Lock::new(UsageMap::new()),
+        delayed_codegen: Lock::new(UnordSet::default()),
+        instance_sensitivity: Lock::new(FxIndexMap::default()),
     };
 
     let recursion_limit = tcx.recursion_limit();
 
+    // Normal collection (sensitivity detection deferred to a batch pass).
     tcx.sess.time("monomorphization_collector_graph_walk", || {
         par_for_each_in(roots, |root| {
             collect_items_root(tcx, dummy_spanned(*root), &state, recursion_limit);
         });
     });
 
+    // Batch sensitivity computation: SCC-based fixed-point iteration
+    // over the full collected Instance set.
+    tcx.sess.time("monomorphization_collector_sensitivity_scc", || {
+        let visited = state.visited.lock();
+        cast_sensitivity::compute_cast_relevant_lifetimes(
+            tcx,
+            &state.instance_sensitivity,
+            &visited,
+        );
+    });
+
+    // Augmentation + delayed codegen recording. Handles ALL delayed
+    // codegen entries — both sensitive sub-graphs (augmentation) and
+    // non-sensitive must_delay_codegen instances (intrinsic callee
+    // collection). Does NOT patch MIR or feed codegen_mir — that is
+    // deferred to the global phase.
+ tcx.sess.time("monomorphization_collector_augmentation", || { + augment_sensitive_subgraphs(tcx, &mut state); + }); + + let SharedState { visited, mentioned: _, usage_map, delayed_codegen, instance_sensitivity } = + state; + + // Sort delayed codegen entries deterministically for the arena slice. + let delayed = tcx.with_stable_hashing_context(move |mut hcx| { + delayed_codegen.into_inner().into_sorted(&mut hcx, true) + }); + let delayed: &'tcx [DelayedInstance<'tcx>] = tcx.arena.alloc_slice(&delayed); + + // Build sensitivity_map from instance_sensitivity for query proxying. + let sensitivity_map = + cast_sensitivity::build_sensitivity_map(tcx, instance_sensitivity.into_inner()); + // The set of MonoItems was created in an inherently indeterministic order because // of parallelism. We sort it here to ensure that the output is deterministic. let mono_items = tcx.with_stable_hashing_context(move |mut hcx| { - state.visited.into_inner().into_sorted(&mut hcx, true) + visited.into_inner().into_sorted(&mut hcx, true) }); - (mono_items, state.usage_map.into_inner()) + CollectionResult { + mono_items, + usage_map: usage_map.into_inner(), + delayed_codegen: delayed, + sensitivity_map, + } +} + +/// Default provider for `codegen_mir`: returns the unmodified MIR body +/// from `instance_mir`. For outlives-sensitive instances, the monomorphization +/// collector feeds a patched body that takes precedence over this provider. 
+fn codegen_mir<'tcx>(tcx: TyCtxt<'tcx>, instance: Instance<'tcx>) -> &'tcx Body<'tcx> { + tcx.instance_mir(instance.def) } pub(crate) fn provide(providers: &mut Providers) { providers.hooks.should_codegen_locally = should_codegen_locally; + providers.hooks.must_delay_codegen = must_delay_codegen; providers.queries.items_of_instance = items_of_instance; + providers.queries.has_trait_cast_intrinsics = has_trait_cast_intrinsics; + providers.queries.codegen_mir = codegen_mir; } + +#[cfg(test)] +mod tests; diff --git a/compiler/rustc_monomorphize/src/collector/tests.rs b/compiler/rustc_monomorphize/src/collector/tests.rs new file mode 100644 index 0000000000000..0de2f8003c1d8 --- /dev/null +++ b/compiler/rustc_monomorphize/src/collector/tests.rs @@ -0,0 +1,215 @@ +use rustc_data_structures::unord::UnordSet; +use rustc_index::bit_set::BitMatrix; +use rustc_middle::mono::CastRelevantLifetimes; + +use crate::cast_sensitivity::CallerOutlivesEnv; + +// ── CallerOutlivesEnv tests ───────────────────────────────────── + +/// Build an outlives reachability `BitMatrix` from raw pairs using the +/// same Floyd-Warshall algorithm as `outlives_reachability`, then call +/// `f` with a borrowed `CallerOutlivesEnv`. Avoids leaking memory and +/// keeps the construction logic in test code. 
+fn with_env(pairs: &[(usize, usize)], f: impl FnOnce(&CallerOutlivesEnv<'_>)) { + let max_idx = + pairs.iter().flat_map(|&(l, s)| [l, s]).filter(|&v| v != usize::MAX).max().unwrap_or(0); + let dim = if pairs.is_empty() { 1 } else { max_idx + 2 }; + let static_idx = dim - 1; + + let mut reach = BitMatrix::new(dim, dim); + for i in 0..dim { + reach.insert(i, i); + } + for j in 0..dim { + reach.insert(static_idx, j); + } + let remap = |idx: usize| if idx == usize::MAX { static_idx } else { idx }; + for &(l, s) in pairs { + reach.insert(remap(l), remap(s)); + } + for k in 0..dim { + for i in 0..dim { + if reach.contains(i, k) { + reach.union_rows(k, i); + } + } + } + + let env = CallerOutlivesEnv::from_raw(&reach, dim); + f(&env); +} + +#[test] +fn outlives_reflexive() { + with_env(&[], |env| { + assert!(env.outlives(0, 0)); + assert!(env.outlives(usize::MAX, usize::MAX)); + }); +} + +#[test] +fn outlives_direct() { + // 0 : 1 (0 outlives 1) + with_env(&[(0, 1)], |env| { + assert!(env.outlives(0, 1)); + assert!(!env.outlives(1, 0)); // not symmetric + }); +} + +#[test] +fn outlives_transitive() { + // 0 : 1, 1 : 2 → 0 : 2 transitively + with_env(&[(0, 1), (1, 2)], |env| { + assert!(env.outlives(0, 1)); + assert!(env.outlives(1, 2)); + assert!(env.outlives(0, 2)); // transitive + assert!(!env.outlives(2, 0)); + assert!(!env.outlives(2, 1)); + }); +} + +#[test] +fn outlives_cycle() { + // 0 : 1, 1 : 0 → mutual outlives (same equivalence class) + with_env(&[(0, 1), (1, 0)], |env| { + assert!(env.outlives(0, 1)); + assert!(env.outlives(1, 0)); + }); +} + +#[test] +fn outlives_disconnected() { + // 0 : 1, 2 : 3 → no relationship between {0,1} and {2,3} + with_env(&[(0, 1), (2, 3)], |env| { + assert!(env.outlives(0, 1)); + assert!(env.outlives(2, 3)); + assert!(!env.outlives(0, 2)); + assert!(!env.outlives(0, 3)); + assert!(!env.outlives(1, 2)); + assert!(!env.outlives(3, 0)); + }); +} + +#[test] +fn outlives_diamond() { + // 0 : 1, 0 : 2, 1 : 3, 2 : 3 → 0 : 3 through 
either path + with_env(&[(0, 1), (0, 2), (1, 3), (2, 3)], |env| { + assert!(env.outlives(0, 3)); // through 0→1→3 or 0→2→3 + assert!(!env.outlives(3, 0)); + assert!(!env.outlives(1, 2)); // no direct or transitive path 1→2 + assert!(!env.outlives(2, 1)); // no direct or transitive path 2→1 + }); +} + +#[test] +fn outlives_static() { + // 5 : usize::MAX (param 5 outlives 'static) + with_env(&[(5, usize::MAX)], |env| { + assert!(env.outlives(5, usize::MAX)); + // 'static outlives everything by construction in the reachability matrix. + assert!(env.outlives(usize::MAX, 5)); + }); +} + +#[test] +fn outlives_long_chain() { + // 0 : 1 : 2 : 3 : 4 → 0 : 4 transitively + with_env(&[(0, 1), (1, 2), (2, 3), (3, 4)], |env| { + assert!(env.outlives(0, 4)); + assert!(env.outlives(0, 3)); + assert!(env.outlives(1, 4)); + assert!(!env.outlives(4, 0)); + }); +} + +// ── CastRelevantLifetimes::max_walk_order_position tests ──────── + +#[test] +fn max_walk_order_empty() { + let crl = CastRelevantLifetimes { mappings: UnordSet::new() }; + assert_eq!(crl.max_walk_order_position(), 0); +} + +// Note: non-empty max_walk_order_position tests require TyCtxt for +// LifetimeBVToParamMapping interning and are covered by integration tests. + +// ── Transitive reduction tests ────────────────────────────────── +// Test the transitive reduction logic extracted from augment_callee. + +/// Apply the same transitive reduction algorithm used in augment_callee. 
+fn transitive_reduction(pairs: &[(usize, usize)]) -> Vec<(usize, usize)> { + let mut minimal: Vec<(usize, usize)> = Vec::new(); + for &(l, s) in pairs { + let is_redundant = pairs.iter().any(|&(l2, mid)| { + l2 == l && mid != s && pairs.iter().any(|&(l3, s3)| l3 == mid && s3 == s) + }); + if !is_redundant { + minimal.push((l, s)); + } + } + minimal.sort(); + minimal.dedup(); + minimal +} + +#[test] +fn transitive_reduction_empty() { + assert_eq!(transitive_reduction(&[]), vec![]); +} + +#[test] +fn transitive_reduction_single() { + assert_eq!(transitive_reduction(&[(0, 1)]), vec![(0, 1)]); +} + +#[test] +fn transitive_reduction_chain() { + // 0 : 1, 1 : 2, 0 : 2 → remove 0 : 2 (redundant via 0→1→2) + let result = transitive_reduction(&[(0, 1), (1, 2), (0, 2)]); + assert_eq!(result, vec![(0, 1), (1, 2)]); +} + +#[test] +fn transitive_reduction_no_redundancy() { + // 0 : 1, 2 : 3 → both kept (no transitive path) + let result = transitive_reduction(&[(0, 1), (2, 3)]); + assert_eq!(result, vec![(0, 1), (2, 3)]); +} + +#[test] +fn transitive_reduction_diamond() { + // 0 : 1, 0 : 2, 1 : 3, 2 : 3 → all kept (no single intermediate) + // 0→3 doesn't exist, so no redundancy to remove + let result = transitive_reduction(&[(0, 1), (0, 2), (1, 3), (2, 3)]); + assert_eq!(result, vec![(0, 1), (0, 2), (1, 3), (2, 3)]); +} + +#[test] +fn transitive_reduction_diamond_with_shortcut() { + // 0 : 1, 0 : 2, 1 : 3, 2 : 3, 0 : 3 → remove 0 : 3 + let result = transitive_reduction(&[(0, 1), (0, 2), (1, 3), (2, 3), (0, 3)]); + assert_eq!(result, vec![(0, 1), (0, 2), (1, 3), (2, 3)]); +} + +#[test] +fn transitive_reduction_longer_chain() { + // 0 : 1, 1 : 2, 2 : 3, 0 : 2, 0 : 3, 1 : 3 + // → remove 0:2 (via 0→1→2), 0:3 (via 0→1→3), 1:3 (via 1→2→3) + let result = transitive_reduction(&[(0, 1), (1, 2), (2, 3), (0, 2), (0, 3), (1, 3)]); + assert_eq!(result, vec![(0, 1), (1, 2), (2, 3)]); +} + +#[test] +fn transitive_reduction_with_static() { + // 0 : 1, 0 : MAX, 1 : MAX → remove 0 : MAX (via 
0→1→MAX) + let s = usize::MAX; + let result = transitive_reduction(&[(0, 1), (0, s), (1, s)]); + assert_eq!(result, vec![(0, 1), (1, s)]); +} + +#[test] +fn transitive_reduction_duplicates() { + // Duplicate entries should be deduplicated. + let result = transitive_reduction(&[(0, 1), (0, 1), (1, 2)]); + assert_eq!(result, vec![(0, 1), (1, 2)]); +} diff --git a/compiler/rustc_monomorphize/src/erasure_safe.rs b/compiler/rustc_monomorphize/src/erasure_safe.rs new file mode 100644 index 0000000000000..55b8c4c8bcb46 --- /dev/null +++ b/compiler/rustc_monomorphize/src/erasure_safe.rs @@ -0,0 +1,1418 @@ +//! Erasure-safe analysis for the `trait_cast_is_lifetime_erasure_safe` +//! intrinsic. +//! +//! Provides structural helpers (binder variable enumeration, supertrait +//! chain tracing) and the `resolve_erasure_safe_intrinsic` function + +//! `is_lifetime_erasure_safe` query for determining whether casting to a +//! target dyn type preserves lifetime identity. + +use rustc_data_structures::fx::{FxHashMap, FxHashSet}; +use rustc_hir::def_id::DefId; +use rustc_index::bit_set::DenseBitSet; +use rustc_middle::bug; +use rustc_middle::ty::print::with_no_trimmed_paths; +use rustc_middle::ty::trait_cast::OutlivesClass; +use rustc_middle::ty::{self, GenericParamDefKind, Ty, TyCtxt, TypeVisitable, TypeVisitor}; + +// ── Types ────────────────────────────────────────────────────────────────────── + +/// Binder variable info collected by TypeVisitor DFS over a dyn type's +/// existential predicates. +pub(crate) struct DynBinderVars { + /// `(bv_index, PrincipalLocation)` for bvs in the principal + /// `ExistentialTraitRef`. + pub(crate) principal_entries: Vec<(usize, PrincipalLocation)>, + /// `(bv_index, ProjectionLocation)` for bvs in `ExistentialProjection`s. + pub(crate) projection_entries: Vec<(usize, ProjectionLocation)>, +} + +impl DynBinderVars { + /// Total number of distinct binder variables (max bv index + 1). 
+ pub(crate) fn total_count(&self) -> usize { + let max_bv = self + .principal_entries + .iter() + .map(|(bv, _)| *bv) + .chain(self.projection_entries.iter().map(|(bv, _)| *bv)) + .max(); + max_bv.map_or(0, |m| m + 1) + } +} + +/// Location of a binder variable within the principal `ExistentialTraitRef`. +pub(crate) struct PrincipalLocation { + /// Index of the arg within `ExistentialTraitRef.args` (excluding + /// Self) in which this bv was found. + pub(crate) arg_index: usize, + /// TypeVisitor DFS offset of this bv *within* the arg at + /// `arg_index`. A top-level lifetime arg has `dfs_offset == 0` + /// and is the only region in that arg; a region nested inside a + /// type arg (e.g., `&'b u8` when the arg is a type param) has + /// a non-zero offset or shares the arg with other regions. + pub(crate) dfs_offset: usize, +} + +/// Location of a binder variable within an `ExistentialProjection`. +pub(crate) struct ProjectionLocation { + /// `DefId` of the associated type. + pub(crate) assoc_def_id: DefId, + /// TypeVisitor DFS position of this bv within the projection's + /// args (excluding Self) and term, walked in that order. + pub(crate) dfs_position: usize, +} + +// ── Functions ────────────────────────────────────────────────────────────────── + +/// Walk a dyn type's existential predicates in TypeVisitor DFS order, +/// collecting all `ReBound` binder variables and their locations. +/// +/// The canonical visitation order is: principal `ExistentialTraitRef` +/// (args excluding Self), then `ExistentialProjection`s sorted by def_id +/// (args excluding Self, then term). Binder variables are numbered by +/// order of first encounter. 
+pub(crate) fn collect_all_binder_vars<'tcx>( + _tcx: TyCtxt<'tcx>, + preds: &'tcx ty::List>, +) -> DynBinderVars { + let mut result = + DynBinderVars { principal_entries: Vec::new(), projection_entries: Vec::new() }; + + let mut var_to_idx: FxHashMap = FxHashMap::default(); + let mut next_bv_idx: usize = 0; + // Counter for synthetic BoundVar indices assigned to ReErased + // regions. Each erased position gets a unique value so that + // intern_bv treats them as distinct binder variables. + let mut next_erased_var: u32 = 0; + + // Assign a bv index on first encounter, return existing index + // on subsequent encounters of the same BoundVar. + let mut intern_bv = |var: ty::BoundVar| -> usize { + *var_to_idx.entry(var.as_u32()).or_insert_with(|| { + let idx = next_bv_idx; + next_bv_idx += 1; + idx + }) + }; + + // Principal trait ref. + // ExistentialTraitRef.args already excludes Self, so no skip(1). + if let Some(principal) = preds.principal() { + for (arg_index, arg) in principal.skip_binder().args.iter().enumerate() { + let regions = collect_bound_regions_in(arg, &mut next_erased_var); + for (dfs_offset, br) in regions { + let bv_idx = intern_bv(br.var); + result + .principal_entries + .push((bv_idx, PrincipalLocation { arg_index, dfs_offset })); + } + } + } + + // Projection predicates (sorted by def_id by construction). + for proj_pred in preds.projection_bounds() { + let proj = proj_pred.skip_binder(); + let assoc_def_id = proj.def_id; + let mut dfs_position = 0; + + // Walk projection args. ExistentialProjection.args already + // excludes Self (erased by `erase_self_ty`), so no skip(1). + for arg in proj.args.iter() { + let regions = collect_bound_regions_in(arg, &mut next_erased_var); + for (offset, br) in regions { + let bv_idx = intern_bv(br.var); + result.projection_entries.push(( + bv_idx, + ProjectionLocation { assoc_def_id, dfs_position: dfs_position + offset }, + )); + } + dfs_position += region_slots_of_arg(arg); + } + + // Walk the projected term. 
+ let term_regions = collect_bound_regions_in(proj.term, &mut next_erased_var); + for (offset, br) in term_regions { + let bv_idx = intern_bv(br.var); + result.projection_entries.push(( + bv_idx, + ProjectionLocation { assoc_def_id, dfs_position: dfs_position + offset }, + )); + } + } + + result +} + +/// Map from arg position (in `ExistentialTraitRef.args`, excluding +/// Self) to `ReEarlyParam.index` for lifetime-typed generic params. +/// Non-lifetime params are absent from the map. +/// +/// The arg position `i` corresponds to +/// `generics_of(def_id).own_params[i+1]` (offset by 1 for Self). This +/// bridges between the arg-indexed `PrincipalLocation.arg_index` and the +/// `ReEarlyParam.index` used by where-clause derivation. +pub(crate) fn lifetime_param_map(tcx: TyCtxt<'_>, def_id: DefId) -> FxHashMap { + let generics = tcx.generics_of(def_id); + generics + .own_params + .iter() + .enumerate() + .filter(|(_, p)| { + p.index != 0 // skip Self + && matches!(p.kind, GenericParamDefKind::Lifetime) + }) + .map(|(pos, p)| { + // `pos` is the position in `own_params` (0-based, + // includes Self at 0). Arg position in + // ExistentialTraitRef.args is `pos - 1` (Self excluded). + (pos - 1, p.index) + }) + .collect() +} + +/// Walk a `TypeVisitable` value with TypeVisitor DFS, collecting all +/// `ReBound` or `ReErased` regions and their DFS offset. +/// +/// Returns `(dfs_offset, BoundRegion)` pairs in encounter order. +/// `dfs_offset` is the count of region-slots visited before this +/// region — a top-level lifetime arg yields a single `(0, br)` entry. +/// +/// For `ReErased` regions (post-monomorphization), a synthetic +/// `BoundRegion` is created using `*next_erased_var` as the +/// `BoundVar` index, incremented for each erased region to ensure +/// uniqueness across calls. 
+pub(crate) fn collect_bound_regions_in<'tcx>( + value: impl TypeVisitable>, + next_erased_var: &mut u32, +) -> Vec<(usize, ty::BoundRegion<'tcx>)> { + struct Collector<'a, 'tcx> { + regions: Vec<(usize, ty::BoundRegion<'tcx>)>, + dfs_offset: usize, + next_erased_var: &'a mut u32, + } + impl<'tcx> TypeVisitor> for Collector<'_, 'tcx> { + fn visit_region(&mut self, r: ty::Region<'tcx>) { + match r.kind() { + ty::ReBound(_, br) => { + self.regions.push((self.dfs_offset, br)); + } + ty::ReErased => { + let var = ty::BoundVar::from_u32(*self.next_erased_var); + *self.next_erased_var += 1; + let br = ty::BoundRegion { var, kind: ty::BoundRegionKind::Anon }; + self.regions.push((self.dfs_offset, br)); + } + _ => {} + } + self.dfs_offset += 1; + } + } + let mut collector = Collector { regions: Vec::new(), dfs_offset: 0, next_erased_var }; + value.visit_with(&mut collector); + collector.regions +} + +/// Count the total number of region slots in a `TypeVisitable` value +/// as visited by TypeVisitor DFS. This includes all regions regardless +/// of kind (`ReBound`, `ReEarlyParam`, `ReStatic`, etc.). +/// +/// Used to compute `dfs_position` offsets when walking multiple args +/// in sequence (e.g., projection args followed by the projection term). +/// +/// Generic fallback — prefer [`region_slots_of_arg`] / +/// [`region_slots_of_args`] / `Ty::region_slots()` when the input is a +/// `GenericArg`, `GenericArgsRef`, `Ty`, or `Const`, since those hit +/// an O(1) cache populated at interning. +pub(crate) fn count_region_slots_in<'tcx>(value: impl TypeVisitable>) -> usize { + struct Counter { + count: usize, + } + impl<'tcx> TypeVisitor> for Counter { + fn visit_region(&mut self, _r: ty::Region<'tcx>) { + self.count += 1; + } + } + let mut counter = Counter { count: 0 }; + value.visit_with(&mut counter); + counter.count +} + +/// Region-slot count for a single `GenericArg`. 
+/// +/// Dispatches on the kind tag and reads the cached count on the +/// interned `Ty` / `Const`; lifetime args contribute one slot, +/// `Outlives` args contribute none. O(1). +/// +/// In debug builds the cached count is cross-checked against a live +/// `TypeVisitor` DFS. The check is retained in-tree permanently: it +/// costs nothing in release builds and catches any future `TyKind` / +/// `ConstKind` variant whose `FlagComputation` arm forgets to +/// propagate a child's region count. +#[inline] +pub(crate) fn region_slots_of_arg<'tcx>(arg: ty::GenericArg<'tcx>) -> usize { + match arg.kind() { + ty::GenericArgKind::Type(ty) => region_slots_of_ty(ty), + ty::GenericArgKind::Lifetime(_) => 1, + ty::GenericArgKind::Const(ct) => region_slots_of_const(ct), + ty::GenericArgKind::Outlives(_) => 0, + } +} + +/// Region-slot count for an interned `&List`. +/// +/// Sums per-arg counts from the cache. O(n_args) with O(1) per arg. +#[inline] +pub(crate) fn region_slots_of_args<'tcx>(args: ty::GenericArgsRef<'tcx>) -> usize { + args.iter().map(region_slots_of_arg).sum() +} + +/// Region-slot count for a `Ty<'tcx>`. O(1); reads the cached value +/// stored on the interned `WithCachedTypeInfo`. +#[inline] +pub(crate) fn region_slots_of_ty<'tcx>(ty: Ty<'tcx>) -> usize { + use rustc_middle::ty::Flags; + let cached = ty.region_slots() as usize; + debug_assert_eq!( + cached, + count_region_slots_in(ty), + "cached Ty::region_slots disagrees with TypeVisitor walk (ty = {ty:?})", + ); + cached +} + +/// Region-slot count for a `Const<'tcx>`. O(1). +#[inline] +pub(crate) fn region_slots_of_const<'tcx>(ct: ty::Const<'tcx>) -> usize { + use rustc_middle::ty::Flags; + let cached = ct.region_slots() as usize; + debug_assert_eq!( + cached, + count_region_slots_in(ct), + "cached Const::region_slots disagrees with TypeVisitor walk (ct = {ct:?})", + ); + cached +} + +/// Region-slot count for a `Term<'tcx>` (either a `Ty` or a `Const`). +/// O(1) in both branches. 
+#[inline] +pub(crate) fn region_slots_of_term<'tcx>(term: ty::Term<'tcx>) -> usize { + match term.kind() { + ty::TermKind::Ty(ty) => region_slots_of_ty(ty), + ty::TermKind::Const(ct) => region_slots_of_const(ct), + } +} + +/// Build a dense walk-position -> binder-variable mapping for a dyn +/// type's existential predicates. +/// +/// The returned vector is indexed by the dyn type's raw DFS walk +/// position. Slots that are not binder-bearing are `None`; binder +/// occurrences map to their dense binder-variable index in +/// `collect_all_binder_vars` order. +fn dyn_walk_pos_to_bv_map<'tcx>( + preds: &'tcx ty::List>, +) -> Vec> { + let mut map: Vec> = Vec::new(); + + let mut var_to_idx: FxHashMap = FxHashMap::default(); + let mut next_bv_idx: usize = 0; + let mut next_erased_var: u32 = 0; + + let mut intern_bv = |var: ty::BoundVar| -> usize { + *var_to_idx.entry(var.as_u32()).or_insert_with(|| { + let idx = next_bv_idx; + next_bv_idx += 1; + idx + }) + }; + + fn record_arg<'tcx, F: FnMut(ty::BoundVar) -> usize>( + map: &mut Vec>, + arg: ty::GenericArg<'tcx>, + walk_pos: &mut usize, + next_erased_var: &mut u32, + intern_bv: &mut F, + ) { + let region_slots = region_slots_of_arg(arg); + if map.len() < *walk_pos + region_slots { + map.resize(*walk_pos + region_slots, None); + } + for (dfs_offset, br) in collect_bound_regions_in(arg, next_erased_var) { + let bv_idx = intern_bv(br.var); + map[*walk_pos + dfs_offset] = Some(bv_idx); + } + *walk_pos += region_slots; + } + + let mut walk_pos = 0usize; + + if let Some(principal) = preds.principal() { + for arg in principal.skip_binder().args.iter() { + record_arg(&mut map, arg, &mut walk_pos, &mut next_erased_var, &mut intern_bv); + } + } + + for proj_pred in preds.projection_bounds() { + let proj = proj_pred.skip_binder(); + for arg in proj.args.iter() { + record_arg(&mut map, arg, &mut walk_pos, &mut next_erased_var, &mut intern_bv); + } + + let term_slots = region_slots_of_term(proj.term); + if map.len() < walk_pos + 
term_slots { + map.resize(walk_pos + term_slots, None); + } + for (dfs_offset, br) in collect_bound_regions_in(proj.term, &mut next_erased_var) { + let bv_idx = intern_bv(br.var); + map[walk_pos + dfs_offset] = Some(bv_idx); + } + walk_pos += term_slots; + } + + map +} + +struct TransportSegment<'a> { + transport_start: usize, + transport_slots: usize, + walk_pos_to_bv: &'a [Option], + native_base: usize, +} + +fn remap_transport_entries<'tcx>( + tcx: TyCtxt<'tcx>, + segments: &[TransportSegment<'_>], + call_site_outlives: &'tcx [ty::GenericArg<'tcx>], +) -> &'tcx [ty::GenericArg<'tcx>] { + if call_site_outlives.is_empty() { + return call_site_outlives; + } + + let remap_index = |idx: usize| -> Option { + if idx == usize::MAX { + return Some(usize::MAX); + } + + let Some(segment) = segments.iter().find(|segment| { + idx >= segment.transport_start + && idx < segment.transport_start + segment.transport_slots + }) else { + return None; + }; + + let local_idx = idx - segment.transport_start; + segment.walk_pos_to_bv.get(local_idx).copied().flatten().map(|bv| segment.native_base + bv) + }; + + let remapped = call_site_outlives + .iter() + .filter_map(|entry| match entry.kind() { + ty::GenericArgKind::Outlives(o) => { + let longer = remap_index(o.longer())?; + let shorter = remap_index(o.shorter())?; + Some(tcx.mk_outlives_arg(longer, shorter).into()) + } + _ => bug!("expected Outlives entry in call-site outlives slice"), + }) + .collect::>(); + + tcx.arena.alloc_from_iter(remapped) +} + +fn build_origin_to_native_map( + segments: &[TransportSegment<'_>], + origin_positions: &[Option], +) -> FxHashMap> { + let mut origin_to_native: FxHashMap> = FxHashMap::default(); + + for segment in segments { + for local_idx in 0..segment.transport_slots { + let intrinsic_pos = segment.transport_start + local_idx; + let Some(origin_pos) = origin_positions.get(intrinsic_pos).copied().flatten() else { + continue; + }; + let Some(native_bv) = 
segment.walk_pos_to_bv.get(local_idx).copied().flatten() else { + continue; + }; + origin_to_native.entry(origin_pos).or_default().push(segment.native_base + native_bv); + } + } + + origin_to_native +} + +fn remap_origin_entries<'tcx>( + tcx: TyCtxt<'tcx>, + origin_to_native: &FxHashMap>, + call_site_outlives: &'tcx [ty::GenericArg<'tcx>], +) -> &'tcx [ty::GenericArg<'tcx>] { + if call_site_outlives.is_empty() { + return call_site_outlives; + } + + let static_indices = [usize::MAX]; + let mut remapped = Vec::new(); + + for entry in call_site_outlives { + let ty::GenericArgKind::Outlives(outlives) = entry.kind() else { + bug!("expected Outlives entry in call-site outlives slice"); + }; + + let longer_indices = if outlives.longer() == usize::MAX { + &static_indices[..] + } else { + let Some(indices) = origin_to_native.get(&outlives.longer()) else { + continue; + }; + indices.as_slice() + }; + + let shorter_indices = if outlives.shorter() == usize::MAX { + &static_indices[..] + } else { + let Some(indices) = origin_to_native.get(&outlives.shorter()) else { + continue; + }; + indices.as_slice() + }; + + for &longer in longer_indices { + for &shorter in shorter_indices { + remapped.push((longer, shorter)); + } + } + } + + remapped.sort_unstable(); + remapped.dedup(); + + tcx.arena.alloc_from_iter( + remapped.into_iter().map(|(longer, shorter)| tcx.mk_outlives_arg(longer, shorter).into()), + ) +} + +/// Remap transported outlives entries into the sub-trait binder-variable +/// space expected by `trait_metadata_index`. 
+pub(crate) fn remap_trait_metadata_outlives_entries<'tcx>( + tcx: TyCtxt<'tcx>, + super_trait: Ty<'tcx>, + sub_trait: Ty<'tcx>, + call_site_outlives: &'tcx [ty::GenericArg<'tcx>], +) -> &'tcx [ty::GenericArg<'tcx>] { + let (_super_data, sub_data) = match (*super_trait.kind(), *sub_trait.kind()) { + (ty::Dynamic(s, ..), ty::Dynamic(t, ..)) => (s, t), + _ => return call_site_outlives, + }; + + let sub_map = dyn_walk_pos_to_bv_map(sub_data); + let root_transport_slots = region_slots_of_ty(super_trait); + let sub_transport_slots = region_slots_of_ty(sub_trait); + let root_drop_map: [Option; 0] = []; + + remap_transport_entries( + tcx, + &[ + TransportSegment { + transport_start: 0, + transport_slots: root_transport_slots, + walk_pos_to_bv: &root_drop_map, + native_base: 0, + }, + TransportSegment { + transport_start: root_transport_slots, + transport_slots: sub_transport_slots, + walk_pos_to_bv: &sub_map, + native_base: 0, + }, + ], + call_site_outlives, + ) +} + +/// Remap origin-space transported outlives entries into the sub-trait +/// binder-variable space expected by `trait_metadata_index`. 
+pub(crate) fn remap_trait_metadata_outlives_entries_from_origin_positions<'tcx>( + tcx: TyCtxt<'tcx>, + super_trait: Ty<'tcx>, + sub_trait: Ty<'tcx>, + origin_positions: &[Option], + call_site_outlives: &'tcx [ty::GenericArg<'tcx>], +) -> &'tcx [ty::GenericArg<'tcx>] { + let (_super_data, sub_data) = match (*super_trait.kind(), *sub_trait.kind()) { + (ty::Dynamic(s, ..), ty::Dynamic(t, ..)) => (s, t), + _ => return call_site_outlives, + }; + + let sub_map = dyn_walk_pos_to_bv_map(sub_data); + let root_transport_slots = region_slots_of_ty(super_trait); + let sub_transport_slots = region_slots_of_ty(sub_trait); + let root_drop_map: [Option; 0] = []; + + let origin_to_native = build_origin_to_native_map( + &[ + TransportSegment { + transport_start: 0, + transport_slots: root_transport_slots, + walk_pos_to_bv: &root_drop_map, + native_base: 0, + }, + TransportSegment { + transport_start: root_transport_slots, + transport_slots: sub_transport_slots, + walk_pos_to_bv: &sub_map, + native_base: 0, + }, + ], + origin_positions, + ); + + remap_origin_entries(tcx, &origin_to_native, call_site_outlives) +} + +/// Compute the `trait_metadata_index` outlives class in the sub-trait's +/// native binder-variable space from an augmented intrinsic instance. +pub(crate) fn trait_metadata_index_outlives_class<'tcx>( + tcx: TyCtxt<'tcx>, + super_trait: Ty<'tcx>, + sub_trait: Ty<'tcx>, + instance: ty::Instance<'tcx>, +) -> OutlivesClass<'tcx> { + let transported = match instance.outlives_entries().split_first() { + Some((_sentinel, entries)) => entries, + None => &[], + }; + let remapped = remap_trait_metadata_outlives_entries(tcx, super_trait, sub_trait, transported); + OutlivesClass::from_entries(remapped) +} + +/// Compute the set of target binder variables that are structurally exposed +/// through the root supertrait. 
+/// +/// These target bvs participate in the root<->target correspondence checked +/// by `trait_cast_is_lifetime_erasure_safe`, so table admissibility does not +/// need to conservatively treat them like hidden Self-anchored lifetimes. +pub(crate) fn root_exposed_target_bvs<'tcx>( + tcx: TyCtxt<'tcx>, + root_trait: Ty<'tcx>, + target_trait: Ty<'tcx>, +) -> DenseBitSet { + let (root_data, target_data) = match (*root_trait.kind(), *target_trait.kind()) { + (ty::Dynamic(root_data, ..), ty::Dynamic(target_data, ..)) => (root_data, target_data), + _ => return DenseBitSet::new_empty(0), + }; + + let root_bvs = collect_all_binder_vars(tcx, root_data); + let target_bvs = collect_all_binder_vars(tcx, target_data); + let n_target = target_bvs.total_count(); + let mut exposed = DenseBitSet::new_empty(n_target); + + let (Some(root_principal), Some(target_principal)) = + (root_data.principal(), target_data.principal()) + else { + return exposed; + }; + + let root_def_id = root_principal.skip_binder().def_id; + let target_def_id = target_principal.skip_binder().def_id; + + let root_principal_ref = root_principal.skip_binder(); + let target_principal_ref = target_principal.skip_binder(); + + if root_def_id == target_def_id { + // Same trait — identity correspondence. Walk root and target + // args in parallel DFS; at each position where both sides have + // a bv (ReErased or ReBound), mark the target bv as exposed. + for (arg_index, (target_arg, root_arg)) in + target_principal_ref.args.iter().zip(root_principal_ref.args.iter()).enumerate() + { + let target_regions = collect_all_regions_dfs(target_arg); + let root_regions = collect_all_regions_dfs(root_arg); + for (dfs_offset, (tr, rr)) in target_regions.iter().zip(root_regions.iter()).enumerate() + { + let is_target_bv = matches!(tr.kind(), ty::ReBound(..) | ty::ReErased); + let is_root_bv = matches!(rr.kind(), ty::ReBound(..) 
| ty::ReErased); + if is_target_bv && is_root_bv { + // Find the target bv index at this (arg_index, dfs_offset). + if let Some(&(bv_idx, _)) = target_bvs + .principal_entries + .iter() + .find(|(_, loc)| loc.arg_index == arg_index && loc.dfs_offset == dfs_offset) + { + exposed.insert(bv_idx); + } + } + } + } + } else { + // Different traits — use synthetic ReBound propagation through + // the supertrait chain, matching the technique used by + // `compute_walk_pos_correspondences`. + + // Inject synthetic ReBound into target's erased regions. + let mut next_synthetic_bv: u32 = 0; + let modified_target_args: Vec> = target_principal_ref + .args + .iter() + .map(|arg| { + ty::fold_regions(tcx, arg, |r, _depth| match r.kind() { + ty::ReErased => { + let var = ty::BoundVar::from_u32(next_synthetic_bv); + next_synthetic_bv += 1; + let br = ty::BoundRegion { var, kind: ty::BoundRegionKind::Anon }; + ty::Region::new_bound(tcx, ty::INNERMOST, br) + } + _ => r, + }) + }) + .collect(); + let num_synthetic_bvs = next_synthetic_bv as usize; + + // Build modified ExistentialTraitRef → TraitRef → PolyTraitRef. + let modified_existential_ref = ty::ExistentialTraitRef::new_from_args( + tcx, + target_def_id, + tcx.mk_args_from_iter(modified_target_args.iter().copied()), + ); + let dummy_self = tcx.types.trait_object_dummy_self; + let target_trait_ref = modified_existential_ref.with_self_ty(tcx, dummy_self); + let bound_vars = tcx.mk_bound_variable_kinds_from_iter( + (0..num_synthetic_bvs) + .map(|_| ty::BoundVariableKind::Region(ty::BoundRegionKind::Anon)), + ); + let target_poly_trait_ref = ty::Binder::bind_with_vars(target_trait_ref, bound_vars); + + // Instantiate supertrait chain from target to root. + let implied_root_trait_ref = + instantiate_supertrait_chain(tcx, target_poly_trait_ref, root_def_id); + + if let Some(implied_root_trait_ref) = implied_root_trait_ref { + // implied args include Self at position 0; actual args exclude Self. 
+ let implied_root_args = &implied_root_trait_ref.args[1..]; + let actual_root_args = root_principal_ref.args; + + // Walk implied vs actual root args in parallel DFS. + for (implied_arg, actual_arg) in implied_root_args.iter().zip(actual_root_args.iter()) { + let implied_regions = collect_all_regions_dfs(*implied_arg); + let actual_regions = collect_all_regions_dfs(actual_arg); + for (implied_r, actual_r) in implied_regions.iter().zip(actual_regions.iter()) { + if let ty::ReBound(_, implied_br) = implied_r.kind() + && implied_br.var.as_usize() < num_synthetic_bvs + && matches!(actual_r.kind(), ty::ReErased | ty::ReBound(..)) + { + // synthetic_var_i maps to target bv index i + // (both assigned in same DFS order over principal args). + exposed.insert(implied_br.var.as_usize()); + } + } + } + } + } + + for &(target_bv_idx, ref proj_loc) in &target_bvs.projection_entries { + if root_bvs.projection_entries.iter().any(|(_, loc)| { + loc.assoc_def_id == proj_loc.assoc_def_id && loc.dfs_position == proj_loc.dfs_position + }) { + exposed.insert(target_bv_idx); + } + } + + exposed +} + +// ── Walk-position-based erasure safety ───────────────────────────────────────── + +/// Structural correspondences between target and root dyn types expressed +/// in predicate walk-position space. Each pair `(t_wp, r_wp)` means: +/// "the target lifetime at predicate walk position `t_wp` structurally +/// corresponds to the root lifetime at predicate walk position `r_wp` +/// through the supertrait chain." +struct WalkPosCorrespondences { + /// `(target_pred_walk_pos, root_pred_walk_pos)` pairs. + pairs: Vec<(usize, usize)>, + /// `false` if surjectivity fails: some root lifetime param has no + /// corresponding target param, or some target param reaches no root + /// param. + surjective: bool, +} + +/// Compute structural correspondences between target and root dyn types +/// in predicate walk-position space. 
+/// +/// Monomorphizes the supertrait chain from target → root using +/// `instantiate_supertrait`, then structurally compares the implied root +/// args with the actual root args, pairing up `ReBound` region positions. +/// This handles lifetimes nested inside type params (e.g., +/// `trait Target<'a>: Root<&'a u8>`). +fn compute_walk_pos_correspondences<'tcx>( + tcx: TyCtxt<'tcx>, + super_data: &'tcx ty::List>, + target_data: &'tcx ty::List>, +) -> WalkPosCorrespondences { + let (Some(super_principal), Some(target_principal)) = + (super_data.principal(), target_data.principal()) + else { + return WalkPosCorrespondences { pairs: Vec::new(), surjective: true }; + }; + + let super_def_id = super_principal.skip_binder().def_id; + let target_def_id = target_principal.skip_binder().def_id; + + if super_def_id == target_def_id { + // Same trait — identity correspondence on all ReBound positions. + return compute_identity_correspondences(tcx, super_data, target_data); + } + + let super_principal_ref = super_principal.skip_binder(); + let target_principal_ref = target_principal.skip_binder(); + + // --- Inject synthetic ReBound identities into the target args ----- + // + // Post-monomorphization all regions are ReErased, so the downstream + // `instantiate_supertrait_chain` and matching loop see nothing useful. + // We replace each ReErased in the target's existential args with + // `ReBound(INNERMOST, BV_i)`, giving every erased region slot a + // unique identity that survives substitution through the supertrait + // chain. 
+ let mut next_synthetic_bv: u32 = 0; + let modified_target_args: Vec> = target_principal_ref + .args + .iter() + .map(|arg| { + ty::fold_regions(tcx, arg, |r, _depth| match r.kind() { + ty::ReErased => { + let var = ty::BoundVar::from_u32(next_synthetic_bv); + next_synthetic_bv += 1; + let br = ty::BoundRegion { var, kind: ty::BoundRegionKind::Anon }; + ty::Region::new_bound(tcx, ty::INNERMOST, br) + } + _ => r, + }) + }) + .collect(); + let num_synthetic_bvs = next_synthetic_bv as usize; + + // Build a modified ExistentialTraitRef with synthetic ReBound args. + let modified_existential_ref = ty::ExistentialTraitRef::new_from_args( + tcx, + target_def_id, + tcx.mk_args_from_iter(modified_target_args.iter().copied()), + ); + + // Convert to TraitRef (needs a Self type; we use a dummy since Self + // is erased in dyn types). + let dummy_self = tcx.types.trait_object_dummy_self; + let target_trait_ref = modified_existential_ref.with_self_ty(tcx, dummy_self); + + // Build a PolyTraitRef with N bound variable slots for the synthetic BVs. + let bound_vars = tcx.mk_bound_variable_kinds_from_iter( + (0..num_synthetic_bvs).map(|_| ty::BoundVariableKind::Region(ty::BoundRegionKind::Anon)), + ); + let target_poly_trait_ref = ty::Binder::bind_with_vars(target_trait_ref, bound_vars); + + // Walk the supertrait chain from target_def_id toward super_def_id, + // instantiating at each step. This is the same pattern as + // `prepare_vtable_segments` in vtable.rs. + let implied_root_trait_ref = + instantiate_supertrait_chain(tcx, target_poly_trait_ref, super_def_id); + + let Some(implied_root_trait_ref) = implied_root_trait_ref else { + // No path from target to root through the supertrait chain. + return WalkPosCorrespondences { pairs: Vec::new(), surjective: false }; + }; + + // The implied root args include Self at position 0; the existential + // args exclude Self. Skip the first arg (Self) when comparing. 
+ let implied_root_args = &implied_root_trait_ref.args[1..]; + let actual_root_args = super_principal_ref.args; + + // --- Structural comparison of implied vs actual root args --------- + // + // Walk both arg lists in parallel DFS. At each `ReBound` region in + // the implied args (with var index < num_synthetic_bvs), record the + // target walk position; at the same DFS position in the actual root + // args, record the root walk position. + let mut pairs: Vec<(usize, usize)> = Vec::new(); + + // Walk the MODIFIED target args (now ReBound) to build a map from + // BoundVar → walk position in the target predicate space. + let target_bound_wp = collect_rebound_walk_positions(&modified_target_args); + + let mut root_wp_offset = 0usize; + for (implied_arg, actual_arg) in implied_root_args.iter().zip(actual_root_args.iter()) { + let implied_regions = collect_all_regions_dfs(*implied_arg); + let actual_regions = collect_all_regions_dfs(actual_arg); + + for (dfs_offset, (implied_r, _actual_r)) in + implied_regions.iter().zip(actual_regions.iter()).enumerate() + { + // Only pair up positions where the implied side has a + // synthetic ReBound region (var < num_synthetic_bvs). The + // actual side may be ReErased — that's fine because we + // identify its position purely by DFS offset. + if let ty::ReBound(_, implied_br) = implied_r.kind() + && implied_br.var.as_usize() < num_synthetic_bvs + { + // Find the target walk position for this binder var. 
+ if let Some(&target_wp) = target_bound_wp.get(&implied_br.var) { + let root_wp = root_wp_offset + dfs_offset; + pairs.push((target_wp, root_wp)); + } + } + } + + root_wp_offset += region_slots_of_arg(actual_arg); + } + + // --- Projection predicates ---------------------------------------- + let super_proj_base: usize = region_slots_of_args(super_principal_ref.args); + let target_proj_base: usize = region_slots_of_args(target_principal_ref.args); + + let super_projs: Vec<_> = super_data.projection_bounds().collect(); + let target_projs: Vec<_> = target_data.projection_bounds().collect(); + + // Compute cumulative walk-position offsets for each super projection. + let mut super_proj_offsets: FxHashMap = FxHashMap::default(); + { + let mut offset = super_proj_base; + for proj_pred in &super_projs { + let proj = proj_pred.skip_binder(); + super_proj_offsets.insert(proj.def_id, offset); + let arg_slots: usize = region_slots_of_args(proj.args); + let term_slots = region_slots_of_term(proj.term); + offset += arg_slots + term_slots; + } + } + + // Walk target projections and match against root by assoc_def_id. 
+ { + let mut target_offset = target_proj_base; + for target_proj_pred in &target_projs { + let target_proj = target_proj_pred.skip_binder(); + let target_assoc = target_proj.def_id; + let target_arg_slots: usize = region_slots_of_args(target_proj.args); + let target_term_slots = region_slots_of_term(target_proj.term); + let target_total = target_arg_slots + target_term_slots; + + if let Some(&super_offset) = super_proj_offsets.get(&target_assoc) { + let super_proj_pred = + super_projs.iter().find(|p| p.skip_binder().def_id == target_assoc).unwrap(); + let super_proj = super_proj_pred.skip_binder(); + + let mut erased_counter = u32::MAX / 2; + let target_regions = + collect_projection_bound_positions(target_proj, &mut erased_counter); + let super_regions = + collect_projection_bound_positions(super_proj, &mut erased_counter); + + for &t_pos in &target_regions { + if super_regions.contains(&t_pos) { + pairs.push((target_offset + t_pos, super_offset + t_pos)); + } + } + } + + target_offset += target_total; + } + } + + // --- Surjectivity ------------------------------------------------- + // + // Count total ReBound regions in each dyn type's predicates. Every + // root ReBound must be reached by some target ReBound, and vice + // versa. We check this by counting distinct positions in the pairs. + let total_target_rebound = count_rebound_regions_in_preds(target_data); + let total_root_rebound = count_rebound_regions_in_preds(super_data); + + let target_wps_in_pairs: FxHashSet = pairs.iter().map(|&(t, _)| t).collect(); + let root_wps_in_pairs: FxHashSet = pairs.iter().map(|&(_, r)| r).collect(); + + let surjective = target_wps_in_pairs.len() >= total_target_rebound + && root_wps_in_pairs.len() >= total_root_rebound; + + WalkPosCorrespondences { pairs, surjective } +} + +/// Identity correspondences when root and target are the same trait. 
+///
+/// For the identity case, every region slot at the same position in both
+/// sides corresponds to itself: `(wp_i, wp_i)` for each region slot.
+/// This handles both `ReBound` (pre-mono) and `ReErased` (post-mono)
+/// regions.
+fn compute_identity_correspondences<'tcx>(
+    _tcx: TyCtxt<'tcx>,
+    super_data: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
+    target_data: &'tcx ty::List<ty::PolyExistentialPredicate<'tcx>>,
+) -> WalkPosCorrespondences {
+    // Both sides are expected to carry a principal here (same trait on
+    // both sides); `unwrap` encodes that precondition.
+    let super_principal = super_data.principal().unwrap();
+    let target_principal = target_data.principal().unwrap();
+    let super_ref = super_principal.skip_binder();
+    let target_ref = target_principal.skip_binder();
+
+    let mut pairs = Vec::new();
+    let mut target_wp = 0usize;
+    let mut root_wp = 0usize;
+
+    for (target_arg, root_arg) in target_ref.args.iter().zip(super_ref.args.iter()) {
+        let target_regions = collect_all_regions_dfs(target_arg);
+        let root_regions = collect_all_regions_dfs(root_arg);
+
+        // NOTE(review): `zip` silently truncates if the two args ever
+        // disagree in region count; in the identity case they should be
+        // structurally equal — confirm that invariant holds upstream.
+        for (offset, (tr, rr)) in target_regions.iter().zip(root_regions.iter()).enumerate() {
+            // Accept both ReBound (pre-mono) and ReErased (post-mono)
+            // regions. In the identity case, every region slot
+            // corresponds to itself.
+            let is_target_region = matches!(tr.kind(), ty::ReBound(..) | ty::ReErased);
+            let is_root_region = matches!(rr.kind(), ty::ReBound(..) | ty::ReErased);
+            if is_target_region && is_root_region {
+                pairs.push((target_wp + offset, root_wp + offset));
+            }
+        }
+
+        target_wp += region_slots_of_arg(target_arg);
+        root_wp += region_slots_of_arg(root_arg);
+    }
+
+    // Identity correspondences are surjective by construction.
+    WalkPosCorrespondences { pairs, surjective: true }
+}
+
+/// Walk the supertrait chain from `start` toward `target_def_id` using
+/// `instantiate_supertrait`. Returns the monomorphized `TraitRef` at
+/// `target_def_id`, or `None` if no path exists.
+fn instantiate_supertrait_chain<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    start: ty::PolyTraitRef<'tcx>,
+    target_def_id: DefId,
+) -> Option<ty::TraitRef<'tcx>> {
+    // BFS through the supertrait hierarchy.
+    let mut queue = std::collections::VecDeque::new();
+    let mut visited = FxHashSet::default();
+    queue.push_back(start);
+
+    while let Some(current) = queue.pop_front() {
+        let current_def_id = current.def_id();
+        // Visited is keyed by DefId: the first (BFS-shallowest)
+        // instantiation of a trait wins; later paths to the same trait
+        // are not re-expanded.
+        if !visited.insert(current_def_id) {
+            continue;
+        }
+
+        if current_def_id == target_def_id {
+            return Some(current.skip_binder());
+        }
+
+        let super_predicates = tcx.explicit_super_predicates_of(current_def_id);
+        for (pred, _) in
+            super_predicates.iter_identity_copied().map(ty::Unnormalized::skip_norm_wip)
+        {
+            // Only trait clauses contribute supertrait edges; region /
+            // projection predicates are skipped.
+            let Some(trait_clause) = pred.instantiate_supertrait(tcx, current).as_trait_clause()
+            else {
+                continue;
+            };
+            let parent_ref = trait_clause.map_bound(|tc| tc.trait_ref);
+            queue.push_back(parent_ref);
+        }
+    }
+
+    None
+}
+
+/// Collect all regions encountered during TypeVisitor DFS of a GenericArg.
+fn collect_all_regions_dfs<'tcx>(arg: ty::GenericArg<'tcx>) -> Vec<ty::Region<'tcx>> {
+    struct Collector<'tcx> {
+        // Regions in DFS encounter order — order is what defines the
+        // "walk position" used throughout this module.
+        regions: Vec<ty::Region<'tcx>>,
+    }
+    impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for Collector<'tcx> {
+        fn visit_region(&mut self, r: ty::Region<'tcx>) {
+            self.regions.push(r);
+        }
+    }
+    let mut collector = Collector { regions: Vec::new() };
+    arg.visit_with(&mut collector);
+    collector.regions
+}
+
+/// Build a map from `ReBound` `BoundVar` → predicate walk position for
+/// a principal `ExistentialTraitRef`'s args.
+fn collect_rebound_walk_positions(args: &[ty::GenericArg<'_>]) -> FxHashMap<ty::BoundVar, usize> {
+    let mut map = FxHashMap::default();
+    let mut wp = 0usize;
+    for arg in args.iter() {
+        let regions = collect_all_regions_dfs(*arg);
+        for (offset, r) in regions.iter().enumerate() {
+            if let ty::ReBound(_, br) = r.kind() {
+                // First occurrence wins (a bv may appear in multiple
+                // args; the walk position is its first DFS hit).
+                map.entry(br.var).or_insert(wp + offset);
+            }
+        }
+        wp += region_slots_of_arg(*arg);
+    }
+    map
+}
+
+/// Count the total number of `ReBound` or `ReErased` region positions
+/// in a dyn type's existential predicates (principal + projections).
+/// +/// Post-monomorphization all regions are `ReErased`, so both kinds must +/// be counted to ensure the surjectivity check is meaningful. +fn count_rebound_regions_in_preds<'tcx>( + preds: &'tcx ty::List>, +) -> usize { + let mut count = 0usize; + if let Some(principal) = preds.principal() { + for arg in principal.skip_binder().args.iter() { + for r in collect_all_regions_dfs(arg) { + if matches!(r.kind(), ty::ReBound(..) | ty::ReErased) { + count += 1; + } + } + } + } + for proj_pred in preds.projection_bounds() { + let proj = proj_pred.skip_binder(); + for arg in proj.args.iter() { + for r in collect_all_regions_dfs(arg) { + if matches!(r.kind(), ty::ReBound(..) | ty::ReErased) { + count += 1; + } + } + } + // Term regions. + struct Counter { + count: usize, + } + impl<'tcx> TypeVisitor> for Counter { + fn visit_region(&mut self, r: ty::Region<'tcx>) { + if matches!(r.kind(), ty::ReBound(..) | ty::ReErased) { + self.count += 1; + } + } + } + let mut counter = Counter { count: 0 }; + proj.term.visit_with(&mut counter); + count += counter.count; + } + count +} + +/// Collect the DFS positions of `ReBound` or `ReErased` regions within +/// an `ExistentialProjection`'s args + term. Positions are relative to +/// the start of the projection (not the dyn type). +/// +/// Both `ReBound` and `ReErased` regions are included so that the +/// projection correspondence check works post-monomorphization. 
+fn collect_projection_bound_positions<'tcx>(
+    proj: ty::ExistentialProjection<'tcx>,
+    next_erased_var: &mut u32,
+) -> Vec<usize> {
+    let mut positions = Vec::new();
+    let mut walk_pos = 0usize;
+
+    // `collect_bound_regions_in` yields (dfs_offset, region) pairs
+    // relative to the start of each arg; `walk_pos` rebases them to the
+    // start of the projection.
+    // NOTE(review): `next_erased_var` is threaded through so erased
+    // regions get distinct synthetic vars — helper defined elsewhere;
+    // confirm its contract matches this use.
+    for arg in proj.args.iter() {
+        let regions = collect_bound_regions_in(arg, next_erased_var);
+        for (offset, _br) in &regions {
+            positions.push(walk_pos + offset);
+        }
+        walk_pos += region_slots_of_arg(arg);
+    }
+
+    // The associated-type term contributes slots after all args.
+    let term_regions = collect_bound_regions_in(proj.term, next_erased_var);
+    for (offset, _br) in &term_regions {
+        positions.push(walk_pos + offset);
+    }
+
+    positions
+}
+
+// ── Resolution ─────────────────────────────────────────────────────────────────
+
+/// Resolve `trait_cast_is_lifetime_erasure_safe` in walk-position space.
+///
+/// Returns `Ok(())` iff:
+/// 1. The structural walk-position correspondences (from
+///    `compute_walk_pos_correspondences`) are surjective.
+/// 2. Every (target_wp, root_wp) pair has mutual outlives in the
+///    caller's environment when translated through `origin_positions`.
+///
+/// Returns `Err(reason)` with a short static string describing which
+/// admissibility rule rejected the cast; consumed by the
+/// `-Zdump-trait-cast-erasure-safety` diagnostic.
+fn resolve_erasure_safe_walk_pos<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    super_trait: Ty<'tcx>,
+    target_trait: Ty<'tcx>,
+    origin_positions: &[Option<usize>],
+    call_site_outlives: &[ty::GenericArg<'tcx>],
+    root_transport_slots: usize,
+) -> Result<(), &'static str> {
+    // Non-dyn inputs have no erased dyn lifetimes to police — the query
+    // is vacuously safe for them.
+    let (super_data, target_data) = match (*super_trait.kind(), *target_trait.kind()) {
+        (ty::Dynamic(s, ..), ty::Dynamic(t, ..)) => (s, t),
+        _ => return Ok(()),
+    };
+
+    let correspondences = compute_walk_pos_correspondences(tcx, super_data, target_data);
+
+    if !correspondences.surjective {
+        return Err("non-surjective correspondences");
+    }
+
+    if correspondences.pairs.is_empty() {
+        return Ok(());
+    }
+
+    // Build an outlives oracle from the call-site entries (already in
+    // origin walk-position space). Compute the dimension (max index + 2
+    // for 0-based + 'static slot) and use the cached Floyd-Warshall
+    // reachability matrix for O(1) outlives lookups.
+    // NOTE(review): `usize::MAX` acts as the 'static sentinel — it is
+    // excluded from the max so it does not blow up the matrix dimension.
+    let max_idx = call_site_outlives
+        .iter()
+        .filter_map(|entry| match entry.kind() {
+            ty::GenericArgKind::Outlives(o) => {
+                let l = if o.longer() == usize::MAX { None } else { Some(o.longer()) };
+                let s = if o.shorter() == usize::MAX { None } else { Some(o.shorter()) };
+                l.into_iter().chain(s).max()
+            }
+            _ => bug!("expected Outlives entry in call-site outlives slice"),
+        })
+        .max()
+        .unwrap_or(0);
+    let dim = max_idx + 2;
+    let interned_entries = tcx.arena.alloc_from_iter(call_site_outlives.iter().copied());
+    let reach = tcx.outlives_reachability((interned_entries, dim));
+    let env = crate::cast_sensitivity::CallerOutlivesEnv::from_raw(reach, dim);
+
+    // Check mutual outlives for each structural pair.
+    // Group pairs by target walk position to handle fan-out.
+    let mut target_to_roots: FxHashMap<usize, Vec<usize>> = FxHashMap::default();
+    for &(t_wp, r_wp) in &correspondences.pairs {
+        // Translate predicate walk positions to transport positions,
+        // then to origin positions. Only the target side is offset by
+        // `root_transport_slots`; root positions are already transport-based.
+        let t_transport = root_transport_slots + t_wp;
+        let r_transport = r_wp;
+
+        let Some(origin_t) = origin_positions.get(t_transport).copied().flatten() else {
+            return Err("target origin position missing");
+        };
+        let Some(origin_r) = origin_positions.get(r_transport).copied().flatten() else {
+            return Err("root origin position missing");
+        };
+
+        // Mutual outlives: target and root represent the same lifetime.
+        if !env.outlives(origin_t, origin_r) || !env.outlives(origin_r, origin_t) {
+            return Err("mutual-outlives missing for structural pair");
+        }
+
+        target_to_roots.entry(t_wp).or_default().push(origin_r);
+    }
+
+    // Fan-out check: when one target maps to multiple roots, those
+    // roots must also be mutually equivalent.
+    // Iteration order is irrelevant — we check a universal property.
+    #[allow(rustc::potential_query_instability)]
+    for roots in target_to_roots.values() {
+        // O(k^2) pairwise check; fan-out groups are expected to be tiny.
+        for i in 0..roots.len() {
+            for j in (i + 1)..roots.len() {
+                if !env.outlives(roots[i], roots[j]) || !env.outlives(roots[j], roots[i]) {
+                    return Err("fan-out mutual-outlives missing");
+                }
+            }
+        }
+    }
+
+    Ok(())
+}
+
+/// Emit the `-Zdump-trait-cast-erasure-safety` diagnostic block for a
+/// single erasure-safety query.
+///
+/// Only invoked when the flag is set and the super-trait's printed name
+/// matches the filter. This function recomputes the auxiliary data
+/// (binder-var enumeration, supertrait chain) from scratch — it is a
+/// strict observer of analysis inputs and the already-computed verdict.
+fn dump_erasure_safety<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    super_trait: Ty<'tcx>,
+    target_trait: Ty<'tcx>,
+    call_site_outlives: &[ty::GenericArg<'tcx>],
+    verdict: Result<(), &'static str>,
+) {
+    let super_name = with_no_trimmed_paths!(super_trait.to_string());
+    let target_name = with_no_trimmed_paths!(target_trait.to_string());
+
+    eprintln!("=== Erasure Safety: super={super_name} target={target_name} ===");
+
+    // Outlives class: print the semantic (longer, shorter) pairs.
+    if call_site_outlives.is_empty() {
+        eprintln!("  Outlives class: empty");
+    } else {
+        let mut pairs: Vec<(usize, usize)> = call_site_outlives
+            .iter()
+            .filter_map(|entry| match entry.kind() {
+                ty::GenericArgKind::Outlives(o) => Some((o.longer(), o.shorter())),
+                _ => None,
+            })
+            .collect();
+        // Sorted for deterministic dump output.
+        pairs.sort_unstable();
+        let rendered: Vec<String> = pairs
+            .iter()
+            .map(|&(l, s)| {
+                // usize::MAX is the 'static sentinel (see the resolver above).
+                let l_s = if l == usize::MAX { "'static".to_string() } else { format!("wp{l}") };
+                let s_s = if s == usize::MAX { "'static".to_string() } else { format!("wp{s}") };
+                format!("{l_s}: {s_s}")
+            })
+            .collect();
+        eprintln!("  Outlives class: [{}]", rendered.join(", "));
+    }
+
+    // Binder var enumeration for the super-trait and target-trait.
+    let super_data = match *super_trait.kind() {
+        ty::Dynamic(d, ..) => Some(d),
+        _ => None,
+    };
+    let target_data = match *target_trait.kind() {
+        ty::Dynamic(d, ..) => Some(d),
+        _ => None,
+    };
+
+    let (mut principal_entries, mut projection_entries) = target_data
+        .map(|d| {
+            let bvs = collect_all_binder_vars(tcx, d);
+            (bvs.principal_entries, bvs.projection_entries)
+        })
+        .unwrap_or_else(|| (Vec::new(), Vec::new()));
+    // `collect_all_binder_vars` yields entries in DFS order; sort stably
+    // by (bv_index, arg_index / assoc, dfs_offset) for deterministic
+    // output independent of any upstream hash iteration.
+    principal_entries.sort_by_key(|(bv, loc)| (*bv, loc.arg_index, loc.dfs_offset));
+    projection_entries
+        .sort_by_key(|(bv, loc)| (*bv, loc.assoc_def_id.index.as_u32(), loc.dfs_position));
+
+    eprintln!("  Principal binder vars ({}):", principal_entries.len());
+    for (bv, loc) in &principal_entries {
+        eprintln!("    bv{bv}: arg_index={} dfs_offset={}", loc.arg_index, loc.dfs_offset);
+    }
+    eprintln!("  Projection binder vars ({}):", projection_entries.len());
+    for (bv, loc) in &projection_entries {
+        let assoc = with_no_trimmed_paths!(tcx.def_path_str(loc.assoc_def_id));
+        eprintln!("    bv{bv}: assoc={assoc} dfs_position={}", loc.dfs_position);
+    }
+
+    // Where-clause / structural correspondences derivation summary.
+    if let (Some(sd), Some(td)) = (super_data, target_data) {
+        let corr = compute_walk_pos_correspondences(tcx, sd, td);
+        let mut pairs = corr.pairs.clone();
+        pairs.sort_unstable();
+        eprintln!(
+            "  Where-clause outlives derivation: surjective={} pairs={}",
+            corr.surjective,
+            pairs.len()
+        );
+        for (t_wp, r_wp) in pairs {
+            eprintln!("    target_wp={t_wp} <-> root_wp={r_wp}");
+        }
+    } else {
+        eprintln!("  Where-clause outlives derivation: (non-dyn query — vacuously safe)");
+    }
+
+    // Supertrait chain trace from target principal → super principal.
+ eprintln!(" Supertrait chain:"); + if let (Some(sd), Some(td)) = (super_data, target_data) + && let (Some(sp), Some(tp)) = (sd.principal(), td.principal()) + { + let super_def_id = sp.skip_binder().def_id; + let target_def_id = tp.skip_binder().def_id; + let chain = trace_supertrait_chain(tcx, target_def_id, super_def_id); + match chain { + Some(defs) => { + let rendered: Vec = + defs.iter().map(|d| with_no_trimmed_paths!(tcx.def_path_str(*d))).collect(); + eprintln!(" {}", rendered.join(" -> ")); + } + None => eprintln!(" (no path from target to super in supertrait hierarchy)"), + } + } else { + eprintln!(" (non-dyn query — no chain)"); + } + + match verdict { + Ok(()) => eprintln!(" Verdict: safe"), + Err(reason) => eprintln!(" Verdict: unsafe ({reason})"), + } +} + +/// Trace the shortest supertrait path from `start` to `target`. +/// +/// Returns the list of `DefId`s along the path (`start` first, `target` +/// last), or `None` if no such path exists. Used only for the +/// `-Zdump-trait-cast-erasure-safety` diagnostic; the real analysis uses +/// `instantiate_supertrait_chain` which substitutes args. +fn trace_supertrait_chain(tcx: TyCtxt<'_>, start: DefId, target: DefId) -> Option> { + if start == target { + return Some(vec![start]); + } + let mut queue: std::collections::VecDeque<(DefId, Vec)> = + std::collections::VecDeque::new(); + let mut visited: FxHashSet = FxHashSet::default(); + queue.push_back((start, vec![start])); + visited.insert(start); + while let Some((cur, path)) = queue.pop_front() { + let supers = tcx.explicit_super_predicates_of(cur); + // Collect parent trait def_ids in a deterministic order. 
+ let mut parents: Vec = supers + .iter_identity_copied() + .map(ty::Unnormalized::skip_norm_wip) + .filter_map(|(pred, _)| { + pred.as_trait_clause().map(|tc| tc.skip_binder().trait_ref.def_id) + }) + .collect(); + parents.sort_by_key(|d| (d.krate.as_u32(), d.index.as_u32())); + parents.dedup(); + for parent in parents { + if !visited.insert(parent) { + continue; + } + let mut next_path = path.clone(); + next_path.push(parent); + if parent == target { + return Some(next_path); + } + queue.push_back((parent, next_path)); + } + } + None +} + +/// Query provider for `is_lifetime_erasure_safe`. +/// +/// Determines whether casting to `target_trait` within the graph rooted at +/// `super_trait` is safe w.r.t. lifetime erasure, given call-site outlives +/// entries and origin walk positions from the CRL composition pipeline. +pub(crate) fn is_lifetime_erasure_safe<'tcx>( + tcx: TyCtxt<'tcx>, + (super_trait, target_trait, origin_positions, call_site_outlives): ( + Ty<'tcx>, + Ty<'tcx>, + &'tcx [Option], + &'tcx [ty::GenericArg<'tcx>], + ), +) -> bool { + let root_transport_slots = region_slots_of_ty(super_trait); + let verdict = resolve_erasure_safe_walk_pos( + tcx, + super_trait, + target_trait, + origin_positions, + call_site_outlives, + root_transport_slots, + ); + + // `-Zdump-trait-cast-erasure-safety` diagnostic emission. Fast path + // when the flag is absent; substring match against the super-trait's + // fully-qualified printed name when present. 
+ if let Some(filter) = tcx.sess.opts.unstable_opts.dump_trait_cast_erasure_safety.as_deref() { + let super_name = with_no_trimmed_paths!(super_trait.to_string()); + let matches = filter == "all" || super_name.contains(filter); + if matches { + dump_erasure_safety(tcx, super_trait, target_trait, call_site_outlives, verdict); + } + } + + verdict.is_ok() +} diff --git a/compiler/rustc_monomorphize/src/errors.rs b/compiler/rustc_monomorphize/src/errors.rs index 27705a9837ad3..451612b04eeff 100644 --- a/compiler/rustc_monomorphize/src/errors.rs +++ b/compiler/rustc_monomorphize/src/errors.rs @@ -184,3 +184,14 @@ pub(crate) struct StaticInitializerCyclic<'a> { pub head: &'a str, pub target: &'a str, } + +#[derive(Diagnostic)] +#[diag("cast target `{$target}` is unreachable in the trait graph of `{$root}`")] +#[note("no type implementing `{$root}` also implements `{$target}`")] +#[note("this cast will always return `Err` at runtime")] +pub(crate) struct UnusedCastTargetLint<'tcx> { + #[primary_span] + pub span: Span, + pub root: Ty<'tcx>, + pub target: Ty<'tcx>, +} diff --git a/compiler/rustc_monomorphize/src/graph_checks/mod.rs b/compiler/rustc_monomorphize/src/graph_checks/mod.rs index 87cd2c733b69a..b272f28eb4203 100644 --- a/compiler/rustc_monomorphize/src/graph_checks/mod.rs +++ b/compiler/rustc_monomorphize/src/graph_checks/mod.rs @@ -1,8 +1,7 @@ //! 
Checks that need to operate on the entire mono item graph
-use rustc_middle::mono::MonoItem;
+use rustc_middle::mono::{MonoItem, UsageMap};
 use rustc_middle::ty::TyCtxt;

-use crate::collector::UsageMap;
 use crate::graph_checks::statics::check_static_initializers_are_acyclic;

 mod statics;
diff --git a/compiler/rustc_monomorphize/src/graph_checks/statics.rs b/compiler/rustc_monomorphize/src/graph_checks/statics.rs
index 16642b9960126..53fa8b0beeebf 100644
--- a/compiler/rustc_monomorphize/src/graph_checks/statics.rs
+++ b/compiler/rustc_monomorphize/src/graph_checks/statics.rs
@@ -4,10 +4,9 @@
 use rustc_data_structures::graph::{DirectedGraph, Successors};
 use rustc_data_structures::unord::UnordMap;
 use rustc_hir::def_id::DefId;
 use rustc_index::{Idx, IndexVec, newtype_index};
-use rustc_middle::mono::MonoItem;
+use rustc_middle::mono::{MonoItem, UsageMap};
 use rustc_middle::ty::TyCtxt;

-use crate::collector::UsageMap;
 use crate::errors;

 #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
diff --git a/compiler/rustc_monomorphize/src/lib.rs b/compiler/rustc_monomorphize/src/lib.rs
index ae97bf830d8c0..37886b9ea6a5d 100644
--- a/compiler/rustc_monomorphize/src/lib.rs
+++ b/compiler/rustc_monomorphize/src/lib.rs
@@ -12,11 +12,17 @@ use rustc_middle::util::Providers;
 use rustc_middle::{bug, traits};
 use rustc_span::ErrorGuaranteed;

+mod cast_sensitivity;
 mod collector;
+mod erasure_safe;
 mod errors;
 mod graph_checks;
 mod mono_checks;
 mod partitioning;
+mod resolved_bodies;
+mod table_layout;
+mod trait_cast_requests;
+mod trait_graph;
 mod util;

 fn custom_coerce_unsize_info<'tcx>(
@@ -49,4 +55,14 @@
 pub fn provide(providers: &mut Providers) {
     partitioning::provide(providers);
     mono_checks::provide(&mut providers.queries);
+    // NOTE(review): trait-cast query providers, registered in pipeline
+    // order (request gathering -> graph/reachability -> layout/tables ->
+    // call-site sensitivity -> erasure safety).
+    providers.queries.gather_trait_cast_requests = trait_cast_requests::gather_trait_cast_requests;
+    providers.queries.trait_cast_graph = trait_graph::trait_cast_graph;
+    providers.queries.outlives_reachability = trait_graph::outlives_reachability;
+    providers.queries.impl_universally_admissible = trait_graph::impl_universally_admissible;
+    providers.queries.trait_cast_layout = table_layout::trait_cast_layout;
+    providers.queries.trait_cast_table = table_layout::trait_cast_table;
+    providers.queries.trait_cast_table_alloc = table_layout::trait_cast_table_alloc;
+    providers.queries.global_crate_id_alloc = table_layout::global_crate_id_alloc;
+    providers.queries.augmented_outlives_for_call = cast_sensitivity::augmented_outlives_for_call;
+    providers.queries.is_lifetime_erasure_safe = erasure_safe::is_lifetime_erasure_safe;
+}
diff --git a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs
index a24b0443d39c9..2823e6e753e8c 100644
--- a/compiler/rustc_monomorphize/src/mono_checks/move_check.rs
+++ b/compiler/rustc_monomorphize/src/mono_checks/move_check.rs
@@ -34,7 +34,7 @@ impl<'tcx> MirVisitor<'tcx> for MoveCheckVisitor<'tcx> {
     fn visit_terminator(&mut self, terminator: &mir::Terminator<'tcx>, location: Location) {
         match terminator.kind {
             mir::TerminatorKind::Call { ref func, ref args, ref fn_span, .. }
-            | mir::TerminatorKind::TailCall { ref func, ref args, ref fn_span } => {
+            | mir::TerminatorKind::TailCall { ref func, ref args, ref fn_span, .. } => {
                 let callee_ty = func.ty(self.body, self.tcx);
                 let callee_ty = self.monomorphize(callee_ty);
                 self.check_fn_args_move_size(callee_ty, args, *fn_span, location);
diff --git a/compiler/rustc_monomorphize/src/partitioning.rs b/compiler/rustc_monomorphize/src/partitioning.rs
index 6f9795fb3bff1..a8ea8a7eb6a80 100644
--- a/compiler/rustc_monomorphize/src/partitioning.rs
+++ b/compiler/rustc_monomorphize/src/partitioning.rs
@@ -92,13 +92,14 @@
 //! source-level module, functions from the same module will be available for
 //! inlining, even when they are not marked `#[inline]`.
+use std::borrow::Cow; use std::cmp; use std::collections::hash_map::Entry; use std::fs::{self, File}; use std::io::Write; use std::path::{Path, PathBuf}; -use rustc_data_structures::fx::{FxIndexMap, FxIndexSet}; +use rustc_data_structures::fx::{FxHashMap, FxHashSet, FxIndexMap, FxIndexSet}; use rustc_data_structures::sync::par_join; use rustc_data_structures::unord::{UnordMap, UnordSet}; use rustc_hir::LangItem; @@ -110,11 +111,12 @@ use rustc_middle::bug; use rustc_middle::middle::codegen_fn_attrs::CodegenFnAttrFlags; use rustc_middle::middle::exported_symbols::{SymbolExportInfo, SymbolExportLevel}; use rustc_middle::mono::{ - CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, MonoItem, MonoItemData, - MonoItemPartitions, Visibility, + CodegenUnit, CodegenUnitNameBuilder, InstantiationMode, LocalMonoItemCollection, MonoItem, + MonoItemData, MonoItemPartitions, UsageMap, Visibility, }; use rustc_middle::ty::print::{characteristic_def_id_of_type, with_no_trimmed_paths}; -use rustc_middle::ty::{self, InstanceKind, TyCtxt}; +use rustc_middle::ty::trait_cast::{IntrinsicResolutions, TraitCastRequests}; +use rustc_middle::ty::{self, InstanceKind, Ty, TyCtxt}; use rustc_middle::util::Providers; use rustc_session::CodegenUnits; use rustc_session::config::{DumpMonoStatsFormat, SwitchWithOptPath}; @@ -122,7 +124,8 @@ use rustc_span::Symbol; use rustc_target::spec::SymbolVisibility; use tracing::debug; -use crate::collector::{self, MonoItemCollectionStrategy, UsageMap}; +use crate::collector::{self, MonoItemCollectionStrategy}; +use crate::erasure_safe::trait_metadata_index_outlives_class; use crate::errors::{CouldntDumpMonoStats, SymbolAlreadyDefined}; use crate::graph_checks::target_specific_checks; @@ -222,7 +225,18 @@ where // So even if its mode is LocalCopy, we need to treat it like a root. match mono_item.instantiation_mode(cx.tcx) { InstantiationMode::GloballyShared { .. 
} => {}
-                InstantiationMode::LocalCopy => continue,
+                InstantiationMode::LocalCopy => {
+                    // Items added after the main mono collection pass (for example,
+                    // trait-cast vtable methods discovered while resolving table
+                    // allocations) have no usage-map edges. Treat those orphaned
+                    // LocalCopy items as synthetic roots so they still get placed
+                    // into a CGU and emitted for codegen.
+                    // NOTE(review): "has edges" means either it uses something
+                    // (used_map key) or something uses it (get_user_items).
+                    if cx.usage_map.used_map.contains_key(&mono_item)
+                        || !cx.usage_map.get_user_items(mono_item).is_empty()
+                    {
+                        continue;
+                    }
+                }
             }

             let characteristic_def_id = characteristic_def_id_of_mono_item(cx.tcx, mono_item);
@@ -269,11 +283,25 @@
             // from multiple root items within a CGU, which is fine, it just means
             // the `insert` will be a no-op.
             for inlined_item in reachable_inlined_items {
-                // This is a CGU-private copy.
+                // Trait-cast delayed instances must never be CGU-private: the
+                // matching symbol from an upstream dylib (built with the
+                // instantiating-crate suffix stripped from the mangled name) is
+                // resolved against the global crate's DYNSYM at runtime. A
+                // CGU-private (Internal) copy wouldn't appear in DYNSYM at all,
+                // leaving the dylib's reloc unresolved. Promote to
+                // External + Protected instead.
+                let delayed_inlined = matches!(inlined_item, MonoItem::Fn(i)
+                    if cx.tcx.is_global_crate()
+                        && cx.tcx.is_transitively_delayed_instance(i));
+                let (ilinkage, ivisibility) = if delayed_inlined {
+                    (Linkage::External, Visibility::Protected)
+                } else {
+                    (Linkage::Internal, Visibility::Default)
+                };
                 cgu.items_mut().entry(inlined_item).or_insert_with(|| MonoItemData {
                     inlined: true,
-                    linkage: Linkage::Internal,
-                    visibility: Visibility::Default,
+                    linkage: ilinkage,
+                    visibility: ivisibility,
                     size_estimate: inlined_item.size_estimate(cx.tcx),
                 });
             }
@@ -781,6 +809,24 @@ fn mono_item_visibility<'tcx>(
     can_export_generics: bool,
     always_export_generics: bool,
 ) -> Visibility {
+    // Trait-cast delayed instances codegen'd by the global crate must be
+    // `Protected`: the symbol needs to appear in `DT_DYNSYM` so upstream
+    // dylibs loaded alongside can resolve their vtable relocs at runtime,
+    // but we must **not** let another global crate loaded in the same
+    // process interpose our local intrinsic calls (the AllocId-rejection
+    // check is predicated on local calls using local bodies).
+    // `Visibility::Protected` maps to ELF `STV_PROTECTED` ("hide upward,
+    // export downward"); on object formats that don't support it,
+    // degrades to `Default`, which is still correct for the common
+    // single-global-crate case.
+    if let MonoItem::Fn(instance) = mono_item
+        && tcx.is_global_crate()
+        && tcx.is_transitively_delayed_instance(*instance)
+    {
+        *can_be_internalized = false;
+        return Visibility::Protected;
+    }
+
     let instance = match mono_item {
         // This is pretty complicated; see below.
         MonoItem::Fn(instance) => instance,
@@ -1128,15 +1174,691 @@ where
     }
 }

-fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitions<'_> {
+/// Emit the `unused_cast_target` lint for every `trait_metadata_index`
+/// request whose sub_trait has no satisfying concrete type in the final
+/// binary's trait graph.
+///
+/// This is the final crate of compilation (binary / staticlib / cdylib),
+/// so the set of concrete types implementing the root is known and we can
+/// tell whether any of them also implement the sub_trait. If none do, the
+/// cast will always return `Err` at runtime.
+///
+/// Span recovery: each request carries the intrinsic `Instance`; we walk
+/// the crate graph's delayed requests once to map intrinsic → caller and
+/// use the caller's `def_span` as the lint's primary span. Cross-crate
+/// casts land on the caller's foreign def_span; local casts land near
+/// the `cast!` invocation.
+fn emit_unused_cast_target_lint<'tcx>(tcx: TyCtxt<'tcx>, requests: &TraitCastRequests<'tcx>) {
+    use std::iter;
+
+    use rustc_hir::CRATE_HIR_ID;
+    use rustc_lint_defs::builtin::UNUSED_CAST_TARGET;
+    use rustc_middle::ty::Instance;
+    use rustc_span::DUMMY_SP;
+
+    use crate::errors::UnusedCastTargetLint;
+    use crate::trait_graph::resolve_dyn_satisfaction;
+
+    if requests.index_requests.is_empty() {
+        return;
+    }
+
+    // Map each intrinsic Instance to the def_span of the caller that references it.
+    let mut intrinsic_caller_span: FxHashMap<ty::Instance<'tcx>, rustc_span::Span> =
+        FxHashMap::default();
+    for &cnum in iter::once(&LOCAL_CRATE).chain(tcx.crates(())) {
+        for delayed in tcx.delayed_codegen_requests(cnum) {
+            for &intrinsic in delayed.intrinsic_callees {
+                // First caller seen wins; that is the span the lint points at.
+                intrinsic_caller_span
+                    .entry(intrinsic)
+                    .or_insert_with(|| tcx.def_span(delayed.instance.def_id()));
+            }
+        }
+    }
+
+    #[allow(rustc::potential_query_instability)]
+    for req in &requests.index_requests {
+        let graph = tcx.trait_cast_graph(req.super_trait);
+        // Any concrete implementer of the root that also satisfies the
+        // sub_trait makes the cast target reachable — no lint then.
+        let any_satisfies = graph
+            .concrete_types
+            .items()
+            .any(|ct| resolve_dyn_satisfaction(tcx, **ct, req.sub_trait).is_some());
+        if any_satisfies {
+            continue;
+        }
+        // DUMMY_SP fallback: an intrinsic with no recorded caller still lints.
+        let span = intrinsic_caller_span.get(&req.instance).copied().unwrap_or(DUMMY_SP);
+        tcx.emit_node_span_lint(
+            UNUSED_CAST_TARGET,
+            CRATE_HIR_ID,
+            span,
+            UnusedCastTargetLint { span, root: req.super_trait, target: req.sub_trait },
+        );
+    }
+}
+
+/// Called from within `collect_and_partition_mono_items`, after mono
+/// collection completes but before partitioning. Resolves all
+/// delayed codegen requests into `MonoItem::Fn` entries that are
+/// inserted into `mono_items` before partitioning distributes
+/// items into codegen units.
+/// Only runs in a global crate (binary, staticlib, cdylib).
+fn resolve_trait_cast_globals<'tcx>(tcx: TyCtxt<'tcx>, mono_items: &mut Cow<'_, [MonoItem<'tcx>]>) {
+    if !tcx.is_global_crate() {
+        return; // Non-global crates defer to the global crate.
+    }
+
+    let requests = tcx.gather_trait_cast_requests(());
+    if requests.is_empty() {
+        return; // No trait casting in the entire program.
+    }
+
+    // Build the intrinsic resolution lookup table. Query results
+    // (trait_cast_layout, trait_cast_table, trait_cast_table_alloc)
+    // are driven on-demand within build_intrinsic_resolutions and
+    // cached by the dep graph for incremental reuse.
+ let resolutions = crate::table_layout::build_intrinsic_resolutions(tcx, &requests); + + // Fire `unused_cast_target` lint for every `trait_metadata_index` + // request whose sub_trait has no concrete-type implementer in the + // final binary. Such casts always return `Err` at runtime. + emit_unused_cast_target_lint(tcx, &requests); + + // Cascading canonicalization: process all caller DelayedInstances + // (directly + transitively sensitive) bottom-up, resolving + // intrinsics, rewriting callee references through the condensation + // map, patching MIR, feeding codegen_mir, and inserting + // MonoItem::Fn entries into mono_items. + // + // Pulls DelayedInstances from delayed_codegen_requests directly — + // independent of TraitCastRequests. + cascade_canonicalize(tcx, &resolutions, mono_items); + + // Collect mono items for vtable methods referenced by trait cast + // tables. For each (super_trait, concrete_type) table, iterate the + // sub-traits and collect vtable methods for implemented sub-traits. + // Uses create_mono_items_for_vtable_methods (same path as the normal + // collector) to ensure Instance consistency. Dedup against items + // already collected from direct unsizing points. + collect_trait_cast_vtable_methods(tcx, &requests, mono_items); + collect_trait_cast_table_backing_items(tcx, &resolutions, mono_items); +} + +/// Collect vtable method mono items for all (concrete_type, sub_trait) pairs +/// in the trait cast tables. These vtables are generated during resolution +/// but their methods must be added as mono items for codegen. +/// +/// Deduplicates against already-collected items (from direct unsizing casts) +/// and across sub-trait vtables (which share supertrait methods). 
+fn collect_trait_cast_vtable_methods<'tcx>( + tcx: TyCtxt<'tcx>, + requests: &TraitCastRequests<'tcx>, + mono_items: &mut Cow<'_, [MonoItem<'tcx>]>, +) { + use crate::trait_graph::resolve_dyn_satisfaction; + + // Deduplicate (super_trait, concrete_type) pairs across requests. + let table_pairs: FxHashSet<(Ty<'tcx>, Ty<'tcx>)> = + requests.table_requests.iter().map(|r| (r.super_trait, r.concrete_type)).collect(); + + if table_pairs.is_empty() { + return; + } + + let mut seen: FxHashSet> = mono_items.iter().copied().collect(); + let mut new_items = Vec::new(); + let mut candidate_items = Vec::new(); + + // Iteration order is irrelevant — we are collecting into a dedup set. + #[allow(rustc::potential_query_instability)] + for &(super_trait, concrete_type) in &table_pairs { + let layout = tcx.trait_cast_layout(super_trait); + for sub_trait in layout.sub_traits() { + if resolve_dyn_satisfaction(tcx, concrete_type, sub_trait).is_none() { + continue; + } + crate::collector::collect_vtable_methods_for_trait_cast( + tcx, + sub_trait, + concrete_type, + &mut candidate_items, + ); + for item in candidate_items.drain(..) { + if seen.insert(item) { + new_items.push(item); + } + } + } + } + + if !new_items.is_empty() { + mono_items.to_mut().extend(new_items); + } +} + +/// Collect mono items reachable from the actual trait-cast table allocations. +/// +/// This is a conservative backstop for cases where reconstructing the +/// (sub-trait, concrete-type) pairs misses an item that the emitted table +/// or its referenced vtables nevertheless contain. 
+fn collect_trait_cast_table_backing_items<'tcx>( + tcx: TyCtxt<'tcx>, + resolutions: &IntrinsicResolutions<'tcx>, + mono_items: &mut Cow<'_, [MonoItem<'tcx>]>, +) { + if resolutions.table_alloc_ids.is_empty() { + return; + } + + let mut seen: FxHashSet> = mono_items.iter().copied().collect(); + let mut new_items = Vec::new(); + let mut candidate_items = Vec::new(); + + for &alloc_id in &resolutions.table_alloc_ids { + crate::collector::collect_alloc_items_for_trait_cast(tcx, alloc_id, &mut candidate_items); + for item in candidate_items.drain(..) { + if seen.insert(item) { + new_items.push(item); + } + } + } + + if !new_items.is_empty() { + mono_items.to_mut().extend(new_items); + } +} + +/// Process all delayed codegen Instances bottom-up through the sensitive +/// sub-graph: apply callee substitutions, resolve intrinsic calls, +/// canonicalize condensed Instances via trampoline bodies, feed patched +/// MIR via `codegen_mir`, and insert `MonoItem::Fn` entries into +/// `mono_items` for partitioning. +/// +/// Condensation-based deduplication: two Instances that belong to the +/// same condensation group (identical admissibility vectors across all +/// concrete types) produce identical resolved bodies. The canonical +/// Instance (smallest `OutlivesClass` under `StableOrd`) receives the +/// full resolved body; non-canonical Instances receive a trampoline body +/// that tail-calls the canonical. +/// +/// Deduplication cascades through the bottom-up traversal: when leaf +/// Instances condense, their callers' patched MIR references the same +/// canonical callee. Callers that differ only in which condensed callee +/// they reference now produce identical patched bodies and are +/// themselves deduplicated. 
+fn cascade_canonicalize<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    resolutions: &IntrinsicResolutions<'tcx>,
+    mono_items: &mut Cow<'_, [MonoItem<'tcx>]>,
+) {
+    use std::collections::BTreeMap;
+
+    use rustc_data_structures::graph::scc::Sccs;
+    use rustc_data_structures::graph::vec_graph::VecGraph;
+    use rustc_middle::mono::DelayedInstance;
+    use rustc_middle::ty::trait_cast::OutlivesClass;
+
+    let dump = tcx.sess.opts.unstable_opts.dump_trait_cast_canonicalization;
+
+    // Collect all delayed Instances from all crates (local crate first,
+    // then upstream), deduplicating by Instance.
+    let mut all_delayed: Vec<&DelayedInstance<'tcx>> = Vec::new();
+    let mut seen: FxHashSet<ty::Instance<'tcx>> = FxHashSet::default();
+    for cnum in std::iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) {
+        for delayed in tcx.delayed_codegen_requests(cnum) {
+            if seen.insert(delayed.instance) {
+                all_delayed.push(delayed);
+            }
+        }
+    }
+
+    if all_delayed.is_empty() {
+        if dump {
+            eprintln!("=== Trait-Cast Canonicalization ===");
+            eprintln!("  Total delayed instances: 0");
+            eprintln!("  Depth levels: 0");
+            eprintln!("  Canon map summary:");
+            eprintln!("    total redirections: 0");
+        }
+        return;
+    }
+
+    // Build the delayed-Instance dependency graph and compute depths
+    // via VecGraph + Sccs. The dependency graph is a DAG (the call
+    // graph is acyclic after augmentation), so every SCC is a
+    // singleton — Sccs gives us a topological ordering for free,
+    // and depth is computed in a single O(V+E) pass.
+    rustc_index::newtype_index! {
+        #[orderable]
+        struct DelayIdx {}
+    }
+    rustc_index::newtype_index! {
+        #[orderable]
+        struct DelaySccIdx {}
+    }
+
+    let instance_to_idx: FxHashMap<ty::Instance<'tcx>, usize> =
+        all_delayed.iter().enumerate().map(|(i, d)| (d.instance, i)).collect();
+
+    let mut edge_pairs: Vec<(DelayIdx, DelayIdx)> = Vec::new();
+    for (i, d) in all_delayed.iter().enumerate() {
+        for &(_, callee) in d.callee_substitutions {
+            if let Some(&j) = instance_to_idx.get(&callee) {
+                edge_pairs.push((DelayIdx::from(i), DelayIdx::from(j)));
+            }
+        }
+    }
+
+    let delay_graph = VecGraph::<DelayIdx>::new(all_delayed.len(), edge_pairs);
+    let delay_sccs = Sccs::<DelayIdx, DelaySccIdx>::new(&delay_graph);
+
+    // Single-pass depth computation in dependency order (O(V+E)).
+    // all_sccs() visits callees before callers, so successor depths
+    // are already resolved when we compute a node's depth.
+    let mut scc_depth: Vec<usize> = vec![0; delay_sccs.num_sccs()];
+    for scc in delay_sccs.all_sccs() {
+        let d = delay_sccs
+            .successors(scc)
+            .iter()
+            .copied()
+            .map(|succ| scc_depth[succ.index()] + 1)
+            .max()
+            .unwrap_or(0);
+        scc_depth[scc.index()] = d;
+    }
+    let depth: Vec<usize> = (0..all_delayed.len())
+        .map(|i| scc_depth[delay_sccs.scc(DelayIdx::from(i)).index()])
+        .collect();
+
+    // --- Seed canon_map from condensation groups (leaf level) ---
+    //
+    // For each root trait's layout, the condensation groups identify
+    // which (sub_trait, outlives_class) pairs share the same table slot.
+    // Group the directly-sensitive delayed Instances by
+    // (def_id, base_args) — same function, same type args, differing
+    // only in Outlives entries. Within each group, Instances whose
+    // OutlivesClass maps to the same condensation slot are equivalent.
+    // Pick the canonical (smallest OutlivesClass under StableOrd),
+    // map the rest → canonical.
+    let mut canon_map: FxHashMap<ty::Instance<'tcx>, ty::Instance<'tcx>> = FxHashMap::default();
+
+    seed_canon_map_from_condensation(tcx, &all_delayed, &mut canon_map);
+
+    // Group Instances by depth level for bottom-up processing.
+    // BTreeMap gives us ascending depth order (leaves first).
+    let mut depth_groups: BTreeMap<usize, Vec<usize>> = BTreeMap::new();
+    for i in 0..all_delayed.len() {
+        depth_groups.entry(depth[i]).or_default().push(i);
+    }
+
+    if dump {
+        eprintln!("=== Trait-Cast Canonicalization ===");
+        eprintln!("  Total delayed instances: {}", all_delayed.len());
+        eprintln!("  Depth levels: {}", depth_groups.len());
+    }
+
+    let mut new_mono_items: Vec<MonoItem<'tcx>> = Vec::new();
+
+    for (d, indices) in &depth_groups {
+        // Order indices deterministically for dump output by stable
+        // fingerprint of the Instance. This does not affect observable
+        // behavior; it only reorders the emission.
+        let dump_order: Vec<usize> = if dump {
+            let mut v = indices.clone();
+            tcx.with_stable_hashing_context(|mut hcx| {
+                v.sort_by_cached_key(|&idx| {
+                    use rustc_data_structures::fingerprint::Fingerprint;
+                    use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+                    let mut hasher = StableHasher::new();
+                    all_delayed[idx].instance.hash_stable(&mut hcx, &mut hasher);
+                    hasher.finish::<Fingerprint>()
+                });
+            });
+            eprintln!("  Depth {d}: {} instance(s)", indices.len());
+            eprintln!("    Phase 1 (patch):");
+            v
+        } else {
+            Vec::new()
+        };
+
+        // --- Phase 1: Patch and resolve all canonical Instances at this depth ---
+        //
+        // For canonical Instances (not in canon_map): clone base MIR,
+        // apply callee substitutions through canon_map, resolve
+        // intrinsic calls, and record the patched body.
+        //
+        // For non-canonical Instances (already in canon_map from
+        // leaf-level seeding or prior depth levels): skip patching,
+        // a trampoline body will be generated in Phase 3.
+        let mut patched_bodies: FxHashMap<ty::Instance<'tcx>, &'tcx rustc_middle::mir::Body<'tcx>> =
+            FxHashMap::default();
+
+        // Iteration order for the patching logic itself is irrelevant
+        // (the loop only mutates the per-instance `patched_bodies`
+        // entry). Iterate in `dump_order` when dumping so observers
+        // see a deterministic emission order, and in original order
+        // otherwise.
+        let patch_iter: &[usize] = if dump { &dump_order } else { &indices[..] };
+        for &idx in patch_iter {
+            let delayed = all_delayed[idx];
+            let instance = delayed.instance;
+            // Skip Instances already known to be non-canonical.
+            if canon_map.contains_key(&instance) {
+                continue;
+            }
+
+            let base = instance.strip_outlives(tcx);
+            let body = tcx.instance_mir(base.def);
+            let mut patched = body.clone();
+
+            if dump {
+                let instance_name = with_no_trimmed_paths!(instance.to_string());
+                eprintln!("      {instance_name}");
+                if delayed.callee_substitutions.is_empty() {
+                    eprintln!("        unchanged");
+                }
+            }
+
+            // Apply callee substitutions, resolving through canon_map.
+            for &(call_id, callee) in delayed.callee_substitutions {
+                let canonical_callee = canon_map.get(&callee).copied().unwrap_or(callee);
+                crate::collector::patch_call_terminator(
+                    &mut patched,
+                    call_id,
+                    canonical_callee,
+                    tcx,
+                );
+                if dump {
+                    let summary = crate::cast_sensitivity::format_call_id_summary(tcx, call_id);
+                    let canonical_name = with_no_trimmed_paths!(canonical_callee.to_string());
+                    eprintln!("        substitution: {summary} -> {canonical_name}");
+                }
+            }
+
+            // Resolve intrinsic calls in-place.
+            crate::resolved_bodies::patch_intrinsic_calls(&mut patched, tcx, instance, resolutions);
+
+            let patched = tcx.arena.alloc(patched);
+            patched_bodies.insert(instance, patched);
+        }
+
+        // --- Phase 2: Transitive deduplication at this depth ---
+        //
+        // Group patched Instances by (def_id, base_args). Within each
+        // group, check if all callee_substitutions resolve to the
+        // same canonical Instances. If so, pick one canonical
+        // (smallest OutlivesClass), map the rest → canonical.
+        let mut by_base: FxHashMap<(DefId, ty::GenericArgsRef<'tcx>), Vec<usize>> =
+            FxHashMap::default();
+        for &idx in indices {
+            let instance = all_delayed[idx].instance;
+            if canon_map.contains_key(&instance) {
+                continue;
+            }
+            let base = instance.strip_outlives(tcx);
+            by_base.entry((base.def_id(), base.args)).or_default().push(idx);
+        }
+
+        if dump {
+            eprintln!("    Phase 2 (dedup):");
+        }
+
+        #[allow(rustc::potential_query_instability)]
+        for (_key, group) in &by_base {
+            if group.len() <= 1 {
+                continue;
+            }
+
+            // Two Instances in this group are equivalent if their
+            // resolved callee sets (after canon_map lookup) are
+            // identical. Build a signature for each: the sorted
+            // list of (call_id, canonical_callee) pairs.
+            //
+            // DefId is !Ord and Instance is !Ord, so we sort via
+            // StableHasher fingerprints (the idiomatic rustc
+            // approach — see ToStableHashKey impls for DefId and
+            // Instance).
+            let mut sig_groups: FxIndexMap<
+                Vec<(&'tcx ty::List<(DefId, u32, ty::GenericArgsRef<'tcx>)>, ty::Instance<'tcx>)>,
+                Vec<usize>,
+            > = FxIndexMap::default();
+            for &idx in group {
+                let delayed = all_delayed[idx];
+                let mut sig: Vec<_> = delayed
+                    .callee_substitutions
+                    .iter()
+                    .map(|&(call_id, callee)| {
+                        let canonical_callee = canon_map.get(&callee).copied().unwrap_or(callee);
+                        (call_id, canonical_callee)
+                    })
+                    .collect();
+                tcx.with_stable_hashing_context(|mut hcx| {
+                    sig.sort_by_cached_key(|&(call_id, canonical)| {
+                        use rustc_data_structures::fingerprint::Fingerprint;
+                        use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+                        let mut hasher = StableHasher::new();
+                        call_id.hash_stable(&mut hcx, &mut hasher);
+                        canonical.hash_stable(&mut hcx, &mut hasher);
+                        hasher.finish::<Fingerprint>()
+                    });
+                });
+                sig_groups.entry(sig).or_default().push(idx);
+            }
+
+            for (_sig, equiv) in &sig_groups {
+                if equiv.len() <= 1 {
+                    continue;
+                }
+                // Pick canonical: smallest OutlivesClass under StableOrd.
+                let canonical_idx = *equiv
+                    .iter()
+                    .min_by_key(|&&idx| OutlivesClass::from_instance(all_delayed[idx].instance))
+                    .unwrap();
+                let canonical_instance = all_delayed[canonical_idx].instance;
+                if dump {
+                    let canonical_name = with_no_trimmed_paths!(canonical_instance.to_string());
+                    eprintln!("      signature group (size={}):", equiv.len());
+                    eprintln!("        canonical: {canonical_name}");
+                    // Sort redirected entries deterministically.
+                    let mut redirected: Vec<ty::Instance<'tcx>> = equiv
+                        .iter()
+                        .filter(|&&idx| idx != canonical_idx)
+                        .map(|&idx| all_delayed[idx].instance)
+                        .collect();
+                    tcx.with_stable_hashing_context(|mut hcx| {
+                        redirected.sort_by_cached_key(|inst| {
+                            use rustc_data_structures::fingerprint::Fingerprint;
+                            use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
+                            let mut hasher = StableHasher::new();
+                            inst.hash_stable(&mut hcx, &mut hasher);
+                            hasher.finish::<Fingerprint>()
+                        });
+                    });
+                    for inst in &redirected {
+                        let name = with_no_trimmed_paths!(inst.to_string());
+                        eprintln!("        redirected: {name}");
+                    }
+                }
+                for &idx in equiv {
+                    if idx == canonical_idx {
+                        continue;
+                    }
+                    let instance = all_delayed[idx].instance;
+                    let prev = canon_map.insert(instance, canonical_instance);
+                    debug_assert!(
+                        prev.is_none(),
+                        "Instance in multiple signature groups: {instance:?}"
+                    );
+                    patched_bodies.remove(&instance);
+                }
+            }
+        }
+
+        // --- Phase 3: Feed bodies and insert MonoItems ---
+        let mut fed = 0usize;
+        let mut skipped = 0usize;
+        let mut inserted = 0usize;
+        for &idx in indices {
+            let instance = all_delayed[idx].instance;
+
+            if canon_map.contains_key(&instance) {
+                // Non-canonical: callers have been rewritten during the
+                // patch pass to call the canonical Instance directly.
+                // This Instance is unreachable — skip it.
+                skipped += 1;
+                continue;
+            } else if let Some(body) = patched_bodies.get(&instance) {
+                // Canonical (or non-condensed): feed the resolved body.
+                tcx.feed_codegen_mir(instance, body);
+                fed += 1;
+            }
+            new_mono_items.push(MonoItem::Fn(instance));
+            inserted += 1;
+        }
+
+        if dump {
+            eprintln!(
+                "    Phase 3 (emit):\n      fed: {fed}, skipped (non-canonical): {skipped}, \
+                 newly-inserted mono items: {inserted}"
+            );
+        }
+    }
+
+    if dump {
+        eprintln!("  Canon map summary:");
+        eprintln!("    total redirections: {}", canon_map.len());
+    }
+
+    // Insert the resolved MonoItem::Fn entries into mono_items.
+    if !new_mono_items.is_empty() {
+        let items = mono_items.to_mut();
+        items.extend(new_mono_items);
+    }
+}
+
+/// Seed `canon_map` from leaf-level condensation groups. Groups
+/// directly-sensitive Instances by `(def_id, base_args)`, then checks
+/// whether their `OutlivesClass`es share the same condensation slot
+/// in `trait_cast_layout`. Instances that share a slot are equivalent;
+/// the smallest `OutlivesClass` under `StableOrd` is canonical.
+fn seed_canon_map_from_condensation<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    all_delayed: &[&rustc_middle::mono::DelayedInstance<'tcx>],
+    canon_map: &mut FxHashMap<ty::Instance<'tcx>, ty::Instance<'tcx>>,
+) {
+    use rustc_data_structures::stable_hasher::StableCompare;
+    use rustc_middle::ty::trait_cast::{FingerprintedTy, IntrinsicSiteKind, OutlivesClass};
+    use smallvec::SmallVec;
+
+    // Group leaf Instances (empty callee_substitutions) by (def_id, base_args).
+    let mut by_base: FxHashMap<
+        (DefId, ty::GenericArgsRef<'tcx>),
+        Vec<&rustc_middle::mono::DelayedInstance<'tcx>>,
+    > = FxHashMap::default();
+    for delayed in all_delayed {
+        if !delayed.callee_substitutions.is_empty() {
+            continue; // Only seed from leaves.
+        }
+        let base = delayed.instance.strip_outlives(tcx);
+        by_base.entry((base.def_id(), base.args)).or_default().push(delayed);
+    }
+
+    #[allow(rustc::potential_query_instability)]
+    for (_key, group) in &by_base {
+        if group.len() <= 1 {
+            continue;
+        }
+
+        // Signature: sorted list of (super_trait, sub_trait, slot) for all
+        // Index intrinsics in this caller. Two callers with identical
+        // signatures produce identical patched bodies.
+        //
+        // Uses FingerprintedTy for deterministic sorting (Ty is !Ord).
+        // Eq/Hash on FingerprintedTy delegate to Ty (interned pointer),
+        // so HashMap grouping works identically.
+        let mut by_signature: FxHashMap<
+            SmallVec<[(FingerprintedTy<'tcx>, FingerprintedTy<'tcx>, usize); 1]>,
+            Vec<ty::Instance<'tcx>>,
+        > = FxHashMap::default();
+
+        for delayed in group {
+            let instance = delayed.instance;
+            let mut sig: SmallVec<[(FingerprintedTy<'tcx>, FingerprintedTy<'tcx>, usize); 1]> =
+                SmallVec::new();
+            for &intrinsic_instance in delayed.intrinsic_callees {
+                let site =
+                    crate::trait_cast_requests::classify_intrinsic_site(tcx, intrinsic_instance);
+                if let IntrinsicSiteKind::Index { super_trait, sub_trait } = site {
+                    let outlives_class = trait_metadata_index_outlives_class(
+                        tcx,
+                        super_trait,
+                        sub_trait,
+                        intrinsic_instance,
+                    );
+                    let layout = tcx.trait_cast_layout(super_trait);
+                    if let Some(&slot) = layout.index_map.get(&(sub_trait, outlives_class)) {
+                        sig.push((
+                            FingerprintedTy::new(tcx, super_trait),
+                            FingerprintedTy::new(tcx, sub_trait),
+                            slot,
+                        ));
+                    }
+                }
+            }
+            sig.sort_by(|a, b| {
+                a.2.cmp(&b.2).then_with(|| a.0.stable_cmp(&b.0)).then_with(|| a.1.stable_cmp(&b.1))
+            });
+            by_signature.entry(sig).or_default().push(instance);
+        }
+
+        // Within each slot group, pick canonical and map the rest.
+        //
+        // Iteration order of `by_signature` (FxHashMap) is non-deterministic,
+        // but this is safe: signature groups *partition* the Instance space —
+        // each Instance appears in exactly one bucket (one signature per
+        // delayed Instance, one push per delayed Instance). Because groups
+        // are disjoint, `canon_map.insert` never overwrites an entry written
+        // by a different group, so the final map contents are identical
+        // regardless of iteration order. Within each group, `min_by_key`
+        // over `OutlivesClass` is deterministic because the `instances` Vec
+        // inherits insertion order from `all_delayed` (a slice), and
+        // `min_by_key` returns the first minimum on ties.
+        #[allow(rustc::potential_query_instability)]
+        for (_, instances) in &by_signature {
+            if instances.len() <= 1 {
+                continue;
+            }
+            let canonical =
+                *instances.iter().min_by_key(|inst| OutlivesClass::from_instance(**inst)).unwrap();
+            for &inst in instances {
+                if inst == canonical {
+                    continue;
+                }
+                let prev = canon_map.insert(inst, canonical);
+                debug_assert!(prev.is_none(), "Instance in multiple signature groups: {inst:?}");
+            }
+        }
+    }
+}
+
+/// Query provider: collects mono items for the local crate, including
+/// sensitivity analysis and augmentation, but does NOT perform global
+/// trait-cast resolution or partitioning.
+fn collect_local_mono_items(tcx: TyCtxt<'_>, (): ()) -> LocalMonoItemCollection<'_> {
     let collection_strategy = if tcx.sess.link_dead_code() {
         MonoItemCollectionStrategy::Eager
     } else {
         MonoItemCollectionStrategy::Lazy
     };
-    let (items, usage_map) = collector::collect_crate_mono_items(tcx, collection_strategy);
-    // Perform checks that need to operate on the entire mono item graph
+    let collection_result = collector::collect_crate_mono_items(tcx, collection_strategy);
+    let items = collection_result.mono_items;
+    let usage_map = collection_result.usage_map;
+
+    // Perform checks that need to operate on the entire mono item graph.
     target_specific_checks(tcx, &items, &usage_map);

     // If there was an error during collection (e.g. from one of the constants we evaluated),
@@ -1144,10 +1866,27 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio
     // (codegen relies on this and ICEs will happen if this is violated.)
tcx.dcx().abort_if_errors(); + LocalMonoItemCollection { + mono_items: tcx.arena.alloc_from_iter(items), + usage_map: tcx.arena.alloc(usage_map), + delayed_codegen: collection_result.delayed_codegen, + sensitivity_map: collection_result.sensitivity_map, + } +} + +fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitions<'_> { + let collection = tcx.collect_local_mono_items(()); + let mut items: Cow<'_, [MonoItem<'_>]> = Cow::Borrowed(collection.mono_items); + let usage_map = collection.usage_map; + + // Global phase: resolve trait-cast delayed codegen requests into + // MonoItem::Fn entries before partitioning distributes items. + tcx.sess.time("resolve_trait_cast_globals", || resolve_trait_cast_globals(tcx, &mut items)); + let (codegen_units, _) = tcx.sess.time("partition_and_assert_distinct_symbols", || { par_join( || { - let mut codegen_units = partition(tcx, items.iter().copied(), &usage_map); + let mut codegen_units = partition(tcx, items.iter().copied(), usage_map); codegen_units[0].make_primary(); &*tcx.arena.alloc_from_iter(codegen_units) }, @@ -1183,6 +1922,9 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio tcx.dcx().emit_fatal(CouldntDumpMonoStats { error: err.to_string() }); } + dump_trait_graph(tcx); + print_trait_cast_stats(tcx); + if tcx.sess.opts.unstable_opts.print_mono_items { let mut item_to_cgus: UnordMap<_, Vec<_>> = Default::default(); @@ -1232,7 +1974,12 @@ fn collect_and_partition_mono_items(tcx: TyCtxt<'_>, (): ()) -> MonoItemPartitio } } - MonoItemPartitions { all_mono_items: tcx.arena.alloc(mono_items), codegen_units } + MonoItemPartitions { + all_mono_items: tcx.arena.alloc(mono_items), + codegen_units, + delayed_codegen: collection.delayed_codegen, + sensitivity_map: collection.sensitivity_map, + } } /// Outputs stats about instantiation counts and estimated size, per `MonoItem`'s @@ -1312,9 +2059,275 @@ fn dump_mono_items_stats<'tcx>( Ok(()) } +/// Dump trait graph info for 
root supertraits matching `-Z dump-trait-graph`.
+fn dump_trait_graph(tcx: TyCtxt<'_>) {
+    // Flag unset → dumping disabled; bail before forcing any queries.
+    let Some(ref filter) = tcx.sess.opts.unstable_opts.dump_trait_graph else {
+        return;
+    };
+
+    let requests = tcx.gather_trait_cast_requests(());
+    if requests.is_empty() {
+        return;
+    }
+
+    use rustc_middle::ty::trait_cast::FingerprintedTy;
+
+    // Sort roots by stable fingerprint so the dump order is deterministic.
+    let roots: Vec<_> = requests
+        .root_traits()
+        .into_items()
+        .map(|ty| FingerprintedTy::new(tcx, ty))
+        .into_sorted_stable_ord();
+
+    for fp_root in &roots {
+        let root = fp_root.ty();
+        let root_str = with_no_trimmed_paths!(root.to_string());
+        // "all" dumps every root; otherwise substring-match the root path.
+        if filter != "all" && !root_str.contains(filter.as_str()) {
+            continue;
+        }
+
+        let graph = tcx.trait_cast_graph(root);
+        let layout = tcx.trait_cast_layout(root);
+
+        eprintln!("=== Trait Graph: {root_str} ===");
+
+        // Sub-traits + outlives classes.
+        let sub_traits = graph
+            .sub_traits
+            .items()
+            .map(|(k, v)| (*k, v))
+            .into_sorted_stable_ord_by_key(|item| &item.0);
+        eprintln!("  Sub-traits ({}):", sub_traits.len());
+        for (fp_ty, info) in &sub_traits {
+            let classes: Vec<_> = info.outlives_classes.items().copied().into_sorted_stable_ord();
+            let sub_str = with_no_trimmed_paths!(fp_ty.ty().to_string());
+            eprintln!("    {sub_str} — {} outlives class(es)", classes.len());
+            for (i, cls) in classes.iter().enumerate() {
+                let pairs: Vec<String> = cls.iter().map(|(l, s)| format!("('{l}: '{s})")).collect();
+                let pairs_str =
+                    if pairs.is_empty() { "empty".to_string() } else { pairs.join(", ") };
+                eprintln!("      [{i}] {{{pairs_str}}}");
+            }
+        }
+
+        // Concrete types.
+        let concretes: Vec<_> = graph.concrete_types.items().copied().into_sorted_stable_ord();
+        eprintln!("  Concrete types ({}):", concretes.len());
+        for ct in &concretes {
+            let ct_str = with_no_trimmed_paths!(ct.ty().to_string());
+            eprintln!("    {ct_str}");
+        }
+
+        // Table layout.
+        eprintln!("  Table layout: {} slot(s)", layout.table_length);
+        for (idx, si) in layout.slot_info.iter().enumerate() {
+            let pairs: Vec<String> =
+                si.outlives_class.iter().map(|(l, s)| format!("('{l}: '{s})")).collect();
+            let pairs_str = if pairs.is_empty() { "empty".to_string() } else { pairs.join(", ") };
+            let sub_str = with_no_trimmed_paths!(si.sub_trait.to_string());
+            eprintln!("    slot[{idx}]: sub={sub_str}, bvs={}, class={{{pairs_str}}}", si.num_bvs);
+        }
+
+        // Condensation summary (only when classes were collapsed).
+        for (fp_ty, info) in &sub_traits {
+            let raw_classes = info.outlives_classes.len();
+            let slots = layout.slot_info.iter().filter(|si| si.sub_trait == **fp_ty).count();
+            if raw_classes != slots {
+                let sub_str = with_no_trimmed_paths!(fp_ty.ty().to_string());
+                eprintln!("    Condensation: {sub_str} — {raw_classes} class(es) -> {slots} slot(s)");
+            }
+        }
+
+        // Admissibility per (concrete_type, sub_trait).
+        {
+            use crate::trait_graph::resolve_dyn_satisfaction;
+            let mut any = false;
+            for ct in &concretes {
+                for (fp_ty, _) in &sub_traits {
+                    if let Some(impl_def_id) = resolve_dyn_satisfaction(tcx, **ct, **fp_ty) {
+                        if !any {
+                            eprintln!("  Admissibility:");
+                            any = true;
+                        }
+                        let ua = tcx.impl_universally_admissible(impl_def_id);
+                        let ct_str = with_no_trimmed_paths!(ct.ty().to_string());
+                        let sub_str = with_no_trimmed_paths!(fp_ty.ty().to_string());
+                        let impl_str = tcx.def_path_str(impl_def_id);
+                        eprintln!(
+                            "    {ct_str} : {sub_str} — impl {impl_str} \
+                             (univ_admissible={ua})"
+                        );
+                    }
+                }
+            }
+        }
+
+        eprintln!();
+    }
+}
+
+/// Print summary statistics for the trait-cast monomorphization pipeline to
+/// stderr, gated on `-Z print-trait-cast-stats`. Emits a single compact block
+/// derived from query results already computed by the partitioning pass, so
+/// this is effectively free when the flag is off.
+fn print_trait_cast_stats(tcx: TyCtxt<'_>) {
+    if !tcx.sess.opts.unstable_opts.print_trait_cast_stats {
+        return;
+    }
+
+    // Gather all delayed codegen entries across crates (local crate
+    // first, then upstream), deduplicating by `Instance` (mirrors the
+    // dedup pattern in `cascade_canonicalize`).
+    let mut seen: FxHashSet<ty::Instance<'_>> = FxHashSet::default();
+    let mut delayed_total = 0usize;
+    let mut augmented = 0usize;
+    let mut intrinsic_sites = 0usize;
+    for cnum in std::iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) {
+        for delayed in tcx.delayed_codegen_requests(cnum) {
+            if !seen.insert(delayed.instance) {
+                continue;
+            }
+            delayed_total += 1;
+            if delayed.instance.has_outlives_entries() {
+                augmented += 1;
+            }
+            intrinsic_sites += delayed.intrinsic_callees.len();
+        }
+    }
+
+    let requests = tcx.gather_trait_cast_requests(());
+    let roots = requests.root_traits();
+    let root_count = roots.len();
+
+    // Iteration order over an `UnordSet` doesn't matter for a sum.
+    let total_slots: usize =
+        roots.items().map(|root| tcx.trait_cast_layout(*root).table_length).sum();
+
+    eprintln!("trait-cast stats:");
+    eprintln!("  delayed codegen entries: {delayed_total}");
+    eprintln!(
+        "  augmented instances: {augmented} \
+         (instances with outlives entries among delayed)"
+    );
+    eprintln!(
+        "  trait-cast intrinsic sites: {intrinsic_sites} \
+         (sum over delayed instances)"
+    );
+    eprintln!(
+        "  root supertraits: {root_count} \
+         (from gather_trait_cast_requests.root_traits())"
+    );
+    eprintln!(
+        "  total table slots: {total_slots} \
+         (sum of trait_cast_layout(root).table_length over roots)"
+    );
+}
+
+/// Query provider for `is_transitively_delayed_instance`.
+///
+/// Compares on the strip-outlives form.
The mono collector's
+/// `augment_sensitive_subgraphs` pushes *augmented* instances (those carrying
+/// the `OUTLIVES_SENTINEL` or real outlives entries) into `delayed_codegen`,
+/// while MIR call sites — including the ones codegen re-mangles on a cache
+/// miss in `get_fn` — may reach this query with the pre-augmentation base
+/// Instance. The v0 mangler's impl-path does not include Outlives args in
+/// the emitted symbol, so augmented and base share a mangled name when the
+/// instantiating-crate suffix is suppressed; for the suffix-stripping
+/// mangler gate to apply uniformly, both forms must report as delayed.
+///
+/// Membership is a single O(1) lookup against
+/// `delayed_codegen_stripped_set(())`, which flattens every crate's
+/// delayed-codegen set into one precomputed `UnordSet`.
+fn is_transitively_delayed_instance_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    instance: ty::Instance<'tcx>,
+) -> bool {
+    // Metadata-only builds (rustdoc, `--emit=metadata`) don't run mono
+    // collection, so `delayed_codegen_requests(LOCAL_CRATE)` — which
+    // forces `collect_local_mono_items` — is both meaningless and
+    // impossible to satisfy (collection demands upstream `optimized_mir`
+    // that the metadata-only pipeline won't have loaded). Return
+    // `false` conservatively: no local crate can register delayed
+    // instances without a codegen phase, and the mangler's suffix-
+    // stripping gate is a no-op for those pathways anyway (the emitted
+    // metadata records DefId+args, not the pre-mangled name).
+    if !tcx.sess.opts.output_types.should_codegen() {
+        return false;
+    }
+    let stripped = instance.strip_outlives(tcx);
+    tcx.delayed_codegen_stripped_set(()).contains(&stripped)
+}
+
+/// Query provider for `delayed_codegen_stripped_set`: flattens the
+/// strip-outlives forms of every crate's delayed-codegen instances into
+/// one set, so membership checks are a single O(1) lookup.
+fn delayed_codegen_stripped_set_provider<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    _: (),
+) -> UnordSet<ty::Instance<'tcx>> {
+    use rustc_hir::def_id::LOCAL_CRATE;
+    // An instance's `def_id().krate` is the crate that *defines* the
+    // generic, not where it's monomorphized — e.g. the blanket-impl
+    // `>::derived_metadata_table` mono for
+    // `T = cross_crate_lib::LibTypeA` has `def_id().krate == core` yet
+    // is classified delayed when upstream `cross_crate_lib` collects
+    // it. Flatten every crate's set so callers don't have to scan.
+    let mut set = UnordSet::default();
+    for cnum in std::iter::once(LOCAL_CRATE).chain(tcx.crates(()).iter().copied()) {
+        for d in tcx.delayed_codegen_requests(cnum) {
+            set.insert(d.instance.strip_outlives(tcx));
+        }
+    }
+    set
+}
+
 pub(crate) fn provide(providers: &mut Providers) {
+    providers.queries.collect_local_mono_items = collect_local_mono_items;
     providers.queries.collect_and_partition_mono_items = collect_and_partition_mono_items;
+    // These project from collect_local_mono_items (NOT collect_and_partition_mono_items)
+    // to avoid a query cycle: collect_and_partition_mono_items → gather_trait_cast_requests
+    // → delayed_codegen_requests → collect_and_partition_mono_items.
+    providers.queries.delayed_codegen_requests = |tcx, _key: rustc_middle::query::LocalCrate| {
+        tcx.collect_local_mono_items(()).delayed_codegen
+    };
+
+    providers.queries.crate_cast_relevant_lifetimes =
+        |tcx, _key: rustc_middle::query::LocalCrate| {
+            let collection = tcx.collect_local_mono_items(());
+            collection.sensitivity_map
+        };
+
+    providers.queries.cast_relevant_lifetimes = |tcx, instance| {
+        let map = tcx.crate_cast_relevant_lifetimes(instance.def_id().krate);
+        map.get(&instance)
+    };
+
+    // Local provider: project the LocalDefId set from delayed_codegen.
+    // Consumed by the rmeta encoder's `should_encode_mir` gate so that
+    // transitively-delayed non-generic fns (e.g. user fns whose only
+    // intrinsic reach is via post-monomorphization inlining of the
+    // `core::TraitCast` trampolines) ship their MIR downstream.
+ providers.queries.local_def_ids_backing_delayed_instances = |tcx, _: ()| { + let delayed = tcx.collect_local_mono_items(()).delayed_codegen; + let mut set = rustc_hir::def_id::LocalDefIdSet::default(); + for d in delayed.iter() { + if let Some(local_def_id) = d.instance.def_id().as_local() { + set.insert(local_def_id); + } + } + tcx.arena.alloc(set) + }; + + providers.queries.is_transitively_delayed_instance = is_transitively_delayed_instance_provider; + providers.queries.delayed_codegen_stripped_set = delayed_codegen_stripped_set_provider; + providers.queries.is_codegened_item = |tcx, def_id| tcx.collect_and_partition_mono_items(()).all_mono_items.contains(&def_id); diff --git a/compiler/rustc_monomorphize/src/resolved_bodies.rs b/compiler/rustc_monomorphize/src/resolved_bodies.rs new file mode 100644 index 0000000000000..f8292c25354a0 --- /dev/null +++ b/compiler/rustc_monomorphize/src/resolved_bodies.rs @@ -0,0 +1,389 @@ +//! Synthetic MIR body construction for resolved trait-cast intrinsics. + +use rustc_middle::mir::interpret::{AllocId, Pointer, Scalar}; +use rustc_middle::mir::{ + BasicBlock, Body, Const, ConstOperand, ConstValue, Operand, Place, Rvalue, Statement, + StatementKind, TerminatorKind, +}; +use rustc_middle::ty::trait_cast::{IntrinsicResolutions, OutlivesClass}; +use rustc_middle::ty::{self, GenericArgsRef, Instance, Ty, TyCtxt, TypeFoldable}; +use rustc_span::{DUMMY_SP, sym}; + +use crate::cast_sensitivity::{CallerOutlivesEnv, compose_all_through_chain}; +use crate::erasure_safe::{ + region_slots_of_ty, remap_trait_metadata_outlives_entries_from_origin_positions, +}; +use crate::trait_graph::derive_where_clause_outlives_class; + +// ── Rvalue construction helpers ───────────────────────────────────────────── + +/// Build a constant `Rvalue` for a resolved `trait_metadata_index` intrinsic. 
+///
+/// Returns `Rvalue::Aggregate(Tuple, [&'static u8, usize])` where the first
+/// field is a reference to the global crate ID allocation and the second is
+/// the resolved table index.
+fn build_index_rvalue<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    global_crate_id: AllocId,
+    index: usize,
+) -> Rvalue<'tcx> {
+    // Field 0: reference into the crate-ID allocation; field 1: the index.
+    let fields =
+        [make_static_ref_operand(tcx, global_crate_id), make_usize_operand(tcx, index)];
+    Rvalue::Aggregate(
+        Box::new(rustc_middle::mir::AggregateKind::Tuple),
+        fields.into_iter().collect(),
+    )
+}
+
+/// Build a constant `Rvalue` for a resolved `trait_metadata_table` intrinsic.
+///
+/// Returns `Rvalue::Aggregate(Tuple, [&'static u8, NonNull<...>])` where the
+/// first field is a reference to the global crate ID allocation and the second
+/// is a `NonNull` pointer to the metadata table static.
+///
+/// `NonNull` is a `#[repr(transparent)]` wrapper around `*const T`, so at
+/// the MIR level it is represented as a raw pointer scalar.
+fn build_table_rvalue<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    global_crate_id: AllocId,
+    table_alloc: AllocId,
+    return_ty: Ty<'tcx>,
+) -> Rvalue<'tcx> {
+    let ref_operand = make_static_ref_operand(tcx, global_crate_id);
+
+    // The NonNull field type is the second field of the return tuple.
+    let nonnull_ty = extract_tuple_field_ty(return_ty, 1);
+    let table_ptr_scalar = Scalar::from_pointer(Pointer::from(table_alloc), &tcx);
+    let nonnull_operand = Operand::Constant(Box::new(ConstOperand {
+        span: DUMMY_SP,
+        user_ty: None,
+        const_: Const::Val(ConstValue::Scalar(table_ptr_scalar), nonnull_ty),
+    }));
+
+    Rvalue::Aggregate(
+        Box::new(rustc_middle::mir::AggregateKind::Tuple),
+        [ref_operand, nonnull_operand].into_iter().collect(),
+    )
+}
+
+/// Build a constant `Rvalue` for a resolved `trait_metadata_table_len` intrinsic.
+///
+/// Returns `Rvalue::Use(Operand::Constant(usize))`.
+fn build_table_len_rvalue<'tcx>(tcx: TyCtxt<'tcx>, len: usize) -> Rvalue<'tcx> {
+    // A plain constant use of the resolved length.
+    Rvalue::Use(make_usize_operand(tcx, len))
+}
+
+/// Build a constant `Rvalue` for a resolved `trait_cast_is_lifetime_erasure_safe`
+/// intrinsic.
+///
+/// Returns `Rvalue::Use(Operand::Constant(bool))`.
+fn build_erasure_safe_rvalue<'tcx>(tcx: TyCtxt<'tcx>, safe: bool) -> Rvalue<'tcx> {
+    let const_bool = ConstOperand {
+        span: DUMMY_SP,
+        user_ty: None,
+        const_: Const::from_bool(tcx, safe),
+    };
+    Rvalue::Use(Operand::Constant(Box::new(const_bool)))
+}
+
+// ── Operand construction primitives ─────────────────────────────────────────
+
+/// Create a constant operand holding a `&'static u8` reference to the given
+/// allocation.
+fn make_static_ref_operand<'tcx>(tcx: TyCtxt<'tcx>, alloc_id: AllocId) -> Operand<'tcx> {
+    let ref_ty = Ty::new_imm_ref(tcx, tcx.lifetimes.re_static, tcx.types.u8);
+    let scalar = Scalar::from_pointer(Pointer::from(alloc_id), &tcx);
+    Operand::Constant(Box::new(ConstOperand {
+        span: DUMMY_SP,
+        user_ty: None,
+        const_: Const::Val(ConstValue::Scalar(scalar), ref_ty),
+    }))
+}
+
+/// Create a constant operand holding a `usize` value.
+fn make_usize_operand<'tcx>(tcx: TyCtxt<'tcx>, value: usize) -> Operand<'tcx> {
+    // `from_target_usize` sizes the scalar for the *target*, not the host.
+    let const_val = ConstValue::from_target_usize(value as u64, &tcx);
+    Operand::Constant(Box::new(ConstOperand {
+        span: DUMMY_SP,
+        user_ty: None,
+        const_: Const::Val(const_val, tcx.types.usize),
+    }))
+}
+
+/// Extract the type of the `idx`-th field from a tuple type.
+fn extract_tuple_field_ty<'tcx>(tuple_ty: Ty<'tcx>, idx: usize) -> Ty<'tcx> {
+    // Anything other than a tuple here is a compiler bug, not user error.
+    let ty::Tuple(fields) = tuple_ty.kind() else {
+        rustc_middle::bug!("extract_tuple_field_ty: expected tuple type, got {tuple_ty:?}");
+    };
+    fields[idx]
+}
+
+// ── Monomorphization ────────────────────────────────────────────────────────
+
+/// Monomorphize a value through the delayed instance's substitution.
+/// +/// The MIR body obtained from `instance_mir` is generic — types in +/// func operands are expressed in terms of the defining function's +/// generic parameters. This substitutes the delayed instance's args +/// to produce fully concrete types for `IntrinsicResolutions` lookups. +fn monomorphize<'tcx, T>(tcx: TyCtxt<'tcx>, delayed_instance: Instance<'tcx>, value: T) -> T +where + T: TypeFoldable<TyCtxt<'tcx>>, +{ + delayed_instance.instantiate_mir_and_normalize_erasing_regions( + tcx, + ty::TypingEnv::fully_monomorphized(), + ty::EarlyBinder::bind(value), + ) +} + +fn identity_outlives_for_origin_positions<'tcx>( + tcx: TyCtxt<'tcx>, + summary: &'tcx rustc_middle::mir::BorrowckRegionSummary, + mapping: &rustc_middle::mir::CallSiteRegionMapping, + origin_positions: &[Option<usize>], +) -> &'tcx [ty::GenericArg<'tcx>] { + let caller_env = CallerOutlivesEnv::from_region_summary_walk_pos(tcx, summary, mapping); + let mut positions: Vec<usize> = origin_positions.iter().flatten().copied().collect(); + positions.sort_unstable(); + positions.dedup(); + + let mut entries = Vec::new(); + for &longer in &positions { + for &shorter in &positions { + if longer != shorter && caller_env.outlives(longer, shorter) { + entries.push(tcx.mk_outlives_arg(longer, shorter).into()); + } + } + } + tcx.arena.alloc_from_iter(entries) +} + +// ── Table-dependent intrinsic resolution ──────────────────────────────────── + +/// Given a Call terminator's `func` operand, check whether it is a +/// table-dependent trait-cast intrinsic (`trait_metadata_index`, +/// `trait_metadata_table`, `trait_metadata_table_len`) and, if so, +/// return the resolved constant as an `Rvalue`. +/// +/// For `trait_metadata_index`, the `OutlivesClass` is computed via +/// `augmented_outlives_for_call` — the func operand's generic args do +/// NOT carry the caller's outlives environment; that lives on the +/// `delayed_instance` and must be projected through the `call_id` chain. 
+/// +/// `trait_cast_is_lifetime_erasure_safe` is **not** handled here — it +/// is resolved per call site via [`resolve_erasure_safe_callee`]. +fn resolve_table_callee<'tcx>( + tcx: TyCtxt<'tcx>, + resolutions: &IntrinsicResolutions<'tcx>, + delayed_instance: Instance<'tcx>, + call_id: &'tcx ty::List<(rustc_hir::def_id::DefId, u32, GenericArgsRef<'tcx>)>, + func: &Operand<'tcx>, +) -> Option<Rvalue<'tcx>> { + let (def_id, generic_args) = func.const_fn_def()?; + let intrinsic = tcx.intrinsic(def_id)?; + + // The MIR body is generic — generic_args are in the defining + // function's parameter space. Monomorphize to get concrete types + // that match the IntrinsicResolutions keys. + let mono_args: GenericArgsRef<'tcx> = monomorphize(tcx, delayed_instance, generic_args); + + match intrinsic.name { + s if s == sym::trait_metadata_index => { + let super_trait = mono_args[0].expect_ty(); + let sub_trait = mono_args[1].expect_ty(); + let callee_instance = Instance::expect_resolve( + tcx, + ty::TypingEnv::fully_monomorphized(), + def_id, + mono_args, + DUMMY_SP, + ); + let mut call_site_outlives = + tcx.augmented_outlives_for_call((delayed_instance, call_id, callee_instance)); + let total_transport_slots = + region_slots_of_ty(super_trait) + region_slots_of_ty(sub_trait); + let origin_positions = + compose_all_through_chain(tcx, delayed_instance, call_id, total_transport_slots); + // (Fallback handled below via where-clause class.) + if !call_site_outlives.is_empty() { + call_site_outlives = remap_trait_metadata_outlives_entries_from_origin_positions( + tcx, + super_trait, + sub_trait, + &origin_positions, + call_site_outlives, + ); + } + let outlives_class = OutlivesClass::from_entries(call_site_outlives); + let empty_class = OutlivesClass::from_entries(&[]); + // Prefer the where-clause-derived class over the empty + // class. The empty class slot may have a null table entry + // for traits with where-clause constraints (the empty + // class can't prove the constraint). 
The where-clause + // class represents the minimum evidence the trait requires; + // the erasure safety check verifies the caller satisfies it. + let wc_class = derive_where_clause_outlives_class(tcx, sub_trait); + let index = resolutions + .indices + .get(&(sub_trait, outlives_class)) + .and_then(|idx| { + // If the CRL class matched directly, use it. + if !outlives_class.entries.is_empty() { + return Some(idx); + } + // Empty CRL class — prefer the where-clause class + // if available (its slot may be populated while the + // empty class slot is null). + wc_class + .as_ref() + .and_then(|wc| resolutions.indices.get(&(sub_trait, *wc))) + .or(Some(idx)) + }) + .or_else(|| resolutions.indices.get(&(sub_trait, empty_class))); + let &index = index?; + Some(build_index_rvalue(tcx, resolutions.global_crate_id, index)) + } + s if s == sym::trait_metadata_table => { + let super_trait = mono_args[0].expect_ty(); + let concrete_type = mono_args[1].expect_ty(); + let &table_alloc = resolutions.tables.get(&(super_trait, concrete_type))?; + let return_ty = monomorphize(tcx, delayed_instance, func.constant()?.const_.ty()); + let output_ty = return_ty.fn_sig(tcx).output().skip_binder(); + Some(build_table_rvalue(tcx, resolutions.global_crate_id, table_alloc, output_ty)) + } + s if s == sym::trait_metadata_table_len => { + let super_trait = mono_args[0].expect_ty(); + let &len = resolutions.table_lens.get(&super_trait)?; + Some(build_table_len_rvalue(tcx, len)) + } + _ => None, + } +} + +// ── Erasure-safe resolution ───────────────────────────────────────────────── + +/// Check if `func` is a `trait_cast_is_lifetime_erasure_safe` call; if so, +/// compute per-call-site outlives entries via the `augmented_outlives_for_call` +/// query and resolve via the `is_lifetime_erasure_safe` query. 
+/// +/// The `call_id` chain on the Call terminator identifies the inlining path, +/// which determines how the delayed Instance's outlives environment maps into +/// the intrinsic's walk-position space. +fn resolve_erasure_safe_callee<'tcx>( + tcx: TyCtxt<'tcx>, + delayed_instance: Instance<'tcx>, + call_id: &'tcx ty::List<( + rustc_hir::def_id::DefId, + u32, + rustc_middle::ty::GenericArgsRef<'tcx>, + )>, + func: &Operand<'tcx>, +) -> Option<Rvalue<'tcx>> { + let (def_id, generic_args) = func.const_fn_def()?; + let intrinsic = tcx.intrinsic(def_id)?; + if intrinsic.name != sym::trait_cast_is_lifetime_erasure_safe { + return None; + } + + // Monomorphize generic args to get concrete dyn types. + let mono_args: GenericArgsRef<'tcx> = monomorphize(tcx, delayed_instance, generic_args); + let super_trait = mono_args[0].expect_ty(); + let target_trait = mono_args[1].expect_ty(); + + // Compute outlives entries in transport/origin walk-position space by + // composing through the call_id chain. + let callee_instance = Instance::expect_resolve( + tcx, + ty::TypingEnv::fully_monomorphized(), + def_id, + mono_args, + DUMMY_SP, + ); + let mut call_site_outlives = + tcx.augmented_outlives_for_call((delayed_instance, call_id, callee_instance)); + let root_transport_slots = region_slots_of_ty(super_trait); + let total_transport_slots = root_transport_slots + region_slots_of_ty(target_trait); + let origin_positions = + compose_all_through_chain(tcx, delayed_instance, call_id, total_transport_slots); + + // Ground-level fallback: build outlives from borrowck region summary + // when the augmented query returned nothing and the caller is not + // already augmented. LocalOnly positions are naturally excluded — + // they don't appear in the SCC-based outlives env, so + // `identity_outlives_for_origin_positions` never emits them. 
+ if call_site_outlives.is_empty() && !delayed_instance.has_outlives_entries() { + let origin_def_id = call_id[0].0; + let origin_local_id = call_id[0].1; + let summary = tcx.borrowck_region_summary(origin_def_id); + if let Some(mapping) = summary.call_site_mappings.get(&origin_local_id) { + call_site_outlives = + identity_outlives_for_origin_positions(tcx, summary, mapping, &origin_positions); + } + } + + // Intern origin_positions for the query cache key. + let interned_origins: &'tcx [Option<usize>] = + tcx.arena.alloc_from_iter(origin_positions.iter().copied()); + + let safe = tcx.is_lifetime_erasure_safe(( + super_trait, + target_trait, + interned_origins, + call_site_outlives, + )); + Some(build_erasure_safe_rvalue(tcx, safe)) +} + +// ── Body patching ───────────────────────────────────────────────────────── + +/// Walk a MIR body's terminators and replace trait-cast intrinsic calls with +/// constant assignments. +/// +/// For each `Call` terminator whose callee matches a trait-cast intrinsic: +/// 1. The resolved constant `Rvalue` is computed (via +/// [`resolve_table_callee`] for table-dependent intrinsics, or +/// [`resolve_erasure_safe_callee`] for the erasure-safe intrinsic). +/// 2. The call is replaced with an `Assign` statement writing the constant +/// to the call's `destination`, followed by a `Goto` to the original +/// return target. +/// +/// This preserves unwind behavior trivially since the resolved assignment +/// cannot panic. +pub(crate) fn patch_intrinsic_calls<'tcx>( + body: &mut Body<'tcx>, + tcx: TyCtxt<'tcx>, + delayed_instance: Instance<'tcx>, + resolutions: &IntrinsicResolutions<'tcx>, +) { + // Two-pass: first collect patches (read-only), then apply them (mutate). + let mut patches: Vec<(BasicBlock, Place<'tcx>, Option<BasicBlock>, Rvalue<'tcx>)> = Vec::new(); + + for (bb, bb_data) in body.basic_blocks.iter_enumerated() { + if let TerminatorKind::Call { ref func, ref call_id, destination, target, .. 
} = + bb_data.terminator().kind + { + let resolved = resolve_table_callee(tcx, resolutions, delayed_instance, call_id, func) + .or_else(|| resolve_erasure_safe_callee(tcx, delayed_instance, call_id, func)); + if let Some(rvalue) = resolved { + patches.push((bb, destination, target, rvalue)); + } + } + } + + for (bb, destination, target, rvalue) in patches { + let bb_data = &mut body.basic_blocks_mut()[bb]; + let target_bb = target.expect("trait-cast intrinsic calls always have a return target"); + let source_info = bb_data.terminator().source_info; + + // Replace the call with: _dest = ; goto -> target; + bb_data.statements.push(Statement::new( + source_info, + StatementKind::Assign(Box::new((destination, rvalue))), + )); + bb_data.terminator_mut().kind = TerminatorKind::Goto { target: target_bb }; + } +} diff --git a/compiler/rustc_monomorphize/src/table_layout.rs b/compiler/rustc_monomorphize/src/table_layout.rs new file mode 100644 index 0000000000000..66edf86a6d2f3 --- /dev/null +++ b/compiler/rustc_monomorphize/src/table_layout.rs @@ -0,0 +1,587 @@ +use rustc_data_structures::fx::{FxHashMap, FxIndexMap}; +use rustc_data_structures::unord::UnordMap; +use rustc_hir::def_id::DefId; +use rustc_index::bit_set::{BitMatrix, BitRowRef, DenseBitSet}; +use rustc_middle::bug; +use rustc_middle::mir::Mutability; +use rustc_middle::mir::interpret::{AllocId, AllocInit, Allocation, Pointer, Scalar, alloc_range}; +use rustc_middle::ty::trait_cast::{ + FingerprintedTy, IntrinsicResolutions, OutlivesClass, SlotInfo, TableLayout, TraitCastRequests, +}; +use rustc_middle::ty::{self, RegionVid, Ty, TyCtxt, TypeVisitable}; + +use crate::erasure_safe::{ + collect_all_binder_vars, root_exposed_target_bvs, trait_metadata_index_outlives_class, +}; +use crate::trait_graph::{ + RegionPositionCollector, extract_dyn_bv_positions, extract_impl_trait_ref_regions, + region_to_bvs, resolve_dyn_satisfaction, +}; + +/// Assign table slot indices for all (sub_trait, outlives_class) pairs +/// in 
the trait cast graph, applying condensation to collapse classes +/// that admit identical sets of implementations. +/// +/// Sub-traits and concrete types are processed in a deterministic order +/// (see `FingerprintedTy`) to ensure reproducible table layouts across +/// compilations. +/// +/// **Fast path:** When every resolved impl (for participating concrete +/// types) is universally admissible (`impl_universally_admissible`), all +/// outlives classes for that sub-trait are equivalent and collapse to a +/// single slot — no per-class admissibility check needed. +/// +/// **Full condensation:** Otherwise, `condense_outlives_classes` builds +/// a `BitMatrix` of (class × concrete_type) admissibility and groups +/// classes with identical rows into condensation groups, each getting +/// one shared slot. +pub(crate) fn trait_cast_layout<'tcx>(tcx: TyCtxt<'tcx>, root: Ty<'tcx>) -> TableLayout<'tcx> { + assert!(root.is_known_rigid(), "trait cast root super-trait must be monomorphized: {root}"); + let graph = tcx.trait_cast_graph(root); + let mut index_map: UnordMap<(Ty<'tcx>, OutlivesClass<'tcx>), usize> = UnordMap::default(); + let mut slot_info: Vec<SlotInfo<'tcx>> = Vec::new(); + let mut next_index: usize = 0; + + // Deterministic iteration: sort sub-trait keys by fingerprint via + // FingerprintedTy's StableCompare impl. + let sub_trait_pairs = graph + .sub_traits + .items() + .map(|(k, v)| (*k, v)) + .into_sorted_stable_ord_by_key(|item| &item.0); + + // Materialize concrete types in deterministic order (shared across + // all sub-traits in this root). The column order in the condensation + // BitMatrix must be stable so that row patterns are reproducible. + let concrete_types_sorted: Vec<FingerprintedTy<'tcx>> = + graph.concrete_types.items().copied().into_sorted_stable_ord(); + + for (sub_trait_fp, info) in &sub_trait_pairs { + let sub_trait: Ty<'tcx> = **sub_trait_fp; + + if info.outlives_classes.is_empty() { + continue; + } + + let ty::Dynamic(dyn_data, ..) 
= sub_trait.kind() else { + bug!("trait_cast_layout: sub_trait {sub_trait:?} is not a dyn type"); + }; + let num_bvs = collect_all_binder_vars(tcx, dyn_data).total_count(); + + // --- Resolve impls for participating concrete types (shared) --- + // + // Needed by both the fast path and full condensation, so computed + // once up front. Only impls for concrete types that actually + // participate in the program's monomorphization set are considered + // — avoiding dep-graph pollution from `all_impls`. + let impl_cache: Vec<Option<DefId>> = concrete_types_sorted + .iter() + .map(|ty| resolve_dyn_satisfaction(tcx, **ty, sub_trait)) + .collect(); + + // --- Fast path: all participating impls universally admissible --- + // + // When every resolved impl has: (a) no concrete lifetimes in the + // trait ref, (b) no param aliasing, (c) no RegionOutlives + // where-clauses on trait lifetime params, and (d) no shared + // Self/trait params — all classes are equivalent and collapse to + // one slot. + let all_admissible = impl_cache + .iter() + .filter_map(|&r| r) + .all(|impl_def_id| tcx.impl_universally_admissible(impl_def_id)); + + // Materialize classes in StableOrd order for deterministic indices. + let classes: Vec<OutlivesClass<'tcx>> = + info.outlives_classes.items().copied().into_sorted_stable_ord(); + + if all_admissible { + let slot = next_index; + next_index += 1; + // All classes collapse to one slot. The representative is + // the first (minimum by StableOrd) — universal admissibility + // means the choice has no semantic effect, but the + // representative must be reproducible across runs. + let representative = classes[0]; + for &class in &classes { + index_map.insert((sub_trait, class), slot); + } + slot_info.push(SlotInfo { sub_trait, outlives_class: representative, num_bvs }); + continue; + } + + // --- Full condensation --- + // + // Compute per-class admissibility vectors over concrete_types, + // group classes with identical vectors, assign one slot per + // group. 
Reuses `impl_cache` from above. + let condensed_groups = + condense_outlives_classes(tcx, root, sub_trait, &classes, &impl_cache, num_bvs); + + for (group, repr_class) in condensed_groups { + let slot = next_index; + next_index += 1; + for class_idx in group.iter() { + let class = classes[class_idx as usize]; + index_map.insert((sub_trait, class), slot); + } + slot_info.push(SlotInfo { sub_trait, outlives_class: repr_class, num_bvs }); + } + } + + TableLayout { root, table_length: next_index, index_map, slot_info } +} + +/// Returns groups of class indices that are equivalent (produce +/// identical admissibility vectors across all concrete types). +/// Class indices are positions in the `classes` slice. +/// +/// **Determinism contract:** `classes` must be in `StableOrd` order +/// and `impl_cache` columns must follow the deterministic +/// `concrete_types_sorted` order. Groups are returned sorted by their +/// minimum class index, so the first group always contains class 0, +/// ensuring reproducible slot assignment across compilations. +/// +/// `impl_cache` is the pre-computed per-concrete-type impl resolution +/// from `trait_cast_layout` — shared with the fast path to avoid +/// redundant trait-solver queries. Its column order matches +/// `concrete_types_sorted`. +fn condense_outlives_classes<'tcx>( + tcx: TyCtxt<'tcx>, + root: Ty<'tcx>, + sub_trait: Ty<'tcx>, + classes: &[OutlivesClass<'tcx>], + impl_cache: &[Option<DefId>], + num_bvs: usize, +) -> Vec<(DenseBitSet<u32>, OutlivesClass<'tcx>)> { + let num_types = impl_cache.len(); + let num_classes = classes.len(); + + // Flat BitMatrix: rows = classes, columns = concrete types. + // One bit per (class, concrete_type) pair, set if the impl is + // admissible under that class. Single allocation. 
+ let mut matrix: BitMatrix<u32, u32> = BitMatrix::new(num_classes, num_types); + + let dim = num_bvs + 1; + for (class_idx, class) in classes.iter().enumerate() { + // Reachability matrix from the cached query — shared with + // the population query and the erasure-safe check. + let reachability = tcx.outlives_reachability((class.entries, dim)); + for (type_idx, maybe_impl) in impl_cache.iter().enumerate() { + if let Some(impl_def_id) = maybe_impl { + if impl_admissible_under_class( + tcx, + *impl_def_id, + root, + sub_trait, + &reachability, + num_bvs, + ) { + matrix.insert(class_idx as u32, type_idx as u32); + } + } + } + } + + // Group classes by identical admissibility rows. + // row_ref() returns a BitRowRef whose Eq/Hash masks out excess + // bits in the final word. + // + // Because `classes` is sorted by `StableOrd` and we iterate + // `0..num_classes`, the FxIndexMap insertion order is + // deterministic: each novel row pattern is first encountered at + // the smallest class_idx that produces it. + let mut groups: FxIndexMap<BitRowRef<'_>, DenseBitSet<u32>> = FxIndexMap::default(); + for class_idx in 0..num_classes { + groups + .entry(matrix.row_ref(class_idx as u32)) + .or_insert_with(|| DenseBitSet::new_empty(num_classes)) + .insert(class_idx as u32); + } + + // Return groups sorted by minimum class index within each group. + // Since `classes` is StableOrd-sorted, this is equivalent to + // sorting by the smallest `OutlivesClass` representative — making + // slot assignment deterministic. + // + // Each group is paired with its representative outlives class + // (smallest class index). The population query calls + // `outlives_reachability` with this class's entries to obtain + // the reachability matrix on demand (cached by the query system). 
+ let mut result: Vec<(DenseBitSet<u32>, OutlivesClass<'tcx>)> = groups + .into_values() + .map(|group| { + let repr_idx = group.iter().next().unwrap() as usize; + (group, classes[repr_idx]) + }) + .collect(); + result.sort_by_key(|(group, _)| group.iter().next().unwrap()); + result +} + +/// Check whether an impl is admissible under a specific outlives class +/// for a given dyn type (cast target). +/// +/// Algorithm: +/// 1. Extract bv indices from the dyn type's binder. +/// 2. Walk the impl's trait ref in parallel to build a param→bv mapping. +/// 2b. Self-anchored parameters require 'static-equivalence unless +/// their mapped bvs are root-exposed. +/// 3. If one impl param maps to multiple distinct bvs, those bvs must +/// be equivalent under the class. +/// 4. Explicit RegionOutlives where clauses must be implied by the class. +/// +/// Uses the pre-computed `reachability` matrix (from `outlives_reachability`) +/// for O(1) outlives lookups. Index `num_bvs` in the matrix represents +/// `'static`. +pub(crate) fn impl_admissible_under_class<'tcx>( + tcx: TyCtxt<'tcx>, + impl_def_id: DefId, + root_dyn_type: Ty<'tcx>, + dyn_type: Ty<'tcx>, + reachability: &BitMatrix<usize, usize>, + num_bvs: usize, +) -> bool { + let Some(impl_trait_ref) = tcx.impl_opt_trait_ref(impl_def_id) else { + bug!("impl_admissible_under_class: impl {impl_def_id:?} has no trait ref"); + }; + let impl_trait_ref = impl_trait_ref.skip_binder(); + + // O(1) reachability lookup. 'static is at index num_bvs (= dim - 1). + let remap = |idx: usize| if idx == usize::MAX { num_bvs } else { idx }; + let implies = + |longer: usize, shorter: usize| reachability.contains(remap(longer), remap(shorter)); + + // Walk the dyn type's existential trait ref to get the bound + // variable index at each trait-lifetime position. Positions where + // the dyn type has a concrete lifetime (e.g., 'static embedded in + // a type arg) get `None`. 
+ let dyn_bvs: Vec<Option<usize>> = extract_dyn_bv_positions(tcx, dyn_type); + + // At each position, pair the dyn type's bv index with the impl's + // region to build the mapping. + let impl_regions: Vec<ty::Region<'tcx>> = extract_impl_trait_ref_regions(tcx, impl_trait_ref); + + assert_eq!( + dyn_bvs.len(), + impl_regions.len(), + "dyn type and impl must have same number of trait lifetime positions" + ); + + // Build: impl_param → set of bv indices it maps to (deduplicated). + let mut param_to_bvs: FxHashMap<RegionVid, DenseBitSet<usize>> = FxHashMap::default(); + + for (&dyn_bv, &impl_region) in dyn_bvs.iter().zip(impl_regions.iter()) { + let Some(bv) = dyn_bv else { + // Dyn type has a concrete lifetime at this position. + // The impl's region at this position must be compatible. + // (Handled separately — see below.) + continue; + }; + + match impl_region.kind() { + // Free impl param — record which bvs it maps to. + ty::ReEarlyParam(param) => { + param_to_bvs + .entry(RegionVid::from_u32(param.index)) + .or_insert_with(|| DenseBitSet::new_empty(num_bvs)) + .insert(bv); + } + // Impl fixes this position to 'static. + // The class must imply this bv outlives 'static. + ty::ReStatic => { + if !implies(bv, usize::MAX) { + return false; + } + } + // Higher-ranked in impl — not a free param, skip. + ty::ReBound(..) => {} + // Conservative reject for other concrete lifetimes. + _ => { + return false; + } + } + } + + // Self-anchored parameters: walk the impl's Self type to detect + // impl params that appear in BOTH Self and trait-arg positions. + // Such params are anchored to the concrete type's (erased) + // lifetime. If the mapped target bvs are structurally exposed + // through the root supertrait, the erasure-safe check validates + // that root<->target correspondence at the cast site, so no extra + // table-side restriction is needed here. Non-root-exposed shared + // bvs remain hidden behind Self and therefore still require the + // conservative 'static-equivalence check. 
+ let root_exposed_bvs = root_exposed_target_bvs(tcx, root_dyn_type, dyn_type); + let self_ty = impl_trait_ref.self_ty(); + let mut self_region_collector = RegionPositionCollector::new(); + self_ty.visit_with(&mut self_region_collector); + for self_region in self_region_collector.into_regions() { + if let ty::ReEarlyParam(param) = self_region.kind() { + if let Some(bvs) = param_to_bvs.get(&RegionVid::from_u32(param.index)) { + if bvs.iter().all(|bv| root_exposed_bvs.contains(bv)) { + continue; + } + // Shared param: appears in Self AND trait args. + // All mapped bvs must be 'static-equivalent. + for bv in bvs.iter() { + if !implies(bv, usize::MAX) || !implies(usize::MAX, bv) { + return false; + } + } + } + } + } + + // Parameter aliasing: if one impl param maps to multiple DISTINCT + // bvs, those bvs must be equivalent (mutual outlives) under the + // class. If they map to the same bv (due to dyn type aliasing), no + // constraint is needed — the dyn binder already guarantees it. + // + // Iteration order is irrelevant: the result is a pure conjunction + // over all params — each param's check is independent. + #[allow(rustc::potential_query_instability)] + for (_param, bvs) in ¶m_to_bvs { + let mut iter = bvs.iter(); + let Some(first) = iter.next() else { continue }; + for bv in iter { + if !implies(first, bv) || !implies(bv, first) { + return false; + } + } + } + + // Explicit RegionOutlives where clauses. 
+ let predicates = tcx.predicates_of(impl_def_id); + 'preds: for (pred, _) in predicates.predicates { + if let ty::ClauseKind::RegionOutlives(outlives) = pred.kind().skip_binder() { + let longer_bvs = region_to_bvs(¶m_to_bvs, outlives.0); + let shorter_bvs = region_to_bvs(¶m_to_bvs, outlives.1); + + match (longer_bvs, shorter_bvs) { + (Some(ls), Some(ss)) => { + for l in ls.iter() { + for s in ss.iter() { + if l != s && !implies(l, s) { + return false; + } + } + } + } + (Some(ls), None) if outlives.1.is_static() => { + for l in ls.iter() { + if !implies(l, usize::MAX) { + return false; + } + } + } + (Some(ls), None) if !outlives.1.is_static() => { + // 'a: 'b where 'b is hidden. We can't prove anything about this. + // typeck will emit a diagnostic warning. + if ls.iter().all(|l| implies(l, usize::MAX)) { + continue 'preds; + } + return false; + } + (None, Some(_)) if !outlives.0.is_static() => { + // 'a: 'b where 'a is hidden. We can't prove anything about this. + // typeck will emit a diagnostic warning. + return false; + } + (None, Some(_)) if outlives.0.is_static() => { + // 'static: shorter — tautology. + } + _ => { + return false; + } + } + } + } + + true +} + +/// Populate the trait cast table for a (root supertrait, concrete type) pair. +/// +/// For each table slot (one per (sub_trait, outlives_class) pair from the +/// layout), determines whether the concrete type implements the sub-trait +/// under that slot's outlives class. If so, records the vtable `AllocId`; +/// otherwise the slot is `None`. +/// +/// Groups slots by sub-trait to avoid redundant impl resolution — all slots +/// for the same sub-trait share the same `impl_def_id`. 
+pub(crate) fn trait_cast_table<'tcx>( + tcx: TyCtxt<'tcx>, + (root, concrete_type): (Ty<'tcx>, Ty<'tcx>), +) -> &'tcx [Option<AllocId>] { + let layout = tcx.trait_cast_layout(root); + let mut table: Vec<Option<AllocId>> = vec![None; layout.table_length]; + + for sub_trait in layout.sub_traits() { + let Some(impl_def_id) = resolve_dyn_satisfaction(tcx, concrete_type, sub_trait) else { + // Concrete type does not implement this sub-trait (or fails + // an auto trait bound). All slots for this sub-trait stay None. + continue; + }; + + // Extract the existential trait ref for vtable_allocation. + let ty::Dynamic(dyn_data, ..) = sub_trait.kind() else { + bug!("trait_cast_table: sub_trait {sub_trait:?} is not a dyn type"); + }; + let sub_trait_ref = dyn_data.principal().map(|p| p.skip_binder()); + + for (index, slot_info) in layout.slots_for_sub_trait(sub_trait) { + let reachability = tcx + .outlives_reachability((slot_info.outlives_class.entries, slot_info.num_bvs + 1)); + + if impl_admissible_under_class( + tcx, + impl_def_id, + root, + sub_trait, + &reachability, + slot_info.num_bvs, + ) { + let vtable = tcx.vtable_allocation((concrete_type, sub_trait_ref)); + table[index] = Some(vtable); + } + // else: impl exists but not admissible under this class — None. + } + } + + tcx.arena.alloc_from_iter(table) +} + +/// Build a static allocation holding the trait cast metadata table for a +/// (root supertrait, concrete type) pair. Each entry is a pointer-sized +/// slot: `Some(vtable_alloc_id)` becomes a pointer to that vtable; +/// `None` becomes a null pointer. +/// +/// The resulting allocation is immutable and placed in `.rodata` (or +/// equivalent) by the codegen backend. 
+fn emit_table_static<'tcx>(tcx: TyCtxt<'tcx>, root: Ty<'tcx>, concrete_type: Ty<'tcx>) -> AllocId { + let table = tcx.trait_cast_table((root, concrete_type)); + + let ptr_size = tcx.data_layout.pointer_size(); + let ptr_align = tcx.data_layout.pointer_align().abi; + let table_size = ptr_size * u64::try_from(table.len()).unwrap(); + let mut alloc = Allocation::new(table_size, ptr_align, AllocInit::Uninit, ()); + + for (idx, entry) in table.iter().enumerate() { + let idx: u64 = u64::try_from(idx).unwrap(); + let scalar = match entry { + None => Scalar::from_maybe_pointer(Pointer::null(), &tcx), + Some(vtable_alloc_id) => { + let vptr = Pointer::from(*vtable_alloc_id); + Scalar::from_pointer(vptr, &tcx) + } + }; + alloc + .write_scalar(&tcx, alloc_range(ptr_size * idx, ptr_size), scalar) + .expect("failed to build trait cast metadata table"); + } + + alloc.mutability = Mutability::Not; + tcx.reserve_and_set_memory_alloc(tcx.mk_const_alloc(alloc)) +} + +/// Query provider: returns the `AllocId` of the metadata table static for +/// the given (root supertrait, concrete type) pair. +pub(crate) fn trait_cast_table_alloc<'tcx>( + tcx: TyCtxt<'tcx>, + (root, concrete_type): (Ty<'tcx>, Ty<'tcx>), +) -> AllocId { + emit_table_static(tcx, root, concrete_type) +} + +/// Create a single immutable `u8 = 0` static allocation whose address +/// serves as the unique global crate identifier. Only the address is +/// significant — the value is unspecified. +/// +/// The allocation uses byte alignment and immutable mutability to ensure +/// it lands in `.rodata` (or equivalent). The codegen backend must not +/// mark it `unnamed_addr` — LLVM's default `named_addr` semantics +/// guarantee the address is preserved through optimization. 
+fn get_or_create_global_crate_id<'tcx>(tcx: TyCtxt<'tcx>) -> AllocId { + let mut alloc = Allocation::from_bytes_byte_aligned_immutable(&[0u8], ()); + alloc.address_significant = true; + tcx.reserve_and_set_memory_alloc(tcx.mk_const_alloc(alloc)) +} + +/// Query provider: returns the `AllocId` of the per-global-crate `u8` +/// static used for cross-crate trait-cast safety checks. +pub(crate) fn global_crate_id_alloc<'tcx>(tcx: TyCtxt<'tcx>, _: ()) -> AllocId { + get_or_create_global_crate_id(tcx) +} + +/// Build the lookup table that maps each table-dependent intrinsic to +/// its resolved constant value. Iterates over the classified requests, +/// delegates to the per-intrinsic resolution logic, and collects +/// results into an [`IntrinsicResolutions`] for use by +/// `cascade_canonicalize`. +/// +/// By this point, `trait_cast_layout(root)` and +/// `trait_cast_table_alloc(root, concrete_type)` have already been forced +/// in `resolve_trait_cast_globals`'s query-driving loop. All query calls +/// below are therefore cache hits. +pub(crate) fn build_intrinsic_resolutions<'tcx>( + tcx: TyCtxt<'tcx>, + requests: &TraitCastRequests<'tcx>, +) -> IntrinsicResolutions<'tcx> { + let global_crate_id = get_or_create_global_crate_id(tcx); + + // --- trait_metadata_index: (sub_trait, outlives_class) → slot index --- + // Multiple augmented intrinsic Instances may request the same + // (sub_trait, outlives_class) — e.g. the same intrinsic + // monomorphized from different call sites in different crates. + // Deduplication via `entry` makes the point-lookup-only contract + // of IntrinsicResolutions explicit. 
+ let mut indices: UnordMap<(Ty<'tcx>, OutlivesClass<'tcx>), usize> = UnordMap::default(); + for req in &requests.index_requests { + let outlives_class = + trait_metadata_index_outlives_class(tcx, req.super_trait, req.sub_trait, req.instance); + indices.entry((req.sub_trait, outlives_class)).or_insert_with(|| { + let layout = tcx.trait_cast_layout(req.super_trait); + *layout + .index_map + .get(&(req.sub_trait, outlives_class)) + .expect("index request for (sub_trait, outlives_class) not found in layout") + }); + + // Also populate indices for where-clause-derived classes in the + // layout. These are added by `trait_cast_graph` and may not have + // a corresponding mono request, but need to be accessible at + // resolution time for callers that carry valid outlives evidence + // through generic library code. + let layout = tcx.trait_cast_layout(req.super_trait); + for slot in &layout.slot_info { + if slot.sub_trait == req.sub_trait { + if let Some(&idx) = layout.index_map.get(&(slot.sub_trait, slot.outlives_class)) { + indices.entry((slot.sub_trait, slot.outlives_class)).or_insert(idx); + } + } + } + } + + // --- trait_metadata_table: (super_trait, concrete_type) → table static AllocId --- + let mut tables: UnordMap<(Ty<'tcx>, Ty<'tcx>), AllocId> = UnordMap::default(); + let mut table_alloc_ids: Vec<AllocId> = Vec::new(); + for req in &requests.table_requests { + tables.entry((req.super_trait, req.concrete_type)).or_insert_with(|| { + let alloc_id = tcx.trait_cast_table_alloc((req.super_trait, req.concrete_type)); + table_alloc_ids.push(alloc_id); + alloc_id + }); + } + + // --- trait_metadata_table_len: super_trait → table length --- + let mut table_lens: UnordMap<Ty<'tcx>, usize> = UnordMap::default(); + for req in &requests.table_len_requests { + table_lens.entry(req.super_trait).or_insert_with(|| { + let layout = tcx.trait_cast_layout(req.super_trait); + layout.table_length + }); + } + + IntrinsicResolutions { global_crate_id, indices, tables, table_lens, table_alloc_ids } +} 
diff --git a/compiler/rustc_monomorphize/src/trait_cast_requests.rs b/compiler/rustc_monomorphize/src/trait_cast_requests.rs new file mode 100644 index 0000000000000..f917b3193c54b --- /dev/null +++ b/compiler/rustc_monomorphize/src/trait_cast_requests.rs @@ -0,0 +1,90 @@ +use rustc_middle::bug; +use rustc_middle::ty::trait_cast::IntrinsicSiteKind; +use rustc_middle::ty::{Instance, TyCtxt}; +use rustc_span::sym; + +/// Classifies an augmented intrinsic Instance by projecting +/// fully-monomorphized types from its generic args. Pure O(1) function. +/// +/// Not a query: the computation is cheaper than query machinery overhead +/// (key hashing, dep-node creation, result storage), and all callers are +/// in `rustc_monomorphize`. +pub(crate) fn classify_intrinsic_site<'tcx>( + tcx: TyCtxt<'tcx>, + instance: Instance<'tcx>, +) -> IntrinsicSiteKind<'tcx> { + let def_id = instance.def_id(); + let args = instance.args; + let intrinsic = tcx + .intrinsic(def_id) + .unwrap_or_else(|| bug!("classify_intrinsic_site called on non-intrinsic: {:?}", instance)); + + match intrinsic.name { + s if s == sym::trait_metadata_index => { + // Generic args: [Super, Sub, ...Outlives] + IntrinsicSiteKind::Index { + super_trait: args[0].expect_ty(), + sub_trait: args[1].expect_ty(), + } + } + s if s == sym::trait_metadata_table => { + // Generic args: [Super, Concrete] + IntrinsicSiteKind::Table { + super_trait: args[0].expect_ty(), + concrete_type: args[1].expect_ty(), + } + } + s if s == sym::trait_metadata_table_len => { + // Generic args: [Super] + IntrinsicSiteKind::TableLen { super_trait: args[0].expect_ty() } + } + s if s == sym::trait_cast_is_lifetime_erasure_safe => { + // Generic args: [Super, Tgt, ...Outlives] + IntrinsicSiteKind::ErasureSafe { + super_trait: args[0].expect_ty(), + target_trait: args[1].expect_ty(), + } + } + _ => { + rustc_middle::bug!("classify_intrinsic_site: not a trait-cast intrinsic: {:?}", def_id) + } + } +} + +/// Query provider: collect all augmented 
intrinsic Instances from all crates +/// and classify them into `TraitCastRequests`. +/// +/// Iterates over `delayed_codegen_requests` for each crate (local and +/// upstream), extracts intrinsic callees from each delayed instance, and +/// classifies each intrinsic Instance into the appropriate +/// `IntrinsicSiteKind`. +pub(crate) fn gather_trait_cast_requests<'tcx>( + tcx: TyCtxt<'tcx>, + (): (), +) -> rustc_middle::ty::trait_cast::TraitCastRequests<'tcx> { + use std::iter; + + use rustc_hir::def_id::LOCAL_CRATE; + use rustc_middle::ty::trait_cast::TraitCastRequests; + + if !tcx.is_global_crate() { + return TraitCastRequests::default(); + } + + let mut requests = TraitCastRequests::default(); + + // Collect intrinsic Instances from all crates. + // Local crate proxies into collect_and_partition_mono_items; + // upstream crates decode from metadata. + for &cnum in iter::once(&LOCAL_CRATE).chain(tcx.crates(())) { + let delayed_list = tcx.delayed_codegen_requests(cnum); + for delayed in delayed_list { + for &intrinsic in delayed.intrinsic_callees { + let site = classify_intrinsic_site(tcx, intrinsic); + requests.add(site, intrinsic); + } + } + } + + requests +} diff --git a/compiler/rustc_monomorphize/src/trait_graph.rs b/compiler/rustc_monomorphize/src/trait_graph.rs new file mode 100644 index 0000000000000..b7f3cf72fb31d --- /dev/null +++ b/compiler/rustc_monomorphize/src/trait_graph.rs @@ -0,0 +1,501 @@ +use rustc_data_structures::fx::FxHashMap; +use rustc_data_structures::unord::{UnordMap, UnordSet}; +use rustc_hir::def_id::DefId; +use rustc_index::bit_set::{BitMatrix, DenseBitSet}; +use rustc_middle::bug; +use rustc_middle::traits::{CodegenObligationError, ImplSource, ImplSourceUserDefinedData}; +use rustc_middle::ty::trait_cast::{FingerprintedTy, SubTraitInfo, TraitGraph}; +use rustc_middle::ty::{ + self, EarlyParamRegion, GenericArg, RegionVid, Ty, TyCtxt, TypeVisitable, TypeVisitor, +}; + +use crate::erasure_safe::{ + collect_all_binder_vars, 
lifetime_param_map, trait_metadata_index_outlives_class,
+};
+
+/// Build a [`TraitGraph`] for a given root supertrait.
+///
+/// Partitions the gathered delayed-codegen requests into sub-trait →
+/// outlives-class mappings and the set of concrete types that requested
+/// metadata tables. Requests whose `super_trait` does not match `root`
+/// are ignored (they belong to a different root's graph).
+pub(crate) fn trait_cast_graph<'tcx>(tcx: TyCtxt<'tcx>, root: Ty<'tcx>) -> TraitGraph<'tcx> {
+    let requests = tcx.gather_trait_cast_requests(());
+
+    // Build with FxHash* for .entry() access, then wrap in UnordMap/UnordSet.
+    let mut sub_traits: UnordMap<FingerprintedTy<'tcx>, SubTraitInfo<'tcx>> = UnordMap::default();
+    let mut concrete_types: UnordSet<FingerprintedTy<'tcx>> = UnordSet::default();
+
+    for req in &requests.index_requests {
+        if req.super_trait != root {
+            continue;
+        }
+        let outlives_class =
+            trait_metadata_index_outlives_class(tcx, req.super_trait, req.sub_trait, req.instance);
+        let key = FingerprintedTy::new(tcx, req.sub_trait);
+        sub_traits
+            .entry(key)
+            .or_insert_with(|| SubTraitInfo { outlives_classes: UnordSet::default() })
+            .outlives_classes
+            .insert(outlives_class);
+    }
+
+    for req in &requests.table_requests {
+        if req.super_trait != root {
+            continue;
+        }
+        concrete_types.insert(FingerprintedTy::new(tcx, req.concrete_type));
+    }
+
+    // TableLenRequests don't contribute additional structure — the root
+    // is already the key. Their presence in the requests ensures the root
+    // appears in the `root_traits()` set that drives invocation, but the
+    // graph itself needs no extra state from them.
+    //
+    // ErasureSafeRequests are resolved independently — they query the
+    // layout but do not influence graph construction.
+
+    // Ensure the table layout includes slots for where-clause-derived
+    // outlives classes. Trait where-clauses like `where 'a: 'b` create
+    // admissibility requirements that the empty outlives class cannot
+    // satisfy. Without the corresponding slot, casts through generic
+    // library code (e.g. `TraitCast::unchecked_cast`) that carry valid
+    // outlives evidence would find only a null table entry.
+    let wc_additions: Vec<_> = sub_traits
+        .items()
+        .map(|(k, v)| (*k, v))
+        .into_sorted_stable_ord_by_key(|item| &item.0)
+        .into_iter()
+        .filter_map(|(key, info)| {
+            if info.outlives_classes.is_empty() {
+                return None;
+            }
+            let sub_trait: Ty<'tcx> = *key;
+            derive_where_clause_outlives_class(tcx, sub_trait).map(|cls| (key, cls))
+        })
+        .collect();
+    for (key, wc_class) in wc_additions {
+        sub_traits.get_mut(&key).unwrap().outlives_classes.insert(wc_class);
+    }
+
+    TraitGraph { root, sub_traits, concrete_types }
+}
+
+/// Derive an outlives class from a sub-trait's where-clauses on lifetime
+/// parameters.
+///
+/// For a sub-trait `dyn Trait<'a, 'b>` where the trait has `where 'a: 'b`,
+/// this returns an `OutlivesClass` with `(bv0, bv1)` — the binder variable
+/// pair corresponding to the where-clause's region outlives predicate.
+///
+/// Returns `None` if the trait has no region outlives predicates on its
+/// own lifetime parameters, or if the dyn type has no principal trait.
+#[allow(rustc::potential_query_instability)]
+pub(crate) fn derive_where_clause_outlives_class<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    sub_trait: Ty<'tcx>,
+) -> Option<ty::trait_cast::OutlivesClass<'tcx>> {
+    let ty::Dynamic(dyn_data, ..) = sub_trait.kind() else {
+        return None;
+    };
+    let principal = dyn_data.principal()?;
+    let trait_def_id = principal.skip_binder().def_id;
+
+    // Map from arg position (in ExistentialTraitRef.args) to param index.
+    let lt_map = lifetime_param_map(tcx, trait_def_id);
+    // Invert: param_index → arg_position.
+    let param_to_arg: FxHashMap<u32, usize> =
+        lt_map.iter().map(|(&arg_pos, &param_idx)| (param_idx, arg_pos)).collect();
+
+    // Map arg position to binder variable index.
+ let bvs = collect_all_binder_vars(tcx, dyn_data); + let mut arg_to_bv: FxHashMap = FxHashMap::default(); + for &(bv_idx, ref loc) in &bvs.principal_entries { + if loc.dfs_offset == 0 { + arg_to_bv.insert(loc.arg_index, bv_idx); + } + } + + // Walk the trait's predicates for RegionOutlives clauses. + let predicates = tcx.predicates_of(trait_def_id); + let mut entries = Vec::new(); + + for (pred, _) in predicates.predicates { + if let ty::ClauseKind::RegionOutlives(outlives) = pred.kind().skip_binder() { + let longer_param = match outlives.0.kind() { + ty::ReEarlyParam(ep) => Some(ep.index), + _ => None, + }; + let shorter_param = match outlives.1.kind() { + ty::ReEarlyParam(ep) => Some(ep.index), + _ => None, + }; + + if let (Some(longer_param), Some(shorter_param)) = (longer_param, shorter_param) { + let longer_arg = param_to_arg.get(&longer_param); + let shorter_arg = param_to_arg.get(&shorter_param); + if let (Some(&la), Some(&sa)) = (longer_arg, shorter_arg) { + let longer_bv = arg_to_bv.get(&la); + let shorter_bv = arg_to_bv.get(&sa); + if let (Some(&lb), Some(&sb)) = (longer_bv, shorter_bv) { + if lb != sb { + entries.push(tcx.mk_outlives_arg(lb, sb).into()); + } + } + } + } + } + } + + if entries.is_empty() { + return None; + } + + entries.sort_by(|a: &GenericArg<'tcx>, b: &GenericArg<'tcx>| { + let ao = a.as_outlives().unwrap(); + let bo = b.as_outlives().unwrap(); + (ao.longer(), ao.shorter()).cmp(&(bo.longer(), bo.shorter())) + }); + entries.dedup(); + + let interned = tcx.arena.alloc_from_iter(entries); + Some(ty::trait_cast::OutlivesClass::from_entries(interned)) +} + +/// Computes the reflexive-transitive closure of outlives relationships +/// over a `dim`-dimensional index space using Floyd-Warshall. +/// +/// `entries` contains `GenericArgKind::Outlives` pairs encoding direct +/// outlives edges. Index `dim - 1` represents `'static` (outlives +/// everything). `usize::MAX` in an `OutlivesArgData` field is remapped +/// to `dim - 1`. 
+/// +/// The resulting `BitMatrix` satisfies: `reach.contains(a, b)` iff +/// lifetime `a` transitively outlives lifetime `b` under the given +/// constraints. +pub(crate) fn outlives_reachability<'tcx>( + _tcx: TyCtxt<'tcx>, + (entries, dim): (&'tcx [GenericArg<'tcx>], usize), +) -> BitMatrix { + let static_idx = dim - 1; + let mut reach = BitMatrix::new(dim, dim); + + // Reflexivity. + for i in 0..dim { + reach.insert(i, i); + } + // 'static outlives everything. + for j in 0..dim { + reach.insert(static_idx, j); + } + + // Direct edges, remapping usize::MAX → static_idx. + let remap = |idx: usize| if idx == usize::MAX { static_idx } else { idx }; + for entry in entries { + if let ty::GenericArgKind::Outlives(o) = entry.kind() { + reach.insert(remap(o.longer()), remap(o.shorter())); + } + } + + // Transitive closure (Floyd-Warshall — dim is tiny, typically ≤10). + for k in 0..dim { + for i in 0..dim { + if reach.contains(i, k) { + reach.union_rows(k, i); + } + } + } + + reach +} + +/// Query: is this impl universally admissible — admissible under every +/// outlives class for every dyn binder structure? +/// +/// True when the impl satisfies: +/// (a) no concrete lifetimes (e.g. 'static) in the impl's trait ref +/// (b) every trait lifetime position maps to a distinct free impl param +/// (c) no `RegionOutlives` where clauses involving trait lifetime params +/// (d) no trait-position lifetime param also appears in Self +/// (shared params are anchored to the concrete type's erased +/// lifetime, requiring 'static-equivalence) +pub(crate) fn impl_universally_admissible<'tcx>(tcx: TyCtxt<'tcx>, impl_def_id: DefId) -> bool { + let Some(impl_trait_ref) = tcx.impl_opt_trait_ref(impl_def_id) else { + return true; // inherent impl => vacuously admissible + }; + let impl_trait_ref = impl_trait_ref.skip_binder(); + + // Walk the impl's trait ref (excluding Self) to check (a)+(b). 
+    let mut trait_params: UnordSet<EarlyParamRegion> = UnordSet::default();
+    let mut ok = true;
+    let mut checker =
+        UniversalAdmissibilityChecker { seen_params: &mut trait_params, admissible: &mut ok };
+    for arg in impl_trait_ref.args.iter().skip(1) {
+        let arg: ty::GenericArg<'tcx> = arg;
+        arg.visit_with(&mut checker);
+        if !*checker.admissible {
+            return false;
+        }
+    }
+    drop(checker);
+
+    // Check (d): no trait-position param also appears in Self.
+    let mut self_params: UnordSet<EarlyParamRegion> = UnordSet::default();
+    let mut collector = SelfRegionCollector { params: &mut self_params };
+    impl_trait_ref.self_ty().visit_with(&mut collector);
+    drop(collector);
+    if self_params.items().any(|param| trait_params.contains(&param)) {
+        return false;
+    }
+
+    // Check (c): no RegionOutlives where clauses on trait params.
+    let predicates = tcx.predicates_of(impl_def_id);
+    for (pred, _) in predicates.predicates {
+        if let ty::ClauseKind::RegionOutlives(outlives) = pred.kind().skip_binder() {
+            let l_is_trait = is_trait_region_param(&trait_params, outlives.0);
+            let s_is_trait = is_trait_region_param(&trait_params, outlives.1);
+            if l_is_trait || s_is_trait {
+                return false;
+            }
+        }
+    }
+    true
+}
+
+/// Collects all `ReEarlyParam` regions from a type.
+struct SelfRegionCollector<'a> {
+    params: &'a mut UnordSet<EarlyParamRegion>,
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for SelfRegionCollector<'_> {
+    fn visit_region(&mut self, r: ty::Region<'tcx>) {
+        if let ty::ReEarlyParam(param) = r.kind() {
+            self.params.insert(param);
+        }
+    }
+}
+
+/// TypeVisitor that checks conditions (a) and (b).
+struct UniversalAdmissibilityChecker<'a> {
+    seen_params: &'a mut UnordSet<EarlyParamRegion>,
+    admissible: &'a mut bool,
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for UniversalAdmissibilityChecker<'_> {
+    fn visit_region(&mut self, r: ty::Region<'tcx>) {
+        if !*self.admissible {
+            return;
+        }
+        match r.kind() {
+            ty::ReEarlyParam(param) => {
+                // (b): each param must appear at most once
+                if !self.seen_params.insert(param) {
+                    *self.admissible = false;
+                }
+            }
+            ty::ReStatic => *self.admissible = false, // (a)
+            ty::ReBound(..) => {} // higher-ranked, fine
+            _ => *self.admissible = false, // conservative reject
+        }
+    }
+}
+
+fn is_trait_region_param(
+    trait_params: &UnordSet<EarlyParamRegion>,
+    region: ty::Region<'_>,
+) -> bool {
+    match region.kind() {
+        ty::ReEarlyParam(param) => trait_params.contains(&param),
+        _ => false,
+    }
+}
+
+// ── TypeVisitor helpers for `impl_admissible_under_class` ─────────────────
+
+/// Extract the dyn type's bound variable index at each trait-lifetime
+/// position. Walks the existential binder's trait ref in TypeVisitor
+/// DFS order. Returns `Some(bv_index)` for bound regions, `None` for
+/// concrete lifetimes (e.g., 'static embedded in a type argument).
+pub(crate) fn extract_dyn_bv_positions<'tcx>(
+    _tcx: TyCtxt<'tcx>,
+    dyn_type: Ty<'tcx>,
+) -> Vec<Option<usize>> {
+    // dyn_type is `dyn for<'^0, '^1, ...> SubTrait<...>`.
+    // Walk the binder's trait ref to find BoundRegion indices.
+    let ty::Dynamic(dyn_data, ..) = dyn_type.kind() else {
+        bug!("extract_dyn_bv_positions: {dyn_type:?} is not a dyn type");
+    };
+    let binder = dyn_data.principal().unwrap();
+    let mut collector = BoundRegionCollector::new();
+    // ExistentialTraitRef.args already excludes Self (erased by
+    // `erase_self_ty`), so no skip(1) — iterate all args.
+    for arg in binder.skip_binder().args.iter() {
+        arg.visit_with(&mut collector);
+    }
+    collector.into_bv_positions()
+}
+
+/// Extract the impl's region at each trait-lifetime position.
+/// Walks the impl trait ref's generic args (excluding Self) in the
+/// same TypeVisitor DFS order as `extract_dyn_bv_positions`.
+///
+/// `TraitRef.args[0]` is Self (unlike `ExistentialTraitRef` which
+/// already excludes Self), so `.skip(1)` is correct here. Self's
+/// regions are handled separately by the Self-anchored-params check
+/// in `impl_admissible_under_class`.
+pub(crate) fn extract_impl_trait_ref_regions<'tcx>(
+    _tcx: TyCtxt<'tcx>,
+    trait_ref: ty::TraitRef<'tcx>,
+) -> Vec<ty::Region<'tcx>> {
+    let mut collector = RegionPositionCollector::new();
+    for arg in trait_ref.args.iter().skip(1) {
+        arg.visit_with(&mut collector);
+    }
+    collector.into_regions()
+}
+
+/// Collects bound variable indices from a dyn type's existential trait
+/// ref regions. Records `Some(bv_index)` for `ReBound` regions and
+/// `None` for concrete lifetimes, in TypeVisitor DFS order.
+pub(crate) struct BoundRegionCollector {
+    positions: Vec<Option<usize>>,
+    next_erased_var: usize,
+}
+
+impl BoundRegionCollector {
+    pub(crate) fn new() -> Self {
+        Self { positions: Vec::new(), next_erased_var: 0 }
+    }
+    pub(crate) fn into_bv_positions(self) -> Vec<Option<usize>> {
+        self.positions
+    }
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for BoundRegionCollector {
+    fn visit_region(&mut self, r: ty::Region<'tcx>) {
+        match r.kind() {
+            ty::ReBound(_, br) => self.positions.push(Some(br.var.as_usize())),
+            ty::ReErased => {
+                let var = self.next_erased_var;
+                self.next_erased_var += 1;
+                self.positions.push(Some(var));
+            }
+            _ => self.positions.push(None),
+        }
+    }
+}
+
+/// Collects regions from an impl's trait ref in TypeVisitor DFS order.
+pub(crate) struct RegionPositionCollector<'tcx> {
+    regions: Vec<ty::Region<'tcx>>,
+}
+
+impl<'tcx> RegionPositionCollector<'tcx> {
+    pub(crate) fn new() -> Self {
+        Self { regions: Vec::new() }
+    }
+    pub(crate) fn into_regions(self) -> Vec<ty::Region<'tcx>> {
+        self.regions
+    }
+}
+
+impl<'tcx> TypeVisitor<TyCtxt<'tcx>> for RegionPositionCollector<'tcx> {
+    fn visit_region(&mut self, r: ty::Region<'tcx>) {
+        self.regions.push(r);
+    }
+}
+
+/// Map a region to the set of bv indices it covers, via param_to_bvs.
+pub(crate) fn region_to_bvs<'a>(
+    param_to_bvs: &'a FxHashMap<RegionVid, DenseBitSet<usize>>,
+    region: ty::Region<'_>,
+) -> Option<&'a DenseBitSet<usize>> {
+    match region.kind() {
+        ty::ReEarlyParam(param) => param_to_bvs.get(&RegionVid::from_u32(param.index)),
+        _ => None,
+    }
+}
+
+/// Resolve whether a concrete type fully satisfies a dyn type —
+/// both its principal trait and all auto trait bounds (e.g., `Send`,
+/// `Sync`). Returns the principal impl's `DefId` if all obligations
+/// are met, `None` otherwise.
+///
+/// Uses `codegen_select_candidate` — the same path used by vtable
+/// computation in unsizing coercions.
+///
+/// The dyn type's binder may contain `ReBound` regions (e.g.,
+/// `dyn for<'a, 'b> Sub<'a, 'b>`). These appear in the constructed
+/// `TraitRef` but are harmless: `codegen_select_candidate` builds its
+/// `InferCtxt` with `.ignoring_regions()`, so they unify freely with
+/// impl params. We only need the `DefId`; lifetime admissibility is
+/// checked separately by `impl_admissible_under_class`.
+///
+/// **Auto traits.** The dyn type may carry non-principal auto trait
+/// bounds (e.g., `dyn Sub + Send`). These are checked separately from
+/// the principal: for each auto trait `DefId` in the dyn type, the
+/// function verifies that `concrete_ty: AutoTrait` holds. If any auto
+/// trait is unsatisfied, the concrete type cannot soundly inhabit a
+/// trait object of this dyn type, and `None` is returned — even if
+/// the principal trait is implemented.
+pub(crate) fn resolve_dyn_satisfaction<'tcx>(
+    tcx: TyCtxt<'tcx>,
+    concrete_ty: Ty<'tcx>,
+    sub_trait_dyn: Ty<'tcx>,
+) -> Option<DefId> {
+    let ty::Dynamic(dyn_data, ..) = sub_trait_dyn.kind() else {
+        bug!("resolve_dyn_satisfaction: {sub_trait_dyn:?} is not a dyn type");
+    };
+    let principal = dyn_data.principal()?;
+    let existential_ref = principal.skip_binder();
+
+    let typing_env = ty::TypingEnv::fully_monomorphized();
+
+    // --- Check auto trait bounds ---
+    //
+    // Auto traits (Send, Sync, Unpin, etc.) have no methods and no
+    // vtable entries, but they are semantic obligations on the dyn
+    // type. A table entry must be `None` if the concrete type fails
+    // any auto trait bound, regardless of principal satisfaction.
+    for auto_trait_def_id in dyn_data.auto_traits() {
+        let auto_ref = ty::TraitRef::new(tcx, auto_trait_def_id, [concrete_ty]);
+        let input = typing_env.as_query_input(auto_ref);
+        match tcx.codegen_select_candidate(input) {
+            Ok(_) => {} // Satisfied.
+            Err(_) => return None, // Auto trait not implemented.
+        }
+    }
+
+    // --- Check principal trait ---
+    //
+    // ExistentialTraitRef.args already excludes Self, so prepend
+    // concrete_ty as Self — equivalent to
+    // `existential_ref.with_self_ty(tcx, concrete_ty)`.
+    let concrete_trait_ref = ty::TraitRef::new(
+        tcx,
+        existential_ref.def_id,
+        std::iter::once(concrete_ty.into()).chain(existential_ref.args.iter()),
+    );
+
+    debug_assert_eq!(
+        concrete_trait_ref,
+        tcx.normalize_erasing_regions(typing_env, ty::Unnormalized::new_wip(concrete_trait_ref)),
+        "resolve_dyn_satisfaction: trait ref must be normalized post-mono",
+    );
+    let input = typing_env.as_query_input(concrete_trait_ref);
+
+    match tcx.codegen_select_candidate(input) {
+        Ok(ImplSource::UserDefined(ImplSourceUserDefinedData { impl_def_id, .. })) => {
+            Some(*impl_def_id)
+        }
+        Ok(_) => None, // Builtin or Param — no user impl.
+ Err(CodegenObligationError::Ambiguity | CodegenObligationError::Unimplemented) => None, + Err(CodegenObligationError::UnconstrainedParam(_)) => { + bug!( + "resolve_dyn_satisfaction: unconstrained param in impl \ + for `{concrete_ty}: {:?}`", + existential_ref.def_id, + ); + } + } +} diff --git a/compiler/rustc_next_trait_solver/src/canonical/mod.rs b/compiler/rustc_next_trait_solver/src/canonical/mod.rs index a32a693a899cf..6bfaad5efc43c 100644 --- a/compiler/rustc_next_trait_solver/src/canonical/mod.rs +++ b/compiler/rustc_next_trait_solver/src/canonical/mod.rs @@ -185,6 +185,8 @@ where opt_values[bc.var()] = Some(*original_value); } } + // Outlives args are metadata, not canonical variables. + ty::GenericArgKind::Outlives(_) => {} } } CanonicalVarValues::instantiate(delegate.cx(), response.var_kinds, |var_values, kind| { @@ -261,7 +263,9 @@ fn register_region_constraints( ty::RegionConstraint::Outlives(ty::OutlivesPredicate(lhs, rhs)) => match lhs.kind() { ty::GenericArgKind::Lifetime(lhs) => delegate.sub_regions(rhs, lhs, span), ty::GenericArgKind::Type(lhs) => delegate.register_ty_outlives(lhs, rhs, span), - ty::GenericArgKind::Const(_) => panic!("const outlives: {lhs:?}: {rhs:?}"), + ty::GenericArgKind::Const(_) | ty::GenericArgKind::Outlives(_) => { + panic!("unexpected outlives arg: {lhs:?}: {rhs:?}") + } }, ty::RegionConstraint::Eq(ty::RegionEqPredicate(lhs, rhs)) => { delegate.equate_regions(lhs, rhs, span) diff --git a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs index 8933ac16b2b10..9deed8f548158 100644 --- a/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs +++ b/compiler/rustc_next_trait_solver/src/solve/eval_ctxt/mod.rs @@ -523,8 +523,8 @@ where // Remove the canonicalized universal vars, since we only care about stalled existentials. let mut sub_roots = Vec::new(); stalled_vars.retain(|arg| match arg.kind() { - // Lifetimes can never stall goals. 
- ty::GenericArgKind::Lifetime(_) => false, + // Lifetimes and outlives args can never stall goals. + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => false, ty::GenericArgKind::Type(ty) => match ty.kind() { ty::Infer(ty::TyVar(vid)) => { sub_roots.push(self.delegate.sub_unification_table_root_var(vid)); diff --git a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs index 931dd293973a1..f713c562327ce 100644 --- a/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs +++ b/compiler/rustc_next_trait_solver/src/solve/normalizes_to/opaque_types.rs @@ -71,6 +71,7 @@ where ty::GenericArgKind::Const(ct) => { self.structurally_normalize_const(goal.param_env, ct).map(Into::into) } + ty::GenericArgKind::Outlives(o) => Ok(o.into()), }))?; let opaque_type_key = ty::OpaqueTypeKey { def_id, args: normalized_args }; diff --git a/compiler/rustc_public/src/mir/visit.rs b/compiler/rustc_public/src/mir/visit.rs index e1d9cf31036e2..ab0cb39f95b82 100644 --- a/compiler/rustc_public/src/mir/visit.rs +++ b/compiler/rustc_public/src/mir/visit.rs @@ -214,7 +214,7 @@ macro_rules! make_mir_visitor { TerminatorKind::Drop { place, target: _, unwind: _ } => { self.visit_place(place, PlaceContext::MUTATING, location); } - TerminatorKind::Call { func, args, destination, target: _, unwind: _ } => { + TerminatorKind::Call { func, args, destination, target: _, unwind: _, .. 
} => { self.visit_operand(func, location); for arg in args { self.visit_operand(arg, location); diff --git a/compiler/rustc_public/src/unstable/convert/stable/mir.rs b/compiler/rustc_public/src/unstable/convert/stable/mir.rs index 0d04053aab76b..7b09122fcb2f6 100644 --- a/compiler/rustc_public/src/unstable/convert/stable/mir.rs +++ b/compiler/rustc_public/src/unstable/convert/stable/mir.rs @@ -744,6 +744,7 @@ impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { unwind, call_source: _, fn_span: _, + call_id: _, } => TerminatorKind::Call { func: func.stable(tables, cx), args: args.iter().map(|arg| arg.node.stable(tables, cx)).collect(), @@ -751,7 +752,7 @@ impl<'tcx> Stable<'tcx> for mir::TerminatorKind<'tcx> { target: target.map(|t| t.as_usize()), unwind: unwind.stable(tables, cx), }, - mir::TerminatorKind::TailCall { func: _, args: _, fn_span: _ } => todo!(), + mir::TerminatorKind::TailCall { func: _, args: _, fn_span: _, call_id: _ } => todo!(), mir::TerminatorKind::Assert { cond, expected, msg, target, unwind } => { TerminatorKind::Assert { cond: cond.stable(tables, cx), diff --git a/compiler/rustc_public/src/unstable/convert/stable/ty.rs b/compiler/rustc_public/src/unstable/convert/stable/ty.rs index 9a9576d47efd2..f8e45a4b1f379 100644 --- a/compiler/rustc_public/src/unstable/convert/stable/ty.rs +++ b/compiler/rustc_public/src/unstable/convert/stable/ty.rs @@ -207,6 +207,12 @@ impl<'tcx> Stable<'tcx> for ty::GenericArgKind<'tcx> { } ty::GenericArgKind::Type(ty) => GenericArgKind::Type(ty.stable(tables, cx)), ty::GenericArgKind::Const(cnst) => GenericArgKind::Const(cnst.stable(tables, cx)), + ty::GenericArgKind::Outlives(_) => { + // Outlives args are internal monomorphization metadata: they + // only appear on post-collection Instances, never on any + // GenericArgs exposed through the stable API. 
+ unreachable!("outlives arg in stable conversion") + } } } } diff --git a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs index 873ed9bb10398..3725d7c718ff9 100644 --- a/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs +++ b/compiler/rustc_sanitizers/src/cfi/typeid/itanium_cxx_abi/encode.rs @@ -94,6 +94,9 @@ fn encode_args<'tcx>( .skip_norm_wip(); s.push_str(&encode_const(tcx, c, ct_ty, dict, options)); } + GenericArgKind::Outlives(_) => { + // Outlives args are metadata-only; nothing to encode for CFI. + } } } s.push('E'); diff --git a/compiler/rustc_session/src/options.rs b/compiler/rustc_session/src/options.rs index 9580642ba72bd..336aac722dcf5 100644 --- a/compiler/rustc_session/src/options.rs +++ b/compiler/rustc_session/src/options.rs @@ -2478,6 +2478,37 @@ options! { "output statistics about monomorphization collection"), dump_mono_stats_format: DumpMonoStatsFormat = (DumpMonoStatsFormat::Markdown, parse_dump_mono_stats, [UNTRACKED], "the format to use for -Z dump-mono-stats (`markdown` (default) or `json`)"), + dump_post_mono_mir: SwitchWithOptPath = (SwitchWithOptPath::Disabled, + parse_switch_with_opt_path, [UNTRACKED], + "dump instance-specific MIR bodies produced by codegen_mir to files"), + dump_trait_cast_augmentation: Option = (None, parse_opt_string, [UNTRACKED], + "dump augmentation decisions (CallerOutlivesEnv, composed mapping, BV \ + nodes, outlives pairs, final augmented callee) per caller -> callee edge \ + to stderr. 
`all` matches every augmentation; a substring matches callers \ + whose printed name contains it."), + dump_trait_cast_canonicalization: bool = (false, parse_bool, [UNTRACKED], + "dump trait-cast cascade canonicalization decisions (depth-ordered \ + patching, signature-group deduplication, codegen emission) to stderr \ + (default: no)"), + dump_trait_cast_chain_composition: Option = (None, parse_opt_string, [UNTRACKED], + "dump per-link details of trait-cast call_id chain composition \ + (template input-slot maps, vid provenance resolutions, final walk-position \ + mapping with None entries) to stderr. `all` matches every invocation; a \ + substring matches callers whose printed name contains it."), + dump_trait_cast_erasure_safety: Option = (None, parse_opt_string, [UNTRACKED], + "dump trait-cast erasure-safety analysis decisions (binder-var \ + enumeration, where-clause derivation, supertrait chain, verdict) per \ + query to stderr. `all` dumps every query; a substring matches queries \ + whose super-trait printed name contains it."), + dump_trait_cast_sensitivity: Option = (None, parse_opt_string, [UNTRACKED], + "dump per-instance trait-cast sensitivity metadata (CastRelevantLifetimes, \ + sensitive call sites, pre-augmented callees) to stderr. `all` dumps every \ + instance whose sensitivity is non-empty or which is directly sensitive; a \ + substring matches instances whose printed name contains it."), + dump_trait_graph: Option = (None, parse_opt_string, [UNTRACKED], + "dump computed trait graph info for root supertraits matching the filter. \ + `all` dumps every root; a substring (e.g. `MyTrait`) matches roots whose \ + printed type contains it. Output goes to stderr."), #[rustc_lint_opt_deny_field_access("use `Session::dwarf_version` instead of this field")] dwarf_version: Option = (None, parse_opt_number, [TRACKED], "version of DWARF debug information to emit (default: 2 or 4, depending on platform)"), @@ -2526,6 +2557,8 @@ options! 
{ "whether each function should go in its own section"), future_incompat_test: bool = (false, parse_bool, [UNTRACKED], "forces all lints to be future incompatible, used for internal testing (default: no)"), + global_crate: Option = (None, parse_opt_bool, [TRACKED], + "explicitly set whether this crate is a 'global crate' (default: auto)"), graphviz_dark_mode: bool = (false, parse_bool, [UNTRACKED], "use dark-themed colors in graphviz output (default: no)"), graphviz_font: String = ("Courier, monospace".to_string(), parse_string, [UNTRACKED], @@ -2732,6 +2765,9 @@ options! { "print the LLVM optimization passes being run (default: no)"), print_mono_items: bool = (false, parse_bool, [UNTRACKED], "print the result of the monomorphization collection pass (default: no)"), + print_trait_cast_stats: bool = (false, parse_bool, [UNTRACKED], + "print summary statistics for the trait-cast monomorphization \ + pipeline to stderr (default: no)"), print_type_sizes: bool = (false, parse_bool, [UNTRACKED], "print layout information for each type encountered (default: no)"), proc_macro_backtrace: bool = (false, parse_bool, [UNTRACKED], diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index a9e7f1503b9ca..1cc5572112ddd 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -281,6 +281,14 @@ impl Session { if self.opts.unstable_opts.print_type_sizes || self.opts.unstable_opts.query_dep_graph || self.opts.unstable_opts.dump_mir.is_some() + || self.opts.unstable_opts.dump_post_mono_mir.enabled() + || self.opts.unstable_opts.dump_trait_cast_augmentation.is_some() + || self.opts.unstable_opts.dump_trait_cast_canonicalization + || self.opts.unstable_opts.dump_trait_cast_chain_composition.is_some() + || self.opts.unstable_opts.dump_trait_cast_erasure_safety.is_some() + || self.opts.unstable_opts.dump_trait_cast_sensitivity.is_some() + || self.opts.unstable_opts.dump_trait_graph.is_some() + || 
self.opts.unstable_opts.print_trait_cast_stats || self.opts.unstable_opts.unpretty.is_some() || self.prof.is_args_recording_enabled() || self.opts.output_types.contains_key(&OutputType::Mir) diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 981bfed363dcc..94277ff5540ea 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -2050,6 +2050,10 @@ symbols! { trace_macros, track_caller, trait_alias, + trait_cast_is_lifetime_erasure_safe, + trait_metadata_index, + trait_metadata_table, + trait_metadata_table_len, trait_ty, trait_upcasting, transmute, diff --git a/compiler/rustc_symbol_mangling/src/export.rs b/compiler/rustc_symbol_mangling/src/export.rs index bc29142252800..10d136c8be2d1 100644 --- a/compiler/rustc_symbol_mangling/src/export.rs +++ b/compiler/rustc_symbol_mangling/src/export.rs @@ -150,6 +150,10 @@ impl<'tcx> AbiHashStable<'tcx> for ty::GenericArgKind<'tcx> { match self { ty::GenericArgKind::Type(t) => t.abi_hash(tcx, hasher), ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Const(_) => unimplemented!(), + ty::GenericArgKind::Outlives(o) => { + o.longer().abi_hash(tcx, hasher); + o.shorter().abi_hash(tcx, hasher); + } } } } diff --git a/compiler/rustc_symbol_mangling/src/lib.rs b/compiler/rustc_symbol_mangling/src/lib.rs index c052037f05b39..8a80c30afdf25 100644 --- a/compiler/rustc_symbol_mangling/src/lib.rs +++ b/compiler/rustc_symbol_mangling/src/lib.rs @@ -256,7 +256,28 @@ fn compute_symbol_name<'tcx>( // the ID of the instantiating crate. This avoids symbol conflicts // in case the same instances is emitted in two crates of the same // project. 
- let avoid_cross_crate_conflicts = is_generic(instance) || is_globally_shared_function; + // + // Exception: transitively-delayed instances (trait-cast intrinsic + // callers and their transitive callers) are emitted only by the + // single global crate in a linkage, so the per-instantiating-crate + // suffix would cause upstream vtable references (mangled with the + // upstream's crate-id) and downstream bodies (mangled with the + // global bin's crate-id) to diverge. Stripping the suffix for these + // instances makes both sides converge on a single name; the + // "one global crate per linkage" invariant guarantees no conflict. + // + // Pre-compute `is_delayed` into a standalone binding rather than + // short-circuiting it into the `&&` expression: `compute_symbol_name` + // is called from `report_symbol_names` inside + // `tcx.dep_graph.with_ignore(...)` (for `#[rustc_symbol_name]` attr + // testing), and if the first forcing of + // `delayed_codegen_requests → collect_local_mono_items` happens + // under that ignore-context the resulting dep-graph state silently + // drops subsequent diagnostics. Eager evaluation guarantees the + // first forcing happens on a consistent path. + let is_delayed = tcx.is_transitively_delayed_instance(instance); + let avoid_cross_crate_conflicts = + (is_generic(instance) || is_globally_shared_function) && !is_delayed; let instantiating_crate = avoid_cross_crate_conflicts.then(compute_instantiating_crate); diff --git a/compiler/rustc_symbol_mangling/src/v0.rs b/compiler/rustc_symbol_mangling/src/v0.rs index 46a7e092e6bc9..9ab5485f686e8 100644 --- a/compiler/rustc_symbol_mangling/src/v0.rs +++ b/compiler/rustc_symbol_mangling/src/v0.rs @@ -383,8 +383,12 @@ impl<'tcx> Printer<'tcx> for V0SymbolMangler<'tcx> { }); // Encode impl generic params if the generic parameters contain non-region parameters - // and this isn't an inherent impl. 
- if impl_trait_ref.is_some() && args.iter().any(|a| a.has_non_region_param()) { + // (or Outlives entries from the mono collector's augmentation) and this isn't an + // inherent impl. Outlives entries don't set TypeFlags params, so check explicitly. + let has_outlives = args.iter().any(|a| matches!(a.kind(), GenericArgKind::Outlives(_))); + if impl_trait_ref.is_some() + && (args.iter().any(|a| a.has_non_region_param()) || has_outlives) + { self.print_path_with_generic_args( |this| { this.path_append_ns( @@ -957,6 +961,14 @@ impl<'tcx> Printer<'tcx> for V0SymbolMangler<'tcx> { self.push("K"); c.print(self)?; } + GenericArgKind::Outlives(o) => { + // Mangle outlives entries as "Oo_E" + self.push("Oo"); + self.push(&format!("{}", o.longer())); + self.push("_"); + self.push(&format!("{}", o.shorter())); + self.push("E"); + } } } self.push("E"); diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs index 86fd705e68aea..91a76d55e3a78 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/mod.rs @@ -678,6 +678,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { ty::GenericArgKind::Const(ct) => { value.push_normal(ct.to_string()); } + ty::GenericArgKind::Outlives(_) => continue, // Highlight all the type arguments that aren't at `pos` and compare // the type argument at `pos` and `other_ty`. 
ty::GenericArgKind::Type(type_arg) => { @@ -1152,6 +1153,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let ca2 = arg2.expect_const(); maybe_highlight(ca1, ca2, &mut values, self.tcx); } + ty::GenericArgKind::Outlives(_) => continue, } } diff --git a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs index acc8fae24a6d0..8e78865c5afcc 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/infer/need_type_info.rs @@ -641,6 +641,7 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { GenericArgKind::Lifetime(_) => bug!("unexpected lifetime"), GenericArgKind::Type(_) => self.next_ty_var(DUMMY_SP).into(), GenericArgKind::Const(_) => self.next_const_var(DUMMY_SP).into(), + GenericArgKind::Outlives(_) => bug!("unexpected outlives"), } })) .unwrap(); @@ -893,6 +894,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { GenericArgKind::Lifetime(_) => 0, // erased GenericArgKind::Type(ty) => self.ty_cost(ty), GenericArgKind::Const(_) => 3, // some non-zero value + GenericArgKind::Outlives(_) => 0, } } fn ty_cost(self, ty: Ty<'tcx>) -> usize { @@ -1046,6 +1048,7 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> { walker.skip_current_subtree(); } } + GenericArgKind::Outlives(_) => {} } } false diff --git a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs index 7cead434bdad3..2d328e8d6657a 100644 --- a/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs +++ b/compiler/rustc_trait_selection/src/error_reporting/traits/fulfillment_errors.rs @@ -110,6 +110,26 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { let leaf_trait_predicate = self.resolve_vars_if_possible(bound_predicate.rebind(trait_predicate)); + // Bounded intertrait casting: when the ROOT obligation is + // `T: 
TraitMetadataTable`, try to emit a specialized + // trait-cast diagnostic that distinguishes the three failure + // modes (target not reachable / missing root bound / source + // not in graph) instead of the generic "not implemented" error. + // We look at the root — not the leaf — because the leaf is + // typically a sub-obligation (e.g. `Self: Sized` implied by + // `TraitMetadataTable: MetaSized`) that hides the real issue. + if let ty::PredicateKind::Clause(ty::ClauseKind::Trait(root_tp)) = + root_obligation.predicate.kind().skip_binder() + && Some(root_tp.def_id()) + == tcx.lang_items().trait_metadata_table_trait() + && let Some(guar) = self.try_report_trait_cast_error( + root_obligation, + root_obligation.predicate.kind().rebind(root_tp), + ) + { + return guar; + } + // Let's use the root obligation as the main message, when we care about the // most general case ("X doesn't implement Pattern<'_>") over the case that // happened to fail ("char doesn't implement Fn(&mut char)"). @@ -3706,4 +3726,73 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> { } } } + + /// Specialized diagnostics for failures of the `TraitMetadataTable` lang-item + /// bound used by the bounded intertrait casting feature. Returns `Some` when + /// the failure can be classified (emits the diagnostic); `None` when the + /// caller should fall back to the generic unimplemented-trait reporter. + /// + /// Cases handled: + /// + /// * `Self == I` (e.g. `dyn Root: TraitMetadataTable`) — the root + /// trait lacks the required `TraitMetadataTable` supertrait + /// bound. Emits [`crate::errors::MissingRootBound`]. + /// + /// * `Self != I`, both `dyn Trait`, and `Self`'s elaborated supertrait + /// def-id set does not contain `I`'s principal def-id — the trait named + /// by `Self` is not reachable from the trait graph rooted at `I`. Emits + /// [`crate::errors::TargetNotReachable`]. 
+ fn try_report_trait_cast_error( + &self, + obligation: &PredicateObligation<'tcx>, + trait_predicate: ty::PolyTraitPredicate<'tcx>, + ) -> Option { + let tcx = self.tcx; + let self_ty = trait_predicate.self_ty().skip_binder(); + let tp = trait_predicate.skip_binder(); + let i_ty = tp.trait_ref.args.type_at(1); + + if self_ty.has_non_region_infer() || i_ty.has_non_region_infer() { + return None; + } + + // Case: `Self == I` — root trait missing `TraitMetadataTable`. + if self_ty == i_ty + && let ty::Dynamic(preds, ..) = self_ty.kind() + && let Some(principal) = preds.principal() + { + let guar = tcx.dcx().emit_err(crate::errors::MissingRootBound { + span: obligation.cause.span, + root: tcx.item_name(principal.skip_binder().def_id), + }); + return Some(guar); + } + + // Case: `Self != I`, both `dyn Trait` — check whether `Self`'s + // principal trait is reachable from `I`'s principal via the + // supertrait chain. If not, `Self` is not in `I`'s trait graph. + if let ty::Dynamic(self_preds, ..) = self_ty.kind() + && let ty::Dynamic(root_preds, ..) 
= i_ty.kind() + && let Some(self_principal) = self_preds.principal() + && let Some(root_principal) = root_preds.principal() + { + let self_principal_did = self_principal.skip_binder().def_id; + let root_principal_did = root_principal.skip_binder().def_id; + if self_principal_did != root_principal_did { + let super_def_ids: FxHashSet = + rustc_middle::ty::elaborate::supertrait_def_ids(tcx, self_principal_did) + .collect(); + if !super_def_ids.contains(&root_principal_did) { + let guar = tcx.dcx().emit_err(crate::errors::TargetNotReachable { + span: obligation.cause.span, + target: tcx.item_name(self_principal_did), + root: tcx.item_name(root_principal_did), + }); + return Some(guar); + } + } + } + + None + } } diff --git a/compiler/rustc_trait_selection/src/errors.rs b/compiler/rustc_trait_selection/src/errors.rs index 1656493fc3093..2b93ac34b8d43 100644 --- a/compiler/rustc_trait_selection/src/errors.rs +++ b/compiler/rustc_trait_selection/src/errors.rs @@ -2047,3 +2047,31 @@ pub(crate) struct NonGenericOpaqueTypeParam<'a, 'tcx> { }")] pub param_span: Span, } + +// ── Bounded intertrait casting diagnostics ──────────────────────────────── + +/// Diagnostic emitted when a trait is used as a `cast!` root but does not +/// carry `TraitMetadataTable` as a supertrait bound. +#[derive(Diagnostic)] +#[diag("`{$root}` cannot be used as a cast root: missing `TraitMetadataTable` bound")] +#[help("add `TraitMetadataTable` as a supertrait bound of `{$root}`")] +pub(crate) struct MissingRootBound { + #[primary_span] + #[label("`TraitMetadataTable` is not a supertrait of `{$root}`")] + pub span: Span, + pub root: Symbol, +} + +/// Diagnostic emitted when a type in a `cast!` expression is not reachable +/// from the root supertrait's trait graph. Covers both the target-trait and +/// the source-dyn-type cases (either unreachable yields the same remedy). 
+#[derive(Diagnostic)] +#[diag("`{$target}` is not in the trait graph rooted at `{$root}`")] +#[note("`{$target}` does not have `{$root}` as a (transitive) supertrait")] +#[help("add `{$root}` as a supertrait bound on `{$target}`")] +pub(crate) struct TargetNotReachable { + #[primary_span] + pub span: Span, + pub target: Symbol, + pub root: Symbol, +} diff --git a/compiler/rustc_trait_selection/src/opaque_types.rs b/compiler/rustc_trait_selection/src/opaque_types.rs index 28b9bf21eee59..165049da52f96 100644 --- a/compiler/rustc_trait_selection/src/opaque_types.rs +++ b/compiler/rustc_trait_selection/src/opaque_types.rs @@ -95,6 +95,7 @@ pub fn opaque_type_has_defining_use_args<'tcx>( }, GenericArgKind::Type(ty) => matches!(ty.kind(), ty::Param(_)), GenericArgKind::Const(ct) => matches!(ct.kind(), ty::ConstKind::Param(_)), + GenericArgKind::Outlives(_) => continue, }; if arg_is_param { diff --git a/compiler/rustc_trait_selection/src/solve/delegate.rs b/compiler/rustc_trait_selection/src/solve/delegate.rs index 05ecc4725a7b6..20454887d1087 100644 --- a/compiler/rustc_trait_selection/src/solve/delegate.rs +++ b/compiler/rustc_trait_selection/src/solve/delegate.rs @@ -166,6 +166,7 @@ impl<'tcx> rustc_next_trait_solver::delegate::SolverDelegate for SolverDelegate< } ty::GenericArgKind::Type(_) => self.next_ty_var(span).into(), ty::GenericArgKind::Const(_) => self.next_const_var(span).into(), + ty::GenericArgKind::Outlives(_) => unreachable!(), } } diff --git a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs index 4e8ee9ed426c7..5aa1cb9b73e1e 100644 --- a/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs +++ b/compiler/rustc_trait_selection/src/traits/dyn_compatibility.rs @@ -188,7 +188,20 @@ fn predicates_reference_self( // impossible to make into existential bounds without eager resolution // or something. // e.g. `trait A: B`. 
- predicate_references_self(tcx, trait_def_id, clause, sp, AllowSelfProjections::No) + // + // Exception: `TraitMetadataTable>` bounds + // are safe because any well-formed `dyn T` resolves + // `Self::Assoc` to `X` via the projection binding. We allow self + // projections (but not bare `Self`) in this position. + let allow_self_projections = if let ty::ClauseKind::Trait(ref data) = + clause.kind().skip_binder() + && Some(data.def_id()) == tcx.lang_items().trait_metadata_table_trait() + { + AllowSelfProjections::Yes + } else { + AllowSelfProjections::No + }; + predicate_references_self(tcx, trait_def_id, clause, sp, allow_self_projections) }) .collect() } diff --git a/compiler/rustc_trait_selection/src/traits/select/mod.rs b/compiler/rustc_trait_selection/src/traits/select/mod.rs index 17a5f14767eec..19a25d0479e90 100644 --- a/compiler/rustc_trait_selection/src/traits/select/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/select/mod.rs @@ -1815,6 +1815,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } ty::GenericArgKind::Type(ty) => self.infcx.shallow_resolve(ty) != ty, ty::GenericArgKind::Lifetime(_) => false, + ty::GenericArgKind::Outlives(_) => false, } }) { diff --git a/compiler/rustc_ty_utils/src/ty.rs b/compiler/rustc_ty_utils/src/ty.rs index cb08ca0757876..1f75d88f39dfa 100644 --- a/compiler/rustc_ty_utils/src/ty.rs +++ b/compiler/rustc_ty_utils/src/ty.rs @@ -322,6 +322,9 @@ fn unsizing_params_for_adt<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> DenseBitSe ty::ConstKind::Param(p) => Some(p.index), _ => None, }, + + // Outlives args are metadata-only; can't unsize + ty::GenericArgKind::Outlives(_) => None, }; // The last field of the structure has to exist and contain type/const parameters. 
diff --git a/compiler/rustc_type_ir/src/canonical.rs b/compiler/rustc_type_ir/src/canonical.rs index be5e483c808d4..f3f6a47fe2a72 100644 --- a/compiler/rustc_type_ir/src/canonical.rs +++ b/compiler/rustc_type_ir/src/canonical.rs @@ -241,6 +241,8 @@ impl CanonicalVarValues { ty::GenericArgKind::Const(ct) => { matches!(ct.kind(), ty::ConstKind::Bound(ty::BoundVarIndexKind::Canonical, bc) if bc.var().as_usize() == bv) } + // Outlives args are pure metadata, not identity-relevant. + ty::GenericArgKind::Outlives(_) => true, }) } @@ -272,6 +274,8 @@ impl CanonicalVarValues { return false; } } + // Outlives args are pure metadata, not identity-relevant. + ty::GenericArgKind::Outlives(_) => {} } } diff --git a/compiler/rustc_type_ir/src/elaborate.rs b/compiler/rustc_type_ir/src/elaborate.rs index be3661518d7d9..3a394db240a66 100644 --- a/compiler/rustc_type_ir/src/elaborate.rs +++ b/compiler/rustc_type_ir/src/elaborate.rs @@ -390,6 +390,8 @@ pub fn elaborate_outlives_assumptions( for ty::OutlivesPredicate(arg1, r2) in assumptions { collected.insert(ty::OutlivesPredicate(arg1, r2)); match arg1.kind() { + // Outlives args are pure metadata; skip. + ty::GenericArgKind::Outlives(_) => {} // Elaborate the components of an type, since we may have substituted a // generic coroutine with a more specific type. 
ty::GenericArgKind::Type(ty1) => { diff --git a/compiler/rustc_type_ir/src/flags.rs b/compiler/rustc_type_ir/src/flags.rs index 50c30f4252703..58cd9905f17e1 100644 --- a/compiler/rustc_type_ir/src/flags.rs +++ b/compiler/rustc_type_ir/src/flags.rs @@ -151,6 +151,9 @@ pub struct FlagComputation { /// see `Ty::outer_exclusive_binder` for details pub outer_exclusive_binder: ty::DebruijnIndex, + /// see `WithCachedTypeInfo::region_slots` for details + pub region_slots: u32, + interner: std::marker::PhantomData, } @@ -159,6 +162,7 @@ impl FlagComputation { FlagComputation { flags: TypeFlags::empty(), outer_exclusive_binder: ty::INNERMOST, + region_slots: 0, interner: std::marker::PhantomData, } } @@ -187,6 +191,7 @@ impl FlagComputation { for c in clauses { result.add_flags(c.as_predicate().flags()); result.add_exclusive_binder(c.as_predicate().outer_exclusive_binder()); + result.add_region_slots(c.as_predicate().region_slots()); } result } @@ -208,6 +213,10 @@ impl FlagComputation { self.outer_exclusive_binder = self.outer_exclusive_binder.max(exclusive_binder); } + fn add_region_slots(&mut self, n: u32) { + self.region_slots = self.region_slots.saturating_add(n); + } + /// Adds the flags/depth from a set of types that appear within the current type, but within a /// region binder. 
fn bound_computation(&mut self, value: ty::Binder, f: F) @@ -223,6 +232,7 @@ impl FlagComputation { f(&mut computation, value.skip_binder()); self.add_flags(computation.flags); + self.add_region_slots(computation.region_slots); // The types that contributed to `computation` occurred within // a region binder, so subtract one from the region depth @@ -361,6 +371,7 @@ impl FlagComputation { fn add_ty_pat(&mut self, pat: ::Pat) { self.add_flags(pat.flags()); self.add_exclusive_binder(pat.outer_exclusive_binder()); + self.add_region_slots(pat.region_slots()); } fn add_predicate(&mut self, binder: ty::Binder>) { @@ -438,6 +449,7 @@ impl FlagComputation { fn add_ty(&mut self, ty: I::Ty) { self.add_flags(ty.flags()); self.add_exclusive_binder(ty.outer_exclusive_binder()); + self.add_region_slots(ty.region_slots()); } fn add_tys(&mut self, tys: I::Tys) { @@ -448,6 +460,7 @@ impl FlagComputation { fn add_region(&mut self, r: I::Region) { self.add_flags(r.flags()); + self.add_region_slots(1); if let ty::ReBound(ty::BoundVarIndexKind::Bound(debruijn), _) = r.kind() { self.add_bound_var(debruijn); } @@ -456,6 +469,7 @@ impl FlagComputation { fn add_const(&mut self, c: I::Const) { self.add_flags(c.flags()); self.add_exclusive_binder(c.outer_exclusive_binder()); + self.add_region_slots(c.region_slots()); } fn add_const_kind(&mut self, c: &ty::ConstKind) { @@ -520,6 +534,8 @@ impl FlagComputation { ty::GenericArgKind::Type(ty) => self.add_ty(ty), ty::GenericArgKind::Lifetime(lt) => self.add_region(lt), ty::GenericArgKind::Const(ct) => self.add_const(ct), + // Outlives args are pure metadata with no type flags. 
+ ty::GenericArgKind::Outlives(_) => {} } } } diff --git a/compiler/rustc_type_ir/src/generic_arg.rs b/compiler/rustc_type_ir/src/generic_arg.rs index 5d612740fdd84..47aa4eae88f32 100644 --- a/compiler/rustc_type_ir/src/generic_arg.rs +++ b/compiler/rustc_type_ir/src/generic_arg.rs @@ -5,6 +5,20 @@ use rustc_type_ir_macros::GenericTypeVisitable; use crate::Interner; +/// Raw data for an outlives relationship between two region positions. +/// `Outlives { longer, shorter }` means the region at position `longer` +/// outlives the region at position `shorter`. `usize::MAX` represents +/// `'static`. +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[cfg_attr( + feature = "nightly", + derive(Decodable_NoContext, Encodable_NoContext, HashStable_NoContext) +)] +pub struct OutlivesArgData { + pub longer: usize, + pub shorter: usize, +} + #[derive_where(Clone, Copy, PartialEq, Debug; I: Interner)] #[derive(GenericTypeVisitable)] #[cfg_attr( @@ -15,6 +29,10 @@ pub enum GenericArgKind { Lifetime(I::Region), Type(I::Ty), Const(I::Const), + /// An outlives relation between two region positions within an + /// instance's generic arg list. The interned `OutlivesArg` wraps an + /// `OutlivesArgData` holding the two position indices. 
+ Outlives(I::OutlivesArg), } impl Eq for GenericArgKind {} diff --git a/compiler/rustc_type_ir/src/inherent.rs b/compiler/rustc_type_ir/src/inherent.rs index a336313e90b8f..d9c8e97079cf0 100644 --- a/compiler/rustc_type_ir/src/inherent.rs +++ b/compiler/rustc_type_ir/src/inherent.rs @@ -282,6 +282,22 @@ pub trait Region>: } } +pub trait OutlivesArg>: + Copy + Debug + Hash + Eq + Into +{ + fn new(interner: I, longer: usize, shorter: usize) -> Self; + + fn data(self) -> ty::OutlivesArgData; + + fn longer(self) -> usize { + self.data().longer + } + + fn shorter(self) -> usize { + self.data().shorter + } +} + #[rust_analyzer::prefer_underscore_import] pub trait Const>: Copy @@ -359,7 +375,7 @@ pub trait GenericArg>: { fn as_term(&self) -> Option { match self.kind() { - ty::GenericArgKind::Lifetime(_) => None, + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => None, ty::GenericArgKind::Type(ty) => Some(ty.into()), ty::GenericArgKind::Const(ct) => Some(ct.into()), } @@ -391,7 +407,7 @@ pub trait GenericArg>: fn is_non_region_infer(self) -> bool { match self.kind() { - ty::GenericArgKind::Lifetime(_) => false, + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => false, ty::GenericArgKind::Type(ty) => ty.is_ty_var(), ty::GenericArgKind::Const(ct) => ct.is_ct_var(), } diff --git a/compiler/rustc_type_ir/src/interner.rs b/compiler/rustc_type_ir/src/interner.rs index f71f7c7c1ab38..4a59a3abda377 100644 --- a/compiler/rustc_type_ir/src/interner.rs +++ b/compiler/rustc_type_ir/src/interner.rs @@ -163,6 +163,10 @@ pub trait Interner: type EarlyParamRegion: ParamLike; type LateParamRegion: Copy + Debug + Hash + Eq; + /// Interned outlives relation between two region positions within a + /// generic arg list; carried as a `GenericArgKind::Outlives` entry. 
+ type OutlivesArg: OutlivesArg; + type RegionAssumptions: Copy + Debug + Hash diff --git a/compiler/rustc_type_ir/src/lang_items.rs b/compiler/rustc_type_ir/src/lang_items.rs index f1c45a4d98b5e..e9724c0693a25 100644 --- a/compiler/rustc_type_ir/src/lang_items.rs +++ b/compiler/rustc_type_ir/src/lang_items.rs @@ -50,6 +50,7 @@ pub enum SolverTraitLangItem { PointeeSized, PointeeTrait, Sized, + TraitMetadataTable, TransmuteTrait, TrivialClone, Tuple, diff --git a/compiler/rustc_type_ir/src/outlives.rs b/compiler/rustc_type_ir/src/outlives.rs index 5b4e44dc89ebe..40408f0d3cb1a 100644 --- a/compiler/rustc_type_ir/src/outlives.rs +++ b/compiler/rustc_type_ir/src/outlives.rs @@ -91,7 +91,7 @@ impl TypeVisitor for OutlivesCollector<'_, I> { // for further background and discussion. for child in args.iter() { match child.kind() { - ty::GenericArgKind::Lifetime(_) => {} + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => {} ty::GenericArgKind::Type(_) | ty::GenericArgKind::Const(_) => { child.visit_with(self); } diff --git a/compiler/rustc_type_ir/src/ty_info.rs b/compiler/rustc_type_ir/src/ty_info.rs index 5e297a51f0ce7..8d0fa72398917 100644 --- a/compiler/rustc_type_ir/src/ty_info.rs +++ b/compiler/rustc_type_ir/src/ty_info.rs @@ -52,6 +52,16 @@ pub struct WithCachedTypeInfo { /// De Bruijn indices within the type are contained within `0..D` /// (exclusive). pub outer_exclusive_binder: DebruijnIndex, + + /// Number of region occurrences reached by a `TypeVisitor` DFS over + /// `internee`. Matches the count a visitor would produce by + /// incrementing once per `visit_region` call, regardless of region + /// kind (`ReBound`, `ReEarlyParam`, `ReStatic`, `ReErased`, …). + /// + /// Computed once by `FlagComputation` during interning and read in + /// O(1) thereafter. Used to avoid quadratic re-walks in consumers + /// that need the occurrence count in a hot loop. 
+ pub region_slots: u32, } impl PartialEq for WithCachedTypeInfo { diff --git a/compiler/rustc_type_ir/src/visit.rs b/compiler/rustc_type_ir/src/visit.rs index a078b860be774..f03ad78e61628 100644 --- a/compiler/rustc_type_ir/src/visit.rs +++ b/compiler/rustc_type_ir/src/visit.rs @@ -234,6 +234,11 @@ impl, S> TypeVisitable for indexmap::IndexSe pub trait Flags { fn flags(&self) -> TypeFlags; fn outer_exclusive_binder(&self) -> ty::DebruijnIndex; + /// Number of region occurrences reached by a `TypeVisitor` DFS over + /// this value. Cached at interning for `Ty` / `Const` / `Predicate` / + /// `Clauses` so consumers can obtain the count in O(1) without + /// re-walking the type tree. + fn region_slots(&self) -> u32; } pub trait TypeVisitableExt: TypeVisitable { diff --git a/compiler/rustc_type_ir/src/walk.rs b/compiler/rustc_type_ir/src/walk.rs index 96ae6f1c06146..93a77055c6885 100644 --- a/compiler/rustc_type_ir/src/walk.rs +++ b/compiler/rustc_type_ir/src/walk.rs @@ -149,7 +149,7 @@ fn push_inner(stack: &mut TypeWalkerStack, parent: I::GenericArg stack.push(bound_ty.skip_binder().into()); } }, - ty::GenericArgKind::Lifetime(_) => {} + ty::GenericArgKind::Lifetime(_) | ty::GenericArgKind::Outlives(_) => {} ty::GenericArgKind::Const(parent_ct) => match parent_ct.kind() { ty::ConstKind::Infer(_) | ty::ConstKind::Param(_) diff --git a/library/alloc/src/boxed.rs b/library/alloc/src/boxed.rs index ef7422302d974..0d615b8a08dd0 100644 --- a/library/alloc/src/boxed.rs +++ b/library/alloc/src/boxed.rs @@ -2479,3 +2479,45 @@ unsafe impl Allocator for Box { unsafe { (**self).shrink(ptr, old_layout, new_layout) } } } + +#[unstable(feature = "trait_cast", issue = "none")] +impl<'a, T, U, I, A> core::trait_cast::TraitCast for Box +where + I: core::marker::MetaSized + + core::ptr::Pointee> + + core::marker::TraitMetadataTable, + T: core::marker::MetaSized + core::marker::TraitMetadataTable + 'a, + U: core::marker::MetaSized + + core::ptr::Pointee> + + 
core::marker::TraitMetadataTable + + 'a, + A: Allocator, +{ + type Target = Box; + unsafe fn unchecked_cast(self) -> Result, core::trait_cast::TraitCastError> { + unsafe { + let (obj_graph_id, table) = + >::derived_metadata_table(&*self); + let (this, alloc) = Box::into_raw_with_allocator(self); + let (crate_graph_id, idx) = core::intrinsics::trait_metadata_index::(); + if crate_graph_id as *const u8 != obj_graph_id as *const u8 { + return Err(core::trait_cast::TraitCastError::ForeignTraitGraph(Box::from_raw_in( + this, alloc, + ))); + } + + let table_len = core::intrinsics::trait_metadata_table_len::(); + let table: &[Option>] = + &*core::ptr::from_raw_parts(table.as_ptr(), table_len); + + let (p, _) = (this as *const T).to_raw_parts(); + let Some(Some(vtable)) = table.get(idx) else { + let this = Box::from_raw_in(this, alloc); + return Err(core::trait_cast::TraitCastError::UnsatisfiedObligation(this)); + }; + let metadata: core::ptr::DynMetadata = core::mem::transmute(vtable); + let p: *mut U = core::ptr::from_raw_parts_mut(p as *mut (), metadata); + Ok(Box::from_raw_in(p, alloc)) + } + } +} diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index d85a63999fe03..a811f7834521d 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -143,6 +143,7 @@ #![feature(ptr_metadata)] #![feature(rev_into_inner)] #![feature(set_ptr_value)] +#![feature(sized_hierarchy)] #![feature(sized_type_properties)] #![feature(slice_from_ptr_range)] #![feature(slice_index_methods)] @@ -152,6 +153,7 @@ #![feature(std_internals)] #![feature(temporary_niche_types)] #![feature(titlecase)] +#![feature(trait_cast)] #![feature(transmutability)] #![feature(trivial_clone)] #![feature(trusted_fused)] diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index 4e6d886658595..7afd514539e77 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -4600,3 +4600,45 @@ unsafe impl Allocator for Rc { unsafe { (**self).shrink(ptr, old_layout, new_layout) 
} } } + +#[unstable(feature = "trait_cast", issue = "none")] +impl<'a, T, U, I, A> core::trait_cast::TraitCast for Rc +where + I: core::marker::MetaSized + + core::ptr::Pointee> + + core::marker::TraitMetadataTable, + T: core::marker::MetaSized + core::marker::TraitMetadataTable + 'a, + U: core::marker::MetaSized + + core::ptr::Pointee> + + core::marker::TraitMetadataTable + + 'a, + A: Allocator, +{ + type Target = Rc; + unsafe fn unchecked_cast(self) -> Result, core::trait_cast::TraitCastError> { + unsafe { + let (obj_graph_id, table) = + >::derived_metadata_table(&*self); + let (this, alloc) = Rc::into_raw_with_allocator(self); + let (crate_graph_id, idx) = core::intrinsics::trait_metadata_index::(); + if crate_graph_id as *const u8 != obj_graph_id as *const u8 { + return Err(core::trait_cast::TraitCastError::ForeignTraitGraph(Rc::from_raw_in( + this, alloc, + ))); + } + + let table_len = core::intrinsics::trait_metadata_table_len::(); + let table: &[Option>] = + &*core::ptr::from_raw_parts(table.as_ptr(), table_len); + + let (p, _) = (this as *const T).to_raw_parts(); + let Some(Some(vtable)) = table.get(idx) else { + let this = Rc::from_raw_in(this, alloc); + return Err(core::trait_cast::TraitCastError::UnsatisfiedObligation(this)); + }; + let metadata: core::ptr::DynMetadata = core::mem::transmute(vtable); + let p = core::ptr::from_raw_parts(p, metadata); + Ok(Rc::from_raw_in(p, alloc)) + } + } +} diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 8004dd38d073b..7b5e63a008878 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -4969,3 +4969,44 @@ unsafe impl Allocator for Arc { unsafe { (**self).shrink(ptr, old_layout, new_layout) } } } + +#[unstable(feature = "trait_cast", issue = "none")] +impl<'a, T, U, I, A> core::trait_cast::TraitCast for Arc +where + I: core::marker::MetaSized + + core::ptr::Pointee> + + core::marker::TraitMetadataTable, + T: core::marker::MetaSized + core::marker::TraitMetadataTable + 'a, + U: 
core::marker::MetaSized + + core::ptr::Pointee> + + core::marker::TraitMetadataTable + + 'a, + A: Allocator, +{ + type Target = Arc; + unsafe fn unchecked_cast(self) -> Result, core::trait_cast::TraitCastError> { + unsafe { + let (obj_graph_id, table) = + >::derived_metadata_table(&*self); + let (this, alloc) = Arc::into_raw_with_allocator(self); + let (crate_graph_id, idx) = core::intrinsics::trait_metadata_index::(); + if crate_graph_id as *const u8 != obj_graph_id as *const u8 { + return Err(core::trait_cast::TraitCastError::ForeignTraitGraph(Arc::from_raw_in( + this, alloc, + ))); + } + let table_len = core::intrinsics::trait_metadata_table_len::(); + let table: &[Option>] = + &*core::ptr::from_raw_parts(table.as_ptr(), table_len); + + let (p, _) = (this as *const T).to_raw_parts(); + let Some(Some(vtable)) = table.get(idx) else { + let this = Arc::from_raw_in(this, alloc); + return Err(core::trait_cast::TraitCastError::UnsatisfiedObligation(this)); + }; + let metadata: core::ptr::DynMetadata = core::mem::transmute(vtable); + let p = core::ptr::from_raw_parts(p, metadata); + Ok(Arc::from_raw_in(p, alloc)) + } + } +} diff --git a/library/core/src/intrinsics/mod.rs b/library/core/src/intrinsics/mod.rs index 94d0c7eab9227..a1a2ac5871889 100644 --- a/library/core/src/intrinsics/mod.rs +++ b/library/core/src/intrinsics/mod.rs @@ -3589,3 +3589,69 @@ pub const fn va_copy<'f>(src: &VaList<'f>) -> VaList<'f> { pub const unsafe fn va_end(ap: &mut VaList<'_>) { /* deliberately does nothing */ } + +/// Retrieve the index of `Trait`'s vtable in the slice returned via +/// "TraitMetadataTable::derived_metadata_table". +/// The specific index value returned is implementation defined and subject to whim. +/// The value returned is constant for a given `Trait` and `SuperTrait`, but will not be "known +/// enough" to be `const fn` due to the need for a global computation. +/// Note: this value can only be computed globally, i.e., over all crates in the binary. 
+/// The `&'static u8` is a unique address per global crate only. It is independent of +/// the `SuperTrait` and `Trait` generic params. +#[unstable(feature = "trait_cast", issue = "none")] +#[rustc_nounwind] +#[rustc_intrinsic] +pub unsafe fn trait_metadata_index() -> (&'static u8, usize) +where + SuperTrait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable, + Trait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable; + +/// Retrieve the slice returned via "TraitMetadataTable::derived_metadata_table" for the given `SuperTrait`. +/// Calling this intrinsic forces the caller to be delayed until after global monomorphization. +/// The value returned is constant for a given `ConcreteType` and `SuperTrait`, but will not be "known +/// enough" to be `const fn` due to the need for a global computation. +/// Note: this value can only be computed globally, i.e., over all crates in the binary. +/// The `&'static u8` is a unique address per global crate only. It is independent of +/// the `SuperTrait` and `Trait` generic params. +#[unstable(feature = "trait_cast", issue = "none")] +#[rustc_nounwind] +#[rustc_intrinsic] +pub unsafe fn trait_metadata_table() +-> (&'static u8, ptr::NonNull>>) +where + SuperTrait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable, + ConcreteType: Sized + crate::marker::TraitMetadataTable; + +/// Return the length of the metadata table for the given `SuperTrait`. +#[unstable(feature = "trait_cast", issue = "none")] +#[rustc_nounwind] +#[rustc_intrinsic] +pub unsafe fn trait_metadata_table_len() -> usize +where + SuperTrait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable; + +/// Return true iff casting to `TargetTrait` (within the graph rooted at `SuperTrait`) +/// is safe with respect to lifetime erasure. 
Checks that every lifetime in +/// `TargetTrait`'s binder is expressible through `SuperTrait`'s binder and that +/// the concrete outlives relationships at the call site establish equivalence. +/// Obligation checks are separated from the metadata table entries to facilitate +/// lifetime binders. +#[unstable(feature = "trait_cast", issue = "none")] +#[rustc_nounwind] +#[rustc_intrinsic] +pub unsafe fn trait_cast_is_lifetime_erasure_safe() -> bool +where + SuperTrait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable, + TargetTrait: crate::marker::MetaSized + + ptr::Pointee> + + crate::marker::TraitMetadataTable; diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index bdc1c48f70dfe..e94ca6134cadd 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -324,6 +324,8 @@ pub mod random; pub mod range; pub mod result; pub mod sync; +#[unstable(feature = "trait_cast", issue = "none")] +pub mod trait_cast; #[unstable(feature = "unsafe_binders", issue = "130516")] pub mod unsafe_binder; diff --git a/library/core/src/marker.rs b/library/core/src/marker.rs index f56a4d7308e90..719c278883748 100644 --- a/library/core/src/marker.rs +++ b/library/core/src/marker.rs @@ -176,6 +176,63 @@ pub trait MetaSized: PointeeSized { // Empty } +/// Since this value can only be known globally, the table is computed only for +/// the global crate. +/// It will be implemented for all types and traits that implement/inherit from `SuperTrait`. +/// `SuperTrait` must be a trait object, i.e., `dyn Trait`; `[_]`/`str`/etc is not allowed. +/// +/// Marked `#[rustc_coinductive]` to allow coinductive resolution of cycles arising from +/// root supertraits that inherit from `TraitMetadataTable` (e.g., +/// `trait Foo: TraitMetadataTable`). Effectively `#[rustc_deny_explicit_impl]` +/// due to the blanket impl below. 
+/// +/// The blanket impl intentionally omits an `Unsize` bound to avoid a +/// cycle in the trait solver: proving `S: Unsize` requires `S: Root`, which +/// requires the supertrait `S: TraitMetadataTable`, which would cycle back +/// through `Unsize`. Instead, the impl applies unconditionally for all `Sized` types; +/// the actual constraint that `S` implements the root supertrait is enforced by the +/// supertrait relationship itself (the user must write `impl Root for S`). +#[unstable(feature = "trait_cast", issue = "none")] +#[lang = "trait_metadata_table"] +#[rustc_coinductive] +#[doc(hidden)] +pub trait TraitMetadataTable: MetaSized +where + SuperTrait: MetaSized + crate::ptr::Pointee>, +{ + /// Retrieval should /really/ be via a "virtual const" and not a virtual function call. + /// The returned slice is a static array of all trait vtables for this concrete type. + /// The order of the array is implementation defined and subject to whim, but will be the + /// same for a given `SuperTrait`. + /// Effectively a wrapper around `core::intrinsics::trait_metadata_table::()`. + /// Must not dereference any part of `self`. + fn derived_metadata_table( + &self, + ) -> (&'static u8, crate::ptr::NonNull>>); +} + +/// Implementation for all `Sized` types. The actual constraint that a type implements the +/// root supertrait is enforced by the supertrait relationship, not by this impl's +/// where-clauses (to avoid a cycle through `Unsize`). +/// +/// The `SuperTrait: TraitMetadataTable` bound is required by the intrinsic and +/// is satisfied via the object candidate (vtable dispatch) when `SuperTrait = dyn Root`, +/// since `TraitMetadataTable` is a supertrait of `Root`. +#[unstable(feature = "trait_cast", issue = "none")] +impl TraitMetadataTable for T +where + SuperTrait: MetaSized + + crate::ptr::Pointee> + + TraitMetadataTable, +{ + fn derived_metadata_table( + &self, + ) -> (&'static u8, crate::ptr::NonNull>>) { + // SAFETY: Not unsafe, required by the intrinsic. 
+ unsafe { crate::intrinsics::trait_metadata_table::() } + } +} + /// Types that may or may not have a size. #[unstable(feature = "sized_hierarchy", issue = "144404")] #[lang = "pointee_sized"] diff --git a/library/core/src/trait_cast.rs b/library/core/src/trait_cast.rs new file mode 100644 index 0000000000000..99c6e1b6bddae --- /dev/null +++ b/library/core/src/trait_cast.rs @@ -0,0 +1,205 @@ +//! Trait casting support. + +use crate::marker::{MetaSized, TraitMetadataTable}; +use crate::ptr::{DynMetadata, NonNull, Pointee}; + +/// Represents errors that may occur when attempting to perform a cast +/// between trait objects within the same trait graph. +/// +/// This enum is generic over type `T`, which is intended to represent +/// the object involved in the cast operation. +/// +/// # Variants +/// +/// - `ForeignTraitGraph(T)`: +/// Indicates that the object being cast originates from a different +/// global crate than the one attempting the cast. +/// - This is useful to provide more context when debugging cast errors. +/// - **Note:** Do not rely on this behavior, as it is subject to change +/// in future versions. +/// +/// - `UnsatisfiedObligation(T)`: +/// Indicates that the object being cast does not implement the required +/// trait or the cast fails due to inability to satisfy lifetime erasure +/// requirements. +/// - This may occur when the cast violates safety or does not align with +/// the constraints of the target trait. +/// +/// # Usage +/// +/// This enum is primarily used to encapsulate errors in trait casting +/// scenarios where such operations require validation of compatibility +/// at runtime. +#[derive(Debug, Clone, Copy)] +pub enum TraitCastError { + /// This object is from a different global crate than the one + /// that is performing the cast. + /// Useful if you'd like to provide a more informative error message. + /// Note: do not rely on this behavior. It is subject to change. 
+ ForeignTraitGraph(T), + /// This object does not implement the specified trait, or the cast does not + /// satisfy lifetime erasure requirements. + UnsatisfiedObligation(T), +} +impl TraitCastError { + /// Unwrap the contained, un-casted, value. + pub fn unwrap(self) -> T { + match self { + Self::ForeignTraitGraph(v) | Self::UnsatisfiedObligation(v) => v, + } + } +} + +/// `I` is the root supertrait, `U` is the target trait. +/// +/// The choice of root supertrait does not affect the value of the cast: +/// the output vtable is the same after monomorphization (or is +/// essentially user-invisible). +pub trait TraitCast: Sized +where + I: Pointee> + TraitMetadataTable, + U: Pointee> + TraitMetadataTable, +{ + /// The target *value* of a successful cast. + type Target; + /// Attempt to cast `self` to `U`. All trait impl-obligations are enforced, + /// but lifetime-erasure soundness is not. + /// Returns Err(TraitCastError::UnsatisfiedObligation) if the cast is not + /// possible due to unfulfilled generic obligations. + /// Returns Err(TraitCastError::ForeignTraitGraph) if the cast is not + /// possible because the object is from a different global crate. + unsafe fn unchecked_cast(self) -> Result>; + /// Attempt to cast `self` to `U`. + /// + /// Returns Err(TraitCastError::ForeignTraitGraph) if the cast is not + /// possible because the object is from a different global crate. + /// Returns Err(TraitCastError::UnsatisfiedObligation) if the cast is not + /// possible due to lifetime erasure requirements or because of unfulfilled + /// generic obligations. + fn checked_cast(self) -> Result> { + // SAFETY: `unchecked_cast`'s only precondition is that lifetime-erasure + // soundness has been verified, since it enforces trait impl-obligations + // on its own. 
The `trait_cast_is_lifetime_erasure_safe::` + // intrinsic call below returns `false` whenever that requirement is + // not satisfied, and we bail out with `UnsatisfiedObligation` in that + // case — so by the time we reach `self.unchecked_cast()`, erasure + // soundness for `I -> U` has been established. + unsafe { + if !crate::intrinsics::trait_cast_is_lifetime_erasure_safe::() { + return Err(TraitCastError::UnsatisfiedObligation(self)); + } + self.unchecked_cast() + } + } + /// Same as `checked_cast`, but strips TraitCastError::* from the return type. + fn cast(self) -> Result { + self.checked_cast().map_err(TraitCastError::unwrap) + } +} +impl<'r, T, U, I> TraitCast for &'r T +where + I: MetaSized + Pointee> + TraitMetadataTable + 'r, + T: MetaSized + TraitMetadataTable, + U: MetaSized + Pointee> + TraitMetadataTable + 'r, +{ + type Target = &'r U; + unsafe fn unchecked_cast(self) -> Result<&'r U, TraitCastError> { + // SAFETY: the caller has promised lifetime-erasure soundness for + // `I -> U` (the sole precondition of `unchecked_cast`). The compiler + // emits `trait_metadata_index` and `trait_metadata_table` in lockstep, + // so once `crate_graph_id == obj_graph_id` we know `table` has length + // `trait_metadata_table_len::()` and `idx` is a valid slot in it. A + // `Some(vtable)` entry holds the `U`-vtable for `I`'s trait graph, + // which has the layout of `DynMetadata`; the transmute reconstructs + // the correct metadata. The resulting fat pointer shares `self`'s + // provenance and is valid for `'r` because `T: 'r` and `U: 'r`. 
+ unsafe { + let (obj_graph_id, table) = >::derived_metadata_table(self); + let (crate_graph_id, idx) = crate::intrinsics::trait_metadata_index::(); + if crate_graph_id as *const u8 != obj_graph_id as *const u8 { + return Err(TraitCastError::ForeignTraitGraph(self)); + } + + let table_len = crate::intrinsics::trait_metadata_table_len::(); + let table: &[Option>] = + &*crate::ptr::from_raw_parts(table.as_ptr(), table_len); + + let (p, _) = (self as *const T).to_raw_parts(); + let Some(&Some(vtable)) = table.get(idx) else { + return Err(TraitCastError::UnsatisfiedObligation(self)); + }; + Ok(&*crate::ptr::from_raw_parts(p, crate::mem::transmute(vtable))) + } + } +} + +impl<'r, T, U, I> TraitCast for &'r mut T +where + I: MetaSized + Pointee> + TraitMetadataTable + 'r, + T: MetaSized + TraitMetadataTable, + U: MetaSized + Pointee> + TraitMetadataTable + 'r, +{ + type Target = &'r mut U; + unsafe fn unchecked_cast(self) -> Result<&'r mut U, TraitCastError> { + // SAFETY: the caller has promised lifetime-erasure soundness for + // `I -> U` (the sole precondition of `unchecked_cast`). The compiler + // emits `trait_metadata_index` and `trait_metadata_table` in lockstep, + // so once `crate_graph_id == obj_graph_id` we know `table` has length + // `trait_metadata_table_len::()` and `idx` is a valid slot in it. A + // `Some(vtable)` entry holds the `U`-vtable for `I`'s trait graph, + // which has the layout of `DynMetadata`; the transmute reconstructs + // the correct metadata. Uniqueness is preserved because `self` is + // moved into this call, and the resulting `&'r mut U` shares `self`'s + // provenance and is valid for `'r` because `T: 'r` and `U: 'r`. 
+ unsafe { + let (obj_graph_id, table) = >::derived_metadata_table(self); + let (crate_graph_id, idx) = crate::intrinsics::trait_metadata_index::(); + if crate_graph_id as *const u8 != obj_graph_id as *const u8 { + return Err(TraitCastError::ForeignTraitGraph(self)); + } + + let table_len = crate::intrinsics::trait_metadata_table_len::(); + let table: &[Option>] = + &*crate::ptr::from_raw_parts(table.as_ptr(), table_len); + + let (p, _) = (self as *mut T).to_raw_parts(); + let Some(&Some(vtable)) = table.get(idx) else { + return Err(TraitCastError::UnsatisfiedObligation(self)); + }; + Ok(&mut *crate::ptr::from_raw_parts_mut(p, crate::mem::transmute(vtable))) + } + } +} + +/// Attempt to cast `$e` to `$u` in the trait graph of `$i`. +/// Returns Err($e) if the cast is not possible. +#[macro_export] +macro_rules! cast { + (in $i:ty, $e:expr => $u:ty) => {{ $crate::trait_cast::TraitCast::<$i, $u>::cast($e) }}; +} + +/// Attempt to cast `$e` to `$u` in the trait graph of `$i`. +/// +/// Returns Err(TraitCastError::ForeignTraitGraph) if the cast is not +/// possible because the object is from a different global crate. +/// Returns Err(TraitCastError::UnsatisfiedObligation) if the cast is not +/// possible due to lifetime erasure requirements or because of unfulfilled +/// generic obligations. +#[macro_export] +macro_rules! try_cast { + (in $i:ty, $e:expr => $u:ty) => {{ $crate::trait_cast::TraitCast::<$i, $u>::checked_cast($e) }}; +} + +/// Unsafely attempt to cast `$e` to `$u` in the trait graph of `$i`. +/// +/// All trait impl-obligations are enforced, but lifetime-erasure soundness is +/// not. +/// +/// Returns Err(TraitCastError::UnsatisfiedObligation) if the cast is not +/// possible due to unfulfilled generic obligations. +/// Returns Err(TraitCastError::ForeignTraitGraph) if the cast is not +/// possible because the object is from a different global crate. +#[macro_export] +macro_rules! 
unchecked_cast { + (in $i:ty, $e:expr => $u:ty) => {{ $crate::trait_cast::TraitCast::<$i, $u>::unchecked_cast($e) }}; +} diff --git a/library/std/src/lib.rs b/library/std/src/lib.rs index 807befec1ad11..42a155f630e3a 100644 --- a/library/std/src/lib.rs +++ b/library/std/src/lib.rs @@ -555,6 +555,8 @@ pub use core::ptr; pub use core::range; #[stable(feature = "rust1", since = "1.0.0")] pub use core::result; +#[unstable(feature = "trait_cast", issue = "none")] +pub use core::trait_cast; #[stable(feature = "rust1", since = "1.0.0")] #[allow(deprecated, deprecated_in_future)] pub use core::u8; @@ -746,6 +748,8 @@ pub use core::{ }; #[stable(feature = "assert_matches", since = "1.95.0")] pub use core::{assert_matches, debug_assert_matches}; +#[unstable(feature = "trait_cast", issue = "none")] +pub use core::{cast, try_cast, unchecked_cast}; // Re-export unstable derive macro defined through core. #[unstable(feature = "derive_from", issue = "144889")] diff --git a/src/librustdoc/clean/utils.rs b/src/librustdoc/clean/utils.rs index ecf20dd9754ae..69d1c58fed505 100644 --- a/src/librustdoc/clean/utils.rs +++ b/src/librustdoc/clean/utils.rs @@ -150,6 +150,10 @@ pub(crate) fn clean_middle_generic_args<'tcx>( GenericArgKind::Const(ct) => { Some(GenericArg::Const(Box::new(clean_middle_const(arg.rebind(ct))))) } + // Outlives entries are internal to the mono collector's + // outlives specialization. They never appear in user-facing + // generic args and should be skipped in documentation. 
+ GenericArgKind::Outlives(_) => None, } }; diff --git a/src/tools/clippy/clippy_lints/src/let_underscore.rs b/src/tools/clippy/clippy_lints/src/let_underscore.rs index 984574c221fb6..306a1e352c14b 100644 --- a/src/tools/clippy/clippy_lints/src/let_underscore.rs +++ b/src/tools/clippy/clippy_lints/src/let_underscore.rs @@ -146,7 +146,7 @@ impl<'tcx> LateLintPass<'tcx> for LetUnderscore { GenericArgKind::Type(inner_ty) => inner_ty .ty_adt_def() .is_some_and(|adt| paths::PARKING_LOT_GUARDS.iter().any(|path| path.matches(cx, adt.did()))), - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false, + GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => false, }); if contains_sync_guard { #[expect(clippy::collapsible_span_lint_calls, reason = "rust-clippy#7797")] diff --git a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs index 6f1fca8d402ee..df1fc340d50c1 100644 --- a/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs +++ b/src/tools/clippy/clippy_lints/src/non_send_fields_in_send_ty.rs @@ -210,7 +210,7 @@ fn ty_allowed_with_raw_pointer_heuristic<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'t args.iter().all(|generic_arg| match generic_arg.kind() { GenericArgKind::Type(ty) => ty_allowed_with_raw_pointer_heuristic(cx, ty, send_trait), // Lifetimes and const generics are not solid part of ADT and ignored - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => true, + GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => true, }) }, // Raw pointers are `!Send` but allowed by the heuristic diff --git a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs index 2bb5615cfb767..de84560235467 100644 --- a/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs +++ b/src/tools/clippy/clippy_lints/src/only_used_in_recursion.rs @@ -488,6 
+488,7 @@ fn has_matching_args(kind: FnKind, args: GenericArgsRef<'_>) -> bool { GenericArgKind::Lifetime(_) => true, GenericArgKind::Type(ty) => matches!(*ty.kind(), ty::Param(ty) if ty.index as usize == idx), GenericArgKind::Const(c) => matches!(c.kind(), ConstKind::Param(c) if c.index as usize == idx), + GenericArgKind::Outlives(_) => true, }), FnKind::ImplTraitFn(expected_args) => std::ptr::from_ref(args) as usize == expected_args, } diff --git a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs index 8f7a140e91a86..d8af566a7ba12 100644 --- a/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs +++ b/src/tools/clippy/clippy_utils/src/qualify_min_const_fn.rs @@ -75,7 +75,7 @@ fn check_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>, span: Span, msrv: Msrv) // No constraints on lifetimes or constants, except potentially // constants' types, but `walk` will get to them as well. - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => continue, + GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => continue, }; match ty.kind() { @@ -354,8 +354,9 @@ fn check_terminator<'tcx>( target: _, unwind: _, fn_span: _, + call_id: _, } - | TerminatorKind::TailCall { func, args, fn_span: _ } => { + | TerminatorKind::TailCall { func, args, fn_span: _, call_id: _ } => { let fn_ty = func.ty(body, cx.tcx); if let ty::FnDef(fn_def_id, fn_substs) = fn_ty.kind() { // FIXME: when analyzing a function with generic parameters, we may not have enough information to diff --git a/src/tools/clippy/clippy_utils/src/ty/mod.rs b/src/tools/clippy/clippy_utils/src/ty/mod.rs index 0c261d21c1ec5..ceec799661857 100644 --- a/src/tools/clippy/clippy_utils/src/ty/mod.rs +++ b/src/tools/clippy/clippy_utils/src/ty/mod.rs @@ -76,7 +76,7 @@ pub fn can_partially_move_ty<'tcx>(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool pub fn contains_adt_constructor<'tcx>(ty: Ty<'tcx>, adt: AdtDef<'tcx>) -> bool { 
ty.walk().any(|inner| match inner.kind() { GenericArgKind::Type(inner_ty) => inner_ty.ty_adt_def() == Some(adt), - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false, + GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => false, }) } @@ -144,7 +144,7 @@ pub fn contains_ty_adt_constructor_opaque<'tcx>(cx: &LateContext<'tcx>, ty: Ty<' false }, - GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) => false, + GenericArgKind::Lifetime(_) | GenericArgKind::Const(_) | GenericArgKind::Outlives(_) => false, }) } diff --git a/tests/codegen-llvm/remap_path_prefix/main.rs b/tests/codegen-llvm/remap_path_prefix/main.rs index 7d17b3b67cfa2..ea995e35d8b9b 100644 --- a/tests/codegen-llvm/remap_path_prefix/main.rs +++ b/tests/codegen-llvm/remap_path_prefix/main.rs @@ -12,7 +12,7 @@ mod aux_mod; include!("aux_mod.rs"); // Here we check that the expansion of the file!() macro is mapped. -// CHECK: @alloc_5761061597a97f66e13ef2ff92712c4b = private unnamed_addr constant [34 x i8] c"/the/src/remap_path_prefix/main.rs" +// CHECK: @alloc_c3ffabeefd1a2c7adaf988a14a98103f = private unnamed_addr constant [34 x i8] c"/the/src/remap_path_prefix/main.rs" pub static FILE_PATH: &'static str = file!(); fn main() { diff --git a/tests/run-make/cross-global-crate-casts/cdylib_a.rs b/tests/run-make/cross-global-crate-casts/cdylib_a.rs new file mode 100644 index 0000000000000..3d9303fff2bbd --- /dev/null +++ b/tests/run-make/cross-global-crate-casts/cdylib_a.rs @@ -0,0 +1,62 @@ +//! One of the two global cdylibs used by this test. +//! +//! Exposes: +//! +//! * `cdylib_a_make_root` — returns a `&'static dyn Root` wrapped +//! in a `RootRef` (leaks a static `TypeA`). +//! * `cdylib_a_cast_sub` — runs `core::cast!(in dyn Root, _ => dyn Sub)` +//! on the incoming carrier and reports the +//! outcome via status codes. +//! +//! The trait-metadata intrinsics in `cdylib_a_cast_sub` are monomorphized +//! 
*here* (since this crate is a global crate), so they return this +//! cdylib's `global_crate_id`. When the bin hands in a carrier produced +//! by `cdylib_b` (or from a bin-local type), the ids diverge and +//! `TraitCastError::ForeignTraitGraph` is raised. + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "cdylib"] + +extern crate common; +extern crate core; + +use core::trait_cast::TraitCastError; + +use common::{CAST_FOREIGN_GRAPH, CAST_OK, CAST_UNSATISFIED, Root, RootRef, Sub}; + +pub struct TypeA; + +impl Root for TypeA { + fn name(&self) -> &'static str { + "TypeA" + } +} + +impl Sub for TypeA { + fn sub_value(&self) -> u32 { + 0xA + } +} + +static TYPE_A: TypeA = TypeA; + +#[no_mangle] +pub extern "C" fn cdylib_a_make_root() -> RootRef { + RootRef::from_root(&TYPE_A as &dyn Root) +} + +/// # Safety +/// `obj` must be a valid `RootRef` pointing at a live `dyn Root`. +#[no_mangle] +pub unsafe extern "C" fn cdylib_a_cast_sub(obj: RootRef, out_value: *mut u32) -> i32 { + let r: &dyn Root = unsafe { obj.as_root() }; + match core::try_cast!(in dyn Root, r => dyn Sub) { + Ok(s) => { + unsafe { *out_value = s.sub_value() }; + CAST_OK + } + Err(TraitCastError::ForeignTraitGraph(_)) => CAST_FOREIGN_GRAPH, + Err(TraitCastError::UnsatisfiedObligation(_)) => CAST_UNSATISFIED, + } +} diff --git a/tests/run-make/cross-global-crate-casts/cdylib_b.rs b/tests/run-make/cross-global-crate-casts/cdylib_b.rs new file mode 100644 index 0000000000000..662b7ffebfd0b --- /dev/null +++ b/tests/run-make/cross-global-crate-casts/cdylib_b.rs @@ -0,0 +1,54 @@ +//! Second global cdylib. Structurally identical to `cdylib_a`, but with +//! a distinct concrete type (`TypeB`) and a distinct `global_crate_id` +//! allocation. The interesting property is that `cdylib_b`'s +//! `trait_metadata_index` picks the *same* slot +//! index as `cdylib_a`'s (both graphs contain only `dyn Sub` as a +//! sub-trait of `dyn Root`). Only the crate-id token distinguishes +//! 
them, and that's what the test exercises. + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "cdylib"] + +extern crate common; +extern crate core; + +use core::trait_cast::TraitCastError; + +use common::{CAST_FOREIGN_GRAPH, CAST_OK, CAST_UNSATISFIED, Root, RootRef, Sub}; + +pub struct TypeB; + +impl Root for TypeB { + fn name(&self) -> &'static str { + "TypeB" + } +} + +impl Sub for TypeB { + fn sub_value(&self) -> u32 { + 0xB + } +} + +static TYPE_B: TypeB = TypeB; + +#[no_mangle] +pub extern "C" fn cdylib_b_make_root() -> RootRef { + RootRef::from_root(&TYPE_B as &dyn Root) +} + +/// # Safety +/// `obj` must be a valid `RootRef` pointing at a live `dyn Root`. +#[no_mangle] +pub unsafe extern "C" fn cdylib_b_cast_sub(obj: RootRef, out_value: *mut u32) -> i32 { + let r: &dyn Root = unsafe { obj.as_root() }; + match core::try_cast!(in dyn Root, r => dyn Sub) { + Ok(s) => { + unsafe { *out_value = s.sub_value() }; + CAST_OK + } + Err(TraitCastError::ForeignTraitGraph(_)) => CAST_FOREIGN_GRAPH, + Err(TraitCastError::UnsatisfiedObligation(_)) => CAST_UNSATISFIED, + } +} diff --git a/tests/run-make/cross-global-crate-casts/common.rs b/tests/run-make/cross-global-crate-casts/common.rs new file mode 100644 index 0000000000000..748e79c603545 --- /dev/null +++ b/tests/run-make/cross-global-crate-casts/common.rs @@ -0,0 +1,67 @@ +//! Shared rlib used by `cdylib_a`, `cdylib_b`, and `program`. +//! +//! This crate is *not* a global crate (it's an rlib), so it defines no +//! trait metadata tables of its own. Every downstream global crate +//! (the two cdylibs and the bin) computes its own table over the types +//! it can see — deliberately giving three disjoint global crates that +//! all assign the same slot index to `dyn Sub` but hand out different +//! `global_crate_id` tokens. That divergence is what the runtime cast +//! check distinguishes. 
+ +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![feature(ptr_metadata)] +#![crate_type = "rlib"] + +extern crate core; + +use core::marker::TraitMetadataTable; +use core::ptr::{DynMetadata, Pointee}; + +pub trait Root: TraitMetadataTable { + fn name(&self) -> &'static str; +} + +pub trait Sub: Root { + fn sub_value(&self) -> u32; +} + +/// FFI-safe carrier for `*const dyn Root`. +/// +/// The bin and the two cdylibs can't name each other's concrete types, +/// so they exchange erased trait objects across the extern-"C" boundary +/// via this struct. Layout: `(data ptr, vtable ptr)` — identical to +/// `*const dyn Root`, but explicitly `#[repr(C)]` so we don't rely on +/// the implicit layout of a `*const dyn T`. +#[repr(C)] +#[derive(Copy, Clone)] +pub struct RootRef { + pub data: *const (), + pub vtable: *const (), +} + +impl RootRef { + pub fn from_root(r: &dyn Root) -> Self { + let raw: *const dyn Root = r; + let (data, meta) = raw.to_raw_parts(); + let vtable: *const () = unsafe { core::mem::transmute(meta) }; + RootRef { data, vtable } + } + + /// # Safety + /// The carrier must have been produced by `RootRef::from_root` in + /// some (possibly different) global crate and the pointee must still + /// be live. + pub unsafe fn as_root<'a>(self) -> &'a dyn Root { + let meta: ::Metadata = + unsafe { core::mem::transmute::<*const (), DynMetadata>(self.vtable) }; + let ptr: *const dyn Root = core::ptr::from_raw_parts(self.data, meta); + unsafe { &*ptr } + } +} + +/// Status codes used by the extern-"C" cast entry points. Kept in the +/// shared crate so the bin and cdylibs agree. +pub const CAST_OK: i32 = 0; +pub const CAST_FOREIGN_GRAPH: i32 = 1; +pub const CAST_UNSATISFIED: i32 = 2; diff --git a/tests/run-make/cross-global-crate-casts/program.rs b/tests/run-make/cross-global-crate-casts/program.rs new file mode 100644 index 0000000000000..b806dc3a07a6e --- /dev/null +++ b/tests/run-make/cross-global-crate-casts/program.rs @@ -0,0 +1,136 @@ +//! 
Driver for the cross-global-crate cast test. +//! +//! This bin is itself a global crate (CrateType::Executable). It links +//! against `cdylib_a` and `cdylib_b` purely through their extern-"C" +//! entry points — the bin never names `TypeA`/`TypeB` and the cdylibs +//! never name `BinType`. +//! +//! There are therefore three independent global crates at runtime: +//! +//! * the bin, +//! * `libcdylib_a.so`, +//! * `libcdylib_b.so`. +//! +//! Each owns a distinct `global_crate_id` allocation. The RFC guarantees +//! that trait metadata index + table checks compare crate-id tokens +//! before trusting slot indices, so even though all three graphs happen +//! to assign slot 0 to `dyn Sub`, casts across crate boundaries must +//! report `TraitCastError::ForeignTraitGraph`. Casts that stay inside +//! one global crate must succeed. +//! +//! The test matrix covered by `main` below: +//! +//! cast site +//! ┌────────────┬────────────┬────────────┐ +//! object ── │ bin │ cdylib_a │ cdylib_b │ +//! ─────────────┼────────────┼────────────┼────────────┤ +//! BinType │ ok │ foreign │ foreign │ +//! TypeA │ foreign │ ok │ foreign │ +//! TypeB │ foreign │ foreign │ ok │ +//! 
└────────────┴────────────┴────────────┘ + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "bin"] + +extern crate common; +extern crate core; + +use core::trait_cast::TraitCastError; + +use common::{CAST_FOREIGN_GRAPH, CAST_OK, Root, RootRef, Sub}; + +// ---- extern surface of the two cdylibs -------------------------------- + +#[link(name = "cdylib_a")] +unsafe extern "C" { + fn cdylib_a_make_root() -> RootRef; + fn cdylib_a_cast_sub(obj: RootRef, out_value: *mut u32) -> i32; +} + +#[link(name = "cdylib_b")] +unsafe extern "C" { + fn cdylib_b_make_root() -> RootRef; + fn cdylib_b_cast_sub(obj: RootRef, out_value: *mut u32) -> i32; +} + +// ---- bin-local trait impl -------------------------------------------- + +struct BinType; + +impl Root for BinType { + fn name(&self) -> &'static str { + "BinType" + } +} + +impl Sub for BinType { + fn sub_value(&self) -> u32 { + 0xB1 + } +} + +// ---- cast helpers ---------------------------------------------------- + +/// Run the cast at the bin's cast site and classify the outcome the same +/// way the cdylibs do — this keeps the three rows of the matrix above +/// mutually comparable. 
+fn bin_cast_sub(obj: RootRef) -> (i32, u32) { + let r: &dyn Root = unsafe { obj.as_root() }; + let mut value = 0u32; + let code = match core::try_cast!(in dyn Root, r => dyn Sub) { + Ok(s) => { + value = s.sub_value(); + CAST_OK + } + Err(TraitCastError::ForeignTraitGraph(_)) => CAST_FOREIGN_GRAPH, + Err(TraitCastError::UnsatisfiedObligation(_)) => common::CAST_UNSATISFIED, + }; + (code, value) +} + +fn a_cast_sub(obj: RootRef) -> (i32, u32) { + let mut value = 0u32; + let code = unsafe { cdylib_a_cast_sub(obj, &mut value) }; + (code, value) +} + +fn b_cast_sub(obj: RootRef) -> (i32, u32) { + let mut value = 0u32; + let code = unsafe { cdylib_b_cast_sub(obj, &mut value) }; + (code, value) +} + +fn main() { + let a_obj: RootRef = unsafe { cdylib_a_make_root() }; + let b_obj: RootRef = unsafe { cdylib_b_make_root() }; + let bin_type = BinType; + let bin_obj: RootRef = RootRef::from_root(&bin_type as &dyn Root); + + // Sanity: vtable dispatch across the FFI boundary still works for + // the non-cast method — if this failed, the test below would be + // diagnosing something unrelated. + assert_eq!(unsafe { a_obj.as_root() }.name(), "TypeA"); + assert_eq!(unsafe { b_obj.as_root() }.name(), "TypeB"); + assert_eq!(unsafe { bin_obj.as_root() }.name(), "BinType"); + + // ---- diagonal: cast site matches object's origin global crate ---- + + assert_eq!(bin_cast_sub(bin_obj), (CAST_OK, 0xB1)); + assert_eq!(a_cast_sub(a_obj), (CAST_OK, 0xA)); + assert_eq!(b_cast_sub(b_obj), (CAST_OK, 0xB)); + + // ---- off-diagonal: every cross-crate pair must reject ------------ + + // bin cast site, foreign-origin objects. + assert_eq!(bin_cast_sub(a_obj), (CAST_FOREIGN_GRAPH, 0)); + assert_eq!(bin_cast_sub(b_obj), (CAST_FOREIGN_GRAPH, 0)); + + // cdylib_a cast site, foreign-origin objects. + assert_eq!(a_cast_sub(bin_obj), (CAST_FOREIGN_GRAPH, 0)); + assert_eq!(a_cast_sub(b_obj), (CAST_FOREIGN_GRAPH, 0)); + + // cdylib_b cast site, foreign-origin objects. 
+ assert_eq!(b_cast_sub(bin_obj), (CAST_FOREIGN_GRAPH, 0)); + assert_eq!(b_cast_sub(a_obj), (CAST_FOREIGN_GRAPH, 0)); +} diff --git a/tests/run-make/cross-global-crate-casts/rmake.rs b/tests/run-make/cross-global-crate-casts/rmake.rs new file mode 100644 index 0000000000000..8f179d45dcd52 --- /dev/null +++ b/tests/run-make/cross-global-crate-casts/rmake.rs @@ -0,0 +1,44 @@ +// Validate the cross-global-crate trait-cast check from the RFC. +// +// Build layout: +// common.rs -> rlib (not a global crate) +// cdylib_a.rs -> cdylib (global crate) +// cdylib_b.rs -> cdylib (global crate) +// program.rs -> bin (global crate), links both cdylibs via +// #[link(name = "cdylib_{a,b}")]. +// +// Because each of the three global crates independently computes its +// trait-cast layout over disjoint sets of concrete types, every graph +// assigns `dyn Sub` the same slot index (0). Only the per-crate +// `global_crate_id` token distinguishes them. `program` exercises the +// full 3x3 matrix of (object-origin-crate, cast-site-crate) pairs and +// asserts that off-diagonal pairs are rejected with +// `TraitCastError::ForeignTraitGraph` while diagonal pairs succeed. + +//@ ignore-cross-compile +// The test produces a binary that loads two cdylibs at runtime. + +//@ needs-target-std +// program.rs links against std implicitly via core/alloc; running it on +// a target without std is not meaningful. + +use run_make_support::{run, rustc}; + +fn main() { + // Shared rlib — provides `Root`/`Sub` and the `RootRef` FFI carrier. + rustc().input("common.rs").run(); + + // Two independent global cdylibs. Each carries its own copy of the + // trait-cast tables and its own address-significant + // `global_crate_id` allocation. + rustc().input("cdylib_a.rs").run(); + rustc().input("cdylib_b.rs").run(); + + // Bin — also a global crate. `-L .` lets rustc find `libcommon.rlib`, + // `libcdylib_a.so`, and `libcdylib_b.so` in the cwd. 
+ rustc().input("program.rs").run(); + + // `run()` sets LD_LIBRARY_PATH (or platform equivalent) to include + // the cwd, so the two cdylibs are resolved at load time. + run("program"); +} diff --git a/tests/run-make/dump-trait-cast-augmentation/rmake.rs b/tests/run-make/dump-trait-cast-augmentation/rmake.rs new file mode 100644 index 0000000000000..c400fde1a6c0e --- /dev/null +++ b/tests/run-make/dump-trait-cast-augmentation/rmake.rs @@ -0,0 +1,49 @@ +// Verify the `-Z dump-trait-cast-augmentation` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps +// per-edge augmentation decisions to stderr. It is implemented in +// compiler/rustc_monomorphize/src/cast_sensitivity.rs +// (`maybe_dump_augmentation`), invoked from every `augment_callee` call +// site once the final augmented Instance is known. +// +// This test verifies: +// 1. `-Zdump-trait-cast-augmentation=all` emits the expected section +// header, caller-outlives-env line, and augmented-callee line for +// at least one augmentation. +// 2. A substring filter matching `exercise` produces the dump whose +// caller name contains that substring (filter path exercised, +// positive case). +// 3. A substring filter matching no caller produces no dump (negative +// case — no `=== Augmentation:` header). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. filter = "all" -------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-augmentation=all") + .run() + .assert_stderr_contains("=== Augmentation:") + .assert_stderr_contains("Caller outlives env:") + .assert_stderr_contains("Augmented callee:"); + + // ---- 2. filter = substring of a caller's printed name ------------------ + // `exercise` is directly sensitive in `test.rs`, so it acts as a caller + // whose augmentations are emitted for its sensitive callees. 
+ rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-augmentation=exercise") + .run() + .assert_stderr_contains("=== Augmentation:") + .assert_stderr_contains("exercise"); + + // ---- 3. filter matching no caller (negative case) ---------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-augmentation=ZZZNoMatchZZZ") + .run() + .assert_stderr_not_contains("=== Augmentation:"); +} diff --git a/tests/run-make/dump-trait-cast-augmentation/test.rs b/tests/run-make/dump-trait-cast-augmentation/test.rs new file mode 100644 index 0000000000000..49733bd31b497 --- /dev/null +++ b/tests/run-make/dump-trait-cast-augmentation/test.rs @@ -0,0 +1,69 @@ +//! Minimal trait-cast program for exercising the +//! `-Z dump-trait-cast-augmentation` diagnostic flag. +//! +//! `exercise` contains `core::cast!` calls (which expand to the +//! `trait_metadata_index` intrinsic), making it directly sensitive. +//! `main` calls `exercise`, so `main` is transitively sensitive. +//! During augmentation the collector materializes outlives +//! relationships for these sensitive call edges, which is exactly +//! the moment the augmentation dump covers. 
+ +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/dump-trait-cast-canonicalization/rmake.rs b/tests/run-make/dump-trait-cast-canonicalization/rmake.rs new file mode 100644 index 0000000000000..5680de8b1f779 --- /dev/null +++ b/tests/run-make/dump-trait-cast-canonicalization/rmake.rs @@ -0,0 +1,40 @@ +// Verify the `-Z dump-trait-cast-canonicalization` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps +// cascade canonicalization decisions to stderr. It is implemented in +// compiler/rustc_monomorphize/src/partitioning.rs (cascade_canonicalize): +// a depth-ordered walk that emits a header, per-depth Phase 1 (patch) +// and Phase 3 (emit) sections, Phase 2 (dedup) entries for +// signature groups of size > 1, and a final canon map summary. +// +// This test verifies: +// 1. 
`-Zdump-trait-cast-canonicalization` emits the expected header, +// `Total delayed instances:` line, and per-depth emission +// evidence (at least one `Depth 0:` or `Phase 1 (patch):` line). +// 2. Without the flag, none of the canonicalization-dump output is +// emitted (negative case — no `=== Trait-Cast Canonicalization ===`). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. flag on -------------------------------------------------------- + let out = rustc().input("test.rs").arg("-Zdump-trait-cast-canonicalization").run(); + out.assert_stderr_contains("=== Trait-Cast Canonicalization ===") + .assert_stderr_contains("Total delayed instances:"); + // Per-depth emission proof: either the depth header or the Phase 1 + // subheader must appear. `Depth 0:` is always emitted whenever at + // least one delayed Instance exists (the test program has several). + let stderr = out.stderr_utf8(); + assert!( + stderr.contains("Depth 0:") || stderr.contains("Phase 1 (patch):"), + "expected per-depth emission marker in stderr, got:\n{stderr}" + ); + + // ---- 2. flag off (negative) -------------------------------------------- + rustc() + .input("test.rs") + .run() + .assert_stderr_not_contains("=== Trait-Cast Canonicalization ==="); +} diff --git a/tests/run-make/dump-trait-cast-canonicalization/test.rs b/tests/run-make/dump-trait-cast-canonicalization/test.rs new file mode 100644 index 0000000000000..b6db535facfd1 --- /dev/null +++ b/tests/run-make/dump-trait-cast-canonicalization/test.rs @@ -0,0 +1,123 @@ +//! Minimal trait-cast program with multiple delayed-Instance +//! call contexts for exercising the +//! `-Z dump-trait-cast-canonicalization` diagnostic flag. +//! +//! Several sibling `core::cast!` calls from different contexts +//! produce directly-sensitive leaf Instances, and their callers +//! become transitively sensitive delayed Instances. This gives +//! `cascade_canonicalize` at least one depth level with multiple +//! 
Instances — enough to exercise Phase 1 patching and Phase 3 +//! emission accounting. + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Root<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} + +trait SubX<'a, 'b>: Root<'a, 'b> { + fn x_val(&self) -> u32; +} + +trait SubY<'a, 'b>: Root<'a, 'b> { + fn y_val(&self) -> u32; +} + +// ---- concrete types ---- + +#[derive(Debug)] +struct TypeA<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +#[derive(Debug)] +struct TypeB<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +impl<'a, 'b> Root<'a, 'b> for TypeA<'a, 'b> { + fn id(&self) -> u32 { + 1 + } +} +impl<'a, 'b> Root<'a, 'b> for TypeB<'a, 'b> { + fn id(&self) -> u32 { + 2 + } +} + +impl<'a, 'b> SubX<'a, 'b> for TypeA<'a, 'b> { + fn x_val(&self) -> u32 { + *self.x + } +} +impl<'a, 'b> SubX<'a, 'b> for TypeB<'a, 'b> { + fn x_val(&self) -> u32 { + *self.x * 10 + } +} + +impl<'a, 'b> SubY<'a, 'b> for TypeA<'a, 'b> { + fn y_val(&self) -> u32 { + *self.y + } +} +impl<'a, 'b> SubY<'a, 'b> for TypeB<'a, 'b> { + fn y_val(&self) -> u32 { + *self.y * 10 + } +} + +// ---- multiple coercion/cast contexts ---- + +#[inline(never)] +fn ctx_equal<'a>(x: &'a u32, y: &'a u32) { + let a = TypeA { x, y }; + let obj: &dyn Root<'_, '_> = &a; + let sx = core::cast!(in dyn Root<'_, '_>, obj => dyn SubX<'_, '_>).expect("ctx_equal: subx"); + assert_eq!(sx.x_val(), *x); + let sy = core::cast!(in dyn Root<'_, '_>, obj => dyn SubY<'_, '_>).expect("ctx_equal: suby"); + assert_eq!(sy.y_val(), *y); +} + +#[inline(never)] +fn ctx_interior<'a>(x: &'a u32) { + let local: u32 = 7; + let b = TypeB { x, y: &local }; + let obj: &dyn Root<'_, '_> = &b; + let sx = core::cast!(in dyn Root<'_, '_>, obj => dyn SubX<'_, '_>).expect("ctx_interior: subx"); + assert_eq!(sx.x_val(), *x * 10); + let sy = core::cast!(in dyn Root<'_, '_>, obj => dyn SubY<'_, 
'_>).expect("ctx_interior: suby"); + assert_eq!(sy.y_val(), 7 * 10); +} + +#[inline(never)] +fn ctx_static() { + static X: u32 = 100; + static Y: u32 = 200; + let b = TypeB { x: &X, y: &Y }; + let obj: &dyn Root<'static, 'static> = &b; + let sx = core::cast!(in dyn Root<'static, 'static>, obj => dyn SubX<'static, 'static>) + .expect("ctx_static: subx"); + assert_eq!(sx.x_val(), 100 * 10); + let sy = core::cast!(in dyn Root<'static, 'static>, obj => dyn SubY<'static, 'static>) + .expect("ctx_static: suby"); + assert_eq!(sy.y_val(), 200 * 10); +} + +fn main() { + let a: u32 = 3; + let b: u32 = 5; + ctx_equal(&a, &b); + ctx_interior(&a); + ctx_static(); +} diff --git a/tests/run-make/dump-trait-cast-chain-composition/rmake.rs b/tests/run-make/dump-trait-cast-chain-composition/rmake.rs new file mode 100644 index 0000000000000..61533e142a76d --- /dev/null +++ b/tests/run-make/dump-trait-cast-chain-composition/rmake.rs @@ -0,0 +1,49 @@ +// Verify the `-Z dump-trait-cast-chain-composition` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps per-link +// details of trait-cast call_id chain composition to stderr. It is implemented +// in compiler/rustc_monomorphize/src/cast_sensitivity.rs inside +// `compose_all_through_chain`. +// +// This test verifies: +// 1. `-Zdump-trait-cast-chain-composition=all` emits the expected header, +// at least one `Link ` entry, and the `Final mapping` section. +// 2. A substring filter matching `exercise` still produces the dump when the +// caller instance name contains it (filter path exercised, positive case). +// 3. A substring filter matching no caller produces no dump (negative case — +// no `=== Chain Composition:` header). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. 
filter = "all" -------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-chain-composition=all") + .run() + // Header for at least one invocation. + .assert_stderr_contains("=== Chain Composition:") + // Per-link details — at least one link is emitted. + .assert_stderr_contains("Link ") + // Final mapping section is present. + .assert_stderr_contains("Final mapping"); + + // ---- 2. filter = substring of a caller's printed name ------------------ + // `exercise` is the caller on at least one chain composition invocation + // (it calls `core::cast!`, which expands to sensitive intrinsics). + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-chain-composition=exercise") + .run() + .assert_stderr_contains("=== Chain Composition:") + .assert_stderr_contains("exercise"); + + // ---- 3. filter matching no caller (negative case) ---------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-chain-composition=ZZZNoMatchZZZ") + .run() + .assert_stderr_not_contains("=== Chain Composition:"); +} diff --git a/tests/run-make/dump-trait-cast-chain-composition/test.rs b/tests/run-make/dump-trait-cast-chain-composition/test.rs new file mode 100644 index 0000000000000..cc6de0b6ace36 --- /dev/null +++ b/tests/run-make/dump-trait-cast-chain-composition/test.rs @@ -0,0 +1,67 @@ +//! Minimal trait-cast program for exercising the +//! `-Z dump-trait-cast-chain-composition` diagnostic flag. +//! +//! `exercise` calls `core::cast!`, which expands to the `trait_metadata_index` +//! intrinsic. That makes `exercise` directly sensitive; `main` calls +//! `exercise`, and so `main` is transitively sensitive. The call_id chain +//! composition fires while computing transitive sensitivity at those edges. 
+ +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/dump-trait-cast-erasure-safety/rmake.rs b/tests/run-make/dump-trait-cast-erasure-safety/rmake.rs new file mode 100644 index 0000000000000..cc2d4323b85d7 --- /dev/null +++ b/tests/run-make/dump-trait-cast-erasure-safety/rmake.rs @@ -0,0 +1,49 @@ +// Verify the `-Z dump-trait-cast-erasure-safety` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps +// per-query trait-cast erasure-safety analysis decisions to stderr. It is +// implemented in compiler/rustc_monomorphize/src/erasure_safe.rs +// (`dump_erasure_safety`), invoked at the end of the query provider +// `is_lifetime_erasure_safe`. +// +// This test verifies: +// 1. `-Zdump-trait-cast-erasure-safety=all` emits the expected header +// and verdict for at least one query. +// 2. 
A substring filter matching the `GraphRoot` super-trait name +// still produces the dump for that query (positive filter case). +// 3. A substring filter matching no super-trait name produces no dump +// (negative case). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. filter = "all" -------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-erasure-safety=all") + .run() + // Section header for at least one erasure-safety query. + .assert_stderr_contains("=== Erasure Safety:") + // Verdict line always present — either `safe` or `unsafe (...)`. + .assert_stderr_contains("Verdict:"); + + // ---- 2. filter = substring of a super-trait printed name --------------- + // `GraphRoot` is the super-trait for both casts in `test.rs`, so it + // appears in at least one query's printed super-trait name. + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-erasure-safety=GraphRoot") + .run() + .assert_stderr_contains("=== Erasure Safety:") + .assert_stderr_contains("Verdict:") + .assert_stderr_contains("GraphRoot"); + + // ---- 3. filter matching no query (negative case) ----------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-erasure-safety=ZZZNoMatchZZZ") + .run() + .assert_stderr_not_contains("=== Erasure Safety:"); +} diff --git a/tests/run-make/dump-trait-cast-erasure-safety/test.rs b/tests/run-make/dump-trait-cast-erasure-safety/test.rs new file mode 100644 index 0000000000000..89e8eb7631041 --- /dev/null +++ b/tests/run-make/dump-trait-cast-erasure-safety/test.rs @@ -0,0 +1,68 @@ +//! Minimal trait-cast program for exercising the +//! `-Z dump-trait-cast-erasure-safety` diagnostic flag. +//! +//! `exercise` performs `core::cast!` conversions across a small trait +//! graph; each `core::cast!` expansion funnels into the +//! `trait_cast_is_lifetime_erasure_safe` query, which is where the dump +//! is emitted. We keep two sub-traits so that we see at least two +//! 
distinct verdicts (one per cast). + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/dump-trait-cast-sensitivity/rmake.rs b/tests/run-make/dump-trait-cast-sensitivity/rmake.rs new file mode 100644 index 0000000000000..5cbcda56c7e45 --- /dev/null +++ b/tests/run-make/dump-trait-cast-sensitivity/rmake.rs @@ -0,0 +1,48 @@ +// Verify the `-Z dump-trait-cast-sensitivity` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps +// per-instance trait-cast sensitivity metadata to stderr. It is implemented +// in compiler/rustc_monomorphize/src/cast_sensitivity.rs +// (`dump_trait_cast_sensitivity`), invoked at the end of +// `compute_cast_relevant_lifetimes`. +// +// This test verifies: +// 1. `-Zdump-trait-cast-sensitivity=all` emits the expected header and +// mappings section for at least one instance. +// 2. 
A substring filter matching `exercise` still produces the dump for +// that instance (filter path exercised, positive case). +// 3. A substring filter matching no instance produces no dump (negative +// case — no `=== Sensitivity:` header). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. filter = "all" -------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-sensitivity=all") + .run() + // Section header for at least one sensitive instance. + .assert_stderr_contains("=== Sensitivity:") + // Mappings section (non-empty because at least one instance is + // transitively sensitive in this test). + .assert_stderr_contains("Mappings ("); + + // ---- 2. filter = substring of an instance's printed name --------------- + // `exercise` is both directly and transitively sensitive in `test.rs`. + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-sensitivity=exercise") + .run() + .assert_stderr_contains("=== Sensitivity:") + .assert_stderr_contains("exercise"); + + // ---- 3. filter matching no instance (negative case) -------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-cast-sensitivity=NoSuchFunctionZZZ") + .run() + .assert_stderr_not_contains("=== Sensitivity:"); +} diff --git a/tests/run-make/dump-trait-cast-sensitivity/test.rs b/tests/run-make/dump-trait-cast-sensitivity/test.rs new file mode 100644 index 0000000000000..c4fe542477b20 --- /dev/null +++ b/tests/run-make/dump-trait-cast-sensitivity/test.rs @@ -0,0 +1,67 @@ +//! Minimal trait-cast program for exercising the +//! `-Z dump-trait-cast-sensitivity` diagnostic flag. +//! +//! `exercise` calls `core::cast!`, which expands to the `trait_metadata_index` +//! intrinsic. That makes `exercise` directly sensitive; `main` calls +//! `exercise`, and so `main` is transitively sensitive. Between them, we cover +//! both the direct and transitive cases the dump reports on. 
+ +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/dump-trait-graph/rmake.rs b/tests/run-make/dump-trait-graph/rmake.rs new file mode 100644 index 0000000000000..ed574feb14e90 --- /dev/null +++ b/tests/run-make/dump-trait-graph/rmake.rs @@ -0,0 +1,58 @@ +// Verify the `-Z dump-trait-graph` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) dumps computed +// trait graph info for root supertraits matching a filter to stderr. It is +// implemented in compiler/rustc_monomorphize/src/partitioning.rs +// (`dump_trait_graph`). +// +// This test verifies: +// 1. `-Zdump-trait-graph=all` emits the expected section headers, sub-trait +// enumeration, and table layout for a simple trait hierarchy. +// 2. Filtering by a substring of the root trait name still produces the +// dump (filter path exercised). +// 3. 
Filtering by a string that matches no root produces no dump +// (negative case — no `=== Trait Graph:` header). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. filter = "all" -------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zdump-trait-graph=all") + .run() + // Section header for the root supertrait. + .assert_stderr_contains("=== Trait Graph:") + // The root trait from test.rs should appear in a header. + .assert_stderr_contains("GraphRoot") + // Sub-trait enumeration ran. + .assert_stderr_contains("Sub-traits (") + // Concrete type enumeration ran. + .assert_stderr_contains("Concrete types (") + // Table layout was computed. + .assert_stderr_contains("Table layout:") + // At least one slot was emitted. + .assert_stderr_contains("slot[0]:"); + + // ---- 2. filter = substring of root name -------------------------------- + // "GraphRoot" is a substring of the root's printed type; the dump should + // still fire. + rustc() + .input("test.rs") + .arg("-Zdump-trait-graph=GraphRoot") + .run() + .assert_stderr_contains("=== Trait Graph:") + .assert_stderr_contains("GraphRoot") + .assert_stderr_contains("Table layout:") + .assert_stderr_contains("slot[0]:"); + + // ---- 3. filter matching no root (negative case) ------------------------ + // No root's printed type contains this string, so the dump must be silent. + rustc() + .input("test.rs") + .arg("-Zdump-trait-graph=DefinitelyNotARealTrait") + .run() + .assert_stderr_not_contains("=== Trait Graph:"); +} diff --git a/tests/run-make/dump-trait-graph/test.rs b/tests/run-make/dump-trait-graph/test.rs new file mode 100644 index 0000000000000..e537688a5541e --- /dev/null +++ b/tests/run-make/dump-trait-graph/test.rs @@ -0,0 +1,73 @@ +//! Minimal trait-cast program for exercising the `-Z dump-trait-graph` +//! diagnostic flag. +//! +//! Trait graph: +//! root: dyn GraphRoot (via TraitMetadataTable) +//! sub-traits: dyn GraphSubA, dyn GraphSubB +//! 
concrete: GraphConcrete +//! +//! The sub-traits have no lifetime binder variables (all outlives classes are +//! empty), so the table layout fast-path kicks in and each sub-trait maps to +//! a single slot. That's sufficient to exercise the dump's header, sub-trait +//! enumeration, concrete type enumeration, table layout, and admissibility +//! sections. + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/print-trait-cast-stats/rmake.rs b/tests/run-make/print-trait-cast-stats/rmake.rs new file mode 100644 index 0000000000000..cf956d72793d1 --- /dev/null +++ b/tests/run-make/print-trait-cast-stats/rmake.rs @@ -0,0 +1,33 @@ +// Verify the `-Z print-trait-cast-stats` diagnostic flag. +// +// The flag (defined in compiler/rustc_session/src/options.rs) emits a +// single compact summary block describing the trait-cast monomorphization +// pipeline to stderr. 
It is implemented in +// compiler/rustc_monomorphize/src/partitioning.rs (`print_trait_cast_stats`). +// +// This test verifies: +// 1. `-Zprint-trait-cast-stats` emits the header block with the expected +// labels. +// 2. Without the flag, none of those labels appear (flag-off fast path). + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // ---- 1. flag on -------------------------------------------------------- + rustc() + .input("test.rs") + .arg("-Zprint-trait-cast-stats") + .run() + // Header. + .assert_stderr_contains("trait-cast stats:") + // Counter labels (we don't assert specific numbers — those are not + // load-bearing and would churn with unrelated pipeline changes). + .assert_stderr_contains("delayed codegen entries:") + .assert_stderr_contains("root supertraits:"); + + // ---- 2. flag off (negative case) --------------------------------------- + // With no `-Z print-trait-cast-stats`, the header must not appear. + rustc().input("test.rs").run().assert_stderr_not_contains("trait-cast stats:"); +} diff --git a/tests/run-make/print-trait-cast-stats/test.rs b/tests/run-make/print-trait-cast-stats/test.rs new file mode 100644 index 0000000000000..74410a635e717 --- /dev/null +++ b/tests/run-make/print-trait-cast-stats/test.rs @@ -0,0 +1,72 @@ +//! Minimal trait-cast program for exercising the `-Z print-trait-cast-stats` +//! diagnostic flag. +//! +//! Trait graph: +//! root: dyn GraphRoot (via TraitMetadataTable) +//! sub-traits: dyn GraphSubA, dyn GraphSubB +//! concrete: GraphConcrete +//! +//! This is the same shape used by the `dump-trait-graph` smoke test — it is +//! small enough that the pipeline numbers are easy to reason about, but big +//! enough that at least one root supertrait, one table slot, and one +//! intrinsic call site are emitted. 
+ +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait GraphRoot: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait GraphSubA: GraphRoot { + fn a(&self) -> u32; +} + +trait GraphSubB: GraphRoot { + fn b(&self) -> u32; +} + +// ---- concrete type ---- + +#[derive(Debug)] +struct GraphConcrete; + +impl GraphRoot for GraphConcrete { + fn name(&self) -> &'static str { + "GraphConcrete" + } +} + +impl GraphSubA for GraphConcrete { + fn a(&self) -> u32 { + 1 + } +} + +impl GraphSubB for GraphConcrete { + fn b(&self) -> u32 { + 2 + } +} + +#[inline(never)] +fn exercise(obj: &dyn GraphRoot) { + assert_eq!(obj.name(), "GraphConcrete"); + + let a = core::cast!(in dyn GraphRoot, obj => dyn GraphSubA).unwrap(); + assert_eq!(a.a(), 1); + + let b = core::cast!(in dyn GraphRoot, obj => dyn GraphSubB).unwrap(); + assert_eq!(b.b(), 2); +} + +fn main() { + exercise(&GraphConcrete as &dyn GraphRoot); +} diff --git a/tests/run-make/trait-cast-condense-baseline/rmake.rs b/tests/run-make/trait-cast-condense-baseline/rmake.rs new file mode 100644 index 0000000000000..2a6ff479e3c50 --- /dev/null +++ b/tests/run-make/trait-cast-condense-baseline/rmake.rs @@ -0,0 +1,46 @@ +// Verify the condensation code path (NOT the fast path) when a +// lifetime-parameterized trait has impls whose Self type and trait +// ref share parameters — a shape that disables +// `impl_universally_admissible`. +// +// Every impl in the test program (test.rs) has the form +// `impl<'a, 'b> SubX<'a, 'b> for TypeA<'a, 'b>`; the lifetime params +// are shared between Self and trait args. `impl_universally_admissible` +// therefore returns false for every impl, so `trait_cast_layout` +// bypasses the fast path and invokes `condense_outlives_classes` for +// every sub-trait. 
+// +// With a single (empty) outlives class materialized per sub-trait, +// condensation produces exactly one group per sub-trait. The table +// length is therefore `num_sub_traits == 2`. This is the baseline +// "condensation actually runs and produces minimum slots" contract: if +// condensation regressed and started emitting multiple slots for a +// single class, the table length would grow and this test would fail. +// +// Expected resolutions in the post-mono MIR: +// - trait_metadata_table_len>() → 2_usize +// - trait_metadata_index for SubX and SubY → 0_usize, 1_usize +// +// (Erasure safety is orthogonal to condensation and this test does not +// pin `trait_cast_is_lifetime_erasure_safe`.) + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + rustc() + .input("test.rs") + .arg("-Zdump-post-mono-mir") + .run() + // ── Table length ────────────────────────────────────────── + // One slot per sub-trait — condensation collapses the single + // materialized class to a single group. + .assert_stdout_contains("const 2_usize") + // ── Slot indices ────────────────────────────────────────── + // Two distinct slot indices (0, 1), one per sub-trait. + .assert_stdout_contains("const 0_usize") + .assert_stdout_contains("const 1_usize") + // ── Instance headers ────────────────────────────────────── + .assert_stdout_contains("post-mono MIR for instance"); +} diff --git a/tests/run-make/trait-cast-condense-baseline/test.rs b/tests/run-make/trait-cast-condense-baseline/test.rs new file mode 100644 index 0000000000000..fadd030c00d07 --- /dev/null +++ b/tests/run-make/trait-cast-condense-baseline/test.rs @@ -0,0 +1,161 @@ +//! Condensation baseline: lifetime-parameterized trait hierarchy with +//! multiple call contexts. Exercises the `condense_outlives_classes` +//! code path driven by shared Self/trait params in +//! `impl_admissible_under_class`. +//! +//! Every impl has the form `impl<'a, 'b> SubX<'a, 'b> for TypeA<'a, 'b>` +//! 
— the lifetime parameters `'a` and `'b` appear in BOTH the impl's +//! Self type AND its trait-ref args. That shared-param condition causes +//! `impl_universally_admissible` to return false for every impl, so +//! `trait_cast_layout` BYPASSES the fast path and invokes +//! `condense_outlives_classes` for every sub-trait. +//! +//! With a single (empty) outlives class materialized per sub-trait, +//! condensation produces exactly one group per sub-trait, yielding +//! `table_length == num_sub_traits`. This baseline captures the +//! minimum-size-via-condensation-path contract: if condensation +//! regressed and started emitting multiple slots for a single class, +//! the table length would grow and this test would fail. +//! +//! Trait graph: +//! root: dyn Root<'a, 'b> +//! sub-traits: dyn SubX<'a, 'b>, dyn SubY<'a, 'b> +//! concrete: TypeA<'a, 'b>, TypeB<'a, 'b> +//! +//! Expected resolutions in post-mono MIR: +//! trait_metadata_table_len>() → 2_usize +//! trait_metadata_index, dyn SubX<'_,'_>> → (.., 0_usize) +//! trait_metadata_index, dyn SubY<'_,'_>> → (.., 1_usize) + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Root<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} + +trait SubX<'a, 'b>: Root<'a, 'b> { + fn x_val(&self) -> u32; +} + +trait SubY<'a, 'b>: Root<'a, 'b> { + fn y_val(&self) -> u32; +} + +// ---- concrete types ---- + +#[derive(Debug)] +struct TypeA<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +#[derive(Debug)] +struct TypeB<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +// All impls are universally admissible: no where clauses, no param +// aliasing in the trait ref, no shared Self/trait params. 
+impl<'a, 'b> Root<'a, 'b> for TypeA<'a, 'b> { + fn id(&self) -> u32 { + 1 + } +} +impl<'a, 'b> Root<'a, 'b> for TypeB<'a, 'b> { + fn id(&self) -> u32 { + 2 + } +} + +impl<'a, 'b> SubX<'a, 'b> for TypeA<'a, 'b> { + fn x_val(&self) -> u32 { + *self.x + } +} +impl<'a, 'b> SubX<'a, 'b> for TypeB<'a, 'b> { + fn x_val(&self) -> u32 { + *self.x * 10 + } +} + +impl<'a, 'b> SubY<'a, 'b> for TypeA<'a, 'b> { + fn y_val(&self) -> u32 { + *self.y + } +} +impl<'a, 'b> SubY<'a, 'b> for TypeB<'a, 'b> { + fn y_val(&self) -> u32 { + *self.y * 10 + } +} + +// ---- multiple coercion/cast contexts ---- + +/// Context 1: both lifetimes equal (single outer lifetime). +#[inline(never)] +fn ctx_equal<'a>(x: &'a u32, y: &'a u32) { + let a = TypeA { x, y }; + let obj: &dyn Root<'_, '_> = &a; + let sx = core::cast!(in dyn Root<'_, '_>, obj => dyn SubX<'_, '_>).expect("ctx_equal: subx"); + assert_eq!(sx.x_val(), *x); + let sy = core::cast!(in dyn Root<'_, '_>, obj => dyn SubY<'_, '_>).expect("ctx_equal: suby"); + assert_eq!(sy.y_val(), *y); +} + +/// Context 2: `'b` is strictly interior to `'a`. +#[inline(never)] +fn ctx_interior<'a>(x: &'a u32) { + let local: u32 = 7; + let b = TypeB { x, y: &local }; + let obj: &dyn Root<'_, '_> = &b; + let sx = core::cast!(in dyn Root<'_, '_>, obj => dyn SubX<'_, '_>).expect("ctx_interior: subx"); + assert_eq!(sx.x_val(), *x * 10); + let sy = core::cast!(in dyn Root<'_, '_>, obj => dyn SubY<'_, '_>).expect("ctx_interior: suby"); + assert_eq!(sy.y_val(), 7 * 10); +} + +/// Context 3: explicit `'a: 'b` bound introduced in the signature. 
+#[inline(never)] +fn ctx_bounded<'a, 'b>(x: &'a u32, y: &'b u32) +where + 'a: 'b, +{ + let a = TypeA { x, y }; + let obj: &dyn Root<'_, '_> = &a; + let sx = core::cast!(in dyn Root<'_, '_>, obj => dyn SubX<'_, '_>).expect("ctx_bounded: subx"); + assert_eq!(sx.x_val(), *x); + let sy = core::cast!(in dyn Root<'_, '_>, obj => dyn SubY<'_, '_>).expect("ctx_bounded: suby"); + assert_eq!(sy.y_val(), *y); +} + +/// Context 4: 'static everywhere. +#[inline(never)] +fn ctx_static() { + static X: u32 = 100; + static Y: u32 = 200; + let b = TypeB { x: &X, y: &Y }; + let obj: &dyn Root<'static, 'static> = &b; + let sx = core::cast!(in dyn Root<'static, 'static>, obj => dyn SubX<'static, 'static>) + .expect("ctx_static: subx"); + assert_eq!(sx.x_val(), 100 * 10); + let sy = core::cast!(in dyn Root<'static, 'static>, obj => dyn SubY<'static, 'static>) + .expect("ctx_static: suby"); + assert_eq!(sy.y_val(), 200 * 10); +} + +fn main() { + let a: u32 = 3; + let b: u32 = 5; + ctx_equal(&a, &b); + ctx_interior(&a); + ctx_bounded(&a, &b); + ctx_static(); +} diff --git a/tests/run-make/trait-cast-condense-param-aliasing/rmake.rs b/tests/run-make/trait-cast-condense-param-aliasing/rmake.rs new file mode 100644 index 0000000000000..e1d970e8f6deb --- /dev/null +++ b/tests/run-make/trait-cast-condense-param-aliasing/rmake.rs @@ -0,0 +1,45 @@ +// Verify `condense_outlives_classes` under the parameter-aliasing +// rejection rule — an impl where ONE impl-side lifetime param maps to +// MULTIPLE distinct dyn bound variables (parameter aliasing). +// +// The test program (test.rs) implements `Aliased<'a, 'b>` for both +// `TypeA<'a, 'b>` (universal-looking, different params per bv) and +// `TypeB<'c>` at `Aliased<'c, 'c>` (one `'c` maps to both bvs 0 and 1 +// of the dyn binder). Param aliasing disqualifies the TypeB impl from +// `impl_universally_admissible`, so `trait_cast_layout` invokes +// `condense_outlives_classes` for `Aliased`. 
+// +// Under the single empty outlives class, the aliasing rule requires +// mutual outlives between the two bvs that `'c` aliases. Reachability +// has only reflexivity and `'static →` edges, so the mutual-outlives +// check fails and `TypeB::Aliased` is rejected. +// Matrix row for Aliased: `[TypeA: ✓, TypeB: ✗]` — one row → one slot. +// +// `Free<'a, 'b>` also bypasses the fast path (shared Self/trait +// params) and receives one slot via condensation. +// +// Expected table layout: 2 slots total (1 Free + 1 Aliased). +// +// If the aliasing rule regressed and accidentally admitted the +// aliased impl under an empty class, the table length would still be +// 2 — but runtime casts of TypeB to `Aliased<'c, 'c>` that should +// fail would start succeeding, which a behavior-level assertion would +// catch. + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + rustc() + .input("test.rs") + .arg("-Zdump-post-mono-mir") + .run() + // ── Table length: 1 slot per sub-trait via condensation ─── + .assert_stdout_contains("const 2_usize") + // ── Slot indices ────────────────────────────────────────── + .assert_stdout_contains("const 0_usize") + .assert_stdout_contains("const 1_usize") + // ── Instance headers ────────────────────────────────────── + .assert_stdout_contains("post-mono MIR for instance"); +} diff --git a/tests/run-make/trait-cast-condense-param-aliasing/test.rs b/tests/run-make/trait-cast-condense-param-aliasing/test.rs new file mode 100644 index 0000000000000..f6fdade5c72e2 --- /dev/null +++ b/tests/run-make/trait-cast-condense-param-aliasing/test.rs @@ -0,0 +1,122 @@ +//! Condensation with PARAMETER ALIASING in an impl: exercises the +//! "one impl param maps to multiple distinct bvs" rule in +//! `impl_admissible_under_class`. +//! +//! The dyn type is `dyn Aliased<'a, 'b>` (2 bound variables). +//! `TypeB` is implemented only at `Aliased<'c, 'c>` — one impl param +//! `'c` maps to BOTH bvs 0 and 1. 
The aliasing rule says: when one
+//! impl param maps to multiple distinct bvs, those bvs must be
+//! MUTUALLY OUTLIVES under the current outlives class. With an empty
+//! class, the mutual-outlives check fails, so
+//! `impl_admissible_under_class` rejects the `TypeB::Aliased` impl.
+//!
+//! Param aliasing ALSO disqualifies the impl from
+//! `impl_universally_admissible`, so the fast path is bypassed and
+//! `condense_outlives_classes` is invoked for `Aliased`. The
+//! admissibility matrix row for the single empty class is
+//! `[TypeA: ✓, TypeB: ✗]` — one row → one condensed slot.
+//!
+//! The `Free<'a, 'b>` sub-trait has no where clauses and no param
+//! aliasing but still goes through condensation because of shared
+//! Self/trait params (Self-anchored-param rule). Both sub-traits
+//! therefore receive one slot each via condensation.
+//!
+//! Expected total table length: 2 slots (1 Free + 1 Aliased).

+#![feature(trait_cast)]
+#![feature(sized_hierarchy)]
+#![allow(dead_code, unused_variables)]
+#![crate_type = "bin"]
+
+extern crate core;
+use core::marker::TraitMetadataTable;
+
+// ---- trait graph ----
+
+trait Root<'a, 'b>: TraitMetadataTable<dyn Root<'a, 'b>> + core::fmt::Debug {
+    fn id(&self) -> u32;
+}
+
+trait Free<'a, 'b>: Root<'a, 'b> {
+    fn free_val(&self) -> u32;
+}
+
+trait Aliased<'a, 'b>: Root<'a, 'b> {
+    fn aliased_val(&self) -> u32;
+}
+
+// ---- concrete types ----
+
+#[derive(Debug)]
+struct TypeA<'a, 'b> {
+    x: &'a u32,
+    y: &'b u32,
+}
+
+/// `TypeB` carries a single lifetime parameter that is used to
+/// instantiate both bvs of `Aliased<'c, 'c>` below — the aliasing
+/// pattern.
+#[derive(Debug)]
+struct TypeB<'c> {
+    v: &'c u32,
+}
+
+// Universal-looking impls (still on the condensation path because of
+// shared Self/trait params).
+impl<'a, 'b> Root<'a, 'b> for TypeA<'a, 'b> { + fn id(&self) -> u32 { + 1 + } +} +impl<'c> Root<'c, 'c> for TypeB<'c> { + fn id(&self) -> u32 { + 2 + } +} +impl<'a, 'b> Free<'a, 'b> for TypeA<'a, 'b> { + fn free_val(&self) -> u32 { + 10 + } +} +impl<'c> Free<'c, 'c> for TypeB<'c> { + fn free_val(&self) -> u32 { + 20 + } +} + +// Aliased — TypeA impls without aliasing; TypeB impls with 'c used +// for BOTH positions of the trait ref (one param → two bvs). +impl<'a, 'b> Aliased<'a, 'b> for TypeA<'a, 'b> { + fn aliased_val(&self) -> u32 { + *self.x + *self.y + } +} +impl<'c> Aliased<'c, 'c> for TypeB<'c> { + fn aliased_val(&self) -> u32 { + *self.v + } +} + +// ---- call contexts ---- + +#[inline(never)] +fn ctx_a<'a>(x: &'a u32) { + let local: u32 = 7; + let obj: &dyn Root<'_, '_> = &TypeA { x, y: &local }; + let _ = core::cast!(in dyn Root<'_, '_>, obj => dyn Free<'_, '_>); + let _ = core::cast!(in dyn Root<'_, '_>, obj => dyn Aliased<'_, '_>); +} + +#[inline(never)] +fn ctx_b<'c>(v: &'c u32) { + // Single lifetime — both slots instantiated from 'c. + let obj: &dyn Root<'c, 'c> = &TypeB { v }; + let _ = core::cast!(in dyn Root<'c, 'c>, obj => dyn Free<'c, 'c>); + let _ = core::cast!(in dyn Root<'c, 'c>, obj => dyn Aliased<'c, 'c>); +} + +fn main() { + let a: u32 = 3; + ctx_a(&a); + ctx_b(&a); +} diff --git a/tests/run-make/trait-cast-condense-static-in-impl/rmake.rs b/tests/run-make/trait-cast-condense-static-in-impl/rmake.rs new file mode 100644 index 0000000000000..4075d2722cacf --- /dev/null +++ b/tests/run-make/trait-cast-condense-static-in-impl/rmake.rs @@ -0,0 +1,44 @@ +// Verify `condense_outlives_classes` when an impl fixes a trait-ref +// lifetime arg to the literal `'static` (the `ReStatic` branch of +// `impl_admissible_under_class`). +// +// The test program (test.rs) has `TypeB` implementing `Anchored<'static>` +// (and `Root`/`Free`) rather than `Anchored<'a>` for any free `'a`. 
The +// concrete `'static` in the trait ref disqualifies the impl from +// `impl_universally_admissible`, so `trait_cast_layout` invokes +// `condense_outlives_classes` for the `Anchored` sub-trait. +// +// Under the single materialized empty outlives class, admissibility +// requires `bv0 outlives 'static`. Reachability contains only +// reflexivity and `'static → *` edges — no `bv → 'static` edge — so +// the admissibility check rejects `TypeB::Anchored`. +// Matrix row for Anchored: `[TypeA: ✓, TypeB: ✗]` → one row → one slot. +// +// `Free<'a>` bypasses the fast path via shared Self/trait params and +// also receives one slot. +// +// Expected total table length: 2 slots. +// +// If the `ReStatic` rule regressed (e.g., remapping `ReStatic` +// incorrectly, or reading the reachability matrix with the wrong +// `num_bvs`), either the slot count would change or the TypeB +// admissibility pattern would flip — both of which this test +// constrains, when combined with runtime behavior checks. + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + rustc() + .input("test.rs") + .arg("-Zdump-post-mono-mir") + .run() + // ── Table length: 1 slot per sub-trait via condensation ── + .assert_stdout_contains("const 2_usize") + // ── Slot indices ───────────────────────────────────────── + .assert_stdout_contains("const 0_usize") + .assert_stdout_contains("const 1_usize") + // ── Instance headers ───────────────────────────────────── + .assert_stdout_contains("post-mono MIR for instance"); +} diff --git a/tests/run-make/trait-cast-condense-static-in-impl/test.rs b/tests/run-make/trait-cast-condense-static-in-impl/test.rs new file mode 100644 index 0000000000000..9baa1bf8f5e98 --- /dev/null +++ b/tests/run-make/trait-cast-condense-static-in-impl/test.rs @@ -0,0 +1,117 @@ +//! Condensation with a concrete `'static` lifetime in an impl's +//! trait ref: exercises the `ReStatic`-in-trait-arg branch of +//! `impl_admissible_under_class`. +//! +//! 
`TypeB` is implemented only at `Anchored<'static>` — the impl's
+//! trait-ref arg at position 0 is the literal `'static`, not a free
+//! param. The rule then requires that for every bv at that position,
+//! the outlives class implies `bv outlives 'static`. Under the only
+//! materialized (empty) outlives class, reachability contains no
+//! `bv → 'static` edge, so the admissibility check rejects the
+//! TypeB::Anchored impl.
+//!
+//! Concrete `'static` in the trait ref also disqualifies
+//! `impl_universally_admissible` (condition: "no concrete lifetimes in
+//! the trait ref"), so `trait_cast_layout` bypasses the fast path and
+//! invokes `condense_outlives_classes` for `Anchored`.
+//! Matrix row for Anchored (empty class): `[TypeA: ✓, TypeB: ✗]` →
+//! one row → one condensed slot.
+//!
+//! `Free<'a>` has no concrete lifetimes but still goes through
+//! condensation via shared Self/trait params. Total: 2 slots.

+#![feature(trait_cast)]
+#![feature(sized_hierarchy)]
+#![allow(dead_code, unused_variables)]
+#![crate_type = "bin"]
+
+extern crate core;
+use core::marker::TraitMetadataTable;
+
+// ---- trait graph ----
+
+trait Root<'a>: TraitMetadataTable<dyn Root<'a>> + core::fmt::Debug {
+    fn id(&self) -> u32;
+}
+
+trait Free<'a>: Root<'a> {
+    fn free_val(&self) -> u32;
+}
+
+trait Anchored<'a>: Root<'a> {
+    fn anchored_val(&self) -> u32;
+}
+
+// ---- concrete types ----
+
+#[derive(Debug)]
+struct TypeA<'a> {
+    x: &'a u32,
+}
+
+/// `TypeB` uses `'static` for its `Root`/`Anchored` impls, so the
+/// admissibility check sees `ReStatic` at the trait-ref lifetime
+/// position.
+#[derive(Debug)] +struct TypeB { + v: &'static u32, +} + +impl<'a> Root<'a> for TypeA<'a> { + fn id(&self) -> u32 { + 1 + } +} +impl Root<'static> for TypeB { + fn id(&self) -> u32 { + 2 + } +} +impl<'a> Free<'a> for TypeA<'a> { + fn free_val(&self) -> u32 { + 10 + } +} +impl Free<'static> for TypeB { + fn free_val(&self) -> u32 { + 20 + } +} + +impl<'a> Anchored<'a> for TypeA<'a> { + fn anchored_val(&self) -> u32 { + *self.x + } +} +impl Anchored<'static> for TypeB { + fn anchored_val(&self) -> u32 { + *self.v + } +} + +// ---- call contexts ---- + +#[inline(never)] +fn ctx_scoped<'a>(x: &'a u32) { + let obj: &dyn Root<'a> = &TypeA { x }; + let _ = core::cast!(in dyn Root<'a>, obj => dyn Free<'a>); + let _ = core::cast!(in dyn Root<'a>, obj => dyn Anchored<'a>); +} + +#[inline(never)] +fn ctx_static() { + static X: u32 = 42; + let obj_a: &dyn Root<'static> = &TypeA { x: &X }; + let _ = core::cast!(in dyn Root<'static>, obj_a => dyn Free<'static>); + let _ = core::cast!(in dyn Root<'static>, obj_a => dyn Anchored<'static>); + + let obj_b: &dyn Root<'static> = &TypeB { v: &X }; + let _ = core::cast!(in dyn Root<'static>, obj_b => dyn Free<'static>); + let _ = core::cast!(in dyn Root<'static>, obj_b => dyn Anchored<'static>); +} + +fn main() { + let a: u32 = 3; + ctx_scoped(&a); + ctx_static(); +} diff --git a/tests/run-make/trait-cast-condense-where-clause-reject/rmake.rs b/tests/run-make/trait-cast-condense-where-clause-reject/rmake.rs new file mode 100644 index 0000000000000..2470a8806f8cc --- /dev/null +++ b/tests/run-make/trait-cast-condense-where-clause-reject/rmake.rs @@ -0,0 +1,50 @@ +// Verify `condense_outlives_classes` when an impl carries an +// unsatisfiable `'a: 'static` RegionOutlives where clause (the +// explicit-where-clause branch of `impl_admissible_under_class`). +// +// The test program (test.rs) has a `Gated<'a>` sub-trait whose impl +// for `TypeB<'a>` carries a `'a: 'static` where clause. 
That clause +// disqualifies the impl from `impl_universally_admissible`, so +// `trait_cast_layout` invokes `condense_outlives_classes` for `Gated`. +// All call contexts use scoped (non-'static) lifetimes, so the +// outlives class materialized for Gated is `{empty}`. Under that +// class: +// - TypeA::Gated admits (no where clause) +// - TypeB::Gated rejects: `'a: 'static` requires an `'a→'static` +// edge in reachability, which only `ReStatic` in the impl's +// trait ref could insert — none exists here. +// Matrix row: `[TypeA: ✓, TypeB: ✗]` — one row → one slot. +// +// The `Free<'a>` sub-trait has no where clauses but still bypasses +// the fast path because its impls have shared Self/trait params. +// Condensation likewise produces one slot. +// +// Expected resolutions in the post-mono MIR: +// - trait_metadata_table_len>() → 2_usize +// (1 slot for Free + 1 slot for Gated, each condensed) +// - trait_metadata_index → 0_usize, 1_usize +// +// If the where-clause check accidentally admitted the `'a: 'static` +// impl under an empty class (e.g., by treating the `'static` +// placeholder in the clause as satisfied by reachability's reflexive +// closure), the table length would still be 2 — but the Gated slot's +// `TypeB` entry would populate with a vtable, which runtime cast +// tests would notice. 
+ +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + rustc() + .input("test.rs") + .arg("-Zdump-post-mono-mir") + .run() + // ── Table length: 1 slot each for Free and Gated ── + .assert_stdout_contains("const 2_usize") + // ── Slot indices ──────────────────────────────────────────── + .assert_stdout_contains("const 0_usize") + .assert_stdout_contains("const 1_usize") + // ── Instance headers ──────────────────────────────────────── + .assert_stdout_contains("post-mono MIR for instance"); +} diff --git a/tests/run-make/trait-cast-condense-where-clause-reject/test.rs b/tests/run-make/trait-cast-condense-where-clause-reject/test.rs new file mode 100644 index 0000000000000..bc46d9275f3a4 --- /dev/null +++ b/tests/run-make/trait-cast-condense-where-clause-reject/test.rs @@ -0,0 +1,148 @@ +//! Condensation with an UNSATISFIABLE RegionOutlives where clause: +//! exercises the explicit-where-clause rejection path in +//! `impl_admissible_under_class`. +//! +//! The `TypeB` impl of `Gated<'a>` carries a `'a: 'static` where +//! clause. `impl_universally_admissible` returns false for it (the +//! RegionOutlives clause disqualifies universal admissibility), so +//! `trait_cast_layout` invokes `condense_outlives_classes` for `Gated`. +//! The `Free` sub-trait's impls are also not universally admissible +//! (shared Self/trait params), so Free also goes through condensation. +//! +//! All call contexts use scoped (non-'static) lifetimes, so the only +//! materialized outlives class is `{empty}`. Under that class: +//! - TypeA::Gated admits (no where clause) +//! - TypeB::Gated REJECTS (the 'a: 'static bound never holds because +//! reachability has no 'a→'static edge) +//! Matrix row for Gated: `[TypeA: ✓, TypeB: ✗]`. +//! +//! With one row per sub-trait, condensation produces exactly one slot +//! per sub-trait. Total: 2 slots. This pins the explicit-where-clause +//! rejection path — if `impl_admissible_under_class` accidentally +//! 
started admitting `'a: 'static` under a scoped class, the slot +//! would still be 1 for Gated (row pattern would just be `[✓, ✓]`), +//! so runtime behavior verification (that the TypeB cast rejects) is +//! what would catch that regression. +//! +//! Expected resolutions in the post-mono MIR: +//! trait_metadata_table_len>() → 2_usize +//! trait_metadata_index → 0_usize, 1_usize + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![allow(dead_code, unused_variables)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Root<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} + +trait Free<'a>: Root<'a> { + fn free_val(&self) -> u32; +} + +trait Gated<'a>: Root<'a> { + fn gated_val(&self) -> u32; +} + +// ---- concrete types ---- + +#[derive(Debug)] +struct TypeA<'a> { + x: &'a u32, +} + +#[derive(Debug)] +struct TypeB<'a> { + x: &'a u32, +} + +// Universal impls — both types, Root + Free. +impl<'a> Root<'a> for TypeA<'a> { + fn id(&self) -> u32 { + 1 + } +} +impl<'a> Root<'a> for TypeB<'a> { + fn id(&self) -> u32 { + 2 + } +} +impl<'a> Free<'a> for TypeA<'a> { + fn free_val(&self) -> u32 { + 10 + } +} +impl<'a> Free<'a> for TypeB<'a> { + fn free_val(&self) -> u32 { + 20 + } +} + +// Gated — TypeA is universal, TypeB has a where clause that is never +// provable for a scoped (non-'static) 'a. 
+impl<'a> Gated<'a> for TypeA<'a> { + fn gated_val(&self) -> u32 { + *self.x + } +} +impl<'a> Gated<'a> for TypeB<'a> +where + 'a: 'static, +{ + fn gated_val(&self) -> u32 { + *self.x + 100 + } +} + +// ---- multiple coercion/cast contexts (all with scoped 'a) ---- + +#[inline(never)] +fn ctx_simple<'a>(x: &'a u32) { + let obj: &dyn Root<'a> = &TypeA { x }; + let _ = core::cast!(in dyn Root<'a>, obj => dyn Free<'a>); + let _ = core::cast!(in dyn Root<'a>, obj => dyn Gated<'a>); +} + +#[inline(never)] +fn ctx_interior<'a>(x: &'a u32) { + let local: u32 = 7; + let obj: &dyn Root<'_> = &TypeB { x: &local }; + let _ = core::cast!(in dyn Root<'_>, obj => dyn Free<'_>); + let _ = core::cast!(in dyn Root<'_>, obj => dyn Gated<'_>); + let _ = x; +} + +#[inline(never)] +fn ctx_outer<'a, 'b>(x: &'a u32, y: &'b u32) +where + 'a: 'b, +{ + let obj: &dyn Root<'b> = &TypeA { x: y }; + let _ = core::cast!(in dyn Root<'b>, obj => dyn Free<'b>); + let _ = core::cast!(in dyn Root<'b>, obj => dyn Gated<'b>); + let _ = x; +} + +#[inline(never)] +fn ctx_both_b<'a>(x: &'a u32) { + let inner: u32 = 99; + let obj_a: &dyn Root<'_> = &TypeA { x }; + let obj_b: &dyn Root<'_> = &TypeB { x: &inner }; + let _ = core::cast!(in dyn Root<'_>, obj_a => dyn Gated<'_>); + let _ = core::cast!(in dyn Root<'_>, obj_b => dyn Gated<'_>); +} + +fn main() { + let a: u32 = 3; + let b: u32 = 5; + ctx_simple(&a); + ctx_interior(&a); + ctx_outer(&a, &b); + ctx_both_b(&a); +} diff --git a/tests/run-make/trait-cast-table-layout/rmake.rs b/tests/run-make/trait-cast-table-layout/rmake.rs new file mode 100644 index 0000000000000..f0657a6ade200 --- /dev/null +++ b/tests/run-make/trait-cast-table-layout/rmake.rs @@ -0,0 +1,46 @@ +// Verify that the trait cast table layout computed for a basic downcast +// scenario (3 sub-traits, 2 concrete types, no lifetimes) produces the +// expected resolved constants in post-monomorphization MIR. 
+// +// Expected table layout for `dyn Base`: +// - 3 slots (one per sub-trait: Greet, Count, Describe) +// - All impls universally admissible (no lifetimes → fast path) +// - TypeA implements all 3 → all slots populated +// - TypeB implements Greet + Count only → Describe slot is None +// +// The test checks resolved intrinsic constants in the dumped MIR: +// - trait_metadata_table_len → 3_usize +// - trait_metadata_index → slot indices 0, 1, 2 +// - is_lifetime_erasure_safe → true (no lifetimes) + +//@ needs-target-std + +use run_make_support::rustc; + +fn main() { + // Compile with -Z dump-post-mono-mir (no path = stdout). + // This dumps all post-monomorphization MIR bodies, including the + // patched bodies where trait cast intrinsics are resolved to constants. + rustc() + .input("test.rs") + .arg("-Zdump-post-mono-mir") + .run() + // ── Table length ────────────────────────────────────────── + // trait_metadata_table_len() resolves to 3_usize + // (one slot per sub-trait: Greet, Count, Describe). + .assert_stdout_contains("const 3_usize") + // ── Erasure safety ──────────────────────────────────────── + // All three sub-traits have no lifetime binder variables, so + // trait_cast_is_lifetime_erasure_safe resolves to `true`. + .assert_stdout_contains("const true") + // ── Slot indices ────────────────────────────────────────── + // Three distinct slot indices (0, 1, 2) are assigned, one per + // sub-trait. Each appears as a usize constant in a resolved + // trait_metadata_index tuple: (_X = (, N_usize)). + .assert_stdout_contains("const 0_usize") + .assert_stdout_contains("const 1_usize") + .assert_stdout_contains("const 2_usize") + // ── Instance headers ────────────────────────────────────── + // The dump should contain post-mono MIR headers. 
+ .assert_stdout_contains("post-mono MIR for instance"); +} diff --git a/tests/run-make/trait-cast-table-layout/test.rs b/tests/run-make/trait-cast-table-layout/test.rs new file mode 100644 index 0000000000000..b96e85b24b660 --- /dev/null +++ b/tests/run-make/trait-cast-table-layout/test.rs @@ -0,0 +1,130 @@ +//! Minimal trait-cast program for verifying table layout. +//! +//! Trait graph: +//! root: dyn Base (via TraitMetadataTable) +//! sub-traits: dyn Greet, dyn Count, dyn Describe +//! +//! Concrete types: TypeA (impls all 3), TypeB (impls Greet + Count only) +//! +//! Expected table layout (3 slots total, one per sub-trait): +//! - No lifetime binder variables on any trait → all impls universally admissible +//! - Fast path: each sub-trait collapses to a single slot +//! - Slot order: deterministic by FingerprintedTy StableCompare +//! +//! Expected table entries: +//! TypeA: [Some, Some, Some] (all three sub-traits implemented) +//! TypeB: [Some, Some, None] (Describe not implemented) +//! +//! Expected intrinsic resolutions: +//! trait_metadata_table_len() → 3_usize +//! trait_metadata_index() → (crate_id, {0,1,2}_usize) +//! 
trait_cast_is_lifetime_erasure_safe<...> → true (no lifetimes) + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Base: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait Greet: Base { + fn greeting(&self) -> &'static str; +} + +trait Count: Base { + fn count(&self) -> u32; +} + +trait Describe: Base { + fn description(&self) -> &'static str; +} + +// ---- concrete types ---- +#[derive(Debug)] +struct TypeA; +#[derive(Debug)] +struct TypeB; + +// Base — both types +impl Base for TypeA { + fn name(&self) -> &'static str { + "TypeA" + } +} +impl Base for TypeB { + fn name(&self) -> &'static str { + "TypeB" + } +} + +// Greet — both types +impl Greet for TypeA { + fn greeting(&self) -> &'static str { + "Hello from A" + } +} +impl Greet for TypeB { + fn greeting(&self) -> &'static str { + "Hello from B" + } +} + +// Count — both types +impl Count for TypeA { + fn count(&self) -> u32 { + 42 + } +} +impl Count for TypeB { + fn count(&self) -> u32 { + 99 + } +} + +// Describe — only TypeA +impl Describe for TypeA { + fn description(&self) -> &'static str { + "I am TypeA, the describable" + } +} + +// ---- downcast function ---- + +#[inline(never)] +fn check_a(obj: &dyn Base) { + assert_eq!(obj.name(), "TypeA"); + + let greeter = core::cast!(in dyn Base, obj => dyn Greet).unwrap(); + assert_eq!(greeter.greeting(), "Hello from A"); + + let counter = core::cast!(in dyn Base, obj => dyn Count).unwrap(); + assert_eq!(counter.count(), 42); + + let describer = core::cast!(in dyn Base, obj => dyn Describe).unwrap(); + assert_eq!(describer.description(), "I am TypeA, the describable"); +} + +#[inline(never)] +fn check_b(obj: &dyn Base) { + assert_eq!(obj.name(), "TypeB"); + + let greeter = core::cast!(in dyn Base, obj => dyn Greet).unwrap(); + assert_eq!(greeter.greeting(), "Hello from B"); + + let counter = 
core::cast!(in dyn Base, obj => dyn Count).unwrap(); + assert_eq!(counter.count(), 99); + + // Describe is NOT implemented for TypeB — cast must fail. + assert!(core::cast!(in dyn Base, obj => dyn Describe).is_err()); +} + +fn main() { + check_a(&TypeA as &dyn Base); + check_b(&TypeB as &dyn Base); +} diff --git a/tests/ui/README.md b/tests/ui/README.md index 9ef331698d2b6..c58fc37c43c8f 100644 --- a/tests/ui/README.md +++ b/tests/ui/README.md @@ -1410,6 +1410,10 @@ Exercises `#[track_caller]` and `-Z track-diagnostics`. Collection of tests for [trait bounds](https://doc.rust-lang.org/reference/trait-bounds.html). +## `tests/ui/trait-cast/` + +Tests for `#![feature(trait_cast)]` — bounded intertrait casting via `TraitMetadataTable` and the `cast!` / `try_cast!` macros. + ## `tests/ui/traits/` Broad collection of tests on traits in general. diff --git a/tests/ui/impl-trait/auto-trait-leakage/auto-trait-leak.stderr b/tests/ui/impl-trait/auto-trait-leakage/auto-trait-leak.stderr index ae10516999882..de21663e70860 100644 --- a/tests/ui/impl-trait/auto-trait-leakage/auto-trait-leak.stderr +++ b/tests/ui/impl-trait/auto-trait-leakage/auto-trait-leak.stderr @@ -9,6 +9,11 @@ note: ...which requires borrow-checking `cycle1`... | LL | fn cycle1() -> impl Clone { | ^^^^^^^^^^^^^^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `cycle1`... + --> $DIR/auto-trait-leak.rs:11:1 + | +LL | fn cycle1() -> impl Clone { + | ^^^^^^^^^^^^^^^^^^^^^^^^^ note: ...which requires promoting constants in MIR for `cycle1`... --> $DIR/auto-trait-leak.rs:11:1 | @@ -45,6 +50,11 @@ note: ...which requires borrow-checking `cycle2`... | LL | fn cycle2() -> impl Clone { | ^^^^^^^^^^^^^^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `cycle2`... + --> $DIR/auto-trait-leak.rs:17:1 + | +LL | fn cycle2() -> impl Clone { + | ^^^^^^^^^^^^^^^^^^^^^^^^^ note: ...which requires promoting constants in MIR for `cycle2`... 
--> $DIR/auto-trait-leak.rs:17:1 | diff --git a/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.current.stderr b/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.current.stderr index ff3a726477e05..e3ab185c25e26 100644 --- a/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.current.stderr +++ b/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.current.stderr @@ -20,6 +20,11 @@ note: ...which requires borrow-checking ` impl Sized { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `::foo`... + --> $DIR/method-compatability-via-leakage-cycle.rs:21:5 + | +LL | fn foo(b: bool) -> impl Sized { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: ...which requires promoting constants in MIR for `::foo`... --> $DIR/method-compatability-via-leakage-cycle.rs:21:5 | diff --git a/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.next.stderr b/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.next.stderr index 87640517ddb20..e8fcd654c72e3 100644 --- a/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.next.stderr +++ b/tests/ui/impl-trait/in-trait/method-compatability-via-leakage-cycle.next.stderr @@ -24,6 +24,11 @@ note: ...which requires borrow-checking ` impl Sized { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `::foo`... + --> $DIR/method-compatability-via-leakage-cycle.rs:21:5 + | +LL | fn foo(b: bool) -> impl Sized { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: ...which requires promoting constants in MIR for `::foo`... 
--> $DIR/method-compatability-via-leakage-cycle.rs:21:5 | diff --git a/tests/ui/offset-of/inside-array-length.stderr b/tests/ui/offset-of/inside-array-length.stderr index de110939d4aad..087c039e06838 100644 --- a/tests/ui/offset-of/inside-array-length.stderr +++ b/tests/ui/offset-of/inside-array-length.stderr @@ -53,6 +53,11 @@ note: ...which requires borrow-checking `foo::{constant#0}`... | LL | fn foo<'a, T: 'a>(_: [(); std::mem::offset_of!((T,), 0)]) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `foo::{constant#0}`... + --> $DIR/inside-array-length.rs:9:27 + | +LL | fn foo<'a, T: 'a>(_: [(); std::mem::offset_of!((T,), 0)]) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ note: ...which requires normalizing `Binder { value: ConstEvaluatable(UnevaluatedConst { def: DefId(0:7 ~ inside_array_length[07d6]::foo::{constant#0}), args: ['^c_1, T/#1] }), bound_vars: [] }`... --> $DIR/inside-array-length.rs:9:27 | diff --git a/tests/ui/pattern/non-structural-match-types-cycle-err.stderr b/tests/ui/pattern/non-structural-match-types-cycle-err.stderr index 2f4ac63fc570a..5d0966e2ffb90 100644 --- a/tests/ui/pattern/non-structural-match-types-cycle-err.stderr +++ b/tests/ui/pattern/non-structural-match-types-cycle-err.stderr @@ -31,6 +31,11 @@ note: ...which requires borrow-checking `defines`... | LL | fn defines() { | ^^^^^^^^^^^^ +note: ...which requires borrow-checking (with region summaries) `defines`... + --> $DIR/non-structural-match-types-cycle-err.rs:17:1 + | +LL | fn defines() { + | ^^^^^^^^^^^^ note: ...which requires promoting constants in MIR for `defines`... --> $DIR/non-structural-match-types-cycle-err.rs:17:1 | diff --git a/tests/ui/trait-cast/auxiliary/cross_crate_lib.rs b/tests/ui/trait-cast/auxiliary/cross_crate_lib.rs new file mode 100644 index 0000000000000..72a58775d6045 --- /dev/null +++ b/tests/ui/trait-cast/auxiliary/cross_crate_lib.rs @@ -0,0 +1,100 @@ +//! Upstream (non-global) library crate used by +//! 
`../cross-crate-casts.rs`. Rlib default — not a global crate, so +//! every trait-cast intrinsic emitted here must flow through the +//! `DelayedInstance` rmeta round-trip to be resolved downstream. + +#![feature(trait_cast)] +#![feature(sized_hierarchy)] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +pub trait Root: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +pub trait Greet: Root { + fn greeting(&self) -> &'static str; +} + +pub trait Count: Root { + fn count(&self) -> u32; +} + +pub trait Describe: Root { + fn description(&self) -> &'static str; +} + +// ---- upstream concrete types ---- + +#[derive(Debug)] +pub struct LibTypeA; + +#[derive(Debug)] +pub struct LibTypeB; + +impl Root for LibTypeA { + fn name(&self) -> &'static str { "LibTypeA" } +} +impl Greet for LibTypeA { + fn greeting(&self) -> &'static str { "hello from LibTypeA" } +} +impl Count for LibTypeA { + fn count(&self) -> u32 { 1 } +} +impl Describe for LibTypeA { + fn description(&self) -> &'static str { "the describable one" } +} + +impl Root for LibTypeB { + fn name(&self) -> &'static str { "LibTypeB" } +} +impl Greet for LibTypeB { + fn greeting(&self) -> &'static str { "hello from LibTypeB" } +} +impl Count for LibTypeB { + fn count(&self) -> u32 { 2 } +} +// NOTE: no Describe impl for LibTypeB — the corresponding downstream +// cast must return Err at runtime. + +// ---- upstream cast sites ---- + +/// Non-generic upstream cast site. The call to `core::cast!` expands to +/// `trait_metadata_index` + +/// `trait_metadata_table` intrinsics. Both indices are +/// unknown until the downstream global crate runs `trait_cast_layout`, +/// so this function is recorded in the upstream crate's +/// `delayed_codegen_requests` and its final codegen happens in the +/// global crate. +/// +/// `#[inline]` forces `cross_crate_inlinable`, which forces the +/// encoder to ship this fn's optimized MIR in rmeta. 
Without the +/// annotation, `should_encode_mir` skips non-generic, non-inline fns +/// and the downstream mono collector ICEs trying to decode the body +/// at `cascade_canonicalize` time — a real delayed-codegen bug: the +/// `has_trait_cast_intrinsics` query only scans the direct body, so +/// functions whose intrinsic calls only materialize via +/// post-monomorphization inlining of `TraitCast::checked_cast` (from +/// `core`) are not detected as needing cross-crate MIR. +#[inline] +pub fn try_describe_from_lib(obj: &dyn Root) -> Option<&'static str> { + core::cast!(in dyn Root, obj => dyn Describe) + .ok() + .map(|d| d.description()) +} + +/// Upstream factory returning a boxed trait object. The unsizing +/// `LibTypeA -> dyn Root` produces a vtable whose +/// `TraitMetadataTable::derived_metadata_table` slot points at the +/// blanket-impl monomorphization of `trait_metadata_table` — itself a delayed intrinsic. +pub fn lib_boxed_a() -> Box { + Box::new(LibTypeA) +} + +pub fn lib_boxed_b() -> Box { + Box::new(LibTypeB) +} diff --git a/tests/ui/trait-cast/basic-downcast.rs b/tests/ui/trait-cast/basic-downcast.rs new file mode 100644 index 0000000000000..6aa1fddc1e45e --- /dev/null +++ b/tests/ui/trait-cast/basic-downcast.rs @@ -0,0 +1,101 @@ +//@ run-pass +//! Basic trait-cast run-pass test: two concrete types, three sub-traits of a +//! common root. Greet and Count are implemented for both types; Describe is +//! implemented only for TypeA. A `#[inline(never)]` function receives +//! `&dyn Base` and attempts all three downcasts, exercising both the success +//! and failure paths. 
+ +#![feature(trait_cast)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Base: TraitMetadataTable + core::fmt::Debug { + fn name(&self) -> &'static str; +} + +trait Greet: Base { + fn greeting(&self) -> &'static str; +} + +trait Count: Base { + fn count(&self) -> u32; +} + +trait Describe: Base { + fn description(&self) -> &'static str; +} + +// ---- concrete types ---- +#[derive(Debug)] +struct TypeA; +#[derive(Debug)] +struct TypeB; + +// Base — both types +impl Base for TypeA { + fn name(&self) -> &'static str { "TypeA" } +} +impl Base for TypeB { + fn name(&self) -> &'static str { "TypeB" } +} + +// Greet — both types +impl Greet for TypeA { + fn greeting(&self) -> &'static str { "Hello from A" } +} +impl Greet for TypeB { + fn greeting(&self) -> &'static str { "Hello from B" } +} + +// Count — both types +impl Count for TypeA { + fn count(&self) -> u32 { 42 } +} +impl Count for TypeB { + fn count(&self) -> u32 { 99 } +} + +// Describe — only TypeA +impl Describe for TypeA { + fn description(&self) -> &'static str { "I am TypeA, the describable" } +} + +// ---- downcast function ---- + +#[inline(never)] +fn check_a(obj: &dyn Base) { + assert_eq!(obj.name(), "TypeA"); + + let greeter = core::cast!(in dyn Base, obj => dyn Greet).unwrap(); + assert_eq!(greeter.greeting(), "Hello from A"); + + let counter = core::cast!(in dyn Base, obj => dyn Count).unwrap(); + assert_eq!(counter.count(), 42); + + let describer = core::cast!(in dyn Base, obj => dyn Describe).unwrap(); + assert_eq!(describer.description(), "I am TypeA, the describable"); +} + +#[inline(never)] +fn check_b(obj: &dyn Base) { + assert_eq!(obj.name(), "TypeB"); + + let greeter = core::cast!(in dyn Base, obj => dyn Greet).unwrap(); + assert_eq!(greeter.greeting(), "Hello from B"); + + let counter = core::cast!(in dyn Base, obj => dyn Count).unwrap(); + assert_eq!(counter.count(), 99); + + // Describe is NOT implemented for 
TypeB — cast must fail. + assert!(core::cast!(in dyn Base, obj => dyn Describe).is_err()); +} + +fn main() { + check_a(&TypeA as &dyn Base); + check_b(&TypeB as &dyn Base); +} diff --git a/tests/ui/trait-cast/cross-crate-casts.rs b/tests/ui/trait-cast/cross-crate-casts.rs new file mode 100644 index 0000000000000..6404a633331be --- /dev/null +++ b/tests/ui/trait-cast/cross-crate-casts.rs @@ -0,0 +1,137 @@ +//@ run-pass +//@ aux-build:cross_crate_lib.rs +//! Cross-crate trait-cast validation. +//! +//! The upstream rlib `cross_crate_lib` defines a four-trait graph +//! (`Root`/`Greet`/`Count`/`Describe`) and two concrete types: +//! +//! * `LibTypeA` — impls every sub-trait. +//! * `LibTypeB` — impls `Greet` and `Count` but **not** `Describe`. +//! +//! The downstream bin (this crate, a global crate by virtue of being an +//! executable) performs casts through three distinct channels to exercise +//! the cross-crate `DelayedInstance` / `trait_cast_intrinsics` rmeta +//! pipeline end-to-end: +//! +//! 1. Direct cast of a stack-allocated library type: +//! `&LibTypeA as &dyn Root => dyn Greet/Count/Describe`. +//! The cast site is in the bin; the library's vtable for +//! `LibTypeA` was built upstream. +//! 2. Cast against a `Box` returned from the library. The +//! `LibTypeA -> dyn Root` unsizing happened upstream, so the +//! `TraitMetadataTable::derived_metadata_table` entry in the +//! vtable is a monomorphization of the upstream intrinsic that +//! must have been decoded from the library's rmeta. +//! 3. Cast via `cross_crate_lib::try_describe_from_lib`, a *generic* +//! `&dyn Root -> Option<&'static str>` function defined upstream +//! whose body contains a `core::cast!` expansion. The upstream +//! crate records this as a `DelayedInstance`, and the global phase +//! in the bin consumes `delayed_codegen_requests(upstream)` to +//! splice in the augmented callee. +//! +//! The test additionally validates that a sub-trait defined *downstream* +//! 
(`BinExtra`) can extend the upstream `Root` and participate in the +//! graph. Library types (which have no `BinExtra` impl) must fail that +//! cast; a bin-local type that does impl `BinExtra` must succeed. + +#![feature(trait_cast)] + +#![crate_type = "bin"] + +extern crate core; +extern crate cross_crate_lib; + +use cross_crate_lib::{ + Count, Describe, Greet, LibTypeA, LibTypeB, Root, lib_boxed_a, lib_boxed_b, + try_describe_from_lib, +}; + +// ---- downstream sub-trait extending an upstream root ---- + +trait BinExtra: Root { + fn extra(&self) -> &'static str; +} + +#[derive(Debug)] +struct BinType; + +impl Root for BinType { + fn name(&self) -> &'static str { "BinType" } +} +impl BinExtra for BinType { + fn extra(&self) -> &'static str { "downstream only" } +} + +// ---- check functions (one per concrete type) ---- +// +// `#[inline(never)]` keeps the cast sites in distinct MIR bodies so the +// per-instance `call_id` chain the collector threads through +// `DelayedInstance::callee_substitutions` is exercised. + +#[inline(never)] +fn check_lib_type_a(obj: &dyn Root) { + assert_eq!(obj.name(), "LibTypeA"); + + let g = core::cast!(in dyn Root, obj => dyn Greet).expect("A: Greet"); + assert_eq!(g.greeting(), "hello from LibTypeA"); + + let c = core::cast!(in dyn Root, obj => dyn Count).expect("A: Count"); + assert_eq!(c.count(), 1); + + let d = core::cast!(in dyn Root, obj => dyn Describe).expect("A: Describe"); + assert_eq!(d.description(), "the describable one"); + + // Downstream-defined sub-trait: LibTypeA has no BinExtra impl. 
+ assert!(core::cast!(in dyn Root, obj => dyn BinExtra).is_err()); +} + +#[inline(never)] +fn check_lib_type_b(obj: &dyn Root) { + assert_eq!(obj.name(), "LibTypeB"); + + let g = core::cast!(in dyn Root, obj => dyn Greet).expect("B: Greet"); + assert_eq!(g.greeting(), "hello from LibTypeB"); + + let c = core::cast!(in dyn Root, obj => dyn Count).expect("B: Count"); + assert_eq!(c.count(), 2); + + // LibTypeB has no Describe impl upstream. + assert!(core::cast!(in dyn Root, obj => dyn Describe).is_err()); + // ...nor the downstream BinExtra. + assert!(core::cast!(in dyn Root, obj => dyn BinExtra).is_err()); +} + +#[inline(never)] +fn check_bin_type(obj: &dyn Root) { + assert_eq!(obj.name(), "BinType"); + + // BinType implements only Root + BinExtra locally — the three + // upstream sub-traits must all fail. + assert!(core::cast!(in dyn Root, obj => dyn Greet).is_err()); + assert!(core::cast!(in dyn Root, obj => dyn Count).is_err()); + assert!(core::cast!(in dyn Root, obj => dyn Describe).is_err()); + + let e = core::cast!(in dyn Root, obj => dyn BinExtra).expect("Bin: BinExtra"); + assert_eq!(e.extra(), "downstream only"); +} + +fn main() { + // (1) direct downstream cast, lib type on the stack. + check_lib_type_a(&LibTypeA as &dyn Root); + check_lib_type_b(&LibTypeB as &dyn Root); + + // (2) library-allocated Box: vtable was built upstream. + check_lib_type_a(&*lib_boxed_a()); + check_lib_type_b(&*lib_boxed_b()); + + // (3) upstream cast site instantiated downstream. + assert_eq!(try_describe_from_lib(&LibTypeA), Some("the describable one")); + assert_eq!(try_describe_from_lib(&LibTypeB), None); + // BinType flows through the upstream cast site but has no Describe + // impl — the global phase still needs to emit a valid table entry + // (None) for it under the upstream `Describe` slot. + assert_eq!(try_describe_from_lib(&BinType), None); + + // Downstream concrete type casting to a downstream sub-trait. 
+ check_bin_type(&BinType as &dyn Root); +} diff --git a/tests/ui/trait-cast/erasure-region-closure.rs b/tests/ui/trait-cast/erasure-region-closure.rs new file mode 100644 index 0000000000000..40f53f2be65b3 --- /dev/null +++ b/tests/ui/trait-cast/erasure-region-closure.rs @@ -0,0 +1,27 @@ +//! Diagnostic (eager): a trait-cast sub-trait introduces a lifetime +//! parameter that the root supertrait cannot bound. Such a lifetime is +//! erased on unsizing to `dyn Root` and would be manufactured at +//! downcast time — unsound. +//! +//! Emitted at trait-definition time, not at cast sites, so the error +//! surfaces even when no `cast!` is written in this crate. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// Root with no lifetime parameters — accepts only subtraits that don't +// introduce lifetime parameters of their own. +trait Root: TraitMetadataTable + core::fmt::Debug { + fn val(&self) -> u32; +} + +// Sub-trait introducing `'a` which cannot be expressed through `Root`. +trait Sub<'a>: Root { + //~^ ERROR trait graph rooted at `Root` is not downcast-safe + fn f(&self) -> &'a u8; +} diff --git a/tests/ui/trait-cast/erasure-region-closure.stderr b/tests/ui/trait-cast/erasure-region-closure.stderr new file mode 100644 index 0000000000000..0fcd93b02cb43 --- /dev/null +++ b/tests/ui/trait-cast/erasure-region-closure.stderr @@ -0,0 +1,14 @@ +error: trait graph rooted at `Root` is not downcast-safe + --> $DIR/erasure-region-closure.rs:24:11 + | +LL | trait Root: TraitMetadataTable + core::fmt::Debug { + | ----------------------------------------------------------- root supertrait defined here +... 
+LL | trait Sub<'a>: Root { + | ^^ lifetime `'a` is not bounded by any lifetime on `Root` + | + = note: downcasting to `dyn Sub` could manufacture the lifetime `'a` which was erased when unsizing to `dyn Root` + = help: add a lifetime parameter to `Root` that `'a` can be bounded by + +error: aborting due to 1 previous error + diff --git a/tests/ui/trait-cast/erasure-safety-chain-walk.rs b/tests/ui/trait-cast/erasure-safety-chain-walk.rs new file mode 100644 index 0000000000000..169f8c8407aa2 --- /dev/null +++ b/tests/ui/trait-cast/erasure-safety-chain-walk.rs @@ -0,0 +1,325 @@ +//@ run-pass +//! Erasure-safety supertrait chain walk tests (S14.3.4). +//! +//! The structural surjectivity check must walk the supertrait chain from +//! the target trait up to the root trait, mapping binder variables at each +//! step. These tests exercise: +//! +//! - Depth-1: Sub directly extends Super. +//! - Depth-2: Sub extends Mid extends Super (transitive mapping). +//! - Diamond: Sub extends Mid1 + Mid2, both extend Super. The same root +//! bv must be reachable through both paths consistently. +//! +//! All tests use lifetime-parameterized traits to exercise bv mapping +//! through the chain. + +#![feature(trait_cast)] +#![allow(dead_code, unused_variables)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: Depth-1 chain. Sub<'a>: Super<'a> +// +// The simplest chain walk: one step from target to root. +// bv mapping: t0 -> r0 directly. 
+// ========================================================================= + +trait Super1<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub1<'a>: Super1<'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct C1<'a> { _x: &'a u32 } + +impl<'a> Super1<'a> for C1<'a> { + fn id(&self) -> u32 { 1 } +} +impl<'a> Sub1<'a> for C1<'a> { + fn val(&self) -> u32 { 10 } +} + +#[inline(never)] +fn case1_depth1<'a>(x: &'a u32) { + let obj: &dyn Super1<'_> = &C1 { _x: x }; + let sub = core::cast!(in dyn Super1<'_>, obj => dyn Sub1<'_>).expect("case1_depth1"); + assert_eq!(sub.val(), 10); +} + +// ========================================================================= +// Case 2: Depth-2 chain. Sub<'a>: Mid<'a>: Super<'a> +// +// Two-step chain walk: target -> mid -> root. +// bv mapping: t0 -> mid0 -> r0 (transitive). +// ========================================================================= + +trait Super2<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid2<'a>: Super2<'a> { + fn mid_val(&self) -> u32; +} +trait Sub2<'a>: Mid2<'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C2<'a> { _x: &'a u32 } + +impl<'a> Super2<'a> for C2<'a> { + fn id(&self) -> u32 { 2 } +} +impl<'a> Mid2<'a> for C2<'a> { + fn mid_val(&self) -> u32 { 20 } +} +impl<'a> Sub2<'a> for C2<'a> { + fn sub_val(&self) -> u32 { 21 } +} + +#[inline(never)] +fn case2_depth2<'a>(x: &'a u32) { + let obj: &dyn Super2<'_> = &C2 { _x: x }; + + // Direct cast to Mid2 (depth-1). + let mid = core::cast!(in dyn Super2<'_>, obj => dyn Mid2<'_>).expect("case2_depth2: mid"); + assert_eq!(mid.mid_val(), 20); + + // Cast to Sub2 (depth-2, transitive chain walk). + let sub = core::cast!(in dyn Super2<'_>, obj => dyn Sub2<'_>).expect("case2_depth2: sub"); + assert_eq!(sub.sub_val(), 21); +} + +// ========================================================================= +// Case 3: Depth-2 with lifetime transformation. 
+// +// Sub<'a,'b>: Mid<'a,'b>: Super<'a,'b> +// The chain walk passes two lifetime params through two steps. +// ========================================================================= + +trait Super3<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid3<'a, 'b>: Super3<'a, 'b> { + fn mid_val(&self) -> u32; +} +trait Sub3<'a, 'b>: Mid3<'a, 'b> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C3<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Super3<'a, 'b> for C3<'a, 'b> { + fn id(&self) -> u32 { 3 } +} +impl<'a, 'b> Mid3<'a, 'b> for C3<'a, 'b> { + fn mid_val(&self) -> u32 { 30 } +} +impl<'a, 'b> Sub3<'a, 'b> for C3<'a, 'b> { + fn sub_val(&self) -> u32 { 31 } +} + +#[inline(never)] +fn case3_depth2_multi_lifetime<'a>(x: &'a u32) { + // Both lifetimes are the same -> cast succeeds. + let obj: &dyn Super3<'_, '_> = &C3 { _x: x, _y: x }; + let sub = core::cast!(in dyn Super3<'_, '_>, obj => dyn Sub3<'_, '_>) + .expect("case3_depth2_multi_lifetime"); + assert_eq!(sub.sub_val(), 31); +} + +#[inline(never)] +fn case3_depth2_multi_lifetime_mixed<'a>(x: &'a u32) { + // Different lifetimes -> cast to Sub3 with both still works (they're + // separate bvs, the mapping is t0->r0, t1->r1 with no merging). + let local: u32 = 99; + let obj: &dyn Super3<'_, '_> = &C3 { _x: x, _y: &local }; + let sub = core::cast!(in dyn Super3<'_, '_>, obj => dyn Sub3<'_, '_>) + .expect("case3_depth2_multi_lifetime_mixed"); + assert_eq!(sub.sub_val(), 31); +} + +// ========================================================================= +// Case 4: Diamond inheritance. +// +// Sub: Mid1<'a> + Mid2<'a>, where Mid1<'a>: Super<'a> and Mid2<'a>: Super<'a> +// +// The root bv r0 is reachable through both Mid1 and Mid2. The chain +// walk must find the same root bv through both paths. 
+// ========================================================================= + +trait Super4<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid4A<'a>: Super4<'a> { + fn mid_a_val(&self) -> u32; +} +trait Mid4B<'a>: Super4<'a> { + fn mid_b_val(&self) -> u32; +} +trait Sub4<'a>: Mid4A<'a> + Mid4B<'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C4<'a> { _x: &'a u32 } + +impl<'a> Super4<'a> for C4<'a> { + fn id(&self) -> u32 { 4 } +} +impl<'a> Mid4A<'a> for C4<'a> { + fn mid_a_val(&self) -> u32 { 40 } +} +impl<'a> Mid4B<'a> for C4<'a> { + fn mid_b_val(&self) -> u32 { 41 } +} +impl<'a> Sub4<'a> for C4<'a> { + fn sub_val(&self) -> u32 { 42 } +} + +#[inline(never)] +fn case4_diamond<'a>(x: &'a u32) { + let obj: &dyn Super4<'_> = &C4 { _x: x }; + + // Cast through Mid4A path. + let mid_a = core::cast!(in dyn Super4<'_>, obj => dyn Mid4A<'_>).expect("case4_diamond: mid_a"); + assert_eq!(mid_a.mid_a_val(), 40); + + // Cast through Mid4B path. + let mid_b = core::cast!(in dyn Super4<'_>, obj => dyn Mid4B<'_>).expect("case4_diamond: mid_b"); + assert_eq!(mid_b.mid_b_val(), 41); + + // Cast to Sub4 (diamond join: both Mid4A and Mid4B paths reach Super4). + let sub = core::cast!(in dyn Super4<'_>, obj => dyn Sub4<'_>).expect("case4_diamond: sub"); + assert_eq!(sub.sub_val(), 42); +} + +// ========================================================================= +// Case 5: Diamond with lifetime-bounded impl on one branch. +// +// Sub: Mid4A<'a> + Mid4B<'a>, but Sub's impl requires 'a: 'static on +// one of the mid-trait paths. This tests that the diamond walk +// correctly handles cases where one path is more constrained. 
+// ========================================================================= + +trait Super5<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid5A<'a>: Super5<'a> { + fn mid_a_val(&self) -> u32; +} +trait Mid5B<'a>: Super5<'a> { + fn mid_b_val(&self) -> u32; +} + +#[derive(Debug)] +struct C5<'a> { _x: &'a u32 } + +impl<'a> Super5<'a> for C5<'a> { + fn id(&self) -> u32 { 5 } +} +impl<'a> Mid5A<'a> for C5<'a> { + fn mid_a_val(&self) -> u32 { 50 } +} +// Mid5B only implemented with a where clause +impl<'a> Mid5B<'a> for C5<'a> +where + 'a: 'static, +{ + fn mid_b_val(&self) -> u32 { 51 } +} + +/// 'a is a local non-static lifetime. Mid5A is always available, but +/// Mid5B requires 'a: 'static which doesn't hold. +#[inline(never)] +fn case5_diamond_constrained<'a>(x: &'a u32) { + let obj: &dyn Super5<'_> = &C5 { _x: x }; + + // Mid5A cast succeeds (no extra constraints). + let mid_a = core::cast!(in dyn Super5<'_>, obj => dyn Mid5A<'_>) + .expect("case5_diamond_constrained: mid_a"); + assert_eq!(mid_a.mid_a_val(), 50); + + // Mid5B cast fails (requires 'a: 'static, which doesn't hold). + core::cast!(in dyn Super5<'_>, obj => dyn Mid5B<'_>) + .expect_err("case5_diamond_constrained: mid_b"); +} + +// ========================================================================= +// Case 6: Depth-3 chain. Sub: Mid2: Mid1: Super +// +// Three-step chain walk for completeness. 
+// ========================================================================= + +trait Super6<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid6A<'a>: Super6<'a> { + fn a_val(&self) -> u32; +} +trait Mid6B<'a>: Mid6A<'a> { + fn b_val(&self) -> u32; +} +trait Sub6<'a>: Mid6B<'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C6<'a> { _x: &'a u32 } + +impl<'a> Super6<'a> for C6<'a> { + fn id(&self) -> u32 { 6 } +} +impl<'a> Mid6A<'a> for C6<'a> { + fn a_val(&self) -> u32 { 60 } +} +impl<'a> Mid6B<'a> for C6<'a> { + fn b_val(&self) -> u32 { 61 } +} +impl<'a> Sub6<'a> for C6<'a> { + fn sub_val(&self) -> u32 { 62 } +} + +#[inline(never)] +fn case6_depth3<'a>(x: &'a u32) { + let obj: &dyn Super6<'_> = &C6 { _x: x }; + + // Cast through each depth level. + let mid_a = core::cast!(in dyn Super6<'_>, obj => dyn Mid6A<'_>).expect("case6_depth3: mid_a"); + assert_eq!(mid_a.a_val(), 60); + + let mid_b = core::cast!(in dyn Super6<'_>, obj => dyn Mid6B<'_>).expect("case6_depth3: mid_b"); + assert_eq!(mid_b.b_val(), 61); + + let sub = core::cast!(in dyn Super6<'_>, obj => dyn Sub6<'_>).expect("case6_depth3: sub"); + assert_eq!(sub.sub_val(), 62); +} + +fn main() { + let x: u32 = 42; + + // Case 1: depth-1 direct mapping. + case1_depth1(&x); + + // Case 2: depth-2 transitive mapping. + case2_depth2(&x); + + // Case 3: depth-2 with multiple lifetimes. + case3_depth2_multi_lifetime(&x); + case3_depth2_multi_lifetime_mixed(&x); + + // Case 4: diamond inheritance. + case4_diamond(&x); + + // Case 5: diamond with constrained branch. + case5_diamond_constrained(&x); + + // Case 6: depth-3 chain. + case6_depth3(&x); +} diff --git a/tests/ui/trait-cast/erasure-safety-outlives.rs b/tests/ui/trait-cast/erasure-safety-outlives.rs new file mode 100644 index 0000000000000..a668a3f9c02d1 --- /dev/null +++ b/tests/ui/trait-cast/erasure-safety-outlives.rs @@ -0,0 +1,210 @@ +//@ run-pass +//! Erasure-safety outlives-based correspondence tests. +//! 
+//! Given that the structural surjectivity check passes, these tests +//! verify that the outlives entries at the coercion site correctly +//! determine erasure safety. The key rule: for each (target_bv, +//! root_bv) pair in the structural mapping, mutual outlives (both +//! directions) must hold. When a target bv maps to multiple root bvs, +//! those root bvs must also be mutually equivalent. +//! +//! Each test case controls the outlives environment at the coercion +//! site by varying whether lifetime parameters have provable outlives +//! relationships. + +#![feature(trait_cast)] + +#![crate_type = "bin"] + +#![allow(dead_code, unused_variables)] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: t0 -> r0, n=1, mutual outlives {(0,1),(1,0)} => true +// +// Simple one-to-one mapping with both outlives directions present. +// This is the positive case: the coercion site has evidence that +// the root bv and target bv refer to the same lifetime. +// ========================================================================= + +trait Root1<'a>: TraitMetadataTable> + core::fmt::Debug { + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Target1<'a>: Root1<'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct T1<'a> { _x: &'a u32 } + +impl<'a> Root1<'a> for T1<'a> { + fn id(&self) -> u32 { 1 } +} +impl<'a> Target1<'a> for T1<'a> { + fn val(&self) -> u32 { 100 } +} + +/// Both root and target share the same universal lifetime 'a. +/// The mono collector produces mutual outlives entries. 
+#[inline(never)] +fn case1_mutual<'a>(x: &'a u32) { + let obj: &dyn Root1<'a> = &T1 { _x: x }; + let target = core::cast!(in dyn Root1<'_>, obj => dyn Target1<'_>).expect("case1_mutual"); + assert_eq!(target.val(), 100); +} + +// ========================================================================= +// Case 2: Asymmetric outlives — cast must fail +// +// The impl requires 'a: 'b, so the table slot is only populated when +// that holds. Even when the impl is admissible, erasure safety requires +// the dyn bv for the root and target to be provably equivalent. Here +// the coercion site lacks that evidence, so the cast must fail. +// ========================================================================= + +trait Root2<'a>: TraitMetadataTable> + core::fmt::Debug { + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Target2<'a>: Root2<'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct T2<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Root2<'a> for T2<'a, 'b> { + fn id(&self) -> u32 { 2 } +} +// The impl requires 'a: 'b, so the table slot is only populated when +// that holds. Additionally, erasure safety requires the dyn bv for the +// root and target to be provably equivalent. +impl<'a, 'b> Target2<'a> for T2<'a, 'b> +where + 'a: 'b, +{ + fn val(&self) -> u32 { 200 } +} + +/// We can't prove anything about a hidden lifetime. The cast must fail. +#[inline(never)] +fn case2_both_directions<'a>(x: &'a u32) { + let obj: &dyn Root2<'_> = &T2 { _x: x, _y: x }; + core::cast!(in dyn Root2<'_>, obj => dyn Target2<'_>).expect_err("case2_both_directions"); +} + +/// We can't prove anything about a hidden lifetime. The cast must fail. 
+#[inline(never)] +fn case2_no_evidence<'a>(x: &'a u32) { + let local: u32 = 99; + let obj: &dyn Root2<'_> = &T2 { _x: &local, _y: x }; + core::cast!(in dyn Root2<'_>, obj => dyn Target2<'_>).expect_err("case2_no_evidence"); +} + +// ========================================================================= +// Case 3: Swap mapping t0 -> r1, t1 -> r0, n=2 +// with mutual outlives {(1,2),(2,1),(0,3),(3,0)} => true +// +// Both pairs (t0<->r1) and (t1<->r0) have mutual outlives in the +// combined index space. +// ========================================================================= + +trait Root3<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Target3<'a, 'b>: Root3<'b, 'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct T3<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Root3<'a, 'b> for T3<'a, 'b> { + fn id(&self) -> u32 { 3 } +} +impl<'a, 'b> Target3<'a, 'b> for T3<'b, 'a> { + fn val(&self) -> u32 { 300 } +} + +/// When both lifetimes are the same, the swap is trivially satisfied. +/// Explicit 'a at the coercion site provides invariant constraints; +/// the cast site uses inferred lifetimes. +#[inline(never)] +fn case3_swap_bounded<'a>(x: &'a u32) { + let obj: &dyn Root3<'a, 'a> = &T3 { _x: x, _y: x }; + let target = core::cast!(in dyn Root3<'_, '_>, obj => dyn Target3<'_, '_>) + .expect("case3_swap_bounded"); + assert_eq!(target.val(), 300); +} + +/// When lifetimes are unrelated, the swap mapping still succeeds +/// because each structural pair maps to the same concrete lifetime: +/// target_bv0 and root_bv1 both hold 'local, target_bv1 and root_bv0 +/// both hold 'a. The resulting reference is bounded by min('a, 'local). 
+#[inline(never)] +fn case3_swap_unbounded<'a>(x: &'a u32) { + let local: u32 = 99; + let obj: &(dyn Root3<'a, '_> + '_) = &T3 { _x: x, _y: &local }; + let target = core::cast!(in dyn Root3<'_, '_>, obj => dyn Target3<'_, '_>) + .expect("case3_swap_unbounded"); + assert_eq!(target.val(), 300); +} + +// ========================================================================= +// Case 4: Fan-out t0 -> {r0, r1}, n=2 +// with ALL mutual: {(0,2),(2,0),(1,2),(2,1),(0,1),(1,0)} => true +// +// Target has one bv mapping to both root bvs. All pairs must be +// mutually equivalent. +// ========================================================================= + +trait Root4<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Target4<'a>: Root4<'a, 'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct T4<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Root4<'a, 'b> for T4<'a, 'b> { + fn id(&self) -> u32 { 4 } +} +impl<'a> Target4<'a> for T4<'a, 'a> { + fn val(&self) -> u32 { 400 } +} + +/// When 'a == 'b (same reference), all three pairs (t0<->r0, t0<->r1, +/// r0<->r1) are trivially mutual. +/// Explicit 'a at the coercion site; cast lifetimes inferred. +#[inline(never)] +fn case4_all_mutual<'a>(x: &'a u32) { + let obj: &dyn Root4<'a, 'a> = &T4 { _x: x, _y: x }; + let target = core::cast!(in dyn Root4<'_, '_>, obj => dyn Target4<'_>) + .expect("case4_all_mutual"); + assert_eq!(target.val(), 400); +} + +// When 'a and 'b are unrelated r0 and r1 are not equivalent, so the +// fan-out mapping fails even when the target bv outlives each root bv +// individually. Borrowck rules out a direct in-source reproduction. 
+ +fn main() { + let x: u32 = 42; + + case1_mutual(&x); + + case2_both_directions(&x); + case2_no_evidence(&x); + + case3_swap_bounded(&x); + case3_swap_unbounded(&x); + + case4_all_mutual(&x); +} diff --git a/tests/ui/trait-cast/erasure-safety-projections.rs b/tests/ui/trait-cast/erasure-safety-projections.rs new file mode 100644 index 0000000000000..f683b0822f627 --- /dev/null +++ b/tests/ui/trait-cast/erasure-safety-projections.rs @@ -0,0 +1,217 @@ +//@ run-pass +//! Erasure-safety projection predicate tests. +//! +//! Binder variables can appear not only in the principal trait's generic +//! args, but also in associated type projections (e.g., `Assoc = &'a u8`). +//! The erasure-safety check must account for these by matching projections +//! between the target and root dyn types by associated type DefId, then +//! walking both in TypeVisitor DFS order to establish bv correspondence. +//! +//! Test cases: +//! - Projection match: both root and target dyn types carry the same +//! projection -> bv correspondence through projection. +//! - Principal + projection bvs: binder variables appear in both the +//! principal trait args and the projection term. +//! - Transitive chain with projection: the projection flows through +//! an intermediate supertrait. + +#![feature(trait_cast)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: Projection match through associated types. +// +// The root trait Root1 has an associated type Assoc. When used as +// `dyn Root1`, the projection creates a binder variable. +// The sub-trait Sub1 inherits from Root1, so `dyn Sub1` +// carries the same projection. The bv correspondence is established +// through the shared projection predicate. 
+// ========================================================================= + +trait Root1: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Sub1: Root1 { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct P1<'a> { _x: &'a u8 } + +impl<'a> Root1 for P1<'a> { + type Assoc = &'a u8; + fn id(&self) -> u32 { 1 } +} +impl<'a> Sub1 for P1<'a> { + fn val(&self) -> u32 { 10 } +} + +/// Both `dyn Root1` and `dyn Sub1` have +/// a binder variable for 'a in the projection. The structural check maps +/// the projection's bv in the target to the same bv in the root. +#[inline(never)] +fn case1_projection_match<'a>(x: &'a u8) { + let obj: &dyn Root1 = &P1 { _x: x }; + let sub = core::cast!( + in dyn Root1 + '_, + obj => dyn Sub1 + '_ + ).expect("case1_projection_match"); + assert_eq!(sub.val(), 10); +} + +// ========================================================================= +// Case 2: Principal trait args carry a lifetime + projection carries +// the same lifetime. +// +// Root2<'a> has Assoc = &'a u8. When used as +// `dyn Root2<'a, Assoc = &'a u8>`, the principal arg and the projection +// both reference the same binder variable. The sub-trait Sub2<'a> +// inherits from Root2<'a>, producing the same projection. +// ========================================================================= + +trait Root2<'a>: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Sub2<'a>: Root2<'a> { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct P2<'a> { _x: &'a u8 } + +impl<'a> Root2<'a> for P2<'a> { + type Assoc = &'a u8; + fn id(&self) -> u32 { 2 } +} +impl<'a> Sub2<'a> for P2<'a> { + fn val(&self) -> u32 { 20 } +} + +/// The principal (Root2<'a>) and projection (Assoc = &'a u8) carry +/// lifetimes that refer to the same underlying bv. Cast succeeds. 
+#[inline(never)] +fn case2_principal_and_projection<'a>(x: &'a u8) { + let obj: &dyn Root2<'_, Assoc = &'a u8> = &P2 { _x: x }; + let sub = core::cast!( + in dyn Root2<'_, Assoc = &'a u8>, + obj => dyn Sub2<'_, Assoc = &'a u8> + ).expect("case2_principal_and_projection"); + assert_eq!(sub.val(), 20); +} + +// ========================================================================= +// Case 3: Transitive chain with projection. +// +// Sub3: Mid3: Root3, where Root3 defines an associated type. The +// projection `Assoc = &'a u8` is specified in the dyn type and must +// be matched at each level of the chain. +// ========================================================================= + +trait Root3: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Mid3: Root3 { + fn mid_val(&self) -> u32; +} +trait Sub3: Mid3 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct P3<'a> { _x: &'a u8 } + +impl<'a> Root3 for P3<'a> { + type Assoc = &'a u8; + fn id(&self) -> u32 { 3 } +} +impl<'a> Mid3 for P3<'a> { + fn mid_val(&self) -> u32 { 30 } +} +impl<'a> Sub3 for P3<'a> { + fn sub_val(&self) -> u32 { 31 } +} + +/// The projection flows from Root3 through Mid3 to Sub3. Casting +/// through the chain preserves the bv correspondence. +#[inline(never)] +fn case3_transitive_projection<'a>(x: &'a u8) { + let obj: &dyn Root3 = &P3 { _x: x }; + + // Cast to Mid3 (depth-1). + let mid = core::cast!( + in dyn Root3, + obj => dyn Mid3 + ).expect("case3_transitive_projection: mid"); + assert_eq!(mid.mid_val(), 30); + + // Cast to Sub3 (depth-2). + let sub = core::cast!( + in dyn Root3, + obj => dyn Sub3 + ).expect("case3_transitive_projection: sub"); + assert_eq!(sub.sub_val(), 31); +} + +// ========================================================================= +// Case 4: No-lifetime traits with projections (no bvs). 
+// +// When the associated type doesn't involve any lifetimes (e.g., +// `Assoc = u32`), the projections carry no binder variables. +// Surjectivity is trivially satisfied. +// ========================================================================= + +trait Root4: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + #[allow(dead_code)] + fn id(&self) -> u32; +} +trait Sub4: Root4 { + fn val(&self) -> u32; +} + +#[derive(Debug)] +struct P4; + +impl Root4 for P4 { + type Assoc = u32; + fn id(&self) -> u32 { 4 } +} +impl Sub4 for P4 { + fn val(&self) -> u32 { 40 } +} + +/// No binder variables in the projection. Trivially erasure-safe. +#[inline(never)] +fn case4_no_lifetime_projection(obj: &dyn Root4) { + let sub = core::cast!( + in dyn Root4, + obj => dyn Sub4 + ).expect("case4_no_lifetime_projection"); + assert_eq!(sub.val(), 40); +} + +fn main() { + let x: u8 = 42; + + // Case 1: projection match, same bv. + case1_projection_match(&x); + + // Case 2: principal + projection bvs. + case2_principal_and_projection(&x); + + // Case 3: transitive projection chain. + case3_transitive_projection(&x); + + // Case 4: no-lifetime projection (trivially safe). + case4_no_lifetime_projection(&P4 as &dyn Root4); +} diff --git a/tests/ui/trait-cast/erasure-safety-structural.rs b/tests/ui/trait-cast/erasure-safety-structural.rs new file mode 100644 index 0000000000000..2f465e53aff6c --- /dev/null +++ b/tests/ui/trait-cast/erasure-safety-structural.rs @@ -0,0 +1,259 @@ +//@ run-pass +//! Erasure-safety structural surjectivity tests (S14.3.1). +//! +//! Verifies that the binder-variable mapping between root and target dyn +//! types is structurally surjective. These tests exercise the "forward +//! coverage" and "backward coverage" rules: every target binder variable +//! must map to at least one root binder variable, and every root binder +//! variable must be mapped to by at least one target binder variable. +//! +//! 
Tests marked "depends on outlives" are exercised in both bounded and +//! unbounded contexts; the structural check passes but the final result +//! depends on whether mutual outlives evidence is available. + +#![feature(trait_cast)] +#![allow(dead_code, unused_variables)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: dyn for<'a> Super<'a> -> dyn for<'a> Sub<'a> via Sub<'a>: Super<'a> +// +// Structural: t0 -> r0 (one-to-one identity mapping). +// Result: depends on outlives (needs mutual outlives for t0 <-> r0). +// ========================================================================= + +trait Super1<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub1<'a>: Super1<'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S1<'a> { _x: &'a u32 } + +impl<'a> Super1<'a> for S1<'a> { + fn id(&self) -> u32 { 1 } +} +impl<'a> Sub1<'a> for S1<'a> { + fn sub_val(&self) -> u32 { 10 } +} + +/// 'a is a universal region; coercing S1<'a> to dyn Super1<'a> produces +/// a binder with one variable. The identity mapping t0 -> r0 is trivially +/// mutual in the same universal region, so erasure safety holds. +#[inline(never)] +fn case1_bounded<'a>(x: &'a u32) { + let obj: &dyn Super1<'_> = &S1 { _x: x }; + let sub = core::cast!(in dyn Super1<'_>, obj => dyn Sub1<'_>).expect("case1_bounded"); + assert_eq!(sub.sub_val(), 10); +} + +// ========================================================================= +// Case 2 removed: `trait Sub2<'a>: Super2` (Super2 with no lifetime params) +// is now rejected at trait-definition time by the eager downcast-safety +// check. See tests/ui/trait-cast/erasure-region-closure.rs for the +// compile-fail coverage of this pattern. 
+// ========================================================================= +// Case 3: dyn for<'a> Super<'a> -> dyn Sub via Sub: Super<'static> +// +// Structural: backward fails. Root bv r0 is not mapped to by any target +// bv (the target has none). r0 <- ? fails. +// Result: always false. +// ========================================================================= + +trait Super3<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub3: Super3<'static> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S3; + +impl<'a> Super3<'a> for S3 { + fn id(&self) -> u32 { 3 } +} +impl Sub3 for S3 { + fn sub_val(&self) -> u32 { 30 } +} + +/// The target dyn type has no binder variables, but the root has r0. +/// Backward coverage fails: r0 is not reached by any target bv. +/// The cast must fail. +#[inline(never)] +fn case3_backward_fails<'a>(x: &'a u32) { + let obj: &dyn Super3<'_> = &S3; + core::cast!(in dyn Super3<'_>, obj => dyn Sub3).expect_err("case3_backward_fails"); +} + +// ========================================================================= +// Case 4: dyn for<'a,'b> Super<'a,'b> -> dyn for<'a> Sub<'a> +// via Sub<'a>: Super<'a, 'a> +// +// Structural: t0 -> {r0, r1}. Both root bvs are reached (backward ok), +// and the target bv maps to both (forward ok). BUT the two root bvs +// must be equivalent (mutual outlives between r0 and r1), which requires +// outlives evidence. +// Result: depends on outlives. 
+// ========================================================================= + +trait Super4<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub4<'a>: Super4<'a, 'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S4<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Super4<'a, 'b> for S4<'a, 'b> { + fn id(&self) -> u32 { 4 } +} +impl<'a> Sub4<'a> for S4<'a, 'a> { + fn sub_val(&self) -> u32 { 40 } +} + +/// When 'a == 'b (same reference), the two root bvs are equivalent, +/// so the cast succeeds. +#[inline(never)] +fn case4_bounded<'a>(x: &'a u32) { + let obj: &dyn Super4<'_, '_> = &S4 { _x: x, _y: x }; + let sub = core::cast!(in dyn Super4<'_, '_>, obj => dyn Sub4<'_>).expect("case4_bounded"); + assert_eq!(sub.sub_val(), 40); +} + +/// When 'a and 'b are unrelated (no mutual outlives), the cast fails +/// because r0 and r1 are not provably equivalent. +/// Borrowck forces both lifetimes to be equivalent due to the common TraitMetadataTable marker. + +// ========================================================================= +// Case 5: dyn for<'a,'b> Super<'a,'b> -> dyn for<'a,'b> Sub<'a,'b> +// via Sub<'a,'b>: Super<'b,'a> (swap) +// +// Structural: t0 -> r1, t1 -> r0. Both directions covered. +// Result: depends on outlives (needs mutual outlives for each pair). +// ========================================================================= + +trait Super5<'a, 'b>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub5<'a, 'b>: Super5<'b, 'a> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S5<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Super5<'a, 'b> for S5<'a, 'b> { + fn id(&self) -> u32 { 5 } +} +impl<'a, 'b> Sub5<'a, 'b> for S5<'b, 'a> { + fn sub_val(&self) -> u32 { 50 } +} + +/// When 'a and 'b are the same region, the swap is trivially satisfied. 
+#[inline(never)] +fn case5_bounded<'a>(x: &'a u32) { + let obj: &dyn Super5<'_, '_> = &S5 { _x: x, _y: x }; + let sub = core::cast!(in dyn Super5<'_, '_>, obj => dyn Sub5<'_, '_>).expect("case5_bounded"); + assert_eq!(sub.sub_val(), 50); +} + +/// When 'a and 'b are unrelated, the swap mapping still succeeds +/// because each structural pair maps to the same concrete lifetime: +/// target_bv0 and root_bv1 both hold 'local, target_bv1 and root_bv0 +/// both hold 'a. +#[inline(never)] +fn case5_unbounded<'a>(x: &'a u32) { + let local: u32 = 99; + let obj: &dyn Super5<'_, '_> = &S5 { _x: x, _y: &local }; + let sub = core::cast!(in dyn Super5<'_, '_>, obj => dyn Sub5<'_, '_>) + .expect("case5_unbounded"); + assert_eq!(sub.sub_val(), 50); +} + +// ========================================================================= +// Case 6: Same-trait cast: dyn for<'a> Super<'a> -> dyn Super<'a> (self) +// +// Structural: identity mapping (t0 -> r0, trivially surjective). +// Result: always true (identity mapping, trivially mutual). +// ========================================================================= + +/// A same-trait cast is always erasure-safe. +#[inline(never)] +fn case6_identity<'a>(x: &'a u32) { + let obj: &dyn Super1<'_> = &S1 { _x: x }; + // Cast from dyn Super1 to dyn Super1 is the identity cast. + let same = core::cast!(in dyn Super1<'_>, obj => dyn Super1<'_>).expect("case6_identity"); + assert_eq!(same.id(), 1); +} + +// ========================================================================= +// Case 7: No principal (auto-trait only) +// +// When neither root nor target has a principal trait (only auto traits), +// there are no binder variables. Surjectivity is trivially satisfied. +// Result: always true. +// ========================================================================= + +// Note: auto-trait-only casts are a degenerate case. The existing test +// infrastructure requires a principal trait for the TraitMetadataTable +// bound. 
We exercise this indirectly: a root with no lifetime params +// casting to a target with no lifetime params is the analogous case. + +trait Super7: TraitMetadataTable + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Sub7: Super7 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S7; + +impl Super7 for S7 { + fn id(&self) -> u32 { 7 } +} +impl Sub7 for S7 { + fn sub_val(&self) -> u32 { 70 } +} + +/// No binder variables on either side. Trivially erasure-safe. +#[inline(never)] +fn case7_no_bvs(obj: &dyn Super7) { + let sub = core::cast!(in dyn Super7, obj => dyn Sub7).expect("case7_no_bvs"); + assert_eq!(sub.sub_val(), 70); +} + +fn main() { + let x: u32 = 42; + + // Case 1: identity mapping, bounded — succeeds. + case1_bounded(&x); + + // Case 2 removed — see note above; compile-fail coverage in + // tests/ui/trait-cast/erasure-region-closure.rs. + + // Case 3: backward coverage failure — always fails. + case3_backward_fails(&x); + + // Case 4: fan-out mapping t0 -> {r0, r1}. + case4_bounded(&x); // same region -> succeeds + + // Case 5: swap mapping t0 -> r1, t1 -> r0. + case5_bounded(&x); // same region -> succeeds + case5_unbounded(&x); // unrelated regions -> fails + + // Case 6: same-trait identity cast — always succeeds. + case6_identity(&x); + + // Case 7: no binder variables — always succeeds. + case7_no_bvs(&S7 as &dyn Super7); +} diff --git a/tests/ui/trait-cast/invalid-cast-target.rs b/tests/ui/trait-cast/invalid-cast-target.rs new file mode 100644 index 0000000000000..0f7efb7a7a312 --- /dev/null +++ b/tests/ui/trait-cast/invalid-cast-target.rs @@ -0,0 +1,111 @@ +//! Negative compile-fail tests: cast! invocations whose TARGET cannot +//! be reached from the source dyn type within the trait graph. +//! +//! Existing compile-fail coverage (not-dyn-compat.rs) tests traits +//! whose shape breaks dyn compatibility. This file complements that +//! by testing casts to dyn types that ARE individually dyn-compatible +//! 
but are not reachable from the source's trait graph. Each target +//! fails `E0277` because `TraitCast` requires +//! `U: TraitMetadataTable`, and the auto-impled +//! `TraitMetadataTable` chain does not reach the target from the +//! source's trait graph: +//! +//! 1. Sub-trait with a wrong generic type parameter. +//! 2. Sub-trait of an entirely different root trait. +//! 3. A trait that is not a sub-trait of anything (no connection). +//! +//! Note: a naive wrong-projection target +//! (e.g. `dyn Sub` when the source carries +//! `Assoc = &'a u32`) does NOT fail compile-time; the auto-impled +//! `TraitMetadataTable` accepts the mismatched projection. Such +//! mismatches surface as a *runtime* erasure-safety failure and are +//! covered by `erasure-safety-projections.rs` / `runtime-cast-failures.rs` +//! rather than here. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Shared Root1 / Sub1 from the positive `lifetime-in-generics` case 1. +// ========================================================================= + +trait Root1: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub1: Root1 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S1<'a> { x: &'a u32 } + +impl<'a> Root1<&'a u32> for S1<'a> { + fn val(&self) -> u32 { *self.x } +} +impl<'a> Sub1<&'a u32> for S1<'a> { + fn sub_val(&self) -> u32 { *self.x + 1 } +} + +// ========================================================================= +// Invalid 1: Sub-trait with the wrong generic type parameter. +// +// Cast target is `dyn Sub1` but the source holds `&'_ u32`. The +// auto-impled `TraitMetadataTable>` for `dyn Sub1` +// uses the same `T` on both sides; substituting `u64` for the target +// breaks the chain to `dyn Root1<&u32>`. 
+// ========================================================================= + +fn invalid1<'a>(x: &'a u32) { + let obj: &dyn Root1<&'_ u32> = &S1 { x }; + let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub1); + //~^ ERROR E0277 + //~| ERROR E0277 + //~| ERROR E0277 +} + +// ========================================================================= +// Invalid 2: Sub-trait of a DIFFERENT root trait. +// +// Root2 is a separate trait graph with its own sub-trait Sub2. +// Casting from `dyn Root1<_>` to `dyn Sub2<_>` fails because Sub2's +// `TraitMetadataTable` chain connects it to Root2, not Root1. +// ========================================================================= + +trait Root2: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub2: Root2 { + fn sub_val(&self) -> u32; +} + +fn invalid2<'a>(x: &'a u32) { + let obj: &dyn Root1<&'_ u32> = &S1 { x }; + let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub2<&'_ u32>); + //~^ ERROR `Sub2` is not in the trait graph rooted at `Root1` + //~| ERROR `Sub2` is not in the trait graph rooted at `Root1` + //~| ERROR E0277 +} + +// ========================================================================= +// Invalid 3: Completely unrelated dyn trait (no root relationship). +// +// `Unrelated` carries its own `TraitMetadataTable` +// supertrait but is not a sub-trait of any shared root with Root1. 
+// ========================================================================= + +trait Unrelated: TraitMetadataTable + core::fmt::Debug { + fn do_something(&self); +} + +fn invalid3<'a>(x: &'a u32) { + let obj: &dyn Root1<&'_ u32> = &S1 { x }; + let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Unrelated); + //~^ ERROR `Unrelated` is not in the trait graph rooted at `Root1` + //~| ERROR `Unrelated` is not in the trait graph rooted at `Root1` + //~| ERROR E0277 +} diff --git a/tests/ui/trait-cast/invalid-cast-target.stderr b/tests/ui/trait-cast/invalid-cast-target.stderr new file mode 100644 index 0000000000000..54b342ad8a438 --- /dev/null +++ b/tests/ui/trait-cast/invalid-cast-target.stderr @@ -0,0 +1,128 @@ +error[E0277]: the trait bound `dyn Sub1: std::marker::TraitMetadataTable>` is not satisfied + --> $DIR/invalid-cast-target.rs:65:55 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub1); + | ^^^^^^^^^^^^^ the trait `Sized` is not implemented for `dyn Sub1` + | + = note: required for `dyn Sub1` to implement `std::marker::TraitMetadataTable>` +note: required by a bound in `std::trait_cast::TraitCast::cast` + --> $SRC_DIR/core/src/trait_cast.rs:LL:COL + +error[E0277]: the size for values of type `dyn Sub1` cannot be known at compilation time + --> $DIR/invalid-cast-target.rs:65:48 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub1); + | -----------------------------------^^^------------------ + | | | + | | the trait `Sized` is not implemented for `dyn Sub1` + | required by a bound introduced by this call + | + = note: the trait bound `Sized` is not satisfied + = note: required for `dyn Sub1` to implement `std::marker::TraitMetadataTable>` + = note: required for `&dyn Root1<&u32>` to implement `TraitCast, dyn Sub1>` +help: consider borrowing here + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &obj => dyn Sub1); + | + +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &mut obj => dyn Sub1); + | ++++ + +error[E0277]: the 
trait bound `dyn Sub1: std::marker::TraitMetadataTable>` is not satisfied + --> $DIR/invalid-cast-target.rs:65:13 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub1); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Sized` is not implemented for `dyn Sub1` + | + = help: the following other types implement trait `TraitCast`: + &'r T + &'r mut T + Arc + Box + Rc + = note: required for `dyn Sub1` to implement `std::marker::TraitMetadataTable>` + = note: required for `&dyn Root1<&u32>` to implement `TraitCast, dyn Sub1>` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: `Sub2` is not in the trait graph rooted at `Root1` + --> $DIR/invalid-cast-target.rs:88:13 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub2<&'_ u32>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Sub2` does not have `Root1` as a (transitive) supertrait + = help: add `Root1` as a supertrait bound on `Sub2` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `dyn Sub2<&u32>` cannot be known at compilation time + --> $DIR/invalid-cast-target.rs:88:48 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub2<&'_ u32>); + | -----------------------------------^^^---------------------- + | | | + | | the trait `Sized` is not implemented for `dyn Sub2<&u32>` + | required by a bound introduced by this call + | + = note: the trait bound `Sized` is not satisfied + = note: required for `dyn Sub2<&u32>` to implement `std::marker::TraitMetadataTable>` + = note: required for `&dyn Root1<&u32>` to implement `TraitCast, dyn Sub2<&u32>>` +help: consider borrowing here + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &obj => dyn Sub2<&'_ u32>); + | + +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &mut obj => dyn Sub2<&'_ 
u32>); + | ++++ + +error: `Sub2` is not in the trait graph rooted at `Root1` + --> $DIR/invalid-cast-target.rs:88:13 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub2<&'_ u32>); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Sub2` does not have `Root1` as a (transitive) supertrait + = help: add `Root1` as a supertrait bound on `Sub2` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: `Unrelated` is not in the trait graph rooted at `Root1` + --> $DIR/invalid-cast-target.rs:107:13 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Unrelated); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Unrelated` does not have `Root1` as a (transitive) supertrait + = help: add `Root1` as a supertrait bound on `Unrelated` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `dyn Unrelated` cannot be known at compilation time + --> $DIR/invalid-cast-target.rs:107:48 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Unrelated); + | -----------------------------------^^^------------------ + | | | + | | the trait `Sized` is not implemented for `dyn Unrelated` + | required by a bound introduced by this call + | + = note: the trait bound `Sized` is not satisfied + = note: required for `dyn Unrelated` to implement `std::marker::TraitMetadataTable>` + = note: required for `&dyn Root1<&u32>` to implement `TraitCast, dyn Unrelated>` +help: consider borrowing here + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &obj => dyn Unrelated); + | + +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, &mut obj => dyn Unrelated); + | ++++ + +error: `Unrelated` is not in the trait graph rooted at `Root1` + --> 
$DIR/invalid-cast-target.rs:107:13 + | +LL | let _ = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Unrelated); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Unrelated` does not have `Root1` as a (transitive) supertrait + = help: add `Root1` as a supertrait bound on `Unrelated` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 9 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/trait-cast/lifetime-bounded-downcast.rs b/tests/ui/trait-cast/lifetime-bounded-downcast.rs new file mode 100644 index 0000000000000..86f5b71c1309a --- /dev/null +++ b/tests/ui/trait-cast/lifetime-bounded-downcast.rs @@ -0,0 +1,141 @@ +//@ run-pass +//! Trait-cast with lifetime-parameterized types: single binary crate, two +//! concrete types (`TypeA<'a, 'b>`, `TypeB<'a, 'b>`), three sub-traits of a +//! common root. +//! +//! - `Label` and `Inspect` are implemented for both types. +//! - `TypeB`'s `Inspect` impl requires `'b: 'a`. +//! - `Special` is implemented only for `TypeA`. +//! +//! The unsizing coercion `&TypeB → &dyn Root` is performed in two distinct +//! contexts whose borrowck `region_summary` yields different outlives +//! relationships, producing different outlives classes in the mono collector: +//! - `coerce_b_bounded`: `'b: 'a` provable → Inspect slot populated +//! - `coerce_b_unbounded`: `'b` is strictly interior, no `'b: 'a` → +//! Inspect slot is `None` +//! +//! `#[inline(never)]` checkers receive the erased `&dyn Root` and exercise +//! both the success and failure paths via `assert_eq!`. 
+ +#![feature(trait_cast)] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ---- trait graph ---- + +trait Root: TraitMetadataTable + core::fmt::Debug { + fn id(&self) -> u32; +} + +trait Label: Root { + fn label(&self) -> &'static str; +} + +trait Inspect: Root { + fn data(&self) -> u32; +} + +trait Special: Root { + fn special_value(&self) -> u64; +} + +// ---- concrete types (two lifetimes each) ---- + +#[derive(Debug)] +struct TypeA<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +#[derive(Debug)] +struct TypeB<'a, 'b> { + x: &'a u32, + y: &'b u32, +} + +// Root — both types +impl<'a, 'b> Root for TypeA<'a, 'b> { + fn id(&self) -> u32 { 1 } +} +impl<'a, 'b> Root for TypeB<'a, 'b> { + fn id(&self) -> u32 { 2 } +} + +// Label — both types +impl<'a, 'b> Label for TypeA<'a, 'b> { + fn label(&self) -> &'static str { "TypeA" } +} +impl<'a, 'b> Label for TypeB<'a, 'b> { + fn label(&self) -> &'static str { "TypeB" } +} + +// Inspect — both types, but TypeB's impl requires 'b: 'a +impl<'a, 'b> Inspect for TypeA<'a, 'b> { + fn data(&self) -> u32 { *self.x + *self.y } +} +impl<'a, 'b> Inspect for TypeB<'a, 'b> +where + 'b: 'a, +{ + fn data(&self) -> u32 { *self.x * *self.y } +} + +// Special — only TypeA +impl<'a, 'b> Special for TypeA<'a, 'b> { + fn special_value(&self) -> u64 { 42 } +} + +// ---- downcast checkers ---- + +#[inline(never)] +fn check_a(obj: &dyn Root) { + assert_eq!(obj.id(), 1); + + let labeler = core::cast!(in dyn Root, obj => dyn Label) + .expect("check_a: label"); + assert_eq!(labeler.label(), "TypeA"); + + let inspector = core::cast!(in dyn Root, obj => dyn Inspect) + .expect("check_a: inspect"); + assert_eq!(inspector.data(), 30); // 10 + 20 + + let special = core::cast!(in dyn Root, obj => dyn Special) + .expect("check_a: special"); + assert_eq!(special.special_value(), 42); +} + +/// Checker for TypeB when the outlives class does NOT include `'b: 'a`. 
+#[inline(never)] +fn check_b_inspect_absent(obj: &dyn Root) { + assert_eq!(obj.id(), 2); + + let labeler = core::cast!(in dyn Root, obj => dyn Label) + .expect("check_b_inspect_absent: label"); + assert_eq!(labeler.label(), "TypeB"); + + // Inspect is not available: where-bounds aren't provable + // after unsizing. + core::cast!(in dyn Root, obj => dyn Inspect) + .expect_err("check_b_inspect_absent: inspect"); + + // Special is NOT implemented for TypeB. + core::cast!(in dyn Root, obj => dyn Special) + .expect_err("check_b_inspect_absent: special"); +} + +#[inline(never)] +fn coerce_b_unbounded<'a>(x: &'a u32) { + let local: u32 = 20; + check_b_inspect_absent(&TypeB { x, y: &local } as &dyn Root); +} + +fn main() { + let x: u32 = 10; + let y: u32 = 20; + + check_a(&TypeA { x: &x, y: &y } as &dyn Root); + + // Negative: 'b is local to coerce_b_unbounded, strictly shorter than 'a. + coerce_b_unbounded(&x); +} diff --git a/tests/ui/trait-cast/lifetime-in-generics.rs b/tests/ui/trait-cast/lifetime-in-generics.rs new file mode 100644 index 0000000000000..0ba1a138cf950 --- /dev/null +++ b/tests/ui/trait-cast/lifetime-in-generics.rs @@ -0,0 +1,565 @@ +//@ run-pass +//! Trait-cast with lifetimes embedded inside generic *type* parameters. +//! +//! The existing tests use lifetime parameters on traits (e.g., Root<'a>). +//! These tests use generic *type* parameters whose instantiations carry +//! lifetimes (e.g., Root<&'a u32>, Root>). The dyn +//! types carry their lifetimes inside type arguments rather than as +//! binder variables. +//! +//! Test cases: +//! 1. Single type param: Root with T = &'a u32. +//! 2. Two type params merged: Root → Sub: Root. +//! 3. Where-clause gating: Root with hidden lifetime constraint. +//! 4. Nested generic: Root with T = Option<&'a u32>. +//! 5. Depth-3 chain with type param: Sub: Mid: Root. +//! 6. Diamond with type param: Sub: Mid1 + Mid2, both: Root. +//! 7. Mixed lifetime + type params: Root<'a, T>. +//! 8. 
Type param + projected associated type: Root. +//! 9. Type param + projected transformation: Root>. +//! 10. Chain + diamond combined: 4-level hierarchy. +//! 11. Multi-type-param diamond: Sub: MidT + MidU. +//! +//! Note: GATs with sub-trait casting ICE in well-formedness checking, +//! which appears to be a compiler bug independent of the trait-cast +//! machinery; the `Container::Lent<'a>` shape is therefore not +//! exercised here. + +#![feature(trait_cast)] +#![allow(dead_code, unused_variables)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: trait Root with T = &'a u32 +// +// The lifetime 'a is embedded inside the type parameter, not a direct +// lifetime param on the trait. The dyn type is dyn Root1<&'a u32>. +// ========================================================================= + +trait Root1: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub1: Root1 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S1<'a> { x: &'a u32 } + +impl<'a> Root1<&'a u32> for S1<'a> { + fn val(&self) -> u32 { *self.x } +} +impl<'a> Sub1<&'a u32> for S1<'a> { + fn sub_val(&self) -> u32 { *self.x + 1 } +} + +#[inline(never)] +fn case1<'a>(x: &'a u32) { + let obj: &dyn Root1<&'_ u32> = &S1 { x }; + let sub = core::cast!(in dyn Root1<&'_ u32>, obj => dyn Sub1<&'_ u32>) + .expect("case1"); + assert_eq!(sub.sub_val(), *x + 1); +} + +// ========================================================================= +// Case 2: Two type params — Sub merges both +// +// Root2 with T = &'a u32, U = &'b u32. +// Sub2: Root2 collapses both params into one. 
+// ========================================================================= + +trait Root2: TraitMetadataTable> + core::fmt::Debug { + fn first(&self) -> u32; + fn second(&self) -> u32; +} +trait Sub2: Root2 { + fn merged(&self) -> u32; +} + +#[derive(Debug)] +struct S2<'a, 'b> { x: &'a u32, y: &'b u32 } + +impl<'a, 'b> Root2<&'a u32, &'b u32> for S2<'a, 'b> { + fn first(&self) -> u32 { *self.x } + fn second(&self) -> u32 { *self.y } +} +impl<'a> Sub2<&'a u32> for S2<'a, 'a> { + fn merged(&self) -> u32 { *self.x + *self.y } +} + +#[inline(never)] +fn case2<'a>(x: &'a u32) { + let obj: &dyn Root2<&'_ u32, &'_ u32> = &S2 { x, y: x }; + let sub = core::cast!( + in dyn Root2<&'_ u32, &'_ u32>, + obj => dyn Sub2<&'_ u32> + ).expect("case2"); + assert_eq!(sub.merged(), *x + *x); +} + +// ========================================================================= +// Case 3: Where-clause gating with two type params +// +// Root3 with T = &'a u32, U = &'b u32. Sub3Always has no +// where-clause. Sub3Gated requires 'b: 'a. +// ========================================================================= + +trait Root3: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub3Always: Root3 { + fn always(&self) -> u32; +} +trait Sub3Gated: Root3 { + fn gated(&self) -> u32; +} + +#[derive(Debug)] +struct S3<'a, 'b> { x: &'a u32, y: &'b u32 } + +impl<'a, 'b> Root3<&'a u32, &'b u32> for S3<'a, 'b> { + fn val(&self) -> u32 { *self.x + *self.y } +} +impl<'a, 'b> Sub3Always<&'a u32, &'b u32> for S3<'a, 'b> { + fn always(&self) -> u32 { *self.x } +} +impl<'a, 'b> Sub3Gated<&'a u32, &'b u32> for S3<'a, 'b> +where + 'b: 'a, +{ + fn gated(&self) -> u32 { *self.y } +} + +/// 'b is a local lifetime, shorter than 'a. +/// Sub3Always succeeds; Sub3Gated fails ('b: 'a not provable). 
+#[inline(never)] +fn case3<'a>(x: &'a u32) { + let local: u32 = 5; + let obj: &dyn Root3<&'_ u32, &'_ u32> = &S3 { x, y: &local }; + let sub = core::cast!( + in dyn Root3<&'_ u32, &'_ u32>, + obj => dyn Sub3Always<&'_ u32, &'_ u32> + ).expect("case3_always"); + assert_eq!(sub.always(), *x); + core::cast!( + in dyn Root3<&'_ u32, &'_ u32>, + obj => dyn Sub3Gated<&'_ u32, &'_ u32> + ).expect_err("case3_gated"); +} + +// ========================================================================= +// Case 4: Nested generic — T = Option<&'a u32> +// +// The lifetime 'a is nested two levels deep: inside Option, inside +// the trait's type param. Tests that deeply nested lifetimes are +// tracked through the type parameter. +// ========================================================================= + +trait Root4: TraitMetadataTable> + core::fmt::Debug { + fn inner_val(&self) -> u32; +} +trait Sub4: Root4 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S4<'a> { opt: Option<&'a u32> } + +impl<'a> Root4> for S4<'a> { + fn inner_val(&self) -> u32 { + self.opt.copied().unwrap_or(0) + } +} +impl<'a> Sub4> for S4<'a> { + fn sub_val(&self) -> u32 { + self.opt.copied().unwrap_or(0) + 10 + } +} + +#[inline(never)] +fn case4_some<'a>(x: &'a u32) { + let obj: &dyn Root4> = &S4 { opt: Some(x) }; + let sub = core::cast!( + in dyn Root4>, + obj => dyn Sub4> + ).expect("case4_some"); + assert_eq!(sub.sub_val(), *x + 10); +} + +#[inline(never)] +fn case4_none<'a>(_anchor: &'a u32) { + let s: S4<'a> = S4 { opt: None }; + let obj: &dyn Root4> = &s; + let sub = core::cast!( + in dyn Root4>, + obj => dyn Sub4> + ).expect("case4_none"); + assert_eq!(sub.sub_val(), 10); +} + +// ========================================================================= +// Case 5: Depth-3 chain with type param +// +// Sub5: Mid5: Root5, all with T = &'a u32. Tests that a +// transitive supertrait chain works when the lifetime is embedded in +// the trait's type parameter. 
+// ========================================================================= + +trait Root5: TraitMetadataTable> + core::fmt::Debug { + fn root_val(&self) -> u32; +} +trait Mid5: Root5 { + fn mid_val(&self) -> u32; +} +trait Sub5: Mid5 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C5<'a> { x: &'a u32 } + +impl<'a> Root5<&'a u32> for C5<'a> { + fn root_val(&self) -> u32 { *self.x } +} +impl<'a> Mid5<&'a u32> for C5<'a> { + fn mid_val(&self) -> u32 { *self.x + 100 } +} +impl<'a> Sub5<&'a u32> for C5<'a> { + fn sub_val(&self) -> u32 { *self.x + 200 } +} + +#[inline(never)] +fn case5<'a>(x: &'a u32) { + let obj: &dyn Root5<&'_ u32> = &C5 { x }; + let mid = core::cast!(in dyn Root5<&'_ u32>, obj => dyn Mid5<&'_ u32>) + .expect("case5_mid"); + assert_eq!(mid.mid_val(), *x + 100); + let sub = core::cast!(in dyn Root5<&'_ u32>, obj => dyn Sub5<&'_ u32>) + .expect("case5_sub"); + assert_eq!(sub.sub_val(), *x + 200); +} + +// ========================================================================= +// Case 6: Diamond with type param +// +// Sub6: Mid6A + Mid6B, both extending Root6. +// Tests that the diamond join works with type-param lifetimes. 
+// ========================================================================= + +trait Root6: TraitMetadataTable> + core::fmt::Debug { + fn root_val(&self) -> u32; +} +trait Mid6A: Root6 { + fn a_val(&self) -> u32; +} +trait Mid6B: Root6 { + fn b_val(&self) -> u32; +} +trait Sub6: Mid6A + Mid6B { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C6<'a> { x: &'a u32 } + +impl<'a> Root6<&'a u32> for C6<'a> { + fn root_val(&self) -> u32 { *self.x } +} +impl<'a> Mid6A<&'a u32> for C6<'a> { + fn a_val(&self) -> u32 { *self.x + 10 } +} +impl<'a> Mid6B<&'a u32> for C6<'a> { + fn b_val(&self) -> u32 { *self.x + 20 } +} +impl<'a> Sub6<&'a u32> for C6<'a> { + fn sub_val(&self) -> u32 { *self.x + 30 } +} + +#[inline(never)] +fn case6<'a>(x: &'a u32) { + let obj: &dyn Root6<&'_ u32> = &C6 { x }; + let a = core::cast!(in dyn Root6<&'_ u32>, obj => dyn Mid6A<&'_ u32>) + .expect("case6_a"); + assert_eq!(a.a_val(), *x + 10); + let b = core::cast!(in dyn Root6<&'_ u32>, obj => dyn Mid6B<&'_ u32>) + .expect("case6_b"); + assert_eq!(b.b_val(), *x + 20); + let sub = core::cast!(in dyn Root6<&'_ u32>, obj => dyn Sub6<&'_ u32>) + .expect("case6_sub"); + assert_eq!(sub.sub_val(), *x + 30); +} + +// ========================================================================= +// Case 7: Mixed lifetime param + type param +// +// Root7<'a, T> has both a direct lifetime parameter AND a type +// parameter. The type parameter T = &'b u32 carries another lifetime. +// Tests interaction between binder-variable lifetimes (from 'a) and +// type-embedded lifetimes (from T). 
+// ========================================================================= + +trait Root7<'a, T>: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub7<'a, T>: Root7<'a, T> { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C7<'a, 'b> { x: &'a u32, y: &'b u32 } + +impl<'a, 'b> Root7<'a, &'b u32> for C7<'a, 'b> { + fn val(&self) -> u32 { *self.x + *self.y } +} +impl<'a, 'b> Sub7<'a, &'b u32> for C7<'a, 'b> { + fn sub_val(&self) -> u32 { *self.x * *self.y } +} + +#[inline(never)] +fn case7<'a>(x: &'a u32) { + let obj: &dyn Root7<'_, &'_ u32> = &C7 { x, y: x }; + let sub = core::cast!(in dyn Root7<'_, &'_ u32>, obj => dyn Sub7<'_, &'_ u32>) + .expect("case7"); + assert_eq!(sub.sub_val(), *x * *x); +} + +// ========================================================================= +// Case 8: Type param + projected associated type +// +// Root8 has an associated type Assoc that is constrained to equal T +// in the dyn projection. Combines the type-param-with-lifetime pattern +// with associated-type projection (similar to erasure-safety-projections). 
+// ========================================================================= + +trait Root8: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + fn val(&self) -> u32; +} +trait Sub8: Root8 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct C8<'a> { x: &'a u32 } + +impl<'a> Root8<&'a u32> for C8<'a> { + type Assoc = &'a u32; + fn val(&self) -> u32 { *self.x } +} +impl<'a> Sub8<&'a u32> for C8<'a> { + fn sub_val(&self) -> u32 { *self.x + 50 } +} + +#[inline(never)] +fn case8<'a>(x: &'a u32) { + let obj: &dyn Root8<&'a u32, Assoc = &'a u32> = &C8 { x }; + let sub = core::cast!( + in dyn Root8<&'a u32, Assoc = &'a u32>, + obj => dyn Sub8<&'a u32, Assoc = &'a u32> + ).expect("case8"); + assert_eq!(sub.sub_val(), *x + 50); +} + +// ========================================================================= +// Case 9: Type param threaded through both trait params and projections +// +// Root9 has an associated type Assoc that contains a transformation +// of T (e.g., Option). The dyn type's projection fixes the +// transformation. Sub-trait extends with another transformation. +// Combines: type-param-with-lifetime + projection-with-transformation. 
+// ========================================================================= + +trait Root9: TraitMetadataTable>> + core::fmt::Debug { + type Wrapped; + fn first_val(&self) -> u32; +} +trait Sub9A: Root9 { + fn a_val(&self) -> u32; +} +trait Sub9B: Root9 { + fn b_val(&self) -> u32; +} + +#[derive(Debug)] +struct C9<'a> { x: &'a u32 } + +impl<'a> Root9<&'a u32> for C9<'a> { + type Wrapped = Option<&'a u32>; + fn first_val(&self) -> u32 { *self.x } +} +impl<'a> Sub9A<&'a u32> for C9<'a> { + fn a_val(&self) -> u32 { *self.x + 7 } +} +impl<'a> Sub9B<&'a u32> for C9<'a> { + fn b_val(&self) -> u32 { *self.x + 13 } +} + +#[inline(never)] +fn case9<'a>(x: &'a u32) { + let obj: &dyn Root9<&'a u32, Wrapped = Option<&'a u32>> = &C9 { x }; + let a = core::cast!( + in dyn Root9<&'a u32, Wrapped = Option<&'a u32>>, + obj => dyn Sub9A<&'a u32, Wrapped = Option<&'a u32>> + ).expect("case9_a"); + assert_eq!(a.a_val(), *x + 7); + let b = core::cast!( + in dyn Root9<&'a u32, Wrapped = Option<&'a u32>>, + obj => dyn Sub9B<&'a u32, Wrapped = Option<&'a u32>> + ).expect("case9_b"); + assert_eq!(b.b_val(), *x + 13); +} + +// ========================================================================= +// Case 10: Chain + diamond combined — 4-level hierarchy +// +// Root10 → Mid10 → {BranchA, BranchB} → Leaf10 +// +// First a chain of depth 2 (Root → Mid), then a diamond on top +// (BranchA, BranchB both extend Mid; Leaf extends both branches). +// Tests that the chain walk correctly handles both linear and +// diamond segments in the same hierarchy. 
+// ========================================================================= + +trait Root10: TraitMetadataTable> + core::fmt::Debug { + fn root_val(&self) -> u32; +} +trait Mid10: Root10 { + fn mid_val(&self) -> u32; +} +trait BranchA: Mid10 { + fn a_val(&self) -> u32; +} +trait BranchB: Mid10 { + fn b_val(&self) -> u32; +} +trait Leaf10: BranchA + BranchB { + fn leaf_val(&self) -> u32; +} + +#[derive(Debug)] +struct C10<'a> { x: &'a u32 } + +impl<'a> Root10<&'a u32> for C10<'a> { fn root_val(&self) -> u32 { *self.x } } +impl<'a> Mid10<&'a u32> for C10<'a> { fn mid_val(&self) -> u32 { *self.x + 1 } } +impl<'a> BranchA<&'a u32> for C10<'a> { fn a_val(&self) -> u32 { *self.x + 2 } } +impl<'a> BranchB<&'a u32> for C10<'a> { fn b_val(&self) -> u32 { *self.x + 3 } } +impl<'a> Leaf10<&'a u32> for C10<'a> { fn leaf_val(&self) -> u32 { *self.x + 4 } } + +#[inline(never)] +fn case10<'a>(x: &'a u32) { + let obj: &dyn Root10<&'_ u32> = &C10 { x }; + let mid = core::cast!(in dyn Root10<&'_ u32>, obj => dyn Mid10<&'_ u32>) + .expect("case10_mid"); + assert_eq!(mid.mid_val(), *x + 1); + let a = core::cast!(in dyn Root10<&'_ u32>, obj => dyn BranchA<&'_ u32>) + .expect("case10_a"); + assert_eq!(a.a_val(), *x + 2); + let b = core::cast!(in dyn Root10<&'_ u32>, obj => dyn BranchB<&'_ u32>) + .expect("case10_b"); + assert_eq!(b.b_val(), *x + 3); + let leaf = core::cast!(in dyn Root10<&'_ u32>, obj => dyn Leaf10<&'_ u32>) + .expect("case10_leaf"); + assert_eq!(leaf.leaf_val(), *x + 4); +} + +// ========================================================================= +// Case 11: Multi-type-param diamond +// +// Root11 → MidT, MidU → Sub11: MidT + MidU +// +// Both type params carry distinct lifetimes via T = &'a u32, U = &'b u32. +// The diamond join must reconcile both type params consistently. 
+// ========================================================================= + +trait Root11: TraitMetadataTable> + core::fmt::Debug { + fn t_val(&self) -> u32; + fn u_val(&self) -> u32; +} +trait MidT: Root11 { + fn focus_t(&self) -> u32; +} +trait MidU: Root11 { + fn focus_u(&self) -> u32; +} +trait Sub11: MidT + MidU { + fn combined(&self) -> u32; +} + +#[derive(Debug)] +struct C11<'a, 'b> { t: &'a u32, u: &'b u32 } + +impl<'a, 'b> Root11<&'a u32, &'b u32> for C11<'a, 'b> { + fn t_val(&self) -> u32 { *self.t } + fn u_val(&self) -> u32 { *self.u } +} +impl<'a, 'b> MidT<&'a u32, &'b u32> for C11<'a, 'b> { + fn focus_t(&self) -> u32 { *self.t * 2 } +} +impl<'a, 'b> MidU<&'a u32, &'b u32> for C11<'a, 'b> { + fn focus_u(&self) -> u32 { *self.u * 3 } +} +impl<'a, 'b> Sub11<&'a u32, &'b u32> for C11<'a, 'b> { + fn combined(&self) -> u32 { *self.t + *self.u } +} + +#[inline(never)] +fn case11<'a>(x: &'a u32) { + let obj: &dyn Root11<&'_ u32, &'_ u32> = &C11 { t: x, u: x }; + let mt = core::cast!( + in dyn Root11<&'_ u32, &'_ u32>, + obj => dyn MidT<&'_ u32, &'_ u32> + ).expect("case11_t"); + assert_eq!(mt.focus_t(), *x * 2); + let mu = core::cast!( + in dyn Root11<&'_ u32, &'_ u32>, + obj => dyn MidU<&'_ u32, &'_ u32> + ).expect("case11_u"); + assert_eq!(mu.focus_u(), *x * 3); + let sub = core::cast!( + in dyn Root11<&'_ u32, &'_ u32>, + obj => dyn Sub11<&'_ u32, &'_ u32> + ).expect("case11_sub"); + assert_eq!(sub.combined(), *x + *x); +} + +fn main() { + let x: u32 = 10; + + eprintln!("=== case1: basic ref type param ==="); + case1(&x); + + eprintln!("=== case2: two type params merged ==="); + case2(&x); + + eprintln!("=== case3: where-clause gating ==="); + case3(&x); + + eprintln!("=== case4: Option type param ==="); + case4_some(&x); + case4_none(&x); + + eprintln!("=== case5: chain depth-3 ==="); + case5(&x); + + eprintln!("=== case6: diamond ==="); + case6(&x); + + eprintln!("=== case7: mixed lifetime + type params ==="); + case7(&x); + + eprintln!("=== case8: 
type param + projected assoc ==="); + case8(&x); + + eprintln!("=== case9: type param + projection ==="); + case9(&x); + + eprintln!("=== case10: chain + diamond ==="); + case10(&x); + + eprintln!("=== case11: multi-type-param diamond ==="); + case11(&x); +} diff --git a/tests/ui/trait-cast/missing-root-bound.rs b/tests/ui/trait-cast/missing-root-bound.rs new file mode 100644 index 0000000000000..d663b482388bf --- /dev/null +++ b/tests/ui/trait-cast/missing-root-bound.rs @@ -0,0 +1,42 @@ +//! Diagnostic: trait used as a `cast!` root lacks `TraitMetadataTable`. +//! +//! The `cast!(in dyn Root, ...)` macro requires that `Root` carry +//! `TraitMetadataTable` as a supertrait bound. When it +//! does not, the specialized `MissingRootBound` diagnostic is emitted +//! instead of the generic "trait bound not satisfied" error. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; + +// A trait that does NOT carry `TraitMetadataTable`. +trait BrokenRoot: core::fmt::Debug { + fn val(&self) -> u32; +} + +// A hypothetical target. Even if it had `TraitMetadataTable` +// as a supertrait, the root itself would still fail the check — this fixture +// focuses on the root-missing case. 
+trait SomeTarget: BrokenRoot { + fn extra(&self) -> u32; +} + +#[derive(Debug)] +struct S; + +impl BrokenRoot for S { + fn val(&self) -> u32 { 0 } +} + +fn cast_through_broken_root(obj: &dyn BrokenRoot) { + let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + //~^ ERROR `BrokenRoot` cannot be used as a cast root + //~| ERROR `BrokenRoot` cannot be used as a cast root + //~| ERROR E0277 + //~| ERROR E0277 + //~| ERROR E0277 + //~| ERROR E0277 +} diff --git a/tests/ui/trait-cast/missing-root-bound.stderr b/tests/ui/trait-cast/missing-root-bound.stderr new file mode 100644 index 0000000000000..e1484cb23cc18 --- /dev/null +++ b/tests/ui/trait-cast/missing-root-bound.stderr @@ -0,0 +1,85 @@ +error: `BrokenRoot` cannot be used as a cast root: missing `TraitMetadataTable` bound + --> $DIR/missing-root-bound.rs:35:13 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `TraitMetadataTable` is not a supertrait of `BrokenRoot` + | + = help: add `TraitMetadataTable` as a supertrait bound of `BrokenRoot` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `dyn SomeTarget: std::marker::TraitMetadataTable` is not satisfied + --> $DIR/missing-root-bound.rs:35:51 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | ^^^^^^^^^^^^^^ the trait `Sized` is not implemented for `dyn SomeTarget` + | + = note: required for `dyn SomeTarget` to implement `std::marker::TraitMetadataTable` +note: required by a bound in `std::trait_cast::TraitCast::cast` + --> $SRC_DIR/core/src/trait_cast.rs:LL:COL + +error[E0277]: the trait bound `&dyn BrokenRoot: TraitCast` is not satisfied + --> $DIR/missing-root-bound.rs:35:44 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | -------------------------------^^^------------------- + | | | + | | the trait `Sized` is not 
implemented for `dyn BrokenRoot` + | required by a bound introduced by this call + | + = help: the following other types implement trait `TraitCast`: + &'r T + &'r mut T + Arc + Box + Rc + = note: required for `dyn BrokenRoot` to implement `std::marker::TraitMetadataTable` + = note: required for `&dyn BrokenRoot` to implement `TraitCast` + +error[E0277]: the size for values of type `dyn SomeTarget` cannot be known at compilation time + --> $DIR/missing-root-bound.rs:35:44 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | -------------------------------^^^------------------- + | | | + | | doesn't have a size known at compile-time + | required by a bound introduced by this call + | + = help: the trait `Sized` is not implemented for `dyn SomeTarget` + = help: the following other types implement trait `TraitCast`: + &'r T + &'r mut T + Arc + Box + Rc + = note: required for `dyn SomeTarget` to implement `std::marker::TraitMetadataTable` + = note: required for `&dyn BrokenRoot` to implement `TraitCast` + +error: `BrokenRoot` cannot be used as a cast root: missing `TraitMetadataTable` bound + --> $DIR/missing-root-bound.rs:35:13 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `TraitMetadataTable` is not a supertrait of `BrokenRoot` + | + = help: add `TraitMetadataTable` as a supertrait bound of `BrokenRoot` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the trait bound `dyn SomeTarget: std::marker::TraitMetadataTable` is not satisfied + --> $DIR/missing-root-bound.rs:35:13 + | +LL | let _ = core::cast!(in dyn BrokenRoot, obj => dyn SomeTarget); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Sized` is not implemented for `dyn SomeTarget` + | + = help: the following other types 
implement trait `TraitCast`: + &'r T + &'r mut T + Arc + Box + Rc + = note: required for `dyn SomeTarget` to implement `std::marker::TraitMetadataTable` + = note: required for `&dyn BrokenRoot` to implement `TraitCast` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 6 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/trait-cast/non-dyn-compat-target.rs b/tests/ui/trait-cast/non-dyn-compat-target.rs new file mode 100644 index 0000000000000..1227156b2e2c3 --- /dev/null +++ b/tests/ui/trait-cast/non-dyn-compat-target.rs @@ -0,0 +1,42 @@ +//! Diagnostic: non-dyn-compatible cast target. +//! +//! When a `cast!` expression names a non-dyn-compatible trait as its +//! target, lowering `dyn Target` triggers the existing E0038 +//! dyn-compatibility diagnostic before any trait-cast-specific +//! machinery runs. This test verifies that the error path surfaces +//! cleanly through the macro expansion. +//! +//! The root (`CleanRoot`) is itself dyn-compatible; only the target +//! (`NotDynCompat`) is rejected. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// A dyn-compatible root. +trait CleanRoot: TraitMetadataTable + core::fmt::Debug { + fn val(&self) -> u32; +} + +// A sub-trait that breaks dyn-compatibility by returning `Self`. 
+trait NotDynCompat: CleanRoot { + fn build(&self) -> Self; +} + +#[derive(Debug)] +struct S; + +impl CleanRoot for S { + fn val(&self) -> u32 { 0 } +} + +fn cast_to_non_dyn_compat(obj: &dyn CleanRoot) { + let _ = core::cast!(in dyn CleanRoot, obj => dyn NotDynCompat); + //~^ ERROR E0038 +} + +fn main() {} diff --git a/tests/ui/trait-cast/non-dyn-compat-target.stderr b/tests/ui/trait-cast/non-dyn-compat-target.stderr new file mode 100644 index 0000000000000..93665ed16f935 --- /dev/null +++ b/tests/ui/trait-cast/non-dyn-compat-target.stderr @@ -0,0 +1,19 @@ +error[E0038]: the trait `NotDynCompat` is not dyn compatible + --> $DIR/non-dyn-compat-target.rs:38:54 + | +LL | let _ = core::cast!(in dyn CleanRoot, obj => dyn NotDynCompat); + | ^^^^^^^^^^^^ `NotDynCompat` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/non-dyn-compat-target.rs:27:24 + | +LL | trait NotDynCompat: CleanRoot { + | ------------ this trait is not dyn compatible... +LL | fn build(&self) -> Self; + | ^^^^ ...because method `build` references the `Self` type in its return type + = help: consider moving `build` to another trait + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0038`. diff --git a/tests/ui/trait-cast/not-dyn-compat.rs b/tests/ui/trait-cast/not-dyn-compat.rs new file mode 100644 index 0000000000000..3e6dcc81e0bdf --- /dev/null +++ b/tests/ui/trait-cast/not-dyn-compat.rs @@ -0,0 +1,87 @@ +//! Negative compile-fail tests: trait patterns that are NOT dyn-compatible +//! (object-safe) and therefore cannot participate in trait-cast. +//! +//! Trait-cast requires `TraitMetadataTable` as a supertrait +//! bound, which forces dyn compatibility on the trait. Any feature +//! that breaks dyn compatibility — generic methods, `Self: Sized` +//! requirements, methods returning `Self`, generic methods using +//! associated types — must be rejected. 
+//! +//! Each case mirrors a positive pattern from lifetime-in-generics.rs +//! but introduces a feature that makes the trait non-dyn-compat. +//! +//! Note: GAT cases (`type Lent<'a>`) trigger a compiler ICE in +//! `check_well_formed` rather than a clean dyn-compatibility error, so +//! they cannot be expressed as ordinary compile-fail UI tests here. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Negative 1: Method with generic type parameter — not dyn-compat +// +// `process` is a generic method. Generic methods cannot be +// dispatched through a vtable. Mirrors the positive Root1 pattern +// (case 1) — replacing the trait-level type param with a method-level +// one is invalid. +// ========================================================================= + +trait GenericMethod: TraitMetadataTable> + core::fmt::Debug { +//~^ ERROR the trait `GenericMethod` is not dyn compatible +//~| ERROR the trait `GenericMethod` is not dyn compatible + fn process(&self, value: U) -> u32; +} + +// ========================================================================= +// Negative 2: `Self: Sized` requirement on the trait — not dyn-compat +// +// Requiring `Self: Sized` excludes dyn types (which are unsized). +// Mirrors the positive single-type-param pattern (case 1). +// ========================================================================= + +trait NeedsSized: TraitMetadataTable> + core::fmt::Debug +//~^ ERROR the trait `NeedsSized` is not dyn compatible +//~| ERROR the trait `NeedsSized` is not dyn compatible +where + Self: Sized, +{ + fn val(&self) -> u32; +} + +// ========================================================================= +// Negative 3: Method returning Self — not dyn-compat +// +// Returning `Self` requires sizing the return value. 
Mirrors the +// diamond pattern (case 6) — but with a builder-style method. +// ========================================================================= + +trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { +//~^ ERROR the trait `ReturnsSelf` is not dyn compatible +//~| ERROR the trait `ReturnsSelf` is not dyn compatible +//~| ERROR the trait `ReturnsSelf` is not dyn compatible + fn val(&self) -> u32; + fn build(&self) -> Self; +} + +// ========================================================================= +// Negative 4: Method with generic type param using associated type — not +// dyn-compat +// +// Combines the associated-type pattern (positive case 8) with a +// generic method. Even though associated types alone are dyn-compat, +// the generic method `map` breaks it. +// ========================================================================= + +trait AssocGeneric: + TraitMetadataTable> + core::fmt::Debug +//~^ ERROR the trait `AssocGeneric` is not dyn compatible +//~| ERROR the trait `AssocGeneric` is not dyn compatible +{ + type Assoc; + fn map u32>(&self, f: F) -> u32; +} diff --git a/tests/ui/trait-cast/not-dyn-compat.stderr b/tests/ui/trait-cast/not-dyn-compat.stderr new file mode 100644 index 0000000000000..583f51c2831f1 --- /dev/null +++ b/tests/ui/trait-cast/not-dyn-compat.stderr @@ -0,0 +1,159 @@ +error[E0038]: the trait `GenericMethod` is not dyn compatible + --> $DIR/not-dyn-compat.rs:34:25 + | +LL | trait GenericMethod: TraitMetadataTable> + core::fmt::Debug { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `GenericMethod` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:37:8 + | +LL | trait GenericMethod: TraitMetadataTable> + core::fmt::Debug { + | ------------- this trait is not dyn compatible... +... 
+LL | fn process(&self, value: U) -> u32; + | ^^^^^^^ ...because method `process` has generic type parameters + = help: consider moving `process` to another trait + +error[E0038]: the trait `NeedsSized` is not dyn compatible + --> $DIR/not-dyn-compat.rs:47:22 + | +LL | trait NeedsSized: TraitMetadataTable> + core::fmt::Debug + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `NeedsSized` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:51:11 + | +LL | trait NeedsSized: TraitMetadataTable> + core::fmt::Debug + | ---------- this trait is not dyn compatible... +... +LL | Self: Sized, + | ^^^^^ ...because it requires `Self: Sized` + +error[E0038]: the trait `ReturnsSelf` is not dyn compatible + --> $DIR/not-dyn-compat.rs:63:23 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `ReturnsSelf` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:68:24 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ----------- this trait is not dyn compatible... +... +LL | fn build(&self) -> Self; + | ^^^^ ...because method `build` references the `Self` type in its return type + = help: consider moving `build` to another trait + +error[E0038]: the trait `AssocGeneric` is not dyn compatible + --> $DIR/not-dyn-compat.rs:81:5 + | +LL | TraitMetadataTable> + core::fmt::Debug + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `AssocGeneric` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:86:8 + | +LL | trait AssocGeneric: + | ------------ this trait is not dyn compatible... +... 
+LL | fn map u32>(&self, f: F) -> u32; + | ^^^ ...because method `map` has generic type parameters + = help: consider moving `map` to another trait + +error[E0038]: the trait `GenericMethod` is not dyn compatible + --> $DIR/not-dyn-compat.rs:34:25 + | +LL | trait GenericMethod: TraitMetadataTable> + core::fmt::Debug { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `GenericMethod` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:37:8 + | +LL | trait GenericMethod: TraitMetadataTable> + core::fmt::Debug { + | ------------- this trait is not dyn compatible... +... +LL | fn process(&self, value: U) -> u32; + | ^^^^^^^ ...because method `process` has generic type parameters + = help: consider moving `process` to another trait + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0038]: the trait `NeedsSized` is not dyn compatible + --> $DIR/not-dyn-compat.rs:47:22 + | +LL | trait NeedsSized: TraitMetadataTable> + core::fmt::Debug + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `NeedsSized` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:51:11 + | +LL | trait NeedsSized: TraitMetadataTable> + core::fmt::Debug + | ---------- this trait is not dyn compatible... +... 
+LL | Self: Sized, + | ^^^^^ ...because it requires `Self: Sized` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0038]: the trait `ReturnsSelf` is not dyn compatible + --> $DIR/not-dyn-compat.rs:63:23 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `ReturnsSelf` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:68:24 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ----------- this trait is not dyn compatible... +... +LL | fn build(&self) -> Self; + | ^^^^ ...because method `build` references the `Self` type in its return type + = help: consider moving `build` to another trait + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0038]: the trait `ReturnsSelf` is not dyn compatible + --> $DIR/not-dyn-compat.rs:63:23 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `ReturnsSelf` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:68:24 + | +LL | trait ReturnsSelf: TraitMetadataTable> + core::fmt::Debug { + | ----------- this trait is not dyn compatible... +... 
+LL | fn build(&self) -> Self; + | ^^^^ ...because method `build` references the `Self` type in its return type + = help: consider moving `build` to another trait + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error[E0038]: the trait `AssocGeneric` is not dyn compatible + --> $DIR/not-dyn-compat.rs:81:5 + | +LL | TraitMetadataTable> + core::fmt::Debug + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `AssocGeneric` is not dyn compatible + | +note: for a trait to be dyn compatible it needs to allow building a vtable + for more information, visit + --> $DIR/not-dyn-compat.rs:86:8 + | +LL | trait AssocGeneric: + | ------------ this trait is not dyn compatible... +... +LL | fn map u32>(&self, f: F) -> u32; + | ^^^ ...because method `map` has generic type parameters + = help: consider moving `map` to another trait + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 9 previous errors + +For more information about this error, try `rustc --explain E0038`. diff --git a/tests/ui/trait-cast/runtime-cast-failures.rs b/tests/ui/trait-cast/runtime-cast-failures.rs new file mode 100644 index 0000000000000..6f75ec51b7966 --- /dev/null +++ b/tests/ui/trait-cast/runtime-cast-failures.rs @@ -0,0 +1,232 @@ +//@ run-pass +//! Runtime trait-cast failures: patterns from lifetime-in-generics.rs +//! where `cast!` is expected to return `Err` at runtime. +//! +//! Existing tests already cover single missing impls (basic-downcast, +//! torture-tests case 1 sparse), single where-clause gating with +//! direct lifetime params (lifetime-bounded-downcast), diamond with a +//! where-clause-gated branch (erasure-safety-chain-walk case 5), and +//! chain with where-clause-gated impls (torture-tests case 5). +//! +//! This file focuses on negative patterns that are NOT yet covered: +//! +//! 1. Transitive where-clause chain `'a: 'b, 'b: 'c` where the +//! middle link is unprovable. 
Unlike the single-bound gating in +//! lifetime-bounded-downcast, this tests multi-bound composition. +//! 2. Chain where the intermediate sub-trait impl is literally +//! MISSING (not where-clause gated). Complements torture-tests +//! case 5 which tests where-clause gating in the same shape. +//! 3. Projection + where-clause gating combined: the sub-trait impl +//! is gated by a lifetime bound when the trait also carries an +//! associated-type projection. Complements erasure-safety- +//! projections which has no negative cases. +//! 4. Multi-type-param diamond with one branch impl missing: +//! `Root` → `MidT` / `MidU` → `Sub`, +//! where only MidT is impled. No existing test uses a two-type- +//! param diamond for negative cases. +//! +//! All cases use the type-param-with-lifetime pattern from +//! lifetime-in-generics.rs (e.g., `Root<&'a u32>`), which the existing +//! negative tests do not exercise. + +#![feature(trait_cast)] +#![allow(dead_code, unused_variables)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// ========================================================================= +// Case 1: Transitive where-clause chain, middle link unprovable. +// +// The impl requires `'a: 'b` AND `'b: 'c`. The coercion site has +// 'b as a strictly interior scope, so neither bound is provable. +// Existing where-clause tests (lifetime-bounded-downcast, torture +// case 5) use a single bound on direct lifetime params; this case +// tests multi-bound composition with lifetimes embedded in type params. 
+// ========================================================================= + +trait Root1: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Sub1: Root1 { + fn chained(&self) -> u32; +} + +#[derive(Debug)] +struct S1<'a, 'b, 'c> { a: &'a u32, b: &'b u32, c: &'c u32 } + +impl<'a, 'b, 'c> Root1<&'a u32, &'b u32, &'c u32> for S1<'a, 'b, 'c> { + fn val(&self) -> u32 { *self.a + *self.b + *self.c } +} +impl<'a, 'b, 'c> Sub1<&'a u32, &'b u32, &'c u32> for S1<'a, 'b, 'c> +where + 'a: 'b, + 'b: 'c, +{ + fn chained(&self) -> u32 { *self.a * *self.b * *self.c } +} + +/// 'b is strictly interior — neither `'a: 'b` nor `'b: 'c` is provable. +#[inline(never)] +fn case1_chain_unprovable<'a>(x: &'a u32) { + let b_scope: u32 = 2; + let c_scope: u32 = 3; + let obj: &dyn Root1<&'_ u32, &'_ u32, &'_ u32> = + &S1 { a: x, b: &b_scope, c: &c_scope }; + core::cast!( + in dyn Root1<&'_ u32, &'_ u32, &'_ u32>, + obj => dyn Sub1<&'_ u32, &'_ u32, &'_ u32> + ).expect_err("case1_chain_unprovable"); +} + +// ========================================================================= +// Case 2: Chain with intermediate impl literally missing. +// +// Root2 → Mid2 → Sub2, with T = &'a u32. Only Root2 is +// implemented for the concrete type — Mid2 and Sub2 have no impls at +// all. This is a different failure mechanism from torture-tests +// case 5 (where Mid and Sub impls exist but are where-clause gated). +// ========================================================================= + +trait Root2: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait Mid2: Root2 { + fn mid_val(&self) -> u32; +} +trait Sub2: Mid2 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S2<'a> { x: &'a u32 } + +impl<'a> Root2<&'a u32> for S2<'a> { + fn val(&self) -> u32 { *self.x } +} +// NOTE: no Mid2 or Sub2 impl. 
+ +#[inline(never)] +fn case2_chain_missing<'a>(x: &'a u32) { + let obj: &dyn Root2<&'_ u32> = &S2 { x }; + core::cast!(in dyn Root2<&'_ u32>, obj => dyn Mid2<&'_ u32>) + .expect_err("case2_mid_missing"); + core::cast!(in dyn Root2<&'_ u32>, obj => dyn Sub2<&'_ u32>) + .expect_err("case2_sub_missing"); +} + +// ========================================================================= +// Case 3: Projection + where-clause gating. +// +// Root3 has an associated-type projection. Sub3's impl +// is gated by a lifetime bound `'b: 'a` on a lifetime that is not +// visible in the projection. erasure-safety-projections has no +// negative cases — this case shows a projection-carrying dyn type +// whose sub-trait cast can still fail at runtime. +// ========================================================================= + +trait Root3: TraitMetadataTable> + core::fmt::Debug { + type Assoc; + fn val(&self) -> u32; +} +trait Sub3: Root3 { + fn sub_val(&self) -> u32; +} + +#[derive(Debug)] +struct S3<'a, 'b> { x: &'a u32, y: &'b u32 } + +impl<'a, 'b> Root3<&'a u32> for S3<'a, 'b> { + type Assoc = &'a u32; + fn val(&self) -> u32 { *self.x } +} +impl<'a, 'b> Sub3<&'a u32> for S3<'a, 'b> +where + 'b: 'a, +{ + fn sub_val(&self) -> u32 { *self.y } +} + +#[inline(never)] +fn case3_projection_where<'a>(x: &'a u32) { + let local: u32 = 5; + let obj: &dyn Root3<&'a u32, Assoc = &'a u32> = &S3 { x, y: &local }; + core::cast!( + in dyn Root3<&'a u32, Assoc = &'a u32>, + obj => dyn Sub3<&'a u32, Assoc = &'a u32> + ).expect_err("case3_projection_where"); +} + +// ========================================================================= +// Case 4: Multi-type-param diamond with one branch impl missing. +// +// Mirrors lifetime-in-generics case 11 (Root, MidT, MidU, Sub). +// Only MidT is implemented; MidU and the diamond Sub are not. No +// existing negative test uses a two-type-param diamond. 
+// ========================================================================= + +trait Root4: TraitMetadataTable> + core::fmt::Debug { + fn val(&self) -> u32; +} +trait MidT4: Root4 { + fn focus_t(&self) -> u32; +} +trait MidU4: Root4 { + fn focus_u(&self) -> u32; +} +trait Sub4: MidT4 + MidU4 { + fn combined(&self) -> u32; +} + +#[derive(Debug)] +struct S4<'a, 'b> { t: &'a u32, u: &'b u32 } + +impl<'a, 'b> Root4<&'a u32, &'b u32> for S4<'a, 'b> { + fn val(&self) -> u32 { *self.t + *self.u } +} +impl<'a, 'b> MidT4<&'a u32, &'b u32> for S4<'a, 'b> { + fn focus_t(&self) -> u32 { *self.t } +} +// NOTE: no MidU4 impl, no Sub4 impl. + +#[inline(never)] +fn case4_diamond_missing<'a>(x: &'a u32) { + let obj: &dyn Root4<&'_ u32, &'_ u32> = &S4 { t: x, u: x }; + + // Positive: MidT4 is impled. + let mt = core::cast!( + in dyn Root4<&'_ u32, &'_ u32>, + obj => dyn MidT4<&'_ u32, &'_ u32> + ).expect("case4_mt"); + assert_eq!(mt.focus_t(), *x); + + // Negative: MidU4 has no impl. + core::cast!( + in dyn Root4<&'_ u32, &'_ u32>, + obj => dyn MidU4<&'_ u32, &'_ u32> + ).expect_err("case4_mu_missing"); + + // Negative: Sub4 requires both branches. + core::cast!( + in dyn Root4<&'_ u32, &'_ u32>, + obj => dyn Sub4<&'_ u32, &'_ u32> + ).expect_err("case4_sub_missing"); +} + +fn main() { + let x: u32 = 10; + + eprintln!("=== case1: transitive where-clause unprovable ==="); + case1_chain_unprovable(&x); + + eprintln!("=== case2: chain links missing ==="); + case2_chain_missing(&x); + + eprintln!("=== case3: projection + where-clause ==="); + case3_projection_where(&x); + + eprintln!("=== case4: multi-type-param diamond missing ==="); + case4_diamond_missing(&x); +} diff --git a/tests/ui/trait-cast/target-not-reachable.rs b/tests/ui/trait-cast/target-not-reachable.rs new file mode 100644 index 0000000000000..2adafe7861c24 --- /dev/null +++ b/tests/ui/trait-cast/target-not-reachable.rs @@ -0,0 +1,39 @@ +//! Diagnostic: cast target trait is not reachable from the root supertrait. +//! 
+//! When `cast!(in dyn Root, obj => dyn Target)` names a target whose +//! principal trait is not a (transitive) supertrait of `Root`, the +//! specialized `TargetNotReachable` diagnostic is emitted instead of +//! the generic "trait bound not satisfied" error. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// Graph A: Root with its own sub-trait. +trait Root: TraitMetadataTable + core::fmt::Debug { + fn val(&self) -> u32; +} + +// Completely unrelated trait — has its own metadata table, but no shared +// root with `Root`. +trait Unrelated: TraitMetadataTable + core::fmt::Debug { + fn do_something(&self); +} + +#[derive(Debug)] +struct S; + +impl Root for S { + fn val(&self) -> u32 { 0 } +} + +fn target_not_in_graph(obj: &dyn Root) { + let _ = core::cast!(in dyn Root, obj => dyn Unrelated); + //~^ ERROR `Unrelated` is not in the trait graph rooted at `Root` + //~| ERROR `Unrelated` is not in the trait graph rooted at `Root` + //~| ERROR E0277 +} diff --git a/tests/ui/trait-cast/target-not-reachable.stderr b/tests/ui/trait-cast/target-not-reachable.stderr new file mode 100644 index 0000000000000..f03e8a0ba9efb --- /dev/null +++ b/tests/ui/trait-cast/target-not-reachable.stderr @@ -0,0 +1,43 @@ +error: `Unrelated` is not in the trait graph rooted at `Root` + --> $DIR/target-not-reachable.rs:35:13 + | +LL | let _ = core::cast!(in dyn Root, obj => dyn Unrelated); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Unrelated` does not have `Root` as a (transitive) supertrait + = help: add `Root` as a supertrait bound on `Unrelated` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error[E0277]: the size for values of type `dyn Unrelated` cannot be known at compilation time + --> $DIR/target-not-reachable.rs:35:38 + | +LL | let _ = core::cast!(in dyn Root, obj => dyn Unrelated); + | 
-------------------------^^^------------------ + | | | + | | the trait `Sized` is not implemented for `dyn Unrelated` + | required by a bound introduced by this call + | + = note: the trait bound `Sized` is not satisfied + = note: required for `dyn Unrelated` to implement `std::marker::TraitMetadataTable` + = note: required for `&dyn Root` to implement `TraitCast` +help: consider borrowing here + | +LL | let _ = core::cast!(in dyn Root, &obj => dyn Unrelated); + | + +LL | let _ = core::cast!(in dyn Root, &mut obj => dyn Unrelated); + | ++++ + +error: `Unrelated` is not in the trait graph rooted at `Root` + --> $DIR/target-not-reachable.rs:35:13 + | +LL | let _ = core::cast!(in dyn Root, obj => dyn Unrelated); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = note: `Unrelated` does not have `Root` as a (transitive) supertrait + = help: add `Root` as a supertrait bound on `Unrelated` + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + = note: this error originates in the macro `core::cast` (in Nightly builds, run with -Z macro-backtrace for more info) + +error: aborting due to 3 previous errors + +For more information about this error, try `rustc --explain E0277`. diff --git a/tests/ui/trait-cast/tmt-arg-mismatch.rs b/tests/ui/trait-cast/tmt-arg-mismatch.rs new file mode 100644 index 0000000000000..d8e060cf36019 --- /dev/null +++ b/tests/ui/trait-cast/tmt-arg-mismatch.rs @@ -0,0 +1,26 @@ +//! Diagnostic (eager): `TraitMetadataTable` supertrait bound where +//! `dyn X` is neither `dyn Self` (declaring this trait as a cast root) nor +//! `dyn R` for a transitive supertrait `R` that is itself a cast root. + +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +// A valid cast root. +trait Root: TraitMetadataTable + core::fmt::Debug { + fn val(&self) -> u32; +} + +// A separate, unrelated cast root. 
+trait Unrelated: TraitMetadataTable + core::fmt::Debug { + fn other(&self) -> u32; +} + +// Sub-trait that points its `TraitMetadataTable` at an unrelated root, +// rather than `Self` or `Root` (its own supertrait). +trait ChildTrait: Root + TraitMetadataTable {} +//~^ ERROR `TraitMetadataTable` type argument does not match a cast root diff --git a/tests/ui/trait-cast/tmt-arg-mismatch.stderr b/tests/ui/trait-cast/tmt-arg-mismatch.stderr new file mode 100644 index 0000000000000..4a9f19fea0e0c --- /dev/null +++ b/tests/ui/trait-cast/tmt-arg-mismatch.stderr @@ -0,0 +1,11 @@ +error: `TraitMetadataTable` type argument does not match a cast root + --> $DIR/tmt-arg-mismatch.rs:25:26 + | +LL | trait ChildTrait: Root + TraitMetadataTable {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `dyn Unrelated` is not a (transitive) supertrait of `ChildTrait` + | + = note: on a trait `Tr`, a `TraitMetadataTable` supertrait bound requires `X = Self` (declaring `Tr` as a cast root) or `X = R` for some transitive supertrait `R` of `Tr` that is itself a cast root + = help: subtraits inherit `TraitMetadataTable` from their root — the explicit bound is usually unnecessary + +error: aborting due to 1 previous error + diff --git a/tests/ui/trait-cast/tmt-arg-not-dyn.rs b/tests/ui/trait-cast/tmt-arg-not-dyn.rs new file mode 100644 index 0000000000000..1ac4b6497316c --- /dev/null +++ b/tests/ui/trait-cast/tmt-arg-not-dyn.rs @@ -0,0 +1,15 @@ +//! Diagnostic (eager): the `TraitMetadataTable` type argument must be a +//! trait object. Non-`dyn` arguments render the bound uninhabitable and are +//! never what the author intended. 
+ +#![feature(trait_cast)] +#![allow(dead_code)] + +#![crate_type = "rlib"] + +extern crate core; +use core::marker::TraitMetadataTable; + +trait ChildTrait: TraitMetadataTable {} +//~^ ERROR `TraitMetadataTable` type argument must be a trait object +//~| ERROR E0271 diff --git a/tests/ui/trait-cast/tmt-arg-not-dyn.stderr b/tests/ui/trait-cast/tmt-arg-not-dyn.stderr new file mode 100644 index 0000000000000..4b197d170755a --- /dev/null +++ b/tests/ui/trait-cast/tmt-arg-not-dyn.stderr @@ -0,0 +1,23 @@ +error[E0271]: type mismatch resolving `::Metadata == DynMetadata` + --> $DIR/tmt-arg-not-dyn.rs:13:19 + | +LL | trait ChildTrait: TraitMetadataTable {} + | ^^^^^^^^^^^^^^^^^^^^^^^ expected `DynMetadata`, found `()` + | + = note: expected struct `DynMetadata` + found unit type `()` +note: required by a bound in `std::marker::TraitMetadataTable` + --> $SRC_DIR/core/src/marker.rs:LL:COL + +error: `TraitMetadataTable` type argument must be a trait object + --> $DIR/tmt-arg-not-dyn.rs:13:19 + | +LL | trait ChildTrait: TraitMetadataTable {} + | ^^^^^^^^^^^^^^^^^^^^^^^ `u32` is not a `dyn Trait` type + | + = note: `TraitMetadataTable` requires `T: Pointee>`, which holds only for trait objects + = help: use `dyn Self` to declare `ChildTrait` as a cast root, or `dyn R` for a cast-root supertrait `R` of `ChildTrait` + +error: aborting due to 2 previous errors + +For more information about this error, try `rustc --explain E0271`. diff --git a/tests/ui/trait-cast/torture-tests.rs b/tests/ui/trait-cast/torture-tests.rs new file mode 100644 index 0000000000000..8133962b78a0e --- /dev/null +++ b/tests/ui/trait-cast/torture-tests.rs @@ -0,0 +1,306 @@ +//@ run-pass +//! Trait-cast torture tests: complex lifetime and hierarchy scenarios. +//! +//! Stress tests combining multiple trait-cast features to exercise corner +//! cases in the mono collector, erasure-safety checks, and metadata table +//! generation. +//! +//! Test cases: +//! 1. 
Wide hierarchy: root with six sub-traits, two concrete types with +//! different impl coverage. +//! 2. Three lifetimes: full merge (3 -> 1) vs. partial merge (3 -> 2). +//! 3. Two independent root hierarchies on the same concrete type. +//! 4. Zero-sized type with PhantomData lifetime. +//! 5. Deep chain with where-clause gating at the intermediate level. + +#![feature(trait_cast)] +#![allow(dead_code, unused_variables)] + +#![crate_type = "bin"] + +extern crate core; +use core::marker::{PhantomData, TraitMetadataTable}; + +// ========================================================================= +// Case 1: Wide hierarchy — root with six sub-traits +// +// TypeAll implements all six sub-traits. +// TypeSparse implements only W1, W3, and W5 — the even-numbered +// sub-traits are absent. Tests that a wide metadata table correctly +// interleaves populated and empty slots. +// ========================================================================= + +trait WRoot<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} + +trait W1<'a>: WRoot<'a> { fn w1(&self) -> u32; } +trait W2<'a>: WRoot<'a> { fn w2(&self) -> u32; } +trait W3<'a>: WRoot<'a> { fn w3(&self) -> u32; } +trait W4<'a>: WRoot<'a> { fn w4(&self) -> u32; } +trait W5<'a>: WRoot<'a> { fn w5(&self) -> u32; } +trait W6<'a>: WRoot<'a> { fn w6(&self) -> u32; } + +#[derive(Debug)] +struct TypeAll<'a> { _x: &'a u32 } + +#[derive(Debug)] +struct TypeSparse<'a> { _x: &'a u32 } + +// TypeAll — everything +impl<'a> WRoot<'a> for TypeAll<'a> { fn id(&self) -> u32 { 1 } } +impl<'a> W1<'a> for TypeAll<'a> { fn w1(&self) -> u32 { 10 } } +impl<'a> W2<'a> for TypeAll<'a> { fn w2(&self) -> u32 { 20 } } +impl<'a> W3<'a> for TypeAll<'a> { fn w3(&self) -> u32 { 30 } } +impl<'a> W4<'a> for TypeAll<'a> { fn w4(&self) -> u32 { 40 } } +impl<'a> W5<'a> for TypeAll<'a> { fn w5(&self) -> u32 { 50 } } +impl<'a> W6<'a> for TypeAll<'a> { fn w6(&self) -> u32 { 60 } } + +// TypeSparse — odd-numbered only +impl<'a> 
WRoot<'a> for TypeSparse<'a> { fn id(&self) -> u32 { 2 } } +impl<'a> W1<'a> for TypeSparse<'a> { fn w1(&self) -> u32 { 11 } } +impl<'a> W3<'a> for TypeSparse<'a> { fn w3(&self) -> u32 { 31 } } +impl<'a> W5<'a> for TypeSparse<'a> { fn w5(&self) -> u32 { 51 } } + +/// TypeAll: all six sub-traits available. +#[inline(never)] +fn case1_all<'a>(x: &'a u32) { + let obj: &dyn WRoot<'_> = &TypeAll { _x: x }; + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W1<'_>).expect("W1").w1(), 10); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W2<'_>).expect("W2").w2(), 20); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W3<'_>).expect("W3").w3(), 30); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W4<'_>).expect("W4").w4(), 40); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W5<'_>).expect("W5").w5(), 50); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W6<'_>).expect("W6").w6(), 60); +} + +/// TypeSparse: odd sub-traits available, even absent. +#[inline(never)] +fn case1_sparse<'a>(x: &'a u32) { + let obj: &dyn WRoot<'_> = &TypeSparse { _x: x }; + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W1<'_>).expect("W1").w1(), 11); + core::cast!(in dyn WRoot<'_>, obj => dyn W2<'_>).expect_err("W2"); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W3<'_>).expect("W3").w3(), 31); + core::cast!(in dyn WRoot<'_>, obj => dyn W4<'_>).expect_err("W4"); + assert_eq!(core::cast!(in dyn WRoot<'_>, obj => dyn W5<'_>).expect("W5").w5(), 51); + core::cast!(in dyn WRoot<'_>, obj => dyn W6<'_>).expect_err("W6"); +} + +// ========================================================================= +// Case 2: Three lifetimes — full merge vs. partial merge +// +// Tri<'a, 'b, 'c> has three binder variables. TriMerge<'a> merges +// all three (fan-out t0 -> {r0, r1, r2}), requiring pairwise mutual +// equivalence. TriPartial<'a, 'c> merges only the first two +// (fan-out t0 -> {r0, r1}, plus identity t1 -> r2). 
+// ========================================================================= + +trait Tri<'a, 'b, 'c>: TraitMetadataTable> + core::fmt::Debug { + fn id(&self) -> u32; +} +trait TriMerge<'a>: Tri<'a, 'a, 'a> { + fn merged(&self) -> u32; +} +trait TriPartial<'a, 'c>: Tri<'a, 'a, 'c> { + fn partial(&self) -> u32; +} + +#[derive(Debug)] +struct Triple<'a, 'b, 'c> { + _x: &'a u32, + _y: &'b u32, + _z: &'c u32, +} + +impl<'a, 'b, 'c> Tri<'a, 'b, 'c> for Triple<'a, 'b, 'c> { + fn id(&self) -> u32 { 3 } +} +impl<'a> TriMerge<'a> for Triple<'a, 'a, 'a> { + fn merged(&self) -> u32 { 333 } +} +impl<'a, 'c> TriPartial<'a, 'c> for Triple<'a, 'a, 'c> { + fn partial(&self) -> u32 { 334 } +} + +/// All three lifetimes identical => both full and partial merge succeed. +/// (Borrowck forces binder variables toward equivalence via the +/// TraitMetadataTable marker, so the negative fan-out cases — e.g., +/// first two same but third different — are not independently testable.) +#[inline(never)] +fn case2_all_same<'a>(x: &'a u32) { + let obj: &dyn Tri<'_, '_, '_> = &Triple { _x: x, _y: x, _z: x }; + let m = core::cast!(in dyn Tri<'_, '_, '_>, obj => dyn TriMerge<'_>) + .expect("all_same: merge"); + assert_eq!(m.merged(), 333); + let p = core::cast!(in dyn Tri<'_, '_, '_>, obj => dyn TriPartial<'_, '_>) + .expect("all_same: partial"); + assert_eq!(p.partial(), 334); +} + +// ========================================================================= +// Case 3: Two independent root hierarchies on the same concrete type +// +// MultiRoot<'a> implements both RootA (no lifetime params) and +// RootB<'a> (with lifetime param). Each root has its own sub-trait. +// Casts through each hierarchy are independent — the per-root +// metadata tables are generated separately. 
+// ========================================================================= + +trait RootA: TraitMetadataTable + core::fmt::Debug { + fn id_a(&self) -> u32; +} +trait SubA: RootA { + fn val_a(&self) -> u32; +} +trait SubA2: RootA { + fn val_a2(&self) -> u32; +} + +trait RootB<'a>: TraitMetadataTable> + core::fmt::Debug { + fn id_b(&self) -> u32; +} +trait SubB<'a>: RootB<'a> { + fn val_b(&self) -> u32; +} + +#[derive(Debug)] +struct MultiRoot<'a> { _x: &'a u32 } + +impl<'a> RootA for MultiRoot<'a> { + fn id_a(&self) -> u32 { 1 } +} +impl<'a> SubA for MultiRoot<'a> { + fn val_a(&self) -> u32 { 10 } +} +impl<'a> SubA2 for MultiRoot<'a> { + fn val_a2(&self) -> u32 { 15 } +} +impl<'a> RootB<'a> for MultiRoot<'a> { + fn id_b(&self) -> u32 { 2 } +} +impl<'a> SubB<'a> for MultiRoot<'a> { + fn val_b(&self) -> u32 { 20 } +} + +/// The same concrete value is independently usable through both roots. +#[inline(never)] +fn case3_dual_roots<'a>(x: &'a u32) { + let val = MultiRoot { _x: x }; + + // RootA hierarchy (no lifetimes). + let obj_a: &dyn RootA = &val; + let sub_a = core::cast!(in dyn RootA, obj_a => dyn SubA).expect("RootA -> SubA"); + assert_eq!(sub_a.val_a(), 10); + let sub_a2 = core::cast!(in dyn RootA, obj_a => dyn SubA2).expect("RootA -> SubA2"); + assert_eq!(sub_a2.val_a2(), 15); + + // RootB hierarchy (with lifetime). + let obj_b: &dyn RootB<'_> = &val; + let sub_b = core::cast!(in dyn RootB<'_>, obj_b => dyn SubB<'_>).expect("RootB -> SubB"); + assert_eq!(sub_b.val_b(), 20); +} + +// ========================================================================= +// Case 4: Zero-sized type with PhantomData lifetime +// +// A ZST (zero-sized type) that carries a lifetime only through +// PhantomData. The data pointer in the trait object is dangling +// (aligned to ZST conventions). Tests that the trait-cast machinery +// handles ZSTs correctly. 
+// ========================================================================= + +trait RootZ<'a>: TraitMetadataTable> + core::fmt::Debug { + fn tag(&self) -> u32; +} +trait SubZ<'a>: RootZ<'a> { + fn ztag(&self) -> u32; +} + +#[derive(Debug)] +struct Zst<'a> { + _marker: PhantomData<&'a ()>, +} + +impl<'a> RootZ<'a> for Zst<'a> { + fn tag(&self) -> u32 { 0 } +} +impl<'a> SubZ<'a> for Zst<'a> { + fn ztag(&self) -> u32 { 1 } +} + +#[inline(never)] +fn case4_zst<'a>(_anchor: &'a u32) { + let z: Zst<'a> = Zst { _marker: PhantomData }; + let obj: &dyn RootZ<'_> = &z; + assert_eq!(obj.tag(), 0); + let sub = core::cast!(in dyn RootZ<'_>, obj => dyn SubZ<'_>).expect("zst"); + assert_eq!(sub.ztag(), 1); +} + +// ========================================================================= +// Case 5: Deep chain with where-clause gating (hidden lifetimes) +// +// Sub5: Mid5: Root5, where Root5 has NO lifetime params (both +// lifetimes are hidden). Mid5 and Sub5 impls require 'b: 'a. +// Tests that where-clause gating propagates through a chain. +// Uses the proven hidden-lifetime pattern from lifetime-bounded-downcast. +// ========================================================================= + +trait Root5: TraitMetadataTable + core::fmt::Debug { + fn id(&self) -> u32; +} +trait Mid5: Root5 { + fn mid(&self) -> u32; +} +trait Sub5: Mid5 { + fn sub(&self) -> u32; +} + +#[derive(Debug)] +struct Deep<'a, 'b> { _x: &'a u32, _y: &'b u32 } + +impl<'a, 'b> Root5 for Deep<'a, 'b> { + fn id(&self) -> u32 { 5 } +} +impl<'a, 'b> Mid5 for Deep<'a, 'b> +where + 'b: 'a, +{ + fn mid(&self) -> u32 { 51 } +} +impl<'a, 'b> Sub5 for Deep<'a, 'b> +where + 'b: 'a, +{ + fn sub(&self) -> u32 { 52 } +} + +/// 'b is a local lifetime, strictly shorter than 'a. +/// 'b: 'a does not hold => neither Mid5 nor Sub5 is available. 
+#[inline(never)] +fn case5_unbounded<'a>(x: &'a u32) { + let local: u32 = 99; + let obj: &dyn Root5 = &Deep { _x: x, _y: &local }; + core::cast!(in dyn Root5, obj => dyn Mid5).expect_err("mid"); + core::cast!(in dyn Root5, obj => dyn Sub5).expect_err("sub"); +} + +fn main() { + let x: u32 = 42; + + eprintln!("=== case1: wide hierarchy ==="); + case1_all(&x); + case1_sparse(&x); + + eprintln!("=== case2: three lifetimes ==="); + case2_all_same(&x); + + eprintln!("=== case3: dual roots ==="); + case3_dual_roots(&x); + + eprintln!("=== case4: ZST ==="); + case4_zst(&x); + + eprintln!("=== case5: deep chain ==="); + case5_unbounded(&x); +} diff --git a/tests/ui/trait-cast/unused-cast-target.rs b/tests/ui/trait-cast/unused-cast-target.rs new file mode 100644 index 0000000000000..33a4700ebd106 --- /dev/null +++ b/tests/ui/trait-cast/unused-cast-target.rs @@ -0,0 +1,49 @@ +//@ build-fail +//@ compile-flags: --crate-type=bin +//! Diagnostic: the `unused_cast_target` lint fires when a `cast!` target +//! trait has no concrete type in the final binary that implements it. +//! Such casts always return `Err` at runtime. + +#![feature(trait_cast)] +#![allow(dead_code)] +#![deny(unused_cast_target)] + +extern crate core; +use core::marker::TraitMetadataTable; + +trait Root: TraitMetadataTable + core::fmt::Debug { + fn val(&self) -> u32; +} + +// `Used` has an impl for `S` below, so a cast to `dyn Used` is fine. +trait Used: Root { + fn used(&self) -> u32; +} + +// `Unused` has Root as supertrait (so it IS in the graph), but no concrete +// type in this crate implements it. Cast to `dyn Unused` always returns Err. +trait Unused: Root { + fn unused(&self) -> u32; +} + +#[derive(Debug)] +struct S; + +impl Root for S { + fn val(&self) -> u32 { 0 } +} +impl Used for S { + fn used(&self) -> u32 { 1 } +} + +fn do_casts(obj: &dyn Root) { + let _ = core::cast!(in dyn Root, obj => dyn Used); + let _ = core::cast!(in dyn Root, obj => dyn Unused); +} +//~? 
ERROR cast target `dyn Unused` is unreachable in the trait graph of `dyn Root` +//~? ERROR cast target `dyn Unused` is unreachable in the trait graph of `dyn Root` + +fn main() { + let s = S; + do_casts(&s); +} diff --git a/tests/ui/trait-cast/unused-cast-target.stderr b/tests/ui/trait-cast/unused-cast-target.stderr new file mode 100644 index 0000000000000..e66d65856f529 --- /dev/null +++ b/tests/ui/trait-cast/unused-cast-target.stderr @@ -0,0 +1,20 @@ +error: cast target `dyn Unused` is unreachable in the trait graph of `dyn Root` + --> $SRC_DIR/core/src/trait_cast.rs:LL:COL + | + = note: no type implementing `dyn Root` also implements `dyn Unused` + = note: this cast will always return `Err` at runtime +note: the lint level is defined here + --> $DIR/unused-cast-target.rs:9:9 + | +LL | #![deny(unused_cast_target)] + | ^^^^^^^^^^^^^^^^^^ + +error: cast target `dyn Unused` is unreachable in the trait graph of `dyn Root` + --> $SRC_DIR/core/src/trait_cast.rs:LL:COL + | + = note: no type implementing `dyn Root` also implements `dyn Unused` + = note: this cast will always return `Err` at runtime + = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` + +error: aborting due to 2 previous errors + diff --git a/tests/ui/type-alias-impl-trait/in-where-clause.stderr b/tests/ui/type-alias-impl-trait/in-where-clause.stderr index fcb590a961c18..c2cc571c56c2e 100644 --- a/tests/ui/type-alias-impl-trait/in-where-clause.stderr +++ b/tests/ui/type-alias-impl-trait/in-where-clause.stderr @@ -9,6 +9,13 @@ note: ...which requires borrow-checking `foo`... | LL | / fn foo() -> Bar LL | | where +LL | | Bar: Send, + | |______________^ +note: ...which requires borrow-checking (with region summaries) `foo`... + --> $DIR/in-where-clause.rs:9:1 + | +LL | / fn foo() -> Bar +LL | | where LL | | Bar: Send, | |______________^ note: ...which requires promoting constants in MIR for `foo`...