Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 10 additions & 4 deletions compiler/rustc_codegen_cranelift/src/debuginfo/line_info.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,7 +47,8 @@ fn osstr_as_utf8_bytes(path: &OsStr) -> &[u8] {

fn make_file_info(source_file: &SourceFile, embed_source: bool) -> Option<FileInfo> {
let has_md5 = source_file.src_hash.kind == SourceFileHashAlgorithm::Md5;
let has_source = embed_source && source_file.src.is_some();
let has_source = embed_source
&& (source_file.src.is_some() || source_file.external_src.read().get_source().is_some());

if !has_md5 && !has_source {
return None;
Expand All @@ -62,6 +63,8 @@ fn make_file_info(source_file: &SourceFile, embed_source: bool) -> Option<FileIn
if embed_source {
if let Some(src) = &source_file.src {
info.source = Some(LineString::String(src.as_bytes().to_vec()));
} else if let Some(src) = source_file.external_src.read().get_source() {
info.source = Some(LineString::String(src.as_bytes().to_vec()));
}
}

Expand All @@ -79,19 +82,22 @@ impl DebugContext {
let span = hygiene::walk_chain_collapsed(span, function_span);
match tcx.sess.source_map().lookup_line(span.lo()) {
Ok(SourceFileAndLine { sf: file, line }) => {
let file_id = self.add_source_file(&file);
let file_id = self.add_source_file(tcx, &file);
let line_pos = file.lines()[line];
let col = file.relative_position(span.lo()) - line_pos;

(file_id, u64::try_from(line).unwrap() + 1, u64::from(col.to_u32()) + 1)
}
Err(file) => (self.add_source_file(&file), 0, 0),
Err(file) => (self.add_source_file(tcx, &file), 0, 0),
}
}

pub(crate) fn add_source_file(&mut self, source_file: &SourceFile) -> FileId {
pub(crate) fn add_source_file(&mut self, tcx: TyCtxt<'_>, source_file: &SourceFile) -> FileId {
let cache_key = (source_file.stable_id, source_file.src_hash);
*self.created_files.entry(cache_key).or_insert_with(|| {
if self.embed_source && source_file.src.is_none() {
tcx.sess.source_map().ensure_source_file_source_present(source_file);
}
let line_program: &mut LineProgram = &mut self.dwarf.unit.line_program;
let line_strings: &mut LineStringTable = &mut self.dwarf.line_strings;

Expand Down
15 changes: 11 additions & 4 deletions compiler/rustc_codegen_llvm/src/debuginfo/metadata.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ use std::borrow::Cow;
use std::fmt::{self, Write};
use std::hash::{Hash, Hasher};
use std::path::PathBuf;
use std::sync::Arc;
use std::{assert_matches, iter, ptr};

use libc::{c_longlong, c_uint};
Expand Down Expand Up @@ -607,8 +606,16 @@ pub(crate) fn file_metadata<'ll>(cx: &CodegenCx<'ll, '_>, source_file: &SourceFi
};
let hash_value = hex_encode(source_file.src_hash.hash_bytes());

let source =
cx.sess().opts.unstable_opts.embed_source.then_some(()).and(source_file.src.as_ref());
let mut source = None;
let external_src;
if cx.sess().opts.unstable_opts.embed_source {
source = source_file.src.as_deref().map(String::as_str);
if source.is_none() {
cx.tcx.sess.source_map().ensure_source_file_source_present(source_file);
external_src = source_file.external_src.read();
source = external_src.get_source();
}
}

create_file(DIB(cx), &file_name, &directory, &hash_value, hash_kind, source)
}
Expand All @@ -626,7 +633,7 @@ fn create_file<'ll>(
directory: &str,
hash_value: &str,
hash_kind: llvm::ChecksumKind,
source: Option<&Arc<String>>,
source: Option<&str>,
) -> &'ll DIFile {
unsafe {
llvm::LLVMRustDIBuilderCreateFile(
Expand Down
53 changes: 7 additions & 46 deletions compiler/rustc_middle/src/ty/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use std::cmp::Ordering;
use std::env::VarError;
use std::ffi::OsStr;
use std::hash::{Hash, Hasher};
use std::marker::{PhantomData, PointeeSized};
use std::marker::PointeeSized;
use std::ops::{Bound, Deref};
use std::sync::{Arc, OnceLock};
use std::{fmt, iter, mem};
Expand Down Expand Up @@ -539,36 +539,14 @@ pub struct FreeRegionInfo {

/// This struct should only be created by `create_def`.
#[derive(Copy, Clone)]
pub struct TyCtxtFeed<'tcx, KEY: Copy> {
pub struct TyCtxtFeed<'tcx, K: Copy> {
pub tcx: TyCtxt<'tcx>,
// Do not allow direct access, as downstream code must not mutate this field.
key: KEY,
key: K,
}

/// Never return a `Feed` from a query. Only queries that create a `DefId` are
/// allowed to feed queries for that `DefId`.
impl<KEY: Copy> !HashStable for TyCtxtFeed<'_, KEY> {}

/// The same as `TyCtxtFeed`, but does not contain a `TyCtxt`.
/// Use this to pass around when you have a `TyCtxt` elsewhere.
/// Just an optimization to save space and not store hundreds of
/// `TyCtxtFeed` in the resolver.
#[derive(Copy, Clone)]
pub struct Feed<'tcx, KEY: Copy> {
_tcx: PhantomData<TyCtxt<'tcx>>,
// Do not allow direct access, as downstream code must not mutate this field.
key: KEY,
}

/// Never return a `Feed` from a query. Only queries that create a `DefId` are
/// allowed to feed queries for that `DefId`.
impl<KEY: Copy> !HashStable for Feed<'_, KEY> {}

impl<T: fmt::Debug + Copy> fmt::Debug for Feed<'_, T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.key.fmt(f)
}
}
/// Only queries that create a `DefId` are allowed to feed queries for that `DefId`.
impl<K: Copy> !HashStable for TyCtxtFeed<'_, K> {}

/// Some workarounds to use cases that cannot use `create_def`.
/// Do not add new ways to create `TyCtxtFeed` without consulting
Expand Down Expand Up @@ -622,28 +600,11 @@ impl<'tcx> TyCtxt<'tcx> {
}
}

impl<'tcx, KEY: Copy> TyCtxtFeed<'tcx, KEY> {
impl<'tcx, K: Copy> TyCtxtFeed<'tcx, K> {
#[inline(always)]
pub fn key(&self) -> KEY {
pub fn key(&self) -> K {
self.key
}

#[inline(always)]
pub fn downgrade(self) -> Feed<'tcx, KEY> {
Feed { _tcx: PhantomData, key: self.key }
}
}

impl<'tcx, KEY: Copy> Feed<'tcx, KEY> {
#[inline(always)]
pub fn key(&self) -> KEY {
self.key
}

#[inline(always)]
pub fn upgrade(self, tcx: TyCtxt<'tcx>) -> TyCtxtFeed<'tcx, KEY> {
TyCtxtFeed { tcx, key: self.key }
}
}

impl<'tcx> TyCtxtFeed<'tcx, LocalDefId> {
Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_middle/src/ty/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -83,7 +83,7 @@ pub use self::consts::{
const_lit_matches_ty,
};
pub use self::context::{
CtxtInterners, CurrentGcx, Feed, FreeRegionInfo, GlobalCtxt, Lift, TyCtxt, TyCtxtFeed, tls,
CtxtInterners, CurrentGcx, FreeRegionInfo, GlobalCtxt, Lift, TyCtxt, TyCtxtFeed, tls,
};
pub use self::fold::*;
pub use self::instance::{Instance, InstanceKind, ReifyReason};
Expand Down
8 changes: 8 additions & 0 deletions compiler/rustc_mir_transform/src/jump_threading.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,14 @@ const MAX_COST: u8 = 100;

impl<'tcx> crate::MirPass<'tcx> for JumpThreading {
fn is_enabled(&self, sess: &rustc_session::Session) -> bool {
    // Jump threading restructures control flow and can duplicate calls in the
    // process. On GPU targets, duplicating so-called "convergent" operations
    // produces incorrect code (similar to how inline assembly cannot be
    // duplicated on all targets), so the pass is conservatively disabled
    // there. See also issue #137086.
    // On all other targets the pass runs only at mir-opt-level >= 2.
    !sess.target.is_like_gpu && sess.mir_opt_level() >= 2
}

Expand Down
2 changes: 1 addition & 1 deletion compiler/rustc_parse/src/lexer/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -115,7 +115,7 @@ pub(crate) fn lex_token_trees<'psess, 'src>(
Err(errs) => {
// We emit delimiter mismatch errors first, then the unclosed delimiter error,
// because the delimiter mismatch is more likely to be the root cause of the error
unmatched_closing_delims.extend(errs);
unmatched_closing_delims.push(errs);
Err(unmatched_closing_delims)
}
}
Expand Down
8 changes: 4 additions & 4 deletions compiler/rustc_parse/src/lexer/tokentrees.rs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
pub(super) fn lex_token_trees(
&mut self,
is_delimited: bool,
) -> Result<(Spacing, TokenStream), Vec<Diag<'psess>>> {
) -> Result<(Spacing, TokenStream), Diag<'psess>> {
// Move past the opening delimiter.
let open_spacing = self.bump_minimal();

Expand All @@ -35,11 +35,11 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
return if is_delimited {
Ok((open_spacing, TokenStream::new(buf)))
} else {
Err(vec![self.close_delim_err(delim)])
Err(self.close_delim_err(delim))
};
} else if self.token.kind == token::Eof {
return if is_delimited {
Err(vec![self.eof_err()])
Err(self.eof_err())
} else {
Ok((open_spacing, TokenStream::new(buf)))
};
Expand All @@ -54,7 +54,7 @@ impl<'psess, 'src> Lexer<'psess, 'src> {
fn lex_token_tree_open_delim(
&mut self,
open_delim: Delimiter,
) -> Result<TokenTree, Vec<Diag<'psess>>> {
) -> Result<TokenTree, Diag<'psess>> {
// The span for beginning of the delimited section.
let pre_span = self.token.span;

Expand Down
Loading
Loading