Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
37 changes: 0 additions & 37 deletions compiler/rustc_data_structures/src/sync.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,6 @@
//! | `Lock<T>` | `RefCell<T>` | `RefCell<T>` or |
//! | | | `parking_lot::Mutex<T>` |
//! | `RwLock<T>` | `RefCell<T>` | `parking_lot::RwLock<T>` |
//! | `MTLock<T>` [^1] | `T` | `Lock<T>` |
//!
//! [^1]: `MTLock` is similar to `Lock`, but the serial version avoids the cost
//! of a `RefCell`. This is appropriate when interior mutability is not
//! required.

use std::collections::HashMap;
use std::hash::{BuildHasher, Hash};
Expand Down Expand Up @@ -106,38 +101,6 @@ mod mode {
}
}

// FIXME(parallel_compiler): Get rid of these aliases across the compiler.

#[derive(Debug, Default)]
// NOTE(review): this type is deleted by this change (pure-deletion hunk). The
// module docs removed above (L28-L32 of this diff) described `MTLock` as similar
// to `Lock`, with the serial build avoiding the cost of a `RefCell`; as visible
// here it is now only a thin delegate around `Lock<T>`.
pub struct MTLock<T>(Lock<T>);

impl<T> MTLock<T> {
/// Wraps `inner` in a new `MTLock`.
#[inline(always)]
pub fn new(inner: T) -> Self {
MTLock(Lock::new(inner))
}

/// Consumes the lock and returns the inner value.
#[inline(always)]
pub fn into_inner(self) -> T {
self.0.into_inner()
}

/// Returns a mutable reference to the inner value via the underlying
/// lock's `get_mut` (exclusive access is guaranteed by `&mut self`).
#[inline(always)]
pub fn get_mut(&mut self) -> &mut T {
self.0.get_mut()
}

/// Acquires the underlying `Lock` and returns its guard.
#[inline(always)]
pub fn lock(&self) -> LockGuard<'_, T> {
self.0.lock()
}

/// Alias for [`Self::lock`] — presumably retained for call sites that once
/// distinguished read and write locking (TODO confirm against history).
#[inline(always)]
pub fn lock_mut(&self) -> LockGuard<'_, T> {
self.lock()
}
}

/// This makes locks panic if they are already held.
/// It is only useful when you are running in a single thread
const ERROR_CHECKING: bool = false;
Expand Down
22 changes: 11 additions & 11 deletions compiler/rustc_monomorphize/src/collector.rs
Original file line number Diff line number Diff line change
Expand Up @@ -209,7 +209,7 @@ use std::cell::OnceCell;
use std::ops::ControlFlow;

use rustc_data_structures::fx::FxIndexMap;
use rustc_data_structures::sync::{MTLock, par_for_each_in};
use rustc_data_structures::sync::{Lock, par_for_each_in};
use rustc_data_structures::unord::{UnordMap, UnordSet};
use rustc_hir as hir;
use rustc_hir::attrs::InlineAttr;
Expand Down Expand Up @@ -251,12 +251,12 @@ pub(crate) enum MonoItemCollectionStrategy {
/// The state that is shared across the concurrent threads that are doing collection.
struct SharedState<'tcx> {
/// Items that have been or are currently being recursively collected.
visited: MTLock<UnordSet<MonoItem<'tcx>>>,
visited: Lock<UnordSet<MonoItem<'tcx>>>,
/// Items that have been or are currently being recursively treated as "mentioned", i.e., their
/// consts are evaluated but nothing is added to the collection.
mentioned: MTLock<UnordSet<MonoItem<'tcx>>>,
mentioned: Lock<UnordSet<MonoItem<'tcx>>>,
/// Which items are being used where, for better errors.
usage_map: MTLock<UsageMap<'tcx>>,
usage_map: Lock<UsageMap<'tcx>>,
}

pub(crate) struct UsageMap<'tcx> {
Expand Down Expand Up @@ -359,7 +359,7 @@ fn collect_items_root<'tcx>(
state: &SharedState<'tcx>,
recursion_limit: Limit,
) {
if !state.visited.lock_mut().insert(starting_item.node) {
if !state.visited.lock().insert(starting_item.node) {
// We've been here already, no need to search again.
return;
}
Expand Down Expand Up @@ -568,21 +568,21 @@ fn collect_items_rec<'tcx>(
// This is part of the output of collection and hence only relevant for "used" items.
// ("Mentioned" items are only considered internally during collection.)
if mode == CollectionMode::UsedItems {
state.usage_map.lock_mut().record_used(starting_item.node, &used_items);
state.usage_map.lock().record_used(starting_item.node, &used_items);
}

{
let mut visited = OnceCell::default();
if mode == CollectionMode::UsedItems {
used_items
.items
.retain(|k, _| visited.get_mut_or_init(|| state.visited.lock_mut()).insert(*k));
.retain(|k, _| visited.get_mut_or_init(|| state.visited.lock()).insert(*k));
}

let mut mentioned = OnceCell::default();
mentioned_items.items.retain(|k, _| {
!visited.get_or_init(|| state.visited.lock()).contains(k)
&& mentioned.get_mut_or_init(|| state.mentioned.lock_mut()).insert(*k)
&& mentioned.get_mut_or_init(|| state.mentioned.lock()).insert(*k)
});
}
if mode == CollectionMode::MentionedItems {
Expand Down Expand Up @@ -1810,9 +1810,9 @@ pub(crate) fn collect_crate_mono_items<'tcx>(
debug!("building mono item graph, beginning at roots");

let state = SharedState {
visited: MTLock::new(UnordSet::default()),
mentioned: MTLock::new(UnordSet::default()),
usage_map: MTLock::new(UsageMap::new()),
visited: Lock::new(UnordSet::default()),
mentioned: Lock::new(UnordSet::default()),
usage_map: Lock::new(UsageMap::new()),
};
let recursion_limit = tcx.recursion_limit();

Expand Down
60 changes: 50 additions & 10 deletions compiler/rustc_target/src/callconv/hexagon.rs
Original file line number Diff line number Diff line change
@@ -1,36 +1,76 @@
use rustc_abi::TyAbiInterface;
use rustc_abi::{HasDataLayout, TyAbiInterface};

use crate::callconv::{ArgAbi, FnAbi};
use crate::callconv::{ArgAbi, FnAbi, Reg, Uniform};

fn classify_ret<Ty>(ret: &mut ArgAbi<'_, Ty>) {
if ret.layout.is_aggregate() && ret.layout.size.bits() > 64 {
ret.make_indirect();
} else {
fn classify_ret<'a, Ty, C>(_cx: &C, ret: &mut ArgAbi<'a, Ty>)
where
Ty: TyAbiInterface<'a, C> + Copy,
C: HasDataLayout,
{
if !ret.layout.is_sized() {
return;
}

if !ret.layout.is_aggregate() {
ret.extend_integer_width_to(32);
return;
}

// Per the Hexagon ABI:
// - Aggregates up to 32 bits are returned in R0
// - Aggregates 33-64 bits are returned in R1:R0
// - Aggregates > 64 bits are returned indirectly via hidden first argument
let size = ret.layout.size;
let bits = size.bits();
if bits <= 32 {
ret.cast_to(Uniform::new(Reg::i32(), size));
} else if bits <= 64 {
ret.cast_to(Uniform::new(Reg::i64(), size));
} else {
ret.make_indirect();
}
}

fn classify_arg<'a, Ty, C>(cx: &C, arg: &mut ArgAbi<'a, Ty>)
where
Ty: TyAbiInterface<'a, C> + Copy,
C: HasDataLayout,
{
if !arg.layout.is_sized() {
return;
}
if arg.layout.pass_indirectly_in_non_rustic_abis(cx) {
arg.make_indirect();
return;
}
if arg.layout.is_aggregate() && arg.layout.size.bits() > 64 {
arg.make_indirect();
} else {

if !arg.layout.is_aggregate() {
arg.extend_integer_width_to(32);
return;
}

// Per the Hexagon ABI:
// - Aggregates up to 32 bits are passed in a single register
// - Aggregates 33-64 bits are passed in a register pair
// - Aggregates > 64 bits are passed on the stack
let size = arg.layout.size;
let bits = size.bits();
if bits <= 32 {
arg.cast_to(Uniform::new(Reg::i32(), size));
} else if bits <= 64 {
arg.cast_to(Uniform::new(Reg::i64(), size));
} else {
arg.pass_by_stack_offset(None);
}
}

pub(crate) fn compute_abi_info<'a, Ty, C>(cx: &C, fn_abi: &mut FnAbi<'a, Ty>)
where
Ty: TyAbiInterface<'a, C> + Copy,
C: HasDataLayout,
{
if !fn_abi.ret.is_ignore() {
classify_ret(&mut fn_abi.ret);
classify_ret(cx, &mut fn_abi.ret);
}

for arg in fn_abi.args.iter_mut() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1030,12 +1030,15 @@ impl<'a, 'tcx> FindInferSourceVisitor<'a, 'tcx> {
let args = self.node_args_opt(expr.hir_id)?;
let span = tcx.hir_span(segment.hir_id);
let insert_span = segment.ident.span.shrink_to_hi().with_hi(span.hi());
let have_turbofish = segment.args.is_some_and(|args| {
args.args.iter().any(|arg| arg.is_ty_or_const())
});
InsertableGenericArgs {
insert_span,
args,
generics_def_id: def_id,
def_id,
have_turbofish: false,
have_turbofish,
}
};
return Box::new(insertable.into_iter());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -864,23 +864,63 @@ impl<'a, 'tcx> TypeErrCtxt<'a, 'tcx> {
.collect::<Vec<_>>()
.join(", ");

if matches!(obligation.cause.code(), ObligationCauseCode::FunctionArg { .. })
if let ObligationCauseCode::FunctionArg { arg_hir_id, .. } = obligation.cause.code()
&& obligation.cause.span.can_be_used_for_suggestions()
{
let (span, sugg) = if let Some(snippet) =
self.tcx.sess.source_map().span_to_snippet(obligation.cause.span).ok()
&& snippet.starts_with("|")
{
(obligation.cause.span, format!("({snippet})({args})"))
} else {
(obligation.cause.span.shrink_to_hi(), format!("({args})"))
let span = obligation.cause.span;

let arg_expr = match self.tcx.hir_node(*arg_hir_id) {
hir::Node::Expr(expr) => Some(expr),
_ => None,
};

// When the obligation error has been ensured to have been caused by
// an argument, the `obligation.cause.span` points at the expression
// of the argument, so we can provide a suggestion. Otherwise, we give
// a more general note.
err.span_suggestion_verbose(span, msg, sugg, Applicability::HasPlaceholders);
let is_closure_expr =
arg_expr.is_some_and(|expr| matches!(expr.kind, hir::ExprKind::Closure(..)));

// If the user wrote `|| {}()`, suggesting to call the closure would produce `(|| {}())()`,
// which doesn't help and is often outright wrong.
if args.is_empty()
&& let Some(expr) = arg_expr
&& let hir::ExprKind::Closure(closure) = expr.kind
{
let mut body = self.tcx.hir_body(closure.body).value;

// Async closures desugar to a closure returning a coroutine
if let hir::ClosureKind::CoroutineClosure(hir::CoroutineDesugaring::Async) =
closure.kind
{
let peeled = body.peel_blocks().peel_drop_temps();
if let hir::ExprKind::Closure(inner) = peeled.kind {
body = self.tcx.hir_body(inner.body).value;
}
}

let peeled_body = body.peel_blocks().peel_drop_temps();
if let hir::ExprKind::Call(callee, call_args) = peeled_body.kind
&& call_args.is_empty()
&& let hir::ExprKind::Block(..) = callee.peel_blocks().peel_drop_temps().kind
{
return false;
}
}

if is_closure_expr {
err.multipart_suggestion_verbose(
msg,
vec![
(span.shrink_to_lo(), "(".to_string()),
(span.shrink_to_hi(), format!(")({args})")),
],
Applicability::HasPlaceholders,
);
} else {
err.span_suggestion_verbose(
span.shrink_to_hi(),
msg,
format!("({args})"),
Applicability::HasPlaceholders,
);
}
} else if let DefIdOrName::DefId(def_id) = def_id_or_name {
let name = match self.tcx.hir_get_if_local(def_id) {
Some(hir::Node::Expr(hir::Expr {
Expand Down
23 changes: 14 additions & 9 deletions src/bootstrap/src/core/config/flags.rs
Original file line number Diff line number Diff line change
Expand Up @@ -570,16 +570,21 @@ impl Subcommand {

pub fn test_target(&self) -> TestTarget {
match *self {
Subcommand::Test { all_targets, doc, tests, .. }
| Subcommand::Miri { all_targets, doc, tests, .. } => match (all_targets, doc, tests) {
(true, true, _) | (true, _, true) | (_, true, true) => {
panic!("You can only set one of `--all-targets`, `--doc` and `--tests`.")
Subcommand::Test { mut all_targets, doc, tests, no_doc, .. }
| Subcommand::Miri { mut all_targets, doc, tests, no_doc, .. } => {
// for backwards compatibility --no-doc keeps working
all_targets = all_targets || no_doc;

match (all_targets, doc, tests) {
(true, true, _) | (true, _, true) | (_, true, true) => {
panic!("You can only set one of `--all-targets`, `--doc` and `--tests`.")
}
(true, false, false) => TestTarget::AllTargets,
(false, true, false) => TestTarget::DocOnly,
(false, false, true) => TestTarget::Tests,
(false, false, false) => TestTarget::Default,
}
(true, false, false) => TestTarget::AllTargets,
(false, true, false) => TestTarget::DocOnly,
(false, false, true) => TestTarget::Tests,
(false, false, false) => TestTarget::Default,
},
}
_ => TestTarget::Default,
}
}
Expand Down
1 change: 0 additions & 1 deletion src/doc/rustc-dev-guide/src/parallel-rustc.md
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,6 @@ are implemented differently depending on whether `parallel-compiler` is true.
| -------------------------------- | --------------------------------------------------- | ------------ |
| Lock\<T> | (parking_lot::Mutex\<T>) | (std::cell::RefCell) |
| RwLock\<T> | (parking_lot::RwLock\<T>) | (std::cell::RefCell) |
| MTLock\<T> | (Lock\<T>) | (T) |
| ReadGuard | parking_lot::RwLockReadGuard | std::cell::Ref |
| MappedReadGuard | parking_lot::MappedRwLockReadGuard | std::cell::Ref |
| WriteGuard | parking_lot::RwLockWriteGuard | std::cell::RefMut |
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,8 @@ LL | fn check(_: impl std::marker::ConstParamTy_) {}
| ^^^^^^^^^^^^^^^^^^^^^^^^^^ required by this bound in `check`
help: use parentheses to call this closure
|
LL - check(|| {});
LL + check((|| {})());
|
LL | check((|| {})());
| + +++

error[E0277]: `fn()` can't be used as a const parameter type
--> $DIR/const_param_ty_bad.rs:9:11
Expand Down
28 changes: 28 additions & 0 deletions tests/ui/inference/useless-turbofish-suggestion.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
// Regression test for #153732.
//
// When a method call already has turbofish type arguments, don't suggest
// rewriting them — the suggestion just rewrites user syntax into
// fully-qualified form without resolving anything.
//
// The span still points at the method name rather than the unresolved `_`;
// fixing that is left as future work.

struct S; // receiver type for the inherent method exercised by this test

impl S {
fn f<A, B>(self, _a: A) -> B { // `B` occurs only in the return type, so callers must annotate it
todo!()
}
}

fn with_turbofish() { // turbofish already written, yet `B` is `_`: expect the error with no rewrite suggestion
S.f::<u32, _>(42);
//~^ ERROR type annotations needed
}

fn without_turbofish() { // no turbofish: baseline case for the same inference error
S.f(42);
//~^ ERROR type annotations needed
}

fn main() {}
Loading
Loading