From f842d7b505eb57ac589e47525eb14e9acc888c57 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Oct 2023 16:40:58 -0500 Subject: [PATCH 001/134] Allow cloning `poison::Guard`s. (makes implementing `Mapped*Guard` easier) --- library/std/src/sync/poison.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/library/std/src/sync/poison.rs b/library/std/src/sync/poison.rs index 741312d5537e9..ababaa2f06635 100644 --- a/library/std/src/sync/poison.rs +++ b/library/std/src/sync/poison.rs @@ -55,6 +55,7 @@ impl Flag { } } +#[derive(Clone)] pub struct Guard { panicking: bool, } From 9be1321676dd3a90b180ce8e9b6ac2da1bd2272d Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Oct 2023 16:26:35 -0500 Subject: [PATCH 002/134] Implement `MappedMutexGuard`. --- library/std/src/sync/mutex.rs | 193 ++++++++++++++++++++++++++++++++++ 1 file changed, 193 insertions(+) diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index b4ae6b7e07ebc..18bea6d58fba1 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -3,7 +3,10 @@ mod tests; use crate::cell::UnsafeCell; use crate::fmt; +use crate::marker::PhantomData; +use crate::mem::ManuallyDrop; use crate::ops::{Deref, DerefMut}; +use crate::ptr::NonNull; use crate::sync::{poison, LockResult, TryLockError, TryLockResult}; use crate::sys::locks as sys; @@ -213,6 +216,43 @@ impl !Send for MutexGuard<'_, T> {} #[stable(feature = "mutexguard", since = "1.19.0")] unsafe impl Sync for MutexGuard<'_, T> {} +/// An RAII mutex guard returned by `MutexGuard::map`, which can point to a +/// subfield of the protected data. When this structure is dropped (falls out +/// of scope), the lock will be unlocked. +/// +/// The main difference between `MappedMutexGuard` and [`MutexGuard`] is that the +/// former cannot be used with [`CondVar`], since that +/// could introduce soundness issues if the locked object is modified by another +/// thread while the `Mutex` is unlocked. +/// +/// The data protected by the mutex can be accessed through this guard via its +/// [`Deref`] and [`DerefMut`] implementations. +/// +/// This structure is created by the [`map`] and [`try_map`] methods on +/// [`MutexGuard`]. +/// +/// [`map`]: MutexGuard::map +/// [`try_map`]: MutexGuard::try_map +/// [`CondVar`]: crate::sync::CondVar +#[must_use = "if unused the Mutex will immediately unlock"] +#[must_not_suspend = "holding a MappedMutexGuard across suspend \ + points can cause deadlocks, delays, \ + and cause Futures to not implement `Send`"] +#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[clippy::has_significant_drop] +pub struct MappedMutexGuard<'a, T: ?Sized + 'a> { + data: NonNull, + inner: &'a sys::Mutex, + poison_flag: &'a poison::Flag, + poison: poison::Guard, + _variance: PhantomData<&'a mut T>, +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl !Send for MappedMutexGuard<'_, T> {} +#[unstable(feature = "mapped_lock_guards", issue = "none")] +unsafe impl Sync for MappedMutexGuard<'_, T> {} + impl Mutex { /// Creates a new mutex in an unlocked state ready for use. /// @@ -552,3 +592,156 @@ pub fn guard_lock<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a sys::Mutex { pub fn guard_poison<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a poison::Flag { &guard.lock.poison } + +impl<'a, T: ?Sized> MutexGuard<'a, T> { + /// Makes a [`MappedMutexGuard`] for a component of the borrowed data, e.g. + /// an enum variant. + /// + /// The `Mutex` is already locked, so this cannot fail. 
+ /// + /// This is an associated function that needs to be used as + /// `MutexGuard::map(...)`. A method would interfere with methods of the + /// same name on the contents of the `MutexGuard` used through `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> + where + F: FnOnce(&mut T) -> &mut U, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&mut *orig)); + MappedMutexGuard { + data: value, + inner: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + } + } + + /// Makes a [`MappedMutexGuard`] for a component of the borrowed data. The + /// original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `Mutex` is already locked, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MutexGuard::try_map(...)`. A method would interfere with methods of the + /// same name on the contents of the `MutexGuard` used through `Deref`. + #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&mut T) -> Option<&mut U>, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + match f(&mut *orig).map(NonNull::from) { + Some(value) => Ok(MappedMutexGuard { + data: value, + inner: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Deref for MappedMutexGuard<'_, T> { + type Target = T; + + fn deref(&self) -> &T { + unsafe { self.data.as_ref() } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl DerefMut for MappedMutexGuard<'_, T> { + fn deref_mut(&mut self) -> &mut T { + unsafe { self.data.as_mut() } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Drop for MappedMutexGuard<'_, T> { + #[inline] + fn drop(&mut self) { + unsafe { + self.poison_flag.done(&self.poison); + self.inner.unlock(); + } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Debug for MappedMutexGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + fmt::Debug::fmt(&**self, f) + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Display for MappedMutexGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { + /// Makes a [`MappedMutexGuard`] for a component of the borrowed data, e.g. + /// an enum variant. + /// + /// The `Mutex` is already locked, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MutexGuard::map(...)`. A method would interfere with methods of the + /// same name on the contents of the `MutexGuard` used through `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> + where + F: FnOnce(&mut T) -> &mut U, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&mut *orig)); + MappedMutexGuard { + data: value, + inner: orig.inner, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + } + } + + /// Makes a [`MappedMutexGuard`] for a component of the borrowed data. 
The + /// original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `Mutex` is already locked, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MutexGuard::try_map(...)`. A method would interfere with methods of the + /// same name on the contents of the `MutexGuard` used through `Deref`. + #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&mut T) -> Option<&mut U>, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + match f(&mut *orig).map(NonNull::from) { + Some(value) => Ok(MappedMutexGuard { + data: value, + inner: orig.inner, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} From ea97c1f2dc451ddd50c159cf04967f9be742de8a Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Oct 2023 16:26:59 -0500 Subject: [PATCH 003/134] Implmement `MappedRwLock(Read|Write)Guard`. --- library/std/src/sync/rwlock.rs | 358 ++++++++++++++++++++++++++++++++- 1 file changed, 355 insertions(+), 3 deletions(-) diff --git a/library/std/src/sync/rwlock.rs b/library/std/src/sync/rwlock.rs index 5d8967bfbe686..717f40db4c1c6 100644 --- a/library/std/src/sync/rwlock.rs +++ b/library/std/src/sync/rwlock.rs @@ -3,6 +3,8 @@ mod tests; use crate::cell::UnsafeCell; use crate::fmt; +use crate::marker::PhantomData; +use crate::mem::ManuallyDrop; use crate::ops::{Deref, DerefMut}; use crate::ptr::NonNull; use crate::sync::{poison, LockResult, TryLockError, TryLockResult}; @@ -105,7 +107,7 @@ unsafe impl Sync for RwLock {} #[cfg_attr(not(test), rustc_diagnostic_item = "RwLockReadGuard")] pub struct RwLockReadGuard<'a, T: ?Sized + 'a> { // NB: we use a pointer instead of `&'a T` to avoid `noalias` violations, because a - // `Ref` argument doesn't hold immutability for its whole scope, only until it drops. + // `RwLockReadGuard` argument doesn't hold immutability for its whole scope, only until it drops. // `NonNull` is also covariant over `T`, just like we would have with `&T`. `NonNull` // is preferable over `const* T` to allow for niche optimization. data: NonNull, @@ -144,6 +146,63 @@ impl !Send for RwLockWriteGuard<'_, T> {} #[stable(feature = "rwlock_guard_sync", since = "1.23.0")] unsafe impl Sync for RwLockWriteGuard<'_, T> {} +/// RAII structure used to release the shared read access of a lock when +/// dropped, which can point to a subfield of the protected data. +/// +/// This structure is created by the [`map`] and [`try_map`] methods +/// on [`RwLockReadGuard`]. +/// +/// [`map`]: RwLockReadGuard::map +/// [`try_map`]: RwLockReadGuard::try_map +#[must_use = "if unused the RwLock will immediately unlock"] +#[must_not_suspend = "holding a MappedRwLockReadGuard across suspend \ + points can cause deadlocks, delays, \ + and cause Futures to not implement `Send`"] +#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[clippy::has_significant_drop] +pub struct MappedRwLockReadGuard<'a, T: ?Sized + 'a> { + // NB: we use a pointer instead of `&'a T` to avoid `noalias` violations, because a + // `MappedRwLockReadGuard` argument doesn't hold immutability for its whole scope, only until it drops. + // `NonNull` is also covariant over `T`, just like we would have with `&T`. `NonNull` + // is preferable over `const* T` to allow for niche optimization. 
+ data: NonNull, + inner_lock: &'a sys::RwLock, +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl !Send for MappedRwLockReadGuard<'_, T> {} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +unsafe impl Sync for MappedRwLockReadGuard<'_, T> {} + +/// RAII structure used to release the exclusive write access of a lock when +/// dropped, which can point to a subfield of the protected data. +/// +/// This structure is created by the [`map`] and [`try_map`] methods +/// on [`RwLockWriteGuard`]. +/// +/// [`map`]: RwLockWriteGuard::map +/// [`try_map`]: RwLockWriteGuard::try_map +#[must_use = "if unused the RwLock will immediately unlock"] +#[must_not_suspend = "holding a MappedRwLockWriteGuard across suspend \ + points can cause deadlocks, delays, \ + and cause Future's to not implement `Send`"] +#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[clippy::has_significant_drop] +pub struct MappedRwLockWriteGuard<'a, T: ?Sized + 'a> { + data: NonNull, + inner_lock: &'a sys::RwLock, + poison_flag: &'a poison::Flag, + poison: poison::Guard, + _variance: PhantomData<&'a mut T>, +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl !Send for MappedRwLockWriteGuard<'_, T> {} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +unsafe impl Sync for MappedRwLockWriteGuard<'_, T> {} + impl RwLock { /// Creates a new instance of an `RwLock` which is unlocked. /// @@ -527,7 +586,10 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { // SAFETY: if and only if `lock.inner.write()` (or `lock.inner.try_write()`) has been // successfully called from the same thread before instantiating this object. unsafe fn new(lock: &'rwlock RwLock) -> LockResult> { - poison::map_result(lock.poison.guard(), |guard| RwLockWriteGuard { lock, poison: guard }) + poison::map_result(lock.poison.guard(), |guard| RwLockWriteGuard { + lock, + poison: guard, + }) } } @@ -559,12 +621,40 @@ impl fmt::Display for RwLockWriteGuard<'_, T> { } } +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Debug for MappedRwLockReadGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Display for MappedRwLockReadGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Debug for MappedRwLockWriteGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl fmt::Display for MappedRwLockWriteGuard<'_, T> { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + (**self).fmt(f) + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Deref for RwLockReadGuard<'_, T> { type Target = T; fn deref(&self) -> &T { - // SAFETY: the conditions of `RwLockGuard::new` were satisfied when created. + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when created. unsafe { self.data.as_ref() } } } @@ -587,6 +677,37 @@ impl DerefMut for RwLockWriteGuard<'_, T> { } } +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Deref for MappedRwLockReadGuard<'_, T> { + type Target = T; + + fn deref(&self) -> &T { + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. 
+ unsafe { self.data.as_ref() } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Deref for MappedRwLockWriteGuard<'_, T> { + type Target = T; + + fn deref(&self) -> &T { + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + unsafe { self.data.as_ref() } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl DerefMut for MappedRwLockWriteGuard<'_, T> { + fn deref_mut(&mut self) -> &mut T { + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + unsafe { self.data.as_mut() } + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Drop for RwLockReadGuard<'_, T> { fn drop(&mut self) { @@ -607,3 +728,234 @@ impl Drop for RwLockWriteGuard<'_, T> { } } } + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Drop for MappedRwLockReadGuard<'_, T> { + fn drop(&mut self) { + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + unsafe { + self.inner_lock.read_unlock(); + } + } +} + +#[unstable(feature = "mapped_lock_guards", issue = "none")] +impl Drop for MappedRwLockWriteGuard<'_, T> { + fn drop(&mut self) { + self.poison_flag.done(&self.poison); + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + unsafe { + self.inner_lock.write_unlock(); + } + } +} + +impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { + /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data, e.g. + /// an enum variant. + /// + /// The `RwLock` is already locked for reading, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `RwLockReadGuard::map(...)`. A method would interfere with methods of + /// the same name on the contents of the `RwLockReadGuard` used through + /// `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> + where + F: FnOnce(&T) -> &U, + U: ?Sized, + { + let orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&*orig)); + MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock } + } + + /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data. The + /// original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `RwLock` is already locked for reading, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `RwLockReadGuard::try_map(...)`. A method would interfere with methods + /// of the same name on the contents of the `RwLockReadGuard` used through + /// `Deref`. + #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&T) -> Option<&U>, + U: ?Sized, + { + let orig = ManuallyDrop::new(orig); + match f(&*orig).map(NonNull::from) { + Some(value) => Ok(MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} + +impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { + /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data, + /// e.g. an enum variant. 
+ /// + /// The `RwLock` is already locked for reading, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MappedRwLockReadGuard::map(...)`. A method would interfere with + /// methods of the same name on the contents of the `MappedRwLockReadGuard` + /// used through `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> + where + F: FnOnce(&T) -> &U, + U: ?Sized, + { + let orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&*orig)); + MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock } + } + + /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data. + /// The original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `RwLock` is already locked for reading, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MappedRwLockReadGuard::try_map(...)`. A method would interfere with + /// methods of the same name on the contents of the `MappedRwLockReadGuard` + /// used through `Deref`. + #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&T) -> Option<&U>, + U: ?Sized, + { + let orig = ManuallyDrop::new(orig); + match f(&*orig).map(NonNull::from) { + Some(value) => Ok(MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} + +impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { + /// Makes a [`MappedRwLockWriteGuard`] for a component of the borrowed data, e.g. + /// an enum variant. + /// + /// The `RwLock` is already locked for writing, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `RwLockWriteGuard::map(...)`. A method would interfere with methods of + /// the same name on the contents of the `RwLockWriteGuard` used through + /// `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> + where + F: FnOnce(&mut T) -> &mut U, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&mut *orig)); + MappedRwLockWriteGuard { + data: value, + inner_lock: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + } + } + + /// Makes a [`MappedRwLockWriteGuard`] for a component of the borrowed data. The + /// original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `RwLock` is already locked for writing, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `RwLockWriteGuard::try_map(...)`. A method would interfere with methods + /// of the same name on the contents of the `RwLockWriteGuard` used through + /// `Deref`. 
+ #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&mut T) -> Option<&mut U>, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + match f(&mut *orig).map(NonNull::from) { + Some(value) => Ok(MappedRwLockWriteGuard { + data: value, + inner_lock: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} + +impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { + /// Makes a [`MappedRwLockWriteGuard`] for a component of the borrowed data, + /// e.g. an enum variant. + /// + /// The `RwLock` is already locked for writing, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MappedRwLockWriteGuard::map(...)`. A method would interfere with + /// methods of the same name on the contents of the `MappedRwLockWriteGuard` + /// used through `Deref`. + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> + where + F: FnOnce(&mut T) -> &mut U, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + let value = NonNull::from(f(&mut *orig)); + MappedRwLockWriteGuard { + data: value, + inner_lock: orig.inner_lock, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + } + } + + /// Makes a [`MappedRwLockWriteGuard`] for a component of the borrowed data. + /// The original guard is returned as an `Err(...)` if the closure returns + /// `None`. + /// + /// The `RwLock` is already locked for writing, so this cannot fail. + /// + /// This is an associated function that needs to be used as + /// `MappedRwLockWriteGuard::try_map(...)`. A method would interfere with + /// methods of the same name on the contents of the `MappedRwLockWriteGuard` + /// used through `Deref`. + #[doc(alias = "filter_map")] + #[unstable(feature = "mapped_lock_guards", issue = "none")] + pub fn try_map(orig: Self, f: F) -> Result, Self> + where + F: FnOnce(&mut T) -> Option<&mut U>, + U: ?Sized, + { + let mut orig = ManuallyDrop::new(orig); + match f(&mut *orig).map(NonNull::from) { + Some(value) => Ok(MappedRwLockWriteGuard { + data: value, + inner_lock: orig.inner_lock, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + }), + None => Err(ManuallyDrop::into_inner(orig)), + } + } +} From 04f86304422cfc74a46ee7d53d04eb1e56572599 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Oct 2023 16:36:13 -0500 Subject: [PATCH 004/134] Add comment about `Mapped(Mutex|RwLockWrite)Guard` variance. 
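For context, here is a minimal usage sketch of the mapped guards whose variance these comments document. It assumes a nightly toolchain with the unstable `mapped_lock_guards` feature enabled; the `Account` type and `balance_mut` helper are illustrative names, not part of the patch.

    #![feature(mapped_lock_guards)]

    use std::sync::{MappedMutexGuard, Mutex, MutexGuard};

    struct Account {
        balance: u64,
        audit_log: Vec<String>,
    }

    // Lock the whole `Account`, but hand the caller access to `balance` only;
    // the mutex stays locked until the returned mapped guard is dropped.
    fn balance_mut(account: &Mutex<Account>) -> MappedMutexGuard<'_, u64> {
        MutexGuard::map(account.lock().unwrap(), |a| &mut a.balance)
    }

    fn main() {
        let account = Mutex::new(Account { balance: 0, audit_log: Vec::new() });
        *balance_mut(&account) += 100;
        account.lock().unwrap().audit_log.push("deposit: 100".to_owned());
        assert_eq!(account.lock().unwrap().balance, 100);
    }

The `PhantomData<&'a mut T>` field the new comments describe is what keeps `MappedMutexGuard` and `MappedRwLockWriteGuard` invariant over `T`, since `NonNull<T>` alone would be covariant.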
--- library/std/src/sync/mutex.rs | 28 +++++++++++-------- library/std/src/sync/rwlock.rs | 50 ++++++++++++++++++---------------- 2 files changed, 43 insertions(+), 35 deletions(-) diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index 18bea6d58fba1..a435fba4f1ea2 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -238,9 +238,13 @@ unsafe impl Sync for MutexGuard<'_, T> {} #[must_not_suspend = "holding a MappedMutexGuard across suspend \ points can cause deadlocks, delays, \ and cause Futures to not implement `Send`"] -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] #[clippy::has_significant_drop] pub struct MappedMutexGuard<'a, T: ?Sized + 'a> { + // NB: we use a pointer instead of `&'a mut T` to avoid `noalias` violations, because a + // `MappedMutexGuard` argument doesn't hold uniqueness for its whole scope, only until it drops. + // `NonNull` is covariant over `T`, so we add a `PhantomData<&'a mut T>` field + // below for the correct variance over `T` (invariance). data: NonNull, inner: &'a sys::Mutex, poison_flag: &'a poison::Flag, @@ -248,9 +252,9 @@ pub struct MappedMutexGuard<'a, T: ?Sized + 'a> { _variance: PhantomData<&'a mut T>, } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl !Send for MappedMutexGuard<'_, T> {} -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] unsafe impl Sync for MappedMutexGuard<'_, T> {} impl Mutex { @@ -602,7 +606,7 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { /// This is an associated function that needs to be used as /// `MutexGuard::map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, @@ -629,7 +633,7 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { /// `MutexGuard::try_map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. 
#[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, @@ -649,7 +653,7 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Deref for MappedMutexGuard<'_, T> { type Target = T; @@ -658,14 +662,14 @@ impl Deref for MappedMutexGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl DerefMut for MappedMutexGuard<'_, T> { fn deref_mut(&mut self) -> &mut T { unsafe { self.data.as_mut() } } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Drop for MappedMutexGuard<'_, T> { #[inline] fn drop(&mut self) { @@ -676,14 +680,14 @@ impl Drop for MappedMutexGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Debug for MappedMutexGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fmt::Debug::fmt(&**self, f) } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Display for MappedMutexGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (**self).fmt(f) @@ -699,7 +703,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// This is an associated function that needs to be used as /// `MutexGuard::map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, @@ -726,7 +730,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// `MutexGuard::try_map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. 
#[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, diff --git a/library/std/src/sync/rwlock.rs b/library/std/src/sync/rwlock.rs index 717f40db4c1c6..def0c8a16c7ce 100644 --- a/library/std/src/sync/rwlock.rs +++ b/library/std/src/sync/rwlock.rs @@ -158,7 +158,7 @@ unsafe impl Sync for RwLockWriteGuard<'_, T> {} #[must_not_suspend = "holding a MappedRwLockReadGuard across suspend \ points can cause deadlocks, delays, \ and cause Futures to not implement `Send`"] -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] #[clippy::has_significant_drop] pub struct MappedRwLockReadGuard<'a, T: ?Sized + 'a> { // NB: we use a pointer instead of `&'a T` to avoid `noalias` violations, because a @@ -169,10 +169,10 @@ pub struct MappedRwLockReadGuard<'a, T: ?Sized + 'a> { inner_lock: &'a sys::RwLock, } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl !Send for MappedRwLockReadGuard<'_, T> {} -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] unsafe impl Sync for MappedRwLockReadGuard<'_, T> {} /// RAII structure used to release the exclusive write access of a lock when @@ -187,9 +187,13 @@ unsafe impl Sync for MappedRwLockReadGuard<'_, T> {} #[must_not_suspend = "holding a MappedRwLockWriteGuard across suspend \ points can cause deadlocks, delays, \ and cause Future's to not implement `Send`"] -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] #[clippy::has_significant_drop] pub struct MappedRwLockWriteGuard<'a, T: ?Sized + 'a> { + // NB: we use a pointer instead of `&'a mut T` to avoid `noalias` violations, because a + // `MappedRwLockWriteGuard` argument doesn't hold uniqueness for its whole scope, only until it drops. + // `NonNull` is covariant over `T`, so we add a `PhantomData<&'a mut T>` field + // below for the correct variance over `T` (invariance). 
data: NonNull, inner_lock: &'a sys::RwLock, poison_flag: &'a poison::Flag, @@ -197,10 +201,10 @@ pub struct MappedRwLockWriteGuard<'a, T: ?Sized + 'a> { _variance: PhantomData<&'a mut T>, } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl !Send for MappedRwLockWriteGuard<'_, T> {} -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] unsafe impl Sync for MappedRwLockWriteGuard<'_, T> {} impl RwLock { @@ -621,28 +625,28 @@ impl fmt::Display for RwLockWriteGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Debug for MappedRwLockReadGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (**self).fmt(f) } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Display for MappedRwLockReadGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (**self).fmt(f) } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Debug for MappedRwLockWriteGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (**self).fmt(f) } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl fmt::Display for MappedRwLockWriteGuard<'_, T> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { (**self).fmt(f) @@ -677,7 +681,7 @@ impl DerefMut for RwLockWriteGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Deref for MappedRwLockReadGuard<'_, T> { type Target = T; @@ -688,7 +692,7 @@ impl Deref for MappedRwLockReadGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Deref for MappedRwLockWriteGuard<'_, T> { type Target = T; @@ -699,7 +703,7 @@ impl Deref for MappedRwLockWriteGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl DerefMut for MappedRwLockWriteGuard<'_, T> { fn deref_mut(&mut self) -> &mut T { // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard @@ -729,7 +733,7 @@ impl Drop for RwLockWriteGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Drop for MappedRwLockReadGuard<'_, T> { fn drop(&mut self) { // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard @@ -740,7 +744,7 @@ impl Drop for MappedRwLockReadGuard<'_, T> { } } -#[unstable(feature = "mapped_lock_guards", issue = "none")] +#[unstable(feature = "mapped_lock_guards", issue = "117108")] impl Drop for MappedRwLockWriteGuard<'_, T> { fn drop(&mut self) { self.poison_flag.done(&self.poison); @@ -762,7 +766,7 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { /// `RwLockReadGuard::map(...)`. A method would interfere with methods of /// the same name on the contents of the `RwLockReadGuard` used through /// `Deref`. 
- #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> where F: FnOnce(&T) -> &U, @@ -784,7 +788,7 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { /// of the same name on the contents of the `RwLockReadGuard` used through /// `Deref`. #[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&T) -> Option<&U>, @@ -808,7 +812,7 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { /// `MappedRwLockReadGuard::map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockReadGuard` /// used through `Deref`. - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> where F: FnOnce(&T) -> &U, @@ -830,7 +834,7 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { /// methods of the same name on the contents of the `MappedRwLockReadGuard` /// used through `Deref`. #[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&T) -> Option<&U>, @@ -854,7 +858,7 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { /// `RwLockWriteGuard::map(...)`. A method would interfere with methods of /// the same name on the contents of the `RwLockWriteGuard` used through /// `Deref`. - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, @@ -882,7 +886,7 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { /// of the same name on the contents of the `RwLockWriteGuard` used through /// `Deref`. #[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, @@ -912,7 +916,7 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { /// `MappedRwLockWriteGuard::map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockWriteGuard` /// used through `Deref`. - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, @@ -940,7 +944,7 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { /// methods of the same name on the contents of the `MappedRwLockWriteGuard` /// used through `Deref`. #[doc(alias = "filter_map")] - #[unstable(feature = "mapped_lock_guards", issue = "none")] + #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, From 20fa3a0d8f529b428eeacf7cce184345f200654d Mon Sep 17 00:00:00 2001 From: Zachary S Date: Mon, 23 Oct 2023 17:03:17 -0500 Subject: [PATCH 005/134] Fix Condvar typo, add public re-exports of Mapped*Guard. 
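With these re-exports, the mapped guard types can be named directly from `std::sync`. A small sketch under the same assumptions (nightly, `mapped_lock_guards` enabled); the `first_element` helper is illustrative only:

    #![feature(mapped_lock_guards)]

    use std::sync::{MappedRwLockReadGuard, RwLock, RwLockReadGuard};

    // On `None`, `try_map` hands the original guard back as `Err(..)`;
    // `.ok()` drops it, releasing the read lock immediately.
    fn first_element(v: &RwLock<Vec<i32>>) -> Option<MappedRwLockReadGuard<'_, i32>> {
        RwLockReadGuard::try_map(v.read().unwrap(), |v| v.first()).ok()
    }

    fn main() {
        let v = RwLock::new(vec![1, 2, 3]);
        assert_eq!(first_element(&v).as_deref(), Some(&1));
        assert!(first_element(&RwLock::new(Vec::new())).is_none());
    }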
--- library/std/src/sync/mod.rs | 4 ++++ library/std/src/sync/mutex.rs | 4 ++-- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/library/std/src/sync/mod.rs b/library/std/src/sync/mod.rs index f6a7c0a9f7549..ca62179e95b66 100644 --- a/library/std/src/sync/mod.rs +++ b/library/std/src/sync/mod.rs @@ -165,6 +165,8 @@ pub use core::sync::Exclusive; pub use self::barrier::{Barrier, BarrierWaitResult}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::condvar::{Condvar, WaitTimeoutResult}; +#[unstable(feature = "mapped_lock_guards", issue = "117108")] +pub use self::mutex::MappedMutexGuard; #[stable(feature = "rust1", since = "1.0.0")] pub use self::mutex::{Mutex, MutexGuard}; #[stable(feature = "rust1", since = "1.0.0")] @@ -172,6 +174,8 @@ pub use self::mutex::{Mutex, MutexGuard}; pub use self::once::{Once, OnceState, ONCE_INIT}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::poison::{LockResult, PoisonError, TryLockError, TryLockResult}; +#[unstable(feature = "mapped_lock_guards", issue = "117108")] +pub use self::rwlock::{MappedRwLockReadGuard, MappedRwLockWriteGuard}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::rwlock::{RwLock, RwLockReadGuard, RwLockWriteGuard}; diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index a435fba4f1ea2..5a419f4bd8465 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -221,7 +221,7 @@ unsafe impl Sync for MutexGuard<'_, T> {} /// of scope), the lock will be unlocked. /// /// The main difference between `MappedMutexGuard` and [`MutexGuard`] is that the -/// former cannot be used with [`CondVar`], since that +/// former cannot be used with [`Condvar`], since that /// could introduce soundness issues if the locked object is modified by another /// thread while the `Mutex` is unlocked. /// @@ -233,7 +233,7 @@ unsafe impl Sync for MutexGuard<'_, T> {} /// /// [`map`]: MutexGuard::map /// [`try_map`]: MutexGuard::try_map -/// [`CondVar`]: crate::sync::CondVar +/// [`Condvar`]: crate::sync::Condvar #[must_use = "if unused the Mutex will immediately unlock"] #[must_not_suspend = "holding a MappedMutexGuard across suspend \ points can cause deadlocks, delays, \ From 5533606fe0def63a62a3f75be4eb2d87081a05c4 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Tue, 24 Oct 2023 13:32:53 -0500 Subject: [PATCH 006/134] Add MappedMutexGuard and MappedRwLock*Guard tests. 
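The poisoning behaviour these tests pin down, shown standalone as a sketch that mirrors `test_mutex_arc_poison_mapped` (nightly, `mapped_lock_guards` enabled):

    #![feature(mapped_lock_guards)]

    use std::sync::{Arc, Mutex, MutexGuard};
    use std::thread;

    fn main() {
        let lock = Arc::new(Mutex::new((0u8, 0u8)));
        let lock2 = Arc::clone(&lock);

        // Panicking while the mapped guard is alive poisons the mutex,
        // exactly as it would with the original `MutexGuard`.
        let _ = thread::spawn(move || {
            let guard = MutexGuard::map(lock2.lock().unwrap(), |pair| &mut pair.0);
            assert_eq!(*guard, 1); // fails on purpose, unwinding with the guard held
        })
        .join();

        assert!(lock.lock().is_err());
        assert!(lock.is_poisoned());
    }

The mapped guard's `Drop` impl calls `poison_flag.done(..)` and unlocks the inner mutex, so a panic while it is held poisons the lock just like a plain `MutexGuard` would.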
--- library/std/src/sync/mutex/tests.rs | 30 +++++++- library/std/src/sync/rwlock/tests.rs | 105 ++++++++++++++++++++++++++- 2 files changed, 133 insertions(+), 2 deletions(-) diff --git a/library/std/src/sync/mutex/tests.rs b/library/std/src/sync/mutex/tests.rs index 1786a3c09ffb5..cf69813baa3c5 100644 --- a/library/std/src/sync/mutex/tests.rs +++ b/library/std/src/sync/mutex/tests.rs @@ -1,6 +1,6 @@ use crate::sync::atomic::{AtomicUsize, Ordering}; use crate::sync::mpsc::channel; -use crate::sync::{Arc, Condvar, Mutex}; +use crate::sync::{Arc, Condvar, MappedMutexGuard, Mutex, MutexGuard}; use crate::thread; struct Packet(Arc<(Mutex, Condvar)>); @@ -188,6 +188,21 @@ fn test_mutex_arc_poison() { assert!(arc.is_poisoned()); } +#[test] +fn test_mutex_arc_poison_mapped() { + let arc = Arc::new(Mutex::new(1)); + assert!(!arc.is_poisoned()); + let arc2 = arc.clone(); + let _ = thread::spawn(move || { + let lock = arc2.lock().unwrap(); + let lock = MutexGuard::map(lock, |val| val); + assert_eq!(*lock, 2); // deliberate assertion failure to poison the mutex + }) + .join(); + assert!(arc.lock().is_err()); + assert!(arc.is_poisoned()); +} + #[test] fn test_mutex_arc_nested() { // Tests nested mutexes and access @@ -236,3 +251,16 @@ fn test_mutex_unsized() { let comp: &[i32] = &[4, 2, 5]; assert_eq!(&*mutex.lock().unwrap(), comp); } + +#[test] +fn test_mapping_mapped_guard() { + let arr = [0; 4]; + let mut lock = Mutex::new(arr); + let guard = lock.lock().unwrap(); + let guard = MutexGuard::map(guard, |arr| &mut arr[..2]); + let mut guard = MappedMutexGuard::map(guard, |slice| &mut slice[1..]); + assert_eq!(guard.len(), 1); + guard[0] = 42; + drop(guard); + assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); +} diff --git a/library/std/src/sync/rwlock/tests.rs b/library/std/src/sync/rwlock/tests.rs index 1a9d3d3f12f3c..0a5eb7aac023f 100644 --- a/library/std/src/sync/rwlock/tests.rs +++ b/library/std/src/sync/rwlock/tests.rs @@ -1,6 +1,9 @@ use crate::sync::atomic::{AtomicUsize, Ordering}; use crate::sync::mpsc::channel; -use crate::sync::{Arc, RwLock, RwLockReadGuard, TryLockError}; +use crate::sync::{ + Arc, MappedRwLockReadGuard, MappedRwLockWriteGuard, RwLock, RwLockReadGuard, RwLockWriteGuard, + TryLockError, +}; use crate::thread; use rand::Rng; @@ -55,6 +58,19 @@ fn test_rw_arc_poison_wr() { assert!(arc.read().is_err()); } +#[test] +fn test_rw_arc_poison_mapped_w_r() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.write().unwrap(); + let _lock = RwLockWriteGuard::map(lock, |val| val); + panic!(); + }) + .join(); + assert!(arc.read().is_err()); +} + #[test] fn test_rw_arc_poison_ww() { let arc = Arc::new(RwLock::new(1)); @@ -69,6 +85,20 @@ fn test_rw_arc_poison_ww() { assert!(arc.is_poisoned()); } +#[test] +fn test_rw_arc_poison_mapped_w_w() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.write().unwrap(); + let _lock = RwLockWriteGuard::map(lock, |val| val); + panic!(); + }) + .join(); + assert!(arc.write().is_err()); + assert!(arc.is_poisoned()); +} + #[test] fn test_rw_arc_no_poison_rr() { let arc = Arc::new(RwLock::new(1)); @@ -81,6 +111,21 @@ fn test_rw_arc_no_poison_rr() { let lock = arc.read().unwrap(); assert_eq!(*lock, 1); } + +#[test] +fn test_rw_arc_no_poison_mapped_r_r() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = 
arc2.read().unwrap(); + let _lock = RwLockReadGuard::map(lock, |val| val); + panic!(); + }) + .join(); + let lock = arc.read().unwrap(); + assert_eq!(*lock, 1); +} + #[test] fn test_rw_arc_no_poison_rw() { let arc = Arc::new(RwLock::new(1)); @@ -94,6 +139,20 @@ fn test_rw_arc_no_poison_rw() { assert_eq!(*lock, 1); } +#[test] +fn test_rw_arc_no_poison_mapped_r_w() { + let arc = Arc::new(RwLock::new(1)); + let arc2 = arc.clone(); + let _: Result<(), _> = thread::spawn(move || { + let lock = arc2.read().unwrap(); + let _lock = RwLockReadGuard::map(lock, |val| val); + panic!(); + }) + .join(); + let lock = arc.write().unwrap(); + assert_eq!(*lock, 1); +} + #[test] fn test_rw_arc() { let arc = Arc::new(RwLock::new(0)); @@ -179,6 +238,16 @@ fn test_rwlock_try_write() { } drop(read_guard); + let mapped_read_guard = RwLockReadGuard::map(lock.read().unwrap(), |_| &()); + + let write_result = lock.try_write(); + match write_result { + Err(TryLockError::WouldBlock) => (), + Ok(_) => assert!(false, "try_write should not succeed while mapped_read_guard is in scope"), + Err(_) => assert!(false, "unexpected error"), + } + + drop(mapped_read_guard); } #[test] @@ -257,3 +326,37 @@ fn test_read_guard_covariance() { } drop(lock); } + +#[test] +fn test_mapped_read_guard_covariance() { + fn do_stuff<'a>(_: MappedRwLockReadGuard<'_, &'a i32>, _: &'a i32) {} + let j: i32 = 5; + let lock = RwLock::new((&j, &j)); + { + let i = 6; + let guard = lock.read().unwrap(); + let guard = RwLockReadGuard::map(guard, |(val, _val)| val); + do_stuff(guard, &i); + } + drop(lock); +} + +#[test] +fn test_mapping_mapped_guard() { + let arr = [0; 4]; + let mut lock = RwLock::new(arr); + let guard = lock.write().unwrap(); + let guard = RwLockWriteGuard::map(guard, |arr| &mut arr[..2]); + let mut guard = MappedRwLockWriteGuard::map(guard, |slice| &mut slice[1..]); + assert_eq!(guard.len(), 1); + guard[0] = 42; + drop(guard); + assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); + + let guard = lock.read().unwrap(); + let guard = RwLockReadGuard::map(guard, |arr| &arr[..2]); + let guard = MappedRwLockReadGuard::map(guard, |slice| &slice[1..]); + assert_eq!(*guard, [42]); + drop(guard); + assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); +} From 6aebcbee0a4d232f675eb380269bb405b0a1cc32 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Tue, 24 Oct 2023 14:09:04 -0500 Subject: [PATCH 007/134] fix MappedMutexGuard::(try_)map doc typo. --- library/std/src/sync/mutex.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index 5a419f4bd8465..1b3b4a3027cd7 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -701,7 +701,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// The `Mutex` is already locked, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MutexGuard::map(...)`. A method would interfere with methods of the + /// `MappedMutexGuard::map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> @@ -727,7 +727,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// The `Mutex` is already locked, so this cannot fail. /// /// This is an associated function that needs to be used as - /// `MutexGuard::try_map(...)`. A method would interfere with methods of the + /// `MappedMutexGuard::try_map(...)`. 
A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] From 3ef4b083ac03fd25339be009e3ae525adab30d78 Mon Sep 17 00:00:00 2001 From: Zachary S Date: Thu, 26 Oct 2023 13:33:54 -0500 Subject: [PATCH 008/134] Specify behavior if the closure passed to *Guard::*map panics. --- library/std/src/sync/mutex.rs | 78 +++++++++----- library/std/src/sync/mutex/tests.rs | 63 ++++++++++- library/std/src/sync/rwlock.rs | 156 ++++++++++++++++++++------- library/std/src/sync/rwlock/tests.rs | 136 +++++++++++++++++++++++ 4 files changed, 364 insertions(+), 69 deletions(-) diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index 1b3b4a3027cd7..1b05247bb7c7c 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -612,10 +612,14 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&mut *orig)); + // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + let data = NonNull::from(f(unsafe { &mut *orig.lock.data.get() })); + let orig = ManuallyDrop::new(orig); MappedMutexGuard { - data: value, + data, inner: &orig.lock.inner, poison_flag: &orig.lock.poison, poison: orig.poison.clone(), @@ -639,16 +643,23 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - match f(&mut *orig).map(NonNull::from) { - Some(value) => Ok(MappedMutexGuard { - data: value, - inner: &orig.lock.inner, - poison_flag: &orig.lock.poison, - poison: orig.poison.clone(), - _variance: PhantomData, - }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + match f(unsafe { &mut *orig.lock.data.get() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedMutexGuard { + data, + inner: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + }) + } + None => Err(orig), } } } @@ -704,15 +715,19 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// `MappedMutexGuard::map(...)`. A method would interfere with methods of the /// same name on the contents of the `MutexGuard` used through `Deref`. #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn map(orig: Self, f: F) -> MappedMutexGuard<'a, U> + pub fn map(mut orig: Self, f: F) -> MappedMutexGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&mut *orig)); + // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. 
If the closure panics, the guard will be dropped. + let data = NonNull::from(f(unsafe { orig.data.as_mut() })); + let orig = ManuallyDrop::new(orig); MappedMutexGuard { - data: value, + data, inner: orig.inner, poison_flag: orig.poison_flag, poison: orig.poison.clone(), @@ -731,21 +746,28 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { /// same name on the contents of the `MutexGuard` used through `Deref`. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn try_map(mut orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - match f(&mut *orig).map(NonNull::from) { - Some(value) => Ok(MappedMutexGuard { - data: value, - inner: orig.inner, - poison_flag: orig.poison_flag, - poison: orig.poison.clone(), - _variance: PhantomData, - }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + match f(unsafe { orig.data.as_mut() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedMutexGuard { + data, + inner: orig.inner, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + }) + } + None => Err(orig), } } } diff --git a/library/std/src/sync/mutex/tests.rs b/library/std/src/sync/mutex/tests.rs index cf69813baa3c5..19ec096c59334 100644 --- a/library/std/src/sync/mutex/tests.rs +++ b/library/std/src/sync/mutex/tests.rs @@ -1,6 +1,6 @@ use crate::sync::atomic::{AtomicUsize, Ordering}; use crate::sync::mpsc::channel; -use crate::sync::{Arc, Condvar, MappedMutexGuard, Mutex, MutexGuard}; +use crate::sync::{Arc, Condvar, MappedMutexGuard, Mutex, MutexGuard, TryLockError}; use crate::thread; struct Packet(Arc<(Mutex, Condvar)>); @@ -264,3 +264,64 @@ fn test_mapping_mapped_guard() { drop(guard); assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); } + +#[test] +fn panic_while_mapping_unlocked_poison() { + let lock = Mutex::new(()); + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.lock().unwrap(); + let _guard = MutexGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_lock() { + Ok(_) => panic!("panicking in a MutexGuard::map closure should poison the Mutex"), + Err(TryLockError::WouldBlock) => { + panic!("panicking in a MutexGuard::map closure should unlock the mutex") + } + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.lock().unwrap(); + let _guard = MutexGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_lock() { + Ok(_) => panic!("panicking in a MutexGuard::try_map closure should poison the Mutex"), + Err(TryLockError::WouldBlock) => { + panic!("panicking in a MutexGuard::try_map closure should unlock the mutex") + } + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.lock().unwrap(); + let guard = MutexGuard::map::<(), _>(guard, |val| val); + let _guard = MappedMutexGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_lock() { + Ok(_) => panic!("panicking in a MappedMutexGuard::map closure should poison the Mutex"), + 
Err(TryLockError::WouldBlock) => { + panic!("panicking in a MappedMutexGuard::map closure should unlock the mutex") + } + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.lock().unwrap(); + let guard = MutexGuard::map::<(), _>(guard, |val| val); + let _guard = MappedMutexGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_lock() { + Ok(_) => panic!("panicking in a MappedMutexGuard::try_map closure should poison the Mutex"), + Err(TryLockError::WouldBlock) => { + panic!("panicking in a MappedMutexGuard::try_map closure should unlock the mutex") + } + Err(TryLockError::Poisoned(_)) => {} + } + + drop(lock); +} diff --git a/library/std/src/sync/rwlock.rs b/library/std/src/sync/rwlock.rs index def0c8a16c7ce..5c4e4a784dbf5 100644 --- a/library/std/src/sync/rwlock.rs +++ b/library/std/src/sync/rwlock.rs @@ -766,15 +766,23 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { /// `RwLockReadGuard::map(...)`. A method would interfere with methods of /// the same name on the contents of the `RwLockReadGuard` used through /// `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> where F: FnOnce(&T) -> &U, U: ?Sized, { + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + let data = NonNull::from(f(unsafe { orig.data.as_ref() })); let orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&*orig)); - MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock } + MappedRwLockReadGuard { data, inner_lock: &orig.inner_lock } } /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data. The @@ -787,6 +795,10 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { /// `RwLockReadGuard::try_map(...)`. A method would interfere with methods /// of the same name on the contents of the `RwLockReadGuard` used through /// `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> @@ -794,10 +806,17 @@ impl<'a, T: ?Sized> RwLockReadGuard<'a, T> { F: FnOnce(&T) -> Option<&U>, U: ?Sized, { - let orig = ManuallyDrop::new(orig); - match f(&*orig).map(NonNull::from) { - Some(value) => Ok(MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. 
+ match f(unsafe { orig.data.as_ref() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedRwLockReadGuard { data, inner_lock: &orig.inner_lock }) + } + None => Err(orig), } } } @@ -812,15 +831,23 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { /// `MappedRwLockReadGuard::map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockReadGuard` /// used through `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockReadGuard<'a, U> where F: FnOnce(&T) -> &U, U: ?Sized, { + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + let data = NonNull::from(f(unsafe { orig.data.as_ref() })); let orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&*orig)); - MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock } + MappedRwLockReadGuard { data, inner_lock: &orig.inner_lock } } /// Makes a [`MappedRwLockReadGuard`] for a component of the borrowed data. @@ -833,6 +860,10 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { /// `MappedRwLockReadGuard::try_map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockReadGuard` /// used through `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will not be poisoned. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> @@ -840,10 +871,17 @@ impl<'a, T: ?Sized> MappedRwLockReadGuard<'a, T> { F: FnOnce(&T) -> Option<&U>, U: ?Sized, { - let orig = ManuallyDrop::new(orig); - match f(&*orig).map(NonNull::from) { - Some(value) => Ok(MappedRwLockReadGuard { data: value, inner_lock: &orig.inner_lock }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `RwLockReadGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + match f(unsafe { orig.data.as_ref() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedRwLockReadGuard { data, inner_lock: &orig.inner_lock }) + } + None => Err(orig), } } } @@ -858,16 +896,24 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { /// `RwLockWriteGuard::map(...)`. A method would interfere with methods of /// the same name on the contents of the `RwLockWriteGuard` used through /// `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. 
#[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&mut *orig)); + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + let data = NonNull::from(f(unsafe { &mut *orig.lock.data.get() })); + let orig = ManuallyDrop::new(orig); MappedRwLockWriteGuard { - data: value, + data, inner_lock: &orig.lock.inner, poison_flag: &orig.lock.poison, poison: orig.poison.clone(), @@ -885,6 +931,10 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { /// `RwLockWriteGuard::try_map(...)`. A method would interfere with methods /// of the same name on the contents of the `RwLockWriteGuard` used through /// `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] pub fn try_map(orig: Self, f: F) -> Result, Self> @@ -892,16 +942,23 @@ impl<'a, T: ?Sized> RwLockWriteGuard<'a, T> { F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - match f(&mut *orig).map(NonNull::from) { - Some(value) => Ok(MappedRwLockWriteGuard { - data: value, - inner_lock: &orig.lock.inner, - poison_flag: &orig.lock.poison, - poison: orig.poison.clone(), - _variance: PhantomData, - }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + match f(unsafe { &mut *orig.lock.data.get() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedRwLockWriteGuard { + data, + inner_lock: &orig.lock.inner, + poison_flag: &orig.lock.poison, + poison: orig.poison.clone(), + _variance: PhantomData, + }) + } + None => Err(orig), } } } @@ -916,16 +973,24 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { /// `MappedRwLockWriteGuard::map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockWriteGuard` /// used through `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn map(orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> + pub fn map(mut orig: Self, f: F) -> MappedRwLockWriteGuard<'a, U> where F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - let value = NonNull::from(f(&mut *orig)); + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. 
+ let data = NonNull::from(f(unsafe { orig.data.as_mut() })); + let orig = ManuallyDrop::new(orig); MappedRwLockWriteGuard { - data: value, + data, inner_lock: orig.inner_lock, poison_flag: orig.poison_flag, poison: orig.poison.clone(), @@ -943,23 +1008,34 @@ impl<'a, T: ?Sized> MappedRwLockWriteGuard<'a, T> { /// `MappedRwLockWriteGuard::try_map(...)`. A method would interfere with /// methods of the same name on the contents of the `MappedRwLockWriteGuard` /// used through `Deref`. + /// + /// # Panics + /// + /// If the closure panics, the guard will be dropped (unlocked) and the RwLock will be poisoned. #[doc(alias = "filter_map")] #[unstable(feature = "mapped_lock_guards", issue = "117108")] - pub fn try_map(orig: Self, f: F) -> Result, Self> + pub fn try_map(mut orig: Self, f: F) -> Result, Self> where F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { - let mut orig = ManuallyDrop::new(orig); - match f(&mut *orig).map(NonNull::from) { - Some(value) => Ok(MappedRwLockWriteGuard { - data: value, - inner_lock: orig.inner_lock, - poison_flag: orig.poison_flag, - poison: orig.poison.clone(), - _variance: PhantomData, - }), - None => Err(ManuallyDrop::into_inner(orig)), + // SAFETY: the conditions of `RwLockWriteGuard::new` were satisfied when the original guard + // was created, and have been upheld throughout `map` and/or `try_map`. + // The signature of the closure guarantees that it will not "leak" the lifetime of the reference + // passed to it. If the closure panics, the guard will be dropped. + match f(unsafe { orig.data.as_mut() }) { + Some(data) => { + let data = NonNull::from(data); + let orig = ManuallyDrop::new(orig); + Ok(MappedRwLockWriteGuard { + data, + inner_lock: orig.inner_lock, + poison_flag: orig.poison_flag, + poison: orig.poison.clone(), + _variance: PhantomData, + }) + } + None => Err(orig), } } } diff --git a/library/std/src/sync/rwlock/tests.rs b/library/std/src/sync/rwlock/tests.rs index 0a5eb7aac023f..9cc5e7a3a60f1 100644 --- a/library/std/src/sync/rwlock/tests.rs +++ b/library/std/src/sync/rwlock/tests.rs @@ -360,3 +360,139 @@ fn test_mapping_mapped_guard() { drop(guard); assert_eq!(*lock.get_mut().unwrap(), [0, 42, 0, 0]); } + +#[test] +fn panic_while_mapping_read_unlocked_no_poison() { + let lock = RwLock::new(()); + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.read().unwrap(); + let _guard = RwLockReadGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => {} + Err(TryLockError::WouldBlock) => { + panic!("panicking in a RwLockReadGuard::map closure should release the read lock") + } + Err(TryLockError::Poisoned(_)) => { + panic!("panicking in a RwLockReadGuard::map closure should not poison the RwLock") + } + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.read().unwrap(); + let _guard = RwLockReadGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => {} + Err(TryLockError::WouldBlock) => { + panic!("panicking in a RwLockReadGuard::try_map closure should release the read lock") + } + Err(TryLockError::Poisoned(_)) => { + panic!("panicking in a RwLockReadGuard::try_map closure should not poison the RwLock") + } + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.read().unwrap(); + let guard = RwLockReadGuard::map::<(), _>(guard, |val| val); + let _guard = MappedRwLockReadGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => {} + Err(TryLockError::WouldBlock) => { + panic!("panicking in a 
MappedRwLockReadGuard::map closure should release the read lock") + } + Err(TryLockError::Poisoned(_)) => { + panic!("panicking in a MappedRwLockReadGuard::map closure should not poison the RwLock") + } + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.read().unwrap(); + let guard = RwLockReadGuard::map::<(), _>(guard, |val| val); + let _guard = MappedRwLockReadGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => {} + Err(TryLockError::WouldBlock) => panic!( + "panicking in a MappedRwLockReadGuard::try_map closure should release the read lock" + ), + Err(TryLockError::Poisoned(_)) => panic!( + "panicking in a MappedRwLockReadGuard::try_map closure should not poison the RwLock" + ), + } + + drop(lock); +} + +#[test] +fn panic_while_mapping_write_unlocked_poison() { + let lock = RwLock::new(()); + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.write().unwrap(); + let _guard = RwLockWriteGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => panic!("panicking in a RwLockWriteGuard::map closure should poison the RwLock"), + Err(TryLockError::WouldBlock) => { + panic!("panicking in a RwLockWriteGuard::map closure should release the write lock") + } + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.write().unwrap(); + let _guard = RwLockWriteGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => { + panic!("panicking in a RwLockWriteGuard::try_map closure should poison the RwLock") + } + Err(TryLockError::WouldBlock) => { + panic!("panicking in a RwLockWriteGuard::try_map closure should release the write lock") + } + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.write().unwrap(); + let guard = RwLockWriteGuard::map::<(), _>(guard, |val| val); + let _guard = MappedRwLockWriteGuard::map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => { + panic!("panicking in a MappedRwLockWriteGuard::map closure should poison the RwLock") + } + Err(TryLockError::WouldBlock) => panic!( + "panicking in a MappedRwLockWriteGuard::map closure should release the write lock" + ), + Err(TryLockError::Poisoned(_)) => {} + } + + let _ = crate::panic::catch_unwind(|| { + let guard = lock.write().unwrap(); + let guard = RwLockWriteGuard::map::<(), _>(guard, |val| val); + let _guard = MappedRwLockWriteGuard::try_map::<(), _>(guard, |_| panic!()); + }); + + match lock.try_write() { + Ok(_) => panic!( + "panicking in a MappedRwLockWriteGuard::try_map closure should poison the RwLock" + ), + Err(TryLockError::WouldBlock) => panic!( + "panicking in a MappedRwLockWriteGuard::try_map closure should release the write lock" + ), + Err(TryLockError::Poisoned(_)) => {} + } + + drop(lock); +} From 06ec7a7611bfcfd228507988a325f229d425598f Mon Sep 17 00:00:00 2001 From: Zachary S Date: Tue, 5 Dec 2023 18:10:39 -0600 Subject: [PATCH 009/134] fmt --- library/std/src/sync/rwlock.rs | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/library/std/src/sync/rwlock.rs b/library/std/src/sync/rwlock.rs index 5c4e4a784dbf5..b985988d760c9 100644 --- a/library/std/src/sync/rwlock.rs +++ b/library/std/src/sync/rwlock.rs @@ -590,10 +590,7 @@ impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { // SAFETY: if and only if `lock.inner.write()` (or `lock.inner.try_write()`) has been // successfully called from the same thread before instantiating this object. 
unsafe fn new(lock: &'rwlock RwLock) -> LockResult> { - poison::map_result(lock.poison.guard(), |guard| RwLockWriteGuard { - lock, - poison: guard, - }) + poison::map_result(lock.poison.guard(), |guard| RwLockWriteGuard { lock, poison: guard }) } } From 4a1372251850c6cf62dfe7d380bc515763426107 Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Mon, 12 Feb 2024 15:26:59 +1100 Subject: [PATCH 010/134] Tweak delayed bug mentions. Now that we have both `delayed_bug` and `span_delayed_bug`, it makes sense to use the generic term "delayed bug" more. --- crates/hir-def/src/attr/builtin.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/hir-def/src/attr/builtin.rs b/crates/hir-def/src/attr/builtin.rs index b20ee9e5bf6c8..55b9a1dfdcb91 100644 --- a/crates/hir-def/src/attr/builtin.rs +++ b/crates/hir-def/src/attr/builtin.rs @@ -650,7 +650,7 @@ pub const INERT_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_attr!(TEST, rustc_regions, Normal, template!(Word), WarnFollowing), rustc_attr!( TEST, rustc_error, Normal, - template!(Word, List: "span_delayed_bug_from_inside_query"), WarnFollowingWordOnly + template!(Word, List: "delayed_bug_from_inside_query"), WarnFollowingWordOnly ), rustc_attr!(TEST, rustc_dump_user_args, Normal, template!(Word), WarnFollowing), rustc_attr!(TEST, rustc_evaluate_where_clauses, Normal, template!(Word), WarnFollowing), From a52acccc588b77520a003a217b91f47413371cac Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 23 Aug 2022 21:45:15 +0900 Subject: [PATCH 011/134] Implement `RustIrDatabase::impl_provided_for()` for `ChalkContext` --- crates/hir-ty/src/chalk_db.rs | 197 ++++++++++++++++++++++------------ 1 file changed, 130 insertions(+), 67 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index bd243518fc607..157f7ce462d91 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -1,7 +1,7 @@ //! The implementation of `RustIrDatabase` for Chalk, which provides information //! about the code that Chalk needs. use core::ops; -use std::{iter, sync::Arc}; +use std::{iter, ops::ControlFlow, sync::Arc}; use tracing::debug; @@ -136,81 +136,91 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { _ => self_ty_fp.as_ref().map(std::slice::from_ref).unwrap_or(&[]), }; - let trait_module = trait_.module(self.db.upcast()); - let type_module = match self_ty_fp { - Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())), - Some(TyFingerprint::ForeignType(type_id)) => { - Some(from_foreign_def_id(type_id).module(self.db.upcast())) - } - Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())), - _ => None, - }; - - let mut def_blocks = - [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; - - // Note: Since we're using impls_for_trait, only impls where the trait - // can be resolved should ever reach Chalk. impl_datum relies on that - // and will panic if the trait can't be resolved. 
- let in_deps = self.db.trait_impls_in_deps(self.krate); - let in_self = self.db.trait_impls_in_crate(self.krate); - - let block_impls = iter::successors(self.block, |&block_id| { - cov_mark::hit!(block_local_impls); - self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block()) - }) - .inspect(|&block_id| { - // make sure we don't search the same block twice - def_blocks.iter_mut().for_each(|block| { - if *block == Some(block_id) { - *block = None; - } - }); - }) - .filter_map(|block_id| self.db.trait_impls_in_block(block_id)); - let id_to_chalk = |id: hir_def::ImplId| id.to_chalk(self.db); + let mut result = vec![]; - match fps { - [] => { - debug!("Unrestricted search for {:?} impls...", trait_); - let mut f = |impls: &TraitImpls| { - result.extend(impls.for_trait(trait_).map(id_to_chalk)); - }; - f(&in_self); - in_deps.iter().map(ops::Deref::deref).for_each(&mut f); - block_impls.for_each(|it| f(&it)); - def_blocks - .into_iter() - .flatten() - .filter_map(|it| self.db.trait_impls_in_block(it)) - .for_each(|it| f(&it)); - } - fps => { - let mut f = - |impls: &TraitImpls| { - result.extend(fps.iter().flat_map(|fp| { - impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) - })); - }; - f(&in_self); - in_deps.iter().map(ops::Deref::deref).for_each(&mut f); - block_impls.for_each(|it| f(&it)); - def_blocks - .into_iter() - .flatten() - .filter_map(|it| self.db.trait_impls_in_block(it)) - .for_each(|it| f(&it)); - } - } + if fps.is_empty() { + debug!("Unrestricted search for {:?} impls...", trait_); + self.for_trait_impls(trait_, self_ty_fp, |impls| { + result.extend(impls.for_trait(trait_).map(id_to_chalk)); + ControlFlow::Continue(()) + }) + } else { + self.for_trait_impls(trait_, self_ty_fp, |impls| { + result.extend( + fps.iter().flat_map(move |fp| { + impls.for_trait_and_self_ty(trait_, *fp).map(id_to_chalk) + }), + ); + ControlFlow::Continue(()) + }) + }; debug!("impls_for_trait returned {} impls", result.len()); result } fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind) -> bool { debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind); - false // FIXME + + let trait_id = from_chalk_trait_id(auto_trait_id); + let self_ty = kind.clone().intern(Interner); + // We cannot filter impls by `TyFingerprint` for the following types: + let self_ty_fp = match kind { + // because we need to find any impl whose Self type is a ref with the same mutability + // (we don't care about the inner type). + TyKind::Ref(..) => None, + // because we need to find any impl whose Self type is a tuple with the same arity. + TyKind::Tuple(..) => None, + _ => TyFingerprint::for_trait_impl(&self_ty), + }; + + let check_kind = |impl_id| { + let impl_self_ty = self.db.impl_self_ty(impl_id); + // NOTE(skip_binders): it's safe to skip binders here as we don't check substitutions. 
+ let impl_self_kind = impl_self_ty.skip_binders().kind(Interner); + + match (kind, impl_self_kind) { + (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b, + (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b, + (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b, + (TyKind::Str, TyKind::Str) => true, + (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b, + (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b, + (TyKind::Slice(_), TyKind::Slice(_)) => true, + (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b, + (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) => id_a == id_b, + (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, + (TyKind::Never, TyKind::Never) => true, + (TyKind::Array(_, _), TyKind::Array(_, _)) => true, + (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b, + (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) => id_a == id_b, + (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { + id_a == id_b + } + (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b, + (TyKind::Error, TyKind::Error) => true, + (_, _) => false, + } + }; + + if let Some(fp) = self_ty_fp { + self.for_trait_impls(trait_id, self_ty_fp, |impls| { + match impls.for_trait_and_self_ty(trait_id, fp).any(check_kind) { + true => ControlFlow::Break(()), + false => ControlFlow::Continue(()), + } + }) + } else { + self.for_trait_impls(trait_id, self_ty_fp, |impls| { + match impls.for_trait(trait_id).any(check_kind) { + true => ControlFlow::Break(()), + false => ControlFlow::Continue(()), + } + }) + } + .is_break() } + fn associated_ty_value(&self, id: AssociatedTyValueId) -> Arc { self.db.associated_ty_value(self.krate, id) } @@ -489,6 +499,59 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { } } +impl<'a> ChalkContext<'a> { + fn for_trait_impls( + &self, + trait_id: hir_def::TraitId, + self_ty_fp: Option, + mut f: impl FnMut(&TraitImpls) -> ControlFlow<()>, + ) -> ControlFlow<()> { + // Note: Since we're using `impls_for_trait` and `impl_provided_for`, + // only impls where the trait can be resolved should ever reach Chalk. + // `impl_datum` relies on that and will panic if the trait can't be resolved. 
+ let in_deps = self.db.trait_impls_in_deps(self.krate); + let in_self = self.db.trait_impls_in_crate(self.krate); + let trait_module = trait_id.module(self.db.upcast()); + let type_module = match self_ty_fp { + Some(TyFingerprint::Adt(adt_id)) => Some(adt_id.module(self.db.upcast())), + Some(TyFingerprint::ForeignType(type_id)) => { + Some(from_foreign_def_id(type_id).module(self.db.upcast())) + } + Some(TyFingerprint::Dyn(trait_id)) => Some(trait_id.module(self.db.upcast())), + _ => None, + }; + + let mut def_blocks = + [trait_module.containing_block(), type_module.and_then(|it| it.containing_block())]; + + let block_impls = iter::successors(self.block, |&block_id| { + cov_mark::hit!(block_local_impls); + self.db.block_def_map(block_id).parent().and_then(|module| module.containing_block()) + }) + .inspect(|&block_id| { + // make sure we don't search the same block twice + def_blocks.iter_mut().for_each(|block| { + if *block == Some(block_id) { + *block = None; + } + }); + }) + .filter_map(|block_id| self.db.trait_impls_in_block(block_id)); + f(&in_self)?; + for it in in_deps.iter().map(ops::Deref::deref) { + f(it)?; + } + for it in block_impls { + f(&it)?; + } + for it in def_blocks.into_iter().flatten().filter_map(|it| self.db.trait_impls_in_block(it)) + { + f(&it)?; + } + ControlFlow::Continue(()) + } +} + impl chalk_ir::UnificationDatabase for &dyn HirDatabase { fn fn_def_variance( &self, From 4940017716b7d3eeba314a361a9e4afa64e85a95 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Tue, 23 Aug 2022 21:56:56 +0900 Subject: [PATCH 012/134] Rename `StructDatum` -> `AdtDatum` --- crates/hir-ty/src/chalk_db.rs | 20 ++++++++++---------- crates/hir-ty/src/db.rs | 6 +++--- crates/hir/src/lib.rs | 4 ++-- crates/ide-db/src/apply_change.rs | 2 +- crates/ide-db/src/lib.rs | 2 +- 5 files changed, 17 insertions(+), 17 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 157f7ce462d91..33ae07e3638e7 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -33,7 +33,7 @@ use crate::{ pub(crate) type AssociatedTyDatum = chalk_solve::rust_ir::AssociatedTyDatum; pub(crate) type TraitDatum = chalk_solve::rust_ir::TraitDatum; -pub(crate) type StructDatum = chalk_solve::rust_ir::AdtDatum; +pub(crate) type AdtDatum = chalk_solve::rust_ir::AdtDatum; pub(crate) type ImplDatum = chalk_solve::rust_ir::ImplDatum; pub(crate) type OpaqueTyDatum = chalk_solve::rust_ir::OpaqueTyDatum; @@ -53,8 +53,8 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { fn trait_datum(&self, trait_id: TraitId) -> Arc { self.db.trait_datum(self.krate, trait_id) } - fn adt_datum(&self, struct_id: AdtId) -> Arc { - self.db.struct_datum(self.krate, struct_id) + fn adt_datum(&self, struct_id: AdtId) -> Arc { + self.db.adt_datum(self.krate, struct_id) } fn adt_repr(&self, _struct_id: AdtId) -> Arc> { // FIXME: keep track of these @@ -712,13 +712,13 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { } } -pub(crate) fn struct_datum_query( +pub(crate) fn adt_datum_query( db: &dyn HirDatabase, krate: CrateId, - struct_id: AdtId, -) -> Arc { - debug!("struct_datum {:?}", struct_id); - let chalk_ir::AdtId(adt_id) = struct_id; + adt_id: AdtId, +) -> Arc { + debug!("adt_datum {:?}", adt_id); + let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); let upstream = adt_id.module(db.upcast()).krate() != krate; let where_clauses = { @@ -737,10 +737,10 @@ pub(crate) fn struct_datum_query( fields: Vec::new(), // FIXME 
add fields (only relevant for auto traits), }; let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; - let struct_datum = StructDatum { + let struct_datum = AdtDatum { // FIXME set ADT kind kind: rust_ir::AdtKind::Struct, - id: struct_id, + id: chalk_ir::AdtId(adt_id), binders: make_binders(db, &generic_params, struct_datum_bound), flags, }; diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index fbd366864a439..42313ff52b1fd 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -220,12 +220,12 @@ pub trait HirDatabase: DefDatabase + Upcast { trait_id: chalk_db::TraitId, ) -> sync::Arc; - #[salsa::invoke(chalk_db::struct_datum_query)] - fn struct_datum( + #[salsa::invoke(chalk_db::adt_datum_query)] + fn adt_datum( &self, krate: CrateId, struct_id: chalk_db::AdtId, - ) -> sync::Arc; + ) -> sync::Arc; #[salsa::invoke(chalk_db::impl_datum_query)] fn impl_datum( diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 08f7bb14caa3a..beaa6dd4d67cb 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -3798,9 +3798,9 @@ impl Type { // For non-phantom_data adts we check variants/fields as well as generic parameters TyKind::Adt(adt_id, substitution) - if !db.struct_datum(krate, *adt_id).flags.phantom_data => + if !db.adt_datum(krate, *adt_id).flags.phantom_data => { - let adt_datum = &db.struct_datum(krate, *adt_id); + let adt_datum = &db.adt_datum(krate, *adt_id); let adt_datum_bound = adt_datum.binders.clone().substitute(Interner, substitution); adt_datum_bound diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 296253aa1ee19..1a214ef0bf564 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -124,7 +124,7 @@ impl RootDatabase { hir::db::InternCoroutineQuery hir::db::AssociatedTyDataQuery hir::db::TraitDatumQuery - hir::db::StructDatumQuery + hir::db::AdtDatumQuery hir::db::ImplDatumQuery hir::db::FnDefDatumQuery hir::db::FnDefVarianceQuery diff --git a/crates/ide-db/src/lib.rs b/crates/ide-db/src/lib.rs index 2881748dd477e..d31dad514aa56 100644 --- a/crates/ide-db/src/lib.rs +++ b/crates/ide-db/src/lib.rs @@ -280,7 +280,7 @@ impl RootDatabase { // hir_db::InternCoroutineQuery hir_db::AssociatedTyDataQuery hir_db::TraitDatumQuery - hir_db::StructDatumQuery + hir_db::AdtDatumQuery hir_db::ImplDatumQuery hir_db::FnDefDatumQuery hir_db::FnDefVarianceQuery From 03340742ea42c00588707001aca8a05c9d846e08 Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 24 Aug 2022 01:55:08 +0900 Subject: [PATCH 013/134] Return ADT fields and `phantom_data` flag from `adt_datum_query()` --- crates/hir-ty/src/chalk_db.rs | 59 ++++++++++++++++++++++++++--------- 1 file changed, 44 insertions(+), 15 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 33ae07e3638e7..0fde0f661d74f 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -720,26 +720,55 @@ pub(crate) fn adt_datum_query( debug!("adt_datum {:?}", adt_id); let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); - let upstream = adt_id.module(db.upcast()).krate() != krate; - let where_clauses = { - let generic_params = generics(db.upcast(), adt_id.into()); - let bound_vars = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); - convert_where_clauses(db, adt_id.into(), &bound_vars) - }; + let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); + let 
where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); + + let phantom_data_id = db + .lang_item(krate, SmolStr::new_inline("phantom_data")) + .and_then(|item| item.as_struct()) + .map(|item| item.into()); let flags = rust_ir::AdtFlags { - upstream, - // FIXME set fundamental and phantom_data flags correctly + upstream: adt_id.module(db.upcast()).krate() != krate, + // FIXME set fundamental flags correctly fundamental: false, - phantom_data: false, + phantom_data: phantom_data_id == Some(adt_id), + }; + + let variant_id_to_fields = |id| { + let field_types = db.field_types(id); + let fields = id + .variant_data(db.upcast()) + .fields() + .iter() + .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) + .collect(); + rust_ir::AdtVariantDatum { fields } }; - // FIXME provide enum variants properly (for auto traits) - let variant = rust_ir::AdtVariantDatum { - fields: Vec::new(), // FIXME add fields (only relevant for auto traits), + + let (kind, variants) = match adt_id { + hir_def::AdtId::StructId(id) => { + (rust_ir::AdtKind::Struct, vec![variant_id_to_fields(id.into())]) + } + hir_def::AdtId::EnumId(id) => { + let variants = db + .enum_data(id) + .variants + .iter() + .map(|(local_id, _)| { + let variant_id = hir_def::EnumVariantId { parent: id, local_id }; + variant_id_to_fields(variant_id.into()) + }) + .collect(); + (rust_ir::AdtKind::Enum, variants) + } + hir_def::AdtId::UnionId(id) => { + (rust_ir::AdtKind::Union, vec![variant_id_to_fields(id.into())]) + } }; - let struct_datum_bound = rust_ir::AdtDatumBound { variants: vec![variant], where_clauses }; + + let struct_datum_bound = rust_ir::AdtDatumBound { variants, where_clauses }; let struct_datum = AdtDatum { - // FIXME set ADT kind - kind: rust_ir::AdtKind::Struct, + kind, id: chalk_ir::AdtId(adt_id), binders: make_binders(db, &generic_params, struct_datum_bound), flags, From 4829f591fbf6ce7e91394775c8be10cdf9291d1b Mon Sep 17 00:00:00 2001 From: Ryo Yoshida Date: Wed, 24 Aug 2022 03:27:59 +0900 Subject: [PATCH 014/134] Add test for auto trait bounds --- crates/hir-ty/src/chalk_db.rs | 7 ++-- crates/hir-ty/src/tests/traits.rs | 55 +++++++++++++++++++++++++++++++ 2 files changed, 57 insertions(+), 5 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 0fde0f661d74f..5039d51d70c00 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -724,7 +724,7 @@ pub(crate) fn adt_datum_query( let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); let phantom_data_id = db - .lang_item(krate, SmolStr::new_inline("phantom_data")) + .lang_item(krate, LangItem::PhantomData) .and_then(|item| item.as_struct()) .map(|item| item.into()); let flags = rust_ir::AdtFlags { @@ -754,10 +754,7 @@ pub(crate) fn adt_datum_query( .enum_data(id) .variants .iter() - .map(|(local_id, _)| { - let variant_id = hir_def::EnumVariantId { parent: id, local_id }; - variant_id_to_fields(variant_id.into()) - }) + .map(|&(variant_id, _)| variant_id_to_fields(variant_id.into())) .collect(); (rust_ir::AdtKind::Enum, variants) } diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index db14addaf185b..68cd6071ec79b 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4553,3 +4553,58 @@ fn foo() { "#, ); } + +#[test] +fn auto_trait_bound() { + check_types( + r#" +//- minicore: sized +auto trait Send {} +impl !Send for *const T {} + +struct Yes; +trait IsSend { const 
IS_SEND: Yes; } +impl IsSend for T { const IS_SEND: Yes = Yes; } + +struct Struct(T); +enum Enum { A, B(T) } +union Union { t: T } + +#[lang = "phantom_data"] +struct PhantomData; + +fn f() { + T::IS_SEND; + //^^^^^^^^^^Yes + U::IS_SEND; + //^^^^^^^^^^{unknown} + <*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^{unknown} + Struct::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^Yes + Struct::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^{unknown} + Struct::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + Enum::::IS_SEND; + //^^^^^^^^^^^^^^^^^^Yes + Enum::::IS_SEND; + //^^^^^^^^^^^^^^^^^^{unknown} + Enum::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + Union::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^Yes + Union::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^{unknown} + Union::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + PhantomData::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^Yes + PhantomData::::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + PhantomData::<*const T>::IS_SEND; + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} +} +"#, + ); +} From 0eca3ef93eacd34e47d2893a3777e1d55593823c Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 13:35:43 +0100 Subject: [PATCH 015/134] Fix coerce_unsize_generic test --- crates/hir-ty/src/tests/coercion.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index d56b15b9b741e..bfb8df61a3330 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -536,7 +536,7 @@ fn test() { #[test] fn coerce_unsize_generic() { - check( + check_no_mismatches( r#" //- minicore: coerce_unsized struct Foo { t: T }; @@ -544,9 +544,7 @@ struct Bar(Foo); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; - //^^^^^^^^^^^^^^^^^^^^^ expected &Foo<[usize]>, got &Foo<[i32; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); - //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &Bar<[usize]>, got &Bar<[i32; 3]> } "#, ); From 9d18e197bcbd732a349ff8b51ecf6532d7ea822a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Wed, 14 Feb 2024 14:01:23 +0100 Subject: [PATCH 016/134] Filter out `{unknown}` types in `adt_datum_quqery` --- crates/hir-def/src/data/adt.rs | 2 +- crates/hir-ty/src/chalk_db.rs | 69 ++++++++++++++++++++-------------- 2 files changed, 42 insertions(+), 29 deletions(-) diff --git a/crates/hir-def/src/data/adt.rs b/crates/hir-def/src/data/adt.rs index 540f643ae7d91..f07b1257662d5 100644 --- a/crates/hir-def/src/data/adt.rs +++ b/crates/hir-def/src/data/adt.rs @@ -40,7 +40,7 @@ pub struct StructData { } bitflags! { - #[derive(Debug, Clone, PartialEq, Eq)] + #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub struct StructFlags: u8 { const NO_FLAGS = 0; /// Indicates whether the struct is `PhantomData`. 
diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 5039d51d70c00..49393f05a1ab9 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -10,9 +10,10 @@ use chalk_solve::rust_ir::{self, OpaqueTyDatumBound, WellKnownTrait}; use base_db::CrateId; use hir_def::{ + data::adt::StructFlags, hir::Movability, lang_item::{LangItem, LangItemTarget}, - AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, + AssocItemId, BlockId, GenericDefId, HasModule, ItemContainerId, Lookup, TypeAliasId, VariantId, }; use hir_expand::name::name; @@ -159,6 +160,7 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { debug!("impls_for_trait returned {} impls", result.len()); result } + fn impl_provided_for(&self, auto_trait_id: TraitId, kind: &chalk_ir::TyKind) -> bool { debug!("impl_provided_for {:?}, {:?}", auto_trait_id, kind); @@ -183,22 +185,22 @@ impl chalk_solve::RustIrDatabase for ChalkContext<'_> { (TyKind::Adt(id_a, _), TyKind::Adt(id_b, _)) => id_a == id_b, (TyKind::AssociatedType(id_a, _), TyKind::AssociatedType(id_b, _)) => id_a == id_b, (TyKind::Scalar(scalar_a), TyKind::Scalar(scalar_b)) => scalar_a == scalar_b, - (TyKind::Str, TyKind::Str) => true, + (TyKind::Error, TyKind::Error) + | (TyKind::Str, TyKind::Str) + | (TyKind::Slice(_), TyKind::Slice(_)) + | (TyKind::Never, TyKind::Never) + | (TyKind::Array(_, _), TyKind::Array(_, _)) => true, (TyKind::Tuple(arity_a, _), TyKind::Tuple(arity_b, _)) => arity_a == arity_b, (TyKind::OpaqueType(id_a, _), TyKind::OpaqueType(id_b, _)) => id_a == id_b, - (TyKind::Slice(_), TyKind::Slice(_)) => true, (TyKind::FnDef(id_a, _), TyKind::FnDef(id_b, _)) => id_a == id_b, - (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) => id_a == id_b, - (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, - (TyKind::Never, TyKind::Never) => true, - (TyKind::Array(_, _), TyKind::Array(_, _)) => true, + (TyKind::Ref(id_a, _, _), TyKind::Ref(id_b, _, _)) + | (TyKind::Raw(id_a, _), TyKind::Raw(id_b, _)) => id_a == id_b, (TyKind::Closure(id_a, _), TyKind::Closure(id_b, _)) => id_a == id_b, - (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) => id_a == id_b, - (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { + (TyKind::Coroutine(id_a, _), TyKind::Coroutine(id_b, _)) + | (TyKind::CoroutineWitness(id_a, _), TyKind::CoroutineWitness(id_b, _)) => { id_a == id_b } (TyKind::Foreign(id_a), TyKind::Foreign(id_b)) => id_a == id_b, - (TyKind::Error, TyKind::Error) => true, (_, _) => false, } }; @@ -653,7 +655,7 @@ pub(crate) fn trait_datum_query( coinductive: false, // only relevant for Chalk testing // FIXME: set these flags correctly marker: false, - fundamental: false, + fundamental: trait_data.fundamental, }; let where_clauses = convert_where_clauses(db, trait_.into(), &bound_vars); let associated_ty_ids = trait_data.associated_types().map(to_assoc_type_id).collect(); @@ -715,33 +717,44 @@ fn lang_item_from_well_known_trait(trait_: WellKnownTrait) -> LangItem { pub(crate) fn adt_datum_query( db: &dyn HirDatabase, krate: CrateId, - adt_id: AdtId, + chalk_ir::AdtId(adt_id): AdtId, ) -> Arc { debug!("adt_datum {:?}", adt_id); - let chalk_ir::AdtId(adt_id) = adt_id; let generic_params = generics(db.upcast(), adt_id.into()); let bound_vars_subst = generic_params.bound_vars_subst(db, DebruijnIndex::INNERMOST); let where_clauses = convert_where_clauses(db, adt_id.into(), &bound_vars_subst); - let phantom_data_id = db - .lang_item(krate, LangItem::PhantomData) - 
.and_then(|item| item.as_struct()) - .map(|item| item.into()); + let (fundamental, phantom_data) = match adt_id { + hir_def::AdtId::StructId(s) => { + let flags = db.struct_data(s).flags; + ( + flags.contains(StructFlags::IS_FUNDAMENTAL), + flags.contains(StructFlags::IS_PHANTOM_DATA), + ) + } + // FIXME set fundamental flags correctly + hir_def::AdtId::UnionId(_) => (false, false), + hir_def::AdtId::EnumId(_) => (false, false), + }; let flags = rust_ir::AdtFlags { upstream: adt_id.module(db.upcast()).krate() != krate, - // FIXME set fundamental flags correctly - fundamental: false, - phantom_data: phantom_data_id == Some(adt_id), + fundamental, + phantom_data, }; - let variant_id_to_fields = |id| { - let field_types = db.field_types(id); - let fields = id - .variant_data(db.upcast()) - .fields() - .iter() - .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) - .collect(); + let variant_id_to_fields = |id: VariantId| { + let variant_data = &id.variant_data(db.upcast()); + let fields = if variant_data.fields().is_empty() { + vec![] + } else { + let field_types = db.field_types(id); + variant_data + .fields() + .iter() + .map(|(idx, _)| field_types[idx].clone().substitute(Interner, &bound_vars_subst)) + .filter(|it| !it.contains_unknown()) + .collect() + }; rust_ir::AdtVariantDatum { fields } }; From d33d8675d0fff6f9652790baa5346ac478e528ea Mon Sep 17 00:00:00 2001 From: Nicholas Nethercote Date: Wed, 14 Feb 2024 20:12:05 +1100 Subject: [PATCH 017/134] Add `ErrorGuaranteed` to `ast::LitKind::Err`, `token::LitKind::Err`. This mostly works well, and eliminates a couple of delayed bugs. One annoying thing is that we should really also add an `ErrorGuaranteed` to `proc_macro::bridge::LitKind::Err`. But that's difficult because `proc_macro` doesn't have access to `ErrorGuaranteed`, so we have to fake it. 
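For context, here is a minimal, self-contained sketch of the shape of the problem. The types below are simplified stand-ins for illustration only, not the real `rustc_ast`, `rustc_span`, or `proc_macro::bridge` definitions:

```rust
/// Zero-sized token proving that a diagnostic has already been emitted.
#[derive(Clone, Copy, Debug)]
pub struct ErrorGuaranteed(());

/// AST-side literal kind: the `Err` variant now carries the proof token.
#[derive(Debug)]
pub enum AstLitKind {
    Integer,
    Str,
    Err(ErrorGuaranteed),
}

/// Bridge-side literal kind: `proc_macro` cannot name `ErrorGuaranteed`,
/// so its `Err` variant stays bare and the token has to be re-created
/// ("faked") when converting back on the compiler side.
#[derive(Debug)]
pub enum BridgeLitKind {
    Integer,
    Str,
    Err,
}

fn to_bridge(kind: &AstLitKind) -> BridgeLitKind {
    match kind {
        AstLitKind::Integer => BridgeLitKind::Integer,
        AstLitKind::Str => BridgeLitKind::Str,
        // The guarantee is dropped here; it cannot cross the bridge.
        AstLitKind::Err(_guar) => BridgeLitKind::Err,
    }
}

fn main() {
    let guar = ErrorGuaranteed(());
    println!("{:?}", to_bridge(&AstLitKind::Err(guar)));
}
```

Losing the guarantee at the bridge boundary is also why the rust-analyzer proc-macro server below falls back to a dummy literal kind instead of `LitKind::Err`.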
--- crates/proc-macro-srv/src/server/rust_analyzer_span.rs | 4 ++-- crates/proc-macro-srv/src/server/token_id.rs | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index c7c7bea994101..8a9d52a37a2f3 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -72,7 +72,7 @@ impl server::FreeFunctions for RaSpanServer { ) -> Result, ()> { // FIXME: keep track of LitKind and Suffix Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, s), suffix: None, span: self.call_site, @@ -202,7 +202,7 @@ impl server::TokenStream for RaSpanServer { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { // FIXME: handle literal kinds - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, &lit.text), // FIXME: handle suffixes suffix: None, diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index edbdc67b482ff..15a9e0deae44f 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -64,7 +64,7 @@ impl server::FreeFunctions for TokenIdServer { ) -> Result, ()> { // FIXME: keep track of LitKind and Suffix Ok(bridge::Literal { - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, s), suffix: None, span: self.call_site, @@ -187,7 +187,7 @@ impl server::TokenStream for TokenIdServer { tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { // FIXME: handle literal kinds - kind: bridge::LitKind::Err, + kind: bridge::LitKind::Integer, // dummy symbol: Symbol::intern(self.interner, &lit.text), // FIXME: handle suffixes suffix: None, From e3450ad19b6b5a6a67ba5bfdae53ac826953ba2a Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Sun, 12 Nov 2023 05:58:42 +0000 Subject: [PATCH 018/134] Fix bootstrap issue with git on MSYS src/bootstrap runs git to find the root of the repository, but this can go awry when building in MSYS for the mingw target. This is because MSYS git returns a unix-y path, but bootstrap requires a Windows-y path. --- src/bootstrap/src/core/config/config.rs | 31 ++++++++++++++++--------- 1 file changed, 20 insertions(+), 11 deletions(-) diff --git a/src/bootstrap/src/core/config/config.rs b/src/bootstrap/src/core/config/config.rs index c0dd1e1208484..6fba6b20ace63 100644 --- a/src/bootstrap/src/core/config/config.rs +++ b/src/bootstrap/src/core/config/config.rs @@ -1227,12 +1227,16 @@ impl Config { // Infer the rest of the configuration. // Infer the source directory. This is non-trivial because we want to support a downloaded bootstrap binary, - // running on a completely machine from where it was compiled. + // running on a completely different machine from where it was compiled. let mut cmd = Command::new("git"); - // NOTE: we cannot support running from outside the repository because the only path we have available - // is set at compile time, which can be wrong if bootstrap was downloaded from source. + // NOTE: we cannot support running from outside the repository because the only other path we have available + // is set at compile time, which can be wrong if bootstrap was downloaded rather than compiled locally. 
// We still support running outside the repository if we find we aren't in a git directory. - cmd.arg("rev-parse").arg("--show-toplevel"); + + // NOTE: We get a relative path from git to work around an issue on MSYS/mingw. If we used an absolute path, + // and end up using MSYS's git rather than git-for-windows, we would get a unix-y MSYS path. But as bootstrap + // has already been (kinda-cross-)compiled to Windows land, we require a normal Windows path. + cmd.arg("rev-parse").arg("--show-cdup"); // Discard stderr because we expect this to fail when building from a tarball. let output = cmd .stderr(std::process::Stdio::null()) @@ -1240,15 +1244,20 @@ impl Config { .ok() .and_then(|output| if output.status.success() { Some(output) } else { None }); if let Some(output) = output { - let git_root = String::from_utf8(output.stdout).unwrap(); - // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes. - let git_root = PathBuf::from(git_root.trim()).canonicalize().unwrap(); + let git_root_relative = String::from_utf8(output.stdout).unwrap(); + // We need to canonicalize this path to make sure it uses backslashes instead of forward slashes, + // and to resolve any relative components. + let git_root = env::current_dir() + .unwrap() + .join(PathBuf::from(git_root_relative.trim())) + .canonicalize() + .unwrap(); let s = git_root.to_str().unwrap(); // Bootstrap is quite bad at handling /? in front of paths - let src = match s.strip_prefix("\\\\?\\") { + let git_root = match s.strip_prefix("\\\\?\\") { Some(p) => PathBuf::from(p), - None => PathBuf::from(git_root), + None => git_root, }; // If this doesn't have at least `stage0.json`, we guessed wrong. This can happen when, // for example, the build directory is inside of another unrelated git directory. @@ -1256,8 +1265,8 @@ impl Config { // // NOTE: this implies that downloadable bootstrap isn't supported when the build directory is outside // the source directory. We could fix that by setting a variable from all three of python, ./x, and x.ps1. - if src.join("src").join("stage0.json").exists() { - config.src = src; + if git_root.join("src").join("stage0.json").exists() { + config.src = git_root; } } else { // We're building from a tarball, not git sources. From d846586bc9e0c056a92f8394b6209782238dc5e2 Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 1 Feb 2024 20:38:42 -0500 Subject: [PATCH 019/134] fix: Support multiple tab stops in completions in VSCode Uses the native VSCode support for `SnippetTextEdit`s, but in a semi-hacky way as it's not fully supported yet. 
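In rough terms, the approach is the sketch below (assuming only the stock `vscode` extension API; the actual conversion from the LSP edits lives in `commands.ts` and `snippets.ts` in this patch): edits whose text contains tab stops are wrapped in `vscode.SnippetTextEdit` so the editor places the cursors, everything else stays a plain `TextEdit`, and both kinds are applied through a single `WorkspaceEdit`.

```typescript
import * as vscode from "vscode";

// Heuristic for `$0`, `$1`, `{1:placeholder}`-style tab stops; it mirrors the
// `hasSnippet` check in `snippets.ts` below.
function looksLikeSnippet(text: string): boolean {
    return /\$\d+|\{\d+:[^}]*\}/.test(text);
}

// Minimal sketch, not the full implementation: wrap snippet edits, keep plain
// edits as-is, and apply everything in one workspace edit.
export async function applyEditsWithTabStops(
    uri: vscode.Uri,
    edits: { range: vscode.Range; newText: string }[],
): Promise<boolean> {
    const wsEdit = new vscode.WorkspaceEdit();
    wsEdit.set(
        uri,
        edits.map((e) =>
            looksLikeSnippet(e.newText)
                ? new vscode.SnippetTextEdit(e.range, new vscode.SnippetString(e.newText))
                : vscode.TextEdit.replace(e.range, e.newText),
        ),
    );
    return vscode.workspace.applyEdit(wsEdit);
}
```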
--- editors/code/src/commands.ts | 66 +++++++++++++++++++++++++-- editors/code/src/snippets.ts | 87 +++++++++++++++--------------------- 2 files changed, 98 insertions(+), 55 deletions(-) diff --git a/editors/code/src/commands.ts b/editors/code/src/commands.ts index 3d33d255ad491..849fae5cf24b1 100644 --- a/editors/code/src/commands.ts +++ b/editors/code/src/commands.ts @@ -4,7 +4,11 @@ import * as ra from "./lsp_ext"; import * as path from "path"; import type { Ctx, Cmd, CtxInit } from "./ctx"; -import { applySnippetWorkspaceEdit, applySnippetTextEdits } from "./snippets"; +import { + applySnippetWorkspaceEdit, + applySnippetTextEdits, + type SnippetTextDocumentEdit, +} from "./snippets"; import { spawnSync } from "child_process"; import { type RunnableQuickPick, selectRunnable, createTask, createArgs } from "./run"; import { AstInspector } from "./ast_inspector"; @@ -1006,7 +1010,6 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { return; } const itemEdit = item.edit; - const edit = await client.protocol2CodeConverter.asWorkspaceEdit(itemEdit); // filter out all text edits and recreate the WorkspaceEdit without them so we can apply // snippet edits on our own const lcFileSystemEdit = { @@ -1017,16 +1020,71 @@ export function resolveCodeAction(ctx: CtxInit): Cmd { lcFileSystemEdit, ); await vscode.workspace.applyEdit(fileSystemEdit); - await applySnippetWorkspaceEdit(edit); + + // replace all text edits so that we can convert snippet text edits into `vscode.SnippetTextEdit`s + // FIXME: this is a workaround until vscode-languageclient supports doing the SnippeTextEdit conversion itself + // also need to carry the snippetTextDocumentEdits separately, since we can't retrieve them again using WorkspaceEdit.entries + const [workspaceTextEdit, snippetTextDocumentEdits] = asWorkspaceSnippetEdit(ctx, itemEdit); + await applySnippetWorkspaceEdit(workspaceTextEdit, snippetTextDocumentEdits); if (item.command != null) { await vscode.commands.executeCommand(item.command.command, item.command.arguments); } }; } +function asWorkspaceSnippetEdit( + ctx: CtxInit, + item: lc.WorkspaceEdit, +): [vscode.WorkspaceEdit, SnippetTextDocumentEdit[]] { + const client = ctx.client; + + // partially borrowed from https://github.com/microsoft/vscode-languageserver-node/blob/295aaa393fda8ecce110c38880a00466b9320e63/client/src/common/protocolConverter.ts#L1060-L1101 + const result = new vscode.WorkspaceEdit(); + + if (item.documentChanges) { + const snippetTextDocumentEdits: SnippetTextDocumentEdit[] = []; + + for (const change of item.documentChanges) { + if (lc.TextDocumentEdit.is(change)) { + const uri = client.protocol2CodeConverter.asUri(change.textDocument.uri); + const snippetTextEdits: (vscode.TextEdit | vscode.SnippetTextEdit)[] = []; + + for (const edit of change.edits) { + if ( + "insertTextFormat" in edit && + edit.insertTextFormat === lc.InsertTextFormat.Snippet + ) { + // is a snippet text edit + snippetTextEdits.push( + new vscode.SnippetTextEdit( + client.protocol2CodeConverter.asRange(edit.range), + new vscode.SnippetString(edit.newText), + ), + ); + } else { + // always as a text document edit + snippetTextEdits.push( + vscode.TextEdit.replace( + client.protocol2CodeConverter.asRange(edit.range), + edit.newText, + ), + ); + } + } + + snippetTextDocumentEdits.push([uri, snippetTextEdits]); + } + } + return [result, snippetTextDocumentEdits]; + } else { + // we don't handle WorkspaceEdit.changes since it's not relevant for code actions + return [result, []]; + } +} + 
export function applySnippetWorkspaceEditCommand(_ctx: CtxInit): Cmd { return async (edit: vscode.WorkspaceEdit) => { - await applySnippetWorkspaceEdit(edit); + await applySnippetWorkspaceEdit(edit, edit.entries()); }; } diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts index d81765649ffb2..fb12125bcd842 100644 --- a/editors/code/src/snippets.ts +++ b/editors/code/src/snippets.ts @@ -3,20 +3,28 @@ import * as vscode from "vscode"; import { assert } from "./util"; import { unwrapUndefinable } from "./undefinable"; -export async function applySnippetWorkspaceEdit(edit: vscode.WorkspaceEdit) { - if (edit.entries().length === 1) { - const [uri, edits] = unwrapUndefinable(edit.entries()[0]); +export type SnippetTextDocumentEdit = [vscode.Uri, (vscode.TextEdit | vscode.SnippetTextEdit)[]]; + +export async function applySnippetWorkspaceEdit( + edit: vscode.WorkspaceEdit, + editEntries: SnippetTextDocumentEdit[], +) { + if (editEntries.length === 1) { + const [uri, edits] = unwrapUndefinable(editEntries[0]); const editor = await editorFromUri(uri); - if (editor) await applySnippetTextEdits(editor, edits); + if (editor) { + edit.set(uri, edits); + await vscode.workspace.applyEdit(edit); + } return; } - for (const [uri, edits] of edit.entries()) { + for (const [uri, edits] of editEntries) { const editor = await editorFromUri(uri); if (editor) { await editor.edit((builder) => { for (const indel of edits) { assert( - !parseSnippet(indel.newText), + !(indel instanceof vscode.SnippetTextEdit), `bad ws edit: snippet received with multiple edits: ${JSON.stringify( edit, )}`, @@ -39,53 +47,30 @@ async function editorFromUri(uri: vscode.Uri): Promise { - for (const indel of edits) { - const parsed = parseSnippet(indel.newText); - if (parsed) { - const [newText, [placeholderStart, placeholderLength]] = parsed; - const prefix = newText.substr(0, placeholderStart); - const lastNewline = prefix.lastIndexOf("\n"); - - const startLine = indel.range.start.line + lineDelta + countLines(prefix); - const startColumn = - lastNewline === -1 - ? indel.range.start.character + placeholderStart - : prefix.length - lastNewline - 1; - const endColumn = startColumn + placeholderLength; - selections.push( - new vscode.Selection( - new vscode.Position(startLine, startColumn), - new vscode.Position(startLine, endColumn), - ), - ); - builder.replace(indel.range, newText); - } else { - builder.replace(indel.range, indel.newText); - } - lineDelta += - countLines(indel.newText) - (indel.range.end.line - indel.range.start.line); - } - }); - if (selections.length > 0) editor.selections = selections; - if (selections.length === 1) { - const selection = unwrapUndefinable(selections[0]); - editor.revealRange(selection, vscode.TextEditorRevealType.InCenterIfOutsideViewport); - } + const edit = new vscode.WorkspaceEdit(); + edit.set(editor.document.uri, toSnippetTextEdits(edits)); + await vscode.workspace.applyEdit(edit); } -function parseSnippet(snip: string): [string, [number, number]] | undefined { - const m = snip.match(/\$(0|\{0:([^}]*)\})/); - if (!m) return undefined; - const placeholder = m[2] ?? 
""; - if (m.index == null) return undefined; - const range: [number, number] = [m.index, placeholder.length]; - const insert = snip.replace(m[0], placeholder); - return [insert, range]; +function hasSnippet(snip: string): boolean { + const m = snip.match(/\$\d+|\{\d+:[^}]*\}/); + return m != null; } -function countLines(text: string): number { - return (text.match(/\n/g) || []).length; +function toSnippetTextEdits( + edits: vscode.TextEdit[], +): (vscode.TextEdit | vscode.SnippetTextEdit)[] { + return edits.map((textEdit) => { + // Note: text edits without any snippets are returned as-is instead of + // being wrapped in a SnippetTextEdit, as otherwise it would be + // treated as if it had a tab stop at the end. + if (hasSnippet(textEdit.newText)) { + return new vscode.SnippetTextEdit( + textEdit.range, + new vscode.SnippetString(textEdit.newText), + ); + } else { + return textEdit; + } + }); } From bcf14e27ce9d4331f4685fb759cf135a76ad119c Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Thu, 15 Feb 2024 18:39:17 -0500 Subject: [PATCH 020/134] Work around snippet edits doubling up extra indentation We can't tell vscode to not add in the extra indentation, so we instead opt to remove it from the edits themselves, and then let vscode add it back in. --- editors/code/src/snippets.ts | 71 +++++++++++++++++++++++++++++++++++- 1 file changed, 69 insertions(+), 2 deletions(-) diff --git a/editors/code/src/snippets.ts b/editors/code/src/snippets.ts index fb12125bcd842..b3982bdf2be41 100644 --- a/editors/code/src/snippets.ts +++ b/editors/code/src/snippets.ts @@ -13,7 +13,7 @@ export async function applySnippetWorkspaceEdit( const [uri, edits] = unwrapUndefinable(editEntries[0]); const editor = await editorFromUri(uri); if (editor) { - edit.set(uri, edits); + edit.set(uri, removeLeadingWhitespace(editor, edits)); await vscode.workspace.applyEdit(edit); } return; @@ -48,7 +48,8 @@ async function editorFromUri(uri: vscode.Uri): Promise { + if (edit instanceof vscode.SnippetTextEdit) { + const snippetEdit: vscode.SnippetTextEdit = edit; + const firstLineEnd = snippetEdit.snippet.value.indexOf("\n"); + + if (firstLineEnd !== -1) { + // Is a multi-line snippet, remove the indentation which + // would be added back in by vscode. 
+ const startLine = editor.document.lineAt(snippetEdit.range.start.line); + const leadingWhitespace = getLeadingWhitespace( + startLine.text, + 0, + startLine.firstNonWhitespaceCharacterIndex, + ); + + const [firstLine, rest] = splitAt(snippetEdit.snippet.value, firstLineEnd + 1); + const unindentedLines = rest + .split("\n") + .map((line) => line.replace(leadingWhitespace, "")) + .join("\n"); + + snippetEdit.snippet.value = firstLine + unindentedLines; + } + + return snippetEdit; + } else { + return edit; + } + }); +} + +// based on https://github.com/microsoft/vscode/blob/main/src/vs/base/common/strings.ts#L284 +function getLeadingWhitespace(str: string, start: number = 0, end: number = str.length): string { + for (let i = start; i < end; i++) { + const chCode = str.charCodeAt(i); + if (chCode !== " ".charCodeAt(0) && chCode !== " ".charCodeAt(0)) { + return str.substring(start, i); + } + } + return str.substring(start, end); +} + +function splitAt(str: string, index: number): [string, string] { + return [str.substring(0, index), str.substring(index)]; +} From 80459c14a4a6d9613d2bc07f5f0df905e8f99a0d Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Tue, 13 Feb 2024 04:37:31 +0000 Subject: [PATCH 021/134] Changes to CI related to mingw and MSYS --- .github/workflows/ci.yml | 33 ++++++++++++++++++++++++++++ src/ci/github-actions/ci.yml | 21 ++++++++++++++++++ src/ci/run.sh | 2 +- src/ci/scripts/install-clang.sh | 3 +-- src/ci/scripts/install-mingw.sh | 10 ++++----- src/ci/scripts/install-msys2.sh | 38 +++++++++++++++++++++++++++------ src/ci/shared.sh | 4 ++++ 7 files changed, 96 insertions(+), 15 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 26e589c092eda..464fd3b5640e9 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -65,9 +65,20 @@ jobs: - name: x86_64-gnu-tools os: ubuntu-20.04-16core-64gb env: {} + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + update: false + release: true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code @@ -459,9 +470,20 @@ jobs: RUST_CONFIGURE_ARGS: "--build=x86_64-pc-windows-msvc --enable-extended --enable-profiler" SCRIPT: python x.py dist bootstrap --include-default-paths os: windows-2019-8core-32gb + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + update: false + release: true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code @@ -587,9 +609,20 @@ jobs: env: CODEGEN_BACKENDS: "llvm,cranelift" os: ubuntu-20.04-16core-64gb + defaults: + run: + shell: "${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }}" timeout-minutes: 600 runs-on: "${{ matrix.os }}" steps: + - if: "contains(matrix.os, 'windows')" + uses: msys2/setup-msys2@v2.22.0 + with: + msystem: "${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }}" + 
update: false + release: true + path-type: inherit + install: "make dos2unix diffutils\n" - name: disable git crlf conversion run: git config --global core.autocrlf false - name: checkout the source code diff --git a/src/ci/github-actions/ci.yml b/src/ci/github-actions/ci.yml index 43e48c01176f3..51d9dea5a0bc3 100644 --- a/src/ci/github-actions/ci.yml +++ b/src/ci/github-actions/ci.yml @@ -111,10 +111,31 @@ x--expand-yaml-anchors--remove: if: success() && !env.SKIP_JOB - &base-ci-job + defaults: + run: + shell: ${{ contains(matrix.os, 'windows') && 'msys2 {0}' || 'bash' }} timeout-minutes: 600 runs-on: "${{ matrix.os }}" env: *shared-ci-variables steps: + - if: contains(matrix.os, 'windows') + uses: msys2/setup-msys2@v2.22.0 + with: + # i686 jobs use mingw32. x86_64 and cross-compile jobs use mingw64. + msystem: ${{ contains(matrix.name, 'i686') && 'mingw32' || 'mingw64' }} + # don't try to download updates for already installed packages + update: false + # don't try to use the msys that comes built-in to the github runner, + # so we can control what is installed (i.e. not python) + release: true + # Inherit the full path from the Windows environment, with MSYS2's */bin/ + # dirs placed in front. This lets us run Windows-native Python etc. + path-type: inherit + install: > + make + dos2unix + diffutils + - name: disable git crlf conversion run: git config --global core.autocrlf false diff --git a/src/ci/run.sh b/src/ci/run.sh index 1cdcffc1a7544..3ad04c73d3da4 100755 --- a/src/ci/run.sh +++ b/src/ci/run.sh @@ -76,7 +76,7 @@ RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set dist.compression-profile=balance # the LLVM build, as not to run out of memory. # This is an attempt to fix the spurious build error tracked by # https://github.com/rust-lang/rust/issues/108227. -if isWindows && [[ ${CUSTOM_MINGW-0} -eq 1 ]]; then +if isKnownToBeMingwBuild; then RUST_CONFIGURE_ARGS="$RUST_CONFIGURE_ARGS --set llvm.link-jobs=1" fi diff --git a/src/ci/scripts/install-clang.sh b/src/ci/scripts/install-clang.sh index 77164ed4117a9..aa7ff813f5161 100755 --- a/src/ci/scripts/install-clang.sh +++ b/src/ci/scripts/install-clang.sh @@ -37,8 +37,7 @@ if isMacOS; then # Configure `AR` specifically so rustbuild doesn't try to infer it as # `clang-ar` by accident. ciCommandSetEnv AR "ar" -elif isWindows && [[ ${CUSTOM_MINGW-0} -ne 1 ]]; then - +elif isWindows && ! isKnownToBeMingwBuild; then # If we're compiling for MSVC then we, like most other distribution builders, # switch to clang as the compiler. This'll allow us eventually to enable LTO # amongst LLVM and rustc. Note that we only do this on MSVC as I don't think diff --git a/src/ci/scripts/install-mingw.sh b/src/ci/scripts/install-mingw.sh index 7eccb9b86502c..87b835b63db5b 100755 --- a/src/ci/scripts/install-mingw.sh +++ b/src/ci/scripts/install-mingw.sh @@ -38,11 +38,11 @@ if isWindows; then ;; esac - if [[ "${CUSTOM_MINGW-0}" -ne 1 ]]; then - pacman -S --noconfirm --needed mingw-w64-$arch-toolchain mingw-w64-$arch-cmake \ - mingw-w64-$arch-gcc \ - mingw-w64-$arch-python # the python package is actually for python3 - ciCommandAddPath "$(ciCheckoutPath)/msys2/mingw${bits}/bin" + if [[ "${CUSTOM_MINGW:-0}" == 0 ]]; then + pacboy -S --noconfirm toolchain:p + # According to the comment in the Windows part of install-clang.sh, in the future we might + # want to do this instead: + # pacboy -S --noconfirm clang:p ... 
else mingw_dir="mingw${bits}" diff --git a/src/ci/scripts/install-msys2.sh b/src/ci/scripts/install-msys2.sh index 0aa4b42a6a8fb..905edf38a09db 100755 --- a/src/ci/scripts/install-msys2.sh +++ b/src/ci/scripts/install-msys2.sh @@ -1,17 +1,12 @@ #!/bin/bash -# Download and install MSYS2, needed primarily for the test suite (run-make) but -# also used by the MinGW toolchain for assembling things. +# Clean up and prepare the MSYS2 installation. MSYS2 is needed primarily for +# the test suite (run-make), but is also used by the MinGW toolchain for assembling things. set -euo pipefail IFS=$'\n\t' source "$(cd "$(dirname "$0")" && pwd)/../shared.sh" - if isWindows; then - msys2Path="c:/msys64" - mkdir -p "${msys2Path}/home/${USERNAME}" - ciCommandAddPath "${msys2Path}/usr/bin" - # Detect the native Python version installed on the agent. On GitHub # Actions, the C:\hostedtoolcache\windows\Python directory contains a # subdirectory for each installed Python version. @@ -29,4 +24,33 @@ if isWindows; then fi ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64" ciCommandAddPath "C:\\hostedtoolcache\\windows\\Python\\${native_python_version}\\x64\\Scripts" + + # Install pacboy for easily installing packages + pacman -S --noconfirm pactoys + + # Delete these pre-installed tools so we can't accidentally use them, because we are using the + # MSYS2 setup action versions instead. + # Delete pre-installed version of MSYS2 + rm -r "/c/msys64/" + # Delete Strawberry Perl, which contains a version of mingw + rm -r "/c/Strawberry/" + # Delete these other copies of mingw, I don't even know where they come from. + rm -r "/c/mingw64/" + rm -r "/c/mingw32/" + + if isKnownToBeMingwBuild; then + # Use the mingw version of CMake for mingw builds. + # However, the MSVC build needs native CMake, as it fails with the mingw one. + # Delete native CMake + rm -r "/c/Program Files/CMake/" + # Install mingw-w64-$arch-cmake + pacboy -S --noconfirm cmake:p + + # We use Git-for-Windows for MSVC builds, and MSYS2 Git for mingw builds, + # so that both are tested. + # Delete Windows-Git + rm -r "/c/Program Files/Git/" + # Install MSYS2 git + pacman -S --noconfirm git + fi fi diff --git a/src/ci/shared.sh b/src/ci/shared.sh index 720394af249b2..2b0a10e4d08d9 100644 --- a/src/ci/shared.sh +++ b/src/ci/shared.sh @@ -52,6 +52,10 @@ function isLinux { [[ "${OSTYPE}" = "linux-gnu" ]] } +function isKnownToBeMingwBuild { + isGitHubActions && [[ "${CI_JOB_NAME}" == *mingw ]] +} + function isCiBranch { if [[ $# -ne 1 ]]; then echo "usage: $0 " From e27c4722d364e0367a436f7d89c493eeb8ccf8ee Mon Sep 17 00:00:00 2001 From: Matt Harding Date: Tue, 13 Feb 2024 06:11:42 +0000 Subject: [PATCH 022/134] Update INSTALL.md instructions for MinGW --- INSTALL.md | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/INSTALL.md b/INSTALL.md index b872d317e3627..d7e0fd72044e9 100644 --- a/INSTALL.md +++ b/INSTALL.md @@ -145,10 +145,9 @@ toolchain. 1. Download the latest [MSYS2 installer][msys2] and go through the installer. -2. Run `mingw32_shell.bat` or `mingw64_shell.bat` from the MSYS2 installation - directory (e.g. `C:\msys64`), depending on whether you want 32-bit or 64-bit - Rust. (As of the latest version of MSYS2 you have to run `msys2_shell.cmd - -mingw32` or `msys2_shell.cmd -mingw64` from the command line instead.) +2. 
Start a MINGW64 or MINGW32 shell (depending on whether you want 32-bit + or 64-bit Rust) either from your start menu, or by running `mingw64.exe` + or `mingw32.exe` from your MSYS2 installation directory (e.g. `C:\msys64`). 3. From this terminal, install the required tools: @@ -157,8 +156,7 @@ toolchain. pacman -Sy pacman-mirrors # Install build tools needed for Rust. If you're building a 32-bit compiler, - # then replace "x86_64" below with "i686". If you've already got Git, Python, - # or CMake installed and in PATH you can remove them from this list. + # then replace "x86_64" below with "i686". # Note that it is important that you do **not** use the 'python2', 'cmake', # and 'ninja' packages from the 'msys2' subsystem. # The build has historically been known to fail with these packages. @@ -175,9 +173,21 @@ toolchain. 4. Navigate to Rust's source code (or clone it), then build it: ```sh - python x.py setup user && python x.py build && python x.py install + python x.py setup dist && python x.py build && python x.py install ``` +If you want to use the native versions of Git, Python, or CMake you can remove +them from the above pacman command and install them from another source. Make +sure that they're in your Windows PATH, and edit the relevant `mingw[32|64].ini` +file in your MSYS2 installation directory by uncommenting the line +`MSYS2_PATH_TYPE=inherit` to include them in your MSYS2 PATH. + +Using Windows native Python can be helpful if you get errors when building LLVM. +You may also want to use Git for Windows, as it is often *much* faster. Turning +off real-time protection in the Windows Virus & Threat protections settings can +also help with long run times (although note that it will automatically turn +itself back on after some time). + ### MSVC MSVC builds of Rust additionally require an installation of Visual Studio 2017 From c1144436f69fb3c235dd0dbf90f1833bec830856 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sun, 28 Jan 2024 13:50:06 +0100 Subject: [PATCH 023/134] Make synstructure underscore_const(true) the default since otherwise it will trigger the non_local_definitions lint --- compiler/rustc_macros/src/diagnostics/mod.rs | 9 ++++++--- compiler/rustc_macros/src/hash_stable.rs | 2 ++ compiler/rustc_macros/src/lift.rs | 1 + compiler/rustc_macros/src/serialize.rs | 12 +++++++++++- compiler/rustc_macros/src/type_foldable.rs | 2 ++ compiler/rustc_macros/src/type_visitable.rs | 2 ++ 6 files changed, 24 insertions(+), 4 deletions(-) diff --git a/compiler/rustc_macros/src/diagnostics/mod.rs b/compiler/rustc_macros/src/diagnostics/mod.rs index 33dffe6998a1e..044bbadf41c5a 100644 --- a/compiler/rustc_macros/src/diagnostics/mod.rs +++ b/compiler/rustc_macros/src/diagnostics/mod.rs @@ -55,7 +55,8 @@ use synstructure::Structure; /// /// See rustc dev guide for more examples on using the `#[derive(Diagnostic)]`: /// -pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); DiagnosticDerive::new(s).into_tokens() } @@ -101,7 +102,8 @@ pub fn session_diagnostic_derive(s: Structure<'_>) -> TokenStream { /// /// See rustc dev guide for more examples on using the `#[derive(LintDiagnostic)]`: /// -pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn lint_diagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); LintDiagnosticDerive::new(s).into_tokens() } @@ -151,6 +153,7 @@ pub fn lint_diagnostic_derive(s: Structure<'_>) -> TokenStream { 
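
An aside on why this patch threads `underscore_const(true)` through every derive: synstructure wraps each generated impl in a const item, and the `non_local_definitions` lint introduced in the next patch only exempts the anonymous `const _: () = { ... }` form at module level. A minimal sketch of the two wrapper shapes, assuming synstructure's usual scheme (the named-const identifier below is illustrative, not the exact name synstructure emits):

```rust
struct Foo;

// Wrapper shape without `underscore_const(true)` (name is illustrative): a named
// const body is still a body, so the nested `impl` trips the new lint.
#[allow(non_upper_case_globals)]
const _DERIVE_Debug_FOR_Foo: () = {
    impl core::fmt::Debug for Foo {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            f.write_str("Foo")
        }
    }
};

// Wrapper shape with `underscore_const(true)`: a top-level anonymous const is the
// documented exception, so derived impls stay warning-free.
const _: () = {
    impl core::fmt::Display for Foo {
        fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
            f.write_str("Foo")
        }
    }
};
```
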
/// /// diag.subdiagnostic(RawIdentifierSuggestion { span, applicability, ident }); /// ``` -pub fn session_subdiagnostic_derive(s: Structure<'_>) -> TokenStream { +pub fn session_subdiagnostic_derive(mut s: Structure<'_>) -> TokenStream { + s.underscore_const(true); SubdiagnosticDeriveBuilder::new().into_tokens(s) } diff --git a/compiler/rustc_macros/src/hash_stable.rs b/compiler/rustc_macros/src/hash_stable.rs index a6396ba687d11..6b3210cad7be6 100644 --- a/compiler/rustc_macros/src/hash_stable.rs +++ b/compiler/rustc_macros/src/hash_stable.rs @@ -74,6 +74,8 @@ fn hash_stable_derive_with_mode( HashStableMode::Generic | HashStableMode::NoContext => parse_quote!(__CTX), }; + s.underscore_const(true); + // no_context impl is able to derive by-field, which is closer to a perfect derive. s.add_bounds(match mode { HashStableMode::Normal | HashStableMode::Generic => synstructure::AddBounds::Generics, diff --git a/compiler/rustc_macros/src/lift.rs b/compiler/rustc_macros/src/lift.rs index 3dedd88fb19b6..f7a84ba1510d0 100644 --- a/compiler/rustc_macros/src/lift.rs +++ b/compiler/rustc_macros/src/lift.rs @@ -4,6 +4,7 @@ use syn::parse_quote; pub fn lift_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::TokenStream { s.add_bounds(synstructure::AddBounds::Generics); s.bind_with(|_| synstructure::BindStyle::Move); + s.underscore_const(true); let tcx: syn::Lifetime = parse_quote!('tcx); let newtcx: syn::GenericParam = parse_quote!('__lifted); diff --git a/compiler/rustc_macros/src/serialize.rs b/compiler/rustc_macros/src/serialize.rs index 98b53945b9115..5fa11d22f0e75 100644 --- a/compiler/rustc_macros/src/serialize.rs +++ b/compiler/rustc_macros/src/serialize.rs @@ -15,6 +15,7 @@ pub fn type_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_type_ir::codec::TyDecoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -26,6 +27,7 @@ pub fn meta_decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let decoder_ty = quote! { DecodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -34,6 +36,7 @@ pub fn decodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_span::SpanDecoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } @@ -42,12 +45,13 @@ pub fn decodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let decoder_ty = quote! { __D }; s.add_impl_generic(parse_quote! {#decoder_ty: ::rustc_serialize::Decoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); decodable_body(s, decoder_ty) } fn decodable_body( - s: synstructure::Structure<'_>, + mut s: synstructure::Structure<'_>, decoder_ty: TokenStream, ) -> proc_macro2::TokenStream { if let syn::Data::Union(_) = s.ast().data { @@ -93,6 +97,7 @@ fn decodable_body( } } }; + s.underscore_const(true); s.bound_impl( quote!(::rustc_serialize::Decodable<#decoder_ty>), @@ -130,6 +135,7 @@ pub fn type_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! 
{#encoder_ty: ::rustc_type_ir::codec::TyEncoder #bound }); s.add_bounds(synstructure::AddBounds::Fields); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -141,6 +147,7 @@ pub fn meta_encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: s.add_impl_generic(parse_quote! { '__a }); let encoder_ty = quote! { EncodeContext<'__a, 'tcx> }; s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, true) } @@ -149,6 +156,7 @@ pub fn encodable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2::Toke let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_span::SpanEncoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -157,6 +165,7 @@ pub fn encodable_generic_derive(mut s: synstructure::Structure<'_>) -> proc_macr let encoder_ty = quote! { __E }; s.add_impl_generic(parse_quote! { #encoder_ty: ::rustc_serialize::Encoder}); s.add_bounds(synstructure::AddBounds::Generics); + s.underscore_const(true); encodable_body(s, encoder_ty, false) } @@ -170,6 +179,7 @@ fn encodable_body( panic!("cannot derive on union") } + s.underscore_const(true); s.bind_with(|binding| { // Handle the lack of a blanket reference impl. if let syn::Type::Reference(_) = binding.ast().ty { diff --git a/compiler/rustc_macros/src/type_foldable.rs b/compiler/rustc_macros/src/type_foldable.rs index 5ee4d8793135c..5617c53b119ee 100644 --- a/compiler/rustc_macros/src/type_foldable.rs +++ b/compiler/rustc_macros/src/type_foldable.rs @@ -6,6 +6,8 @@ pub fn type_foldable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2:: panic!("cannot derive on union") } + s.underscore_const(true); + if !s.ast().generics.lifetimes().any(|lt| lt.lifetime.ident == "tcx") { s.add_impl_generic(parse_quote! 
{ 'tcx }); } diff --git a/compiler/rustc_macros/src/type_visitable.rs b/compiler/rustc_macros/src/type_visitable.rs index dcd505a105e57..c8430380345b4 100644 --- a/compiler/rustc_macros/src/type_visitable.rs +++ b/compiler/rustc_macros/src/type_visitable.rs @@ -6,6 +6,8 @@ pub fn type_visitable_derive(mut s: synstructure::Structure<'_>) -> proc_macro2: panic!("cannot derive on union") } + s.underscore_const(true); + // ignore fields with #[type_visitable(ignore)] s.filter(|bi| { let mut ignored = false; From 61703943132eb4a509646bf63e0199268c6df4a2 Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 15:25:18 +0100 Subject: [PATCH 024/134] Implement RFC3373 non local definitions lint --- Cargo.lock | 1 + compiler/rustc_lint/Cargo.toml | 1 + compiler/rustc_lint/messages.ftl | 20 + compiler/rustc_lint/src/lib.rs | 3 + compiler/rustc_lint/src/lints.rs | 17 + compiler/rustc_lint/src/non_local_def.rs | 187 ++++++ tests/ui/lint/non_local_definitions.rs | 373 +++++++++++ tests/ui/lint/non_local_definitions.stderr | 611 ++++++++++++++++++ tests/ui/proc-macro/nested-macro-rules.rs | 1 + tests/ui/proc-macro/nested-macro-rules.stderr | 27 + 10 files changed, 1241 insertions(+) create mode 100644 compiler/rustc_lint/src/non_local_def.rs create mode 100644 tests/ui/lint/non_local_definitions.rs create mode 100644 tests/ui/lint/non_local_definitions.stderr create mode 100644 tests/ui/proc-macro/nested-macro-rules.stderr diff --git a/Cargo.lock b/Cargo.lock index f9ad78e37951d..327186bc9b1de 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4164,6 +4164,7 @@ dependencies = [ "rustc_target", "rustc_trait_selection", "rustc_type_ir", + "smallvec", "tracing", "unicode-security", ] diff --git a/compiler/rustc_lint/Cargo.toml b/compiler/rustc_lint/Cargo.toml index fa1133e7780ff..2271321b8bf22 100644 --- a/compiler/rustc_lint/Cargo.toml +++ b/compiler/rustc_lint/Cargo.toml @@ -23,6 +23,7 @@ rustc_span = { path = "../rustc_span" } rustc_target = { path = "../rustc_target" } rustc_trait_selection = { path = "../rustc_trait_selection" } rustc_type_ir = { path = "../rustc_type_ir" } +smallvec = { version = "1.8.1", features = ["union", "may_dangle"] } tracing = "0.1" unicode-security = "0.1.0" # tidy-alphabetical-end diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 785895e0ab823..4e0ba376b7d2e 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -411,6 +411,26 @@ lint_non_fmt_panic_unused = } .add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally +lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation + .help = + move this `impl` block outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... 
{"}"}`) at top-level module and anon-const at the same nesting as the trait or type + +lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation + .help = + remove the `#[macro_export]` or move this `macro_rules!` outside the of the current {$body_kind_descr} {$depth -> + [one] `{$body_name}` + *[other] `{$body_name}` and up {$depth} bodies + } + .non_local = a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... {"}"}`) at top-level module + lint_non_snake_case = {$sort} `{$name}` should have a snake case name .rename_or_convert_suggestion = rename the identifier or convert it to a snake case raw identifier .cannot_convert_note = `{$sc}` cannot be used as a raw identifier diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index 85f9d3bd63ec7..f6d2b758525ee 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -70,6 +70,7 @@ mod methods; mod multiple_supertrait_upcastable; mod non_ascii_idents; mod non_fmt_panic; +mod non_local_def; mod nonstandard_style; mod noop_method_call; mod opaque_hidden_inferred_bound; @@ -105,6 +106,7 @@ use methods::*; use multiple_supertrait_upcastable::*; use non_ascii_idents::*; use non_fmt_panic::NonPanicFmt; +use non_local_def::*; use nonstandard_style::*; use noop_method_call::*; use opaque_hidden_inferred_bound::*; @@ -231,6 +233,7 @@ late_lint_methods!( MissingDebugImplementations: MissingDebugImplementations, MissingDoc: MissingDoc, AsyncFnInTrait: AsyncFnInTrait, + NonLocalDefinitions: NonLocalDefinitions::default(), ] ] ); diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index da59ffebdc5a9..15f158961d097 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1293,6 +1293,23 @@ pub struct SuspiciousDoubleRefCloneDiag<'a> { pub ty: Ty<'a>, } +// non_local_defs.rs +#[derive(LintDiagnostic)] +pub enum NonLocalDefinitionsDiag { + #[diag(lint_non_local_definitions_impl)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + Impl { depth: u32, body_kind_descr: &'static str, body_name: String }, + #[diag(lint_non_local_definitions_macro_rules)] + #[help] + #[note(lint_non_local)] + #[note(lint_exception)] + #[note(lint_non_local_definitions_deprecation)] + MacroRules { depth: u32, body_kind_descr: &'static str, body_name: String }, +} + // pass_by_value.rs #[derive(LintDiagnostic)] #[diag(lint_pass_by_value)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs new file mode 100644 index 0000000000000..28bd49f36a008 --- /dev/null +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -0,0 +1,187 @@ +use rustc_hir::{def::DefKind, Body, Item, ItemKind, Path, QPath, TyKind}; +use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; + +use smallvec::{smallvec, SmallVec}; + +use crate::{lints::NonLocalDefinitionsDiag, LateContext, LateLintPass, LintContext}; + +declare_lint! { + /// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]` + /// macro inside bodies (functions, enum discriminant, ...). 
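
The `### Example` that follows shows the `impl` case; for the `#[macro_export]` half of the sentence above, a minimal sketch of the flagged pattern (the macro name is illustrative):

```rust
fn foo() {
    // Although it is written inside a body, `#[macro_export]` exposes this
    // macro at the crate root, which is the surprise the lint points out.
    #[macro_export]
    macro_rules! my_macro {
        () => {};
    }
}
```
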
+ /// + /// ### Example + /// + /// ```rust + /// trait MyTrait {} + /// struct MyStruct; + /// + /// fn foo() { + /// impl MyTrait for MyStruct {} + /// } + /// ``` + /// + /// {{produces}} + /// + /// ### Explanation + /// + /// Creating non-local definitions go against expectation and can create discrepancies + /// in tooling. It should be avoided. It may become deny-by-default in edition 2024 + /// and higher, see see the tracking issue . + /// + /// An `impl` definition is non-local if it is nested inside an item and neither + /// the type nor the trait are at the same nesting level as the `impl` block. + /// + /// All nested bodies (functions, enum discriminant, array length, consts) (expect for + /// `const _: Ty = { ... }` in top-level module, which is still undecided) are checked. + pub NON_LOCAL_DEFINITIONS, + Warn, + "checks for non-local definitions", + report_in_external_macro +} + +#[derive(Default)] +pub struct NonLocalDefinitions { + body_depth: u32, +} + +impl_lint_pass!(NonLocalDefinitions => [NON_LOCAL_DEFINITIONS]); + +// FIXME(Urgau): Figure out how to handle modules nested in bodies. +// It's currently not handled by the current logic because modules are not bodies. +// They don't even follow the correct order (check_body -> check_mod -> check_body_post) +// instead check_mod is called after every body has been handled. + +impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { + fn check_body(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth += 1; + } + + fn check_body_post(&mut self, _cx: &LateContext<'tcx>, _body: &'tcx Body<'tcx>) { + self.body_depth -= 1; + } + + fn check_item(&mut self, cx: &LateContext<'tcx>, item: &'tcx Item<'tcx>) { + if self.body_depth == 0 { + return; + } + + let parent = cx.tcx.parent(item.owner_id.def_id.into()); + let parent_def_kind = cx.tcx.def_kind(parent); + let parent_opt_item_name = cx.tcx.opt_item_name(parent); + + // Per RFC we (currently) ignore anon-const (`const _: Ty = ...`) in top-level module. + if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + return; + } + + match item.kind { + ItemKind::Impl(impl_) => { + // The RFC states: + // + // > An item nested inside an expression-containing item (through any + // > level of nesting) may not define an impl Trait for Type unless + // > either the **Trait** or the **Type** is also nested inside the + // > same expression-containing item. + // + // To achieve this we get try to get the paths of the _Trait_ and + // _Type_, and we look inside thoses paths to try a find in one + // of them a type whose parent is the same as the impl definition. + // + // If that's the case this means that this impl block declaration + // is using local items and so we don't lint on it. + + // We also ignore anon-const in item by including the anon-const + // parent as well; and since it's quite uncommon, we use smallvec + // to avoid unnecessary heap allocations. 
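
As a concrete reading of the RFC rule quoted in the comment above, a minimal sketch of what this check accepts versus flags (all names are illustrative):

```rust
trait Foreign {}
struct Outside;

fn body() {
    struct Inside;

    // Not linted: `Inside` is declared in the same body as the impl, so the
    // impl only concerns a type that is itself local to `body`.
    impl Foreign for Inside {}

    // Linted: neither `Foreign` nor `Outside` is nested inside `body`, yet the
    // impl written here applies program-wide.
    impl Foreign for Outside {}
}
```
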
+ let local_parents: SmallVec<[DefId; 1]> = if parent_def_kind == DefKind::Const + && parent_opt_item_name == Some(kw::Underscore) + { + smallvec![parent, cx.tcx.parent(parent)] + } else { + smallvec![parent] + }; + + let self_ty_has_local_parent = match impl_.self_ty.kind { + TyKind::Path(QPath::Resolved(_, ty_path)) => { + path_has_local_parent(ty_path, cx, &*local_parents) + } + TyKind::TraitObject([principle_poly_trait_ref, ..], _, _) => { + path_has_local_parent( + principle_poly_trait_ref.trait_ref.path, + cx, + &*local_parents, + ) + } + TyKind::TraitObject([], _, _) + | TyKind::InferDelegation(_, _) + | TyKind::Slice(_) + | TyKind::Array(_, _) + | TyKind::Ptr(_) + | TyKind::Ref(_, _) + | TyKind::BareFn(_) + | TyKind::Never + | TyKind::Tup(_) + | TyKind::Path(_) + | TyKind::AnonAdt(_) + | TyKind::OpaqueDef(_, _, _) + | TyKind::Typeof(_) + | TyKind::Infer + | TyKind::Err(_) => false, + }; + + let of_trait_has_local_parent = impl_ + .of_trait + .map(|of_trait| path_has_local_parent(of_trait.path, cx, &*local_parents)) + .unwrap_or(false); + + // If none of them have a local parent (LOGICAL NOR) this means that + // this impl definition is a non-local definition and so we lint on it. + if !(self_ty_has_local_parent || of_trait_has_local_parent) { + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::Impl { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "".to_string()), + }, + ) + } + } + ItemKind::Macro(_macro, MacroKind::Bang) + if cx.tcx.has_attr(item.owner_id.def_id, sym::macro_export) => + { + cx.emit_span_lint( + NON_LOCAL_DEFINITIONS, + item.span, + NonLocalDefinitionsDiag::MacroRules { + depth: self.body_depth, + body_kind_descr: cx.tcx.def_kind_descr(parent_def_kind, parent), + body_name: parent_opt_item_name + .map(|s| s.to_ident_string()) + .unwrap_or_else(|| "".to_string()), + }, + ) + } + _ => {} + } + } +} + +/// Given a path and a parent impl def id, this checks if the if parent resolution +/// def id correspond to the def id of the parent impl definition. 
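
One consequence of resolving only the outermost path (the doc comment continued below spells this out with the `PartialEq` illustration) is that a local type used purely as a generic argument does not make an impl local. A minimal sketch mirroring the `UwU`/`OwO` case in the test file later in this patch (names here are illustrative):

```rust
struct Wrapper<T>(T);

fn fun() {
    struct Local;

    // Still linted: only `Default` and `Wrapper` are resolved, and both are
    // defined outside `fun`; the `Local` generic argument is not considered.
    impl Default for Wrapper<Local> {
        fn default() -> Self {
            Wrapper(Local)
        }
    }
}
```
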
+/// +/// Given this path, we will look at the path (and ignore any generic args): +/// +/// ```text +/// std::convert::PartialEq> +/// ^^^^^^^^^^^^^^^^^^^^^^^ +/// ``` +fn path_has_local_parent(path: &Path<'_>, cx: &LateContext<'_>, local_parents: &[DefId]) -> bool { + path.res.opt_def_id().is_some_and(|did| local_parents.contains(&cx.tcx.parent(did))) +} diff --git a/tests/ui/lint/non_local_definitions.rs b/tests/ui/lint/non_local_definitions.rs new file mode 100644 index 0000000000000..986efbfcf0fe1 --- /dev/null +++ b/tests/ui/lint/non_local_definitions.rs @@ -0,0 +1,373 @@ +//@ check-pass +//@ edition:2021 + +#![feature(inline_const)] + +use std::fmt::{Debug, Display}; + +struct Test; + +impl Debug for Test { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } +} + +mod do_not_lint_mod { + pub trait Tait {} + + impl super::Test { + fn hugo() {} + } + + impl Tait for super::Test {} +} + +trait Uto {} +const Z: () = { + trait Uto1 {} + + impl Uto1 for Test {} // the trait is local, don't lint + + impl Uto for &Test {} + //~^ WARN non-local `impl` definition +}; + +trait Ano {} +const _: () = { + impl Ano for &Test {} // ignored since the parent is an anon-const +}; + +type A = [u32; { + impl Uto for *mut Test {} + //~^ WARN non-local `impl` definition + + 1 +}]; + +enum Enum { + Discr = { + impl Uto for Test {} + //~^ WARN non-local `impl` definition + + 1 + } +} + +trait Uto2 {} +static A: u32 = { + impl Uto2 for Test {} + //~^ WARN non-local `impl` definition + + 1 +}; + +trait Uto3 {} +const B: u32 = { + impl Uto3 for Test {} + //~^ WARN non-local `impl` definition + + #[macro_export] + macro_rules! m0 { () => { } }; + //~^ WARN non-local `macro_rules!` definition + + trait Uto4 {} + impl Uto4 for Test {} + + 1 +}; + +trait Uto5 {} +fn main() { + #[macro_export] + macro_rules! 
m { () => { } }; + //~^ WARN non-local `macro_rules!` definition + + impl Test { + //~^ WARN non-local `impl` definition + fn foo() {} + } + + let _array = [0i32; { + impl Test { + //~^ WARN non-local `impl` definition + fn bar() {} + } + + 1 + }]; + + const { + impl Test { + //~^ WARN non-local `impl` definition + fn hoo() {} + } + + 1 + }; + + const _: u32 = { + impl Test { + //~^ WARN non-local `impl` definition + fn foo2() {} + } + + 1 + }; + + impl Display for Test { + //~^ WARN non-local `impl` definition + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl dyn Uto5 {} + //~^ WARN non-local `impl` definition + + impl Uto5 for Vec { } + //~^ WARN non-local `impl` definition + + impl Uto5 for &dyn Uto5 {} + //~^ WARN non-local `impl` definition + + impl Uto5 for *mut Test {} + //~^ WARN non-local `impl` definition + + impl Uto5 for *mut [Test] {} + //~^ WARN non-local `impl` definition + + impl Uto5 for [Test; 8] {} + //~^ WARN non-local `impl` definition + + impl Uto5 for (Test,) {} + //~^ WARN non-local `impl` definition + + impl Uto5 for fn(Test) -> () {} + //~^ WARN non-local `impl` definition + + impl Uto5 for fn() -> Test {} + //~^ WARN non-local `impl` definition + + let _a = || { + impl Uto5 for Test {} + //~^ WARN non-local `impl` definition + + 1 + }; + + type A = [u32; { + impl Uto5 for &Test {} + //~^ WARN non-local `impl` definition + + 1 + }]; + + fn a(_: [u32; { + impl Uto5 for &(Test,) {} + //~^ WARN non-local `impl` definition + + 1 + }]) {} + + fn b() -> [u32; { + impl Uto5 for &(Test,Test) {} + //~^ WARN non-local `impl` definition + + 1 + }] { todo!() } + + struct InsideMain; + + impl Uto5 for *mut InsideMain {} + //~^ WARN non-local `impl` definition + impl Uto5 for *mut [InsideMain] {} + //~^ WARN non-local `impl` definition + impl Uto5 for [InsideMain; 8] {} + //~^ WARN non-local `impl` definition + impl Uto5 for (InsideMain,) {} + //~^ WARN non-local `impl` definition + impl Uto5 for fn(InsideMain) -> () {} + //~^ WARN non-local `impl` definition + impl Uto5 for fn() -> InsideMain {} + //~^ WARN non-local `impl` definition + + impl Debug for InsideMain { + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl InsideMain { + fn foo() {} + } + + fn inside_inside() { + impl Display for InsideMain { + //~^ WARN non-local `impl` definition + fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + todo!() + } + } + + impl InsideMain { + //~^ WARN non-local `impl` definition + fn bar() { + #[macro_export] + macro_rules! 
m2 { () => { } }; + //~^ WARN non-local `macro_rules!` definition + } + } + } + + trait Uto6 {} + impl dyn Uto6 {} + impl Uto5 for dyn Uto6 {} + + impl Uto3 for Vec { } + //~^ WARN non-local `impl` definition +} + +trait Uto7 {} +trait Uto8 {} + +fn bad() { + struct Local; + impl Uto7 for Test where Local: std::any::Any {} + //~^ WARN non-local `impl` definition + + impl Uto8 for T {} + //~^ WARN non-local `impl` definition +} + +struct UwU(T); + +fn fun() { + #[derive(Debug)] + struct OwO; + impl Default for UwU { + //~^ WARN non-local `impl` definition + fn default() -> Self { + UwU(OwO) + } + } +} + +struct Cat; + +fn meow() { + impl From for () { + //~^ WARN non-local `impl` definition + fn from(_: Cat) -> () { + todo!() + } + } + + #[derive(Debug)] + struct Cat; + impl AsRef for () { + //~^ WARN non-local `impl` definition + fn as_ref(&self) -> &Cat { &Cat } + } +} + +struct G; + +fn fun2() { + #[derive(Debug, Default)] + struct B; + impl PartialEq for G { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &B) -> bool { + true + } + } +} + +fn side_effects() { + dbg!(().as_ref()); // prints `Cat` + dbg!(UwU::default().0); + let _ = G::eq(&G, dbg!(&<_>::default())); +} + +struct Dog; + +fn woof() { + impl PartialEq for &Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } + + impl PartialEq<()> for Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &()) -> bool { + todo!() + } + } + + impl PartialEq<()> for &Dog { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &()) -> bool { + todo!() + } + } + + impl PartialEq for () { + //~^ WARN non-local `impl` definition + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } + + struct Test; + impl PartialEq for Test { + fn eq(&self, _: &Dog) -> bool { + todo!() + } + } +} + +struct Wrap(T); + +impl Wrap>> {} + +fn rawr() { + struct Lion; + + impl From>> for () { + //~^ WARN non-local `impl` definition + fn from(_: Wrap>) -> Self { + todo!() + } + } + + impl From<()> for Wrap { + //~^ WARN non-local `impl` definition + fn from(_: ()) -> Self { + todo!() + } + } +} + +macro_rules! m { + () => { + trait MacroTrait {} + struct OutsideStruct; + fn my_func() { + impl MacroTrait for OutsideStruct {} + //~^ WARN non-local `impl` definition + } + } +} + +m!(); + +fn bitflags() { + struct Flags; + + const _: () = { + impl Flags {} + }; +} diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr new file mode 100644 index 0000000000000..f15457734bcfe --- /dev/null +++ b/tests/ui/lint/non_local_definitions.stderr @@ -0,0 +1,611 @@ +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:32:5 + | +LL | impl Uto for &Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `Z` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: `#[warn(non_local_definitions)]` on by default + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:42:5 + | +LL | impl Uto for *mut Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:50:9 + | +LL | impl Uto for Test {} + | ^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:59:5 + | +LL | impl Uto2 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current static `A` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:67:5 + | +LL | impl Uto3 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `B` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:71:5 + | +LL | macro_rules! m0 { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current constant `B` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:83:5 + | +LL | macro_rules! m { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current function `main` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:86:5 + | +LL | / impl Test { +LL | | +LL | | fn foo() {} +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:92:9 + | +LL | / impl Test { +LL | | +LL | | fn bar() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:101:9 + | +LL | / impl Test { +LL | | +LL | | fn hoo() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current inline constant `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:110:9 + | +LL | / impl Test { +LL | | +LL | | fn foo2() {} +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current constant `_` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:118:5 + | +LL | / impl Display for Test { +LL | | +LL | | fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:125:5 + | +LL | impl dyn Uto5 {} + | ^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:128:5 + | +LL | impl Uto5 for Vec { } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:131:5 + | +LL | impl Uto5 for &dyn Uto5 {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:134:5 + | +LL | impl Uto5 for *mut Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:137:5 + | +LL | impl Uto5 for *mut [Test] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:140:5 + | +LL | impl Uto5 for [Test; 8] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:143:5 + | +LL | impl Uto5 for (Test,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:146:5 + | +LL | impl Uto5 for fn(Test) -> () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:149:5 + | +LL | impl Uto5 for fn() -> Test {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:153:9 + | +LL | impl Uto5 for Test {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current closure `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:160:9 + | +LL | impl Uto5 for &Test {} + | ^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:167:9 + | +LL | impl Uto5 for &(Test,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:174:9 + | +LL | impl Uto5 for &(Test,Test) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant expression `` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:182:5 + | +LL | impl Uto5 for *mut InsideMain {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:184:5 + | +LL | impl Uto5 for *mut [InsideMain] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:186:5 + | +LL | impl Uto5 for [InsideMain; 8] {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:188:5 + | +LL | impl Uto5 for (InsideMain,) {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:190:5 + | +LL | impl Uto5 for fn(InsideMain) -> () {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:192:5 + | +LL | impl Uto5 for fn() -> InsideMain {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:206:9 + | +LL | / impl Display for InsideMain { +LL | | +LL | | fn fmt(&self, _f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { +LL | | todo!() +LL | | } +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current function `inside_inside` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:213:9 + | +LL | / impl InsideMain { +LL | | +LL | | fn bar() { +LL | | #[macro_export] +... 
| +LL | | } +LL | | } + | |_________^ + | + = help: move this `impl` block outside the of the current function `inside_inside` and up 2 bodies + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:217:17 + | +LL | macro_rules! m2 { () => { } }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current associated function `bar` and up 3 bodies + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:227:5 + | +LL | impl Uto3 for Vec { } + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `main` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:236:5 + | +LL | impl Uto7 for Test where Local: std::any::Any {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `bad` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:239:5 + | +LL | impl Uto8 for T {} + | ^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current function `bad` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:248:5 + | +LL | / impl Default for UwU { +LL | | +LL | | fn default() -> Self { +LL | | UwU(OwO) +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `fun` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:259:5 + | +LL | / impl From for () { +LL | | +LL | | fn from(_: Cat) -> () { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `meow` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:268:5 + | +LL | / impl AsRef for () { +LL | | +LL | | fn as_ref(&self) -> &Cat { &Cat } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `meow` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:279:5 + | +LL | / impl PartialEq for G { +LL | | +LL | | fn eq(&self, _: &B) -> bool { +LL | | true +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `fun2` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:296:5 + | +LL | / impl PartialEq for &Dog { +LL | | +LL | | fn eq(&self, _: &Dog) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:303:5 + | +LL | / impl PartialEq<()> for Dog { +LL | | +LL | | fn eq(&self, _: &()) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:310:5 + | +LL | / impl PartialEq<()> for &Dog { +LL | | +LL | | fn eq(&self, _: &()) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:317:5 + | +LL | / impl PartialEq for () { +LL | | +LL | | fn eq(&self, _: &Dog) -> bool { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `woof` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:339:5 + | +LL | / impl From>> for () { +LL | | +LL | | fn from(_: Wrap>) -> Self { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `rawr` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:346:5 + | +LL | / impl From<()> for Wrap { +LL | | +LL | | fn from(_: ()) -> Self { +LL | | todo!() +LL | | } +LL | | } + | |_____^ + | + = help: move this `impl` block outside the of the current function `rawr` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:359:13 + | +LL | impl MacroTrait for OutsideStruct {} + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +... +LL | m!(); + | ---- in this macro invocation + | + = help: move this `impl` block outside the of the current function `my_func` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: this warning originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: 48 warnings emitted + diff --git a/tests/ui/proc-macro/nested-macro-rules.rs b/tests/ui/proc-macro/nested-macro-rules.rs index bb25b97df5067..0dce3c408c203 100644 --- a/tests/ui/proc-macro/nested-macro-rules.rs +++ b/tests/ui/proc-macro/nested-macro-rules.rs @@ -19,5 +19,6 @@ fn main() { nested_macro_rules::inner_macro!(print_bang, print_attr); nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct); + //~^ WARN non-local `macro_rules!` definition inner_macro!(print_bang, print_attr); } diff --git a/tests/ui/proc-macro/nested-macro-rules.stderr b/tests/ui/proc-macro/nested-macro-rules.stderr new file mode 100644 index 0000000000000..111be8827714f --- /dev/null +++ b/tests/ui/proc-macro/nested-macro-rules.stderr @@ -0,0 +1,27 @@ +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/auxiliary/nested-macro-rules.rs:7:9 + | +LL | macro_rules! 
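The warnings in the expected output above all share one shape: an `impl` block or a `#[macro_export] macro_rules!` definition sits inside a function (or other body) while the trait and type it mentions are defined at an outer nesting level. A minimal sketch of both forms, using illustrative names rather than the ones in the test file:

```rust
struct Outside;
trait Marker {}

fn main() {
    // Non-local impl: `Marker` and `Outside` live at module level, but the
    // impl block is nested inside the body of `main`.
    impl Marker for Outside {}

    // Non-local macro: `#[macro_export]` hoists this macro to the crate
    // root even though it is defined inside a function body.
    #[macro_export]
    macro_rules! exported_from_fn {
        () => {};
    }
}
```

Moving either item to module level silences the lint; for the `impl` case, a top-level `const _: () = { ... }` wrapper is the anon-const exception the notes above describe.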
outer_macro { + | ------------------------ in this expansion of `nested_macro_rules::outer_macro!` +... +LL | / macro_rules! inner_macro { +LL | | ($bang_macro:ident, $attr_macro:ident) => { +LL | | $bang_macro!($name); +LL | | #[$attr_macro] struct $attr_struct_name {} +LL | | } +LL | | } + | |_________^ + | + ::: $DIR/nested-macro-rules.rs:21:5 + | +LL | nested_macro_rules::outer_macro!(SecondStruct, SecondAttrStruct); + | ---------------------------------------------------------------- in this macro invocation + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current function `main` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: `#[warn(non_local_definitions)]` on by default + +warning: 1 warning emitted + From 80c81c53ace700baf07d7d51c90e55adb2b3ff8f Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 17:14:38 +0100 Subject: [PATCH 025/134] Allow newly added non_local_definitions lint in tests --- tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs | 2 ++ tests/ui/const-generics/min_const_generics/macro.rs | 2 ++ tests/ui/consts/const_in_pattern/accept_structural.rs | 1 + tests/ui/drop/dropck-eyepatch-reorder.rs | 1 + tests/ui/drop/dropck-eyepatch.rs | 1 + tests/ui/imports/local-modularized-tricky-pass-2.rs | 1 + tests/ui/issues/issue-31776.rs | 1 + tests/ui/issues/issue-41053.rs | 2 ++ tests/ui/macros/type-macros-simple.rs | 2 ++ tests/ui/packed/issue-46152.rs | 1 + tests/ui/privacy/associated-item-privacy-trait.rs | 2 +- tests/ui/privacy/private-in-public-non-principal.rs | 1 + tests/ui/privacy/private-in-public-non-principal.stderr | 8 ++++---- tests/ui/rust-2018/uniform-paths/issue-55779.rs | 2 ++ 14 files changed, 22 insertions(+), 5 deletions(-) diff --git a/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs b/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs index 28705bfc0c8f8..1a2aae8fb23a5 100644 --- a/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs +++ b/tests/ui/async-await/async-assoc-fn-anon-lifetimes.rs @@ -5,6 +5,8 @@ //@ edition:2018 +#![allow(non_local_definitions)] + struct A<'a, 'b>(&'a &'b i32); struct B<'a>(&'a i32); diff --git a/tests/ui/const-generics/min_const_generics/macro.rs b/tests/ui/const-generics/min_const_generics/macro.rs index b7e8083a86199..52f47628f8f1a 100644 --- a/tests/ui/const-generics/min_const_generics/macro.rs +++ b/tests/ui/const-generics/min_const_generics/macro.rs @@ -1,4 +1,6 @@ //@ run-pass +#![allow(non_local_definitions)] + struct Example; macro_rules! 
external_macro { diff --git a/tests/ui/consts/const_in_pattern/accept_structural.rs b/tests/ui/consts/const_in_pattern/accept_structural.rs index 09142c5615747..31d3b6e73312d 100644 --- a/tests/ui/consts/const_in_pattern/accept_structural.rs +++ b/tests/ui/consts/const_in_pattern/accept_structural.rs @@ -1,5 +1,6 @@ //@ run-pass +#![allow(non_local_definitions)] #![warn(indirect_structural_match)] // This test is checking our logic for structural match checking by enumerating diff --git a/tests/ui/drop/dropck-eyepatch-reorder.rs b/tests/ui/drop/dropck-eyepatch-reorder.rs index 6b394414baec0..b985beee9ec9d 100644 --- a/tests/ui/drop/dropck-eyepatch-reorder.rs +++ b/tests/ui/drop/dropck-eyepatch-reorder.rs @@ -1,5 +1,6 @@ //@ run-pass #![feature(dropck_eyepatch)] +#![allow(non_local_definitions)] // The point of this test is to test uses of `#[may_dangle]` attribute // where the formal declaration order (in the impl generics) does not diff --git a/tests/ui/drop/dropck-eyepatch.rs b/tests/ui/drop/dropck-eyepatch.rs index 2f27b72da5a65..2dffe6aba1767 100644 --- a/tests/ui/drop/dropck-eyepatch.rs +++ b/tests/ui/drop/dropck-eyepatch.rs @@ -1,5 +1,6 @@ //@ run-pass #![feature(dropck_eyepatch)] +#![allow(non_local_definitions)] // The point of this test is to illustrate that the `#[may_dangle]` // attribute specifically allows, in the context of a type diff --git a/tests/ui/imports/local-modularized-tricky-pass-2.rs b/tests/ui/imports/local-modularized-tricky-pass-2.rs index 581bab467f561..1cf97c5aa06e6 100644 --- a/tests/ui/imports/local-modularized-tricky-pass-2.rs +++ b/tests/ui/imports/local-modularized-tricky-pass-2.rs @@ -4,6 +4,7 @@ // into the root module soon enough to act as usual items and shadow globs and preludes. #![feature(decl_macro)] +#![allow(non_local_definitions)] // `macro_export` shadows globs use inner1::*; diff --git a/tests/ui/issues/issue-31776.rs b/tests/ui/issues/issue-31776.rs index 632defbcf273f..4b342a0e3b2dc 100644 --- a/tests/ui/issues/issue-31776.rs +++ b/tests/ui/issues/issue-31776.rs @@ -1,6 +1,7 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] // Various scenarios in which `pub` is required in blocks struct S; diff --git a/tests/ui/issues/issue-41053.rs b/tests/ui/issues/issue-41053.rs index f46bf6b4aa168..18f9e209c33b7 100644 --- a/tests/ui/issues/issue-41053.rs +++ b/tests/ui/issues/issue-41053.rs @@ -1,6 +1,8 @@ //@ run-pass //@ aux-build:issue-41053.rs +#![allow(non_local_definitions)] + pub trait Trait { fn foo(&self) {} } pub struct Foo; diff --git a/tests/ui/macros/type-macros-simple.rs b/tests/ui/macros/type-macros-simple.rs index 4d1001baf59fd..d189b881f7dda 100644 --- a/tests/ui/macros/type-macros-simple.rs +++ b/tests/ui/macros/type-macros-simple.rs @@ -1,6 +1,8 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] + macro_rules! 
Tuple { { $A:ty,$B:ty } => { ($A, $B) } } diff --git a/tests/ui/packed/issue-46152.rs b/tests/ui/packed/issue-46152.rs index e38b445107baf..5b2e4bbfdf243 100644 --- a/tests/ui/packed/issue-46152.rs +++ b/tests/ui/packed/issue-46152.rs @@ -1,6 +1,7 @@ //@ run-pass #![allow(dead_code)] #![allow(unused_variables)] +#![allow(non_local_definitions)] #![feature(unsize, coerce_unsized)] #[repr(packed)] diff --git a/tests/ui/privacy/associated-item-privacy-trait.rs b/tests/ui/privacy/associated-item-privacy-trait.rs index f038ae9e261df..655d892e2441f 100644 --- a/tests/ui/privacy/associated-item-privacy-trait.rs +++ b/tests/ui/privacy/associated-item-privacy-trait.rs @@ -1,5 +1,5 @@ #![feature(decl_macro, associated_type_defaults)] -#![allow(private_interfaces, private_bounds)] +#![allow(private_interfaces, private_bounds, non_local_definitions)] mod priv_trait { trait PrivTr { diff --git a/tests/ui/privacy/private-in-public-non-principal.rs b/tests/ui/privacy/private-in-public-non-principal.rs index e348a181651cf..8dc90919bc9ea 100644 --- a/tests/ui/privacy/private-in-public-non-principal.rs +++ b/tests/ui/privacy/private-in-public-non-principal.rs @@ -1,5 +1,6 @@ #![feature(auto_traits)] #![feature(negative_impls)] +#![allow(non_local_definitions)] pub trait PubPrincipal {} auto trait PrivNonPrincipal {} diff --git a/tests/ui/privacy/private-in-public-non-principal.stderr b/tests/ui/privacy/private-in-public-non-principal.stderr index 73f2249bc6c05..5aa08d3f07175 100644 --- a/tests/ui/privacy/private-in-public-non-principal.stderr +++ b/tests/ui/privacy/private-in-public-non-principal.stderr @@ -1,24 +1,24 @@ warning: trait `PrivNonPrincipal` is more private than the item `leak_dyn_nonprincipal` - --> $DIR/private-in-public-non-principal.rs:7:1 + --> $DIR/private-in-public-non-principal.rs:8:1 | LL | pub fn leak_dyn_nonprincipal() -> Box { loop {} } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ function `leak_dyn_nonprincipal` is reachable at visibility `pub` | note: but trait `PrivNonPrincipal` is only usable at visibility `pub(crate)` - --> $DIR/private-in-public-non-principal.rs:5:1 + --> $DIR/private-in-public-non-principal.rs:6:1 | LL | auto trait PrivNonPrincipal {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ = note: `#[warn(private_interfaces)]` on by default error: missing documentation for an associated function - --> $DIR/private-in-public-non-principal.rs:13:9 + --> $DIR/private-in-public-non-principal.rs:14:9 | LL | pub fn check_doc_lint() {} | ^^^^^^^^^^^^^^^^^^^^^^^ | note: the lint level is defined here - --> $DIR/private-in-public-non-principal.rs:10:8 + --> $DIR/private-in-public-non-principal.rs:11:8 | LL | #[deny(missing_docs)] | ^^^^^^^^^^^^ diff --git a/tests/ui/rust-2018/uniform-paths/issue-55779.rs b/tests/ui/rust-2018/uniform-paths/issue-55779.rs index 350ab324682b4..246b8dd82c5e3 100644 --- a/tests/ui/rust-2018/uniform-paths/issue-55779.rs +++ b/tests/ui/rust-2018/uniform-paths/issue-55779.rs @@ -2,6 +2,8 @@ //@ edition:2018 //@ aux-crate:issue_55779_extern_trait=issue-55779-extern-trait.rs +#![allow(non_local_definitions)] + use issue_55779_extern_trait::Trait; struct Local; From 01bcc60ecd8aa4bf7d43dd13694e003c84ad19ee Mon Sep 17 00:00:00 2001 From: Urgau Date: Fri, 26 Jan 2024 18:48:18 +0100 Subject: [PATCH 026/134] Allow newly added non_local_definitions lint in clippy --- .../undocumented_unsafe_blocks.rs | 2 +- .../clippy/tests/ui/bool_comparison.fixed | 2 +- src/tools/clippy/tests/ui/bool_comparison.rs | 2 +- 
src/tools/clippy/tests/ui/crashes/ice-4760.rs | 2 + src/tools/clippy/tests/ui/crashes/ice-6179.rs | 1 + .../tests/ui/explicit_into_iter_loop.fixed | 1 + .../tests/ui/explicit_into_iter_loop.rs | 1 + .../tests/ui/explicit_into_iter_loop.stderr | 12 ++--- .../clippy/tests/ui/explicit_iter_loop.fixed | 3 +- .../clippy/tests/ui/explicit_iter_loop.rs | 3 +- .../clippy/tests/ui/explicit_iter_loop.stderr | 36 ++++++------- .../clippy/tests/ui/from_over_into.fixed | 1 + src/tools/clippy/tests/ui/from_over_into.rs | 1 + .../clippy/tests/ui/from_over_into.stderr | 14 ++--- .../clippy/tests/ui/manual_str_repeat.fixed | 1 + .../clippy/tests/ui/manual_str_repeat.rs | 1 + .../clippy/tests/ui/manual_str_repeat.stderr | 20 +++---- .../clippy/tests/ui/needless_borrow.fixed | 1 + src/tools/clippy/tests/ui/needless_borrow.rs | 1 + .../clippy/tests/ui/needless_borrow.stderr | 54 +++++++++---------- 20 files changed, 86 insertions(+), 73 deletions(-) diff --git a/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs b/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs index a278139876064..8997073c8a555 100644 --- a/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs +++ b/src/tools/clippy/tests/ui-toml/undocumented_unsafe_blocks/undocumented_unsafe_blocks.rs @@ -4,7 +4,7 @@ //@[disabled] rustc-env:CLIPPY_CONF_DIR=tests/ui-toml/undocumented_unsafe_blocks/disabled #![warn(clippy::undocumented_unsafe_blocks, clippy::unnecessary_safety_comment)] -#![allow(deref_nullptr, clippy::let_unit_value, clippy::missing_safety_doc)] +#![allow(deref_nullptr, non_local_definitions, clippy::let_unit_value, clippy::missing_safety_doc)] #![feature(lint_reasons)] extern crate proc_macro_unsafe; diff --git a/src/tools/clippy/tests/ui/bool_comparison.fixed b/src/tools/clippy/tests/ui/bool_comparison.fixed index 02f1d09b83395..600380fd1420d 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.fixed +++ b/src/tools/clippy/tests/ui/bool_comparison.fixed @@ -1,4 +1,4 @@ -#![allow(clippy::needless_if)] +#![allow(non_local_definitions, clippy::needless_if)] #![warn(clippy::bool_comparison)] #![allow(clippy::non_canonical_partial_ord_impl)] diff --git a/src/tools/clippy/tests/ui/bool_comparison.rs b/src/tools/clippy/tests/ui/bool_comparison.rs index 5ef696d855eca..910df6151f8c2 100644 --- a/src/tools/clippy/tests/ui/bool_comparison.rs +++ b/src/tools/clippy/tests/ui/bool_comparison.rs @@ -1,4 +1,4 @@ -#![allow(clippy::needless_if)] +#![allow(non_local_definitions, clippy::needless_if)] #![warn(clippy::bool_comparison)] #![allow(clippy::non_canonical_partial_ord_impl)] diff --git a/src/tools/clippy/tests/ui/crashes/ice-4760.rs b/src/tools/clippy/tests/ui/crashes/ice-4760.rs index 08b06961760ff..e1265169762fc 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-4760.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-4760.rs @@ -1,3 +1,5 @@ +#![allow(non_local_definitions)] + const COUNT: usize = 2; struct Thing; trait Dummy {} diff --git a/src/tools/clippy/tests/ui/crashes/ice-6179.rs b/src/tools/clippy/tests/ui/crashes/ice-6179.rs index ce1895851e2d9..fffc0f7d0d4f2 100644 --- a/src/tools/clippy/tests/ui/crashes/ice-6179.rs +++ b/src/tools/clippy/tests/ui/crashes/ice-6179.rs @@ -3,6 +3,7 @@ #![warn(clippy::use_self)] #![allow(dead_code, clippy::let_with_type_underscore)] +#![allow(non_local_definitions)] struct Foo; diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed b/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed 
index 2521bce6a58e1..6d67488a71309 100644 --- a/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.fixed @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::explicit_into_iter_loop)] fn main() { diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs b/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs index 9eac96d182b9f..14630c07c5cc4 100644 --- a/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.rs @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::explicit_into_iter_loop)] fn main() { diff --git a/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr b/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr index c03647ab43367..a1e632271ed32 100644 --- a/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr +++ b/src/tools/clippy/tests/ui/explicit_into_iter_loop.stderr @@ -1,5 +1,5 @@ error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:9:18 + --> $DIR/explicit_into_iter_loop.rs:10:18 | LL | for _ in iterator.into_iter() {} | ^^^^^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `iterator` @@ -8,31 +8,31 @@ LL | for _ in iterator.into_iter() {} = help: to override `-D warnings` add `#[allow(clippy::explicit_into_iter_loop)]` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:22:14 + --> $DIR/explicit_into_iter_loop.rs:23:14 | LL | for _ in t.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `&t` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:25:14 + --> $DIR/explicit_into_iter_loop.rs:26:14 | LL | for _ in r.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `r` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:33:14 + --> $DIR/explicit_into_iter_loop.rs:34:14 | LL | for _ in mr.into_iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&*mr` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:45:14 + --> $DIR/explicit_into_iter_loop.rs:46:14 | LL | for _ in u.into_iter() {} | ^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut u` error: it is more concise to loop over containers instead of using explicit iteration methods - --> $DIR/explicit_into_iter_loop.rs:48:14 + --> $DIR/explicit_into_iter_loop.rs:49:14 | LL | for _ in mr.into_iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut *mr` diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.fixed b/src/tools/clippy/tests/ui/explicit_iter_loop.fixed index f08397defa539..06229a52a18c4 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.fixed +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.fixed @@ -5,7 +5,8 @@ clippy::needless_borrow, clippy::deref_addrof, clippy::unnecessary_mut_passed, - dead_code + dead_code, + non_local_definitions, )] use core::slice; diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.rs b/src/tools/clippy/tests/ui/explicit_iter_loop.rs index 2ee6825d445c2..c2bf45ab2e990 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.rs +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.rs @@ -5,7 +5,8 @@ clippy::needless_borrow, 
clippy::deref_addrof, clippy::unnecessary_mut_passed, - dead_code + dead_code, + non_local_definitions, )] use core::slice; diff --git a/src/tools/clippy/tests/ui/explicit_iter_loop.stderr b/src/tools/clippy/tests/ui/explicit_iter_loop.stderr index 725d9b63cf8d6..007606b52c294 100644 --- a/src/tools/clippy/tests/ui/explicit_iter_loop.stderr +++ b/src/tools/clippy/tests/ui/explicit_iter_loop.stderr @@ -1,5 +1,5 @@ error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:17:14 + --> $DIR/explicit_iter_loop.rs:18:14 | LL | for _ in vec.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `&vec` @@ -11,103 +11,103 @@ LL | #![deny(clippy::explicit_iter_loop)] | ^^^^^^^^^^^^^^^^^^^^^^^^^^ error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:18:14 + --> $DIR/explicit_iter_loop.rs:19:14 | LL | for _ in vec.iter_mut() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&mut vec` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:21:14 + --> $DIR/explicit_iter_loop.rs:22:14 | LL | for _ in rvec.iter() {} | ^^^^^^^^^^^ help: to write this more concisely, try: `rvec` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:30:14 + --> $DIR/explicit_iter_loop.rs:31:14 | LL | for _ in [1, 2, 3].iter() {} | ^^^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[1, 2, 3]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:34:14 + --> $DIR/explicit_iter_loop.rs:35:14 | LL | for _ in [0; 32].iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[0; 32]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:35:14 + --> $DIR/explicit_iter_loop.rs:36:14 | LL | for _ in [0; 33].iter() {} | ^^^^^^^^^^^^^^ help: to write this more concisely, try: `&[0; 33]` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:38:14 + --> $DIR/explicit_iter_loop.rs:39:14 | LL | for _ in ll.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&ll` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:40:14 + --> $DIR/explicit_iter_loop.rs:41:14 | LL | for _ in rll.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `rll` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:43:14 + --> $DIR/explicit_iter_loop.rs:44:14 | LL | for _ in vd.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&vd` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:45:14 + --> $DIR/explicit_iter_loop.rs:46:14 | LL | for _ in rvd.iter() {} | ^^^^^^^^^^ help: to write this more concisely, try: `rvd` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:48:14 + --> $DIR/explicit_iter_loop.rs:49:14 | LL | for _ in bh.iter() {} | ^^^^^^^^^ 
help: to write this more concisely, try: `&bh` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:51:14 + --> $DIR/explicit_iter_loop.rs:52:14 | LL | for _ in hm.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&hm` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:54:14 + --> $DIR/explicit_iter_loop.rs:55:14 | LL | for _ in bt.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&bt` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:57:14 + --> $DIR/explicit_iter_loop.rs:58:14 | LL | for _ in hs.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&hs` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:60:14 + --> $DIR/explicit_iter_loop.rs:61:14 | LL | for _ in bs.iter() {} | ^^^^^^^^^ help: to write this more concisely, try: `&bs` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:149:14 + --> $DIR/explicit_iter_loop.rs:150:14 | LL | for _ in x.iter() {} | ^^^^^^^^ help: to write this more concisely, try: `&x` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:150:14 + --> $DIR/explicit_iter_loop.rs:151:14 | LL | for _ in x.iter_mut() {} | ^^^^^^^^^^^^ help: to write this more concisely, try: `&mut x` error: it is more concise to loop over references to containers instead of using explicit iteration methods - --> $DIR/explicit_iter_loop.rs:153:14 + --> $DIR/explicit_iter_loop.rs:154:14 | LL | for _ in r.iter() {} | ^^^^^^^^ help: to write this more concisely, try: `r` diff --git a/src/tools/clippy/tests/ui/from_over_into.fixed b/src/tools/clippy/tests/ui/from_over_into.fixed index 4a68505ee0b1c..a33c1ea5738b9 100644 --- a/src/tools/clippy/tests/ui/from_over_into.fixed +++ b/src/tools/clippy/tests/ui/from_over_into.fixed @@ -1,5 +1,6 @@ #![feature(type_alias_impl_trait)] #![warn(clippy::from_over_into)] +#![allow(non_local_definitions)] #![allow(unused)] // this should throw an error diff --git a/src/tools/clippy/tests/ui/from_over_into.rs b/src/tools/clippy/tests/ui/from_over_into.rs index bf3ed0c2b6422..6cd811ae401e2 100644 --- a/src/tools/clippy/tests/ui/from_over_into.rs +++ b/src/tools/clippy/tests/ui/from_over_into.rs @@ -1,5 +1,6 @@ #![feature(type_alias_impl_trait)] #![warn(clippy::from_over_into)] +#![allow(non_local_definitions)] #![allow(unused)] // this should throw an error diff --git a/src/tools/clippy/tests/ui/from_over_into.stderr b/src/tools/clippy/tests/ui/from_over_into.stderr index f1370ed844fa9..15b4e02a264f1 100644 --- a/src/tools/clippy/tests/ui/from_over_into.stderr +++ b/src/tools/clippy/tests/ui/from_over_into.stderr @@ -1,5 +1,5 @@ error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:8:1 + --> $DIR/from_over_into.rs:9:1 | LL | impl Into for String { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -14,7 +14,7 @@ LL ~ StringWrapper(val) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:16:1 + --> 
$DIR/from_over_into.rs:17:1 | LL | impl Into for String { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -27,7 +27,7 @@ LL ~ SelfType(String::new()) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:31:1 + --> $DIR/from_over_into.rs:32:1 | LL | impl Into for X { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -42,7 +42,7 @@ LL ~ let _: X = val; | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:43:1 + --> $DIR/from_over_into.rs:44:1 | LL | impl core::convert::Into for crate::ExplicitPaths { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -60,7 +60,7 @@ LL ~ val.0 | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:63:1 + --> $DIR/from_over_into.rs:64:1 | LL | impl Into for PathInExpansion { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -74,7 +74,7 @@ LL ~ fn from(val: PathInExpansion) -> Self { | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:85:5 + --> $DIR/from_over_into.rs:86:5 | LL | impl Into> for Vec { | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -87,7 +87,7 @@ LL ~ FromOverInto(val) | error: an implementation of `From` is preferred since it gives you `Into<_>` for free where the reverse isn't true - --> $DIR/from_over_into.rs:95:5 + --> $DIR/from_over_into.rs:96:5 | LL | impl Into<()> for Hello { | ^^^^^^^^^^^^^^^^^^^^^^^ diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.fixed b/src/tools/clippy/tests/ui/manual_str_repeat.fixed index 888a466278ccc..5f2f1bd9916d8 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.fixed +++ b/src/tools/clippy/tests/ui/manual_str_repeat.fixed @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::manual_str_repeat)] use std::borrow::Cow; diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.rs b/src/tools/clippy/tests/ui/manual_str_repeat.rs index a366351ffa45a..3e3c7f4db4a27 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.rs +++ b/src/tools/clippy/tests/ui/manual_str_repeat.rs @@ -1,3 +1,4 @@ +#![allow(non_local_definitions)] #![warn(clippy::manual_str_repeat)] use std::borrow::Cow; diff --git a/src/tools/clippy/tests/ui/manual_str_repeat.stderr b/src/tools/clippy/tests/ui/manual_str_repeat.stderr index 9a13aa9722737..6eb6f2b85a8f4 100644 --- a/src/tools/clippy/tests/ui/manual_str_repeat.stderr +++ b/src/tools/clippy/tests/ui/manual_str_repeat.stderr @@ -1,5 +1,5 @@ error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:7:21 + --> $DIR/manual_str_repeat.rs:8:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` @@ -8,55 +8,55 @@ LL | let _: String = std::iter::repeat("test").take(10).collect(); = help: to override `-D warnings` add `#[allow(clippy::manual_str_repeat)]` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:8:21 + --> $DIR/manual_str_repeat.rs:9:21 | LL | let _: String = std::iter::repeat('x').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"x".repeat(10)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:9:21 + --> $DIR/manual_str_repeat.rs:10:21 | LL | let _: String = 
std::iter::repeat('\'').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"'".repeat(10)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:10:21 + --> $DIR/manual_str_repeat.rs:11:21 | LL | let _: String = std::iter::repeat('"').take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"\"".repeat(10)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:14:13 + --> $DIR/manual_str_repeat.rs:15:13 | LL | let _ = repeat(x).take(count + 2).collect::(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count + 2)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:23:21 + --> $DIR/manual_str_repeat.rs:24:21 | LL | let _: String = repeat(*x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `(*x).repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:32:21 + --> $DIR/manual_str_repeat.rs:33:21 | LL | let _: String = repeat(x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:44:21 + --> $DIR/manual_str_repeat.rs:45:21 | LL | let _: String = repeat(Cow::Borrowed("test")).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `Cow::Borrowed("test").repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:47:21 + --> $DIR/manual_str_repeat.rs:48:21 | LL | let _: String = repeat(x).take(count).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `x.repeat(count)` error: manual implementation of `str::repeat` using iterators - --> $DIR/manual_str_repeat.rs:62:21 + --> $DIR/manual_str_repeat.rs:63:21 | LL | let _: String = std::iter::repeat("test").take(10).collect(); | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: try: `"test".repeat(10)` diff --git a/src/tools/clippy/tests/ui/needless_borrow.fixed b/src/tools/clippy/tests/ui/needless_borrow.fixed index 23e8bf8a468fd..998f5430fdf05 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.fixed +++ b/src/tools/clippy/tests/ui/needless_borrow.fixed @@ -1,6 +1,7 @@ #![feature(lint_reasons)] #![allow( unused, + non_local_definitions, clippy::uninlined_format_args, clippy::unnecessary_mut_passed, clippy::unnecessary_to_owned, diff --git a/src/tools/clippy/tests/ui/needless_borrow.rs b/src/tools/clippy/tests/ui/needless_borrow.rs index 27771a8f15b30..acb2c74d849a2 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.rs +++ b/src/tools/clippy/tests/ui/needless_borrow.rs @@ -1,6 +1,7 @@ #![feature(lint_reasons)] #![allow( unused, + non_local_definitions, clippy::uninlined_format_args, clippy::unnecessary_mut_passed, clippy::unnecessary_to_owned, diff --git a/src/tools/clippy/tests/ui/needless_borrow.stderr b/src/tools/clippy/tests/ui/needless_borrow.stderr index a21ed8382c14e..9034bd83a0b05 100644 --- a/src/tools/clippy/tests/ui/needless_borrow.stderr +++ b/src/tools/clippy/tests/ui/needless_borrow.stderr @@ -1,5 +1,5 @@ error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:15:15 + --> $DIR/needless_borrow.rs:16:15 | LL | let _ = x(&&a); // warn | ^^^ help: change this to: `&a` @@ -8,157 +8,157 @@ LL | let _ = x(&&a); // warn = help: to override `-D warnings` add `#[allow(clippy::needless_borrow)]` error: 
this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:19:13 + --> $DIR/needless_borrow.rs:20:13 | LL | mut_ref(&mut &mut b); // warn | ^^^^^^^^^^^ help: change this to: `&mut b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:31:13 + --> $DIR/needless_borrow.rs:32:13 | LL | &&a | ^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:33:15 + --> $DIR/needless_borrow.rs:34:15 | LL | 46 => &&a, | ^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:39:27 + --> $DIR/needless_borrow.rs:40:27 | LL | break &ref_a; | ^^^^^^ help: change this to: `ref_a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:46:15 + --> $DIR/needless_borrow.rs:47:15 | LL | let _ = x(&&&a); | ^^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:47:15 + --> $DIR/needless_borrow.rs:48:15 | LL | let _ = x(&mut &&a); | ^^^^^^^^ help: change this to: `&a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:48:15 + --> $DIR/needless_borrow.rs:49:15 | LL | let _ = x(&&&mut b); | ^^^^^^^^ help: change this to: `&mut b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:49:15 + --> $DIR/needless_borrow.rs:50:15 | LL | let _ = x(&&ref_a); | ^^^^^^^ help: change this to: `ref_a` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:52:11 + --> $DIR/needless_borrow.rs:53:11 | LL | x(&b); | ^^ help: change this to: `b` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:59:13 + --> $DIR/needless_borrow.rs:60:13 | LL | mut_ref(&mut x); | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:60:13 + --> $DIR/needless_borrow.rs:61:13 | LL | mut_ref(&mut &mut x); | ^^^^^^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:61:23 + --> $DIR/needless_borrow.rs:62:23 | LL | let y: &mut i32 = &mut x; | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:62:23 + --> $DIR/needless_borrow.rs:63:23 | LL | let y: &mut i32 = &mut &mut x; | ^^^^^^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:71:14 + --> $DIR/needless_borrow.rs:72:14 | LL | 0 => &mut x, | ^^^^^^ help: change this to: `x` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:77:14 + --> $DIR/needless_borrow.rs:78:14 | LL | 0 => &mut x, | ^^^^^^ help: change this to: `x` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:89:13 + --> $DIR/needless_borrow.rs:90:13 | LL | 
let _ = (&x).0; | ^^^^ help: change this to: `x` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:91:22 + --> $DIR/needless_borrow.rs:92:22 | LL | let _ = unsafe { (&*x).0 }; | ^^^^^ help: change this to: `(*x)` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:101:5 + --> $DIR/needless_borrow.rs:102:5 | LL | (&&()).foo(); | ^^^^^^ help: change this to: `(&())` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:110:5 + --> $DIR/needless_borrow.rs:111:5 | LL | (&&5).foo(); | ^^^^^ help: change this to: `(&5)` error: this expression creates a reference which is immediately dereferenced by the compiler - --> $DIR/needless_borrow.rs:136:23 + --> $DIR/needless_borrow.rs:137:23 | LL | let x: (&str,) = (&"",); | ^^^ help: change this to: `""` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:178:13 + --> $DIR/needless_borrow.rs:179:13 | LL | (&self.f)() | ^^^^^^^^^ help: change this to: `(self.f)` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:187:13 + --> $DIR/needless_borrow.rs:188:13 | LL | (&mut self.f)() | ^^^^^^^^^^^^^ help: change this to: `(self.f)` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:224:22 + --> $DIR/needless_borrow.rs:225:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:231:22 + --> $DIR/needless_borrow.rs:232:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:235:22 + --> $DIR/needless_borrow.rs:236:22 | LL | let _ = &mut (&mut x.u).x; | ^^^^^^^^^^ help: change this to: `x.u` error: this expression borrows a value the compiler would automatically borrow - --> $DIR/needless_borrow.rs:236:22 + --> $DIR/needless_borrow.rs:237:22 | LL | let _ = &mut (&mut { x.u }).x; | ^^^^^^^^^^^^^^ help: change this to: `{ x.u }` From 1b733558bf7ac425a2b56726e7229cc298982536 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 17:37:53 +0100 Subject: [PATCH 027/134] Allow newly added non_local_definitions in std --- library/core/src/convert/mod.rs | 1 + library/core/src/hash/mod.rs | 1 + library/core/tests/iter/adapters/step_by.rs | 1 + library/core/tests/result.rs | 2 ++ 4 files changed, 5 insertions(+) diff --git a/library/core/src/convert/mod.rs b/library/core/src/convert/mod.rs index 45f6e375e8942..85740dce8668c 100644 --- a/library/core/src/convert/mod.rs +++ b/library/core/src/convert/mod.rs @@ -396,6 +396,7 @@ pub trait AsMut { /// For example, take this code: /// /// ``` +/// # #![cfg_attr(not(bootstrap), allow(non_local_definitions))] /// struct Wrapper(Vec); /// impl From> for Vec { /// fn from(w: Wrapper) -> Vec { diff --git a/library/core/src/hash/mod.rs b/library/core/src/hash/mod.rs index 153971a59c5c9..bfdd28a7399fd 100644 --- a/library/core/src/hash/mod.rs +++ b/library/core/src/hash/mod.rs @@ -454,6 +454,7 @@ pub trait Hasher { /// ``` /// #![feature(hasher_prefixfree_extras)] /// # // Stubs to make the `impl` below pass the compiler + /// # #![cfg_attr(not(bootstrap), 
allow(non_local_definitions))] /// # struct MyCollection(Option); /// # impl MyCollection { /// # fn len(&self) -> usize { todo!() } diff --git a/library/core/tests/iter/adapters/step_by.rs b/library/core/tests/iter/adapters/step_by.rs index b4d61d28cb2e0..29adf0b42fae3 100644 --- a/library/core/tests/iter/adapters/step_by.rs +++ b/library/core/tests/iter/adapters/step_by.rs @@ -49,6 +49,7 @@ fn test_iterator_step_by_nth() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] fn test_iterator_step_by_nth_overflow() { #[cfg(target_pointer_width = "16")] type Bigger = u32; diff --git a/library/core/tests/result.rs b/library/core/tests/result.rs index 6c008ab2cb196..d02dc45da34c2 100644 --- a/library/core/tests/result.rs +++ b/library/core/tests/result.rs @@ -195,6 +195,7 @@ pub fn test_unwrap_or_default() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] pub fn test_into_ok() { fn infallible_op() -> Result { Ok(666) @@ -217,6 +218,7 @@ pub fn test_into_ok() { } #[test] +#[cfg_attr(not(bootstrap), allow(non_local_definitions))] pub fn test_into_err() { fn until_error_op() -> Result { Err(666) From 7b6057dea42c0aaec0735b155a2bc1aeaa0c0a93 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 17:39:16 +0100 Subject: [PATCH 028/134] Allow newly added non_local_definitions in rustfmt --- src/tools/rustfmt/src/source_file.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/src/tools/rustfmt/src/source_file.rs b/src/tools/rustfmt/src/source_file.rs index 958f9b0154f70..512a8593c27cd 100644 --- a/src/tools/rustfmt/src/source_file.rs +++ b/src/tools/rustfmt/src/source_file.rs @@ -66,6 +66,7 @@ where } } + #[cfg_attr(not(bootstrap), allow(non_local_definitions))] impl From<&FileName> for rustc_span::FileName { fn from(filename: &FileName) -> rustc_span::FileName { match filename { From 6320ad0b07be860232d2b1cf9e3bdf870ca28894 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sat, 27 Jan 2024 18:26:37 +0100 Subject: [PATCH 029/134] Fix non_local_definitions lint in rustdoc --- src/librustdoc/doctest.rs | 7 +++++++ src/librustdoc/html/markdown/tests.rs | 5 ----- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/librustdoc/doctest.rs b/src/librustdoc/doctest.rs index f9d4d1af1140f..828c72b28407d 100644 --- a/src/librustdoc/doctest.rs +++ b/src/librustdoc/doctest.rs @@ -1208,6 +1208,13 @@ impl Tester for Collector { } } +#[cfg(test)] // used in tests +impl Tester for Vec { + fn add_test(&mut self, _test: String, _config: LangString, line: usize) { + self.push(line); + } +} + struct HirCollector<'a, 'hir, 'tcx> { sess: &'a Session, collector: &'a mut Collector, diff --git a/src/librustdoc/html/markdown/tests.rs b/src/librustdoc/html/markdown/tests.rs index 4dd176b3a692a..1de97e49b8307 100644 --- a/src/librustdoc/html/markdown/tests.rs +++ b/src/librustdoc/html/markdown/tests.rs @@ -480,11 +480,6 @@ fn test_markdown_html_escape() { #[test] fn test_find_testable_code_line() { fn t(input: &str, expect: &[usize]) { - impl crate::doctest::Tester for Vec { - fn add_test(&mut self, _test: String, _config: LangString, line: usize) { - self.push(line); - } - } let mut lines = Vec::::new(); find_testable_code(input, &mut lines, ErrorCodes::No, false, None, true); assert_eq!(lines, expect); From a8ae1175c7e8f96996b69d75ca6a18e58efb7814 Mon Sep 17 00:00:00 2001 From: Urgau Date: Sun, 28 Jan 2024 13:49:45 +0100 Subject: [PATCH 030/134] Fix non_local_definitions lint in rustc_hir_analysis --- .../src/coherence/inherent_impls_overlap.rs | 10 +++++----- 1 file changed, 
5 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs index 9e1e884d9769f..3aef29f4ae4dd 100644 --- a/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs +++ b/compiler/rustc_hir_analysis/src/coherence/inherent_impls_overlap.rs @@ -24,6 +24,11 @@ struct InherentOverlapChecker<'tcx> { tcx: TyCtxt<'tcx>, } +rustc_index::newtype_index! { + #[orderable] + pub struct RegionId {} +} + impl<'tcx> InherentOverlapChecker<'tcx> { /// Checks whether any associated items in impls 1 and 2 share the same identifier and /// namespace. @@ -205,11 +210,6 @@ impl<'tcx> InherentOverlapChecker<'tcx> { // This is advantageous to running the algorithm over the // entire graph when there are many connected regions. - rustc_index::newtype_index! { - #[orderable] - pub struct RegionId {} - } - struct ConnectedRegion { idents: SmallVec<[Symbol; 8]>, impl_blocks: FxHashSet, From 85e3a2ee043f6404561a9fbed799c07d83f305ce Mon Sep 17 00:00:00 2001 From: Urgau Date: Thu, 15 Feb 2024 20:30:16 +0100 Subject: [PATCH 031/134] Add const-anon suggestion for non local impl --- compiler/rustc_lint/messages.ftl | 1 + compiler/rustc_lint/src/lints.rs | 8 +++++++- compiler/rustc_lint/src/non_local_def.rs | 16 +++++++++++++++- tests/ui/lint/non_local_definitions.stderr | 3 +++ 4 files changed, 26 insertions(+), 2 deletions(-) diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index 4e0ba376b7d2e..ca3941c06f427 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -421,6 +421,7 @@ lint_non_local_definitions_impl = non-local `impl` definition, they should be av } .non_local = an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block .exception = one exception to the rule are anon-const (`const _: () = {"{"} ... 
{"}"}`) at top-level module and anon-const at the same nesting as the trait or type + .const_anon = use a const-anon item to suppress this lint lint_non_local_definitions_macro_rules = non-local `macro_rules!` definition, they should be avoided as they go against expectation .help = diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 15f158961d097..1a56fa751c2e7 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1301,7 +1301,13 @@ pub enum NonLocalDefinitionsDiag { #[note(lint_non_local)] #[note(lint_exception)] #[note(lint_non_local_definitions_deprecation)] - Impl { depth: u32, body_kind_descr: &'static str, body_name: String }, + Impl { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")] + const_anon: Option, + }, #[diag(lint_non_local_definitions_macro_rules)] #[help] #[note(lint_non_local)] diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index 28bd49f36a008..bfd7b4a72f9b3 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -1,4 +1,4 @@ -use rustc_hir::{def::DefKind, Body, Item, ItemKind, Path, QPath, TyKind}; +use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind}; use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; use smallvec::{smallvec, SmallVec}; @@ -140,6 +140,19 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { // If none of them have a local parent (LOGICAL NOR) this means that // this impl definition is a non-local definition and so we lint on it. if !(self_ty_has_local_parent || of_trait_has_local_parent) { + let const_anon = if self.body_depth == 1 + && parent_def_kind == DefKind::Const + && parent_opt_item_name != Some(kw::Underscore) + && let Some(parent) = parent.as_local() + && let Node::Item(item) = cx.tcx.hir_node_by_def_id(parent) + && let ItemKind::Const(ty, _, _) = item.kind + && let TyKind::Tup(&[]) = ty.kind + { + Some(item.ident.span) + } else { + None + }; + cx.emit_span_lint( NON_LOCAL_DEFINITIONS, item.span, @@ -149,6 +162,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + const_anon, }, ) } diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr index f15457734bcfe..8403357b11552 100644 --- a/tests/ui/lint/non_local_definitions.stderr +++ b/tests/ui/lint/non_local_definitions.stderr @@ -1,6 +1,9 @@ warning: non-local `impl` definition, they should be avoided as they go against expectation --> $DIR/non_local_definitions.rs:32:5 | +LL | const Z: () = { + | - help: use a const-anon item to suppress this lint: `_` +... 
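The `const_anon` check added here only proposes a fix when the impl is nested just one body deep inside a *named* `const` item of type `()`; in that case the lint emits a machine-applicable suggestion to rename the const to `_`, since an anonymous const is the documented exception. A rough before/after sketch of what the suggestion targets, reusing the `Test`/`Uto`/`Z` names from the test file:

```rust
struct Test;
trait Uto {}

// The lint fires on the nested impl and suggests replacing `Z` with `_`.
const Z: () = {
    impl Uto for &Test {}
};

// After applying the suggestion, the anon-const exception applies:
//
// const _: () = {
//     impl Uto for &Test {}
// };

fn main() {}
```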
LL | impl Uto for &Test {} | ^^^^^^^^^^^^^^^^^^^^^ | From 63469ab762b5710b07418e61ff758bd48d0f8b3e Mon Sep 17 00:00:00 2001 From: Urgau Date: Thu, 15 Feb 2024 20:33:30 +0100 Subject: [PATCH 032/134] Add cargo update suggestion for non local defs --- compiler/rustc_lint/messages.ftl | 2 + compiler/rustc_lint/src/lints.rs | 18 ++- compiler/rustc_lint/src/non_local_def.rs | 25 ++++- tests/ui/lint/auxiliary/non_local_macro.rs | 26 +++++ tests/ui/lint/non_local_definitions.rs | 12 ++ tests/ui/lint/non_local_definitions.stderr | 124 +++++++++++++-------- 6 files changed, 155 insertions(+), 52 deletions(-) create mode 100644 tests/ui/lint/auxiliary/non_local_macro.rs diff --git a/compiler/rustc_lint/messages.ftl b/compiler/rustc_lint/messages.ftl index ca3941c06f427..3f7abebf7b95b 100644 --- a/compiler/rustc_lint/messages.ftl +++ b/compiler/rustc_lint/messages.ftl @@ -411,6 +411,8 @@ lint_non_fmt_panic_unused = } .add_fmt_suggestion = or add a "{"{"}{"}"}" format string to use the message literally +lint_non_local_definitions_cargo_update = the {$macro_kind} `{$macro_name}` may come from an old version of the `{$crate_name}` crate, try updating your dependency with `cargo update -p {$crate_name}` + lint_non_local_definitions_deprecation = this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue lint_non_local_definitions_impl = non-local `impl` definition, they should be avoided as they go against expectation diff --git a/compiler/rustc_lint/src/lints.rs b/compiler/rustc_lint/src/lints.rs index 1a56fa751c2e7..3a79520505964 100644 --- a/compiler/rustc_lint/src/lints.rs +++ b/compiler/rustc_lint/src/lints.rs @@ -1305,6 +1305,8 @@ pub enum NonLocalDefinitionsDiag { depth: u32, body_kind_descr: &'static str, body_name: String, + #[subdiagnostic] + cargo_update: Option, #[suggestion(lint_const_anon, code = "_", applicability = "machine-applicable")] const_anon: Option, }, @@ -1313,7 +1315,21 @@ pub enum NonLocalDefinitionsDiag { #[note(lint_non_local)] #[note(lint_exception)] #[note(lint_non_local_definitions_deprecation)] - MacroRules { depth: u32, body_kind_descr: &'static str, body_name: String }, + MacroRules { + depth: u32, + body_kind_descr: &'static str, + body_name: String, + #[subdiagnostic] + cargo_update: Option, + }, +} + +#[derive(Subdiagnostic)] +#[note(lint_non_local_definitions_cargo_update)] +pub struct NonLocalDefinitionsCargoUpdateNote { + pub macro_kind: &'static str, + pub macro_name: Symbol, + pub crate_name: Symbol, } // pass_by_value.rs diff --git a/compiler/rustc_lint/src/non_local_def.rs b/compiler/rustc_lint/src/non_local_def.rs index bfd7b4a72f9b3..6cb6fd1cbd550 100644 --- a/compiler/rustc_lint/src/non_local_def.rs +++ b/compiler/rustc_lint/src/non_local_def.rs @@ -1,9 +1,11 @@ use rustc_hir::{def::DefKind, Body, Item, ItemKind, Node, Path, QPath, TyKind}; -use rustc_span::{def_id::DefId, sym, symbol::kw, MacroKind}; +use rustc_span::def_id::{DefId, LOCAL_CRATE}; +use rustc_span::{sym, symbol::kw, ExpnKind, MacroKind}; use smallvec::{smallvec, SmallVec}; -use crate::{lints::NonLocalDefinitionsDiag, LateContext, LateLintPass, LintContext}; +use crate::lints::{NonLocalDefinitionsCargoUpdateNote, NonLocalDefinitionsDiag}; +use crate::{LateContext, LateLintPass, LintContext}; declare_lint! 
{ /// The `non_local_definitions` lint checks for `impl` blocks and `#[macro_export]` @@ -77,6 +79,23 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { return; } + let cargo_update = || { + let oexpn = item.span.ctxt().outer_expn_data(); + if let Some(def_id) = oexpn.macro_def_id + && let ExpnKind::Macro(macro_kind, macro_name) = oexpn.kind + && def_id.krate != LOCAL_CRATE + && std::env::var_os("CARGO").is_some() + { + Some(NonLocalDefinitionsCargoUpdateNote { + macro_kind: macro_kind.descr(), + macro_name, + crate_name: cx.tcx.crate_name(def_id.krate), + }) + } else { + None + } + }; + match item.kind { ItemKind::Impl(impl_) => { // The RFC states: @@ -162,6 +181,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + cargo_update: cargo_update(), const_anon, }, ) @@ -179,6 +199,7 @@ impl<'tcx> LateLintPass<'tcx> for NonLocalDefinitions { body_name: parent_opt_item_name .map(|s| s.to_ident_string()) .unwrap_or_else(|| "".to_string()), + cargo_update: cargo_update(), }, ) } diff --git a/tests/ui/lint/auxiliary/non_local_macro.rs b/tests/ui/lint/auxiliary/non_local_macro.rs new file mode 100644 index 0000000000000..8c0ff8adda1b9 --- /dev/null +++ b/tests/ui/lint/auxiliary/non_local_macro.rs @@ -0,0 +1,26 @@ +#[macro_export] +macro_rules! non_local_impl { + ($a:ident) => { + const _IMPL_DEBUG: () = { + impl ::std::fmt::Debug for $a { + fn fmt(&self, _: &mut ::std::fmt::Formatter<'_>) + -> ::std::result::Result<(), ::std::fmt::Error> + { + todo!() + } + } + }; + } +} + +#[macro_export] +macro_rules! non_local_macro_rules { + ($a:ident) => { + const _MACRO_EXPORT: () = { + #[macro_export] + macro_rules! $a { + () => {} + } + }; + } +} diff --git a/tests/ui/lint/non_local_definitions.rs b/tests/ui/lint/non_local_definitions.rs index 986efbfcf0fe1..c9aaa04934631 100644 --- a/tests/ui/lint/non_local_definitions.rs +++ b/tests/ui/lint/non_local_definitions.rs @@ -1,8 +1,12 @@ //@ check-pass //@ edition:2021 +//@ aux-build:non_local_macro.rs +//@ rustc-env:CARGO=/usr/bin/cargo #![feature(inline_const)] +extern crate non_local_macro; + use std::fmt::{Debug, Display}; struct Test; @@ -364,6 +368,14 @@ macro_rules! 
m { m!(); +struct CargoUpdate; + +non_local_macro::non_local_impl!(CargoUpdate); +//~^ WARN non-local `impl` definition + +non_local_macro::non_local_macro_rules!(my_macro); +//~^ WARN non-local `macro_rules!` definition + fn bitflags() { struct Flags; diff --git a/tests/ui/lint/non_local_definitions.stderr b/tests/ui/lint/non_local_definitions.stderr index 8403357b11552..f9f29ec63a805 100644 --- a/tests/ui/lint/non_local_definitions.stderr +++ b/tests/ui/lint/non_local_definitions.stderr @@ -1,5 +1,5 @@ warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:32:5 + --> $DIR/non_local_definitions.rs:36:5 | LL | const Z: () = { | - help: use a const-anon item to suppress this lint: `_` @@ -14,7 +14,7 @@ LL | impl Uto for &Test {} = note: `#[warn(non_local_definitions)]` on by default warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:42:5 + --> $DIR/non_local_definitions.rs:46:5 | LL | impl Uto for *mut Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -25,7 +25,7 @@ LL | impl Uto for *mut Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:50:9 + --> $DIR/non_local_definitions.rs:54:9 | LL | impl Uto for Test {} | ^^^^^^^^^^^^^^^^^^^^ @@ -36,7 +36,7 @@ LL | impl Uto for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:59:5 + --> $DIR/non_local_definitions.rs:63:5 | LL | impl Uto2 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -47,7 +47,7 @@ LL | impl Uto2 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:67:5 + --> $DIR/non_local_definitions.rs:71:5 | LL | impl Uto3 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -58,7 +58,7 @@ LL | impl Uto3 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:71:5 + --> $DIR/non_local_definitions.rs:75:5 | LL | macro_rules! m0 { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -69,7 +69,7 @@ LL | macro_rules! m0 { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:83:5 + --> $DIR/non_local_definitions.rs:87:5 | LL | macro_rules! m { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -80,7 +80,7 @@ LL | macro_rules! 
m { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:86:5 + --> $DIR/non_local_definitions.rs:90:5 | LL | / impl Test { LL | | @@ -94,7 +94,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:92:9 + --> $DIR/non_local_definitions.rs:96:9 | LL | / impl Test { LL | | @@ -108,7 +108,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:101:9 + --> $DIR/non_local_definitions.rs:105:9 | LL | / impl Test { LL | | @@ -122,7 +122,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:110:9 + --> $DIR/non_local_definitions.rs:114:9 | LL | / impl Test { LL | | @@ -136,7 +136,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:118:5 + --> $DIR/non_local_definitions.rs:122:5 | LL | / impl Display for Test { LL | | @@ -152,7 +152,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:125:5 + --> $DIR/non_local_definitions.rs:129:5 | LL | impl dyn Uto5 {} | ^^^^^^^^^^^^^^^^ @@ -163,7 +163,7 @@ LL | impl dyn Uto5 {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:128:5 + --> $DIR/non_local_definitions.rs:132:5 | LL | impl Uto5 for Vec { } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -174,7 +174,7 @@ LL | impl Uto5 for Vec { } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:131:5 + --> $DIR/non_local_definitions.rs:135:5 | LL | impl Uto5 for &dyn Uto5 {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -185,7 +185,7 @@ LL | impl Uto5 for &dyn Uto5 {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:134:5 + --> $DIR/non_local_definitions.rs:138:5 | LL | impl Uto5 for *mut Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -196,7 +196,7 @@ LL | impl Uto5 for *mut Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:137:5 + --> $DIR/non_local_definitions.rs:141:5 | LL | impl Uto5 for *mut [Test] {} | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -207,7 +207,7 @@ LL | impl Uto5 for *mut [Test] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:140:5 + --> $DIR/non_local_definitions.rs:144:5 | LL | impl Uto5 for [Test; 8] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -218,7 +218,7 @@ LL | impl Uto5 for [Test; 8] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:143:5 + --> $DIR/non_local_definitions.rs:147:5 | LL | impl Uto5 for (Test,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^ @@ -229,7 +229,7 @@ LL | impl Uto5 for (Test,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:146:5 + --> $DIR/non_local_definitions.rs:150:5 | LL | impl Uto5 for fn(Test) -> () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -240,7 +240,7 @@ LL | impl Uto5 for fn(Test) -> () {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:149:5 + --> $DIR/non_local_definitions.rs:153:5 | LL | impl Uto5 for fn() -> Test {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -251,7 +251,7 @@ LL | impl Uto5 for fn() -> Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:153:9 + --> $DIR/non_local_definitions.rs:157:9 | LL | impl Uto5 for Test {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -262,7 +262,7 @@ LL | impl Uto5 for Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:160:9 + --> $DIR/non_local_definitions.rs:164:9 | LL | impl Uto5 for &Test {} | ^^^^^^^^^^^^^^^^^^^^^^ @@ -273,7 +273,7 @@ LL | impl Uto5 for &Test {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:167:9 + --> $DIR/non_local_definitions.rs:171:9 | LL | impl Uto5 for &(Test,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -284,7 +284,7 @@ LL | impl Uto5 for &(Test,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:174:9 + --> $DIR/non_local_definitions.rs:178:9 | LL | impl Uto5 for &(Test,Test) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -295,7 +295,7 @@ LL | impl Uto5 for &(Test,Test) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:182:5 + --> $DIR/non_local_definitions.rs:186:5 | LL | impl Uto5 for *mut InsideMain {} | 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -306,7 +306,7 @@ LL | impl Uto5 for *mut InsideMain {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:184:5 + --> $DIR/non_local_definitions.rs:188:5 | LL | impl Uto5 for *mut [InsideMain] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -317,7 +317,7 @@ LL | impl Uto5 for *mut [InsideMain] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:186:5 + --> $DIR/non_local_definitions.rs:190:5 | LL | impl Uto5 for [InsideMain; 8] {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -328,7 +328,7 @@ LL | impl Uto5 for [InsideMain; 8] {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:188:5 + --> $DIR/non_local_definitions.rs:192:5 | LL | impl Uto5 for (InsideMain,) {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -339,7 +339,7 @@ LL | impl Uto5 for (InsideMain,) {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:190:5 + --> $DIR/non_local_definitions.rs:194:5 | LL | impl Uto5 for fn(InsideMain) -> () {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -350,7 +350,7 @@ LL | impl Uto5 for fn(InsideMain) -> () {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:192:5 + --> $DIR/non_local_definitions.rs:196:5 | LL | impl Uto5 for fn() -> InsideMain {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -361,7 +361,7 @@ LL | impl Uto5 for fn() -> InsideMain {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:206:9 + --> $DIR/non_local_definitions.rs:210:9 | LL | / impl Display for InsideMain { LL | | @@ -377,7 +377,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:213:9 + --> $DIR/non_local_definitions.rs:217:9 | LL | / impl InsideMain { LL | | @@ -394,7 +394,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:217:17 + --> $DIR/non_local_definitions.rs:221:17 | LL | macro_rules! m2 { () => { } }; | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -405,7 +405,7 @@ LL | macro_rules! 
m2 { () => { } }; = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:227:5 + --> $DIR/non_local_definitions.rs:231:5 | LL | impl Uto3 for Vec { } | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -416,7 +416,7 @@ LL | impl Uto3 for Vec { } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:236:5 + --> $DIR/non_local_definitions.rs:240:5 | LL | impl Uto7 for Test where Local: std::any::Any {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -427,7 +427,7 @@ LL | impl Uto7 for Test where Local: std::any::Any {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:239:5 + --> $DIR/non_local_definitions.rs:243:5 | LL | impl Uto8 for T {} | ^^^^^^^^^^^^^^^^^^^^^ @@ -438,7 +438,7 @@ LL | impl Uto8 for T {} = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:248:5 + --> $DIR/non_local_definitions.rs:252:5 | LL | / impl Default for UwU { LL | | @@ -454,7 +454,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:259:5 + --> $DIR/non_local_definitions.rs:263:5 | LL | / impl From for () { LL | | @@ -470,7 +470,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:268:5 + --> $DIR/non_local_definitions.rs:272:5 | LL | / impl AsRef for () { LL | | @@ -484,7 +484,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:279:5 + --> $DIR/non_local_definitions.rs:283:5 | LL | / impl PartialEq for G { LL | | @@ -500,7 +500,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:296:5 + --> $DIR/non_local_definitions.rs:300:5 | LL | / impl PartialEq for &Dog { LL | | @@ -516,7 +516,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:303:5 + --> $DIR/non_local_definitions.rs:307:5 | LL | / impl PartialEq<()> for Dog { LL | | @@ -532,7 +532,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:310:5 + --> 
$DIR/non_local_definitions.rs:314:5 | LL | / impl PartialEq<()> for &Dog { LL | | @@ -548,7 +548,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:317:5 + --> $DIR/non_local_definitions.rs:321:5 | LL | / impl PartialEq for () { LL | | @@ -564,7 +564,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:339:5 + --> $DIR/non_local_definitions.rs:343:5 | LL | / impl From>> for () { LL | | @@ -580,7 +580,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:346:5 + --> $DIR/non_local_definitions.rs:350:5 | LL | / impl From<()> for Wrap { LL | | @@ -596,7 +596,7 @@ LL | | } = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue warning: non-local `impl` definition, they should be avoided as they go against expectation - --> $DIR/non_local_definitions.rs:359:13 + --> $DIR/non_local_definitions.rs:363:13 | LL | impl MacroTrait for OutsideStruct {} | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -610,5 +610,31 @@ LL | m!(); = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue = note: this warning originates in the macro `m` (in Nightly builds, run with -Z macro-backtrace for more info) -warning: 48 warnings emitted +warning: non-local `impl` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:373:1 + | +LL | non_local_macro::non_local_impl!(CargoUpdate); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: move this `impl` block outside the of the current constant `_IMPL_DEBUG` + = note: an `impl` definition is non-local if it is nested inside an item and neither the type nor the trait are at the same nesting level as the `impl` block + = note: one exception to the rule are anon-const (`const _: () = { ... }`) at top-level module and anon-const at the same nesting as the trait or type + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: the macro `non_local_macro::non_local_impl` may come from an old version of the `non_local_macro` crate, try updating your dependency with `cargo update -p non_local_macro` + = note: this warning originates in the macro `non_local_macro::non_local_impl` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: non-local `macro_rules!` definition, they should be avoided as they go against expectation + --> $DIR/non_local_definitions.rs:376:1 + | +LL | non_local_macro::non_local_macro_rules!(my_macro); + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | + = help: remove the `#[macro_export]` or move this `macro_rules!` outside the of the current constant `_MACRO_EXPORT` + = note: a `macro_rules!` definition is non-local if it is nested inside an item and has a `#[macro_export]` attribute + = note: one exception to the rule are anon-const (`const _: () = { ... 
}`) at top-level module + = note: this lint may become deny-by-default in the edition 2024 and higher, see the tracking issue + = note: the macro `non_local_macro::non_local_macro_rules` may come from an old version of the `non_local_macro` crate, try updating your dependency with `cargo update -p non_local_macro` + = note: this warning originates in the macro `non_local_macro::non_local_macro_rules` (in Nightly builds, run with -Z macro-backtrace for more info) + +warning: 50 warnings emitted From 03cec74a3c4bfea766b352c0d2ff00e769c58886 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 17 Feb 2024 14:41:15 +0100 Subject: [PATCH 033/134] add direct test of pthread_cond --- ...cond.rs => libc_pthread_cond_timedwait.rs} | 0 ...> libc_pthread_cond_timedwait_isolated.rs} | 0 .../miri/tests/pass-dep/shims/pthread-sync.rs | 56 +++++++++++++++++-- 3 files changed, 52 insertions(+), 4 deletions(-) rename src/tools/miri/tests/pass-dep/concurrency/{libc_pthread_cond.rs => libc_pthread_cond_timedwait.rs} (100%) rename src/tools/miri/tests/pass-dep/concurrency/{libc_pthread_cond_isolated.rs => libc_pthread_cond_timedwait_isolated.rs} (100%) diff --git a/src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond.rs b/src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_timedwait.rs similarity index 100% rename from src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond.rs rename to src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_timedwait.rs diff --git a/src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_isolated.rs b/src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_timedwait_isolated.rs similarity index 100% rename from src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_isolated.rs rename to src/tools/miri/tests/pass-dep/concurrency/libc_pthread_cond_timedwait_isolated.rs diff --git a/src/tools/miri/tests/pass-dep/shims/pthread-sync.rs b/src/tools/miri/tests/pass-dep/shims/pthread-sync.rs index 077bbfff1645f..c9d10cb83d4d2 100644 --- a/src/tools/miri/tests/pass-dep/shims/pthread-sync.rs +++ b/src/tools/miri/tests/pass-dep/shims/pthread-sync.rs @@ -4,8 +4,8 @@ #![feature(sync_unsafe_cell)] use std::cell::SyncUnsafeCell; -use std::thread; -use std::{mem, ptr}; +use std::mem::MaybeUninit; +use std::{mem, ptr, thread}; fn main() { test_mutex_libc_init_recursive(); @@ -15,9 +15,10 @@ fn main() { #[cfg(target_os = "linux")] test_mutex_libc_static_initializer_recursive(); - test_mutex(); + check_mutex(); check_rwlock_write(); check_rwlock_read_no_deadlock(); + check_cond(); } fn test_mutex_libc_init_recursive() { @@ -119,7 +120,7 @@ impl Clone for SendPtr { } } -fn test_mutex() { +fn check_mutex() { // Specifically *not* using `Arc` to make sure there is no synchronization apart from the mutex. 
unsafe { let data = SyncUnsafeCell::new((libc::PTHREAD_MUTEX_INITIALIZER, 0)); @@ -213,6 +214,53 @@ fn check_rwlock_read_no_deadlock() { } } +fn check_cond() { + unsafe { + let mut cond: MaybeUninit = MaybeUninit::uninit(); + assert_eq!(libc::pthread_cond_init(cond.as_mut_ptr(), ptr::null()), 0); + let cond = SendPtr { ptr: cond.as_mut_ptr() }; + + let mut mutex: libc::pthread_mutex_t = libc::PTHREAD_MUTEX_INITIALIZER; + let mutex = SendPtr { ptr: &mut mutex }; + + let mut data = 0; + let data = SendPtr { ptr: &mut data }; + + let t = thread::spawn(move || { + let mutex = mutex; // circumvent per-field closure capture + let cond = cond; + let data = data; + assert_eq!(libc::pthread_mutex_lock(mutex.ptr), 0); + assert!(data.ptr.read() == 0); + data.ptr.write(1); + libc::pthread_cond_wait(cond.ptr, mutex.ptr); + assert!(data.ptr.read() == 3); + data.ptr.write(4); + assert_eq!(libc::pthread_mutex_unlock(mutex.ptr), 0); + }); + + thread::yield_now(); + + assert_eq!(libc::pthread_mutex_lock(mutex.ptr), 0); + assert!(data.ptr.read() == 1); + data.ptr.write(2); + assert_eq!(libc::pthread_cond_signal(cond.ptr), 0); + thread::yield_now(); // the other thread wakes up but can't get the lock yet + assert!(data.ptr.read() == 2); + data.ptr.write(3); + assert_eq!(libc::pthread_mutex_unlock(mutex.ptr), 0); + + thread::yield_now(); // now the other thread gets the lock back + + assert_eq!(libc::pthread_mutex_lock(mutex.ptr), 0); + assert!(data.ptr.read() == 4); + assert_eq!(libc::pthread_cond_broadcast(cond.ptr), 0); // just a smoke test + assert_eq!(libc::pthread_mutex_unlock(mutex.ptr), 0); + + t.join().unwrap(); + } +} + // std::sync::RwLock does not even used pthread_rwlock any more. // Do some smoke testing of the API surface. fn test_rwlock_libc_static_initializer() { From d18d4615c4f58e70799f56dc916f839a8019e2cb Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Sun, 18 Feb 2024 04:53:46 +0000 Subject: [PATCH 034/134] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index ab6f899cd3a5e..921a55d48c6c1 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -4316d0c6252cb1f833e582dfa68adb98efd5ddfb +1f8e824f111c972c9df8dbb378d87c33f67bbad4 From 7a83e2f30d92748640461edbd5812e7b9229e995 Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Sun, 18 Feb 2024 05:02:03 +0000 Subject: [PATCH 035/134] fmt --- src/tools/miri/tests/pass/overflow_checks_off.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tools/miri/tests/pass/overflow_checks_off.rs b/src/tools/miri/tests/pass/overflow_checks_off.rs index 7b9d4f8fff5d5..831bffb6c5e8d 100644 --- a/src/tools/miri/tests/pass/overflow_checks_off.rs +++ b/src/tools/miri/tests/pass/overflow_checks_off.rs @@ -7,10 +7,9 @@ // Miri does not implement the codegen-time hack that backs `#[rustc_inherit_overflow_checks]`. 
// use std::ops::*; - // Disable _compile-time_ overflow linting // so that we can test runtime overflow checks - #![allow(arithmetic_overflow)] +#![allow(arithmetic_overflow)] fn main() { assert_eq!(-{ -0x80i8 }, -0x80); From 6b17dba68cff05978c10eb2600c16d4450ad77f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 18 Feb 2024 09:41:20 +0200 Subject: [PATCH 036/134] Merge commit 'ac998a74b3c8ff4b81c3eeb9a18811d4cc76226d' into sync-from-ra --- .github/rust.json | 33 + .github/workflows/autopublish.yaml | 2 +- .github/workflows/ci.yaml | 20 +- .github/workflows/fuzz.yml | 2 +- .github/workflows/metrics.yaml | 16 +- .github/workflows/publish-libs.yaml | 2 +- .github/workflows/release.yaml | 14 +- .github/workflows/rustdoc.yaml | 2 +- Cargo.lock | 17 +- Cargo.toml | 2 +- crates/base-db/src/input.rs | 230 +---- crates/base-db/src/lib.rs | 14 + crates/flycheck/src/lib.rs | 79 +- crates/hir-def/src/attr.rs | 42 +- crates/hir-def/src/body/lower.rs | 11 +- crates/hir-def/src/body/pretty.rs | 6 + crates/hir-def/src/body/scope.rs | 1 + crates/hir-def/src/data.rs | 12 +- crates/hir-def/src/expander.rs | 6 +- crates/hir-def/src/find_path.rs | 63 +- crates/hir-def/src/hir.rs | 9 + .../src/macro_expansion_tests/mbe/matching.rs | 2 +- .../macro_expansion_tests/mbe/meta_syntax.rs | 42 +- .../macro_expansion_tests/mbe/metavar_expr.rs | 6 +- .../macro_expansion_tests/mbe/regression.rs | 54 ++ .../mbe/tt_conversion.rs | 4 +- .../hir-def/src/macro_expansion_tests/mod.rs | 1 + crates/hir-def/src/nameres/collector.rs | 95 +- crates/hir-def/src/nameres/diagnostics.rs | 3 + crates/hir-expand/src/builtin_fn_macro.rs | 2 +- crates/hir-expand/src/change.rs | 34 +- crates/hir-expand/src/db.rs | 154 ++-- crates/hir-expand/src/declarative.rs | 14 +- crates/hir-expand/src/lib.rs | 26 +- crates/hir-expand/src/mod_path.rs | 15 + crates/hir-expand/src/proc_macro.rs | 46 +- crates/hir-ty/src/diagnostics/expr.rs | 13 +- .../diagnostics/match_check/pat_analysis.rs | 46 +- crates/hir-ty/src/infer.rs | 133 ++- crates/hir-ty/src/infer/closure.rs | 4 + crates/hir-ty/src/infer/expr.rs | 23 + crates/hir-ty/src/infer/mutability.rs | 4 + crates/hir-ty/src/infer/unify.rs | 83 +- crates/hir-ty/src/layout/target.rs | 8 +- crates/hir-ty/src/layout/tests.rs | 12 +- crates/hir-ty/src/lib.rs | 4 +- crates/hir-ty/src/mir/borrowck.rs | 185 +++- crates/hir-ty/src/mir/lower.rs | 4 +- crates/hir-ty/src/tests/diagnostics.rs | 42 + crates/hir-ty/src/tests/simple.rs | 3 - crates/hir/src/lib.rs | 246 ++++- crates/hir/src/term_search.rs | 298 ++++++ crates/hir/src/term_search/expr.rs | 468 ++++++++++ crates/hir/src/term_search/tactics.rs | 859 ++++++++++++++++++ .../src/handlers/fix_visibility.rs | 4 +- .../src/handlers/generate_trait_from_impl.rs | 104 +-- .../ide-assists/src/handlers/term_search.rs | 253 ++++++ crates/ide-assists/src/lib.rs | 7 +- crates/ide-assists/src/tests/generated.rs | 4 +- crates/ide-completion/src/completions.rs | 10 +- crates/ide-completion/src/completions/expr.rs | 56 ++ .../src/completions/flyimport.rs | 2 + crates/ide-completion/src/completions/type.rs | 20 + crates/ide-completion/src/config.rs | 1 + crates/ide-completion/src/context.rs | 3 +- crates/ide-completion/src/item.rs | 50 + crates/ide-completion/src/render.rs | 443 ++++++++- crates/ide-completion/src/render/function.rs | 50 +- crates/ide-completion/src/tests.rs | 1 + crates/ide-completion/src/tests/expression.rs | 32 +- crates/ide-completion/src/tests/flyimport.rs | 19 + crates/ide-completion/src/tests/record.rs | 2 + 
crates/ide-completion/src/tests/special.rs | 40 +- crates/ide-completion/src/tests/type_pos.rs | 40 + crates/ide-db/src/famous_defs.rs | 8 + crates/ide-db/src/path_transform.rs | 2 +- crates/ide-db/src/rename.rs | 5 +- crates/ide-db/src/source_change.rs | 68 +- crates/ide-db/src/syntax_helpers/node_ext.rs | 1 + .../src/handlers/incorrect_case.rs | 2 +- .../src/handlers/macro_error.rs | 2 +- .../src/handlers/missing_match_arms.rs | 18 + .../src/handlers/remove_trailing_return.rs | 12 + .../src/handlers/type_mismatch.rs | 3 +- .../src/handlers/typed_hole.rs | 256 +++++- crates/ide-diagnostics/src/tests.rs | 85 ++ crates/ide/src/doc_links.rs | 2 +- crates/ide/src/hover/tests.rs | 8 +- crates/ide/src/lib.rs | 12 +- crates/ide/src/parent_module.rs | 2 +- crates/ide/src/rename.rs | 41 +- crates/ide/src/shuffle_crate_graph.rs | 2 - crates/ide/src/static_index.rs | 23 +- crates/ide/src/status.rs | 8 - crates/load-cargo/src/lib.rs | 74 +- crates/mbe/src/expander/transcriber.rs | 18 +- crates/mbe/src/syntax_bridge.rs | 6 +- crates/parser/src/grammar/expressions.rs | 29 +- crates/parser/src/grammar/expressions/atom.rs | 14 + crates/parser/src/grammar/generic_params.rs | 10 + crates/parser/src/grammar/patterns.rs | 9 + crates/parser/src/syntax_kind/generated.rs | 6 +- ...cord_literal_before_ellipsis_recovery.rast | 36 +- ...0032_record_literal_field_eq_recovery.rast | 41 + .../0032_record_literal_field_eq_recovery.rs | 3 + .../0033_record_pat_field_eq_recovery.rast | 43 + .../err/0033_record_pat_field_eq_recovery.rs | 3 + .../parser/inline/ok/0209_become_expr.rast | 31 + .../parser/inline/ok/0209_become_expr.rs | 3 + .../inline/ok/0211_async_trait_bound.rast | 43 + .../inline/ok/0211_async_trait_bound.rs | 1 + .../inline/ok/0212_const_trait_bound.rast | 34 + .../inline/ok/0212_const_trait_bound.rs | 1 + crates/proc-macro-api/src/lib.rs | 8 +- crates/proc-macro-api/src/process.rs | 25 +- crates/proc-macro-srv/Cargo.toml | 1 + crates/proc-macro-srv/src/lib.rs | 5 + .../src/server/rust_analyzer_span.rs | 55 +- crates/proc-macro-srv/src/server/token_id.rs | 55 +- crates/proc-macro-srv/src/tests/mod.rs | 18 +- crates/project-model/src/build_scripts.rs | 25 +- crates/project-model/src/cargo_workspace.rs | 39 +- crates/project-model/src/project_json.rs | 3 +- crates/project-model/src/rustc_cfg.rs | 46 +- crates/project-model/src/sysroot.rs | 124 ++- .../project-model/src/target_data_layout.rs | 60 +- crates/project-model/src/tests.rs | 71 +- crates/project-model/src/workspace.rs | 549 ++++++----- .../cargo_hello_world_project_model.txt | 27 - ...project_model_with_selective_overrides.txt | 27 - ..._project_model_with_wildcard_overrides.txt | 27 - ...rust_project_hello_world_project_model.txt | 60 -- crates/rust-analyzer/src/bin/main.rs | 28 +- crates/rust-analyzer/src/bin/rustc_wrapper.rs | 19 +- crates/rust-analyzer/src/cargo_target_spec.rs | 2 +- .../rust-analyzer/src/cli/analysis_stats.rs | 216 ++++- crates/rust-analyzer/src/cli/flags.rs | 7 + crates/rust-analyzer/src/cli/scip.rs | 31 +- crates/rust-analyzer/src/config.rs | 19 +- crates/rust-analyzer/src/global_state.rs | 41 +- .../src/handlers/notification.rs | 22 +- crates/rust-analyzer/src/handlers/request.rs | 11 +- .../src/integrated_benchmarks.rs | 3 + crates/rust-analyzer/src/lib.rs | 4 +- crates/rust-analyzer/src/lsp/to_proto.rs | 673 ++++++++++++-- crates/rust-analyzer/src/main_loop.rs | 41 +- crates/rust-analyzer/src/reload.rs | 193 +++- crates/rust-analyzer/tests/crate_graph.rs | 118 +++ crates/rust-analyzer/tests/slow-tests/main.rs | 16 +- 
.../rust-analyzer/tests/slow-tests/support.rs | 9 +- crates/rust-analyzer/tests/slow-tests/tidy.rs | 21 - .../deduplication_crate_graph_A.json | 0 .../deduplication_crate_graph_B.json | 0 crates/salsa/src/doctest.rs | 115 --- crates/salsa/src/lib.rs | 1 - crates/syntax/rust.ungram | 6 +- crates/syntax/src/ast/edit_in_place.rs | 30 +- crates/syntax/src/ast/generated/nodes.rs | 37 +- crates/syntax/src/ast/make.rs | 2 +- crates/syntax/src/ast/node_ext.rs | 20 + crates/syntax/src/ast/prec.rs | 8 +- crates/syntax/src/lib.rs | 5 - crates/syntax/src/tests/ast_src.rs | 6 +- crates/test-fixture/src/lib.rs | 90 +- crates/test-utils/src/fixture.rs | 38 +- crates/test-utils/src/minicore.rs | 33 + crates/toolchain/src/lib.rs | 42 +- crates/tt/src/lib.rs | 1 + docs/user/generated_config.adoc | 17 +- editors/code/.vscodeignore | 3 + .../code/language-configuration-rustdoc.json | 37 + editors/code/package.json | 44 +- editors/code/rustdoc-inject.json | 93 ++ editors/code/rustdoc.json | 82 ++ editors/code/src/rust_project.ts | 19 + lib/lsp-server/LICENSE-APACHE | 1 + lib/lsp-server/LICENSE-MIT | 1 + xtask/src/metrics.rs | 2 - 178 files changed, 7095 insertions(+), 1959 deletions(-) create mode 100644 .github/rust.json create mode 100644 crates/hir/src/term_search.rs create mode 100644 crates/hir/src/term_search/expr.rs create mode 100644 crates/hir/src/term_search/tactics.rs create mode 100644 crates/ide-assists/src/handlers/term_search.rs create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast create mode 100644 crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0209_become_expr.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast create mode 100644 crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs create mode 100644 crates/rust-analyzer/tests/crate_graph.rs rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_A.json (100%) rename crates/{project-model => rust-analyzer/tests}/test_data/deduplication_crate_graph_B.json (100%) delete mode 100644 crates/salsa/src/doctest.rs create mode 100644 editors/code/language-configuration-rustdoc.json create mode 100644 editors/code/rustdoc-inject.json create mode 100644 editors/code/rustdoc.json create mode 120000 lib/lsp-server/LICENSE-APACHE create mode 120000 lib/lsp-server/LICENSE-MIT diff --git a/.github/rust.json b/.github/rust.json new file mode 100644 index 0000000000000..ddaa1b0824b94 --- /dev/null +++ b/.github/rust.json @@ -0,0 +1,33 @@ +{ + "problemMatcher": [ + { + "owner": "rustfmt", + "severity": "warning", + "pattern": [ + { + "regexp": "^(Diff in (.+)) at line (\\d+):$", + "message": 1, + "file": 2, + "line": 3 + } + ] + }, + { + "owner": "clippy", + "pattern": [ + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*(warning|warn|error)(?:\\x1b\\[[\\d;]+m)*(\\[(.*)\\])?(?:\\x1b\\[[\\d;]+m)*:(?:\\x1b\\[[\\d;]+m)* ([^\\x1b]*)(?:\\x1b\\[[\\d;]+m)*$", + "severity": 1, + "message": 4, + "code": 3 + 
}, + { + "regexp": "^(?:\\x1b\\[[\\d;]+m)*\\s*(?:\\x1b\\[[\\d;]+m)*\\s*--> (?:\\x1b\\[[\\d;]+m)*(.*):(\\d*):(\\d*)(?:\\x1b\\[[\\d;]+m)*$", + "file": 1, + "line": 2, + "column": 3 + } + ] + } + ] +} diff --git a/.github/workflows/autopublish.yaml b/.github/workflows/autopublish.yaml index 9a5015005b3dc..4b97637088c31 100644 --- a/.github/workflows/autopublish.yaml +++ b/.github/workflows/autopublish.yaml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 964be478fa3aa..62fbd57abc165 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -27,7 +27,7 @@ jobs: typescript: ${{ steps.filter.outputs.typescript }} proc_macros: ${{ steps.filter.outputs.proc_macros }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: dorny/paths-filter@1441771bbfdd59dcd748680ee64ebd8faab1a242 id: filter with: @@ -56,7 +56,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} @@ -65,6 +65,10 @@ jobs: rustup update --no-self-update ${{ env.RUST_CHANNEL }} rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src rustup default ${{ env.RUST_CHANNEL }} + # https://github.com/actions-rust-lang/setup-rust-toolchain/blob/main/rust.json + - name: Install Rust Problem Matcher + if: matrix.os == 'ubuntu-latest' + run: echo "::add-matcher::.github/rust.json" - name: Cache Dependencies uses: Swatinem/rust-cache@988c164c3d0e93c4dbab36aaf5bbeb77425b2894 @@ -107,6 +111,10 @@ jobs: if: matrix.os == 'windows-latest' run: cargo clippy --all-targets -- -D clippy::disallowed_macros -D clippy::dbg_macro -D clippy::todo -D clippy::print_stdout -D clippy::print_stderr + - name: rustfmt + if: matrix.os == 'ubuntu-latest' + run: cargo fmt -- --check + # Weird targets to catch non-portable code rust-cross: if: github.repository == 'rust-lang/rust-analyzer' @@ -121,7 +129,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: | @@ -153,13 +161,13 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 if: needs.changes.outputs.typescript == 'true' - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 if: needs.changes.outputs.typescript == 'true' - name: Install xvfb diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 5af8aa1f77aac..f88c7f95d5c96 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -27,7 +27,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 1 diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index e6a9917a0bf3d..be9f504e59966 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -21,7 +21,7 @@ jobs: rustup component add rustfmt rust-src rustup default stable - name: Cache cargo - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -36,10 +36,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: 
path: | ~/.cargo/bin/ @@ -52,7 +52,7 @@ jobs: run: cargo xtask metrics build - name: Cache target - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -73,10 +73,10 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Restore cargo cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: | ~/.cargo/bin/ @@ -86,7 +86,7 @@ jobs: key: ${{ runner.os }}-cargo-${{ github.sha }} - name: Restore target cache - uses: actions/cache@v3 + uses: actions/cache@v4 with: path: target/ key: ${{ runner.os }}-target-${{ github.sha }} @@ -106,7 +106,7 @@ jobs: needs: [build_metrics, other_metrics] steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download build metrics uses: actions/download-artifact@v3 diff --git a/.github/workflows/publish-libs.yaml b/.github/workflows/publish-libs.yaml index 6d026c9ad910b..862373ec1cce0 100644 --- a/.github/workflows/publish-libs.yaml +++ b/.github/workflows/publish-libs.yaml @@ -13,7 +13,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 9077a9ac21eb6..adb1c85051610 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -78,9 +78,9 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' @@ -154,7 +154,7 @@ jobs: run: apk add --no-cache git clang lld musl-dev nodejs npm - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} @@ -188,9 +188,9 @@ jobs: needs: ["dist", "dist-x86_64-unknown-linux-musl"] steps: - name: Install Nodejs - uses: actions/setup-node@v3 + uses: actions/setup-node@v4 with: - node-version: 16 + node-version: 18 - run: echo "TAG=$(date --iso -u)" >> $GITHUB_ENV if: github.ref == 'refs/heads/release' @@ -199,7 +199,7 @@ jobs: - run: 'echo "TAG: $TAG"' - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: fetch-depth: ${{ env.FETCH_DEPTH }} diff --git a/.github/workflows/rustdoc.yaml b/.github/workflows/rustdoc.yaml index 05f3e254e5f5a..12a1a791fda2e 100644 --- a/.github/workflows/rustdoc.yaml +++ b/.github/workflows/rustdoc.yaml @@ -17,7 +17,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Install Rust toolchain run: rustup update --no-self-update stable diff --git a/Cargo.lock b/Cargo.lock index dc2bf3a76943e..7b29d7bb798df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1329,6 +1329,7 @@ dependencies = [ "paths", "proc-macro-api", "proc-macro-test", + "ra-ap-rustc_lexer", "span", "stdx", "tt", @@ -1470,12 +1471,12 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8a41dee58608b1fc93779ea365edaa70ac9927e3335ae914b675be0fa063cd7" +checksum = 
"df5a0ba0d08af366cf235dbe8eb7226cced7a4fe502c98aa434ccf416defd746" dependencies = [ "arrayvec", - "ra-ap-rustc_index_macros 0.36.0", + "ra-ap-rustc_index_macros 0.37.0", "smallvec", ] @@ -1493,9 +1494,9 @@ dependencies = [ [[package]] name = "ra-ap-rustc_index_macros" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbfe98def54c4337a2f7d8233850bd5d5349972b185fe8a0db2b979164b30ed8" +checksum = "1971ebf9a701e0e68387c264a32517dcb4861ad3a4862f2e2803c1121ade20d5" dependencies = [ "proc-macro2", "quote", @@ -1525,11 +1526,11 @@ dependencies = [ [[package]] name = "ra-ap-rustc_pattern_analysis" -version = "0.36.0" +version = "0.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5529bffec7530b4a3425640bfdfd9b95d87c4c620f740266c0de6572561aab4" +checksum = "2c3c0e7ca9c5bdc66e3b590688e237a22ac47a48e4eac7f46b05b2abbfaf0abd" dependencies = [ - "ra-ap-rustc_index 0.36.0", + "ra-ap-rustc_index 0.37.0", "rustc-hash", "rustc_apfloat", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index 2b81f7b11b238..49c7d369190ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -84,7 +84,7 @@ ra-ap-rustc_lexer = { version = "0.35.0", default-features = false } ra-ap-rustc_parse_format = { version = "0.35.0", default-features = false } ra-ap-rustc_index = { version = "0.35.0", default-features = false } ra-ap-rustc_abi = { version = "0.35.0", default-features = false } -ra-ap-rustc_pattern_analysis = { version = "0.36.0", default-features = false } +ra-ap-rustc_pattern_analysis = { version = "0.37.0", default-features = false } # local crates that aren't published to crates.io. These should not have versions. sourcegen = { path = "./crates/sourcegen" } diff --git a/crates/base-db/src/input.rs b/crates/base-db/src/input.rs index 9560826e373e0..a817cd0c3ac2f 100644 --- a/crates/base-db/src/input.rs +++ b/crates/base-db/src/input.rs @@ -11,7 +11,6 @@ use std::{fmt, mem, ops, str::FromStr}; use cfg::CfgOptions; use la_arena::{Arena, Idx, RawIdx}; use rustc_hash::{FxHashMap, FxHashSet}; -use semver::Version; use syntax::SmolStr; use triomphe::Arc; use vfs::{file_set::FileSet, AbsPathBuf, AnchoredPath, FileId, VfsPath}; @@ -243,6 +242,7 @@ impl CrateDisplayName { CrateDisplayName { crate_name, canonical_name } } } + pub type TargetLayoutLoadResult = Result, Arc>; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)] @@ -291,71 +291,6 @@ pub struct CrateData { pub dependencies: Vec, pub origin: CrateOrigin, pub is_proc_macro: bool, - // FIXME: These things should not be per crate! These are more per workspace crate graph level - // things. This info does need to be somewhat present though as to prevent deduplication from - // happening across different workspaces with different layouts. - pub target_layout: TargetLayoutLoadResult, - pub toolchain: Option, -} - -impl CrateData { - /// Check if [`other`] is almost equal to [`self`] ignoring `CrateOrigin` value. - pub fn eq_ignoring_origin_and_deps(&self, other: &CrateData, ignore_dev_deps: bool) -> bool { - // This method has some obscure bits. These are mostly there to be compliant with - // some patches. References to the patches are given. 
- if self.root_file_id != other.root_file_id { - return false; - } - - if self.display_name != other.display_name { - return false; - } - - if self.is_proc_macro != other.is_proc_macro { - return false; - } - - if self.edition != other.edition { - return false; - } - - if self.version != other.version { - return false; - } - - let mut opts = self.cfg_options.difference(&other.cfg_options); - if let Some(it) = opts.next() { - // Don't care if rust_analyzer CfgAtom is the only cfg in the difference set of self's and other's cfgs. - // https://github.com/rust-lang/rust-analyzer/blob/0840038f02daec6ba3238f05d8caa037d28701a0/crates/project-model/src/workspace.rs#L894 - if it.to_string() != "rust_analyzer" { - return false; - } - - if opts.next().is_some() { - return false; - } - } - - if self.env != other.env { - return false; - } - - let slf_deps = self.dependencies.iter(); - let other_deps = other.dependencies.iter(); - - if ignore_dev_deps { - return slf_deps - .clone() - .filter(|it| it.kind != DependencyKind::Dev) - .eq(other_deps.clone().filter(|it| it.kind != DependencyKind::Dev)); - } - - slf_deps.eq(other_deps) - } - - pub fn channel(&self) -> Option { - self.toolchain.as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) - } } #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] @@ -398,32 +333,22 @@ pub enum DependencyKind { pub struct Dependency { pub crate_id: CrateId, pub name: CrateName, - kind: DependencyKind, prelude: bool, } impl Dependency { - pub fn new(name: CrateName, crate_id: CrateId, kind: DependencyKind) -> Self { - Self { name, crate_id, prelude: true, kind } + pub fn new(name: CrateName, crate_id: CrateId) -> Self { + Self { name, crate_id, prelude: true } } - pub fn with_prelude( - name: CrateName, - crate_id: CrateId, - prelude: bool, - kind: DependencyKind, - ) -> Self { - Self { name, crate_id, prelude, kind } + pub fn with_prelude(name: CrateName, crate_id: CrateId, prelude: bool) -> Self { + Self { name, crate_id, prelude } } /// Whether this dependency is to be added to the depending crate's extern prelude. pub fn is_prelude(&self) -> bool { self.prelude } - - pub fn kind(&self) -> DependencyKind { - self.kind - } } impl CrateGraph { @@ -438,8 +363,6 @@ impl CrateGraph { env: Env, is_proc_macro: bool, origin: CrateOrigin, - target_layout: Result, Arc>, - toolchain: Option, ) -> CrateId { let data = CrateData { root_file_id, @@ -451,9 +374,7 @@ impl CrateGraph { env, dependencies: Vec::new(), origin, - target_layout, is_proc_macro, - toolchain, }; self.arena.alloc(data) } @@ -523,6 +444,10 @@ impl CrateGraph { self.arena.is_empty() } + pub fn len(&self) -> usize { + self.arena.len() + } + pub fn iter(&self) -> impl Iterator + '_ { self.arena.iter().map(|(idx, _)| idx) } @@ -623,13 +548,17 @@ impl CrateGraph { /// /// This will deduplicate the crates of the graph where possible. /// Note that for deduplication to fully work, `self`'s crate dependencies must be sorted by crate id. - /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also have the crate dependencies sorted. + /// If the crate dependencies were sorted, the resulting graph from this `extend` call will also + /// have the crate dependencies sorted. + /// + /// Returns a mapping from `other`'s crate ids to the new crate ids in `self`. 
pub fn extend( &mut self, mut other: CrateGraph, proc_macros: &mut ProcMacroPaths, - on_finished: impl FnOnce(&FxHashMap), - ) { + merge: impl Fn((CrateId, &mut CrateData), (CrateId, &CrateData)) -> bool, + ) -> FxHashMap { + let m = self.len(); let topo = other.crates_in_topological_order(); let mut id_map: FxHashMap = FxHashMap::default(); for topo in topo { @@ -637,51 +566,21 @@ impl CrateGraph { crate_data.dependencies.iter_mut().for_each(|dep| dep.crate_id = id_map[&dep.crate_id]); crate_data.dependencies.sort_by_key(|dep| dep.crate_id); - let res = self.arena.iter().find_map(|(id, data)| { - match (&data.origin, &crate_data.origin) { - (a, b) if a == b => { - if data.eq_ignoring_origin_and_deps(crate_data, false) { - return Some((id, false)); - } - } - (a @ CrateOrigin::Local { .. }, CrateOrigin::Library { .. }) - | (a @ CrateOrigin::Library { .. }, CrateOrigin::Local { .. }) => { - // If the origins differ, check if the two crates are equal without - // considering the dev dependencies, if they are, they most likely are in - // different loaded workspaces which may cause issues. We keep the local - // version and discard the library one as the local version may have - // dev-dependencies that we want to keep resolving. See #15656 for more - // information. - if data.eq_ignoring_origin_and_deps(crate_data, true) { - return Some((id, !a.is_local())); - } - } - (_, _) => return None, - } - - None - }); - - if let Some((res, should_update_lib_to_local)) = res { - id_map.insert(topo, res); - if should_update_lib_to_local { - assert!(self.arena[res].origin.is_lib()); - assert!(crate_data.origin.is_local()); - self.arena[res].origin = crate_data.origin.clone(); - - // Move local's dev dependencies into the newly-local-formerly-lib crate. - self.arena[res].dependencies = crate_data.dependencies.clone(); - } - } else { - let id = self.arena.alloc(crate_data.clone()); - id_map.insert(topo, id); - } + let res = self + .arena + .iter_mut() + .take(m) + .find_map(|(id, data)| merge((id, data), (topo, &crate_data)).then_some(id)); + + let new_id = + if let Some(res) = res { res } else { self.arena.alloc(crate_data.clone()) }; + id_map.insert(topo, new_id); } *proc_macros = mem::take(proc_macros).into_iter().map(|(id, macros)| (id_map[&id], macros)).collect(); - on_finished(&id_map); + id_map } fn find_path( @@ -719,11 +618,9 @@ impl CrateGraph { match (cfg_if, std) { (Some(cfg_if), Some(std)) => { self.arena[cfg_if].dependencies.clear(); - self.arena[std].dependencies.push(Dependency::new( - CrateName::new("cfg_if").unwrap(), - cfg_if, - DependencyKind::Normal, - )); + self.arena[std] + .dependencies + .push(Dependency::new(CrateName::new("cfg_if").unwrap(), cfg_if)); true } _ => false, @@ -871,7 +768,7 @@ impl fmt::Display for CyclicDependenciesError { #[cfg(test)] mod tests { - use crate::{CrateOrigin, DependencyKind}; + use crate::CrateOrigin; use super::{CrateGraph, CrateName, Dependency, Edition::Edition2018, Env, FileId}; @@ -888,8 +785,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -901,8 +796,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -914,26 +807,15 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - 
Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); assert!(graph - .add_dep( - crate3, - Dependency::new(CrateName::new("crate1").unwrap(), crate1, DependencyKind::Normal) - ) + .add_dep(crate3, Dependency::new(CrateName::new("crate1").unwrap(), crate1,)) .is_err()); } @@ -950,8 +832,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -963,20 +843,12 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_err()); } @@ -993,8 +865,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1006,8 +876,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate3 = graph.add_crate_root( FileId::from_raw(3u32), @@ -1019,20 +887,12 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph - .add_dep( - crate1, - Dependency::new(CrateName::new("crate2").unwrap(), crate2, DependencyKind::Normal) - ) + .add_dep(crate1, Dependency::new(CrateName::new("crate2").unwrap(), crate2,)) .is_ok()); assert!(graph - .add_dep( - crate2, - Dependency::new(CrateName::new("crate3").unwrap(), crate3, DependencyKind::Normal) - ) + .add_dep(crate2, Dependency::new(CrateName::new("crate3").unwrap(), crate3,)) .is_ok()); } @@ -1049,8 +909,6 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); let crate2 = graph.add_crate_root( FileId::from_raw(2u32), @@ -1062,26 +920,16 @@ mod tests { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("".into()), - None, ); assert!(graph .add_dep( crate1, - Dependency::new( - CrateName::normalize_dashes("crate-name-with-dashes"), - crate2, - DependencyKind::Normal - ) + Dependency::new(CrateName::normalize_dashes("crate-name-with-dashes"), crate2,) ) .is_ok()); assert_eq!( graph[crate1].dependencies, - vec![Dependency::new( - CrateName::new("crate_name_with_dashes").unwrap(), - crate2, - DependencyKind::Normal - )] + vec![Dependency::new(CrateName::new("crate_name_with_dashes").unwrap(), crate2,)] ); } } diff --git a/crates/base-db/src/lib.rs b/crates/base-db/src/lib.rs index d7fc9d4c95cd6..cb2e6cdaa28dc 100644 --- a/crates/base-db/src/lib.rs +++ b/crates/base-db/src/lib.rs @@ -62,6 +62,20 @@ pub trait SourceDatabase: FileLoader + std::fmt::Debug { /// The crate graph. 
#[salsa::input] fn crate_graph(&self) -> Arc; + + // FIXME: Consider removing this, making HirDatabase::target_data_layout an input query + #[salsa::input] + fn data_layout(&self, krate: CrateId) -> TargetLayoutLoadResult; + + #[salsa::input] + fn toolchain(&self, krate: CrateId) -> Option; + + #[salsa::transparent] + fn toolchain_channel(&self, krate: CrateId) -> Option; +} + +fn toolchain_channel(db: &dyn SourceDatabase, krate: CrateId) -> Option { + db.toolchain(krate).as_ref().and_then(|v| ReleaseChannel::from_str(&v.pre)) } fn parse(db: &dyn SourceDatabase, file_id: FileId) -> Parse { diff --git a/crates/flycheck/src/lib.rs b/crates/flycheck/src/lib.rs index c59aff2a8bbf8..ee39a2790bc37 100644 --- a/crates/flycheck/src/lib.rs +++ b/crates/flycheck/src/lib.rs @@ -14,7 +14,7 @@ use std::{ use command_group::{CommandGroup, GroupChild}; use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; -use paths::AbsPathBuf; +use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::Deserialize; use stdx::process::streaming_output; @@ -23,6 +23,7 @@ pub use cargo_metadata::diagnostic::{ Applicability, Diagnostic, DiagnosticCode, DiagnosticLevel, DiagnosticSpan, DiagnosticSpanMacroExpansion, }; +use toolchain::Tool; #[derive(Copy, Clone, Debug, Default, PartialEq, Eq)] pub enum InvocationStrategy { @@ -89,9 +90,10 @@ impl FlycheckHandle { id: usize, sender: Box, config: FlycheckConfig, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckHandle { - let actor = FlycheckActor::new(id, sender, config, workspace_root); + let actor = FlycheckActor::new(id, sender, config, sysroot_root, workspace_root); let (sender, receiver) = unbounded::(); let thread = stdx::thread::Builder::new(stdx::thread::ThreadIntent::Worker) .name("Flycheck".to_owned()) @@ -101,13 +103,15 @@ impl FlycheckHandle { } /// Schedule a re-start of the cargo check worker to do a workspace wide check. - pub fn restart_workspace(&self) { - self.sender.send(StateChange::Restart(None)).unwrap(); + pub fn restart_workspace(&self, saved_file: Option) { + self.sender.send(StateChange::Restart { package: None, saved_file }).unwrap(); } /// Schedule a re-start of the cargo check worker to do a package wide check. pub fn restart_for_package(&self, package: String) { - self.sender.send(StateChange::Restart(Some(package))).unwrap(); + self.sender + .send(StateChange::Restart { package: Some(package), saved_file: None }) + .unwrap(); } /// Stop this cargo check worker. @@ -158,7 +162,7 @@ pub enum Progress { } enum StateChange { - Restart(Option), + Restart { package: Option, saved_file: Option }, Cancel, } @@ -171,6 +175,7 @@ struct FlycheckActor { /// Either the workspace root of the workspace we are flychecking, /// or the project root of the project. root: AbsPathBuf, + sysroot_root: Option, /// CargoHandle exists to wrap around the communication needed to be able to /// run `cargo check` without blocking. 
Currently the Rust standard library /// doesn't provide a way to read sub-process output without blocking, so we @@ -184,15 +189,25 @@ enum Event { CheckEvent(Option), } +const SAVED_FILE_PLACEHOLDER: &str = "$saved_file"; + impl FlycheckActor { fn new( id: usize, sender: Box, config: FlycheckConfig, + sysroot_root: Option, workspace_root: AbsPathBuf, ) -> FlycheckActor { tracing::info!(%id, ?workspace_root, "Spawning flycheck"); - FlycheckActor { id, sender, config, root: workspace_root, command_handle: None } + FlycheckActor { + id, + sender, + config, + sysroot_root, + root: workspace_root, + command_handle: None, + } } fn report_progress(&self, progress: Progress) { @@ -218,7 +233,7 @@ impl FlycheckActor { tracing::debug!(flycheck_id = self.id, "flycheck cancelled"); self.cancel_check_process(); } - Event::RequestStateChange(StateChange::Restart(package)) => { + Event::RequestStateChange(StateChange::Restart { package, saved_file }) => { // Cancel the previously spawned process self.cancel_check_process(); while let Ok(restart) = inbox.recv_timeout(Duration::from_millis(50)) { @@ -228,7 +243,11 @@ impl FlycheckActor { } } - let command = self.check_command(package.as_deref()); + let command = + match self.check_command(package.as_deref(), saved_file.as_deref()) { + Some(c) => c, + None => continue, + }; let formatted_command = format!("{:?}", command); tracing::debug!(?command, "will restart flycheck"); @@ -302,7 +321,14 @@ impl FlycheckActor { } } - fn check_command(&self, package: Option<&str>) -> Command { + /// Construct a `Command` object for checking the user's code. If the user + /// has specified a custom command with placeholders that we cannot fill, + /// return None. + fn check_command( + &self, + package: Option<&str>, + saved_file: Option<&AbsPath>, + ) -> Option { let (mut cmd, args) = match &self.config { FlycheckConfig::CargoCommand { command, @@ -316,7 +342,10 @@ impl FlycheckActor { ansi_color_output, target_dir, } => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + if let Some(sysroot_root) = &self.sysroot_root { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(sysroot_root)); + } cmd.arg(command); cmd.current_dir(&self.root); @@ -355,7 +384,7 @@ impl FlycheckActor { cmd.arg("--target-dir").arg(target_dir); } cmd.envs(extra_env); - (cmd, extra_args) + (cmd, extra_args.clone()) } FlycheckConfig::CustomCommand { command, @@ -384,12 +413,34 @@ impl FlycheckActor { } } - (cmd, args) + if args.contains(&SAVED_FILE_PLACEHOLDER.to_owned()) { + // If the custom command has a $saved_file placeholder, and + // we're saving a file, replace the placeholder in the arguments. + if let Some(saved_file) = saved_file { + let args = args + .iter() + .map(|arg| { + if arg == SAVED_FILE_PLACEHOLDER { + saved_file.to_string() + } else { + arg.clone() + } + }) + .collect(); + (cmd, args) + } else { + // The custom command has a $saved_file placeholder, + // but we had an IDE event that wasn't a file save. Do nothing. 
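+                        // The caller treats `None` as "nothing to check": it keeps waiting
+                        // for the next event instead of restarting flycheck.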
+ return None; + } + } else { + (cmd, args.clone()) + } } }; cmd.args(args); - cmd + Some(cmd) } fn send(&self, check_task: Message) { diff --git a/crates/hir-def/src/attr.rs b/crates/hir-def/src/attr.rs index c91a5497262b7..519706c65f29b 100644 --- a/crates/hir-def/src/attr.rs +++ b/crates/hir-def/src/attr.rs @@ -377,27 +377,39 @@ impl AttrsWithOwner { AttrDefId::GenericParamId(it) => match it { GenericParamId::ConstParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::TypeParamId(it) => { let src = it.parent().child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id()]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. + match src.value.get(it.local_id()) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } GenericParamId::LifetimeParamId(it) => { let src = it.parent.child_source(db); - RawAttrs::from_attrs_owner( - db.upcast(), - src.with_value(&src.value[it.local_id]), - db.span_map(src.file_id).as_ref(), - ) + // FIXME: We should be never getting `None` here. + match src.value.get(it.local_id) { + Some(val) => RawAttrs::from_attrs_owner( + db.upcast(), + src.with_value(val), + db.span_map(src.file_id).as_ref(), + ), + None => RawAttrs::EMPTY, + } } }, AttrDefId::ExternBlockId(it) => attrs_from_item_tree_loc(db, it), diff --git a/crates/hir-def/src/body/lower.rs b/crates/hir-def/src/body/lower.rs index 29ac666277d0d..5dc5fedd23070 100644 --- a/crates/hir-def/src/body/lower.rs +++ b/crates/hir-def/src/body/lower.rs @@ -416,6 +416,11 @@ impl ExprCollector<'_> { let expr = e.expr().map(|e| self.collect_expr(e)); self.alloc_expr(Expr::Return { expr }, syntax_ptr) } + ast::Expr::BecomeExpr(e) => { + let expr = + e.expr().map(|e| self.collect_expr(e)).unwrap_or_else(|| self.missing_expr()); + self.alloc_expr(Expr::Become { expr }, syntax_ptr) + } ast::Expr::YieldExpr(e) => { self.is_lowering_coroutine = true; let expr = e.expr().map(|e| self.collect_expr(e)); @@ -1000,10 +1005,6 @@ impl ExprCollector<'_> { krate: *krate, }); } - Some(ExpandError::RecursionOverflowPoisoned) => { - // Recursion limit has been reached in the macro expansion tree, but not in - // this very macro call. Don't add diagnostics to avoid duplication. 
- } Some(err) => { self.source_map.diagnostics.push(BodyDiagnostic::MacroError { node: InFile::new(outer_file, syntax_ptr), @@ -1112,7 +1113,7 @@ impl ExprCollector<'_> { statements.push(Statement::Expr { expr, has_semi }); } } - ast::Stmt::Item(_item) => (), + ast::Stmt::Item(_item) => statements.push(Statement::Item), } } diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 4afb408651703..7007dea638ef9 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -261,6 +261,11 @@ impl Printer<'_> { self.print_expr(*expr); } } + Expr::Become { expr } => { + w!(self, "become"); + self.whitespace(); + self.print_expr(*expr); + } Expr::Yield { expr } => { w!(self, "yield"); if let Some(expr) = expr { @@ -623,6 +628,7 @@ impl Printer<'_> { } wln!(self); } + Statement::Item => (), } } diff --git a/crates/hir-def/src/body/scope.rs b/crates/hir-def/src/body/scope.rs index ab623250d4072..69b82ae871a4e 100644 --- a/crates/hir-def/src/body/scope.rs +++ b/crates/hir-def/src/body/scope.rs @@ -197,6 +197,7 @@ fn compute_block_scopes( Statement::Expr { expr, .. } => { compute_expr_scopes(*expr, body, scopes, scope); } + Statement::Item => (), } } if let Some(expr) = tail { diff --git a/crates/hir-def/src/data.rs b/crates/hir-def/src/data.rs index 7ce05b64d022e..f506864902c47 100644 --- a/crates/hir-def/src/data.rs +++ b/crates/hir-def/src/data.rs @@ -634,7 +634,6 @@ impl<'a> AssocItemCollector<'a> { attr, ) { Ok(ResolvedAttr::Macro(call_id)) => { - self.attr_calls.push((ast_id, call_id)); // If proc attribute macro expansion is disabled, skip expanding it here if !self.db.expand_proc_attr_macros() { continue 'attrs; @@ -647,10 +646,21 @@ impl<'a> AssocItemCollector<'a> { // disabled. This is analogous to the handling in // `DefCollector::collect_macros`. if exp.is_dummy() { + self.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + self.module_id.local_id, + loc.kind, + loc.def.krate, + )); + + continue 'attrs; + } + if exp.is_disabled() { continue 'attrs; } } + self.attr_calls.push((ast_id, call_id)); + let res = self.expander.enter_expand_id::(self.db, call_id); self.collect_macro_items(res, &|| loc.kind.clone()); diff --git a/crates/hir-def/src/expander.rs b/crates/hir-def/src/expander.rs index b83feeedc34c1..b99df1ed59348 100644 --- a/crates/hir-def/src/expander.rs +++ b/crates/hir-def/src/expander.rs @@ -140,13 +140,11 @@ impl Expander { // The overflow error should have been reported when it occurred (see the next branch), // so don't return overflow error here to avoid diagnostics duplication. 
cov_mark::hit!(overflow_but_not_me); - return ExpandResult::only_err(ExpandError::RecursionOverflowPoisoned); + return ExpandResult::ok(None); } else if self.recursion_limit.check(self.recursion_depth as usize + 1).is_err() { self.recursion_depth = u32::MAX; cov_mark::hit!(your_stack_belongs_to_me); - return ExpandResult::only_err(ExpandError::other( - "reached recursion limit during macro expansion", - )); + return ExpandResult::only_err(ExpandError::RecursionOverflow); } let ExpandResult { value, err } = op(self); diff --git a/crates/hir-def/src/find_path.rs b/crates/hir-def/src/find_path.rs index 2e137f67b4c2a..26247ba5b507d 100644 --- a/crates/hir-def/src/find_path.rs +++ b/crates/hir-def/src/find_path.rs @@ -447,18 +447,25 @@ fn select_best_path( } const STD_CRATES: [Name; 3] = [known::std, known::core, known::alloc]; - let choose = |new_path: (ModPath, _), old_path: (ModPath, _)| { - let new_has_prelude = new_path.0.segments().iter().any(|seg| seg == &known::prelude); - let old_has_prelude = old_path.0.segments().iter().any(|seg| seg == &known::prelude); + let choose = |new: (ModPath, _), old: (ModPath, _)| { + let (new_path, _) = &new; + let (old_path, _) = &old; + let new_has_prelude = new_path.segments().iter().any(|seg| seg == &known::prelude); + let old_has_prelude = old_path.segments().iter().any(|seg| seg == &known::prelude); match (new_has_prelude, old_has_prelude, prefer_prelude) { - (true, false, true) | (false, true, false) => new_path, - (true, false, false) | (false, true, true) => old_path, - // no prelude difference in the paths, so pick the smaller one + (true, false, true) | (false, true, false) => new, + (true, false, false) | (false, true, true) => old, + // no prelude difference in the paths, so pick the shorter one (true, true, _) | (false, false, _) => { - if new_path.0.len() < old_path.0.len() { - new_path + let new_path_is_shorter = new_path + .len() + .cmp(&old_path.len()) + .then_with(|| new_path.textual_len().cmp(&old_path.textual_len())) + .is_lt(); + if new_path_is_shorter { + new } else { - old_path + old } } } @@ -469,8 +476,8 @@ fn select_best_path( let rank = match prefer_no_std { false => |name: &Name| match name { name if name == &known::core => 0, - name if name == &known::alloc => 0, - name if name == &known::std => 1, + name if name == &known::alloc => 1, + name if name == &known::std => 2, _ => unreachable!(), }, true => |name: &Name| match name { @@ -1539,4 +1546,38 @@ pub mod foo { "krate::prelude::Foo", ); } + + #[test] + fn respect_segment_length() { + check_found_path( + r#" +//- /main.rs crate:main deps:petgraph +$0 +//- /petgraph.rs crate:petgraph +pub mod graph { + pub use crate::graph_impl::{ + NodeIndex + }; +} + +mod graph_impl { + pub struct NodeIndex(Ix); +} + +pub mod stable_graph { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} + +pub mod prelude { + #[doc(no_inline)] + pub use crate::graph::{NodeIndex}; +} +"#, + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + "petgraph::graph::NodeIndex", + ); + } } diff --git a/crates/hir-def/src/hir.rs b/crates/hir-def/src/hir.rs index ac44d379415c0..34b2910b4f5e5 100644 --- a/crates/hir-def/src/hir.rs +++ b/crates/hir-def/src/hir.rs @@ -182,6 +182,7 @@ pub enum Expr { tail: Option, }, Const(ConstBlockId), + // FIXME: Fold this into Block with an unsafe flag? 
Unsafe { id: Option, statements: Box<[Statement]>, @@ -216,6 +217,9 @@ pub enum Expr { Return { expr: Option, }, + Become { + expr: ExprId, + }, Yield { expr: Option, }, @@ -349,6 +353,9 @@ pub enum Statement { expr: ExprId, has_semi: bool, }, + // At the moment, we only use this to figure out if a return expression + // is really the last statement of a block. See #16566 + Item, } impl Expr { @@ -382,6 +389,7 @@ impl Expr { } } Statement::Expr { expr: expression, .. } => f(*expression), + Statement::Item => (), } } if let &Some(expr) = tail { @@ -410,6 +418,7 @@ impl Expr { f(expr); } } + Expr::Become { expr } => f(*expr), Expr::RecordLit { fields, spread, .. } => { for field in fields.iter() { f(field.expr); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs index 0909d8c835443..63f211022c975 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/matching.rs @@ -33,7 +33,7 @@ m!(&k"); "#, expect![[r#" macro_rules! m { ($i:literal) => {}; } -/* error: invalid token tree */"#]], +/* error: mismatched delimiters */"#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs index e875950e4e5f9..2d289b7683389 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/meta_syntax.rs @@ -68,26 +68,26 @@ m2!(); "#, expect![[r#" macro_rules! i1 { invalid } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e1 { $i:ident => () } -/* error: invalid macro definition: expected subtree */ +/* error: macro definition has parse errors */ macro_rules! e2 { ($i:ident) () } -/* error: invalid macro definition: expected `=` */ +/* error: macro definition has parse errors */ macro_rules! e3 { ($(i:ident)_) => () } -/* error: invalid macro definition: invalid repeat */ +/* error: macro definition has parse errors */ macro_rules! f1 { ($i) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f2 { ($i:) => ($i) } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! f3 { ($i:_) => () } -/* error: invalid macro definition: missing fragment specifier */ +/* error: macro definition has parse errors */ macro_rules! m1 { ($$i) => () } -/* error: invalid macro definition: `$$` is not allowed on the pattern side */ +/* error: macro definition has parse errors */ macro_rules! m2 { () => ( ${invalid()} ) } -/* error: invalid macro definition: invalid metavariable expression */ +/* error: macro definition has parse errors */ "#]], ) } @@ -137,18 +137,18 @@ macro_rules! m9 { ($($($($i:ident)?)*)+) => {}; } macro_rules! mA { ($($($($i:ident)+)?)*) => {}; } macro_rules! mB { ($($($($i:ident)+)*)?) 
=> {}; } -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ -/* error: invalid macro definition: empty token tree in repetition */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ +/* error: macro definition has parse errors */ "#]], ); } diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs index 6560d0ec4664b..bf70119838766 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/metavar_expr.rs @@ -275,9 +275,9 @@ macro_rules! depth_too_large { } fn test() { - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; - /* error: invalid macro definition: invalid metavariable expression */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; + /* error: macro definition has parse errors */; } "#]], ); diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs index 6717ee1aa5fdf..4aad53c3bd71c 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/regression.rs @@ -1090,3 +1090,57 @@ fn main() { "#]], ); } + +#[test] +fn regression_16529() { + check( + r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + nameable! { + Name['a] + } +} +"#, + expect![[r#" +mod any { + #[macro_export] + macro_rules! nameable { + { + struct $name:ident[$a:lifetime] + } => { + $crate::any::nameable! { + struct $name[$a] + a + } + }; + { + struct $name:ident[$a:lifetime] + a + } => {}; + } + pub use nameable; + + /* error: unexpected token in input */$crate::any::nameable! 
{ + struct $name[$a]a + } +} +"#]], + ); +} diff --git a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs index ae56934f632f1..362c189f6a734 100644 --- a/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs +++ b/crates/hir-def/src/macro_expansion_tests/mbe/tt_conversion.rs @@ -97,8 +97,8 @@ m2!(x macro_rules! m1 { ($x:ident) => { ($x } } macro_rules! m2 { ($x:ident) => {} } -/* error: invalid macro definition: expected subtree */ -/* error: invalid token tree */ +/* error: macro definition has parse errors */ +/* error: mismatched delimiters */ "#]], ) } diff --git a/crates/hir-def/src/macro_expansion_tests/mod.rs b/crates/hir-def/src/macro_expansion_tests/mod.rs index fc5a6e80a427d..23b10cfd8e6c7 100644 --- a/crates/hir-def/src/macro_expansion_tests/mod.rs +++ b/crates/hir-def/src/macro_expansion_tests/mod.rs @@ -58,6 +58,7 @@ pub fn identity_when_valid(_attr: TokenStream, item: TokenStream) -> TokenStream name: "identity_when_valid".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityWhenValidProcMacroExpander), + disabled: false, }, )]; let db = TestDB::with_files_extra_proc_macros(ra_fixture, extra_proc_macros); diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 21cc28f1b3d0d..88838f58fe787 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -11,7 +11,7 @@ use either::Either; use hir_expand::{ ast_id_map::FileAstId, attrs::{Attr, AttrId}, - builtin_attr_macro::find_builtin_attr, + builtin_attr_macro::{find_builtin_attr, BuiltinAttrExpander}, builtin_derive_macro::find_builtin_derive, builtin_fn_macro::find_builtin_macro, name::{name, AsName, Name}, @@ -98,9 +98,13 @@ pub(super) fn collect_defs(db: &dyn DefDatabase, def_map: DefMap, tree_id: TreeI }; ( name.as_name(), - CustomProcMacroExpander::new(hir_expand::proc_macro::ProcMacroId( - idx as u32, - )), + if it.disabled { + CustomProcMacroExpander::disabled() + } else { + CustomProcMacroExpander::new( + hir_expand::proc_macro::ProcMacroId::new(idx as u32), + ) + }, ) }) .collect()) @@ -604,9 +608,6 @@ impl DefCollector<'_> { id: ItemTreeId, fn_id: FunctionId, ) { - if self.def_map.block.is_some() { - return; - } let kind = def.kind.to_basedb_kind(); let (expander, kind) = match self.proc_macros.as_ref().map(|it| it.iter().find(|(n, _)| n == &def.name)) { @@ -1120,9 +1121,16 @@ impl DefCollector<'_> { let mut push_resolved = |directive: &MacroDirective, call_id| { resolved.push((directive.module_id, directive.depth, directive.container, call_id)); }; + + #[derive(PartialEq, Eq)] + enum Resolved { + Yes, + No, + } + let mut res = ReachedFixedPoint::Yes; // Retain unresolved macros after this round of resolution. - macros.retain(|directive| { + let mut retain = |directive: &MacroDirective| { let subns = match &directive.kind { MacroDirectiveKind::FnLike { .. } => MacroSubNs::Bang, MacroDirectiveKind::Attr { .. } | MacroDirectiveKind::Derive { .. 
} => { @@ -1156,10 +1164,11 @@ impl DefCollector<'_> { self.def_map.modules[directive.module_id] .scope .add_macro_invoc(ast_id.ast_id, call_id); + push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Derive { ast_id, derive_attr, derive_pos, call_site } => { @@ -1198,7 +1207,7 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } MacroDirectiveKind::Attr { ast_id: file_ast_id, mod_item, attr, tree } => { @@ -1221,7 +1230,7 @@ impl DefCollector<'_> { } .collect(&[*mod_item], directive.container); res = ReachedFixedPoint::No; - false + Resolved::Yes }; if let Some(ident) = path.as_ident() { @@ -1237,13 +1246,18 @@ impl DefCollector<'_> { let def = match resolver_def_id(path.clone()) { Some(def) if def.is_attribute() => def, - _ => return true, + _ => return Resolved::No, }; - if matches!( - def, - MacroDefId { kind: MacroDefKind::BuiltInAttr(expander, _),.. } - if expander.is_derive() - ) { + + if let MacroDefId { + kind: + MacroDefKind::BuiltInAttr( + BuiltinAttrExpander::Derive | BuiltinAttrExpander::DeriveConst, + _, + ), + .. + } = def + { // Resolved to `#[derive]`, we don't actually expand this attribute like // normal (as that would just be an identity expansion with extra output) // Instead we treat derive attributes special and apply them separately. @@ -1316,16 +1330,6 @@ impl DefCollector<'_> { let call_id = attr_macro_as_call_id(self.db, file_ast_id, attr, self.def_map.krate, def); - // If proc attribute macro expansion is disabled, skip expanding it here - if !self.db.expand_proc_attr_macros() { - self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( - directive.module_id, - self.db.lookup_intern_macro_call(call_id).kind, - def.krate, - )); - return recollect_without(self); - } - // Skip #[test]/#[bench] expansion, which would merely result in more memory usage // due to duplicating functions into macro expansions if matches!( @@ -1337,17 +1341,29 @@ impl DefCollector<'_> { } if let MacroDefKind::ProcMacro(exp, ..) = def.kind { - if exp.is_dummy() { - // If there's no expander for the proc macro (e.g. - // because proc macros are disabled, or building the - // proc macro crate failed), report this and skip - // expansion like we would if it was disabled + // If proc attribute macro expansion is disabled, skip expanding it here + if !self.db.expand_proc_attr_macros() { self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( directive.module_id, self.db.lookup_intern_macro_call(call_id).kind, def.krate, )); + return recollect_without(self); + } + // If there's no expander for the proc macro (e.g. + // because proc macros are disabled, or building the + // proc macro crate failed), report this and skip + // expansion like we would if it was disabled + if exp.is_dummy() { + self.def_map.diagnostics.push(DefDiagnostic::unresolved_proc_macro( + directive.module_id, + self.db.lookup_intern_macro_call(call_id).kind, + def.krate, + )); + return recollect_without(self); + } + if exp.is_disabled() { return recollect_without(self); } } @@ -1358,12 +1374,13 @@ impl DefCollector<'_> { push_resolved(directive, call_id); res = ReachedFixedPoint::No; - return false; + return Resolved::Yes; } } - true - }); + Resolved::No + }; + macros.retain(|it| retain(it) == Resolved::No); // Attribute resolution can add unresolved macro invocations, so concatenate the lists. 
macros.extend(mem::take(&mut self.unresolved_macros)); self.unresolved_macros = macros; @@ -1673,7 +1690,11 @@ impl ModCollector<'_, '_> { FunctionLoc { container, id: ItemTreeId::new(self.tree_id, id) }.intern(db); let vis = resolve_vis(def_map, &self.item_tree[it.visibility]); - if self.def_collector.is_proc_macro && self.module_id == DefMap::ROOT { + + if self.def_collector.def_map.block.is_none() + && self.def_collector.is_proc_macro + && self.module_id == DefMap::ROOT + { if let Some(proc_macro) = attrs.parse_proc_macro_decl(&it.name) { self.def_collector.export_proc_macro( proc_macro, @@ -2333,7 +2354,7 @@ impl ModCollector<'_, '_> { resolved_res.resolved_def.take_macros().map(|it| db.macro_def(it)) }, ) { - // FIXME: if there were errors, this mightve been in the eager expansion from an + // FIXME: if there were errors, this might've been in the eager expansion from an // unresolved macro, so we need to push this into late macro resolution. see fixme above if res.err.is_none() { // Legacy macros need to be expanded immediately, so that any macros they produce diff --git a/crates/hir-def/src/nameres/diagnostics.rs b/crates/hir-def/src/nameres/diagnostics.rs index 0a3f7bf7ec3d6..161b2c0599099 100644 --- a/crates/hir-def/src/nameres/diagnostics.rs +++ b/crates/hir-def/src/nameres/diagnostics.rs @@ -103,6 +103,9 @@ impl DefDiagnostic { } // FIXME: Whats the difference between this and unresolved_macro_call + // FIXME: This is used for a lot of things, unresolved proc macros, disabled proc macros, etc + // yet the diagnostic handler in ide-diagnostics has to figure out what happened because this + // struct loses all that information! pub(crate) fn unresolved_proc_macro( container: LocalModuleId, ast: MacroCallKind, diff --git a/crates/hir-expand/src/builtin_fn_macro.rs b/crates/hir-expand/src/builtin_fn_macro.rs index 6d3de0e55d24d..90cd3af75783d 100644 --- a/crates/hir-expand/src/builtin_fn_macro.rs +++ b/crates/hir-expand/src/builtin_fn_macro.rs @@ -446,7 +446,7 @@ fn compile_error_expand( ) -> ExpandResult { let err = match &*tt.token_trees { [tt::TokenTree::Leaf(tt::Leaf::Literal(it))] => match unquote_str(it) { - Some(unquoted) => ExpandError::other(unquoted), + Some(unquoted) => ExpandError::other(unquoted.into_boxed_str()), None => ExpandError::other("`compile_error!` argument must be a string"), }, _ => ExpandError::other("`compile_error!` argument must be a string"), diff --git a/crates/hir-expand/src/change.rs b/crates/hir-expand/src/change.rs index 67b7df198e93e..c6611438e64d8 100644 --- a/crates/hir-expand/src/change.rs +++ b/crates/hir-expand/src/change.rs @@ -1,6 +1,10 @@ //! Defines a unit of change that can applied to the database to get the next //! state. Changes are transactional. 
-use base_db::{salsa::Durability, CrateGraph, FileChange, SourceDatabaseExt, SourceRoot}; +use base_db::{ + salsa::Durability, CrateGraph, CrateId, FileChange, SourceDatabaseExt, SourceRoot, + TargetLayoutLoadResult, Version, +}; +use la_arena::RawIdx; use span::FileId; use triomphe::Arc; @@ -10,6 +14,8 @@ use crate::{db::ExpandDatabase, proc_macro::ProcMacros}; pub struct Change { pub source_change: FileChange, pub proc_macros: Option, + pub toolchains: Option>>, + pub target_data_layouts: Option>, } impl Change { @@ -22,6 +28,24 @@ impl Change { if let Some(proc_macros) = self.proc_macros { db.set_proc_macros_with_durability(Arc::new(proc_macros), Durability::HIGH); } + if let Some(target_data_layouts) = self.target_data_layouts { + for (id, val) in target_data_layouts.into_iter().enumerate() { + db.set_data_layout_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } + if let Some(toolchains) = self.toolchains { + for (id, val) in toolchains.into_iter().enumerate() { + db.set_toolchain_with_durability( + CrateId::from_raw(RawIdx::from(id as u32)), + val, + Durability::HIGH, + ); + } + } } pub fn change_file(&mut self, file_id: FileId, new_text: Option>) { @@ -36,6 +60,14 @@ impl Change { self.proc_macros = Some(proc_macros); } + pub fn set_toolchains(&mut self, toolchains: Vec>) { + self.toolchains = Some(toolchains); + } + + pub fn set_target_data_layouts(&mut self, target_data_layouts: Vec) { + self.target_data_layouts = Some(target_data_layouts); + } + pub fn set_roots(&mut self, roots: Vec) { self.source_change.set_roots(roots) } diff --git a/crates/hir-expand/src/db.rs b/crates/hir-expand/src/db.rs index 6a288cf91979a..7b62eaa0289dc 100644 --- a/crates/hir-expand/src/db.rs +++ b/crates/hir-expand/src/db.rs @@ -108,7 +108,7 @@ pub trait ExpandDatabase: SourceDatabase { fn macro_arg( &self, id: MacroCallId, - ) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>>; + ) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>>; /// Fetches the expander for this macro. #[salsa::transparent] #[salsa::invoke(TokenExpander::macro_expander)] @@ -326,58 +326,77 @@ fn macro_arg( db: &dyn ExpandDatabase, id: MacroCallId, // FIXME: consider the following by putting fixup info into eager call info args - // ) -> ValueResult>, Arc>> { -) -> ValueResult, SyntaxFixupUndoInfo)>, Arc>> { - let mismatched_delimiters = |arg: &SyntaxNode| { - let first = arg.first_child_or_token().map_or(T![.], |it| it.kind()); - let last = arg.last_child_or_token().map_or(T![.], |it| it.kind()); - let well_formed_tt = - matches!((first, last), (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}'])); - if !well_formed_tt { - // Don't expand malformed (unbalanced) macro invocations. This is - // less than ideal, but trying to expand unbalanced macro calls - // sometimes produces pathological, deeply nested code which breaks - // all kinds of things. - // - // Some day, we'll have explicit recursion counters for all - // recursive things, at which point this code might be removed. - cov_mark::hit!(issue9358_bad_macro_stack_overflow); - Some(Arc::new(Box::new([SyntaxError::new( - "unbalanced token tree".to_owned(), - arg.text_range(), - )]) as Box<[_]>)) - } else { - None - } - }; + // ) -> ValueResult, Arc>> { +) -> ValueResult<(Arc, SyntaxFixupUndoInfo), Arc>> { let loc = db.lookup_intern_macro_call(id); if let Some(EagerCallInfo { arg, .. 
}) = matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) .then(|| loc.eager.as_deref()) .flatten() { - ValueResult::ok(Some((arg.clone(), SyntaxFixupUndoInfo::NONE))) + ValueResult::ok((arg.clone(), SyntaxFixupUndoInfo::NONE)) } else { let (parse, map) = parse_with_map(db, loc.kind.file_id()); let root = parse.syntax_node(); let syntax = match loc.kind { MacroCallKind::FnLike { ast_id, .. } => { + let dummy_tt = |kind| { + ( + Arc::new(tt::Subtree { + delimiter: tt::Delimiter { + open: loc.call_site, + close: loc.call_site, + kind, + }, + token_trees: Box::default(), + }), + SyntaxFixupUndoInfo::default(), + ) + }; + let node = &ast_id.to_ptr(db).to_node(&root); let offset = node.syntax().text_range().start(); - match node.token_tree() { - Some(tt) => { - let tt = tt.syntax(); - if let Some(e) = mismatched_delimiters(tt) { - return ValueResult::only_err(e); - } - tt.clone() - } - None => { - return ValueResult::only_err(Arc::new(Box::new([ - SyntaxError::new_at_offset("missing token tree".to_owned(), offset), - ]))); - } + let Some(tt) = node.token_tree() else { + return ValueResult::new( + dummy_tt(tt::DelimiterKind::Invisible), + Arc::new(Box::new([SyntaxError::new_at_offset( + "missing token tree".to_owned(), + offset, + )])), + ); + }; + let first = tt.left_delimiter_token().map(|it| it.kind()).unwrap_or(T!['(']); + let last = tt.right_delimiter_token().map(|it| it.kind()).unwrap_or(T![.]); + + let mismatched_delimiters = !matches!( + (first, last), + (T!['('], T![')']) | (T!['['], T![']']) | (T!['{'], T!['}']) + ); + if mismatched_delimiters { + // Don't expand malformed (unbalanced) macro invocations. This is + // less than ideal, but trying to expand unbalanced macro calls + // sometimes produces pathological, deeply nested code which breaks + // all kinds of things. + // + // So instead, we'll return an empty subtree here + cov_mark::hit!(issue9358_bad_macro_stack_overflow); + + let kind = match first { + _ if loc.def.is_proc_macro() => tt::DelimiterKind::Invisible, + T!['('] => tt::DelimiterKind::Parenthesis, + T!['['] => tt::DelimiterKind::Bracket, + T!['{'] => tt::DelimiterKind::Brace, + _ => tt::DelimiterKind::Invisible, + }; + return ValueResult::new( + dummy_tt(kind), + Arc::new(Box::new([SyntaxError::new_at_offset( + "mismatched delimiters".to_owned(), + offset, + )])), + ); } + tt.syntax().clone() } MacroCallKind::Derive { ast_id, .. } => { ast_id.to_ptr(db).to_node(&root).syntax().clone() @@ -427,15 +446,15 @@ fn macro_arg( if matches!(loc.def.kind, MacroDefKind::BuiltInEager(..)) { match parse.errors() { - [] => ValueResult::ok(Some((Arc::new(tt), undo_info))), + [] => ValueResult::ok((Arc::new(tt), undo_info)), errors => ValueResult::new( - Some((Arc::new(tt), undo_info)), + (Arc::new(tt), undo_info), // Box::<[_]>::from(res.errors()), not stable yet Arc::new(errors.to_vec().into_boxed_slice()), ), } } else { - ValueResult::ok(Some((Arc::new(tt), undo_info))) + ValueResult::ok((Arc::new(tt), undo_info)) } } } @@ -519,21 +538,20 @@ fn macro_expand( expander.expand(db, macro_call_id, &node, map.as_ref()) } _ => { - let ValueResult { value, err } = db.macro_arg(macro_call_id); - let Some((macro_arg, undo_info)) = value else { - return ExpandResult { - value: CowArc::Owned(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! 
- err: Some(ExpandError::other("invalid token tree")), - }; + let ValueResult { value: (macro_arg, undo_info), err } = db.macro_arg(macro_call_id); + let format_parse_err = |err: Arc>| { + let mut buf = String::new(); + for err in &**err { + use std::fmt::Write; + _ = write!(buf, "{}, ", err); + } + buf.pop(); + buf.pop(); + ExpandError::other(buf) }; let arg = &*macro_arg; - match loc.def.kind { + let res = match loc.def.kind { MacroDefKind::Declarative(id) => { db.decl_macro_expander(loc.def.krate, id).expand(db, arg.clone(), macro_call_id) } @@ -549,16 +567,7 @@ fn macro_expand( MacroDefKind::BuiltInEager(..) if loc.eager.is_none() => { return ExpandResult { value: CowArc::Arc(macro_arg.clone()), - err: err.map(|err| { - let mut buf = String::new(); - for err in &**err { - use std::fmt::Write; - _ = write!(buf, "{}, ", err); - } - buf.pop(); - buf.pop(); - ExpandError::other(buf) - }), + err: err.map(format_parse_err), }; } MacroDefKind::BuiltInEager(it, _) => { @@ -570,6 +579,11 @@ fn macro_expand( res } _ => unreachable!(), + }; + ExpandResult { + value: res.value, + // if the arg had parse errors, show them instead of the expansion errors + err: err.map(format_parse_err).or(res.err), } } }; @@ -597,17 +611,7 @@ fn macro_expand( fn expand_proc_macro(db: &dyn ExpandDatabase, id: MacroCallId) -> ExpandResult> { let loc = db.lookup_intern_macro_call(id); - let Some((macro_arg, undo_info)) = db.macro_arg(id).value else { - return ExpandResult { - value: Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - // FIXME: We should make sure to enforce an invariant that invalid macro - // calls do not reach this call path! - err: Some(ExpandError::other("invalid token tree")), - }; - }; + let (macro_arg, undo_info) = db.macro_arg(id).value; let expander = match loc.def.kind { MacroDefKind::ProcMacro(expander, ..) 
=> expander, diff --git a/crates/hir-expand/src/declarative.rs b/crates/hir-expand/src/declarative.rs index 37084ee8b93c9..6874336cd2d05 100644 --- a/crates/hir-expand/src/declarative.rs +++ b/crates/hir-expand/src/declarative.rs @@ -31,7 +31,7 @@ impl DeclarativeMacroExpander { call_id: MacroCallId, ) -> ExpandResult { let loc = db.lookup_intern_macro_call(call_id); - let toolchain = &db.crate_graph()[loc.def.krate].toolchain; + let toolchain = db.toolchain(loc.def.krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -44,9 +44,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: loc.call_site, close: loc.call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self .mac @@ -67,7 +67,7 @@ impl DeclarativeMacroExpander { krate: CrateId, call_site: Span, ) -> ExpandResult { - let toolchain = &db.crate_graph()[krate].toolchain; + let toolchain = db.toolchain(krate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { @@ -80,9 +80,9 @@ impl DeclarativeMacroExpander { ) }); match self.mac.err() { - Some(e) => ExpandResult::new( + Some(_) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other(format!("invalid macro definition: {e}")), + ExpandError::MacroDefinition, ), None => self.mac.expand(&tt, |_| (), new_meta_vars, call_site).map_err(Into::into), } @@ -119,7 +119,7 @@ impl DeclarativeMacroExpander { _ => None, } }; - let toolchain = crate_data.toolchain.as_ref(); + let toolchain = db.toolchain(def_crate); let new_meta_vars = toolchain.as_ref().map_or(false, |version| { REQUIREMENT.get_or_init(|| VersionReq::parse(">=1.76").unwrap()).matches( &base_db::Version { diff --git a/crates/hir-expand/src/lib.rs b/crates/hir-expand/src/lib.rs index fd028182faf6f..020ca75d80cb2 100644 --- a/crates/hir-expand/src/lib.rs +++ b/crates/hir-expand/src/lib.rs @@ -44,7 +44,6 @@ use crate::{ builtin_derive_macro::BuiltinDeriveExpander, builtin_fn_macro::{BuiltinFnLikeExpander, EagerExpander}, db::{ExpandDatabase, TokenExpander}, - fixup::SyntaxFixupUndoInfo, hygiene::SyntaxContextData, mod_path::ModPath, proc_macro::{CustomProcMacroExpander, ProcMacroKind}, @@ -129,8 +128,11 @@ pub type ExpandResult = ValueResult; #[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum ExpandError { UnresolvedProcMacro(CrateId), + /// The macro expansion is disabled. 
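+    /// This is reported for proc-macros that were explicitly disabled by the user
+    /// (see `CustomProcMacroExpander::disabled`), as opposed to ones that are merely
+    /// unresolved or not built yet.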
+ MacroDisabled, + MacroDefinition, Mbe(mbe::ExpandError), - RecursionOverflowPoisoned, + RecursionOverflow, Other(Box>), ProcMacroPanic(Box>), } @@ -152,14 +154,14 @@ impl fmt::Display for ExpandError { match self { ExpandError::UnresolvedProcMacro(_) => f.write_str("unresolved proc-macro"), ExpandError::Mbe(it) => it.fmt(f), - ExpandError::RecursionOverflowPoisoned => { - f.write_str("overflow expanding the original macro") - } + ExpandError::RecursionOverflow => f.write_str("overflow expanding the original macro"), ExpandError::ProcMacroPanic(it) => { f.write_str("proc-macro panicked: ")?; f.write_str(it) } ExpandError::Other(it) => f.write_str(it), + ExpandError::MacroDisabled => f.write_str("macro disabled"), + ExpandError::MacroDefinition => f.write_str("macro definition has parse errors"), } } } @@ -225,8 +227,8 @@ pub enum MacroCallKind { }, Attr { ast_id: AstId, - // FIXME: This is being interned, subtrees can vary quickly differ just slightly causing - // leakage problems here + // FIXME: This shouldn't be here, we can derive this from `invoc_attr_index` + // but we need to fix the `cfg_attr` handling first. attr_args: Option>, /// Syntactical index of the invoking `#[attribute]`. /// @@ -758,15 +760,7 @@ impl ExpansionInfo { let (parse, exp_map) = db.parse_macro_expansion(macro_file).value; let expanded = InMacroFile { file_id: macro_file, value: parse.syntax_node() }; - let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value.unwrap_or_else(|| { - ( - Arc::new(tt::Subtree { - delimiter: tt::Delimiter::invisible_spanned(loc.call_site), - token_trees: Box::new([]), - }), - SyntaxFixupUndoInfo::NONE, - ) - }); + let (macro_arg, _) = db.macro_arg(macro_file.macro_call_id).value; let def = loc.def.ast_id().left().and_then(|id| { let def_tt = match id.to_node(db) { diff --git a/crates/hir-expand/src/mod_path.rs b/crates/hir-expand/src/mod_path.rs index b64c3549e421e..136b0935be277 100644 --- a/crates/hir-expand/src/mod_path.rs +++ b/crates/hir-expand/src/mod_path.rs @@ -94,6 +94,21 @@ impl ModPath { } } + pub fn textual_len(&self) -> usize { + let base = match self.kind { + PathKind::Plain => 0, + PathKind::Super(0) => "self".len(), + PathKind::Super(i) => "super".len() * i as usize, + PathKind::Crate => "crate".len(), + PathKind::Abs => 0, + PathKind::DollarCrate(_) => "$crate".len(), + }; + self.segments() + .iter() + .map(|segment| segment.as_str().map_or(0, str::len)) + .fold(base, core::ops::Add::add) + } + pub fn is_ident(&self) -> bool { self.as_ident().is_some() } diff --git a/crates/hir-expand/src/proc_macro.rs b/crates/hir-expand/src/proc_macro.rs index 70b47fc54b11c..ca6fc0afe2d7d 100644 --- a/crates/hir-expand/src/proc_macro.rs +++ b/crates/hir-expand/src/proc_macro.rs @@ -12,7 +12,13 @@ use syntax::SmolStr; use crate::{db::ExpandDatabase, tt, ExpandError, ExpandResult}; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] -pub struct ProcMacroId(pub u32); +pub struct ProcMacroId(u32); + +impl ProcMacroId { + pub fn new(u32: u32) -> Self { + ProcMacroId(u32) + } +} #[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)] pub enum ProcMacroKind { @@ -49,6 +55,7 @@ pub struct ProcMacro { pub name: SmolStr, pub kind: ProcMacroKind, pub expander: sync::Arc, + pub disabled: bool, } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] @@ -56,20 +63,35 @@ pub struct CustomProcMacroExpander { proc_macro_id: ProcMacroId, } -const DUMMY_ID: u32 = !0; - impl CustomProcMacroExpander { + const DUMMY_ID: u32 = !0; + const DISABLED_ID: u32 = !1; + pub fn new(proc_macro_id: ProcMacroId) 
-> Self { - assert_ne!(proc_macro_id.0, DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DUMMY_ID); + assert_ne!(proc_macro_id.0, Self::DISABLED_ID); Self { proc_macro_id } } - pub fn dummy() -> Self { - Self { proc_macro_id: ProcMacroId(DUMMY_ID) } + /// A dummy expander that always errors. This is used for proc-macros that are missing, usually + /// due to them not being built yet. + pub const fn dummy() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DUMMY_ID) } + } + + /// The macro was not yet resolved. + pub const fn is_dummy(&self) -> bool { + self.proc_macro_id.0 == Self::DUMMY_ID + } + + /// A dummy expander that always errors. This expander is used for macros that have been disabled. + pub const fn disabled() -> Self { + Self { proc_macro_id: ProcMacroId(Self::DISABLED_ID) } } - pub fn is_dummy(&self) -> bool { - self.proc_macro_id.0 == DUMMY_ID + /// The macro is explicitly disabled and cannot be expanded. + pub const fn is_disabled(&self) -> bool { + self.proc_macro_id.0 == Self::DISABLED_ID } pub fn expand( @@ -84,10 +106,14 @@ impl CustomProcMacroExpander { mixed_site: Span, ) -> ExpandResult { match self.proc_macro_id { - ProcMacroId(DUMMY_ID) => ExpandResult::new( + ProcMacroId(Self::DUMMY_ID) => ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), ExpandError::UnresolvedProcMacro(def_crate), ), + ProcMacroId(Self::DISABLED_ID) => ExpandResult::new( + tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), + ExpandError::MacroDisabled, + ), ProcMacroId(id) => { let proc_macros = db.proc_macros(); let proc_macros = match proc_macros.get(&def_crate) { @@ -110,7 +136,7 @@ impl CustomProcMacroExpander { ); return ExpandResult::new( tt::Subtree::empty(tt::DelimSpan { open: call_site, close: call_site }), - ExpandError::other("Internal error"), + ExpandError::other("Internal error: proc-macro index out of bounds"), ); } }; diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 7f8fb7f4b5214..c4329a7b82bf8 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -169,9 +169,9 @@ impl ExprValidator { return; } - let pattern_arena = Arena::new(); - let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db, &pattern_arena); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + let pattern_arena = Arena::new(); let mut m_arms = Vec::with_capacity(arms.len()); let mut has_lowering_errors = false; for arm in arms { @@ -196,8 +196,9 @@ impl ExprValidator { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. 
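+            // `lower_pattern` now returns the pattern by value; allocate it into the arena
+            // owned by this function instead of one stored in the match-check context.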
+ let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { - pat: self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors), + pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), arm_data: (), }; @@ -223,7 +224,7 @@ impl ExprValidator { ValidityConstraint::ValidOnly, ) { Ok(report) => report, - Err(void) => match void {}, + Err(()) => return, }; // FIXME Report unreachable arms @@ -245,10 +246,10 @@ impl ExprValidator { db: &dyn HirDatabase, body: &Body, have_errors: &mut bool, - ) -> &'p DeconstructedPat<'p> { + ) -> DeconstructedPat<'p> { let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); let pattern = patcx.lower_pattern(pat); - let pattern = cx.pattern_arena.alloc(cx.lower_pat(&pattern)); + let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { *have_errors = true; } diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs index 712842372b625..e98a946a8708c 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_analysis.rs @@ -1,6 +1,7 @@ //! Interface with `rustc_pattern_analysis`. use std::fmt; +use tracing::debug; use hir_def::{DefWithBodyId, EnumVariantId, HasModule, LocalFieldId, ModuleId, VariantId}; use rustc_hash::FxHashMap; @@ -11,7 +12,6 @@ use rustc_pattern_analysis::{ }; use smallvec::{smallvec, SmallVec}; use stdx::never; -use typed_arena::Arena; use crate::{ db::HirDatabase, @@ -26,7 +26,7 @@ use Constructor::*; // Re-export r-a-specific versions of all these types. pub(crate) type DeconstructedPat<'p> = - rustc_pattern_analysis::pat::DeconstructedPat<'p, MatchCheckCtx<'p>>; + rustc_pattern_analysis::pat::DeconstructedPat>; pub(crate) type MatchArm<'p> = rustc_pattern_analysis::MatchArm<'p, MatchCheckCtx<'p>>; pub(crate) type WitnessPat<'p> = rustc_pattern_analysis::pat::WitnessPat>; @@ -40,7 +40,6 @@ pub(crate) struct MatchCheckCtx<'p> { module: ModuleId, body: DefWithBodyId, pub(crate) db: &'p dyn HirDatabase, - pub(crate) pattern_arena: &'p Arena>, exhaustive_patterns: bool, min_exhaustive_patterns: bool, } @@ -52,17 +51,12 @@ pub(crate) struct PatData<'p> { } impl<'p> MatchCheckCtx<'p> { - pub(crate) fn new( - module: ModuleId, - body: DefWithBodyId, - db: &'p dyn HirDatabase, - pattern_arena: &'p Arena>, - ) -> Self { + pub(crate) fn new(module: ModuleId, body: DefWithBodyId, db: &'p dyn HirDatabase) -> Self { let def_map = db.crate_def_map(module.krate()); let exhaustive_patterns = def_map.is_unstable_feature_enabled("exhaustive_patterns"); let min_exhaustive_patterns = def_map.is_unstable_feature_enabled("min_exhaustive_patterns"); - Self { module, body, db, pattern_arena, exhaustive_patterns, min_exhaustive_patterns } + Self { module, body, db, exhaustive_patterns, min_exhaustive_patterns } } fn is_uninhabited(&self, ty: &Ty) -> bool { @@ -131,15 +125,15 @@ impl<'p> MatchCheckCtx<'p> { } pub(crate) fn lower_pat(&self, pat: &Pat) -> DeconstructedPat<'p> { - let singleton = |pat| std::slice::from_ref(self.pattern_arena.alloc(pat)); + let singleton = |pat| vec![pat]; let ctor; - let fields: &[_]; + let fields: Vec<_>; match pat.kind.as_ref() { PatKind::Binding { subpattern: Some(subpat), .. } => return self.lower_pat(subpat), PatKind::Binding { subpattern: None, .. 
} | PatKind::Wild => { ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } PatKind::Deref { subpattern } => { ctor = match pat.ty.kind(Interner) { @@ -157,7 +151,7 @@ impl<'p> MatchCheckCtx<'p> { match pat.ty.kind(Interner) { TyKind::Tuple(_, substs) => { ctor = Struct; - let mut wilds: SmallVec<[_; 2]> = substs + let mut wilds: Vec<_> = substs .iter(Interner) .map(|arg| arg.assert_ty_ref(Interner).clone()) .map(DeconstructedPat::wildcard) @@ -166,7 +160,7 @@ impl<'p> MatchCheckCtx<'p> { let idx: u32 = pat.field.into_raw().into(); wilds[idx as usize] = self.lower_pat(&pat.pattern); } - fields = self.pattern_arena.alloc_extend(wilds) + fields = wilds } TyKind::Adt(adt, substs) if is_box(self.db, adt.0) => { // The only legal patterns of type `Box` (outside `std`) are `_` and box @@ -216,33 +210,29 @@ impl<'p> MatchCheckCtx<'p> { field_id_to_id[field_idx as usize] = Some(i); ty }); - let mut wilds: SmallVec<[_; 2]> = - tys.map(DeconstructedPat::wildcard).collect(); + let mut wilds: Vec<_> = tys.map(DeconstructedPat::wildcard).collect(); for pat in subpatterns { let field_idx: u32 = pat.field.into_raw().into(); if let Some(i) = field_id_to_id[field_idx as usize] { wilds[i] = self.lower_pat(&pat.pattern); } } - fields = self.pattern_arena.alloc_extend(wilds); + fields = wilds; } _ => { never!("pattern has unexpected type: pat: {:?}, ty: {:?}", pat, &pat.ty); ctor = Wildcard; - fields = &[]; + fields = Vec::new(); } } } &PatKind::LiteralBool { value } => { ctor = Bool(value); - fields = &[]; + fields = Vec::new(); } PatKind::Or { pats } => { ctor = Or; - // Collect here because `Arena::alloc_extend` panics on reentrancy. - let subpats: SmallVec<[_; 2]> = - pats.iter().map(|pat| self.lower_pat(pat)).collect(); - fields = self.pattern_arena.alloc_extend(subpats); + fields = pats.iter().map(|pat| self.lower_pat(pat)).collect(); } } let data = PatData { db: self.db }; @@ -307,7 +297,7 @@ impl<'p> MatchCheckCtx<'p> { } impl<'p> TypeCx for MatchCheckCtx<'p> { - type Error = Void; + type Error = (); type Ty = Ty; type VariantIdx = EnumVariantId; type StrLit = Void; @@ -463,7 +453,7 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { fn write_variant_name( f: &mut fmt::Formatter<'_>, - pat: &rustc_pattern_analysis::pat::DeconstructedPat<'_, Self>, + pat: &rustc_pattern_analysis::pat::DeconstructedPat, ) -> fmt::Result { let variant = pat.ty().as_adt().and_then(|(adt, _)| Self::variant_id_for_adt(pat.ctor(), adt)); @@ -485,8 +475,8 @@ impl<'p> TypeCx for MatchCheckCtx<'p> { Ok(()) } - fn bug(&self, fmt: fmt::Arguments<'_>) -> ! 
{ - panic!("{}", fmt) + fn bug(&self, fmt: fmt::Arguments<'_>) { + debug!("{}", fmt) } } diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 71c3f89716d82..1977f00517cd1 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -26,7 +26,7 @@ use std::{convert::identity, ops::Index}; use chalk_ir::{ cast::Cast, fold::TypeFoldable, interner::HasInterner, DebruijnIndex, Mutability, Safety, - Scalar, TyKind, TypeFlags, + Scalar, TyKind, TypeFlags, Variance, }; use either::Either; use hir_def::{ @@ -58,8 +58,9 @@ use crate::{ static_lifetime, to_assoc_type_id, traits::FnTrait, utils::{InTypeConstIdMetadata, UnevaluatedConstEvaluatorFolder}, - AliasEq, AliasTy, ClosureId, DomainGoal, GenericArg, Goal, ImplTraitId, InEnvironment, - Interner, ProjectionTy, RpitId, Substitution, TraitEnvironment, TraitRef, Ty, TyBuilder, TyExt, + AliasEq, AliasTy, Binders, ClosureId, Const, DomainGoal, GenericArg, Goal, ImplTraitId, + InEnvironment, Interner, Lifetime, ProjectionTy, RpitId, Substitution, TraitEnvironment, + TraitRef, Ty, TyBuilder, TyExt, }; // This lint has a false positive here. See the link below for details. @@ -68,7 +69,7 @@ use crate::{ #[allow(unreachable_pub)] pub use coerce::could_coerce; #[allow(unreachable_pub)] -pub use unify::could_unify; +pub use unify::{could_unify, could_unify_deeply}; use cast::CastCheck; pub(crate) use closure::{CaptureKind, CapturedItem, CapturedItemWithoutTy}; @@ -688,10 +689,17 @@ impl<'a> InferenceContext<'a> { for ty in type_of_for_iterator.values_mut() { *ty = table.resolve_completely(ty.clone()); } - for mismatch in type_mismatches.values_mut() { + type_mismatches.retain(|_, mismatch| { mismatch.expected = table.resolve_completely(mismatch.expected.clone()); mismatch.actual = table.resolve_completely(mismatch.actual.clone()); - } + chalk_ir::zip::Zip::zip_with( + &mut UnknownMismatch(self.db), + Variance::Invariant, + &mismatch.expected, + &mismatch.actual, + ) + .is_ok() + }); diagnostics.retain_mut(|diagnostic| { use InferenceDiagnostic::*; match diagnostic { @@ -1502,3 +1510,116 @@ impl std::ops::BitOrAssign for Diverges { *self = *self | other; } } +/// A zipper that checks for unequal `{unknown}` occurrences in the two types. Used to filter out +/// mismatch diagnostics that only differ in `{unknown}`. These mismatches are usually not helpful. +/// As the cause is usually an underlying name resolution problem. +struct UnknownMismatch<'db>(&'db dyn HirDatabase); +impl chalk_ir::zip::Zipper for UnknownMismatch<'_> { + fn zip_tys(&mut self, variance: Variance, a: &Ty, b: &Ty) -> chalk_ir::Fallible<()> { + let zip_substs = |this: &mut Self, + variances, + sub_a: &Substitution, + sub_b: &Substitution| { + this.zip_substs(variance, variances, sub_a.as_slice(Interner), sub_b.as_slice(Interner)) + }; + match (a.kind(Interner), b.kind(Interner)) { + (TyKind::Adt(id_a, sub_a), TyKind::Adt(id_b, sub_b)) if id_a == id_b => zip_substs( + self, + Some(self.unification_database().adt_variance(*id_a)), + sub_a, + sub_b, + )?, + ( + TyKind::AssociatedType(assoc_ty_a, sub_a), + TyKind::AssociatedType(assoc_ty_b, sub_b), + ) if assoc_ty_a == assoc_ty_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Tuple(arity_a, sub_a), TyKind::Tuple(arity_b, sub_b)) + if arity_a == arity_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::OpaqueType(opaque_ty_a, sub_a), TyKind::OpaqueType(opaque_ty_b, sub_b)) + if opaque_ty_a == opaque_ty_b => + { + zip_substs(self, None, sub_a, sub_b)? 
+ } + (TyKind::Slice(ty_a), TyKind::Slice(ty_b)) => self.zip_tys(variance, ty_a, ty_b)?, + (TyKind::FnDef(fn_def_a, sub_a), TyKind::FnDef(fn_def_b, sub_b)) + if fn_def_a == fn_def_b => + { + zip_substs( + self, + Some(self.unification_database().fn_def_variance(*fn_def_a)), + sub_a, + sub_b, + )? + } + (TyKind::Ref(mutability_a, _, ty_a), TyKind::Ref(mutability_b, _, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Raw(mutability_a, ty_a), TyKind::Raw(mutability_b, ty_b)) + if mutability_a == mutability_b => + { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Array(ty_a, const_a), TyKind::Array(ty_b, const_b)) if const_a == const_b => { + self.zip_tys(variance, ty_a, ty_b)? + } + (TyKind::Closure(id_a, sub_a), TyKind::Closure(id_b, sub_b)) if id_a == id_b => { + zip_substs(self, None, sub_a, sub_b)? + } + (TyKind::Coroutine(coroutine_a, sub_a), TyKind::Coroutine(coroutine_b, sub_b)) + if coroutine_a == coroutine_b => + { + zip_substs(self, None, sub_a, sub_b)? + } + ( + TyKind::CoroutineWitness(coroutine_a, sub_a), + TyKind::CoroutineWitness(coroutine_b, sub_b), + ) if coroutine_a == coroutine_b => zip_substs(self, None, sub_a, sub_b)?, + (TyKind::Function(fn_ptr_a), TyKind::Function(fn_ptr_b)) + if fn_ptr_a.sig == fn_ptr_b.sig && fn_ptr_a.num_binders == fn_ptr_b.num_binders => + { + zip_substs(self, None, &fn_ptr_a.substitution.0, &fn_ptr_b.substitution.0)? + } + (TyKind::Error, TyKind::Error) => (), + (TyKind::Error, _) | (_, TyKind::Error) => return Err(chalk_ir::NoSolution), + _ => (), + } + + Ok(()) + } + + fn zip_lifetimes(&mut self, _: Variance, _: &Lifetime, _: &Lifetime) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_consts(&mut self, _: Variance, _: &Const, _: &Const) -> chalk_ir::Fallible<()> { + Ok(()) + } + + fn zip_binders( + &mut self, + variance: Variance, + a: &Binders, + b: &Binders, + ) -> chalk_ir::Fallible<()> + where + T: Clone + + HasInterner + + chalk_ir::zip::Zip + + TypeFoldable, + { + chalk_ir::zip::Zip::zip_with(self, variance, a.skip_binders(), b.skip_binders()) + } + + fn interner(&self) -> Interner { + Interner + } + + fn unification_database(&self) -> &dyn chalk_ir::UnificationDatabase { + &self.0 + } +} diff --git a/crates/hir-ty/src/infer/closure.rs b/crates/hir-ty/src/infer/closure.rs index c3746f787067c..22a70f951ea7a 100644 --- a/crates/hir-ty/src/infer/closure.rs +++ b/crates/hir-ty/src/infer/closure.rs @@ -485,6 +485,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.consume_expr(*expr); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -531,6 +532,9 @@ impl InferenceContext<'_> { self.consume_expr(expr); } } + &Expr::Become { expr } => { + self.consume_expr(expr); + } Expr::RecordLit { fields, spread, .. 
} => { if let &Some(expr) = spread { self.consume_expr(expr); diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 8b8e97b0081c6..428ed6748c6c2 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -502,6 +502,7 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } &Expr::Return { expr } => self.infer_expr_return(tgt_expr, expr), + &Expr::Become { expr } => self.infer_expr_become(expr), Expr::Yield { expr } => { if let Some((resume_ty, yield_ty)) = self.resume_yield_tys.clone() { if let Some(expr) = expr { @@ -1084,6 +1085,27 @@ impl InferenceContext<'_> { self.result.standard_types.never.clone() } + fn infer_expr_become(&mut self, expr: ExprId) -> Ty { + match &self.return_coercion { + Some(return_coercion) => { + let ret_ty = return_coercion.expected_ty(); + + let call_expr_ty = + self.infer_expr_inner(expr, &Expectation::HasType(ret_ty.clone())); + + // NB: this should *not* coerce. + // tail calls don't support any coercions except lifetimes ones (like `&'static u8 -> &'a u8`). + self.unify(&call_expr_ty, &ret_ty); + } + None => { + // FIXME: diagnose `become` outside of functions + self.infer_expr_no_expect(expr); + } + } + + self.result.standard_types.never.clone() + } + fn infer_expr_box(&mut self, inner_expr: ExprId, expected: &Expectation) -> Ty { if let Some(box_id) = self.resolve_boxed_box() { let table = &mut self.table; @@ -1367,6 +1389,7 @@ impl InferenceContext<'_> { ); } } + Statement::Item => (), } } diff --git a/crates/hir-ty/src/infer/mutability.rs b/crates/hir-ty/src/infer/mutability.rs index 663ea85323189..00e5eac229fb6 100644 --- a/crates/hir-ty/src/infer/mutability.rs +++ b/crates/hir-ty/src/infer/mutability.rs @@ -65,6 +65,7 @@ impl InferenceContext<'_> { Statement::Expr { expr, has_semi: _ } => { self.infer_mut_expr(*expr, Mutability::Not); } + Statement::Item => (), } } if let Some(tail) = tail { @@ -93,6 +94,9 @@ impl InferenceContext<'_> { self.infer_mut_expr(expr, Mutability::Not); } } + Expr::Become { expr } => { + self.infer_mut_expr(*expr, Mutability::Not); + } Expr::RecordLit { path: _, fields, spread, ellipsis: _, is_assignee_expr: _ } => { self.infer_mut_not_expr_iter(fields.iter().map(|it| it.expr).chain(*spread)) } diff --git a/crates/hir-ty/src/infer/unify.rs b/crates/hir-ty/src/infer/unify.rs index de23ca34990be..709760b64fd3f 100644 --- a/crates/hir-ty/src/infer/unify.rs +++ b/crates/hir-ty/src/infer/unify.rs @@ -74,6 +74,12 @@ impl> Canonicalized { } } +/// Check if types unify. +/// +/// Note that we consider placeholder types to unify with everything. +/// This means that there may be some unresolved goals that actually set bounds for the placeholder +/// type for the types to unify. For example `Option` and `Option` unify although there is +/// unresolved goal `T = U`. pub fn could_unify( db: &dyn HirDatabase, env: Arc, @@ -82,21 +88,35 @@ pub fn could_unify( unify(db, env, tys).is_some() } +/// Check if types unify eagerly making sure there are no unresolved goals. +/// +/// This means that placeholder types are not considered to unify if there are any bounds set on +/// them. 
For example `Option` and `Option` do not unify as we cannot show that `T = U` +pub fn could_unify_deeply( + db: &dyn HirDatabase, + env: Arc, + tys: &Canonical<(Ty, Ty)>, +) -> bool { + let mut table = InferenceTable::new(db, env); + let vars = make_substitutions(tys, &mut table); + let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); + let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); + let ty1_with_vars = table.normalize_associated_types_in(ty1_with_vars); + let ty2_with_vars = table.normalize_associated_types_in(ty2_with_vars); + table.resolve_obligations_as_possible(); + table.propagate_diverging_flag(); + let ty1_with_vars = table.resolve_completely(ty1_with_vars); + let ty2_with_vars = table.resolve_completely(ty2_with_vars); + table.unify_deeply(&ty1_with_vars, &ty2_with_vars) +} + pub(crate) fn unify( db: &dyn HirDatabase, env: Arc, tys: &Canonical<(Ty, Ty)>, ) -> Option { let mut table = InferenceTable::new(db, env); - let vars = Substitution::from_iter( - Interner, - tys.binders.iter(Interner).map(|it| match &it.kind { - chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), - // FIXME: maybe wrong? - chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), - chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), - }), - ); + let vars = make_substitutions(tys, &mut table); let ty1_with_vars = vars.apply(tys.value.0.clone(), Interner); let ty2_with_vars = vars.apply(tys.value.1.clone(), Interner); if !table.unify(&ty1_with_vars, &ty2_with_vars) { @@ -125,6 +145,21 @@ pub(crate) fn unify( )) } +fn make_substitutions( + tys: &chalk_ir::Canonical<(chalk_ir::Ty, chalk_ir::Ty)>, + table: &mut InferenceTable<'_>, +) -> chalk_ir::Substitution { + Substitution::from_iter( + Interner, + tys.binders.iter(Interner).map(|it| match &it.kind { + chalk_ir::VariableKind::Ty(_) => table.new_type_var().cast(Interner), + // FIXME: maybe wrong? + chalk_ir::VariableKind::Lifetime => table.new_type_var().cast(Interner), + chalk_ir::VariableKind::Const(ty) => table.new_const_var(ty.clone()).cast(Interner), + }), + ) +} + bitflags::bitflags! { #[derive(Default, Clone, Copy)] pub(crate) struct TypeVariableFlags: u8 { @@ -431,6 +466,18 @@ impl<'a> InferenceTable<'a> { true } + /// Unify two relatable values (e.g. `Ty`) and check whether trait goals which arise from that could be fulfilled + pub(crate) fn unify_deeply>(&mut self, ty1: &T, ty2: &T) -> bool { + let result = match self.try_unify(ty1, ty2) { + Ok(r) => r, + Err(_) => return false, + }; + result.goals.iter().all(|goal| { + let canonicalized = self.canonicalize(goal.clone()); + self.try_resolve_obligation(&canonicalized).is_some() + }) + } + /// Unify two relatable values (e.g. `Ty`) and return new trait goals arising from it, so the /// caller needs to deal with them. 
pub(crate) fn try_unify>( @@ -501,7 +548,8 @@ impl<'a> InferenceTable<'a> { fn register_obligation_in_env(&mut self, goal: InEnvironment) { let canonicalized = self.canonicalize(goal); - if !self.try_resolve_obligation(&canonicalized) { + let solution = self.try_resolve_obligation(&canonicalized); + if matches!(solution, Some(Solution::Ambig(_))) { self.pending_obligations.push(canonicalized); } } @@ -627,38 +675,35 @@ impl<'a> InferenceTable<'a> { fn try_resolve_obligation( &mut self, canonicalized: &Canonicalized>, - ) -> bool { + ) -> Option> { let solution = self.db.trait_solve( self.trait_env.krate, self.trait_env.block, canonicalized.value.clone(), ); - match solution { + match &solution { Some(Solution::Unique(canonical_subst)) => { canonicalized.apply_solution( self, Canonical { - binders: canonical_subst.binders, + binders: canonical_subst.binders.clone(), // FIXME: handle constraints - value: canonical_subst.value.subst, + value: canonical_subst.value.subst.clone(), }, ); - true } Some(Solution::Ambig(Guidance::Definite(substs))) => { - canonicalized.apply_solution(self, substs); - false + canonicalized.apply_solution(self, substs.clone()); } Some(_) => { // FIXME use this when trying to resolve everything at the end - false } None => { // FIXME obligation cannot be fulfilled => diagnostic - true } } + solution } pub(crate) fn callable_sig( diff --git a/crates/hir-ty/src/layout/target.rs b/crates/hir-ty/src/layout/target.rs index 5bfe7bf010f1c..9b1424548c2a9 100644 --- a/crates/hir-ty/src/layout/target.rs +++ b/crates/hir-ty/src/layout/target.rs @@ -11,10 +11,8 @@ pub fn target_data_layout_query( db: &dyn HirDatabase, krate: CrateId, ) -> Result, Arc> { - let crate_graph = db.crate_graph(); - let res = crate_graph[krate].target_layout.as_deref(); - match res { - Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(it) { + match db.data_layout(krate) { + Ok(it) => match TargetDataLayout::parse_from_llvm_datalayout_string(&it) { Ok(it) => Ok(Arc::new(it)), Err(e) => { Err(match e { @@ -44,6 +42,6 @@ pub fn target_data_layout_query( }.into()) } }, - Err(e) => Err(Arc::from(&**e)), + Err(e) => Err(e), } } diff --git a/crates/hir-ty/src/layout/tests.rs b/crates/hir-ty/src/layout/tests.rs index ba3dfe8100d17..6c1eccb75e631 100644 --- a/crates/hir-ty/src/layout/tests.rs +++ b/crates/hir-ty/src/layout/tests.rs @@ -1,6 +1,7 @@ use chalk_ir::{AdtId, TyKind}; use either::Either; use hir_def::db::DefDatabase; +use project_model::target_data_layout::RustcDataLayoutConfig; use rustc_hash::FxHashMap; use test_fixture::WithFixture; use triomphe::Arc; @@ -15,13 +16,18 @@ use crate::{ mod closure; fn current_machine_data_layout() -> String { - project_model::target_data_layout::get(None, None, &FxHashMap::default()).unwrap() + project_model::target_data_layout::get( + RustcDataLayoutConfig::Rustc(None), + None, + &FxHashMap::default(), + ) + .unwrap() } fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\n{ra_fixture}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\n{ra_fixture}", ); let (db, file_ids) = TestDB::with_many_files(&ra_fixture); @@ -70,7 +76,7 @@ fn eval_goal(ra_fixture: &str, minicore: &str) -> Result, LayoutErro fn eval_expr(ra_fixture: &str, minicore: &str) -> Result, LayoutError> { let target_data_layout = 
current_machine_data_layout(); let ra_fixture = format!( - "{minicore}//- /main.rs crate:test target_data_layout:{target_data_layout}\nfn main(){{let goal = {{{ra_fixture}}};}}", + "//- target_data_layout: {target_data_layout}\n{minicore}//- /main.rs crate:test\nfn main(){{let goal = {{{ra_fixture}}};}}", ); let (db, file_id) = TestDB::with_single_file(&ra_fixture); diff --git a/crates/hir-ty/src/lib.rs b/crates/hir-ty/src/lib.rs index 70138633341ce..ec97bdc2c4343 100644 --- a/crates/hir-ty/src/lib.rs +++ b/crates/hir-ty/src/lib.rs @@ -79,8 +79,8 @@ pub use builder::{ParamKind, TyBuilder}; pub use chalk_ext::*; pub use infer::{ closure::{CaptureKind, CapturedItem}, - could_coerce, could_unify, Adjust, Adjustment, AutoBorrow, BindingMode, InferenceDiagnostic, - InferenceResult, OverloadedDeref, PointerCast, + could_coerce, could_unify, could_unify_deeply, Adjust, Adjustment, AutoBorrow, BindingMode, + InferenceDiagnostic, InferenceResult, OverloadedDeref, PointerCast, }; pub use interner::Interner; pub use lower::{ diff --git a/crates/hir-ty/src/mir/borrowck.rs b/crates/hir-ty/src/mir/borrowck.rs index 9089c11c5d9bb..63fa87ad66288 100644 --- a/crates/hir-ty/src/mir/borrowck.rs +++ b/crates/hir-ty/src/mir/borrowck.rs @@ -7,6 +7,7 @@ use std::iter; use hir_def::{DefWithBodyId, HasModule}; use la_arena::ArenaMap; +use rustc_hash::FxHashMap; use stdx::never; use triomphe::Arc; @@ -14,7 +15,7 @@ use crate::{ db::{HirDatabase, InternedClosure}, mir::Operand, utils::ClosureSubst, - ClosureId, Interner, Ty, TyExt, TypeFlags, + ClosureId, Interner, Substitution, Ty, TyExt, TypeFlags, }; use super::{ @@ -36,11 +37,27 @@ pub struct MovedOutOfRef { pub span: MirSpan, } +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct PartiallyMoved { + pub ty: Ty, + pub span: MirSpan, + pub local: LocalId, +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct BorrowRegion { + pub local: LocalId, + pub kind: BorrowKind, + pub places: Vec, +} + #[derive(Debug, Clone, PartialEq, Eq)] pub struct BorrowckResult { pub mir_body: Arc, pub mutability_of_locals: ArenaMap, pub moved_out_of_ref: Vec, + pub partially_moved: Vec, + pub borrow_regions: Vec, } fn all_mir_bodies( @@ -80,12 +97,26 @@ pub fn borrowck_query( res.push(BorrowckResult { mutability_of_locals: mutability_of_locals(db, &body), moved_out_of_ref: moved_out_of_ref(db, &body), + partially_moved: partially_moved(db, &body), + borrow_regions: borrow_regions(db, &body), mir_body: body, }); })?; Ok(res.into()) } +fn make_fetch_closure_field( + db: &dyn HirDatabase, +) -> impl FnOnce(ClosureId, &Substitution, usize) -> Ty + '_ { + |c: ClosureId, subst: &Substitution, f: usize| { + let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); + let infer = db.infer(def); + let (captures, _) = infer.closure_info(&c); + let parent_subst = ClosureSubst(subst).parent_subst(); + captures.get(f).expect("broken closure field").ty.clone().substitute(Interner, parent_subst) + } +} + fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec { let mut result = vec![]; let mut for_operand = |op: &Operand, span: MirSpan| match op { @@ -99,18 +130,7 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - 
.substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), body.owner.module(db.upcast()).krate(), ); } @@ -188,6 +208,132 @@ fn moved_out_of_ref(db: &dyn HirDatabase, body: &MirBody) -> Vec result } +fn partially_moved(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut result = vec![]; + let mut for_operand = |op: &Operand, span: MirSpan| match op { + Operand::Copy(p) | Operand::Move(p) => { + let mut ty: Ty = body.locals[p.local].ty.clone(); + for proj in p.projection.lookup(&body.projection_store) { + ty = proj.projected_ty( + ty, + db, + make_fetch_closure_field(db), + body.owner.module(db.upcast()).krate(), + ); + } + if !ty.clone().is_copy(db, body.owner) + && !ty.data(Interner).flags.intersects(TypeFlags::HAS_ERROR) + { + result.push(PartiallyMoved { span, ty, local: p.local }); + } + } + Operand::Constant(_) | Operand::Static(_) => (), + }; + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + match &statement.kind { + StatementKind::Assign(_, r) => match r { + Rvalue::ShallowInitBoxWithAlloc(_) => (), + Rvalue::ShallowInitBox(o, _) + | Rvalue::UnaryOp(_, o) + | Rvalue::Cast(_, o, _) + | Rvalue::Repeat(o, _) + | Rvalue::Use(o) => for_operand(o, statement.span), + Rvalue::CopyForDeref(_) + | Rvalue::Discriminant(_) + | Rvalue::Len(_) + | Rvalue::Ref(_, _) => (), + Rvalue::CheckedBinaryOp(_, o1, o2) => { + for_operand(o1, statement.span); + for_operand(o2, statement.span); + } + Rvalue::Aggregate(_, ops) => { + for op in ops.iter() { + for_operand(op, statement.span); + } + } + }, + StatementKind::FakeRead(_) + | StatementKind::Deinit(_) + | StatementKind::StorageLive(_) + | StatementKind::StorageDead(_) + | StatementKind::Nop => (), + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::SwitchInt { discr, .. } => for_operand(discr, terminator.span), + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. } + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { value, .. } => { + for_operand(value, terminator.span); + } + TerminatorKind::Call { func, args, .. } => { + for_operand(func, terminator.span); + args.iter().for_each(|it| for_operand(it, terminator.span)); + } + TerminatorKind::Assert { cond, .. } => { + for_operand(cond, terminator.span); + } + TerminatorKind::Yield { value, .. } => { + for_operand(value, terminator.span); + } + }, + None => (), + } + } + result.shrink_to_fit(); + result +} + +fn borrow_regions(db: &dyn HirDatabase, body: &MirBody) -> Vec { + let mut borrows = FxHashMap::default(); + for (_, block) in body.basic_blocks.iter() { + db.unwind_if_cancelled(); + for statement in &block.statements { + if let StatementKind::Assign(_, Rvalue::Ref(kind, p)) = &statement.kind { + borrows + .entry(p.local) + .and_modify(|it: &mut BorrowRegion| { + it.places.push(statement.span); + }) + .or_insert_with(|| BorrowRegion { + local: p.local, + kind: *kind, + places: vec![statement.span], + }); + } + } + match &block.terminator { + Some(terminator) => match &terminator.kind { + TerminatorKind::FalseEdge { .. } + | TerminatorKind::FalseUnwind { .. } + | TerminatorKind::Goto { .. 
} + | TerminatorKind::UnwindResume + | TerminatorKind::CoroutineDrop + | TerminatorKind::Abort + | TerminatorKind::Return + | TerminatorKind::Unreachable + | TerminatorKind::Drop { .. } => (), + TerminatorKind::DropAndReplace { .. } => {} + TerminatorKind::Call { .. } => {} + _ => (), + }, + None => (), + } + } + + borrows.into_values().collect() +} + #[derive(Debug, Clone, Copy, PartialEq, Eq)] enum ProjectionCase { /// Projection is a local @@ -217,18 +363,7 @@ fn place_case(db: &dyn HirDatabase, body: &MirBody, lvalue: &Place) -> Projectio ty = proj.projected_ty( ty, db, - |c, subst, f| { - let InternedClosure(def, _) = db.lookup_intern_closure(c.into()); - let infer = db.infer(def); - let (captures, _) = infer.closure_info(&c); - let parent_subst = ClosureSubst(subst).parent_subst(); - captures - .get(f) - .expect("broken closure field") - .ty - .clone() - .substitute(Interner, parent_subst) - }, + make_fetch_closure_field(db), body.owner.module(db.upcast()).krate(), ); } diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index 1572a6d497c57..b038900cdacba 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -775,6 +775,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.set_terminator(current, TerminatorKind::Return, expr_id.into()); Ok(None) } + Expr::Become { .. } => not_supported!("tail-calls"), Expr::Yield { .. } => not_supported!("yield"), Expr::RecordLit { fields, path, spread, ellipsis: _, is_assignee_expr: _ } => { let spread_place = match spread { @@ -1246,7 +1247,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_assignment(current, place, op.into(), expr_id.into()); Ok(Some(current)) } - Expr::Underscore => not_supported!("underscore"), + Expr::Underscore => Ok(Some(current)), } } @@ -1780,6 +1781,7 @@ impl<'ctx> MirLowerCtx<'ctx> { self.push_fake_read(c, p, expr.into()); current = scope2.pop_and_drop(self, c, expr.into()); } + hir_def::hir::Statement::Item => (), } } if let Some(tail) = tail { diff --git a/crates/hir-ty/src/tests/diagnostics.rs b/crates/hir-ty/src/tests/diagnostics.rs index 1876be303ad44..80f92eaf43553 100644 --- a/crates/hir-ty/src/tests/diagnostics.rs +++ b/crates/hir-ty/src/tests/diagnostics.rs @@ -1,3 +1,5 @@ +use crate::tests::check_no_mismatches; + use super::check; #[test] @@ -94,3 +96,43 @@ fn test(x: bool) { "#, ); } + +#[test] +fn no_mismatches_on_atpit() { + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait WrappedAssoc { + type Assoc; + fn do_thing(&self) -> Option; +} + +struct Foo; +impl WrappedAssoc for Foo { + type Assoc = impl Sized; + + fn do_thing(&self) -> Option { + Some(()) + } +} +"#, + ); + check_no_mismatches( + r#" +//- minicore: option, sized +#![feature(impl_trait_in_assoc_type)] + +trait Trait { + type Assoc; + const DEFINE: Option; +} + +impl Trait for () { + type Assoc = impl Sized; + const DEFINE: Option = Option::Some(()); +} +"#, + ); +} diff --git a/crates/hir-ty/src/tests/simple.rs b/crates/hir-ty/src/tests/simple.rs index 8474782282606..6c7dbe1db6ff7 100644 --- a/crates/hir-ty/src/tests/simple.rs +++ b/crates/hir-ty/src/tests/simple.rs @@ -3376,11 +3376,8 @@ fn main() { [x,] = &[1,]; //^^^^expected &[i32; 1], got [{unknown}; _] - // FIXME we only want the outermost error, but this matches the current - // behavior of slice patterns let x; [(x,),] = &[(1,),]; - // ^^^^expected {unknown}, got ({unknown},) //^^^^^^^expected &[(i32,); 1], got [{unknown}; _] let x; diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index 
32abbc80c6af4..08f7bb14caa3a 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -31,6 +31,7 @@ mod has_source; pub mod db; pub mod diagnostics; pub mod symbols; +pub mod term_search; mod display; @@ -1084,6 +1085,27 @@ impl Field { Type::new(db, var_id, ty) } + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let var_id = self.parent.into(); + let def_id: AdtId = match self.parent { + VariantDef::Struct(it) => it.id.into(), + VariantDef::Union(it) => it.id.into(), + VariantDef::Variant(it) => it.parent_enum(db).id.into(), + }; + let mut generics = generics.map(|it| it.ty.clone()); + let substs = TyBuilder::subst_for_def(db, def_id, None) + .fill(|x| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build(); + let ty = db.field_types(var_id)[self.id].clone().substitute(Interner, &substs); + Type::new(db, var_id, ty) + } + pub fn layout(&self, db: &dyn HirDatabase) -> Result { db.layout_of_ty( self.ty(db).ty, @@ -1152,6 +1174,10 @@ impl Struct { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.struct_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Struct { @@ -1194,6 +1220,10 @@ impl Union { fn variant_data(self, db: &dyn HirDatabase) -> Arc { db.union_data(self.id).variant_data.clone() } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Union { @@ -1269,6 +1299,10 @@ impl Enum { pub fn layout(self, db: &dyn HirDatabase) -> Result { Adt::from(self).layout(db) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } impl HasVisibility for Enum { @@ -1344,6 +1378,10 @@ impl Variant { _ => parent_layout, }) } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(self.id.into()).is_unstable() + } } /// Variants inherit visibility from the parent enum. @@ -1394,9 +1432,9 @@ impl Adt { /// Turns this ADT into a type with the given type parameters. This isn't /// the greatest API, FIXME find a better one. 
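Before the signature diff just below: a hedged migration note for downstream callers of `Adt::ty_with_args`. The argument changes from a slice to an iterator of owned `Type`s, so a hypothetical call site (not part of this patch) adapts like this, assuming the usual `hir` re-exports:

```rust
use hir::{db::HirDatabase, Adt, Type};

// Hypothetical wrapper: previously written as `adt.ty_with_args(db, &args)`.
// Missing arguments still fall back to the error type inside the accessor,
// so passing fewer generics than the ADT declares does not panic.
fn adt_ty(db: &dyn HirDatabase, adt: Adt, args: &[Type]) -> Type {
    adt.ty_with_args(db, args.iter().cloned())
}
```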
- pub fn ty_with_args(self, db: &dyn HirDatabase, args: &[Type]) -> Type { + pub fn ty_with_args(self, db: &dyn HirDatabase, args: impl Iterator) -> Type { let id = AdtId::from(self); - let mut it = args.iter().map(|t| t.ty.clone()); + let mut it = args.map(|t| t.ty.clone()); let ty = TyBuilder::def_ty(db, id.into(), None) .fill(|x| { let r = it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); @@ -1789,6 +1827,35 @@ impl Function { Type::new_with_resolver_inner(db, &resolver, ty) } + // FIXME: Find better API to also handle const generics + pub fn ret_type_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Type { + let resolver = self.id.resolver(db.upcast()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = + parent_id.map(|id| TyBuilder::subst_for_def(db, id, None).fill(&mut filler).build()); + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs).fill(&mut filler).build(); + + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let ty = callable_sig.ret().clone(); + Type::new_with_resolver_inner(db, &resolver, ty) + } + pub fn async_ret_type(self, db: &dyn HirDatabase) -> Option { if !self.is_async(db) { return None; @@ -1855,6 +1922,51 @@ impl Function { .collect() } + // FIXME: Find better API to also handle const generics + pub fn params_without_self_with_args( + self, + db: &dyn HirDatabase, + generics: impl Iterator, + ) -> Vec { + let environment = db.trait_environment(self.id.into()); + let parent_id: Option = match self.id.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => Some(it.into()), + ItemContainerId::TraitId(it) => Some(it.into()), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => None, + }; + let mut generics = generics.map(|it| it.ty.clone()); + let parent_substs = parent_id.map(|id| { + TyBuilder::subst_for_def(db, id, None) + .fill(|x| match x { + ParamKind::Type => generics + .next() + .unwrap_or_else(|| TyKind::Error.intern(Interner)) + .cast(Interner), + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }) + .build() + }); + + let substs = TyBuilder::subst_for_def(db, self.id, parent_substs) + .fill(|_| { + let ty = generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)); + GenericArg::new(Interner, GenericArgData::Ty(ty)) + }) + .build(); + let callable_sig = db.callable_item_signature(self.id.into()).substitute(Interner, &substs); + let skip = if db.function_data(self.id).has_self_param() { 1 } else { 0 }; + callable_sig + .params() + .iter() + .enumerate() + .skip(skip) + .map(|(idx, ty)| { + let ty = Type { env: environment.clone(), ty: ty.clone() }; + Param { func: self, ty, idx } + }) + .collect() + } + pub fn is_const(self, db: &dyn HirDatabase) -> bool { db.function_data(self.id).has_const_kw() } @@ -1889,6 +2001,11 @@ impl Function { db.function_data(self.id).attrs.is_bench() } + /// Is this function marked as unstable with `#[feature]` attribute? 
+ pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.function_data(self.id).attrs.is_unstable() + } + pub fn is_unsafe_to_call(self, db: &dyn HirDatabase) -> bool { hir_ty::is_fn_unsafe_to_call(db, self.id) } @@ -2052,6 +2169,34 @@ impl SelfParam { let ty = callable_sig.params()[0].clone(); Type { env: environment, ty } } + + // FIXME: Find better API to also handle const generics + pub fn ty_with_args(&self, db: &dyn HirDatabase, generics: impl Iterator) -> Type { + let parent_id: GenericDefId = match self.func.lookup(db.upcast()).container { + ItemContainerId::ImplId(it) => it.into(), + ItemContainerId::TraitId(it) => it.into(), + ItemContainerId::ModuleId(_) | ItemContainerId::ExternBlockId(_) => { + panic!("Never get here") + } + }; + + let mut generics = generics.map(|it| it.ty.clone()); + let mut filler = |x: &_| match x { + ParamKind::Type => { + generics.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } + ParamKind::Const(ty) => unknown_const_as_generic(ty.clone()), + }; + + let parent_substs = TyBuilder::subst_for_def(db, parent_id, None).fill(&mut filler).build(); + let substs = + TyBuilder::subst_for_def(db, self.func, Some(parent_substs)).fill(&mut filler).build(); + let callable_sig = + db.callable_item_signature(self.func.into()).substitute(Interner, &substs); + let environment = db.trait_environment(self.func.into()); + let ty = callable_sig.params()[0].clone(); + Type { env: environment, ty } + } } impl HasVisibility for Function { @@ -2754,7 +2899,7 @@ impl GenericDef { .collect() } - pub fn type_params(self, db: &dyn HirDatabase) -> Vec { + pub fn type_or_const_params(self, db: &dyn HirDatabase) -> Vec { let generics = db.generic_params(self.into()); generics .type_or_consts @@ -3126,12 +3271,16 @@ impl TypeParam { let ty = generic_arg_from_param(db, self.id.into())?; let resolver = self.id.parent().resolver(db.upcast()); match ty.data(Interner) { - GenericArgData::Ty(it) => { + GenericArgData::Ty(it) if *it.kind(Interner) != TyKind::Error => { Some(Type::new_with_resolver_inner(db, &resolver, it.clone())) } _ => None, } } + + pub fn is_unstable(self, db: &dyn HirDatabase) -> bool { + db.attrs(GenericParamId::from(self.id).into()).is_unstable() + } } #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] @@ -3241,6 +3390,26 @@ impl TypeOrConstParam { Either::Right(it) => it.ty(db), } } + + pub fn as_type_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => { + Some(TypeParam { id: TypeParamId::from_unchecked(self.id) }) + } + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => None, + } + } + + pub fn as_const_param(self, db: &dyn HirDatabase) -> Option { + let params = db.generic_params(self.id.parent); + match ¶ms.type_or_consts[self.id.local_id] { + hir_def::generics::TypeOrConstParamData::TypeParamData(_) => None, + hir_def::generics::TypeOrConstParamData::ConstParamData(_) => { + Some(ConstParam { id: ConstParamId::from_unchecked(self.id) }) + } + } + } } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -3285,12 +3454,11 @@ impl Impl { .filter(filter), ) }); + for id in def_crates .iter() .flat_map(|&id| Crate { id }.transitive_reverse_dependencies(db)) .map(|Crate { id }| id) - .chain(def_crates.iter().copied()) - .unique() { all.extend( db.trait_impls_in_crate(id) @@ -3520,7 +3688,7 @@ pub enum CaptureKind { Move, } -#[derive(Clone, PartialEq, Eq, Debug)] 
+#[derive(Clone, PartialEq, Eq, Debug, Hash)] pub struct Type { env: Arc, ty: Ty, @@ -3620,6 +3788,50 @@ impl Type { matches!(self.ty.kind(Interner), TyKind::Ref(..)) } + pub fn contains_reference(&self, db: &dyn HirDatabase) -> bool { + return go(db, self.env.krate, &self.ty); + + fn go(db: &dyn HirDatabase, krate: CrateId, ty: &Ty) -> bool { + match ty.kind(Interner) { + // Reference itself + TyKind::Ref(_, _, _) => true, + + // For non-phantom_data adts we check variants/fields as well as generic parameters + TyKind::Adt(adt_id, substitution) + if !db.struct_datum(krate, *adt_id).flags.phantom_data => + { + let adt_datum = &db.struct_datum(krate, *adt_id); + let adt_datum_bound = + adt_datum.binders.clone().substitute(Interner, substitution); + adt_datum_bound + .variants + .into_iter() + .flat_map(|variant| variant.fields.into_iter()) + .any(|ty| go(db, krate, &ty)) + || substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)) + } + // And for `PhantomData`, we check `T`. + TyKind::Adt(_, substitution) + | TyKind::Tuple(_, substitution) + | TyKind::OpaqueType(_, substitution) + | TyKind::AssociatedType(_, substitution) + | TyKind::FnDef(_, substitution) => substitution + .iter(Interner) + .filter_map(|x| x.ty(Interner)) + .any(|ty| go(db, krate, ty)), + + // For `[T]` or `*T` we check `T` + TyKind::Array(ty, _) | TyKind::Slice(ty) | TyKind::Raw(_, ty) => go(db, krate, ty), + + // Consider everything else as not reference + _ => false, + } + } + } + pub fn as_reference(&self) -> Option<(Type, Mutability)> { let (ty, _lt, m) = self.ty.as_reference()?; let m = Mutability::from_mutable(matches!(m, hir_ty::Mutability::Mut)); @@ -3727,14 +3939,16 @@ impl Type { ) } + // FIXME: Find better API that also handles const generics pub fn impls_trait(&self, db: &dyn HirDatabase, trait_: Trait, args: &[Type]) -> bool { let mut it = args.iter().map(|t| t.ty.clone()); let trait_ref = TyBuilder::trait_ref(db, trait_.id) .push(self.ty.clone()) .fill(|x| { - let r = it.next().unwrap(); match x { - ParamKind::Type => r.cast(Interner), + ParamKind::Type => { + it.next().unwrap_or_else(|| TyKind::Error.intern(Interner)).cast(Interner) + } ParamKind::Const(ty) => { // FIXME: this code is not covered in tests. unknown_const_as_generic(ty.clone()) @@ -4368,12 +4582,24 @@ impl Type { walk_type(db, self, &mut cb); } - + /// Check if type unifies with another type. + /// + /// Note that we consider placeholder types to unify with everything. + /// For example `Option` and `Option` unify although there is unresolved goal `T = U`. pub fn could_unify_with(&self, db: &dyn HirDatabase, other: &Type) -> bool { let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); hir_ty::could_unify(db, self.env.clone(), &tys) } + /// Check if type unifies with another type eagerly making sure there are no unresolved goals. + /// + /// This means that placeholder types are not considered to unify if there are any bounds set on + /// them. 
For example `Option` and `Option` do not unify as we cannot show that `T = U` + pub fn could_unify_with_deeply(&self, db: &dyn HirDatabase, other: &Type) -> bool { + let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), other.ty.clone())); + hir_ty::could_unify_deeply(db, self.env.clone(), &tys) + } + pub fn could_coerce_to(&self, db: &dyn HirDatabase, to: &Type) -> bool { let tys = hir_ty::replace_errors_with_variables(&(self.ty.clone(), to.ty.clone())); hir_ty::could_coerce(db, self.env.clone(), &tys) diff --git a/crates/hir/src/term_search.rs b/crates/hir/src/term_search.rs new file mode 100644 index 0000000000000..72762007dc98f --- /dev/null +++ b/crates/hir/src/term_search.rs @@ -0,0 +1,298 @@ +//! Term search + +use hir_def::type_ref::Mutability; +use hir_ty::db::HirDatabase; +use itertools::Itertools; +use rustc_hash::{FxHashMap, FxHashSet}; + +use crate::{ModuleDef, ScopeDef, Semantics, SemanticsScope, Type}; + +mod expr; +pub use expr::Expr; + +mod tactics; + +/// Key for lookup table to query new types reached. +#[derive(Debug, Hash, PartialEq, Eq)] +enum NewTypesKey { + ImplMethod, + StructProjection, +} + +/// Helper enum to squash big number of alternative trees into `Many` variant as there is too many +/// to take into account. +#[derive(Debug)] +enum AlternativeExprs { + /// There are few trees, so we keep track of them all + Few(FxHashSet), + /// There are too many trees to keep track of + Many, +} + +impl AlternativeExprs { + /// Construct alternative trees + /// + /// # Arguments + /// `threshold` - threshold value for many trees (more than that is many) + /// `exprs` - expressions iterator + fn new(threshold: usize, exprs: impl Iterator) -> AlternativeExprs { + let mut it = AlternativeExprs::Few(Default::default()); + it.extend_with_threshold(threshold, exprs); + it + } + + /// Get type trees stored in alternative trees (or `Expr::Many` in case of many) + /// + /// # Arguments + /// `ty` - Type of expressions queried (this is used to give type to `Expr::Many`) + fn exprs(&self, ty: &Type) -> Vec { + match self { + AlternativeExprs::Few(exprs) => exprs.iter().cloned().collect(), + AlternativeExprs::Many => vec![Expr::Many(ty.clone())], + } + } + + /// Extend alternative expressions + /// + /// # Arguments + /// `threshold` - threshold value for many trees (more than that is many) + /// `exprs` - expressions iterator + fn extend_with_threshold(&mut self, threshold: usize, exprs: impl Iterator) { + match self { + AlternativeExprs::Few(tts) => { + for it in exprs { + if tts.len() > threshold { + *self = AlternativeExprs::Many; + break; + } + + tts.insert(it); + } + } + AlternativeExprs::Many => (), + } + } +} + +/// # Lookup table for term search +/// +/// Lookup table keeps all the state during term search. +/// This means it knows what types and how are reachable. +/// +/// The secondary functionality for lookup table is to keep track of new types reached since last +/// iteration as well as keeping track of which `ScopeDef` items have been used. +/// Both of them are to speed up the term search by leaving out types / ScopeDefs that likely do +/// not produce any new results. 
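Before the table definition that follows: a hedged sketch of the shallow-versus-deep distinction that the lookup's `find` methods rely on. The two `hir::Type` values are assumed to come from the surrounding analysis rather than being constructed here.

```rust
use hir::{db::HirDatabase, Type};

/// With `a` standing for `Option<T>` and `b` for `Option<U>` (distinct,
/// still-unresolved placeholders), the shallow check returns `true` and
/// leaves the goal `T = U` open, while the deep check returns `false`
/// because that goal cannot be shown to hold.
fn unify_verdicts(db: &dyn HirDatabase, a: &Type, b: &Type) -> (bool, bool) {
    (a.could_unify_with(db, b), a.could_unify_with_deeply(db, b))
}
```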
+#[derive(Default, Debug)] +struct LookupTable { + /// All the `Expr`s in "value" produce the type of "key" + data: FxHashMap, + /// New types reached since last query by the `NewTypesKey` + new_types: FxHashMap>, + /// ScopeDefs that are not interesting any more + exhausted_scopedefs: FxHashSet, + /// ScopeDefs that were used in current round + round_scopedef_hits: FxHashSet, + /// Amount of rounds since scopedef was first used. + rounds_since_sopedef_hit: FxHashMap, + /// Types queried but not present + types_wishlist: FxHashSet, + /// Threshold to squash trees to `Many` + many_threshold: usize, +} + +impl LookupTable { + /// Initialize lookup table + fn new(many_threshold: usize) -> Self { + let mut res = Self { many_threshold, ..Default::default() }; + res.new_types.insert(NewTypesKey::ImplMethod, Vec::new()); + res.new_types.insert(NewTypesKey::StructProjection, Vec::new()); + res + } + + /// Find all `Expr`s that unify with the `ty` + fn find(&self, db: &dyn HirDatabase, ty: &Type) -> Option> { + self.data + .iter() + .find(|(t, _)| t.could_unify_with_deeply(db, ty)) + .map(|(t, tts)| tts.exprs(t)) + } + + /// Same as find but automatically creates shared reference of types in the lookup + /// + /// For example if we have type `i32` in data and we query for `&i32` it map all the type + /// trees we have for `i32` with `Expr::Reference` and returns them. + fn find_autoref(&self, db: &dyn HirDatabase, ty: &Type) -> Option> { + self.data + .iter() + .find(|(t, _)| t.could_unify_with_deeply(db, ty)) + .map(|(t, it)| it.exprs(t)) + .or_else(|| { + self.data + .iter() + .find(|(t, _)| { + Type::reference(t, Mutability::Shared).could_unify_with_deeply(db, ty) + }) + .map(|(t, it)| { + it.exprs(t) + .into_iter() + .map(|expr| Expr::Reference(Box::new(expr))) + .collect() + }) + }) + } + + /// Insert new type trees for type + /// + /// Note that the types have to be the same, unification is not enough as unification is not + /// transitive. For example Vec and FxHashSet both unify with Iterator, + /// but they clearly do not unify themselves. + fn insert(&mut self, ty: Type, exprs: impl Iterator) { + match self.data.get_mut(&ty) { + Some(it) => it.extend_with_threshold(self.many_threshold, exprs), + None => { + self.data.insert(ty.clone(), AlternativeExprs::new(self.many_threshold, exprs)); + for it in self.new_types.values_mut() { + it.push(ty.clone()); + } + } + } + } + + /// Iterate all the reachable types + fn iter_types(&self) -> impl Iterator + '_ { + self.data.keys().cloned() + } + + /// Query new types reached since last query by key + /// + /// Create new key if you wish to query it to avoid conflicting with existing queries. + fn new_types(&mut self, key: NewTypesKey) -> Vec { + match self.new_types.get_mut(&key) { + Some(it) => std::mem::take(it), + None => Vec::new(), + } + } + + /// Mark `ScopeDef` as exhausted meaning it is not interesting for us any more + fn mark_exhausted(&mut self, def: ScopeDef) { + self.exhausted_scopedefs.insert(def); + } + + /// Mark `ScopeDef` as used meaning we managed to produce something useful from it + fn mark_fulfilled(&mut self, def: ScopeDef) { + self.round_scopedef_hits.insert(def); + } + + /// Start new round (meant to be called at the beginning of iteration in `term_search`) + /// + /// This functions marks some `ScopeDef`s as exhausted if there have been + /// `MAX_ROUNDS_AFTER_HIT` rounds after first using a `ScopeDef`. 
+ fn new_round(&mut self) { + for def in &self.round_scopedef_hits { + let hits = + self.rounds_since_sopedef_hit.entry(*def).and_modify(|n| *n += 1).or_insert(0); + const MAX_ROUNDS_AFTER_HIT: u32 = 2; + if *hits > MAX_ROUNDS_AFTER_HIT { + self.exhausted_scopedefs.insert(*def); + } + } + self.round_scopedef_hits.clear(); + } + + /// Get exhausted `ScopeDef`s + fn exhausted_scopedefs(&self) -> &FxHashSet { + &self.exhausted_scopedefs + } + + /// Types queried but not found + fn take_types_wishlist(&mut self) -> FxHashSet { + std::mem::take(&mut self.types_wishlist) + } +} + +/// Context for the `term_search` function +#[derive(Debug)] +pub struct TermSearchCtx<'a, DB: HirDatabase> { + /// Semantics for the program + pub sema: &'a Semantics<'a, DB>, + /// Semantic scope, captures context for the term search + pub scope: &'a SemanticsScope<'a>, + /// Target / expected output type + pub goal: Type, + /// Configuration for term search + pub config: TermSearchConfig, +} + +/// Configuration options for the term search +#[derive(Debug, Clone, Copy)] +pub struct TermSearchConfig { + /// Enable borrow checking, this guarantees the outputs of the `term_search` to borrow-check + pub enable_borrowcheck: bool, + /// Indicate when to squash multiple trees to `Many` as there are too many to keep track + pub many_alternatives_threshold: usize, + /// Depth of the search eg. number of cycles to run + pub depth: usize, +} + +impl Default for TermSearchConfig { + fn default() -> Self { + Self { enable_borrowcheck: true, many_alternatives_threshold: 1, depth: 6 } + } +} + +/// # Term search +/// +/// Search for terms (expressions) that unify with the `goal` type. +/// +/// # Arguments +/// * `ctx` - Context for term search +/// +/// Internally this function uses Breadth First Search to find path to `goal` type. +/// The general idea is following: +/// 1. Populate lookup (frontier for BFS) from values (local variables, statics, constants, etc) +/// as well as from well knows values (such as `true/false` and `()`) +/// 2. Iteratively expand the frontier (or contents of the lookup) by trying different type +/// transformation tactics. For example functions take as from set of types (arguments) to some +/// type (return type). Other transformations include methods on type, type constructors and +/// projections to struct fields (field access). +/// 3. Once we manage to find path to type we are interested in we continue for single round to see +/// if we can find more paths that take us to the `goal` type. +/// 4. Return all the paths (type trees) that take us to the `goal` type. +/// +/// Note that there are usually more ways we can get to the `goal` type but some are discarded to +/// reduce the memory consumption. It is also unlikely anyone is willing ti browse through +/// thousands of possible responses so we currently take first 10 from every tactic. 
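Before the implementation that follows: a hedged consumer-side sketch of how a caller (for example an IDE assist) might drive the search and render the resulting type trees. It uses only the signatures introduced in this patch; the `todo!()` fallback and the two path-preference flags are arbitrary choices of the sketch, not part of the API.

```rust
use hir::db::HirDatabase;
use hir::term_search::{term_search, Expr, TermSearchCtx};

/// Search for terms matching `ctx.goal` and render each candidate as source
/// text. Remaining `Expr::Many` holes are printed as `todo!()` by the
/// `many_formatter` callback; importing the traits reported by
/// `Expr::traits_used` is left to the caller.
fn render_candidates<DB: HirDatabase>(ctx: &TermSearchCtx<'_, DB>) -> Vec<String> {
    term_search(ctx)
        .into_iter()
        .filter_map(|expr: Expr| {
            expr.gen_source_code(
                ctx.scope,
                &mut |_| String::from("todo!()"),
                /* prefer_no_std */ false,
                /* prefer_prelude */ true,
            )
            .ok()
        })
        .collect()
}
```

In practice the formatter and the path-preference flags would likely come from the caller's configuration rather than being hard-coded.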
+pub fn term_search(ctx: &TermSearchCtx<'_, DB>) -> Vec { + let module = ctx.scope.module(); + let mut defs = FxHashSet::default(); + defs.insert(ScopeDef::ModuleDef(ModuleDef::Module(module))); + + ctx.scope.process_all_names(&mut |_, def| { + defs.insert(def); + }); + + let mut lookup = LookupTable::new(ctx.config.many_alternatives_threshold); + + // Try trivial tactic first, also populates lookup table + let mut solutions: Vec = tactics::trivial(ctx, &defs, &mut lookup).collect(); + // Use well known types tactic before iterations as it does not depend on other tactics + solutions.extend(tactics::famous_types(ctx, &defs, &mut lookup)); + + for _ in 0..ctx.config.depth { + lookup.new_round(); + + solutions.extend(tactics::type_constructor(ctx, &defs, &mut lookup)); + solutions.extend(tactics::free_function(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_method(ctx, &defs, &mut lookup)); + solutions.extend(tactics::struct_projection(ctx, &defs, &mut lookup)); + solutions.extend(tactics::impl_static_method(ctx, &defs, &mut lookup)); + + // Discard not interesting `ScopeDef`s for speedup + for def in lookup.exhausted_scopedefs() { + defs.remove(def); + } + } + + solutions.into_iter().filter(|it| !it.is_many()).unique().collect() +} diff --git a/crates/hir/src/term_search/expr.rs b/crates/hir/src/term_search/expr.rs new file mode 100644 index 0000000000000..254fbe7e2b53e --- /dev/null +++ b/crates/hir/src/term_search/expr.rs @@ -0,0 +1,468 @@ +//! Type tree for term search + +use hir_def::find_path::PrefixKind; +use hir_expand::mod_path::ModPath; +use hir_ty::{ + db::HirDatabase, + display::{DisplaySourceCodeError, HirDisplay}, +}; +use itertools::Itertools; + +use crate::{ + Adt, AsAssocItem, Const, ConstParam, Field, Function, GenericDef, Local, ModuleDef, + SemanticsScope, Static, Struct, StructKind, Trait, Type, Variant, +}; + +/// Helper function to get path to `ModuleDef` +fn mod_item_path( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Option { + let db = sema_scope.db; + // Account for locals shadowing items from module + let name_hit_count = def.name(db).map(|def_name| { + let mut name_hit_count = 0; + sema_scope.process_all_names(&mut |name, _| { + if name == def_name { + name_hit_count += 1; + } + }); + name_hit_count + }); + + let m = sema_scope.module(); + match name_hit_count { + Some(0..=1) | None => m.find_use_path(db.upcast(), *def, prefer_no_std, prefer_prelude), + Some(_) => m.find_use_path_prefixed( + db.upcast(), + *def, + PrefixKind::ByCrate, + prefer_no_std, + prefer_prelude, + ), + } +} + +/// Helper function to get path to `ModuleDef` as string +fn mod_item_path_str( + sema_scope: &SemanticsScope<'_>, + def: &ModuleDef, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let path = mod_item_path(sema_scope, def, prefer_no_std, prefer_prelude); + path.map(|it| it.display(sema_scope.db.upcast()).to_string()) + .ok_or(DisplaySourceCodeError::PathNotFound) +} + +/// Helper function to get path to `Type` +fn type_path( + sema_scope: &SemanticsScope<'_>, + ty: &Type, + prefer_no_std: bool, + prefer_prelude: bool, +) -> Result { + let db = sema_scope.db; + let m = sema_scope.module(); + + match ty.as_adt() { + Some(adt) => { + let ty_name = ty.display_source_code(db, m.id, true)?; + + let mut path = + mod_item_path(sema_scope, &ModuleDef::Adt(adt), prefer_no_std, prefer_prelude) + .unwrap(); + path.pop_segment(); + let path = path.display(db.upcast()).to_string(); + let res = match 
path.is_empty() { + true => ty_name, + false => format!("{path}::{ty_name}"), + }; + Ok(res) + } + None => ty.display_source_code(db, m.id, true), + } +} + +/// Helper function to filter out generic parameters that are default +fn non_default_generics(db: &dyn HirDatabase, def: GenericDef, generics: &[Type]) -> Vec { + def.type_or_const_params(db) + .into_iter() + .filter_map(|it| it.as_type_param(db)) + .zip(generics) + .filter(|(tp, arg)| tp.default(db).as_ref() != Some(arg)) + .map(|(_, arg)| arg.clone()) + .collect() +} + +/// Type tree shows how can we get from set of types to some type. +/// +/// Consider the following code as an example +/// ``` +/// fn foo(x: i32, y: bool) -> Option { None } +/// fn bar() { +/// let a = 1; +/// let b = true; +/// let c: Option = _; +/// } +/// ``` +/// If we generate type tree in the place of `_` we get +/// ```txt +/// Option +/// | +/// foo(i32, bool) +/// / \ +/// a: i32 b: bool +/// ``` +/// So in short it pretty much gives us a way to get type `Option` using the items we have in +/// scope. +#[derive(Debug, Clone, Eq, Hash, PartialEq)] +pub enum Expr { + /// Constant + Const(Const), + /// Static variable + Static(Static), + /// Local variable + Local(Local), + /// Constant generic parameter + ConstParam(ConstParam), + /// Well known type (such as `true` for bool) + FamousType { ty: Type, value: &'static str }, + /// Function call (does not take self param) + Function { func: Function, generics: Vec, params: Vec }, + /// Method call (has self param) + Method { func: Function, generics: Vec, target: Box, params: Vec }, + /// Enum variant construction + Variant { variant: Variant, generics: Vec, params: Vec }, + /// Struct construction + Struct { strukt: Struct, generics: Vec, params: Vec }, + /// Struct field access + Field { expr: Box, field: Field }, + /// Passing type as reference (with `&`) + Reference(Box), + /// Indicates possibility of many different options that all evaluate to `ty` + Many(Type), +} + +impl Expr { + /// Generate source code for type tree. + /// + /// Note that trait imports are not added to generated code. + /// To make sure that the code is valid, callee has to also ensure that all the traits listed + /// by `traits_used` method are also imported. + pub fn gen_source_code( + &self, + sema_scope: &SemanticsScope<'_>, + many_formatter: &mut dyn FnMut(&Type) -> String, + prefer_no_std: bool, + prefer_prelude: bool, + ) -> Result { + let db = sema_scope.db; + let mod_item_path_str = |s, def| mod_item_path_str(s, def, prefer_no_std, prefer_prelude); + match self { + Expr::Const(it) => mod_item_path_str(sema_scope, &ModuleDef::Const(*it)), + Expr::Static(it) => mod_item_path_str(sema_scope, &ModuleDef::Static(*it)), + Expr::Local(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::ConstParam(it) => Ok(it.name(db).display(db.upcast()).to_string()), + Expr::FamousType { value, .. } => Ok(value.to_string()), + Expr::Function { func, params, .. } => { + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).map(|it| it.container(db)) { + Some(container) => { + let container_name = match container { + crate::AssocItemContainer::Trait(trait_) => { + mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))? 
+ } + crate::AssocItemContainer::Impl(imp) => { + let self_ty = imp.self_ty(db); + // Should it be guaranteed that `mod_item_path` always exists? + match self_ty.as_adt().and_then(|adt| { + mod_item_path( + sema_scope, + &adt.into(), + prefer_no_std, + prefer_prelude, + ) + }) { + Some(path) => path.display(sema_scope.db.upcast()).to_string(), + None => self_ty.display(db).to_string(), + } + } + }; + let fn_name = func.name(db).display(db.upcast()).to_string(); + Ok(format!("{container_name}::{fn_name}({args})")) + } + None => { + let fn_name = mod_item_path_str(sema_scope, &ModuleDef::Function(*func))?; + Ok(format!("{fn_name}({args})")) + } + } + } + Expr::Method { func, target, params, .. } => { + if target.contains_many_in_illegal_pos() { + return Ok(many_formatter(&target.ty(db))); + } + + let func_name = func.name(db).display(db.upcast()).to_string(); + let self_param = func.self_param(db).unwrap(); + let target = target.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let args = params + .iter() + .map(|f| { + f.gen_source_code(sema_scope, many_formatter, prefer_no_std, prefer_prelude) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + + match func.as_assoc_item(db).and_then(|it| it.container_or_implemented_trait(db)) { + Some(trait_) => { + let trait_name = mod_item_path_str(sema_scope, &ModuleDef::Trait(trait_))?; + let target = match self_param.access(db) { + crate::Access::Shared => format!("&{target}"), + crate::Access::Exclusive => format!("&mut {target}"), + crate::Access::Owned => target, + }; + let res = match args.is_empty() { + true => format!("{trait_name}::{func_name}({target})",), + false => format!("{trait_name}::{func_name}({target}, {args})",), + }; + Ok(res) + } + None => Ok(format!("{target}.{func_name}({args})")), + } + } + Expr::Variant { variant, generics, params } => { + let generics = non_default_generics(db, (*variant).into(), generics); + let generics_str = match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }; + let inner = match variant.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|f| { + f.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}({args})") + } + StructKind::Record => { + let fields = variant.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("{generics_str}{{ {args} }}") + } + StructKind::Unit => generics_str, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Variant(*variant))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Struct { strukt, generics, params } => { + let generics = non_default_generics(db, (*strukt).into(), generics); + let inner = match strukt.kind(db) { + StructKind::Tuple => { + let args = params + .iter() + .map(|a| { + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + ) + }) + .collect::, DisplaySourceCodeError>>()? 
+ .into_iter() + .join(", "); + format!("({args})") + } + StructKind::Record => { + let fields = strukt.fields(db); + let args = params + .iter() + .zip(fields.iter()) + .map(|(a, f)| { + let tmp = format!( + "{}: {}", + f.name(db).display(db.upcast()), + a.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude + )? + ); + Ok(tmp) + }) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!(" {{ {args} }}") + } + StructKind::Unit => match generics.is_empty() { + true => String::new(), + false => { + let generics = generics + .iter() + .map(|it| type_path(sema_scope, it, prefer_no_std, prefer_prelude)) + .collect::, DisplaySourceCodeError>>()? + .into_iter() + .join(", "); + format!("::<{generics}>") + } + }, + }; + + let prefix = mod_item_path_str(sema_scope, &ModuleDef::Adt(Adt::Struct(*strukt)))?; + Ok(format!("{prefix}{inner}")) + } + Expr::Field { expr, field } => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let strukt = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + let field = field.name(db).display(db.upcast()).to_string(); + Ok(format!("{strukt}.{field}")) + } + Expr::Reference(expr) => { + if expr.contains_many_in_illegal_pos() { + return Ok(many_formatter(&expr.ty(db))); + } + + let inner = expr.gen_source_code( + sema_scope, + many_formatter, + prefer_no_std, + prefer_prelude, + )?; + Ok(format!("&{inner}")) + } + Expr::Many(ty) => Ok(many_formatter(ty)), + } + } + + /// Get type of the type tree. + /// + /// Same as getting the type of root node + pub fn ty(&self, db: &dyn HirDatabase) -> Type { + match self { + Expr::Const(it) => it.ty(db), + Expr::Static(it) => it.ty(db), + Expr::Local(it) => it.ty(db), + Expr::ConstParam(it) => it.ty(db), + Expr::FamousType { ty, .. } => ty.clone(), + Expr::Function { func, generics, .. } => { + func.ret_type_with_args(db, generics.iter().cloned()) + } + Expr::Method { func, generics, target, .. } => func.ret_type_with_args( + db, + target.ty(db).type_arguments().chain(generics.iter().cloned()), + ), + Expr::Variant { variant, generics, .. } => { + Adt::from(variant.parent_enum(db)).ty_with_args(db, generics.iter().cloned()) + } + Expr::Struct { strukt, generics, .. } => { + Adt::from(*strukt).ty_with_args(db, generics.iter().cloned()) + } + Expr::Field { expr, field } => field.ty_with_args(db, expr.ty(db).type_arguments()), + Expr::Reference(it) => it.ty(db), + Expr::Many(ty) => ty.clone(), + } + } + + /// List the traits used in type tree + pub fn traits_used(&self, db: &dyn HirDatabase) -> Vec { + let mut res = Vec::new(); + + if let Expr::Method { func, params, .. } = self { + res.extend(params.iter().flat_map(|it| it.traits_used(db))); + if let Some(it) = func.as_assoc_item(db) { + if let Some(it) = it.container_or_implemented_trait(db) { + res.push(it); + } + } + } + + res + } + + /// Check in the tree contains `Expr::Many` variant in illegal place to insert `todo`, + /// `unimplemented` or similar macro + /// + /// Some examples are following + /// ```no_compile + /// macro!().foo + /// macro!().bar() + /// ¯o!() + /// ``` + fn contains_many_in_illegal_pos(&self) -> bool { + match self { + Expr::Method { target, .. } => target.contains_many_in_illegal_pos(), + Expr::Field { expr, .. 
} => expr.contains_many_in_illegal_pos(), + Expr::Reference(target) => target.is_many(), + Expr::Many(_) => true, + _ => false, + } + } + + /// Helper function to check if outermost type tree is `Expr::Many` variant + pub fn is_many(&self) -> bool { + matches!(self, Expr::Many(_)) + } +} diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs new file mode 100644 index 0000000000000..666d63ac1558b --- /dev/null +++ b/crates/hir/src/term_search/tactics.rs @@ -0,0 +1,859 @@ +//! Tactics for term search +//! +//! All the tactics take following arguments +//! * `ctx` - Context for the term search +//! * `defs` - Set of items in scope at term search target location +//! * `lookup` - Lookup table for types +//! And they return iterator that yields type trees that unify with the `goal` type. + +use std::iter; + +use hir_ty::db::HirDatabase; +use hir_ty::mir::BorrowKind; +use hir_ty::TyBuilder; +use itertools::Itertools; +use rustc_hash::FxHashSet; + +use crate::{ + Adt, AssocItem, Enum, GenericDef, GenericParam, HasVisibility, Impl, ModuleDef, ScopeDef, Type, + TypeParam, Variant, +}; + +use crate::term_search::{Expr, TermSearchConfig}; + +use super::{LookupTable, NewTypesKey, TermSearchCtx}; + +/// # Trivial tactic +/// +/// Attempts to fulfill the goal by trying items in scope +/// Also works as a starting point to move all items in scope to lookup table. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +/// +/// Returns iterator that yields elements that unify with `goal`. +/// +/// _Note that there is no use of calling this tactic in every iteration as the output does not +/// depend on the current state of `lookup`_ +pub(super) fn trivial<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + defs.iter().filter_map(|def| { + let expr = match def { + ScopeDef::ModuleDef(ModuleDef::Const(it)) => Some(Expr::Const(*it)), + ScopeDef::ModuleDef(ModuleDef::Static(it)) => Some(Expr::Static(*it)), + ScopeDef::GenericParam(GenericParam::ConstParam(it)) => Some(Expr::ConstParam(*it)), + ScopeDef::Local(it) => { + if ctx.config.enable_borrowcheck { + let borrowck = db.borrowck(it.parent).ok()?; + + let invalid = borrowck.iter().any(|b| { + b.partially_moved.iter().any(|moved| { + Some(&moved.local) == b.mir_body.binding_locals.get(it.binding_id) + }) || b.borrow_regions.iter().any(|region| { + // Shared borrows are fine + Some(®ion.local) == b.mir_body.binding_locals.get(it.binding_id) + && region.kind != BorrowKind::Shared + }) + }); + + if invalid { + return None; + } + } + + Some(Expr::Local(*it)) + } + _ => None, + }?; + + lookup.mark_exhausted(*def); + + let ty = expr.ty(db); + lookup.insert(ty.clone(), std::iter::once(expr.clone())); + + // Don't suggest local references as they are not valid for return + if matches!(expr, Expr::Local(_)) && ty.contains_reference(db) { + return None; + } + + ty.could_unify_with_deeply(db, &ctx.goal).then_some(expr) + }) +} + +/// # Type constructor tactic +/// +/// Attempts different type constructors for enums and structs in scope +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
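To make the type constructor tactic concrete, here is an illustrative sketch (not code from this patch; the enum, struct and locals are made up, mirroring the assist tests added later in this series) of the expressions it can synthesize once a local of a suitable type is in the lookup table:

```rust
// Illustrative only: with `a: i32` in the lookup table, the tactic can build
// enum variant and struct constructions whose type unifies with the goal.
enum Option<T> { None, Some(T) }
struct Foo(i32);

fn f() {
    let a: i32 = 1;
    let b: Option<i32> = Option::Some(a); // `Option::None` is offered as well
    let c: Foo = Foo(a);
}
```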
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn type_constructor<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + fn variant_helper( + db: &dyn HirDatabase, + lookup: &mut LookupTable, + parent_enum: Enum, + variant: Variant, + goal: &Type, + config: &TermSearchConfig, + ) -> Vec<(Type, Vec)> { + // Ignore unstable + if variant.is_unstable(db) { + return Vec::new(); + } + + let generics = GenericDef::from(variant.parent_enum(db)); + let Some(type_params) = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>() + else { + // Ignore enums with const generics + return Vec::new(); + }; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return Vec::new(); + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return Vec::new(); + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + generic_params + .filter_map(move |generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| it.default(db).unwrap_or_else(|| g.next().expect("No generic"))) + .collect(); + + let enum_ty = Adt::from(parent_enum).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if !generics.is_empty() && !enum_ty.could_unify_with_deeply(db, goal) { + return None; + } + + // Ignore types that have something to do with lifetimes + if config.enable_borrowcheck && enum_ty.contains_reference(db) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = variant + .fields(db) + .into_iter() + .map(|field| lookup.find(db, &field.ty_with_args(db, generics.iter().cloned()))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let variant_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Variant { variant, generics: generics.clone(), params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Variant { variant, generics: generics.clone(), params }) + .collect() + }; + lookup.insert(enum_ty.clone(), variant_exprs.iter().cloned()); + + Some((enum_ty, variant_exprs)) + }) + .collect() + } + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Variant(it)) => { + let variant_exprs = + variant_helper(db, lookup, it.parent_enum(db), *it, &ctx.goal, &ctx.config); + if variant_exprs.is_empty() { + return None; + } + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Variant(*it))); + Some(variant_exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(enum_))) => { + let exprs: Vec<(Type, Vec)> = enum_ + .variants(db) + .into_iter() + .flat_map(|it| variant_helper(db, lookup, 
*enum_, it, &ctx.goal, &ctx.config)) + .collect(); + + if !exprs.is_empty() { + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Enum(*enum_)))); + } + + Some(exprs) + } + ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(it))) => { + // Ignore unstable and not visible + if it.is_unstable(db) || !it.is_visible_from(db, module) { + return None; + } + + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // We currently do not check lifetime bounds so ignore all types that have something to do + // with them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| { + it.default(db) + .unwrap_or_else(|| g.next().expect("Missing type param")) + }) + .collect(); + + let struct_ty = Adt::from(*it).ty_with_args(db, generics.iter().cloned()); + + // Allow types with generics only if they take us straight to goal for + // performance reasons + if non_default_type_params_len != 0 + && struct_ty.could_unify_with_deeply(db, &ctx.goal) + { + return None; + } + + // Ignore types that have something to do with lifetimes + if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { + return None; + } + let fileds = it.fields(db); + // Check if all fields are visible, otherwise we cannot fill them + if fileds.iter().any(|it| !it.is_visible_from(db, module)) { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = fileds + .into_iter() + .map(|field| lookup.find(db, &field.ty(db))) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let struct_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Struct { strukt: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Struct { + strukt: *it, + generics: generics.clone(), + params, + }) + .collect() + }; + + lookup + .mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Adt(Adt::Struct(*it)))); + lookup.insert(struct_ty.clone(), struct_exprs.iter().cloned()); + + Some((struct_ty, struct_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Free function tactic +/// +/// Attempts to call different functions in scope with parameters from lookup table. +/// Functions that include generics are not used for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
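As a sketch of what the free function tactic can reach (illustrative code mirroring the `test_fn_with_reference_types` assist test added later in this series; the body of `f` is adjusted so the snippet compiles):

```rust
// Illustrative only: with `a: i32` in the lookup table and `f` in scope, the
// tactic proposes the call `f(&a)` for the goal type `f32`; the shared borrow
// on the argument comes from `find_autoref`.
fn f(x: &i32) -> f32 { *x as f32 }

fn g() {
    let a = 1;
    let b: f32 = f(&a);
}
```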
+/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn free_function<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + defs.iter() + .filter_map(move |def| match def { + ScopeDef::ModuleDef(ModuleDef::Function(it)) => { + let generics = GenericDef::from(*it); + + // Ignore const params for now + let type_params = generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore lifetimes as we do not check them + if !generics.lifetime_params(db).is_empty() { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) { + return None; + } + + let non_default_type_params_len = + type_params.iter().filter(|it| it.default(db).is_none()).count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = type_params + .iter() + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args(db, generics.iter().cloned()); + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) + || it.is_unsafe_to_call(db) + || it.is_unstable(db) + || ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args(db, generics.iter().cloned()) + .into_iter() + .map(|field| { + let ty = field.ty(); + match ty.is_mutable_reference() { + true => None, + false => lookup.find_autoref(db, ty), + } + }) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: *it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { + func: *it, + generics: generics.clone(), + + params, + }) + .collect() + }; + + lookup.mark_fulfilled(ScopeDef::ModuleDef(ModuleDef::Function(*it))); + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + } + _ => None, + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Impl method tactic +/// +/// Attempts to to call methods on types from lookup table. +/// This includes both functions from direct impl blocks as well as functions from traits. 
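A minimal sketch of the calls this tactic contributes (the type and method below are illustrative, not from the patch; trait methods are rendered fully qualified by `Expr::gen_source_code`, e.g. `Trait::method(&target)`):

```rust
// Illustrative only: with `s: S` reachable from the lookup table, the tactic
// proposes the method call `s.value()` for the goal type `u32`.
struct S;

impl S {
    fn value(&self) -> u32 { 0 }
}

fn g(s: S) {
    let n: u32 = s.value();
}
```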
+/// Methods defined in impl blocks that are generic and methods that are themselves have +/// generics are ignored for performance reasons. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn impl_method<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::ImplMethod) + .into_iter() + .flat_map(|ty| { + Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) + }) + .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item))) + .filter_map(|(imp, ty, it)| match it { + AssocItem::Function(f) => Some((imp, ty, f)), + _ => None, + }) + .filter_map(move |(imp, ty, it)| { + let fn_generics = GenericDef::from(it); + let imp_generics = GenericDef::from(imp); + + // Ignore const params for now + let imp_type_params = imp_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore const params for now + let fn_type_params = fn_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore all functions that have something to do with lifetimes as we don't check them + if !fn_generics.lifetime_params(db).is_empty() { + return None; + } + + // Ignore functions without self param + if !it.has_self_param(db) { + return None; + } + + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + { + return None; + } + + let non_default_type_params_len = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .filter(|it| it.default(db).is_none()) + .count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 0 { + return None; + } + + let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Ignore functions that do not change the type + if ty.could_unify_with_deeply(db, &ret_ty) { + return None; + } + + let self_ty = it + 
.self_param(db) + .expect("No self param") + .ty_with_args(db, ty.type_arguments().chain(generics.iter().cloned())); + + // Ignore functions that have different self type + if !self_ty.autoderef(db).any(|s_ty| ty == s_ty) { + return None; + } + + let target_type_exprs = lookup.find(db, &ty).expect("Type not in lookup"); + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; + + let fn_exprs: Vec = std::iter::once(target_type_exprs) + .chain(param_exprs) + .multi_cartesian_product() + .map(|params| { + let mut params = params.into_iter(); + let target = Box::new(params.next().unwrap()); + Expr::Method { + func: it, + generics: generics.clone(), + target, + params: params.collect(), + } + }) + .collect(); + + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Struct projection tactic +/// +/// Attempts different struct fields (`foo.bar.baz`) +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn struct_projection<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .new_types(NewTypesKey::StructProjection) + .into_iter() + .map(|ty| (ty.clone(), lookup.find(db, &ty).expect("Expr not in lookup"))) + .flat_map(move |(ty, targets)| { + ty.fields(db).into_iter().filter_map(move |(field, filed_ty)| { + if !field.is_visible_from(db, module) { + return None; + } + let exprs = targets + .clone() + .into_iter() + .map(move |target| Expr::Field { field, expr: Box::new(target) }); + Some((filed_ty, exprs)) + }) + }) + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} + +/// # Famous types tactic +/// +/// Attempts different values of well known types such as `true` or `false`. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. 
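Concretely, the tactic seeds exactly three values, as the array in the function body below shows; a short sketch of the completions this enables:

```rust
// Illustrative only: `true`, `false` and `()` are the only values proposed.
fn f() {
    let a: bool = true; // `false` is offered as well
    let b: () = ();
}
```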
+/// +/// _Note that there is no point of calling it iteratively as the output is always the same_ +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn famous_types<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + [ + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "true" }, + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::bool()), value: "false" }, + Expr::FamousType { ty: Type::new(db, module.id, TyBuilder::unit()), value: "()" }, + ] + .into_iter() + .map(|exprs| { + lookup.insert(exprs.ty(db), std::iter::once(exprs.clone())); + exprs + }) + .filter(|expr| expr.ty(db).could_unify_with_deeply(db, &ctx.goal)) +} + +/// # Impl static method (without self type) tactic +/// +/// Attempts different functions from impl blocks that take no self parameter. +/// +/// Updates lookup by new types reached and returns iterator that yields +/// elements that unify with `goal`. +/// +/// # Arguments +/// * `ctx` - Context for the term search +/// * `defs` - Set of items in scope at term search target location +/// * `lookup` - Lookup table for types +pub(super) fn impl_static_method<'a, DB: HirDatabase>( + ctx: &'a TermSearchCtx<'a, DB>, + _defs: &'a FxHashSet, + lookup: &'a mut LookupTable, +) -> impl Iterator + 'a { + let db = ctx.sema.db; + let module = ctx.scope.module(); + lookup + .take_types_wishlist() + .into_iter() + .chain(iter::once(ctx.goal.clone())) + .flat_map(|ty| { + Impl::all_for_type(db, ty.clone()).into_iter().map(move |imp| (ty.clone(), imp)) + }) + .filter(|(_, imp)| !imp.is_unsafe(db)) + .flat_map(|(ty, imp)| imp.items(db).into_iter().map(move |item| (imp, ty.clone(), item))) + .filter_map(|(imp, ty, it)| match it { + AssocItem::Function(f) => Some((imp, ty, f)), + _ => None, + }) + .filter_map(move |(imp, ty, it)| { + let fn_generics = GenericDef::from(it); + let imp_generics = GenericDef::from(imp); + + // Ignore const params for now + let imp_type_params = imp_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore const params for now + let fn_type_params = fn_generics + .type_or_const_params(db) + .into_iter() + .map(|it| it.as_type_param(db)) + .collect::>>()?; + + // Ignore all functions that have something to do with lifetimes as we don't check them + if !fn_generics.lifetime_params(db).is_empty() + || !imp_generics.lifetime_params(db).is_empty() + { + return None; + } + + // Ignore functions with self param + if it.has_self_param(db) { + return None; + } + + // Filter out private and unsafe functions + if !it.is_visible_from(db, module) || it.is_unsafe_to_call(db) || it.is_unstable(db) { + return None; + } + + // Only account for stable type parameters for now, unstable params can be default + // tho, for example in `Box` + if imp_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + || fn_type_params.iter().any(|it| it.is_unstable(db) && it.default(db).is_none()) + { + return None; + } + + let non_default_type_params_len = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .filter(|it| it.default(db).is_none()) + .count(); + + // Ignore bigger number of generics for now as they kill the performance + if non_default_type_params_len > 1 { + return None; + } + 
+ let generic_params = lookup + .iter_types() + .collect::>() // Force take ownership + .into_iter() + .permutations(non_default_type_params_len); + + let exprs: Vec<_> = generic_params + .filter_map(|generics| { + // Insert default type params + let mut g = generics.into_iter(); + let generics: Vec<_> = imp_type_params + .iter() + .chain(fn_type_params.iter()) + .map(|it| match it.default(db) { + Some(ty) => Some(ty), + None => { + let generic = g.next().expect("Missing type param"); + it.trait_bounds(db) + .into_iter() + .all(|bound| generic.impls_trait(db, bound, &[])); + // Filter out generics that do not unify due to trait bounds + it.ty(db).could_unify_with(db, &generic).then_some(generic) + } + }) + .collect::>()?; + + let ret_ty = it.ret_type_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ); + // Filter out functions that return references + if ctx.config.enable_borrowcheck && ret_ty.contains_reference(db) + || ret_ty.is_raw_ptr() + { + return None; + } + + // Ignore functions that do not change the type + // if ty.could_unify_with_deeply(db, &ret_ty) { + // return None; + // } + + // Early exit if some param cannot be filled from lookup + let param_exprs: Vec> = it + .params_without_self_with_args( + db, + ty.type_arguments().chain(generics.iter().cloned()), + ) + .into_iter() + .map(|field| lookup.find_autoref(db, field.ty())) + .collect::>()?; + + // Note that we need special case for 0 param constructors because of multi cartesian + // product + let fn_exprs: Vec = if param_exprs.is_empty() { + vec![Expr::Function { func: it, generics, params: Vec::new() }] + } else { + param_exprs + .into_iter() + .multi_cartesian_product() + .map(|params| Expr::Function { + func: it, + generics: generics.clone(), + params, + }) + .collect() + }; + + lookup.insert(ret_ty.clone(), fn_exprs.iter().cloned()); + Some((ret_ty, fn_exprs)) + }) + .collect(); + Some(exprs) + }) + .flatten() + .filter_map(|(ty, exprs)| ty.could_unify_with_deeply(db, &ctx.goal).then_some(exprs)) + .flatten() +} diff --git a/crates/ide-assists/src/handlers/fix_visibility.rs b/crates/ide-assists/src/handlers/fix_visibility.rs index 204e796fa2c0d..589591a6777ee 100644 --- a/crates/ide-assists/src/handlers/fix_visibility.rs +++ b/crates/ide-assists/src/handlers/fix_visibility.rs @@ -79,7 +79,7 @@ fn add_vis_to_referenced_module_def(acc: &mut Assists, ctx: &AssistContext<'_>) edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); @@ -131,7 +131,7 @@ fn add_vis_to_referenced_record_field(acc: &mut Assists, ctx: &AssistContext<'_> edit.edit_file(target_file); let vis_owner = edit.make_mut(vis_owner); - vis_owner.set_visibility(missing_visibility.clone_for_update()); + vis_owner.set_visibility(Some(missing_visibility.clone_for_update())); if let Some((cap, vis)) = ctx.config.snippet_cap.zip(vis_owner.visibility()) { edit.add_tabstop_before(cap, vis); diff --git a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs index 24094de22c8d1..5f7350bc2812b 100644 --- a/crates/ide-assists/src/handlers/generate_trait_from_impl.rs +++ b/crates/ide-assists/src/handlers/generate_trait_from_impl.rs @@ -1,8 +1,13 @@ use crate::assist_context::{AssistContext, Assists}; use 
ide_db::assists::AssistId; use syntax::{ - ast::{self, edit::IndentLevel, make, HasGenericParams, HasVisibility}, - ted, AstNode, SyntaxKind, + ast::{ + self, + edit_in_place::{HasVisibilityEdit, Indent}, + make, HasGenericParams, HasName, + }, + ted::{self, Position}, + AstNode, SyntaxKind, T, }; // NOTES : @@ -44,7 +49,7 @@ use syntax::{ // }; // } // -// trait ${0:TraitName} { +// trait ${0:NewTrait} { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -53,7 +58,7 @@ use syntax::{ // const_maker! {i32, 7} // } // -// impl ${0:TraitName} for Foo { +// impl ${0:NewTrait} for Foo { // // Used as an associated constant. // const CONST_ASSOC: usize = N * 4; // @@ -94,8 +99,10 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ "Generate trait from impl", impl_ast.syntax().text_range(), |builder| { + let impl_ast = builder.make_mut(impl_ast); let trait_items = assoc_items.clone_for_update(); - let impl_items = assoc_items.clone_for_update(); + let impl_items = builder.make_mut(assoc_items); + let impl_name = builder.make_mut(impl_name); trait_items.assoc_items().for_each(|item| { strip_body(&item); @@ -112,46 +119,42 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ impl_ast.generic_param_list(), impl_ast.where_clause(), trait_items, - ); + ) + .clone_for_update(); + + let trait_name = trait_ast.name().expect("new trait should have a name"); + let trait_name_ref = make::name_ref(&trait_name.to_string()).clone_for_update(); // Change `impl Foo` to `impl NewTrait for Foo` - let arg_list = if let Some(genpars) = impl_ast.generic_param_list() { - genpars.to_generic_args().to_string() - } else { - "".to_owned() - }; - - if let Some(snippet_cap) = ctx.config.snippet_cap { - builder.replace_snippet( - snippet_cap, - impl_name.syntax().text_range(), - format!("${{0:TraitName}}{} for {}", arg_list, impl_name), - ); + let mut elements = vec![ + trait_name_ref.syntax().clone().into(), + make::tokens::single_space().into(), + make::token(T![for]).into(), + ]; + + if let Some(params) = impl_ast.generic_param_list() { + let gen_args = ¶ms.to_generic_args().clone_for_update(); + elements.insert(1, gen_args.syntax().clone().into()); + } - // Insert trait before TraitImpl - builder.insert_snippet( - snippet_cap, - impl_ast.syntax().text_range().start(), - format!( - "{}\n\n{}", - trait_ast.to_string().replace("NewTrait", "${0:TraitName}"), - IndentLevel::from_node(impl_ast.syntax()) - ), - ); - } else { - builder.replace( - impl_name.syntax().text_range(), - format!("NewTrait{} for {}", arg_list, impl_name), - ); + ted::insert_all(Position::before(impl_name.syntax()), elements); + + // Insert trait before TraitImpl + ted::insert_all_raw( + Position::before(impl_ast.syntax()), + vec![ + trait_ast.syntax().clone().into(), + make::tokens::whitespace(&format!("\n\n{}", impl_ast.indent_level())).into(), + ], + ); - // Insert trait before TraitImpl - builder.insert( - impl_ast.syntax().text_range().start(), - format!("{}\n\n{}", trait_ast, IndentLevel::from_node(impl_ast.syntax())), + // Link the trait name & trait ref names together as a placeholder snippet group + if let Some(cap) = ctx.config.snippet_cap { + builder.add_placeholder_snippet_group( + cap, + vec![trait_name.syntax().clone(), trait_name_ref.syntax().clone()], ); } - - builder.replace(assoc_items.syntax().text_range(), impl_items.to_string()); }, ); @@ -160,23 +163,8 @@ pub(crate) fn generate_trait_from_impl(acc: &mut Assists, ctx: &AssistContext<'_ /// 
`E0449` Trait items always share the visibility of their trait fn remove_items_visibility(item: &ast::AssocItem) { - match item { - ast::AssocItem::Const(c) => { - if let Some(vis) = c.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::Fn(f) => { - if let Some(vis) = f.visibility() { - ted::remove(vis.syntax()); - } - } - ast::AssocItem::TypeAlias(t) => { - if let Some(vis) = t.visibility() { - ted::remove(vis.syntax()); - } - } - _ => (), + if let Some(has_vis) = ast::AnyHasVisibility::cast(item.syntax().clone()) { + has_vis.set_visibility(None); } } @@ -404,12 +392,12 @@ impl F$0oo { r#" struct Foo([i32; N]); -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST: usize = N * 4; } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. const CONST: usize = N * 4; } diff --git a/crates/ide-assists/src/handlers/term_search.rs b/crates/ide-assists/src/handlers/term_search.rs new file mode 100644 index 0000000000000..51a1a406f316d --- /dev/null +++ b/crates/ide-assists/src/handlers/term_search.rs @@ -0,0 +1,253 @@ +//! Term search assist +use hir::term_search::TermSearchCtx; +use ide_db::{ + assists::{AssistId, AssistKind, GroupLabel}, + famous_defs::FamousDefs, +}; + +use itertools::Itertools; +use syntax::{ast, AstNode}; + +use crate::assist_context::{AssistContext, Assists}; + +pub(crate) fn term_search(acc: &mut Assists, ctx: &AssistContext<'_>) -> Option<()> { + let unexpanded = ctx.find_node_at_offset::()?; + let syntax = unexpanded.syntax(); + let goal_range = syntax.text_range(); + + let parent = syntax.parent()?; + let scope = ctx.sema.scope(&parent)?; + + let macro_call = ctx.sema.resolve_macro_call(&unexpanded)?; + + let famous_defs = FamousDefs(&ctx.sema, scope.krate()); + let std_todo = famous_defs.core_macros_todo()?; + let std_unimplemented = famous_defs.core_macros_unimplemented()?; + + if macro_call != std_todo && macro_call != std_unimplemented { + return None; + } + + let target_ty = ctx.sema.type_of_expr(&ast::Expr::cast(parent.clone())?)?.adjusted(); + + let term_search_ctx = TermSearchCtx { + sema: &ctx.sema, + scope: &scope, + goal: target_ty, + config: Default::default(), + }; + let paths = hir::term_search::term_search(&term_search_ctx); + + if paths.is_empty() { + return None; + } + + let mut formatter = |_: &hir::Type| String::from("todo!()"); + + let paths = paths + .into_iter() + .filter_map(|path| { + path.gen_source_code( + &scope, + &mut formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok() + }) + .unique(); + + for code in paths { + acc.add_group( + &GroupLabel(String::from("Term search")), + AssistId("term_search", AssistKind::Generate), + format!("Replace todo!() with {code}"), + goal_range, + |builder| { + builder.replace(goal_range, code); + }, + ); + } + + Some(()) +} + +#[cfg(test)] +mod tests { + use crate::tests::{check_assist, check_assist_not_applicable}; + + use super::*; + + #[test] + fn test_complete_local() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!() }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_todo_with_msg() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn 
test_complete_unimplemented_with_msg() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_unimplemented() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: u128 = 1; let b: u128 = todo$0!("asd") }"#, + r#"fn f() { let a: u128 = 1; let b: u128 = a }"#, + ) + } + + #[test] + fn test_complete_struct_field() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = todo$0!(); }"#, + r#"struct A { pub x: i32, y: bool } +fn f() { let a = A { x: 1, y: true }; let b: i32 = a.x; }"#, + ) + } + + #[test] + fn test_enum_with_generics() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented, option +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: Option = None; }"#, + ) + } + + #[test] + fn test_enum_with_generics2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: i32 = 1; let b: Option = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics3() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = todo$0!(); }"#, + r#"enum Option { None, Some(T) } +fn f() { let a: Option = Option::None; let b: Option> = Option::Some(a); }"#, + ) + } + + #[test] + fn test_enum_with_generics4() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a = 0; let b: Foo = Foo::Foo(a); }"#, + ); + + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = todo$0!(); }"#, + r#"enum Foo { Foo(T) } +fn f() { let a: Foo = Foo::Foo(0); let b: Foo = a; }"#, + ) + } + + #[test] + fn test_newtype() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = todo$0!(); }"#, + r#"struct Foo(i32); +fn f() { let a: i32 = 1; let b: Foo = Foo(a); }"#, + ) + } + + #[test] + fn test_shadowing() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = todo$0!(); }"#, + r#"fn f() { let a: i32 = 1; let b: i32 = 2; let a: u32 = 0; let c: i32 = b; }"#, + ) + } + + #[test] + fn test_famous_bool() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f() { let a: bool = todo$0!(); }"#, + r#"fn f() { let a: bool = false; }"#, + ) + } + + #[test] + fn test_fn_with_reference_types() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = 1; let b: f32 = f(&a); }"#, + ) + } + + #[test] + fn 
test_fn_with_reference_types2() { + check_assist( + term_search, + r#"//- minicore: todo, unimplemented +fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = todo$0!(); }"#, + r#"fn f(a: &i32) -> f32 { a as f32 } +fn g() { let a = &1; let b: f32 = f(a); }"#, + ) + } + + #[test] + fn test_fn_with_reference_types3() { + check_assist_not_applicable( + term_search, + r#"//- minicore: todo, unimplemented + fn f(a: &i32) -> f32 { a as f32 } + fn g() { let a = &mut 1; let b: f32 = todo$0!(); }"#, + ) + } +} diff --git a/crates/ide-assists/src/lib.rs b/crates/ide-assists/src/lib.rs index 2fec104323dc7..dcc89014b956b 100644 --- a/crates/ide-assists/src/lib.rs +++ b/crates/ide-assists/src/lib.rs @@ -60,11 +60,6 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod assist_config; mod assist_context; #[cfg(test)] @@ -210,6 +205,7 @@ mod handlers { mod replace_turbofish_with_explicit_type; mod sort_items; mod split_import; + mod term_search; mod toggle_ignore; mod unmerge_match_arm; mod unmerge_use; @@ -332,6 +328,7 @@ mod handlers { replace_arith_op::replace_arith_with_saturating, sort_items::sort_items, split_import::split_import, + term_search::term_search, toggle_ignore::toggle_ignore, unmerge_match_arm::unmerge_match_arm, unmerge_use::unmerge_use, diff --git a/crates/ide-assists/src/tests/generated.rs b/crates/ide-assists/src/tests/generated.rs index 8ad735d0ae801..268ba3225b668 100644 --- a/crates/ide-assists/src/tests/generated.rs +++ b/crates/ide-assists/src/tests/generated.rs @@ -1665,7 +1665,7 @@ macro_rules! const_maker { }; } -trait ${0:TraitName} { +trait ${0:NewTrait} { // Used as an associated constant. const CONST_ASSOC: usize = N * 4; @@ -1674,7 +1674,7 @@ trait ${0:TraitName} { const_maker! {i32, 7} } -impl ${0:TraitName} for Foo { +impl ${0:NewTrait} for Foo { // Used as an associated constant. 
const CONST_ASSOC: usize = N * 4; diff --git a/crates/ide-completion/src/completions.rs b/crates/ide-completion/src/completions.rs index ba3c0cf3fd60e..1ea7220960d25 100644 --- a/crates/ide-completion/src/completions.rs +++ b/crates/ide-completion/src/completions.rs @@ -40,7 +40,8 @@ use crate::{ literal::{render_struct_literal, render_variant_lit}, macro_::render_macro, pattern::{render_struct_pat, render_variant_pat}, - render_field, render_path_resolution, render_pattern_resolution, render_tuple_field, + render_expr, render_field, render_path_resolution, render_pattern_resolution, + render_tuple_field, type_alias::{render_type_alias, render_type_alias_with_eq}, union_literal::render_union_literal, RenderContext, @@ -157,6 +158,12 @@ impl Completions { item.add_to(self, ctx.db); } + pub(crate) fn add_expr(&mut self, ctx: &CompletionContext<'_>, expr: &hir::term_search::Expr) { + if let Some(item) = render_expr(ctx, expr) { + item.add_to(self, ctx.db) + } + } + pub(crate) fn add_crate_roots( &mut self, ctx: &CompletionContext<'_>, @@ -694,6 +701,7 @@ pub(super) fn complete_name_ref( match &path_ctx.kind { PathKind::Expr { expr_ctx } => { expr::complete_expr_path(acc, ctx, path_ctx, expr_ctx); + expr::complete_expr(acc, ctx); dot::complete_undotted_self(acc, ctx, path_ctx, expr_ctx); item_list::complete_item_list_in_expr(acc, ctx, path_ctx, expr_ctx); diff --git a/crates/ide-completion/src/completions/expr.rs b/crates/ide-completion/src/completions/expr.rs index 77fd5dd98b8d3..802e9bc3a8077 100644 --- a/crates/ide-completion/src/completions/expr.rs +++ b/crates/ide-completion/src/completions/expr.rs @@ -328,3 +328,59 @@ pub(crate) fn complete_expr_path( } } } + +pub(crate) fn complete_expr(acc: &mut Completions, ctx: &CompletionContext<'_>) { + let _p = tracing::span!(tracing::Level::INFO, "complete_expr").entered(); + + if !ctx.config.enable_term_search { + return; + } + + if !ctx.qualifier_ctx.none() { + return; + } + + if let Some(ty) = &ctx.expected_type { + // Ignore unit types as they are not very interesting + if ty.is_unit() || ty.is_unknown() { + return; + } + + let term_search_ctx = hir::term_search::TermSearchCtx { + sema: &ctx.sema, + scope: &ctx.scope, + goal: ty.clone(), + config: hir::term_search::TermSearchConfig { + enable_borrowcheck: false, + many_alternatives_threshold: 1, + depth: 6, + }, + }; + let exprs = hir::term_search::term_search(&term_search_ctx); + for expr in exprs { + // Expand method calls + match expr { + hir::term_search::Expr::Method { func, generics, target, params } + if target.is_many() => + { + let target_ty = target.ty(ctx.db); + let term_search_ctx = + hir::term_search::TermSearchCtx { goal: target_ty, ..term_search_ctx }; + let target_exprs = hir::term_search::term_search(&term_search_ctx); + + for expr in target_exprs { + let expanded_expr = hir::term_search::Expr::Method { + func, + generics: generics.clone(), + target: Box::new(expr), + params: params.clone(), + }; + + acc.add_expr(ctx, &expanded_expr) + } + } + _ => acc.add_expr(ctx, &expr), + } + } + } +} diff --git a/crates/ide-completion/src/completions/flyimport.rs b/crates/ide-completion/src/completions/flyimport.rs index b9f91d34b2c2b..3bc329ecd748f 100644 --- a/crates/ide-completion/src/completions/flyimport.rs +++ b/crates/ide-completion/src/completions/flyimport.rs @@ -238,6 +238,8 @@ fn import_on_the_fly( (PathKind::Type { location }, ItemInNs::Types(ty)) => { if matches!(location, TypeLocation::TypeBound) { matches!(ty, ModuleDef::Trait(_)) + } else if matches!(location, 
TypeLocation::ImplTrait) { + matches!(ty, ModuleDef::Trait(_) | ModuleDef::Module(_)) } else { true } diff --git a/crates/ide-completion/src/completions/type.rs b/crates/ide-completion/src/completions/type.rs index e6a4335c3fec8..e4678089462a3 100644 --- a/crates/ide-completion/src/completions/type.rs +++ b/crates/ide-completion/src/completions/type.rs @@ -31,6 +31,11 @@ pub(crate) fn complete_type_path( ScopeDef::ImplSelfType(_) => location.complete_self_type(), // Don't suggest attribute macros and derives. ScopeDef::ModuleDef(Macro(mac)) => mac.is_fn_like(ctx.db), + ScopeDef::ModuleDef(Trait(_) | Module(_)) + if matches!(location, TypeLocation::ImplTrait) => + { + true + } // Type things are fine ScopeDef::ModuleDef( BuiltinType(_) | Adt(_) | Module(_) | Trait(_) | TraitAlias(_) | TypeAlias(_), @@ -184,6 +189,21 @@ pub(crate) fn complete_type_path( } } } + TypeLocation::ImplTrait => { + acc.add_nameref_keywords_with_colon(ctx); + ctx.process_all_names(&mut |name, def, doc_aliases| { + let is_trait_or_module = matches!( + def, + ScopeDef::ModuleDef( + hir::ModuleDef::Module(_) | hir::ModuleDef::Trait(_) + ) + ); + if is_trait_or_module { + acc.add_path_resolution(ctx, path_ctx, name, def, doc_aliases); + } + }); + return; + } _ => {} }; diff --git a/crates/ide-completion/src/config.rs b/crates/ide-completion/src/config.rs index ed5ddde8fbfe9..04563fb0f469b 100644 --- a/crates/ide-completion/src/config.rs +++ b/crates/ide-completion/src/config.rs @@ -14,6 +14,7 @@ pub struct CompletionConfig { pub enable_imports_on_the_fly: bool, pub enable_self_on_the_fly: bool, pub enable_private_editable: bool, + pub enable_term_search: bool, pub full_function_signatures: bool, pub callable: Option, pub snippet_cap: Option, diff --git a/crates/ide-completion/src/context.rs b/crates/ide-completion/src/context.rs index 2a0004f60b820..aa22155feffe2 100644 --- a/crates/ide-completion/src/context.rs +++ b/crates/ide-completion/src/context.rs @@ -202,6 +202,7 @@ impl TypeLocation { } TypeLocation::AssocConstEq => false, TypeLocation::AssocTypeEq => true, + TypeLocation::ImplTrait => false, _ => true, } } @@ -716,7 +717,7 @@ impl<'a> CompletionContext<'a> { let krate = scope.krate(); let module = scope.module(); - let toolchain = db.crate_graph()[krate.into()].channel(); + let toolchain = db.toolchain_channel(krate.into()); // `toolchain == None` means we're in some detached files. Since we have no information on // the toolchain being used, let's just allow unstable items to be listed. 
let is_nightly = matches!(toolchain, Some(base_db::ReleaseChannel::Nightly) | None); diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index 8552a20392abf..c2c0641961a6a 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -166,6 +166,8 @@ pub struct CompletionRelevance { pub postfix_match: Option, /// This is set for type inference results pub is_definite: bool, + /// This is set for items that are function (associated or method) + pub function: Option, } #[derive(Debug, Clone, Copy, Eq, PartialEq)] @@ -207,6 +209,24 @@ pub enum CompletionRelevancePostfixMatch { Exact, } +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub struct CompletionRelevanceFn { + pub has_params: bool, + pub has_self_param: bool, + pub return_type: CompletionRelevanceReturnType, +} + +#[derive(Debug, Clone, Copy, Eq, PartialEq)] +pub enum CompletionRelevanceReturnType { + Other, + /// Returns the Self type of the impl/trait + DirectConstructor, + /// Returns something that indirectly constructs the `Self` type of the impl/trait e.g. `Result`, `Option` + Constructor, + /// Returns a possible builder for the type + Builder, +} + impl CompletionRelevance { /// Provides a relevance score. Higher values are more relevant. /// @@ -231,6 +251,7 @@ impl CompletionRelevance { postfix_match, is_definite, is_item_from_notable_trait, + function, } = self; // lower rank private things @@ -275,6 +296,33 @@ impl CompletionRelevance { if is_definite { score += 10; } + + score += function + .map(|asf| { + let mut fn_score = match asf.return_type { + CompletionRelevanceReturnType::DirectConstructor => 15, + CompletionRelevanceReturnType::Builder => 10, + CompletionRelevanceReturnType::Constructor => 5, + CompletionRelevanceReturnType::Other => 0, + }; + + // When a fn is bumped due to return type: + // Bump Constructor or Builder methods with no arguments, + // over them tha with self arguments + if fn_score > 0 { + if !asf.has_params { + // bump associated functions + fn_score += 1; + } else if asf.has_self_param { + // downgrade methods (below Constructor) + fn_score = 1; + } + } + + fn_score + }) + .unwrap_or_default(); + score } @@ -297,6 +345,7 @@ pub enum CompletionItemKind { Method, Snippet, UnresolvedReference, + Expression, } impl_from!(SymbolKind for CompletionItemKind); @@ -341,6 +390,7 @@ impl CompletionItemKind { CompletionItemKind::Method => "me", CompletionItemKind::Snippet => "sn", CompletionItemKind::UnresolvedReference => "??", + CompletionItemKind::Expression => "ex", } } } diff --git a/crates/ide-completion/src/render.rs b/crates/ide-completion/src/render.rs index 2ed080a834790..3f374b307fbe3 100644 --- a/crates/ide-completion/src/render.rs +++ b/crates/ide-completion/src/render.rs @@ -17,7 +17,7 @@ use ide_db::{ imports::import_assets::LocatedImport, RootDatabase, SnippetCap, SymbolKind, }; -use syntax::{format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; +use syntax::{ast, format_smolstr, AstNode, SmolStr, SyntaxKind, TextRange}; use text_edit::TextEdit; use crate::{ @@ -272,6 +272,82 @@ pub(crate) fn render_resolution_with_import_pat( Some(render_resolution_pat(ctx, pattern_ctx, local_name, Some(import_edit), resolution)) } +pub(crate) fn render_expr( + ctx: &CompletionContext<'_>, + expr: &hir::term_search::Expr, +) -> Option { + let mut i = 1; + let mut snippet_formatter = |ty: &hir::Type| { + let arg_name = ty + .as_adt() + .and_then(|adt| adt.name(ctx.db).as_text()) + .map(|s| stdx::to_lower_snake_case(s.as_str())) + 
.unwrap_or_else(|| String::from("_")); + let res = format!("${{{i}:{arg_name}}}"); + i += 1; + res + }; + + let mut label_formatter = |ty: &hir::Type| { + ty.as_adt() + .and_then(|adt| adt.name(ctx.db).as_text()) + .map(|s| stdx::to_lower_snake_case(s.as_str())) + .unwrap_or_else(|| String::from("...")) + }; + + let label = expr + .gen_source_code( + &ctx.scope, + &mut label_formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok()?; + + let source_range = match ctx.original_token.parent() { + Some(node) => match node.ancestors().find_map(ast::Path::cast) { + Some(path) => path.syntax().text_range(), + None => node.text_range(), + }, + None => ctx.source_range(), + }; + + let mut item = CompletionItem::new(CompletionItemKind::Expression, source_range, label.clone()); + + let snippet = format!( + "{}$0", + expr.gen_source_code( + &ctx.scope, + &mut snippet_formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude + ) + .ok()? + ); + let edit = TextEdit::replace(source_range, snippet); + item.snippet_edit(ctx.config.snippet_cap?, edit); + item.documentation(Documentation::new(String::from("Autogenerated expression by term search"))); + item.set_relevance(crate::CompletionRelevance { + type_match: compute_type_match(ctx, &expr.ty(ctx.db)), + ..Default::default() + }); + for trait_ in expr.traits_used(ctx.db) { + let trait_item = hir::ItemInNs::from(hir::ModuleDef::from(trait_)); + let Some(path) = ctx.module.find_use_path( + ctx.db, + trait_item, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) else { + continue; + }; + + item.add_import(LocatedImport::new(path, trait_item, trait_item)); + } + + Some(item) +} + fn scope_def_to_name( resolution: ScopeDef, ctx: &RenderContext<'_>, @@ -599,6 +675,16 @@ mod tests { expect.assert_debug_eq(&actual); } + #[track_caller] + fn check_function_relevance(ra_fixture: &str, expect: Expect) { + let actual: Vec<_> = do_completion(ra_fixture, CompletionItemKind::Method) + .into_iter() + .map(|item| (item.detail.unwrap_or_default(), item.relevance.function)) + .collect(); + + expect.assert_debug_eq(&actual); + } + #[track_caller] fn check_relevance_for_kinds(ra_fixture: &str, kinds: &[CompletionItemKind], expect: Expect) { let mut actual = get_all_items(TEST_CONFIG, ra_fixture, None); @@ -961,6 +1047,7 @@ fn func(input: Struct) { } st Self [type] sp Self [type] st Struct [type] + ex Struct [type] lc self [local] fn func(…) [] me self.test() [] @@ -985,6 +1072,9 @@ fn main() { "#, expect![[r#" lc input [type+name+local] + ex input [type] + ex true [type] + ex false [type] lc inputbad [local] fn main() [] fn test(…) [] @@ -1174,6 +1264,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1201,6 +1292,7 @@ fn main() { let _: m::Spam = S$0 } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, trigger_call_info: true, }, @@ -1280,6 +1372,7 @@ fn foo() { A { the$0 } } is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -1313,6 +1406,26 @@ impl S { documentation: Documentation( "Method docs", ), + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + 
CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, CompletionItem { label: "foo", @@ -1418,6 +1531,26 @@ fn foo(s: S) { s.$0 } kind: Method, lookup: "the_method", detail: "fn(&self)", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, }, ] "#]], @@ -1665,6 +1798,10 @@ fn f() { A { bar: b$0 }; } expect![[r#" fn bar() [type+name] fn baz() [type] + ex baz() [type] + ex bar() [type] + ex A { bar: baz() }.bar [type] + ex A { bar: bar() }.bar [type] st A [] fn f() [] "#]], @@ -1749,6 +1886,8 @@ fn main() { lc s [type+name+local] st S [type] st S [type] + ex s [type] + ex S [type] fn foo(…) [] fn main() [] "#]], @@ -1766,6 +1905,8 @@ fn main() { lc ssss [type+local] st S [type] st S [type] + ex ssss [type] + ex S [type] fn foo(…) [] fn main() [] "#]], @@ -1798,6 +1939,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] + ex core::ops::Deref::deref(&t) (use core::ops::Deref) [type_could_unify] lc m [local] lc t [local] lc &t [type+local] @@ -1846,6 +1989,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::DerefMut::deref_mut(&mut T(S)) (use core::ops::DerefMut) [type_could_unify] + ex core::ops::DerefMut::deref_mut(&mut t) (use core::ops::DerefMut) [type_could_unify] lc m [local] lc t [local] lc &mut t [type+local] @@ -1894,6 +2039,8 @@ fn bar(t: Foo) {} ev Foo::A [type] ev Foo::B [type] en Foo [type] + ex Foo::A [type] + ex Foo::B [type] fn bar(…) [] fn foo() [] "#]], @@ -1947,6 +2094,8 @@ fn main() { } "#, expect![[r#" + ex core::ops::Deref::deref(&T(S)) (use core::ops::Deref) [type_could_unify] + ex core::ops::Deref::deref(&bar()) (use core::ops::Deref) [type_could_unify] st S [] st &S [type] st S [] @@ -2002,6 +2151,254 @@ fn main() { ); } + #[test] + fn constructor_order_simple() { + check_relevance( + r#" +struct Foo; +struct Other; +struct Option(T); + +impl Foo { + fn fn_ctr() -> Foo { unimplemented!() } + fn fn_another(n: u32) -> Other { unimplemented!() } + fn fn_ctr_self() -> Option { unimplemented!() } +} + +fn test() { + let a = Foo::$0; +} +"#, + expect![[r#" + fn fn_ctr() [type_could_unify] + fn fn_ctr_self() [type_could_unify] + fn fn_another(…) [type_could_unify] + "#]], + ); + } + + #[test] + fn constructor_order_kind() { + check_function_relevance( + r#" +struct Foo; +struct Bar; +struct Option(T); +enum Result { Ok(T), Err(E) }; + +impl Foo { + fn fn_ctr(&self) -> Foo { unimplemented!() } + fn fn_ctr_with_args(&self, n: u32) -> Foo { unimplemented!() } + fn fn_another(&self, n: u32) -> Bar { unimplemented!() } + fn fn_ctr_wrapped(&self, ) -> Option { unimplemented!() } + fn fn_ctr_wrapped_2(&self, ) -> Result { unimplemented!() } + fn fn_ctr_wrapped_3(&self, ) -> Result { unimplemented!() } // Self is not the first type + fn fn_ctr_wrapped_with_args(&self, m: u32) -> Option { unimplemented!() } + fn fn_another_unit(&self) { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + expect![[r#" + [ + ( + "fn(&self, u32) -> Bar", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + ), + ( + 
"fn(&self)", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + ), + ( + "fn(&self) -> Foo", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self, u32) -> Foo", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: DirectConstructor, + }, + ), + ), + ( + "fn(&self) -> Option", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self) -> Result", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ( + "fn(&self, u32) -> Option", + Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Constructor, + }, + ), + ), + ] + "#]], + ); + } + + #[test] + fn constructor_order_relevance() { + check_relevance( + r#" +struct Foo; +struct FooBuilder; +struct Result(T); + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: u32) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr() -> Result { unimplemented!() } + fn fn_other() -> Result { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::$0; +} +"#, + // preference: + // Direct Constructor + // Direct Constructor with args + // Builder + // Constructor + // Others + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + + // + } + + #[test] + fn function_relevance_generic_1() { + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_returns_unit(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr_wrapped() -> Option { unimplemented!() } + fn fn_ctr_wrapped_2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a = self::Foo::::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr_wrapped() [type_could_unify] + fn fn_ctr_wrapped_2() [type_could_unify] + me fn_returns_unit(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + + #[test] + fn function_relevance_generic_2() { + // Generic 2 + check_relevance( + r#" +struct Foo(T); +struct FooBuilder; +struct Option(T); +enum Result{Ok(T), Err(E)}; + +impl Foo { + fn fn_no_ret(&self) {} + fn fn_ctr_with_args(input: T) -> Foo { unimplemented!() } + fn fn_direct_ctr() -> Self { unimplemented!() } + fn fn_ctr() -> Option { unimplemented!() } + fn fn_ctr2() -> Result { unimplemented!() } + fn fn_other() -> Option { unimplemented!() } + fn fn_builder() -> FooBuilder { unimplemented!() } +} + +fn test() { + let a : Res> = Foo::$0; +} + "#, + expect![[r#" + fn fn_direct_ctr() [type_could_unify] + fn fn_ctr_with_args(…) [type_could_unify] + fn fn_builder() [type_could_unify] + fn fn_ctr() [type_could_unify] + fn 
fn_ctr2() [type_could_unify] + me fn_no_ret(…) [type_could_unify] + fn fn_other() [type_could_unify] + "#]], + ); + } + #[test] fn struct_field_method_ref() { check_kinds( @@ -2022,6 +2419,26 @@ fn foo(f: Foo) { let _: &u32 = f.b$0 } kind: Method, lookup: "baz", detail: "fn(&self) -> u32", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: true, + has_self_param: true, + return_type: Other, + }, + ), + }, ref_match: "&@107", }, CompletionItem { @@ -2096,6 +2513,7 @@ fn foo() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] @@ -2133,6 +2551,26 @@ fn main() { ), lookup: "foo", detail: "fn() -> S", + relevance: CompletionRelevance { + exact_name_match: false, + type_match: None, + is_local: false, + is_item_from_trait: false, + is_item_from_notable_trait: false, + is_name_already_imported: false, + requires_import: false, + is_op_method: false, + is_private_editable: false, + postfix_match: None, + is_definite: false, + function: Some( + CompletionRelevanceFn { + has_params: false, + has_self_param: false, + return_type: Other, + }, + ), + }, ref_match: "&@92", }, ] @@ -2160,6 +2598,7 @@ fn foo() { "#, expect![[r#" lc foo [type+local] + ex foo [type] ev Foo::A(…) [type_could_unify] ev Foo::B [type_could_unify] en Foo [type_could_unify] @@ -2493,6 +2932,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, CompletionItem { @@ -2515,6 +2955,7 @@ fn main() { is_private_editable: false, postfix_match: None, is_definite: false, + function: None, }, }, ] diff --git a/crates/ide-completion/src/render/function.rs b/crates/ide-completion/src/render/function.rs index 27186a2b7ffbe..cf9fe1ab30728 100644 --- a/crates/ide-completion/src/render/function.rs +++ b/crates/ide-completion/src/render/function.rs @@ -8,8 +8,13 @@ use syntax::{format_smolstr, AstNode, SmolStr}; use crate::{ context::{CompletionContext, DotAccess, DotAccessKind, PathCompletionCtx, PathKind}, - item::{Builder, CompletionItem, CompletionItemKind, CompletionRelevance}, - render::{compute_exact_name_match, compute_ref_match, compute_type_match, RenderContext}, + item::{ + Builder, CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevanceFn, + CompletionRelevanceReturnType, + }, + render::{ + compute_exact_name_match, compute_ref_match, compute_type_match, match_types, RenderContext, + }, CallableSnippets, }; @@ -61,9 +66,9 @@ fn render( ), _ => (name.unescaped().to_smol_str(), name.to_smol_str()), }; - + let has_self_param = func.self_param(db).is_some(); let mut item = CompletionItem::new( - if func.self_param(db).is_some() { + if has_self_param { CompletionItemKind::Method } else { CompletionItemKind::SymbolKind(SymbolKind::Function) @@ -99,6 +104,15 @@ fn render( .filter(|_| !has_call_parens) .and_then(|cap| Some((cap, params(ctx.completion, func, &func_kind, has_dot_receiver)?))); + let function = assoc_item + .and_then(|assoc_item| assoc_item.implementing_ty(db)) + .map(|self_type| compute_return_type_match(db, &ctx, self_type, &ret_type)) + .map(|return_type| CompletionRelevanceFn { + has_params: has_self_param || func.num_params(db) > 0, + has_self_param, + return_type, + }); + 
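+    // `function` is `Some` only for associated functions whose implementing type is
+    // known: `compute_return_type_match` classifies the return type against that type
+    // so the relevance set below can rank constructor-like completions first.
+    // Illustrative mapping, with hypothetical signatures:
+    //   `fn new() -> Self`           => DirectConstructor
+    //   `fn new() -> Option<Self>`   => Constructor
+    //   `fn builder() -> FooBuilder` => Builder
+    //   `fn len(&self) -> usize`     => Other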
item.set_relevance(CompletionRelevance { type_match: if has_call_parens || complete_call_parens.is_some() { compute_type_match(completion, &ret_type) @@ -106,6 +120,7 @@ fn render( compute_type_match(completion, &func.ty(db)) }, exact_name_match: compute_exact_name_match(completion, &call), + function, is_op_method, is_item_from_notable_trait, ..ctx.completion_relevance() @@ -156,6 +171,33 @@ fn render( item } +fn compute_return_type_match( + db: &dyn HirDatabase, + ctx: &RenderContext<'_>, + self_type: hir::Type, + ret_type: &hir::Type, +) -> CompletionRelevanceReturnType { + if match_types(ctx.completion, &self_type, ret_type).is_some() { + // fn([..]) -> Self + CompletionRelevanceReturnType::DirectConstructor + } else if ret_type + .type_arguments() + .any(|ret_type_arg| match_types(ctx.completion, &self_type, &ret_type_arg).is_some()) + { + // fn([..]) -> Result OR Wrapped + CompletionRelevanceReturnType::Constructor + } else if ret_type + .as_adt() + .and_then(|adt| adt.name(db).as_str().map(|name| name.ends_with("Builder"))) + .unwrap_or(false) + { + // fn([..]) -> [..]Builder + CompletionRelevanceReturnType::Builder + } else { + CompletionRelevanceReturnType::Other + } +} + pub(super) fn add_call_parens<'b>( builder: &'b mut Builder, ctx: &CompletionContext<'_>, diff --git a/crates/ide-completion/src/tests.rs b/crates/ide-completion/src/tests.rs index 154b69875aea8..1f032c7df480d 100644 --- a/crates/ide-completion/src/tests.rs +++ b/crates/ide-completion/src/tests.rs @@ -65,6 +65,7 @@ pub(crate) const TEST_CONFIG: CompletionConfig = CompletionConfig { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: false, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), diff --git a/crates/ide-completion/src/tests/expression.rs b/crates/ide-completion/src/tests/expression.rs index 78907a2896c4a..7749fac40b9dc 100644 --- a/crates/ide-completion/src/tests/expression.rs +++ b/crates/ide-completion/src/tests/expression.rs @@ -97,6 +97,11 @@ fn func(param0 @ (param1, param2): (i32, i32)) { kw unsafe kw while kw while let + ex ifletlocal + ex letlocal + ex matcharm + ex param1 + ex param2 "#]], ); } @@ -241,6 +246,8 @@ fn complete_in_block() { sn macro_rules sn pd sn ppd + ex false + ex true "#]], ) } @@ -542,7 +549,26 @@ fn quux(x: i32) { m!(x$0 } "#, - expect![[r#""#]], + expect![[r#" + fn quux(…) fn(i32) + lc x i32 + lc y i32 + ma m!(…) macro_rules! 
m + bt u32 u32 + kw crate:: + kw false + kw for + kw if + kw if let + kw loop + kw match + kw return + kw self:: + kw true + kw unsafe + kw while + kw while let + "#]], ); } @@ -682,7 +708,9 @@ fn main() { } "#, expect![[r#" - fn test() fn() -> Zulu + fn test() fn() -> Zulu + ex Zulu + ex Zulu::test() "#]], ); } diff --git a/crates/ide-completion/src/tests/flyimport.rs b/crates/ide-completion/src/tests/flyimport.rs index eaa1bebc03c7e..fff193ba4c9bd 100644 --- a/crates/ide-completion/src/tests/flyimport.rs +++ b/crates/ide-completion/src/tests/flyimport.rs @@ -1397,3 +1397,22 @@ pub use bridge2::server2::Span2; "#]], ); } + +#[test] +fn flyimport_only_traits_in_impl_trait_block() { + check( + r#" +//- /main.rs crate:main deps:dep +pub struct Bar; + +impl Foo$0 for Bar { } +//- /lib.rs crate:dep +pub trait FooTrait; + +pub struct FooStruct; +"#, + expect![[r#" + tt FooTrait (use dep::FooTrait) + "#]], + ); +} diff --git a/crates/ide-completion/src/tests/record.rs b/crates/ide-completion/src/tests/record.rs index 18afde1b7cefd..e64ec74c6106e 100644 --- a/crates/ide-completion/src/tests/record.rs +++ b/crates/ide-completion/src/tests/record.rs @@ -192,6 +192,8 @@ fn main() { bt u32 u32 kw crate:: kw self:: + ex Foo::default() + ex foo "#]], ); check( diff --git a/crates/ide-completion/src/tests/special.rs b/crates/ide-completion/src/tests/special.rs index a87d16c789faf..ff32eccfbff4c 100644 --- a/crates/ide-completion/src/tests/special.rs +++ b/crates/ide-completion/src/tests/special.rs @@ -225,10 +225,10 @@ impl S { fn foo() { let _ = lib::S::$0 } "#, expect![[r#" - ct PUBLIC_CONST pub const PUBLIC_CONST: u32 - fn public_method() fn() - ta PublicType pub type PublicType = u32 - "#]], + ct PUBLIC_CONST pub const PUBLIC_CONST: u32 + fn public_method() fn() + ta PublicType pub type PublicType = u32 + "#]], ); } @@ -242,8 +242,8 @@ impl U { fn m() { } } fn foo() { let _ = U::$0 } "#, expect![[r#" - fn m() fn() - "#]], + fn m() fn() + "#]], ); } @@ -256,8 +256,8 @@ trait Trait { fn m(); } fn foo() { let _ = Trait::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -273,8 +273,8 @@ impl Trait for S {} fn foo() { let _ = S::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -290,8 +290,8 @@ impl Trait for S {} fn foo() { let _ = ::$0 } "#, expect![[r#" - fn m() (as Trait) fn() - "#]], + fn m() (as Trait) fn() + "#]], ); } @@ -396,9 +396,9 @@ macro_rules! foo { () => {} } fn main() { let _ = crate::$0 } "#, expect![[r#" - fn main() fn() - ma foo!(…) macro_rules! foo - "#]], + fn main() fn() + ma foo!(…) macro_rules! 
foo + "#]], ); } @@ -694,8 +694,10 @@ fn bar() -> Bar { } "#, expect![[r#" - fn foo() (as Foo) fn() -> Self - "#]], + fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() + "#]], ); } @@ -722,6 +724,8 @@ fn bar() -> Bar { expect![[r#" fn bar() fn() fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() "#]], ); } @@ -748,6 +752,8 @@ fn bar() -> Bar { "#, expect![[r#" fn foo() (as Foo) fn() -> Self + ex Bar + ex bar() "#]], ); } diff --git a/crates/ide-completion/src/tests/type_pos.rs b/crates/ide-completion/src/tests/type_pos.rs index c7161f82ce74f..db4ac9381cedb 100644 --- a/crates/ide-completion/src/tests/type_pos.rs +++ b/crates/ide-completion/src/tests/type_pos.rs @@ -989,3 +989,43 @@ fn foo<'a>() { S::<'static, F$0, _, _>; } "#]], ); } + +#[test] +fn complete_traits_on_impl_trait_block() { + check( + r#" +trait Foo {} + +struct Bar; + +impl $0 for Bar { } +"#, + expect![[r#" + md module + tt Foo + tt Trait + kw crate:: + kw self:: + "#]], + ); +} + +#[test] +fn complete_traits_with_path_on_impl_trait_block() { + check( + r#" +mod outer { + pub trait Foo {} + pub struct Bar; + pub mod inner { + } +} + +impl outer::$0 for Bar { } +"#, + expect![[r#" + md inner + tt Foo + "#]], + ); +} diff --git a/crates/ide-db/src/famous_defs.rs b/crates/ide-db/src/famous_defs.rs index 4edfa37b32905..3106772e63b12 100644 --- a/crates/ide-db/src/famous_defs.rs +++ b/crates/ide-db/src/famous_defs.rs @@ -114,6 +114,14 @@ impl FamousDefs<'_, '_> { self.find_function("core:mem:drop") } + pub fn core_macros_todo(&self) -> Option { + self.find_macro("core:todo") + } + + pub fn core_macros_unimplemented(&self) -> Option { + self.find_macro("core:unimplemented") + } + pub fn builtin_crates(&self) -> impl Iterator { IntoIterator::into_iter([ self.std(), diff --git a/crates/ide-db/src/path_transform.rs b/crates/ide-db/src/path_transform.rs index 3862acc2af4da..7e1811b4cacb6 100644 --- a/crates/ide-db/src/path_transform.rs +++ b/crates/ide-db/src/path_transform.rs @@ -148,7 +148,7 @@ impl<'a> PathTransform<'a> { let mut defaulted_params: Vec = Default::default(); self.generic_def .into_iter() - .flat_map(|it| it.type_params(db)) + .flat_map(|it| it.type_or_const_params(db)) .skip(skip) // The actual list of trait type parameters may be longer than the one // used in the `impl` block due to trailing default type parameters. diff --git a/crates/ide-db/src/rename.rs b/crates/ide-db/src/rename.rs index 032b8338ab85d..6a7042988a9c4 100644 --- a/crates/ide-db/src/rename.rs +++ b/crates/ide-db/src/rename.rs @@ -71,7 +71,6 @@ impl Definition { &self, sema: &Semantics<'_, RootDatabase>, new_name: &str, - rename_external: bool, ) -> Result { // self.krate() returns None if // self is a built-in attr, built-in type or tool module. @@ -80,8 +79,8 @@ impl Definition { if let Some(krate) = self.krate(sema.db) { // Can we not rename non-local items? // Then bail if non-local - if !rename_external && !krate.origin(sema.db).is_local() { - bail!("Cannot rename a non-local definition as the config for it is disabled") + if !krate.origin(sema.db).is_local() { + bail!("Cannot rename a non-local definition") } } diff --git a/crates/ide-db/src/source_change.rs b/crates/ide-db/src/source_change.rs index 73be6a4071e47..f59d8d08c8924 100644 --- a/crates/ide-db/src/source_change.rs +++ b/crates/ide-db/src/source_change.rs @@ -138,7 +138,7 @@ impl SnippetEdit { .into_iter() .zip(1..) 
.with_position() - .map(|pos| { + .flat_map(|pos| { let (snippet, index) = match pos { (itertools::Position::First, it) | (itertools::Position::Middle, it) => it, // last/only snippet gets index 0 @@ -146,11 +146,13 @@ impl SnippetEdit { | (itertools::Position::Only, (snippet, _)) => (snippet, 0), }; - let range = match snippet { - Snippet::Tabstop(pos) => TextRange::empty(pos), - Snippet::Placeholder(range) => range, - }; - (index, range) + match snippet { + Snippet::Tabstop(pos) => vec![(index, TextRange::empty(pos))], + Snippet::Placeholder(range) => vec![(index, range)], + Snippet::PlaceholderGroup(ranges) => { + ranges.into_iter().map(|range| (index, range)).collect() + } + } }) .collect_vec(); @@ -248,7 +250,7 @@ impl SourceChangeBuilder { fn commit(&mut self) { let snippet_edit = self.snippet_builder.take().map(|builder| { SnippetEdit::new( - builder.places.into_iter().map(PlaceSnippet::finalize_position).collect_vec(), + builder.places.into_iter().flat_map(PlaceSnippet::finalize_position).collect(), ) }); @@ -287,30 +289,10 @@ impl SourceChangeBuilder { pub fn insert(&mut self, offset: TextSize, text: impl Into) { self.edit.insert(offset, text.into()) } - /// Append specified `snippet` at the given `offset` - pub fn insert_snippet( - &mut self, - _cap: SnippetCap, - offset: TextSize, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.insert(offset, snippet); - } /// Replaces specified `range` of text with a given string. pub fn replace(&mut self, range: TextRange, replace_with: impl Into) { self.edit.replace(range, replace_with.into()) } - /// Replaces specified `range` of text with a given `snippet`. - pub fn replace_snippet( - &mut self, - _cap: SnippetCap, - range: TextRange, - snippet: impl Into, - ) { - self.source_change.is_snippet = true; - self.replace(range, snippet); - } pub fn replace_ast(&mut self, old: N, new: N) { algo::diff(old.syntax(), new.syntax()).into_text_edit(&mut self.edit) } @@ -356,6 +338,17 @@ impl SourceChangeBuilder { self.add_snippet(PlaceSnippet::Over(node.syntax().clone().into())) } + /// Adds a snippet to move the cursor selected over `nodes` + /// + /// This allows for renaming newly generated items without having to go + /// through a separate rename step. + pub fn add_placeholder_snippet_group(&mut self, _cap: SnippetCap, nodes: Vec) { + assert!(nodes.iter().all(|node| node.parent().is_some())); + self.add_snippet(PlaceSnippet::OverGroup( + nodes.into_iter().map(|node| node.into()).collect(), + )) + } + fn add_snippet(&mut self, snippet: PlaceSnippet) { let snippet_builder = self.snippet_builder.get_or_insert(SnippetBuilder { places: vec![] }); snippet_builder.places.push(snippet); @@ -400,6 +393,13 @@ pub enum Snippet { Tabstop(TextSize), /// A placeholder snippet (e.g. `${0:placeholder}`). Placeholder(TextRange), + /// A group of placeholder snippets, e.g. 
+ /// + /// ```no_run + /// let ${0:new_var} = 4; + /// fun(1, 2, 3, ${0:new_var}); + /// ``` + PlaceholderGroup(Vec), } enum PlaceSnippet { @@ -409,14 +409,20 @@ enum PlaceSnippet { After(SyntaxElement), /// Place a placeholder snippet in place of the element Over(SyntaxElement), + /// Place a group of placeholder snippets which are linked together + /// in place of the elements + OverGroup(Vec), } impl PlaceSnippet { - fn finalize_position(self) -> Snippet { + fn finalize_position(self) -> Vec { match self { - PlaceSnippet::Before(it) => Snippet::Tabstop(it.text_range().start()), - PlaceSnippet::After(it) => Snippet::Tabstop(it.text_range().end()), - PlaceSnippet::Over(it) => Snippet::Placeholder(it.text_range()), + PlaceSnippet::Before(it) => vec![Snippet::Tabstop(it.text_range().start())], + PlaceSnippet::After(it) => vec![Snippet::Tabstop(it.text_range().end())], + PlaceSnippet::Over(it) => vec![Snippet::Placeholder(it.text_range())], + PlaceSnippet::OverGroup(it) => { + vec![Snippet::PlaceholderGroup(it.into_iter().map(|it| it.text_range()).collect())] + } } } } diff --git a/crates/ide-db/src/syntax_helpers/node_ext.rs b/crates/ide-db/src/syntax_helpers/node_ext.rs index e4e735cecd89d..4f706e26af2b3 100644 --- a/crates/ide-db/src/syntax_helpers/node_ext.rs +++ b/crates/ide-db/src/syntax_helpers/node_ext.rs @@ -329,6 +329,7 @@ pub fn for_each_tail_expr(expr: &ast::Expr, cb: &mut dyn FnMut(&ast::Expr)) { | ast::Expr::RecordExpr(_) | ast::Expr::RefExpr(_) | ast::Expr::ReturnExpr(_) + | ast::Expr::BecomeExpr(_) | ast::Expr::TryExpr(_) | ast::Expr::TupleExpr(_) | ast::Expr::LetExpr(_) diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index dd64b93e4548b..5e2541795ca1c 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -43,7 +43,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::IncorrectCase) -> Option {} + // ^^^^ error: expected (bool, bool), found bool + // ^^^^^ error: expected (bool, bool), found bool + None => {} + } +} + "#, + ); + } + #[test] fn mismatched_types_in_or_patterns() { cov_mark::check_count!(validate_match_bailed_out, 2); diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index a0d5d742d3622..b7667dc318f0c 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -182,6 +182,18 @@ fn foo() -> u8 { ); } + #[test] + fn no_diagnostic_if_not_last_statement2() { + check_diagnostics( + r#" +fn foo() -> u8 { + return 2; + fn bar() {} +} +"#, + ); + } + #[test] fn replace_with_expr() { check_fix( diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index e93eea8ce29e5..8c97281b78328 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -112,7 +112,8 @@ fn add_missing_ok_or_some( let variant_name = if Some(expected_enum) == core_result { "Ok" } else { "Some" }; - let wrapped_actual_ty = expected_adt.ty_with_args(ctx.sema.db, &[d.actual.clone()]); + let wrapped_actual_ty = + expected_adt.ty_with_args(ctx.sema.db, std::iter::once(d.actual.clone())); if !d.expected.could_unify_with(ctx.sema.db, &wrapped_actual_ty) { return None; diff --git a/crates/ide-diagnostics/src/handlers/typed_hole.rs 
b/crates/ide-diagnostics/src/handlers/typed_hole.rs index 6441343ebacdc..56c8181e84ce6 100644 --- a/crates/ide-diagnostics/src/handlers/typed_hole.rs +++ b/crates/ide-diagnostics/src/handlers/typed_hole.rs @@ -1,14 +1,20 @@ -use hir::{db::ExpandDatabase, ClosureStyle, HirDisplay, StructKind}; +use hir::{ + db::ExpandDatabase, + term_search::{term_search, TermSearchCtx}, + ClosureStyle, HirDisplay, +}; use ide_db::{ assists::{Assist, AssistId, AssistKind, GroupLabel}, label::Label, source_change::SourceChange, }; -use syntax::AstNode; +use itertools::Itertools; use text_edit::TextEdit; use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; +use syntax::AstNode; + // Diagnostic: typed-hole // // This diagnostic is triggered when an underscore expression is used in an invalid position. @@ -36,50 +42,54 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &hir::TypedHole) -> Option let (original_range, _) = d.expr.as_ref().map(|it| it.to_node(&root)).syntax().original_file_range_opt(db)?; let scope = ctx.sema.scope(d.expr.value.to_node(&root).syntax())?; - let mut assists = vec![]; - scope.process_all_names(&mut |name, def| { - let ty = match def { - hir::ScopeDef::ModuleDef(it) => match it { - hir::ModuleDef::Function(it) => it.ty(db), - hir::ModuleDef::Adt(hir::Adt::Struct(it)) if it.kind(db) != StructKind::Record => { - it.constructor_ty(db) - } - hir::ModuleDef::Variant(it) if it.kind(db) != StructKind::Record => { - it.constructor_ty(db) - } - hir::ModuleDef::Const(it) => it.ty(db), - hir::ModuleDef::Static(it) => it.ty(db), - _ => return, - }, - hir::ScopeDef::GenericParam(hir::GenericParam::ConstParam(it)) => it.ty(db), - hir::ScopeDef::Local(it) => it.ty(db), - _ => return, - }; - // FIXME: should also check coercions if it is at a coercion site - if !ty.contains_unknown() && ty.could_unify_with(db, &d.expected) { - assists.push(Assist { - id: AssistId("typed-hole", AssistKind::QuickFix), - label: Label::new(format!("Replace `_` with `{}`", name.display(db))), - group: Some(GroupLabel("Replace `_` with a matching entity in scope".to_owned())), - target: original_range.range, - source_change: Some(SourceChange::from_text_edit( - original_range.file_id, - TextEdit::replace(original_range.range, name.display(db).to_string()), - )), - trigger_signature_help: false, - }); - } - }); - if assists.is_empty() { - None - } else { + + let term_search_ctx = TermSearchCtx { + sema: &ctx.sema, + scope: &scope, + goal: d.expected.clone(), + config: Default::default(), + }; + let paths = term_search(&term_search_ctx); + + let mut formatter = |_: &hir::Type| String::from("_"); + + let assists: Vec = paths + .into_iter() + .filter_map(|path| { + path.gen_source_code( + &scope, + &mut formatter, + ctx.config.prefer_no_std, + ctx.config.prefer_prelude, + ) + .ok() + }) + .unique() + .map(|code| Assist { + id: AssistId("typed-hole", AssistKind::QuickFix), + label: Label::new(format!("Replace `_` with `{}`", &code)), + group: Some(GroupLabel("Replace `_` with a term".to_owned())), + target: original_range.range, + source_change: Some(SourceChange::from_text_edit( + original_range.file_id, + TextEdit::replace(original_range.range, code), + )), + trigger_signature_help: false, + }) + .collect(); + + if !assists.is_empty() { Some(assists) + } else { + None } } #[cfg(test)] mod tests { - use crate::tests::{check_diagnostics, check_fixes}; + use crate::tests::{ + check_diagnostics, check_fixes_unordered, check_has_fix, check_has_single_fix, + }; #[test] fn unknown() { @@ -99,7 +109,7 @@ fn main() { r#" fn main() { 
if _ {} - //^ error: invalid `_` expression, expected type `bool` + //^ 💡 error: invalid `_` expression, expected type `bool` let _: fn() -> i32 = _; //^ error: invalid `_` expression, expected type `fn() -> i32` let _: fn() -> () = _; // FIXME: This should trigger an assist because `main` matches via *coercion* @@ -129,7 +139,7 @@ fn main() { fn main() { let mut x = t(); x = _; - //^ 💡 error: invalid `_` expression, expected type `&str` + //^ error: invalid `_` expression, expected type `&str` x = ""; } fn t() -> T { loop {} } @@ -143,7 +153,8 @@ fn t() -> T { loop {} } r#" fn main() { let _x = [(); _]; - let _y: [(); 10] = [(); _]; + // FIXME: This should trigger error + // let _y: [(); 10] = [(); _]; _ = 0; (_,) = (1,); } @@ -153,7 +164,7 @@ fn main() { #[test] fn check_quick_fix() { - check_fixes( + check_fixes_unordered( r#" enum Foo { Bar @@ -175,7 +186,7 @@ use Foo::Bar; const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; - let _: Foo = local; + let _: Foo = Bar; //^ error: invalid `_` expression, expected type `fn()` } "#, @@ -187,7 +198,7 @@ use Foo::Bar; const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; - let _: Foo = param; + let _: Foo = local; //^ error: invalid `_` expression, expected type `fn()` } "#, @@ -199,7 +210,7 @@ use Foo::Bar; const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; - let _: Foo = CP; + let _: Foo = param; //^ error: invalid `_` expression, expected type `fn()` } "#, @@ -211,7 +222,7 @@ use Foo::Bar; const C: Foo = Foo::Bar; fn main(param: Foo) { let local = Foo::Bar; - let _: Foo = Bar; + let _: Foo = CP; //^ error: invalid `_` expression, expected type `fn()` } "#, @@ -230,4 +241,153 @@ fn main(param: Foo) { ], ); } + + #[test] + fn local_item_use_trait() { + check_has_fix( + r#" +struct Bar; +struct Baz; +trait Foo { + fn foo(self) -> Bar; +} +impl Foo for Baz { + fn foo(self) -> Bar { + unimplemented!() + } +} +fn asd() -> Bar { + let a = Baz; + _$0 +} +"#, + r" +struct Bar; +struct Baz; +trait Foo { + fn foo(self) -> Bar; +} +impl Foo for Baz { + fn foo(self) -> Bar { + unimplemented!() + } +} +fn asd() -> Bar { + let a = Baz; + Foo::foo(a) +} +", + ); + } + + #[test] + fn init_struct() { + check_has_fix( + r#"struct Abc {} +struct Qwe { a: i32, b: Abc } +fn main() { + let a: i32 = 1; + let c: Qwe = _$0; +}"#, + r#"struct Abc {} +struct Qwe { a: i32, b: Abc } +fn main() { + let a: i32 = 1; + let c: Qwe = Qwe { a: a, b: Abc { } }; +}"#, + ); + } + + #[test] + fn ignore_impl_func_with_incorrect_return() { + check_has_single_fix( + r#" +struct Bar {} +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for i32 { + type Res = Self; + fn foo(&self) -> Self::Res { 1 } +} +fn main() { + let a: i32 = 1; + let c: Bar = _$0; +}"#, + r#" +struct Bar {} +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for i32 { + type Res = Self; + fn foo(&self) -> Self::Res { 1 } +} +fn main() { + let a: i32 = 1; + let c: Bar = Bar { }; +}"#, + ); + } + + #[test] + fn use_impl_func_with_correct_return() { + check_has_fix( + r#" +struct Bar {} +struct A; +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for A { + type Res = Bar; + fn foo(&self) -> Self::Res { Bar { } } +} +fn main() { + let a = A; + let c: Bar = _$0; +}"#, + r#" +struct Bar {} +struct A; +trait Foo { + type Res; + fn foo(&self) -> Self::Res; +} +impl Foo for A { + type Res = Bar; + fn foo(&self) -> Self::Res { Bar { } } +} +fn main() { + let a = A; + let c: Bar = Foo::foo(&a); +}"#, + ); + } + + #[test] + fn 
local_shadow_fn() { + check_fixes_unordered( + r#" +fn f() { + let f: i32 = 0; + _$0 +}"#, + vec![ + r#" +fn f() { + let f: i32 = 0; + () +}"#, + r#" +fn f() { + let f: i32 = 0; + crate::f() +}"#, + ], + ); + } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index b62bb5affdd8e..4e4a851f67e0a 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -91,6 +91,91 @@ fn check_nth_fix_with_config( assert_eq_text!(&after, &actual); } +pub(crate) fn check_fixes_unordered(ra_fixture_before: &str, ra_fixtures_after: Vec<&str>) { + for ra_fixture_after in ra_fixtures_after.iter() { + check_has_fix(ra_fixture_before, ra_fixture_after) + } +} + +#[track_caller] +pub(crate) fn check_has_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); +} + +#[track_caller] +pub(crate) fn check_has_single_fix(ra_fixture_before: &str, ra_fixture_after: &str) { + let after = trim_indent(ra_fixture_after); + + let (db, file_position) = RootDatabase::with_position(ra_fixture_before); + let mut conf = DiagnosticsConfig::test_sample(); + conf.expr_fill_default = ExprFillDefaultMode::Default; + let mut n_fixes = 0; + let fix = super::diagnostics(&db, &conf, &AssistResolveStrategy::All, file_position.file_id) + .into_iter() + .find(|d| { + d.fixes + .as_ref() + .and_then(|fixes| { + n_fixes += fixes.len(); + fixes.iter().find(|fix| { + if !fix.target.contains_inclusive(file_position.offset) { + return false; + } + let actual = { + let source_change = fix.source_change.as_ref().unwrap(); + let file_id = *source_change.source_file_edits.keys().next().unwrap(); + let mut actual = db.file_text(file_id).to_string(); + + for (edit, snippet_edit) in source_change.source_file_edits.values() { + edit.apply(&mut actual); + if let Some(snippet_edit) = snippet_edit { + snippet_edit.apply(&mut actual); + } + } + actual + }; + after == actual + }) + }) + .is_some() + }); + assert!(fix.is_some(), "no diagnostic with desired fix"); + assert!(n_fixes == 1, "Too many fixes suggested"); +} + /// Checks that there's a diagnostic *without* fix at `$0`. 
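+// Note on the helpers added above: `check_has_fix` asserts that at least one fix offered
+// for the diagnostic at the cursor produces the expected source, `check_fixes_unordered`
+// checks that for each expected source without pinning the order in which fixes are
+// offered, and `check_has_single_fix` additionally asserts that exactly one fix is suggested.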
pub(crate) fn check_no_fix(ra_fixture: &str) { let (db, file_position) = RootDatabase::with_position(ra_fixture); diff --git a/crates/ide/src/doc_links.rs b/crates/ide/src/doc_links.rs index dbe6a5507cc3e..18821bd78bfac 100644 --- a/crates/ide/src/doc_links.rs +++ b/crates/ide/src/doc_links.rs @@ -501,7 +501,7 @@ fn get_doc_base_urls( let Some(krate) = def.krate(db) else { return Default::default() }; let Some(display_name) = krate.display_name(db) else { return Default::default() }; let crate_data = &db.crate_graph()[krate.into()]; - let channel = crate_data.channel().unwrap_or(ReleaseChannel::Nightly).as_str(); + let channel = db.toolchain_channel(krate.into()).unwrap_or(ReleaseChannel::Nightly).as_str(); let (web_base, local_base) = match &crate_data.origin { // std and co do not specify `html_root_url` any longer so we gotta handwrite this ourself. diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 30bfe6ee9dc34..69ddc1e45efbd 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -7263,8 +7263,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6157..6365, - focus_range: 6222..6228, + full_range: 6290..6498, + focus_range: 6355..6361, name: "Future", kind: Trait, container_name: "future", @@ -7277,8 +7277,8 @@ impl Iterator for S { file_id: FileId( 1, ), - full_range: 6995..7461, - focus_range: 7039..7047, + full_range: 7128..7594, + focus_range: 7172..7180, name: "Iterator", kind: Trait, container_name: "iterator", diff --git a/crates/ide/src/lib.rs b/crates/ide/src/lib.rs index effdbf2c1f041..3238887257a47 100644 --- a/crates/ide/src/lib.rs +++ b/crates/ide/src/lib.rs @@ -12,11 +12,6 @@ #![cfg_attr(feature = "in-rust-tree", feature(rustc_private))] #![recursion_limit = "128"] -#[allow(unused)] -macro_rules! 
eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - #[cfg(test)] mod fixture; @@ -258,11 +253,11 @@ impl Analysis { Env::default(), false, CrateOrigin::Local { repo: None, name: None }, - Err("Analysis::from_single_file has no target layout".into()), - None, ); change.change_file(file_id, Some(Arc::from(text))); change.set_crate_graph(crate_graph); + change.set_target_data_layouts(vec![Err("fixture has no layout".into())]); + change.set_toolchains(vec![None]); host.apply_change(change); (host.analysis(), file_id) } @@ -680,9 +675,8 @@ impl Analysis { &self, position: FilePosition, new_name: &str, - rename_external: bool, ) -> Cancellable> { - self.with_db(|db| rename::rename(db, position, new_name, rename_external)) + self.with_db(|db| rename::rename(db, position, new_name)) } pub fn prepare_rename( diff --git a/crates/ide/src/parent_module.rs b/crates/ide/src/parent_module.rs index 413dbf9c5dfc6..f67aea2d5b9c1 100644 --- a/crates/ide/src/parent_module.rs +++ b/crates/ide/src/parent_module.rs @@ -54,7 +54,7 @@ pub(crate) fn parent_module(db: &RootDatabase, position: FilePosition) -> Vec Vec { db.relevant_crates(file_id) .iter() diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index 9fce4bb0f8271..f2eedfa431693 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -84,7 +84,6 @@ pub(crate) fn rename( db: &RootDatabase, position: FilePosition, new_name: &str, - rename_external: bool, ) -> RenameResult { let sema = Semantics::new(db); let source_file = sema.parse(position.file_id); @@ -104,7 +103,7 @@ pub(crate) fn rename( return rename_to_self(&sema, local); } } - def.rename(&sema, new_name, rename_external) + def.rename(&sema, new_name) }) .collect(); @@ -123,9 +122,9 @@ pub(crate) fn will_rename_file( let module = sema.to_module_def(file_id)?; let def = Definition::Module(module); let mut change = if is_raw_identifier(new_name_stem) { - def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem]), true).ok()? + def.rename(&sema, &SmolStr::from_iter(["r#", new_name_stem])).ok()? } else { - def.rename(&sema, new_name_stem, true).ok()? + def.rename(&sema, new_name_stem).ok()? 
}; change.file_system_edits.clear(); Some(change) @@ -377,16 +376,11 @@ mod tests { use super::{RangeInfo, RenameError}; fn check(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { - check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after, true); + check_with_rename_config(new_name, ra_fixture_before, ra_fixture_after); } #[track_caller] - fn check_with_rename_config( - new_name: &str, - ra_fixture_before: &str, - ra_fixture_after: &str, - rename_external: bool, - ) { + fn check_with_rename_config(new_name: &str, ra_fixture_before: &str, ra_fixture_after: &str) { let ra_fixture_after = &trim_indent(ra_fixture_after); let (analysis, position) = fixture::position(ra_fixture_before); if !ra_fixture_after.starts_with("error: ") { @@ -395,7 +389,7 @@ mod tests { } } let rename_result = analysis - .rename(position, new_name, rename_external) + .rename(position, new_name) .unwrap_or_else(|err| panic!("Rename to '{new_name}' was cancelled: {err}")); match rename_result { Ok(source_change) => { @@ -426,10 +420,8 @@ mod tests { fn check_expect(new_name: &str, ra_fixture: &str, expect: Expect) { let (analysis, position) = fixture::position(ra_fixture); - let source_change = analysis - .rename(position, new_name, true) - .unwrap() - .expect("Expect returned a RenameError"); + let source_change = + analysis.rename(position, new_name).unwrap().expect("Expect returned a RenameError"); expect.assert_eq(&filter_expect(source_change)) } @@ -2636,19 +2628,7 @@ pub struct S; //- /main.rs crate:main deps:lib new_source_root:local use lib::S$0; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, - ); - - check( - "Baz", - r#" -//- /lib.rs crate:lib new_source_root:library -pub struct S; -//- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; -"#, - "use lib::Baz;\n", + "error: Cannot rename a non-local definition", ); } @@ -2663,8 +2643,7 @@ use core::hash::Hash; #[derive(H$0ash)] struct A; "#, - "error: Cannot rename a non-local definition as the config for it is disabled", - false, + "error: Cannot rename a non-local definition", ); } diff --git a/crates/ide/src/shuffle_crate_graph.rs b/crates/ide/src/shuffle_crate_graph.rs index bf6ad47a49527..453d1836e16e4 100644 --- a/crates/ide/src/shuffle_crate_graph.rs +++ b/crates/ide/src/shuffle_crate_graph.rs @@ -39,8 +39,6 @@ pub(crate) fn shuffle_crate_graph(db: &mut RootDatabase) { data.env.clone(), data.is_proc_macro, data.origin.clone(), - data.target_layout.clone(), - data.toolchain.clone(), ); new_proc_macros.insert(new_id, proc_macros[&old_id].clone()); map.insert(old_id, new_id); diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index dee5afbf8d9e8..5feaf21aa9795 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -1,14 +1,16 @@ //! This module provides `StaticIndex` which is used for powering //! 
read-only code browsers and emitting LSIF -use hir::{db::HirDatabase, Crate, HirFileIdExt, Module}; +use hir::{db::HirDatabase, Crate, HirFileIdExt, Module, Semantics}; use ide_db::{ base_db::{FileId, FileRange, SourceDatabaseExt}, defs::Definition, + documentation::Documentation, + famous_defs::FamousDefs, helpers::get_definition, FxHashMap, FxHashSet, RootDatabase, }; -use syntax::{AstNode, SyntaxKind::*, TextRange, T}; +use syntax::{AstNode, SyntaxKind::*, SyntaxNode, TextRange, T}; use crate::inlay_hints::InlayFieldsToResolve; use crate::navigation_target::UpmappingResult; @@ -22,7 +24,7 @@ use crate::{ /// A static representation of fully analyzed source code. /// -/// The intended use-case is powering read-only code browsers and emitting LSIF +/// The intended use-case is powering read-only code browsers and emitting LSIF/SCIP. #[derive(Debug)] pub struct StaticIndex<'a> { pub files: Vec, @@ -40,6 +42,7 @@ pub struct ReferenceData { #[derive(Debug)] pub struct TokenStaticData { + pub documentation: Option, pub hover: Option, pub definition: Option, pub references: Vec, @@ -103,6 +106,19 @@ fn all_modules(db: &dyn HirDatabase) -> Vec { modules } +fn documentation_for_definition( + sema: &Semantics<'_, RootDatabase>, + def: Definition, + scope_node: &SyntaxNode, +) -> Option { + let famous_defs = match &def { + Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + _ => None, + }; + + def.docs(sema.db, famous_defs.as_ref()) +} + impl StaticIndex<'_> { fn add_file(&mut self, file_id: FileId) { let current_crate = crates_for(self.db, file_id).pop().map(Into::into); @@ -169,6 +185,7 @@ impl StaticIndex<'_> { *it } else { let it = self.tokens.insert(TokenStaticData { + documentation: documentation_for_definition(&sema, def, &node), hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } diff --git a/crates/ide/src/status.rs b/crates/ide/src/status.rs index 3321a0513b6f3..c3d85e38936d9 100644 --- a/crates/ide/src/status.rs +++ b/crates/ide/src/status.rs @@ -72,8 +72,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { dependencies, origin, is_proc_macro, - target_layout, - toolchain, } = &crate_graph[crate_id]; format_to!( buf, @@ -91,12 +89,6 @@ pub(crate) fn status(db: &RootDatabase, file_id: Option) -> String { format_to!(buf, " Env: {:?}\n", env); format_to!(buf, " Origin: {:?}\n", origin); format_to!(buf, " Is a proc macro crate: {}\n", is_proc_macro); - format_to!(buf, " Workspace Target Layout: {:?}\n", target_layout); - format_to!( - buf, - " Workspace Toolchain: {}\n", - toolchain.as_ref().map_or_else(|| "n/a".into(), |v| v.to_string()) - ); let deps = dependencies .iter() .map(|dep| format!("{}={}", dep.name, dep.crate_id.into_raw())) diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index c6dc071c394e0..8c5592da63ecd 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -2,7 +2,7 @@ //! for incorporating changes. // Note, don't remove any public api from this. This API is consumed by external tools // to run rust-analyzer as a library. 
-use std::{collections::hash_map::Entry, mem, path::Path, sync}; +use std::{collections::hash_map::Entry, iter, mem, path::Path, sync}; use crossbeam_channel::{unbounded, Receiver}; use hir_expand::proc_macro::{ @@ -18,7 +18,6 @@ use itertools::Itertools; use proc_macro_api::{MacroDylib, ProcMacroServer}; use project_model::{CargoConfig, PackageRoot, ProjectManifest, ProjectWorkspace}; use span::Span; -use tt::DelimSpan; use vfs::{file_set::FileSetConfig, loader::Handle, AbsPath, AbsPathBuf, VfsPath}; pub struct LoadCargoConfig { @@ -68,9 +67,9 @@ pub fn load_workspace( let proc_macro_server = match &load_config.with_proc_macro_server { ProcMacroServerChoice::Sysroot => ws .find_sysroot_proc_macro_srv() - .and_then(|it| ProcMacroServer::spawn(it).map_err(Into::into)), + .and_then(|it| ProcMacroServer::spawn(it, extra_env).map_err(Into::into)), ProcMacroServerChoice::Explicit(path) => { - ProcMacroServer::spawn(path.clone()).map_err(Into::into) + ProcMacroServer::spawn(path.clone(), extra_env).map_err(Into::into) } ProcMacroServerChoice::None => Err(anyhow::format_err!("proc macro server disabled")), }; @@ -107,7 +106,7 @@ pub fn load_workspace( .collect() }; - let project_folders = ProjectFolders::new(&[ws], &[]); + let project_folders = ProjectFolders::new(std::slice::from_ref(&ws), &[]); loader.set_config(vfs::loader::Config { load: project_folders.load, watch: vec![], @@ -115,6 +114,7 @@ pub fn load_workspace( }); let host = load_crate_graph( + &ws, crate_graph, proc_macros, project_folders.source_root_config, @@ -273,7 +273,7 @@ impl SourceRootConfig { pub fn load_proc_macro( server: &ProcMacroServer, path: &AbsPath, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacroLoadResult { let res: Result, String> = (|| { let dylib = MacroDylib::new(path.to_path_buf()); @@ -283,7 +283,7 @@ pub fn load_proc_macro( } Ok(vec .into_iter() - .map(|expander| expander_to_proc_macro(expander, dummy_replace)) + .map(|expander| expander_to_proc_macro(expander, ignored_macros)) .collect()) })(); match res { @@ -302,6 +302,7 @@ pub fn load_proc_macro( } fn load_crate_graph( + ws: &ProjectWorkspace, crate_graph: CrateGraph, proc_macros: ProcMacros, source_root_config: SourceRootConfig, @@ -340,8 +341,17 @@ fn load_crate_graph( let source_roots = source_root_config.partition(vfs); analysis_change.set_roots(source_roots); + let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); + if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws + { + analysis_change.set_target_data_layouts( + iter::repeat(target_layout.clone()).take(num_crates).collect(), + ); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); + } host.apply_change(analysis_change); host @@ -349,7 +359,7 @@ fn load_crate_graph( fn expander_to_proc_macro( expander: proc_macro_api::ProcMacro, - dummy_replace: &[Box], + ignored_macros: &[Box], ) -> ProcMacro { let name = From::from(expander.name()); let kind = match expander.kind() { @@ -357,16 +367,8 @@ fn expander_to_proc_macro( proc_macro_api::ProcMacroKind::FuncLike => ProcMacroKind::FuncLike, proc_macro_api::ProcMacroKind::Attr => ProcMacroKind::Attr, }; - let expander: sync::Arc = - if dummy_replace.iter().any(|replace| **replace == name) { - match kind { - ProcMacroKind::Attr => sync::Arc::new(IdentityExpander), - _ => sync::Arc::new(EmptyExpander), - } - } else { - sync::Arc::new(Expander(expander)) - }; - ProcMacro { name, kind, expander } + let disabled = ignored_macros.iter().any(|replace| **replace == name); + ProcMacro { name, kind, expander: sync::Arc::new(Expander(expander)), disabled } } #[derive(Debug)] @@ -391,42 +393,6 @@ impl ProcMacroExpander for Expander { } } -/// Dummy identity expander, used for attribute proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct IdentityExpander; - -impl ProcMacroExpander for IdentityExpander { - fn expand( - &self, - subtree: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - _: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(subtree.clone()) - } -} - -/// Empty expander, used for proc-macros that are deliberately ignored by the user. -#[derive(Debug)] -struct EmptyExpander; - -impl ProcMacroExpander for EmptyExpander { - fn expand( - &self, - _: &tt::Subtree, - _: Option<&tt::Subtree>, - _: &Env, - call_site: Span, - _: Span, - _: Span, - ) -> Result, ProcMacroExpansionError> { - Ok(tt::Subtree::empty(DelimSpan { open: call_site, close: call_site })) - } -} - #[cfg(test)] mod tests { use ide_db::base_db::SourceDatabase; diff --git a/crates/mbe/src/expander/transcriber.rs b/crates/mbe/src/expander/transcriber.rs index 9291f799cca73..6d3055da28608 100644 --- a/crates/mbe/src/expander/transcriber.rs +++ b/crates/mbe/src/expander/transcriber.rs @@ -101,10 +101,20 @@ impl Bindings { }))) } MetaVarKind::Lifetime => { - Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { - text: SmolStr::new_static("'missing"), - span, - }))) + Fragment::Tokens(tt::TokenTree::Subtree(tt::Subtree { + delimiter: tt::Delimiter::invisible_spanned(span), + token_trees: Box::new([ + tt::TokenTree::Leaf(tt::Leaf::Punct(tt::Punct { + char: '\'', + span, + spacing: tt::Spacing::Joint, + })), + tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { + text: SmolStr::new_static("missing"), + span, + })), + ]), + })) } MetaVarKind::Literal => { Fragment::Tokens(tt::TokenTree::Leaf(tt::Leaf::Ident(tt::Ident { diff --git a/crates/mbe/src/syntax_bridge.rs b/crates/mbe/src/syntax_bridge.rs index bfc5d197f6834..3c270e30a9ba8 100644 --- a/crates/mbe/src/syntax_bridge.rs +++ b/crates/mbe/src/syntax_bridge.rs @@ -700,10 +700,12 @@ impl SynToken { } impl SrcToken, S> for SynToken { - fn kind(&self, ctx: &Converter) -> SyntaxKind { + fn kind(&self, _ctx: &Converter) -> SyntaxKind { match self { SynToken::Ordinary(token) => token.kind(), - SynToken::Punct { .. 
} => SyntaxKind::from_char(self.to_char(ctx).unwrap()).unwrap(), + SynToken::Punct { token, offset: i } => { + SyntaxKind::from_char(token.text().chars().nth(*i).unwrap()).unwrap() + } SynToken::Leaf(_) => { never!(); SyntaxKind::ERROR diff --git a/crates/parser/src/grammar/expressions.rs b/crates/parser/src/grammar/expressions.rs index f40c515fa079d..6b660180f8238 100644 --- a/crates/parser/src/grammar/expressions.rs +++ b/crates/parser/src/grammar/expressions.rs @@ -678,27 +678,38 @@ pub(crate) fn record_expr_field_list(p: &mut Parser<'_>) { attributes::outer_attrs(p); match p.current() { - IDENT | INT_NUMBER => { + IDENT | INT_NUMBER if p.nth_at(1, T![::]) => { // test_err record_literal_missing_ellipsis_recovery // fn main() { // S { S::default() } // } - if p.nth_at(1, T![::]) { - m.abandon(p); - p.expect(T![..]); - expr(p); - } else { + m.abandon(p); + p.expect(T![..]); + expr(p); + } + IDENT | INT_NUMBER => { + if p.nth_at(1, T![..]) { // test_err record_literal_before_ellipsis_recovery // fn main() { // S { field ..S::default() } // } - if p.nth_at(1, T![:]) || p.nth_at(1, T![..]) { + name_ref_or_index(p); + p.error("expected `:`"); + } else { + // test_err record_literal_field_eq_recovery + // fn main() { + // S { field = foo } + // } + if p.nth_at(1, T![:]) { + name_ref_or_index(p); + p.bump(T![:]); + } else if p.nth_at(1, T![=]) { name_ref_or_index(p); - p.expect(T![:]); + p.err_and_bump("expected `:`"); } expr(p); - m.complete(p, RECORD_EXPR_FIELD); } + m.complete(p, RECORD_EXPR_FIELD); } T![.] if p.at(T![..]) => { m.abandon(p); diff --git a/crates/parser/src/grammar/expressions/atom.rs b/crates/parser/src/grammar/expressions/atom.rs index 4197f248e0a97..48600641ad05b 100644 --- a/crates/parser/src/grammar/expressions/atom.rs +++ b/crates/parser/src/grammar/expressions/atom.rs @@ -58,6 +58,7 @@ pub(super) const ATOM_EXPR_FIRST: TokenSet = T![match], T![move], T![return], + T![become], T![static], T![try], T![unsafe], @@ -102,6 +103,7 @@ pub(super) fn atom_expr( T![try] => try_block_expr(p, None), T![match] => match_expr(p), T![return] => return_expr(p), + T![become] => become_expr(p), T![yield] => yield_expr(p), T![do] if p.nth_at_contextual_kw(1, T![yeet]) => yeet_expr(p), T![continue] => continue_expr(p), @@ -621,6 +623,18 @@ fn return_expr(p: &mut Parser<'_>) -> CompletedMarker { m.complete(p, RETURN_EXPR) } +// test become_expr +// fn foo() { +// become foo(); +// } +fn become_expr(p: &mut Parser<'_>) -> CompletedMarker { + assert!(p.at(T![become])); + let m = p.start(); + p.bump(T![become]); + expr(p); + m.complete(p, BECOME_EXPR) +} + // test yield_expr // fn foo() { // yield; diff --git a/crates/parser/src/grammar/generic_params.rs b/crates/parser/src/grammar/generic_params.rs index 3c577aa3cb499..4498daf21a3d8 100644 --- a/crates/parser/src/grammar/generic_params.rs +++ b/crates/parser/src/grammar/generic_params.rs @@ -157,6 +157,16 @@ fn type_bound(p: &mut Parser<'_>) -> bool { p.bump_any(); p.expect(T![const]); } + // test const_trait_bound + // const fn foo(_: impl const Trait) {} + T![const] => { + p.bump_any(); + } + // test async_trait_bound + // fn async_foo(_: impl async Fn(&i32)) {} + T![async] => { + p.bump_any(); + } _ => (), } if paths::is_use_path_start(p) { diff --git a/crates/parser/src/grammar/patterns.rs b/crates/parser/src/grammar/patterns.rs index 39ded41bb2413..5036742337921 100644 --- a/crates/parser/src/grammar/patterns.rs +++ b/crates/parser/src/grammar/patterns.rs @@ -323,6 +323,15 @@ fn record_pat_field(p: &mut Parser<'_>) { p.bump(T![:]); 
pattern(p); } + // test_err record_pat_field_eq_recovery + // fn main() { + // let S { field = foo }; + // } + IDENT | INT_NUMBER if p.nth(1) == T![=] => { + name_ref_or_index(p); + p.err_and_bump("expected `:`"); + pattern(p); + } T![box] => { // FIXME: not all box patterns should be allowed box_pat(p); diff --git a/crates/parser/src/syntax_kind/generated.rs b/crates/parser/src/syntax_kind/generated.rs index 4b589037672f4..6ecfdc9f4664c 100644 --- a/crates/parser/src/syntax_kind/generated.rs +++ b/crates/parser/src/syntax_kind/generated.rs @@ -90,6 +90,7 @@ pub enum SyntaxKind { PUB_KW, REF_KW, RETURN_KW, + BECOME_KW, SELF_KW, SELF_TYPE_KW, STATIC_KW, @@ -195,6 +196,7 @@ pub enum SyntaxKind { BLOCK_EXPR, STMT_LIST, RETURN_EXPR, + BECOME_EXPR, YIELD_EXPR, YEET_EXPR, LET_EXPR, @@ -307,6 +309,7 @@ impl SyntaxKind { | PUB_KW | REF_KW | RETURN_KW + | BECOME_KW | SELF_KW | SELF_TYPE_KW | STATIC_KW @@ -425,6 +428,7 @@ impl SyntaxKind { "pub" => PUB_KW, "ref" => REF_KW, "return" => RETURN_KW, + "become" => BECOME_KW, "self" => SELF_KW, "Self" => SELF_TYPE_KW, "static" => STATIC_KW, @@ -496,4 +500,4 @@ impl SyntaxKind { } } #[macro_export] -macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] 
=> { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] => { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind 
:: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } +macro_rules ! T { [;] => { $ crate :: SyntaxKind :: SEMICOLON } ; [,] => { $ crate :: SyntaxKind :: COMMA } ; ['('] => { $ crate :: SyntaxKind :: L_PAREN } ; [')'] => { $ crate :: SyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: SyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: SyntaxKind :: R_CURLY } ; ['['] => { $ crate :: SyntaxKind :: L_BRACK } ; [']'] => { $ crate :: SyntaxKind :: R_BRACK } ; [<] => { $ crate :: SyntaxKind :: L_ANGLE } ; [>] => { $ crate :: SyntaxKind :: R_ANGLE } ; [@] => { $ crate :: SyntaxKind :: AT } ; [#] => { $ crate :: SyntaxKind :: POUND } ; [~] => { $ crate :: SyntaxKind :: TILDE } ; [?] => { $ crate :: SyntaxKind :: QUESTION } ; [$] => { $ crate :: SyntaxKind :: DOLLAR } ; [&] => { $ crate :: SyntaxKind :: AMP } ; [|] => { $ crate :: SyntaxKind :: PIPE } ; [+] => { $ crate :: SyntaxKind :: PLUS } ; [*] => { $ crate :: SyntaxKind :: STAR } ; [/] => { $ crate :: SyntaxKind :: SLASH } ; [^] => { $ crate :: SyntaxKind :: CARET } ; [%] => { $ crate :: SyntaxKind :: PERCENT } ; [_] => { $ crate :: SyntaxKind :: UNDERSCORE } ; [.] => { $ crate :: SyntaxKind :: DOT } ; [..] => { $ crate :: SyntaxKind :: DOT2 } ; [...] => { $ crate :: SyntaxKind :: DOT3 } ; [..=] => { $ crate :: SyntaxKind :: DOT2EQ } ; [:] => { $ crate :: SyntaxKind :: COLON } ; [::] => { $ crate :: SyntaxKind :: COLON2 } ; [=] => { $ crate :: SyntaxKind :: EQ } ; [==] => { $ crate :: SyntaxKind :: EQ2 } ; [=>] => { $ crate :: SyntaxKind :: FAT_ARROW } ; [!] => { $ crate :: SyntaxKind :: BANG } ; [!=] => { $ crate :: SyntaxKind :: NEQ } ; [-] => { $ crate :: SyntaxKind :: MINUS } ; [->] => { $ crate :: SyntaxKind :: THIN_ARROW } ; [<=] => { $ crate :: SyntaxKind :: LTEQ } ; [>=] => { $ crate :: SyntaxKind :: GTEQ } ; [+=] => { $ crate :: SyntaxKind :: PLUSEQ } ; [-=] => { $ crate :: SyntaxKind :: MINUSEQ } ; [|=] => { $ crate :: SyntaxKind :: PIPEEQ } ; [&=] => { $ crate :: SyntaxKind :: AMPEQ } ; [^=] => { $ crate :: SyntaxKind :: CARETEQ } ; [/=] => { $ crate :: SyntaxKind :: SLASHEQ } ; [*=] => { $ crate :: SyntaxKind :: STAREQ } ; [%=] => { $ crate :: SyntaxKind :: PERCENTEQ } ; [&&] => { $ crate :: SyntaxKind :: AMP2 } ; [||] => { $ crate :: SyntaxKind :: PIPE2 } ; [<<] => { $ crate :: SyntaxKind :: SHL } ; [>>] => { $ crate :: SyntaxKind :: SHR } ; [<<=] => { $ crate :: SyntaxKind :: SHLEQ } ; [>>=] => { $ crate :: SyntaxKind :: SHREQ } ; [as] => { $ crate :: SyntaxKind :: AS_KW } ; [async] => { $ crate :: SyntaxKind :: ASYNC_KW } ; [await] => { $ crate :: SyntaxKind :: AWAIT_KW } ; [box] => { $ crate :: SyntaxKind :: BOX_KW } ; [break] => { $ crate :: SyntaxKind :: BREAK_KW } ; [const] => { $ crate :: SyntaxKind :: CONST_KW } ; [continue] => { $ crate :: SyntaxKind :: CONTINUE_KW } ; [crate] => { $ crate :: SyntaxKind :: CRATE_KW } ; [do] => { $ crate :: SyntaxKind :: DO_KW } ; [dyn] => { $ crate :: SyntaxKind :: DYN_KW } ; [else] => { $ crate :: SyntaxKind :: ELSE_KW } ; [enum] => { $ crate :: SyntaxKind :: ENUM_KW } ; [extern] => { $ crate :: SyntaxKind :: EXTERN_KW } ; [false] => { $ crate :: SyntaxKind :: FALSE_KW } ; [fn] => { $ crate :: SyntaxKind :: FN_KW } ; [for] => { $ crate :: SyntaxKind :: FOR_KW } ; [if] => { $ crate :: SyntaxKind :: IF_KW } ; [impl] => { $ crate :: SyntaxKind :: IMPL_KW } ; [in] => { $ crate :: SyntaxKind :: IN_KW } ; [let] 
=> { $ crate :: SyntaxKind :: LET_KW } ; [loop] => { $ crate :: SyntaxKind :: LOOP_KW } ; [macro] => { $ crate :: SyntaxKind :: MACRO_KW } ; [match] => { $ crate :: SyntaxKind :: MATCH_KW } ; [mod] => { $ crate :: SyntaxKind :: MOD_KW } ; [move] => { $ crate :: SyntaxKind :: MOVE_KW } ; [mut] => { $ crate :: SyntaxKind :: MUT_KW } ; [pub] => { $ crate :: SyntaxKind :: PUB_KW } ; [ref] => { $ crate :: SyntaxKind :: REF_KW } ; [return] => { $ crate :: SyntaxKind :: RETURN_KW } ; [become] => { $ crate :: SyntaxKind :: BECOME_KW } ; [self] => { $ crate :: SyntaxKind :: SELF_KW } ; [Self] => { $ crate :: SyntaxKind :: SELF_TYPE_KW } ; [static] => { $ crate :: SyntaxKind :: STATIC_KW } ; [struct] => { $ crate :: SyntaxKind :: STRUCT_KW } ; [super] => { $ crate :: SyntaxKind :: SUPER_KW } ; [trait] => { $ crate :: SyntaxKind :: TRAIT_KW } ; [true] => { $ crate :: SyntaxKind :: TRUE_KW } ; [try] => { $ crate :: SyntaxKind :: TRY_KW } ; [type] => { $ crate :: SyntaxKind :: TYPE_KW } ; [unsafe] => { $ crate :: SyntaxKind :: UNSAFE_KW } ; [use] => { $ crate :: SyntaxKind :: USE_KW } ; [where] => { $ crate :: SyntaxKind :: WHERE_KW } ; [while] => { $ crate :: SyntaxKind :: WHILE_KW } ; [yield] => { $ crate :: SyntaxKind :: YIELD_KW } ; [auto] => { $ crate :: SyntaxKind :: AUTO_KW } ; [builtin] => { $ crate :: SyntaxKind :: BUILTIN_KW } ; [default] => { $ crate :: SyntaxKind :: DEFAULT_KW } ; [existential] => { $ crate :: SyntaxKind :: EXISTENTIAL_KW } ; [union] => { $ crate :: SyntaxKind :: UNION_KW } ; [raw] => { $ crate :: SyntaxKind :: RAW_KW } ; [macro_rules] => { $ crate :: SyntaxKind :: MACRO_RULES_KW } ; [yeet] => { $ crate :: SyntaxKind :: YEET_KW } ; [offset_of] => { $ crate :: SyntaxKind :: OFFSET_OF_KW } ; [asm] => { $ crate :: SyntaxKind :: ASM_KW } ; [format_args] => { $ crate :: SyntaxKind :: FORMAT_ARGS_KW } ; [lifetime_ident] => { $ crate :: SyntaxKind :: LIFETIME_IDENT } ; [ident] => { $ crate :: SyntaxKind :: IDENT } ; [shebang] => { $ crate :: SyntaxKind :: SHEBANG } ; } diff --git a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast index f511960040d5f..741b7845e7f14 100644 --- a/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast +++ b/crates/parser/test_data/parser/inline/err/0014_record_literal_before_ellipsis_recovery.rast @@ -24,26 +24,26 @@ SOURCE_FILE RECORD_EXPR_FIELD NAME_REF IDENT "field" - WHITESPACE " " - RANGE_EXPR - DOT2 ".." - CALL_EXPR - PATH_EXPR - PATH - PATH - PATH_SEGMENT - NAME_REF - IDENT "S" - COLON2 "::" - PATH_SEGMENT - NAME_REF - IDENT "default" - ARG_LIST - L_PAREN "(" - R_PAREN ")" + WHITESPACE " " + DOT2 ".." 
+ CALL_EXPR + PATH_EXPR + PATH + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + COLON2 "::" + PATH_SEGMENT + NAME_REF + IDENT "default" + ARG_LIST + L_PAREN "(" + R_PAREN ")" WHITESPACE " " R_CURLY "}" WHITESPACE "\n" R_CURLY "}" WHITESPACE "\n" -error 25: expected COLON +error 25: expected `:` +error 25: expected COMMA diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast new file mode 100644 index 0000000000000..ad4deeb0b67c9 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rast @@ -0,0 +1,41 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + RECORD_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_EXPR_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_EXPR_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 26: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs new file mode 100644 index 0000000000000..1eb1aa9b92642 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0032_record_literal_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + S { field = foo } +} diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast new file mode 100644 index 0000000000000..6940a84b68302 --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "main" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + LET_STMT + LET_KW "let" + WHITESPACE " " + RECORD_PAT + PATH + PATH_SEGMENT + NAME_REF + IDENT "S" + WHITESPACE " " + RECORD_PAT_FIELD_LIST + L_CURLY "{" + WHITESPACE " " + RECORD_PAT_FIELD + NAME_REF + IDENT "field" + WHITESPACE " " + ERROR + EQ "=" + WHITESPACE " " + IDENT_PAT + NAME + IDENT "foo" + WHITESPACE " " + R_CURLY "}" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" +error 30: expected `:` diff --git a/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs new file mode 100644 index 0000000000000..c4949d6e12e7a --- /dev/null +++ b/crates/parser/test_data/parser/inline/err/0033_record_pat_field_eq_recovery.rs @@ -0,0 +1,3 @@ +fn main() { + let S { field = foo }; +} diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast new file mode 100644 index 0000000000000..c544cf4e5e3ef --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rast @@ -0,0 +1,31 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + WHITESPACE "\n " + EXPR_STMT + BECOME_EXPR + BECOME_KW "become" + WHITESPACE " " + 
CALL_EXPR + PATH_EXPR + PATH + PATH_SEGMENT + NAME_REF + IDENT "foo" + ARG_LIST + L_PAREN "(" + R_PAREN ")" + SEMICOLON ";" + WHITESPACE "\n" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs new file mode 100644 index 0000000000000..918a83ca6e83e --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0209_become_expr.rs @@ -0,0 +1,3 @@ +fn foo() { + become foo(); +} diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast new file mode 100644 index 0000000000000..ebf758286a7c2 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rast @@ -0,0 +1,43 @@ +SOURCE_FILE + FN + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "async_foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + ASYNC_KW "async" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Fn" + PARAM_LIST + L_PAREN "(" + PARAM + REF_TYPE + AMP "&" + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "i32" + R_PAREN ")" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs new file mode 100644 index 0000000000000..04d44175d778d --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0211_async_trait_bound.rs @@ -0,0 +1 @@ +fn async_foo(_: impl async Fn(&i32)) {} diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast new file mode 100644 index 0000000000000..646873881bcb0 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rast @@ -0,0 +1,34 @@ +SOURCE_FILE + FN + CONST_KW "const" + WHITESPACE " " + FN_KW "fn" + WHITESPACE " " + NAME + IDENT "foo" + PARAM_LIST + L_PAREN "(" + PARAM + WILDCARD_PAT + UNDERSCORE "_" + COLON ":" + WHITESPACE " " + IMPL_TRAIT_TYPE + IMPL_KW "impl" + WHITESPACE " " + TYPE_BOUND_LIST + TYPE_BOUND + CONST_KW "const" + WHITESPACE " " + PATH_TYPE + PATH + PATH_SEGMENT + NAME_REF + IDENT "Trait" + R_PAREN ")" + WHITESPACE " " + BLOCK_EXPR + STMT_LIST + L_CURLY "{" + R_CURLY "}" + WHITESPACE "\n" diff --git a/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs new file mode 100644 index 0000000000000..8eb8f84c91f45 --- /dev/null +++ b/crates/parser/test_data/parser/inline/ok/0212_const_trait_bound.rs @@ -0,0 +1 @@ +const fn foo(_: impl const Trait) {} diff --git a/crates/proc-macro-api/src/lib.rs b/crates/proc-macro-api/src/lib.rs index 1dadfc40ac431..6b16711a8d87b 100644 --- a/crates/proc-macro-api/src/lib.rs +++ b/crates/proc-macro-api/src/lib.rs @@ -13,6 +13,7 @@ mod version; use indexmap::IndexSet; use paths::AbsPathBuf; +use rustc_hash::FxHashMap; use span::Span; use std::{ fmt, io, @@ -107,8 +108,11 @@ pub struct MacroPanic { impl ProcMacroServer { /// Spawns an external process as the proc macro server and returns a client connected to it. 
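
The surrounding `proc-macro-api` hunks thread a caller-supplied environment map from `ProcMacroServer::spawn` down to the spawned server process via `Command::envs`. Below is a minimal standalone sketch of that env-forwarding idea, using only `std` with a placeholder binary name and marker variable instead of rust-analyzer's `AbsPathBuf`/`JodChild` types; it is an illustration of the pattern, not the actual implementation.

use std::collections::HashMap;
use std::io;
use std::process::{Child, Command, Stdio};

// Spawn a child with caller-supplied environment variables forwarded via
// `Command::envs`, the same call the patched `mk_child` adds. The binary
// name, the marker variable, and its value are placeholders.
fn spawn_with_env(path: &str, extra_env: &HashMap<String, String>) -> io::Result<Child> {
    let mut cmd = Command::new(path);
    cmd.envs(extra_env) // caller-supplied variables, e.g. from the workspace configuration
        .env("EXAMPLE_MARKER", "set by parent") // the real code sets RUST_ANALYZER_INTERNALS_DO_NOT_USE here
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .stderr(Stdio::inherit());
    cmd.spawn()
}

fn main() -> io::Result<()> {
    let mut extra = HashMap::new();
    extra.insert("FOO".to_owned(), "bar".to_owned());
    // `env` is a convenient stand-in binary on most Unix systems.
    let child = spawn_with_env("env", &extra)?;
    let output = child.wait_with_output()?;
    println!("child exited with {}", output.status);
    Ok(())
}

As in the patched `mk_child`, the fixed marker variable is layered on top of the forwarded map, so the parent-controlled value always wins.
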
- pub fn spawn(process_path: AbsPathBuf) -> io::Result { - let process = ProcMacroProcessSrv::run(process_path)?; + pub fn spawn( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { + let process = ProcMacroProcessSrv::run(process_path, env)?; Ok(ProcMacroServer { process: Arc::new(Mutex::new(process)) }) } diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 96f97bf5e205e..12eafcea442d3 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -7,6 +7,7 @@ use std::{ }; use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashMap; use stdx::JodChild; use crate::{ @@ -26,9 +27,12 @@ pub(crate) struct ProcMacroProcessSrv { } impl ProcMacroProcessSrv { - pub(crate) fn run(process_path: AbsPathBuf) -> io::Result { + pub(crate) fn run( + process_path: AbsPathBuf, + env: &FxHashMap, + ) -> io::Result { let create_srv = |null_stderr| { - let mut process = Process::run(process_path.clone(), null_stderr)?; + let mut process = Process::run(process_path.clone(), env, null_stderr)?; let (stdin, stdout) = process.stdio().expect("couldn't access child stdio"); io::Result::Ok(ProcMacroProcessSrv { @@ -147,8 +151,12 @@ struct Process { } impl Process { - fn run(path: AbsPathBuf, null_stderr: bool) -> io::Result { - let child = JodChild(mk_child(&path, null_stderr)?); + fn run( + path: AbsPathBuf, + env: &FxHashMap, + null_stderr: bool, + ) -> io::Result { + let child = JodChild(mk_child(&path, env, null_stderr)?); Ok(Process { child }) } @@ -161,9 +169,14 @@ impl Process { } } -fn mk_child(path: &AbsPath, null_stderr: bool) -> io::Result { +fn mk_child( + path: &AbsPath, + env: &FxHashMap, + null_stderr: bool, +) -> io::Result { let mut cmd = Command::new(path.as_os_str()); - cmd.env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") + cmd.envs(env) + .env("RUST_ANALYZER_INTERNALS_DO_NOT_USE", "this is unstable") .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(if null_stderr { Stdio::null() } else { Stdio::inherit() }); diff --git a/crates/proc-macro-srv/Cargo.toml b/crates/proc-macro-srv/Cargo.toml index ba17ea6f7b439..bd7a31654584f 100644 --- a/crates/proc-macro-srv/Cargo.toml +++ b/crates/proc-macro-srv/Cargo.toml @@ -29,6 +29,7 @@ paths.workspace = true base-db.workspace = true span.workspace = true proc-macro-api.workspace = true +ra-ap-rustc_lexer.workspace = true [dev-dependencies] expect-test = "1.4.0" diff --git a/crates/proc-macro-srv/src/lib.rs b/crates/proc-macro-srv/src/lib.rs index 460a96c07f367..831632c64c0a2 100644 --- a/crates/proc-macro-srv/src/lib.rs +++ b/crates/proc-macro-srv/src/lib.rs @@ -20,6 +20,11 @@ extern crate proc_macro; #[cfg(feature = "in-rust-tree")] extern crate rustc_driver as _; +#[cfg(not(feature = "in-rust-tree"))] +extern crate ra_ap_rustc_lexer as rustc_lexer; +#[cfg(feature = "in-rust-tree")] +extern crate rustc_lexer; + mod dylib; mod proc_macros; mod server; diff --git a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index 8a9d52a37a2f3..c6a0a6665553f 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -70,11 +70,58 @@ impl server::FreeFunctions for RaSpanServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = 
tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. } => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 15a9e0deae44f..7e9d8057ac9a5 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -62,11 +62,58 @@ impl server::FreeFunctions for TokenIdServer { &mut self, s: &str, ) -> Result, ()> { - // FIXME: keep track of LitKind and Suffix + use proc_macro::bridge::LitKind; + use rustc_lexer::{LiteralKind, Token, TokenKind}; + + let mut tokens = rustc_lexer::tokenize(s); + let minus_or_lit = tokens.next().unwrap_or(Token { kind: TokenKind::Eof, len: 0 }); + + let lit = if minus_or_lit.kind == TokenKind::Minus { + let lit = tokens.next().ok_or(())?; + if !matches!( + lit.kind, + TokenKind::Literal { + kind: LiteralKind::Int { .. } | LiteralKind::Float { .. }, + .. + } + ) { + return Err(()); + } + lit + } else { + minus_or_lit + }; + + if tokens.next().is_some() { + return Err(()); + } + + let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; + let kind = match kind { + LiteralKind::Int { .. } => LitKind::Integer, + LiteralKind::Float { .. } => LitKind::Float, + LiteralKind::Char { .. } => LitKind::Char, + LiteralKind::Byte { .. } => LitKind::Byte, + LiteralKind::Str { .. } => LitKind::Str, + LiteralKind::ByteStr { .. } => LitKind::ByteStr, + LiteralKind::CStr { .. 
} => LitKind::CStr, + LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), + LiteralKind::RawByteStr { n_hashes } => { + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) + } + LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + }; + + let (lit, suffix) = s.split_at(suffix_start as usize); + let suffix = match suffix { + "" | "_" => None, + suffix => Some(Symbol::intern(self.interner, suffix)), + }; + Ok(bridge::Literal { - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, s), - suffix: None, + kind, + symbol: Symbol::intern(self.interner, lit), + suffix, span: self.call_site, }) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index 87d832cc76fa0..e5bfe5ee92cd8 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,8 +169,8 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r#"1u16, 2_u32, -4i64, 3.14f32, "hello bridge""#, - expect![[r#" + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 PUNCH , [alone] 1 @@ -181,8 +181,12 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL "hello bridge" 1"#]], - expect![[r#" + LITERAL ""hello bridge"" 1 + PUNCH , [alone] 1 + LITERAL ""suffixed""suffix 1 + PUNCH , [alone] 1 + LITERAL r##"r##"raw"##"## 1"###]], + expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 4..5, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -193,7 +197,11 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"#]], + LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index a2c9856a3f733..ab72f1fba09dd 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -20,10 +20,11 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use serde::Deserialize; +use toolchain::Tool; use crate::{ cfg_flag::CfgFlag, utf8_stdout, CargoConfig, CargoFeatures, CargoWorkspace, InvocationLocation, - InvocationStrategy, Package, + 
InvocationStrategy, Package, Sysroot, TargetKind, }; #[derive(Debug, Default, Clone, PartialEq, Eq)] @@ -61,6 +62,7 @@ impl WorkspaceBuildScripts { config: &CargoConfig, allowed_features: &FxHashSet, workspace_root: &AbsPathBuf, + sysroot: Option<&Sysroot>, ) -> io::Result { let mut cmd = match config.run_build_script_command.as_deref() { Some([program, args @ ..]) => { @@ -69,7 +71,8 @@ impl WorkspaceBuildScripts { cmd } _ => { - let mut cmd = Command::new(toolchain::cargo()); + let mut cmd = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.args(["check", "--quiet", "--workspace", "--message-format=json"]); cmd.args(&config.extra_args); @@ -133,6 +136,7 @@ impl WorkspaceBuildScripts { workspace: &CargoWorkspace, progress: &dyn Fn(String), toolchain: &Option, + sysroot: Option<&Sysroot>, ) -> io::Result { const RUST_1_62: Version = Version::new(1, 62, 0); @@ -151,6 +155,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?, workspace, current_dir, @@ -165,6 +170,7 @@ impl WorkspaceBuildScripts { config, &allowed_features, &workspace.workspace_root().to_path_buf(), + sysroot, )?; cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; @@ -194,7 +200,7 @@ impl WorkspaceBuildScripts { )) } }; - let cmd = Self::build_command(config, &Default::default(), workspace_root)?; + let cmd = Self::build_command(config, &Default::default(), workspace_root, None)?; // NB: Cargo.toml could have been modified between `cargo metadata` and // `cargo check`. We shouldn't assume that package ids we see here are // exactly those from `config`. @@ -415,6 +421,7 @@ impl WorkspaceBuildScripts { rustc: &CargoWorkspace, current_dir: &AbsPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Self { let mut bs = WorkspaceBuildScripts::default(); for p in rustc.packages() { @@ -422,7 +429,8 @@ impl WorkspaceBuildScripts { } let res = (|| { let target_libdir = (|| { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(current_dir) @@ -431,7 +439,8 @@ impl WorkspaceBuildScripts { if let Ok(it) = utf8_stdout(cargo_config) { return Ok(it); } - let mut cmd = Command::new(toolchain::rustc()); + let mut cmd = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "target-libdir"]); utf8_stdout(cmd) @@ -458,7 +467,11 @@ impl WorkspaceBuildScripts { .collect(); for p in rustc.packages() { let package = &rustc[p]; - if package.targets.iter().any(|&it| rustc[it].is_proc_macro) { + if package + .targets + .iter() + .any(|&it| matches!(rustc[it].kind, TargetKind::Lib { is_proc_macro: true })) + { if let Some((_, path)) = proc_macro_dylibs .iter() .find(|(name, _)| *name.trim_start_matches("lib") == package.name) diff --git a/crates/project-model/src/cargo_workspace.rs b/crates/project-model/src/cargo_workspace.rs index a99ee6e664c5f..08d86fd7b0fee 100644 --- a/crates/project-model/src/cargo_workspace.rs +++ b/crates/project-model/src/cargo_workspace.rs @@ -12,8 +12,9 @@ use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use serde::Deserialize; use serde_json::from_value; +use toolchain::Tool; -use crate::{utf8_stdout, InvocationLocation, 
ManifestPath}; +use crate::{utf8_stdout, InvocationLocation, ManifestPath, Sysroot}; use crate::{CfgOverrides, InvocationStrategy}; /// [`CargoWorkspace`] represents the logical structure of, well, a Cargo @@ -188,8 +189,6 @@ pub struct TargetData { pub root: AbsPathBuf, /// Kind of target pub kind: TargetKind, - /// Is this target a proc-macro - pub is_proc_macro: bool, /// Required features of the target without which it won't build pub required_features: Vec, } @@ -198,7 +197,10 @@ pub struct TargetData { pub enum TargetKind { Bin, /// Any kind of Cargo lib crate-type (dylib, rlib, proc-macro, ...). - Lib, + Lib { + /// Is this target a proc-macro + is_proc_macro: bool, + }, Example, Test, Bench, @@ -215,8 +217,8 @@ impl TargetKind { "bench" => TargetKind::Bench, "example" => TargetKind::Example, "custom-build" => TargetKind::BuildScript, - "proc-macro" => TargetKind::Lib, - _ if kind.contains("lib") => TargetKind::Lib, + "proc-macro" => TargetKind::Lib { is_proc_macro: true }, + _ if kind.contains("lib") => TargetKind::Lib { is_proc_macro: false }, _ => continue, }; } @@ -236,12 +238,13 @@ impl CargoWorkspace { cargo_toml: &ManifestPath, current_dir: &AbsPath, config: &CargoConfig, + sysroot: Option<&Sysroot>, progress: &dyn Fn(String), ) -> anyhow::Result { - let targets = find_list_of_build_targets(config, cargo_toml); + let targets = find_list_of_build_targets(config, cargo_toml, sysroot); let mut meta = MetadataCommand::new(); - meta.cargo_path(toolchain::cargo()); + meta.cargo_path(Tool::Cargo.path()); meta.manifest_path(cargo_toml.to_path_buf()); match &config.features { CargoFeatures::All => { @@ -289,6 +292,7 @@ impl CargoWorkspace { (|| -> Result { let mut command = meta.cargo_command(); + Sysroot::set_rustup_toolchain_env(&mut command, sysroot); command.envs(&config.extra_env); let output = command.output()?; if !output.status.success() { @@ -368,7 +372,6 @@ impl CargoWorkspace { name, root: AbsPathBuf::assert(src_path.into()), kind: TargetKind::new(&kind), - is_proc_macro: *kind == ["proc-macro"], required_features, }); pkg_data.targets.push(tgt); @@ -476,24 +479,30 @@ impl CargoWorkspace { } } -fn find_list_of_build_targets(config: &CargoConfig, cargo_toml: &ManifestPath) -> Vec { +fn find_list_of_build_targets( + config: &CargoConfig, + cargo_toml: &ManifestPath, + sysroot: Option<&Sysroot>, +) -> Vec { if let Some(target) = &config.target { return [target.into()].to_vec(); } - let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env); + let build_targets = cargo_config_build_target(cargo_toml, &config.extra_env, sysroot); if !build_targets.is_empty() { return build_targets; } - rustc_discover_host_triple(cargo_toml, &config.extra_env).into_iter().collect() + rustc_discover_host_triple(cargo_toml, &config.extra_env, sysroot).into_iter().collect() } fn rustc_discover_host_triple( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Option { - let mut rustc = Command::new(toolchain::rustc()); + let mut rustc = Command::new(Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut rustc, sysroot); rustc.envs(extra_env); rustc.current_dir(cargo_toml.parent()).arg("-vV"); tracing::debug!("Discovering host platform by {:?}", rustc); @@ -519,8 +528,10 @@ fn rustc_discover_host_triple( fn cargo_config_build_target( cargo_toml: &ManifestPath, extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, ) -> Vec { - let mut cargo_config = Command::new(toolchain::cargo()); + let mut cargo_config = Command::new(Tool::Cargo.path()); + 
Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); cargo_config.envs(extra_env); cargo_config .current_dir(cargo_toml.parent()) diff --git a/crates/project-model/src/project_json.rs b/crates/project-model/src/project_json.rs index cf3231498f3e7..fba0aaa8ce9f4 100644 --- a/crates/project-model/src/project_json.rs +++ b/crates/project-model/src/project_json.rs @@ -49,7 +49,7 @@ //! user explores them belongs to that extension (it's totally valid to change //! rust-project.json over time via configuration request!) -use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, DependencyKind, Edition}; +use base_db::{CrateDisplayName, CrateId, CrateName, Dependency, Edition}; use la_arena::RawIdx; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; @@ -135,7 +135,6 @@ impl ProjectJson { Dependency::new( dep_data.name, CrateId::from_raw(RawIdx::from(dep_data.krate as u32)), - DependencyKind::Normal, ) }) .collect::>(), diff --git a/crates/project-model/src/rustc_cfg.rs b/crates/project-model/src/rustc_cfg.rs index 0aee002fbb3ff..1ad6e7255bf10 100644 --- a/crates/project-model/src/rustc_cfg.rs +++ b/crates/project-model/src/rustc_cfg.rs @@ -8,17 +8,13 @@ use rustc_hash::FxHashMap; use crate::{cfg_flag::CfgFlag, utf8_stdout, ManifestPath, Sysroot}; /// Determines how `rustc --print cfg` is discovered and invoked. -/// -/// There options are supported: -/// - [`RustcCfgConfig::Cargo`], which relies on `cargo rustc --print cfg` -/// and `RUSTC_BOOTSTRAP`. -/// - [`RustcCfgConfig::Explicit`], which uses an explicit path to the `rustc` -/// binary in the sysroot. -/// - [`RustcCfgConfig::Discover`], which uses [`toolchain::rustc`]. pub(crate) enum RustcCfgConfig<'a> { - Cargo(&'a ManifestPath), - Explicit(&'a Sysroot), - Discover, + /// Use `rustc --print cfg`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::rustc`]. + Rustc(Option<&'a Sysroot>), + /// Use `cargo --print cfg`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::cargo`]. 
+ Cargo(Option<&'a Sysroot>, &'a ManifestPath), } pub(crate) fn get( @@ -71,9 +67,10 @@ fn get_rust_cfgs( extra_env: &FxHashMap, config: RustcCfgConfig<'_>, ) -> anyhow::Result { - let mut cmd = match config { - RustcCfgConfig::Cargo(cargo_toml) => { - let mut cmd = Command::new(toolchain::cargo()); + let sysroot = match config { + RustcCfgConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) .args(["rustc", "-Z", "unstable-options", "--print", "cfg"]) @@ -82,25 +79,24 @@ fn get_rust_cfgs( cmd.args(["--target", target]); } - return utf8_stdout(cmd).context("Unable to run `cargo rustc`"); - } - RustcCfgConfig::Explicit(sysroot) => { - let rustc: std::path::PathBuf = sysroot.discover_rustc()?.into(); - tracing::debug!(?rustc, "using explicit rustc from sysroot"); - Command::new(rustc) - } - RustcCfgConfig::Discover => { - let rustc = toolchain::rustc(); - tracing::debug!(?rustc, "using rustc from env"); - Command::new(rustc) + match utf8_stdout(cmd) { + Ok(it) => return Ok(it), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print cfg`, falling back to invoking rustc directly: {e}"); + sysroot + } + } } + RustcCfgConfig::Rustc(sysroot) => sysroot, }; + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.args(["--print", "cfg", "-O"]); if let Some(target) = target { cmd.args(["--target", target]); } - utf8_stdout(cmd).context("Unable to run `rustc`") + utf8_stdout(cmd).context("unable to fetch cfgs via `rustc --print cfg -O`") } diff --git a/crates/project-model/src/sysroot.rs b/crates/project-model/src/sysroot.rs index 9e19a5258388f..07cfaba2d2ca2 100644 --- a/crates/project-model/src/sysroot.rs +++ b/crates/project-model/src/sysroot.rs @@ -4,24 +4,38 @@ //! but we can't process `.rlib` and need source code instead. The source code //! is typically installed with `rustup component add rust-src` command. 
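
The `rustc_cfg` hunk above collapses the old three-variant config into a cargo-first strategy with a direct-rustc fallback: try `cargo rustc -Z unstable-options --print cfg` in the workspace and, if that fails, invoke `rustc --print cfg -O` itself. The sketch below mirrors that fallback shape under simplifying assumptions: plain `&str` paths, tools discovered on `PATH`, and no `RUSTUP_TOOLCHAIN`/extra-env plumbing.

use std::process::Command;

// Simplified stand-in for rust-analyzer's `utf8_stdout` helper.
fn stdout_of(mut cmd: Command) -> Result<String, String> {
    let output = cmd.output().map_err(|e| e.to_string())?;
    if !output.status.success() {
        return Err(format!("{cmd:?} failed with {}", output.status));
    }
    String::from_utf8(output.stdout).map_err(|e| e.to_string())
}

// Cargo-first cfg discovery with a direct-rustc fallback, following the shape
// of the patched `get_rust_cfgs`. `manifest_dir` is an assumed plain path.
fn get_cfgs(manifest_dir: &str, target: Option<&str>) -> Result<String, String> {
    let mut cargo = Command::new("cargo");
    cargo
        .current_dir(manifest_dir)
        .args(["rustc", "-Z", "unstable-options", "--print", "cfg"])
        .env("RUSTC_BOOTSTRAP", "1");
    if let Some(target) = target {
        cargo.args(["--target", target]);
    }
    match stdout_of(cargo) {
        Ok(out) => return Ok(out),
        Err(e) => eprintln!("cargo-based cfg discovery failed, falling back to rustc: {e}"),
    }

    let mut rustc = Command::new("rustc");
    rustc.args(["--print", "cfg", "-O"]);
    if let Some(target) = target {
        rustc.args(["--target", target]);
    }
    stdout_of(rustc)
}

fn main() {
    match get_cfgs(".", None) {
        Ok(cfgs) => print!("{cfgs}"),
        Err(e) => eprintln!("error: {e}"),
    }
}
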
-use std::{env, fs, iter, ops, path::PathBuf, process::Command}; +use std::{env, fs, iter, ops, path::PathBuf, process::Command, sync::Arc}; -use anyhow::{format_err, Context, Result}; +use anyhow::{format_err, Result}; use base_db::CrateName; use itertools::Itertools; use la_arena::{Arena, Idx}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; +use toolchain::probe_for_binary; use crate::{utf8_stdout, CargoConfig, CargoWorkspace, ManifestPath}; -#[derive(Debug, Clone, Eq, PartialEq)] +#[derive(Debug, Clone)] pub struct Sysroot { root: AbsPathBuf, - src_root: AbsPathBuf, + src_root: Option>>, mode: SysrootMode, } +impl Eq for Sysroot {} +impl PartialEq for Sysroot { + fn eq(&self, other: &Self) -> bool { + self.root == other.root + && self.mode == other.mode + && match (&self.src_root, &other.src_root) { + (Some(Ok(this)), Some(Ok(other))) => this == other, + (None, None) | (Some(Err(_)), Some(Err(_))) => true, + _ => false, + } + } +} + #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum SysrootMode { Workspace(CargoWorkspace), @@ -86,8 +100,8 @@ impl Sysroot { /// Returns the sysroot "source" directory, where stdlib sources are located, like: /// `$HOME/.rustup/toolchains/nightly-2022-07-23-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library` - pub fn src_root(&self) -> &AbsPath { - &self.src_root + pub fn src_root(&self) -> Option<&AbsPath> { + self.src_root.as_ref()?.as_deref().ok() } pub fn is_empty(&self) -> bool { @@ -98,6 +112,11 @@ impl Sysroot { } pub fn loading_warning(&self) -> Option { + let src_root = match &self.src_root { + None => return Some(format!("sysroot at `{}` has no library sources", self.root)), + Some(Ok(src_root)) => src_root, + Some(Err(e)) => return Some(e.to_string()), + }; let has_core = match &self.mode { SysrootMode::Workspace(ws) => ws.packages().any(|p| ws[p].name == "core"), SysrootMode::Stitched(stitched) => stitched.by_name("core").is_some(), @@ -108,10 +127,7 @@ impl Sysroot { } else { " try running `rustup component add rust-src` to possible fix this" }; - Some(format!( - "could not find libcore in loaded sysroot at `{}`{var_note}", - self.src_root.as_path(), - )) + Some(format!("could not find libcore in loaded sysroot at `{}`{var_note}", src_root,)) } else { None } @@ -140,8 +156,19 @@ impl Sysroot { tracing::debug!("discovering sysroot for {dir}"); let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; let sysroot_src_dir = - discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata)) + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) + } + + pub fn discover_no_source( + dir: &AbsPath, + extra_env: &FxHashMap, + ) -> Result { + tracing::debug!("discovering sysroot for {dir}"); + let sysroot_dir = discover_sysroot_dir(dir, extra_env)?; + let sysroot_src_dir = + discover_sysroot_src_dir_or_add_component(&sysroot_dir, dir, extra_env); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), false)) } pub fn discover_with_src_override( @@ -152,33 +179,59 @@ impl Sysroot { ) -> Result { tracing::debug!("discovering sysroot for {current_dir}"); let sysroot_dir = discover_sysroot_dir(current_dir, extra_env)?; - Ok(Sysroot::load(sysroot_dir, src, metadata)) + Ok(Sysroot::load(sysroot_dir, Some(Ok(src)), metadata)) } pub fn discover_rustc_src(&self) -> Option { get_rustc_src(&self.root) } - pub fn discover_rustc(&self) -> anyhow::Result { - let rustc = 
self.root.join("bin/rustc"); - tracing::debug!(?rustc, "checking for rustc binary at location"); - match fs::metadata(&rustc) { - Ok(_) => Ok(rustc), - Err(e) => Err(e).context(format!( - "failed to discover rustc in sysroot: {:?}", - AsRef::::as_ref(&self.root) - )), - } - } - pub fn with_sysroot_dir(sysroot_dir: AbsPathBuf, metadata: bool) -> Result { let sysroot_src_dir = discover_sysroot_src_dir(&sysroot_dir).ok_or_else(|| { format_err!("can't load standard library from sysroot path {sysroot_dir}") - })?; - Ok(Sysroot::load(sysroot_dir, sysroot_src_dir, metadata)) + }); + Ok(Sysroot::load(sysroot_dir, Some(sysroot_src_dir), metadata)) + } + + pub fn set_rustup_toolchain_env(cmd: &mut Command, sysroot: Option<&Self>) { + if let Some(sysroot) = sysroot { + cmd.env("RUSTUP_TOOLCHAIN", AsRef::::as_ref(&sysroot.root)); + } + } + + pub fn discover_proc_macro_srv(&self) -> anyhow::Result { + ["libexec", "lib"] + .into_iter() + .map(|segment| self.root().join(segment).join("rust-analyzer-proc-macro-srv")) + .find_map(|server_path| probe_for_binary(server_path.into())) + .map(AbsPathBuf::assert) + .ok_or_else(|| { + anyhow::format_err!("cannot find proc-macro server in sysroot `{}`", self.root()) + }) } - pub fn load(sysroot_dir: AbsPathBuf, sysroot_src_dir: AbsPathBuf, metadata: bool) -> Sysroot { + pub fn load( + sysroot_dir: AbsPathBuf, + sysroot_src_dir: Option>, + metadata: bool, + ) -> Sysroot { + let sysroot_src_dir = match sysroot_src_dir { + Some(Ok(sysroot_src_dir)) => sysroot_src_dir, + Some(Err(e)) => { + return Sysroot { + root: sysroot_dir, + src_root: Some(Err(Arc::new(e))), + mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), + } + } + None => { + return Sysroot { + root: sysroot_dir, + src_root: None, + mode: SysrootMode::Stitched(Stitched { crates: Arena::default() }), + } + } + }; if metadata { let sysroot: Option<_> = (|| { let sysroot_cargo_toml = ManifestPath::try_from( @@ -187,10 +240,19 @@ impl Sysroot { .ok()?; let current_dir = AbsPathBuf::try_from(&*format!("{sysroot_src_dir}/sysroot")).ok()?; + + let mut cargo_config = CargoConfig::default(); + // the sysroot uses `public-dependency`, so we make cargo think it's a nightly + cargo_config.extra_env.insert( + "__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS".to_owned(), + "nightly".to_owned(), + ); + let res = CargoWorkspace::fetch_metadata( &sysroot_cargo_toml, ¤t_dir, - &CargoConfig::default(), + &cargo_config, + None, &|_| (), ) .map_err(|e| { @@ -274,7 +336,7 @@ impl Sysroot { let cargo_workspace = CargoWorkspace::new(res); Some(Sysroot { root: sysroot_dir.clone(), - src_root: sysroot_src_dir.clone(), + src_root: Some(Ok(sysroot_src_dir.clone())), mode: SysrootMode::Workspace(cargo_workspace), }) })(); @@ -326,7 +388,7 @@ impl Sysroot { } Sysroot { root: sysroot_dir, - src_root: sysroot_src_dir, + src_root: Some(Ok(sysroot_src_dir)), mode: SysrootMode::Stitched(stitched), } } diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index cb995857ec7dc..af635dda5782d 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -3,38 +3,58 @@ use std::process::Command; use rustc_hash::FxHashMap; -use crate::{utf8_stdout, ManifestPath}; +use crate::{utf8_stdout, ManifestPath, Sysroot}; + +/// Determines how `rustc --print target-spec-json` is discovered and invoked. 
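
The patched `target_data_layout::get` below no longer returns the raw `--print target-spec-json` output; it pulls out just the `data-layout` field by string splitting. Here is a small sketch of that extraction, assuming a `rustc` on `PATH` and using `RUSTC_BOOTSTRAP=1` to allow the unstable flag, and skipping the cargo-first branch.

use std::process::Command;

// Pull the `data-layout` value out of `--print target-spec-json` output with
// plain string splitting — the same trick as the `process` closure below,
// with no JSON parser involved.
fn extract_data_layout(spec_json: &str) -> Option<String> {
    let after_key = spec_json.split_once(r#""data-layout": ""#)?.1;
    Some(after_key.split_once('"')?.0.to_owned())
}

fn main() -> std::io::Result<()> {
    // `--print target-spec-json` is unstable, hence `-Z unstable-options`
    // plus `RUSTC_BOOTSTRAP=1` so it also works on a stable toolchain.
    let output = Command::new("rustc")
        .args(["-Z", "unstable-options", "--print", "target-spec-json"])
        .env("RUSTC_BOOTSTRAP", "1")
        .output()?;
    let stdout = String::from_utf8_lossy(&output.stdout);
    match extract_data_layout(&stdout) {
        Some(layout) => println!("data-layout: {layout}"),
        None => eprintln!("no data-layout field found in rustc's output"),
    }
    Ok(())
}
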
+pub enum RustcDataLayoutConfig<'a> { + /// Use `rustc --print target-spec-json`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::rustc`]. + Rustc(Option<&'a Sysroot>), + /// Use `cargo --print target-spec-json`, either from with the binary from the sysroot or by discovering via + /// [`toolchain::cargo`]. + Cargo(Option<&'a Sysroot>, &'a ManifestPath), +} pub fn get( - cargo_toml: Option<&ManifestPath>, + config: RustcDataLayoutConfig<'_>, target: Option<&str>, extra_env: &FxHashMap, ) -> anyhow::Result { - let output = (|| { - if let Some(cargo_toml) = cargo_toml { - let mut cmd = Command::new(toolchain::rustc()); + let process = |output: String| { + (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() + .ok_or_else(|| { + anyhow::format_err!("could not fetch target-spec-json from command output") + }) + }; + let sysroot = match config { + RustcDataLayoutConfig::Cargo(sysroot, cargo_toml) => { + let mut cmd = Command::new(toolchain::Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); } match utf8_stdout(cmd) { - Ok(it) => return Ok(it), - Err(e) => tracing::debug!("{e:?}: falling back to querying rustc for cfgs"), + Ok(output) => return process(output), + Err(e) => { + tracing::warn!("failed to run `cargo rustc --print target-spec-json`, falling back to invoking rustc directly: {e}"); + sysroot + } } } - // using unstable cargo features failed, fall back to using plain rustc - let mut cmd = Command::new(toolchain::rustc()); - cmd.envs(extra_env) - .args(["-Z", "unstable-options", "--print", "target-spec-json"]) - .env("RUSTC_BOOTSTRAP", "1"); - if let Some(target) = target { - cmd.args(["--target", target]); - } - utf8_stdout(cmd) - })()?; - (|| Some(output.split_once(r#""data-layout": ""#)?.1.split_once('"')?.0.to_owned()))() - .ok_or_else(|| anyhow::format_err!("could not fetch target-spec-json from command output")) + RustcDataLayoutConfig::Rustc(sysroot) => sysroot, + }; + + let mut cmd = Command::new(toolchain::Tool::Rustc.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); + cmd.envs(extra_env) + .args(["-Z", "unstable-options", "--print", "target-spec-json"]) + .env("RUSTC_BOOTSTRAP", "1"); + if let Some(target) = target { + cmd.args(["--target", target]); + } + process(utf8_stdout(cmd)?) 
} diff --git a/crates/project-model/src/tests.rs b/crates/project-model/src/tests.rs index 74042e925ede4..b9b1b701f6d40 100644 --- a/crates/project-model/src/tests.rs +++ b/crates/project-model/src/tests.rs @@ -9,6 +9,7 @@ use expect_test::{expect_file, ExpectFile}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::FxHashMap; use serde::de::DeserializeOwned; +use triomphe::Arc; use crate::{ CargoWorkspace, CfgOverrides, ProjectJson, ProjectJsonData, ProjectWorkspace, Sysroot, @@ -34,6 +35,7 @@ fn load_cargo_with_overrides( cfg_overrides, toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; to_crate_graph(project_workspace) } @@ -53,6 +55,7 @@ fn load_cargo_with_fake_sysroot( cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { @@ -69,8 +72,13 @@ fn load_rust_project(file: &str) -> (CrateGraph, ProcMacroPaths) { let data = get_test_json_file(file); let project = rooted_project_json(data); let sysroot = Ok(get_fake_sysroot()); - let project_workspace = - ProjectWorkspace::Json { project, sysroot, rustc_cfg: Vec::new(), toolchain: None }; + let project_workspace = ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg: Vec::new(), + toolchain: None, + target_layout: Err(Arc::from("test has no data layout")), + }; to_crate_graph(project_workspace) } @@ -125,7 +133,7 @@ fn get_fake_sysroot() -> Sysroot { // fake sysroot, so we give them both the same path: let sysroot_dir = AbsPathBuf::assert(sysroot_path); let sysroot_src_dir = sysroot_dir.clone(); - Sysroot::load(sysroot_dir, sysroot_src_dir, false) + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) } fn rooted_project_json(data: ProjectJsonData) -> ProjectJson { @@ -230,7 +238,7 @@ fn crate_graph_dedup_identical() { let (d_crate_graph, mut d_proc_macros) = (crate_graph.clone(), proc_macros.clone()); - crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |_| ()); + crate_graph.extend(d_crate_graph.clone(), &mut d_proc_macros, |(_, a), (_, b)| a == b); assert!(crate_graph.iter().eq(d_crate_graph.iter())); assert_eq!(proc_macros, d_proc_macros); } @@ -246,62 +254,10 @@ fn crate_graph_dedup() { load_cargo_with_fake_sysroot(path_map, "regex-metadata.json"); assert_eq!(regex_crate_graph.iter().count(), 60); - crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |_| ()); + crate_graph.extend(regex_crate_graph, &mut regex_proc_macros, |(_, a), (_, b)| a == b); assert_eq!(crate_graph.iter().count(), 118); } -#[test] -fn test_deduplicate_origin_dev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - -#[test] -fn test_deduplicate_origin_dev_rev() { - let path_map = &mut Default::default(); - let (mut crate_graph, _proc_macros) = - 
load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_B.json"); - crate_graph.sort_deps(); - let (crate_graph_1, mut _proc_macros_2) = - load_cargo_with_fake_sysroot(path_map, "deduplication_crate_graph_A.json"); - - crate_graph.extend(crate_graph_1, &mut _proc_macros_2, |_| ()); - - let mut crates_named_p2 = vec![]; - for id in crate_graph.iter() { - let krate = &crate_graph[id]; - if let Some(name) = krate.display_name.as_ref() { - if name.to_string() == "p2" { - crates_named_p2.push(krate); - } - } - } - - assert!(crates_named_p2.len() == 1); - let p2 = crates_named_p2[0]; - assert!(p2.origin.is_local()); -} - #[test] fn smoke_test_real_sysroot_cargo() { if std::env::var("SYSROOT_CARGO_METADATA").is_err() { @@ -327,6 +283,7 @@ fn smoke_test_real_sysroot_cargo() { cfg_overrides: Default::default(), toolchain: None, target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), }; project_workspace.to_crate_graph( &mut { diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index cda5ad2f1109f..b7ae76be8cec0 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -6,14 +6,15 @@ use std::{collections::VecDeque, fmt, fs, iter, process::Command, str::FromStr, use anyhow::{format_err, Context}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileId, LangCrateOrigin, ProcMacroPaths, TargetLayoutLoadResult, }; use cfg::{CfgAtom, CfgDiff, CfgOptions}; use paths::{AbsPath, AbsPathBuf}; use rustc_hash::{FxHashMap, FxHashSet}; use semver::Version; use stdx::always; +use toolchain::Tool; use triomphe::Arc; use crate::{ @@ -23,8 +24,9 @@ use crate::{ project_json::Crate, rustc_cfg::{self, RustcCfgConfig}, sysroot::{SysrootCrate, SysrootMode}, - target_data_layout, utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, - Package, ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, + target_data_layout::{self, RustcDataLayoutConfig}, + utf8_stdout, CargoConfig, CargoWorkspace, InvocationStrategy, ManifestPath, Package, + ProjectJson, ProjectManifest, Sysroot, TargetData, TargetKind, WorkspaceBuildScripts, }; /// A set of cfg-overrides per crate. @@ -69,7 +71,8 @@ pub enum ProjectWorkspace { rustc_cfg: Vec, cfg_overrides: CfgOverrides, toolchain: Option, - target_layout: Result, + target_layout: TargetLayoutLoadResult, + cargo_config_extra_env: FxHashMap, }, /// Project workspace was manually specified using a `rust-project.json` file. Json { @@ -79,6 +82,7 @@ pub enum ProjectWorkspace { /// `rustc --print cfg`. rustc_cfg: Vec, toolchain: Option, + target_layout: TargetLayoutLoadResult, }, // FIXME: The primary limitation of this approach is that the set of detached files needs to be fixed at the beginning. // That's not the end user experience we should strive for. 
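
The following `workspace.rs` hunk hoists the inline version closure into a `get_toolchain_version` helper that runs `<tool> --version`, strips a known prefix such as `cargo `, and parses the first whitespace-separated token. A minimal sketch of the same parsing, assuming the `semver` crate (already a dependency of `project-model`) and omitting the sysroot/extra-env setup and error context:

use std::process::Command;

use semver::Version; // assumed available as a dependency, as in project-model

// Query `<tool> --version` and parse a `Version` out of it, following the
// shape of `get_toolchain_version`: strip a known prefix and parse the first
// whitespace-separated token.
fn toolchain_version(tool: &str, prefix: &str) -> Option<Version> {
    let output = Command::new(tool).arg("--version").output().ok()?;
    let stdout = String::from_utf8(output.stdout).ok()?;
    // e.g. "cargo 1.75.0 (abc1234 2024-01-01)" -> "1.75.0"
    let version = stdout.get(prefix.len()..)?.split_whitespace().next()?;
    Version::parse(version).ok()
}

fn main() {
    match toolchain_version("cargo", "cargo ") {
        Some(version) => println!("cargo {version}"),
        None => eprintln!("could not determine the cargo version"),
    }
}
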
@@ -111,7 +115,8 @@ impl fmt::Debug for ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout, + target_layout, + cargo_config_extra_env, } => f .debug_struct("Cargo") .field("root", &cargo.workspace_root().file_name()) @@ -124,16 +129,25 @@ impl fmt::Debug for ProjectWorkspace { .field("n_rustc_cfg", &rustc_cfg.len()) .field("n_cfg_overrides", &cfg_overrides.len()) .field("toolchain", &toolchain) - .field("data_layout", &data_layout) + .field("data_layout", &target_layout) + .field("cargo_config_extra_env", &cargo_config_extra_env) .finish(), - ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => { + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout, + } => { let mut debug_struct = f.debug_struct("Json"); debug_struct.field("n_crates", &project.n_crates()); if let Ok(sysroot) = sysroot { debug_struct.field("n_sysroot_crates", &sysroot.num_packages()); } - debug_struct.field("toolchain", &toolchain); - debug_struct.field("n_rustc_cfg", &rustc_cfg.len()); + debug_struct + .field("toolchain", &toolchain) + .field("n_rustc_cfg", &rustc_cfg.len()) + .field("data_layout", &data_layout); debug_struct.finish() } ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f @@ -146,6 +160,28 @@ impl fmt::Debug for ProjectWorkspace { } } +fn get_toolchain_version( + current_dir: &AbsPath, + sysroot: Option<&Sysroot>, + tool: Tool, + extra_env: &FxHashMap, + prefix: &str, +) -> Result, anyhow::Error> { + let cargo_version = utf8_stdout({ + let mut cmd = Command::new(tool.path()); + Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); + cmd.envs(extra_env); + cmd.arg("--version").current_dir(current_dir); + cmd + }) + .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain setup correctly?"))?; + anyhow::Ok( + cargo_version + .get(prefix.len()..) + .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()), + ) +} + impl ProjectWorkspace { pub fn load( manifest: ProjectManifest, @@ -161,20 +197,6 @@ impl ProjectWorkspace { config: &CargoConfig, progress: &dyn Fn(String), ) -> anyhow::Result { - let version = |current_dir, cmd_path, prefix: &str| { - let cargo_version = utf8_stdout({ - let mut cmd = Command::new(cmd_path); - cmd.envs(&config.extra_env); - cmd.arg("--version").current_dir(current_dir); - cmd - }) - .with_context(|| format!("Failed to query rust toolchain version at {current_dir}, is your toolchain setup correctly?"))?; - anyhow::Ok( - cargo_version - .get(prefix.len()..) 
- .and_then(|it| Version::parse(it.split_whitespace().next()?).ok()), - ) - }; let res = match manifest { ProjectManifest::ProjectJson(project_json) => { let file = fs::read_to_string(project_json) @@ -182,30 +204,14 @@ impl ProjectWorkspace { let data = serde_json::from_str(&file) .with_context(|| format!("Failed to deserialize json file {project_json}"))?; let project_location = project_json.parent().to_path_buf(); - let toolchain = version(&*project_location, toolchain::rustc(), "rustc ")?; - let project_json = ProjectJson::new(&project_location, data); + let project_json: ProjectJson = ProjectJson::new(&project_location, data); ProjectWorkspace::load_inline( project_json, config.target.as_deref(), &config.extra_env, - toolchain, ) } ProjectManifest::CargoToml(cargo_toml) => { - let toolchain = version(cargo_toml.parent(), toolchain::cargo(), "cargo ")?; - let meta = CargoWorkspace::fetch_metadata( - cargo_toml, - cargo_toml.parent(), - config, - progress, - ) - .with_context(|| { - format!( - "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", - ) - })?; - let cargo = CargoWorkspace::new(meta); - let sysroot = match (&config.sysroot, &config.sysroot_src) { (Some(RustLibSource::Path(path)), None) => { Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata).map_err(|e| { @@ -218,7 +224,7 @@ impl ProjectWorkspace { }) } (Some(RustLibSource::Path(sysroot)), Some(sysroot_src)) => { - Ok(Sysroot::load(sysroot.clone(), sysroot_src.clone(), config.sysroot_query_metadata)) + Ok(Sysroot::load(sysroot.clone(), Some(Ok(sysroot_src.clone())), config.sysroot_query_metadata)) } (Some(RustLibSource::Discover), Some(sysroot_src)) => { Sysroot::discover_with_src_override( @@ -231,18 +237,19 @@ impl ProjectWorkspace { } (None, _) => Err(None), }; + let sysroot_ref = sysroot.as_ref().ok(); if let Ok(sysroot) = &sysroot { - tracing::info!(workspace = %cargo_toml, src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); + tracing::info!(workspace = %cargo_toml, src_root = ?sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); } let rustc_dir = match &config.rustc_source { Some(RustLibSource::Path(path)) => ManifestPath::try_from(path.clone()) .map_err(|p| Some(format!("rustc source path is not absolute: {p}"))), Some(RustLibSource::Discover) => { - sysroot.as_ref().ok().and_then(Sysroot::discover_rustc_src).ok_or_else( - || Some("Failed to discover rustc source for sysroot.".to_owned()), - ) + sysroot_ref.and_then(Sysroot::discover_rustc_src).ok_or_else(|| { + Some("Failed to discover rustc source for sysroot.".to_owned()) + }) } None => Err(None), }; @@ -256,6 +263,7 @@ impl ProjectWorkspace { features: crate::CargoFeatures::default(), ..config.clone() }, + sysroot_ref, progress, ) { Ok(meta) => { @@ -264,6 +272,7 @@ impl ProjectWorkspace { &workspace, cargo_toml.parent(), &config.extra_env, + sysroot_ref ); Ok(Box::new((workspace, buildscripts))) } @@ -279,21 +288,45 @@ impl ProjectWorkspace { } }); + let toolchain = get_toolchain_version( + cargo_toml.parent(), + sysroot_ref, + toolchain::Tool::Cargo, + &config.extra_env, + "cargo ", + )?; let rustc_cfg = rustc_cfg::get( config.target.as_deref(), &config.extra_env, - RustcCfgConfig::Cargo(cargo_toml), + RustcCfgConfig::Cargo(sysroot_ref, cargo_toml), ); let cfg_overrides = config.cfg_overrides.clone(); let data_layout = target_data_layout::get( - Some(cargo_toml), + RustcDataLayoutConfig::Cargo(sysroot_ref, cargo_toml), config.target.as_deref(), &config.extra_env, ); if let Err(e) = 
&data_layout { tracing::error!(%e, "failed fetching data layout for {cargo_toml:?} workspace"); } + + let meta = CargoWorkspace::fetch_metadata( + cargo_toml, + cargo_toml.parent(), + config, + sysroot_ref, + progress, + ) + .with_context(|| { + format!( + "Failed to read Cargo metadata from Cargo.toml file {cargo_toml}, {toolchain:?}", + ) + })?; + let cargo = CargoWorkspace::new(meta); + + let cargo_config_extra_env = + cargo_config_env(cargo_toml, &config.extra_env, sysroot_ref); ProjectWorkspace::Cargo { cargo, build_scripts: WorkspaceBuildScripts::default(), @@ -302,7 +335,10 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, - target_layout: data_layout.map_err(|it| it.to_string()), + target_layout: data_layout + .map(Arc::from) + .map_err(|it| Arc::from(it.to_string())), + cargo_config_extra_env, } } }; @@ -314,15 +350,16 @@ impl ProjectWorkspace { project_json: ProjectJson, target: Option<&str>, extra_env: &FxHashMap, - toolchain: Option, ) -> ProjectWorkspace { let sysroot = match (project_json.sysroot.clone(), project_json.sysroot_src.clone()) { - (Some(sysroot), Some(sysroot_src)) => Ok(Sysroot::load(sysroot, sysroot_src, false)), + (Some(sysroot), Some(sysroot_src)) => { + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) + } (Some(sysroot), None) => { // assume sysroot is structured like rustup's and guess `sysroot_src` let sysroot_src = sysroot.join("lib").join("rustlib").join("src").join("rust").join("library"); - Ok(Sysroot::load(sysroot, sysroot_src, false)) + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) } (None, Some(sysroot_src)) => { // assume sysroot is structured like rustup's and guess `sysroot` @@ -330,23 +367,36 @@ impl ProjectWorkspace { for _ in 0..5 { sysroot.pop(); } - Ok(Sysroot::load(sysroot, sysroot_src, false)) + Ok(Sysroot::load(sysroot, Some(Ok(sysroot_src)), false)) } (None, None) => Err(None), }; - let config = match &sysroot { - Ok(sysroot) => { - tracing::debug!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); - RustcCfgConfig::Explicit(sysroot) - } - Err(_) => { - tracing::debug!("discovering sysroot"); - RustcCfgConfig::Discover + let sysroot_ref = sysroot.as_ref().ok(); + let cfg_config = RustcCfgConfig::Rustc(sysroot_ref); + let data_layout_config = RustcDataLayoutConfig::Rustc(sysroot_ref); + let toolchain = match get_toolchain_version( + project_json.path(), + sysroot_ref, + toolchain::Tool::Rustc, + extra_env, + "rustc ", + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("{e}"); + None } }; - let rustc_cfg = rustc_cfg::get(target, extra_env, config); - ProjectWorkspace::Json { project: project_json, sysroot, rustc_cfg, toolchain } + let rustc_cfg = rustc_cfg::get(target, extra_env, cfg_config); + let data_layout = target_data_layout::get(data_layout_config, target, extra_env); + ProjectWorkspace::Json { + project: project_json, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + } } pub fn load_detached_files( @@ -373,18 +423,11 @@ impl ProjectWorkspace { } None => Err(None), }; - let rustc_config = match &sysroot { - Ok(sysroot) => { - tracing::info!(src_root = %sysroot.src_root(), root = %sysroot.root(), "Using sysroot"); - RustcCfgConfig::Explicit(sysroot) - } - Err(_) => { - tracing::info!("discovering sysroot"); - RustcCfgConfig::Discover - } - }; - - let rustc_cfg = rustc_cfg::get(None, &FxHashMap::default(), rustc_config); + let rustc_cfg = rustc_cfg::get( + None, + &FxHashMap::default(), + 
RustcCfgConfig::Rustc(sysroot.as_ref().ok()), + ); Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) } @@ -395,11 +438,17 @@ impl ProjectWorkspace { progress: &dyn Fn(String), ) -> anyhow::Result { match self { - ProjectWorkspace::Cargo { cargo, toolchain, .. } => { - WorkspaceBuildScripts::run_for_workspace(config, cargo, progress, toolchain) - .with_context(|| { - format!("Failed to run build scripts for {}", cargo.workspace_root()) - }) + ProjectWorkspace::Cargo { cargo, toolchain, sysroot, .. } => { + WorkspaceBuildScripts::run_for_workspace( + config, + cargo, + progress, + toolchain, + sysroot.as_ref().ok(), + ) + .with_context(|| { + format!("Failed to run build scripts for {}", cargo.workspace_root()) + }) } ProjectWorkspace::Json { .. } | ProjectWorkspace::DetachedFiles { .. } => { Ok(WorkspaceBuildScripts::default()) @@ -472,18 +521,7 @@ impl ProjectWorkspace { ProjectWorkspace::Cargo { sysroot: Ok(sysroot), .. } | ProjectWorkspace::Json { sysroot: Ok(sysroot), .. } | ProjectWorkspace::DetachedFiles { sysroot: Ok(sysroot), .. } => { - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - ["libexec", "lib"] - .into_iter() - .map(|segment| sysroot.root().join(segment).join(&standalone_server_name)) - .find(|server_path| std::fs::metadata(server_path).is_ok()) - .ok_or_else(|| { - anyhow::format_err!( - "cannot find proc-macro server in sysroot `{}`", - sysroot.root() - ) - }) + sysroot.discover_proc_macro_srv() } ProjectWorkspace::DetachedFiles { .. } => { Err(anyhow::format_err!("cannot find proc-macro server, no sysroot was found")) @@ -503,8 +541,7 @@ impl ProjectWorkspace { /// The return type contains the path and whether or not /// the root is a member of the current workspace pub fn to_roots(&self) -> Vec { - let mk_sysroot = |sysroot: Result<_, _>, project_root: Option<&AbsPath>| { - let project_root = project_root.map(ToOwned::to_owned); + let mk_sysroot = |sysroot: Result<_, _>| { sysroot.into_iter().flat_map(move |sysroot: &Sysroot| { let mut r = match sysroot.mode() { SysrootMode::Workspace(ws) => ws @@ -532,18 +569,21 @@ impl ProjectWorkspace { }; r.push(PackageRoot { - // mark the sysroot as mutable if it is located inside of the project - is_local: project_root - .as_ref() - .map_or(false, |project_root| sysroot.src_root().starts_with(project_root)), - include: vec![sysroot.src_root().to_path_buf()], + is_local: false, + include: sysroot.src_root().map(|it| it.to_path_buf()).into_iter().collect(), exclude: Vec::new(), }); r }) }; match self { - ProjectWorkspace::Json { project, sysroot, rustc_cfg: _, toolchain: _ } => project + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg: _, + toolchain: _, + target_layout: _, + } => project .crates() .map(|(_, krate)| PackageRoot { is_local: krate.is_workspace_member, @@ -552,7 +592,7 @@ impl ProjectWorkspace { }) .collect::>() .into_iter() - .chain(mk_sysroot(sysroot.as_ref(), Some(project.path()))) + .chain(mk_sysroot(sysroot.as_ref())) .collect::>(), ProjectWorkspace::Cargo { cargo, @@ -563,6 +603,7 @@ impl ProjectWorkspace { build_scripts, toolchain: _, target_layout: _, + cargo_config_extra_env: _, } => { cargo .packages() @@ -586,7 +627,7 @@ impl ProjectWorkspace { let extra_targets = cargo[pkg] .targets .iter() - .filter(|&&tgt| cargo[tgt].kind == TargetKind::Lib) + .filter(|&&tgt| matches!(cargo[tgt].kind, TargetKind::Lib { .. 
})) .filter_map(|&tgt| cargo[tgt].root.parent()) .map(|tgt| tgt.normalize().to_path_buf()) .filter(|path| !path.starts_with(&pkg_root)); @@ -602,7 +643,7 @@ impl ProjectWorkspace { } PackageRoot { is_local, include, exclude } }) - .chain(mk_sysroot(sysroot.as_ref(), Some(cargo.workspace_root()))) + .chain(mk_sysroot(sysroot.as_ref())) .chain(rustc.iter().map(|a| a.as_ref()).flat_map(|(rustc, _)| { rustc.packages().map(move |krate| PackageRoot { is_local: false, @@ -619,7 +660,7 @@ impl ProjectWorkspace { include: vec![detached_file.clone()], exclude: Vec::new(), }) - .chain(mk_sysroot(sysroot.as_ref(), None)) + .chain(mk_sysroot(sysroot.as_ref())) .collect(), } } @@ -651,17 +692,19 @@ impl ProjectWorkspace { let _p = tracing::span!(tracing::Level::INFO, "ProjectWorkspace::to_crate_graph").entered(); let (mut crate_graph, proc_macros) = match self { - ProjectWorkspace::Json { project, sysroot, rustc_cfg, toolchain } => { - project_json_to_crate_graph( - rustc_cfg.clone(), - load, - project, - sysroot.as_ref().ok(), - extra_env, - Err("rust-project.json projects have no target layout set".into()), - toolchain.clone(), - ) - } + ProjectWorkspace::Json { + project, + sysroot, + rustc_cfg, + toolchain: _, + target_layout: _, + } => project_json_to_crate_graph( + rustc_cfg.clone(), + load, + project, + sysroot.as_ref().ok(), + extra_env, + ), ProjectWorkspace::Cargo { cargo, sysroot, @@ -669,8 +712,9 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, build_scripts, - toolchain, - target_layout, + toolchain: _, + target_layout: _, + cargo_config_extra_env: _, } => cargo_to_crate_graph( load, rustc.as_ref().map(|a| a.as_ref()).ok(), @@ -679,20 +723,9 @@ impl ProjectWorkspace { rustc_cfg.clone(), cfg_overrides, build_scripts, - match target_layout.as_ref() { - Ok(it) => Ok(Arc::from(it.as_str())), - Err(it) => Err(Arc::from(it.as_str())), - }, - toolchain.as_ref(), ), ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { - detached_files_to_crate_graph( - rustc_cfg.clone(), - load, - files, - sysroot.as_ref().ok(), - Err("detached file projects have no target layout set".into()), - ) + detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) } }; if crate_graph.patch_cfg_if() { @@ -713,6 +746,7 @@ impl ProjectWorkspace { rustc_cfg, cfg_overrides, toolchain, + cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -723,6 +757,7 @@ impl ProjectWorkspace { rustc_cfg: o_rustc_cfg, cfg_overrides: o_cfg_overrides, toolchain: o_toolchain, + cargo_config_extra_env: o_cargo_config_extra_env, build_scripts: _, target_layout: _, }, @@ -733,14 +768,16 @@ impl ProjectWorkspace { && cfg_overrides == o_cfg_overrides && toolchain == o_toolchain && sysroot == o_sysroot + && cargo_config_extra_env == o_cargo_config_extra_env } ( - Self::Json { project, sysroot, rustc_cfg, toolchain }, + Self::Json { project, sysroot, rustc_cfg, toolchain, target_layout: _ }, Self::Json { project: o_project, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg, toolchain: o_toolchain, + target_layout: _, }, ) => { project == o_project @@ -771,21 +808,12 @@ fn project_json_to_crate_graph( project: &ProjectJson, sysroot: Option<&Sysroot>, extra_env: &FxHashMap, - target_layout: TargetLayoutLoadResult, - toolchain: Option, ) -> (CrateGraph, ProcMacroPaths) { let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let (crate_graph, proc_macros) = &mut res; - let sysroot_deps = sysroot.as_ref().map(|sysroot| { - sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - 
target_layout.clone(), - load, - toolchain.as_ref(), - ) - }); + let sysroot_deps = sysroot + .as_ref() + .map(|sysroot| sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load)); let r_a_cfg_flag = CfgFlag::Atom("rust_analyzer".to_owned()); let mut cfg_cache: FxHashMap<&str, Vec> = FxHashMap::default(); @@ -813,12 +841,7 @@ fn project_json_to_crate_graph( let target_cfgs = match target.as_deref() { Some(target) => cfg_cache.entry(target).or_insert_with(|| { - let rustc_cfg = match sysroot { - Some(sysroot) => RustcCfgConfig::Explicit(sysroot), - None => RustcCfgConfig::Discover, - }; - - rustc_cfg::get(Some(target), extra_env, rustc_cfg) + rustc_cfg::get(Some(target), extra_env, RustcCfgConfig::Rustc(sysroot)) }), None => &rustc_cfg, }; @@ -845,8 +868,6 @@ fn project_json_to_crate_graph( } else { CrateOrigin::Local { repo: None, name: None } }, - target_layout.clone(), - toolchain.clone(), ); if *is_proc_macro { if let Some(path) = proc_macro_dylib_path.clone() { @@ -873,7 +894,7 @@ fn project_json_to_crate_graph( for dep in &krate.deps { if let Some(&to) = crates.get(&dep.crate_id) { - add_dep(crate_graph, from, dep.name.clone(), to, dep.kind().to_owned()) + add_dep(crate_graph, from, dep.name.clone(), to) } } } @@ -889,22 +910,13 @@ fn cargo_to_crate_graph( rustc_cfg: Vec, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "cargo_to_crate_graph").entered(); let mut res = (CrateGraph::default(), ProcMacroPaths::default()); let crate_graph = &mut res.0; let proc_macros = &mut res.1; let (public_deps, libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - toolchain, - ), + Some(sysroot) => sysroot_to_crate_graph(crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -926,8 +938,6 @@ fn cargo_to_crate_graph( // Add test cfg for local crates if cargo[pkg].is_local { cfg_options.insert_atom("test".into()); - } - if cargo[pkg].is_member { cfg_options.insert_atom("rust_analyzer".into()); } @@ -949,7 +959,7 @@ fn cargo_to_crate_graph( let mut lib_tgt = None; for &tgt in cargo[pkg].targets.iter() { - if cargo[tgt].kind != TargetKind::Lib && !cargo[pkg].is_member { + if !matches!(cargo[tgt].kind, TargetKind::Lib { .. }) && !cargo[pkg].is_member { // For non-workspace-members, Cargo does not resolve dev-dependencies, so we don't // add any targets except the library target, since those will not work correctly if // they use dev-dependencies. @@ -957,46 +967,46 @@ fn cargo_to_crate_graph( // https://github.com/rust-lang/rust-analyzer/issues/11300 continue; } - let &TargetData { ref name, kind, is_proc_macro, ref root, .. } = &cargo[tgt]; - - if kind == TargetKind::Lib - && sysroot.map_or(false, |sysroot| root.starts_with(sysroot.src_root())) - { - if let Some(&(_, crate_id, _)) = - public_deps.deps.iter().find(|(dep_name, ..)| dep_name.as_smol_str() == name) - { - pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind)); - - lib_tgt = Some((crate_id, name.clone())); - pkg_to_lib_crate.insert(pkg, crate_id); - // sysroot is inside the workspace, prevent the sysroot crates from being duplicated here - continue; - } - } + let &TargetData { ref name, kind, ref root, .. 
} = &cargo[tgt]; let Some(file_id) = load(root) else { continue }; + let build_data = build_scripts.get_output(pkg); + let pkg_data = &cargo[pkg]; let crate_id = add_target_crate_root( crate_graph, proc_macros, - &cargo[pkg], - build_scripts.get_output(pkg), + pkg_data, + build_data, cfg_options.clone(), file_id, name, - is_proc_macro, - target_layout.clone(), - false, - toolchain.cloned(), + kind, + if pkg_data.is_local { + CrateOrigin::Local { + repo: pkg_data.repository.clone(), + name: Some(pkg_data.name.clone()), + } + } else { + CrateOrigin::Library { + repo: pkg_data.repository.clone(), + name: pkg_data.name.clone(), + } + }, ); - if kind == TargetKind::Lib { + if let TargetKind::Lib { .. } = kind { lib_tgt = Some((crate_id, name.clone())); pkg_to_lib_crate.insert(pkg, crate_id); } // Even crates that don't set proc-macro = true are allowed to depend on proc_macro // (just none of the APIs work when called outside of a proc macro). if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); + add_proc_macro_dep( + crate_graph, + crate_id, + proc_macro, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), + ); } pkg_crates.entry(pkg).or_insert_with(Vec::new).push((crate_id, kind)); @@ -1016,7 +1026,7 @@ fn cargo_to_crate_graph( // cargo metadata does not do any normalization, // so we do it ourselves currently let name = CrateName::normalize_dashes(&name); - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1036,17 +1046,7 @@ fn cargo_to_crate_graph( continue; } - add_dep( - crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ) + add_dep(crate_graph, from, name.clone(), to) } } } @@ -1074,8 +1074,6 @@ fn cargo_to_crate_graph( } else { rustc_build_scripts }, - target_layout, - toolchain, ); } } @@ -1087,19 +1085,11 @@ fn detached_files_to_crate_graph( load: &mut dyn FnMut(&AbsPath) -> Option, detached_files: &[AbsPathBuf], sysroot: Option<&Sysroot>, - target_layout: TargetLayoutLoadResult, ) -> (CrateGraph, ProcMacroPaths) { let _p = tracing::span!(tracing::Level::INFO, "detached_files_to_crate_graph").entered(); let mut crate_graph = CrateGraph::default(); let (public_deps, _libproc_macro) = match sysroot { - Some(sysroot) => sysroot_to_crate_graph( - &mut crate_graph, - sysroot, - rustc_cfg.clone(), - target_layout.clone(), - load, - None, - ), + Some(sysroot) => sysroot_to_crate_graph(&mut crate_graph, sysroot, rustc_cfg.clone(), load), None => (SysrootPublicDeps::default(), None), }; @@ -1131,8 +1121,6 @@ fn detached_files_to_crate_graph( repo: None, name: display_name.map(|n| n.canonical_name().to_owned()), }, - target_layout.clone(), - None, ); public_deps.add_to_crate_graph(&mut crate_graph, detached_file_crate); @@ -1153,8 +1141,6 @@ fn handle_rustc_crates( cfg_options: &CfgOptions, override_cfg: &CfgOverrides, build_scripts: &WorkspaceBuildScripts, - target_layout: TargetLayoutLoadResult, - toolchain: Option<&Version>, ) { let mut rustc_pkg_crates = FxHashMap::default(); // The root package of the rustc-dev component is rustc_driver, so we match that @@ -1194,9 +1180,9 @@ fn handle_rustc_crates( }; for &tgt in rustc_workspace[pkg].targets.iter() { - if rustc_workspace[tgt].kind != TargetKind::Lib { + let kind @ TargetKind::Lib { is_proc_macro } = rustc_workspace[tgt].kind else { continue; - } + }; if let 
Some(file_id) = load(&rustc_workspace[tgt].root) { let crate_id = add_target_crate_root( crate_graph, @@ -1206,21 +1192,14 @@ fn handle_rustc_crates( cfg_options.clone(), file_id, &rustc_workspace[tgt].name, - rustc_workspace[tgt].is_proc_macro, - target_layout.clone(), - true, - toolchain.cloned(), + kind, + CrateOrigin::Rustc { name: rustc_workspace[pkg].name.clone() }, ); pkg_to_lib_crate.insert(pkg, crate_id); // Add dependencies on core / std / alloc for this crate public_deps.add_to_crate_graph(crate_graph, crate_id); if let Some(proc_macro) = libproc_macro { - add_proc_macro_dep( - crate_graph, - crate_id, - proc_macro, - rustc_workspace[tgt].is_proc_macro, - ); + add_proc_macro_dep(crate_graph, crate_id, proc_macro, is_proc_macro); } rustc_pkg_crates.entry(pkg).or_insert_with(Vec::new).push(crate_id); } @@ -1234,17 +1213,7 @@ fn handle_rustc_crates( let name = CrateName::new(&dep.name).unwrap(); if let Some(&to) = pkg_to_lib_crate.get(&dep.pkg) { for &from in rustc_pkg_crates.get(&pkg).into_iter().flatten() { - add_dep( - crate_graph, - from, - name.clone(), - to, - match dep.kind { - DepKind::Normal => DependencyKind::Normal, - DepKind::Dev => DependencyKind::Dev, - DepKind::Build => DependencyKind::Build, - }, - ); + add_dep(crate_graph, from, name.clone(), to); } } } @@ -1266,7 +1235,7 @@ fn handle_rustc_crates( // `rust_analyzer` thinks that it should use the one from the `rustc_source` // instead of the one from `crates.io` if !crate_graph[*from].dependencies.iter().any(|d| d.name == name) { - add_dep(crate_graph, *from, name.clone(), to, DependencyKind::Normal); + add_dep(crate_graph, *from, name.clone(), to); } } } @@ -1282,10 +1251,8 @@ fn add_target_crate_root( cfg_options: CfgOptions, file_id: FileId, cargo_name: &str, - is_proc_macro: bool, - target_layout: TargetLayoutLoadResult, - rustc_crate: bool, - toolchain: Option, + kind: TargetKind, + origin: CrateOrigin, ) -> CrateId { let edition = pkg.edition; let potential_cfg_options = if pkg.features.is_empty() { @@ -1332,18 +1299,10 @@ fn add_target_crate_root( cfg_options, potential_cfg_options, env, - is_proc_macro, - if rustc_crate { - CrateOrigin::Rustc { name: pkg.name.clone() } - } else if pkg.is_member { - CrateOrigin::Local { repo: pkg.repository.clone(), name: Some(pkg.name.clone()) } - } else { - CrateOrigin::Library { repo: pkg.repository.clone(), name: pkg.name.clone() } - }, - target_layout, - toolchain, + matches!(kind, TargetKind::Lib { is_proc_macro: true }), + origin, ); - if is_proc_macro { + if let TargetKind::Lib { is_proc_macro: true } = kind { let proc_macro = match build_data.as_ref().map(|it| it.proc_macro_dylib_path.as_ref()) { Some(it) => it.cloned().map(|path| Ok((Some(cargo_name.to_owned()), path))), None => Some(Err("crate has not yet been built".to_owned())), @@ -1365,14 +1324,7 @@ impl SysrootPublicDeps { /// Makes `from` depend on the public sysroot crates. 
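/// Each stored dependency also carries a `prelude` flag, which is forwarded to
/// `add_dep_with_prelude` below and controls whether the crate is injected into the
/// extern prelude of `from`.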
fn add_to_crate_graph(&self, crate_graph: &mut CrateGraph, from: CrateId) { for (name, krate, prelude) in &self.deps { - add_dep_with_prelude( - crate_graph, - from, - name.clone(), - *krate, - *prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, name.clone(), *krate, *prelude); } } } @@ -1381,9 +1333,7 @@ fn sysroot_to_crate_graph( crate_graph: &mut CrateGraph, sysroot: &Sysroot, rustc_cfg: Vec, - target_layout: TargetLayoutLoadResult, load: &mut dyn FnMut(&AbsPath) -> Option, - toolchain: Option<&Version>, ) -> (SysrootPublicDeps, Option) { let _p = tracing::span!(tracing::Level::INFO, "sysroot_to_crate_graph").entered(); match sysroot.mode() { @@ -1396,8 +1346,6 @@ fn sysroot_to_crate_graph( rustc_cfg, &CfgOverrides::default(), &WorkspaceBuildScripts::default(), - target_layout, - toolchain, ); let mut pub_deps = vec![]; @@ -1440,17 +1388,16 @@ fn sysroot_to_crate_graph( // Remove all crates except the ones we are interested in to keep the sysroot graph small. let removed_mapping = cg.remove_crates_except(&marker_set); + let mapping = crate_graph.extend(cg, &mut pm, |(_, a), (_, b)| a == b); - crate_graph.extend(cg, &mut pm, |mapping| { - // Map the id through the removal mapping first, then through the crate graph extension mapping. - pub_deps.iter_mut().for_each(|(_, cid, _)| { - *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] - }); - if let Some(libproc_macro) = &mut libproc_macro { - *libproc_macro = mapping - [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; - } + // Map the id through the removal mapping first, then through the crate graph extension mapping. + pub_deps.iter_mut().for_each(|(_, cid, _)| { + *cid = mapping[&removed_mapping[cid.into_raw().into_u32() as usize].unwrap()] }); + if let Some(libproc_macro) = &mut libproc_macro { + *libproc_macro = mapping + [&removed_mapping[libproc_macro.into_raw().into_u32() as usize].unwrap()]; + } (SysrootPublicDeps { deps: pub_deps }, libproc_macro) } @@ -1474,8 +1421,6 @@ fn sysroot_to_crate_graph( env, false, CrateOrigin::Lang(LangCrateOrigin::from(&*stitched[krate].name)), - target_layout.clone(), - toolchain.cloned(), ); Some((krate, crate_id)) }) @@ -1487,7 +1432,7 @@ fn sysroot_to_crate_graph( if let (Some(&from), Some(&to)) = (sysroot_crates.get(&from), sysroot_crates.get(&to)) { - add_dep(crate_graph, from, name, to, DependencyKind::Normal); + add_dep(crate_graph, from, name, to); } } } @@ -1508,14 +1453,8 @@ fn sysroot_to_crate_graph( } } -fn add_dep( - graph: &mut CrateGraph, - from: CrateId, - name: CrateName, - to: CrateId, - kind: DependencyKind, -) { - add_dep_inner(graph, from, Dependency::new(name, to, kind)) +fn add_dep(graph: &mut CrateGraph, from: CrateId, name: CrateName, to: CrateId) { + add_dep_inner(graph, from, Dependency::new(name, to)) } fn add_dep_with_prelude( @@ -1524,20 +1463,12 @@ fn add_dep_with_prelude( name: CrateName, to: CrateId, prelude: bool, - kind: DependencyKind, ) { - add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude, kind)) + add_dep_inner(graph, from, Dependency::with_prelude(name, to, prelude)) } fn add_proc_macro_dep(crate_graph: &mut CrateGraph, from: CrateId, to: CrateId, prelude: bool) { - add_dep_with_prelude( - crate_graph, - from, - CrateName::new("proc_macro").unwrap(), - to, - prelude, - DependencyKind::Normal, - ); + add_dep_with_prelude(crate_graph, from, CrateName::new("proc_macro").unwrap(), to, prelude); } fn add_dep_inner(graph: &mut CrateGraph, from: CrateId, dep: 
Dependency) { @@ -1588,3 +1519,29 @@ fn create_cfg_options(rustc_cfg: Vec) -> CfgOptions { cfg_options.insert_atom("debug_assertions".into()); cfg_options } + +fn cargo_config_env( + cargo_toml: &ManifestPath, + extra_env: &FxHashMap, + sysroot: Option<&Sysroot>, +) -> FxHashMap { + let mut cargo_config = Command::new(Tool::Cargo.path()); + Sysroot::set_rustup_toolchain_env(&mut cargo_config, sysroot); + cargo_config.envs(extra_env); + cargo_config + .current_dir(cargo_toml.parent()) + .args(["-Z", "unstable-options", "config", "get", "env"]) + .env("RUSTC_BOOTSTRAP", "1"); + // if successful we receive `env.key.value = "value" per entry + tracing::debug!("Discovering cargo config env by {:?}", cargo_config); + utf8_stdout(cargo_config).map(parse_output_cargo_config_env).unwrap_or_default() +} + +fn parse_output_cargo_config_env(stdout: String) -> FxHashMap { + stdout + .lines() + .filter_map(|l| l.strip_prefix("env.")) + .filter_map(|l| l.split_once(".value = ")) + .map(|(key, value)| (key.to_owned(), value.trim_matches('"').to_owned())) + .collect() +} diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt index d8d9e559e5c1d..0ad19ca9f759d 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -59,10 +58,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -113,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -121,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -132,10 +125,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -186,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -194,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -205,10 +192,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -259,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -267,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -278,10 +259,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +324,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt index d8d9e559e5c1d..0ad19ca9f759d 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_selective_overrides.txt @@ -48,7 +48,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -59,10 +58,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - 
"target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -113,7 +108,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -121,7 +115,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -132,10 +125,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -186,7 +175,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -194,7 +182,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -205,10 +192,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -259,7 +242,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -267,7 +249,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -278,10 +259,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -347,9 +324,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt index e0ba5ed498fa8..e2334dca87579 100644 --- a/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt +++ b/crates/project-model/test_data/output/cargo_hello_world_project_model_with_wildcard_overrides.txt @@ -47,7 +47,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -58,10 +57,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -111,7 +106,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -119,7 +113,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -130,10 +123,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -183,7 +172,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -191,7 +179,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -202,10 +189,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -255,7 +238,6 @@ name: CrateName( "hello_world", ), - kind: Normal, prelude: true, }, Dependency { @@ -263,7 +245,6 @@ name: CrateName( "libc", ), - kind: Normal, prelude: true, }, ], @@ -274,10 +255,6 @@ ), }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -343,9 +320,5 @@ name: "libc", }, is_proc_macro: false, - target_layout: Err( - "target_data_layout not loaded", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt index 0df99534c5bd9..ccaba963deda3 100644 --- 
a/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt +++ b/crates/project-model/test_data/output/rust_project_hello_world_project_model.txt @@ -28,7 +28,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -36,10 +35,6 @@ Alloc, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 1: CrateData { root_file_id: FileId( @@ -69,10 +64,6 @@ Core, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 2: CrateData { root_file_id: FileId( @@ -102,10 +93,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 3: CrateData { root_file_id: FileId( @@ -135,10 +122,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 4: CrateData { root_file_id: FileId( @@ -169,7 +152,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -177,7 +159,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, ], @@ -185,10 +166,6 @@ ProcMacro, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 5: CrateData { root_file_id: FileId( @@ -218,10 +195,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 6: CrateData { root_file_id: FileId( @@ -252,7 +225,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -260,7 +232,6 @@ name: CrateName( "panic_unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -268,7 +239,6 @@ name: CrateName( "panic_abort", ), - kind: Normal, prelude: true, }, Dependency { @@ -276,7 +246,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -284,7 +253,6 @@ name: CrateName( "profiler_builtins", ), - kind: Normal, prelude: true, }, Dependency { @@ -292,7 +260,6 @@ name: CrateName( "unwind", ), - kind: Normal, prelude: true, }, Dependency { @@ -300,7 +267,6 @@ name: CrateName( "std_detect", ), - kind: Normal, prelude: true, }, Dependency { @@ -308,7 +274,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: true, }, ], @@ -316,10 +281,6 @@ Std, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 7: CrateData { root_file_id: FileId( @@ -349,10 +310,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 8: CrateData { root_file_id: FileId( @@ -382,10 +339,6 @@ Test, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 9: CrateData { root_file_id: FileId( @@ -415,10 +368,6 @@ Other, ), is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, 10: CrateData { root_file_id: FileId( @@ -449,7 +398,6 @@ name: CrateName( "core", ), - kind: Normal, prelude: true, }, Dependency { @@ -457,7 +405,6 @@ name: CrateName( "alloc", ), - kind: Normal, prelude: true, }, Dependency { @@ -465,7 +412,6 @@ name: CrateName( "std", ), - kind: Normal, prelude: true, }, Dependency { @@ -473,7 +419,6 @@ name: CrateName( "test", ), - kind: Normal, prelude: false, }, 
Dependency { @@ -481,7 +426,6 @@ name: CrateName( "proc_macro", ), - kind: Normal, prelude: false, }, ], @@ -492,9 +436,5 @@ ), }, is_proc_macro: false, - target_layout: Err( - "rust-project.json projects have no target layout set", - ), - toolchain: None, }, } \ No newline at end of file diff --git a/crates/rust-analyzer/src/bin/main.rs b/crates/rust-analyzer/src/bin/main.rs index 269dd3cfffe95..07e04a8366173 100644 --- a/crates/rust-analyzer/src/bin/main.rs +++ b/crates/rust-analyzer/src/bin/main.rs @@ -11,7 +11,7 @@ extern crate rustc_driver as _; mod rustc_wrapper; -use std::{env, fs, path::PathBuf, process, sync::Arc}; +use std::{env, fs, path::PathBuf, process::ExitCode, sync::Arc}; use anyhow::Context; use lsp_server::Connection; @@ -27,21 +27,15 @@ static ALLOC: mimalloc::MiMalloc = mimalloc::MiMalloc; #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; -fn main() -> anyhow::Result<()> { +fn main() -> anyhow::Result { if std::env::var("RA_RUSTC_WRAPPER").is_ok() { - let mut args = std::env::args_os(); - let _me = args.next().unwrap(); - let rustc = args.next().unwrap(); - let code = match rustc_wrapper::run_rustc_skipping_cargo_checking(rustc, args.collect()) { - Ok(rustc_wrapper::ExitCode(code)) => code.unwrap_or(102), - Err(err) => { - eprintln!("{err}"); - 101 - } - }; - process::exit(code); + rustc_wrapper::main().map_err(Into::into) + } else { + actual_main() } +} +fn actual_main() -> anyhow::Result { let flags = flags::RustAnalyzer::from_env_or_exit(); #[cfg(debug_assertions)] @@ -58,14 +52,14 @@ fn main() -> anyhow::Result<()> { let verbosity = flags.verbosity(); match flags.subcommand { - flags::RustAnalyzerCmd::LspServer(cmd) => { + flags::RustAnalyzerCmd::LspServer(cmd) => 'lsp_server: { if cmd.print_config_schema { println!("{:#}", Config::json_schema()); - return Ok(()); + break 'lsp_server; } if cmd.version { println!("rust-analyzer {}", rust_analyzer::version()); - return Ok(()); + break 'lsp_server; } // rust-analyzer’s “main thread” is actually @@ -90,7 +84,7 @@ fn main() -> anyhow::Result<()> { flags::RustAnalyzerCmd::RunTests(cmd) => cmd.run()?, flags::RustAnalyzerCmd::RustcTests(cmd) => cmd.run()?, } - Ok(()) + Ok(ExitCode::SUCCESS) } fn setup_logging(log_file_flag: Option) -> anyhow::Result<()> { diff --git a/crates/rust-analyzer/src/bin/rustc_wrapper.rs b/crates/rust-analyzer/src/bin/rustc_wrapper.rs index 38e9c7dd7e11c..684b3f52afc86 100644 --- a/crates/rust-analyzer/src/bin/rustc_wrapper.rs +++ b/crates/rust-analyzer/src/bin/rustc_wrapper.rs @@ -7,13 +7,17 @@ use std::{ ffi::OsString, io, - process::{Command, Stdio}, + process::{Command, ExitCode, Stdio}, }; -/// ExitCode/ExitStatus are impossible to create :(. 
-pub(crate) struct ExitCode(pub(crate) Option); +pub(crate) fn main() -> io::Result { + let mut args = std::env::args_os(); + let _me = args.next().unwrap(); + let rustc = args.next().unwrap(); + run_rustc_skipping_cargo_checking(rustc, args.collect()) +} -pub(crate) fn run_rustc_skipping_cargo_checking( +fn run_rustc_skipping_cargo_checking( rustc_executable: OsString, args: Vec, ) -> io::Result { @@ -35,9 +39,10 @@ pub(crate) fn run_rustc_skipping_cargo_checking( arg.starts_with("--emit=") && arg.contains("metadata") && !arg.contains("link") }); if not_invoked_by_build_script && is_cargo_check { - return Ok(ExitCode(Some(0))); + Ok(ExitCode::from(0)) + } else { + run_rustc(rustc_executable, args) } - run_rustc(rustc_executable, args) } fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result { @@ -47,5 +52,5 @@ fn run_rustc(rustc_executable: OsString, args: Vec) -> io::Result { + TargetKind::Lib { is_proc_macro: _ } => { buf.push("--lib".to_owned()); } TargetKind::Other | TargetKind::BuildScript => (), diff --git a/crates/rust-analyzer/src/cli/analysis_stats.rs b/crates/rust-analyzer/src/cli/analysis_stats.rs index 2741b45222569..ce7e3b3cd6a44 100644 --- a/crates/rust-analyzer/src/cli/analysis_stats.rs +++ b/crates/rust-analyzer/src/cli/analysis_stats.rs @@ -32,7 +32,7 @@ use oorandom::Rand32; use profile::{Bytes, StopWatch}; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rayon::prelude::*; -use rustc_hash::FxHashSet; +use rustc_hash::{FxHashMap, FxHashSet}; use syntax::{AstNode, SyntaxNode}; use vfs::{AbsPathBuf, FileId, Vfs, VfsPath}; @@ -91,7 +91,7 @@ impl flags::AnalysisStats { }; let (host, vfs, _proc_macro) = - load_workspace(workspace, &cargo_config.extra_env, &load_cargo_config)?; + load_workspace(workspace.clone(), &cargo_config.extra_env, &load_cargo_config)?; let db = host.raw_database(); eprint!("{:<20} {}", "Database loaded:", db_load_sw.elapsed()); eprint!(" (metadata {metadata_time}"); @@ -232,7 +232,11 @@ impl flags::AnalysisStats { } if self.run_all_ide_things { - self.run_ide_things(host.analysis(), file_ids); + self.run_ide_things(host.analysis(), file_ids.clone()); + } + + if self.run_term_search { + self.run_term_search(&workspace, db, &vfs, file_ids, verbosity); } let total_span = analysis_sw.elapsed(); @@ -321,6 +325,212 @@ impl flags::AnalysisStats { report_metric("const eval time", const_eval_time.time.as_millis() as u64, "ms"); } + fn run_term_search( + &self, + ws: &ProjectWorkspace, + db: &RootDatabase, + vfs: &Vfs, + mut file_ids: Vec, + verbosity: Verbosity, + ) { + let cargo_config = CargoConfig { + sysroot: match self.no_sysroot { + true => None, + false => Some(RustLibSource::Discover), + }, + ..Default::default() + }; + + let mut bar = match verbosity { + Verbosity::Quiet | Verbosity::Spammy => ProgressReport::hidden(), + _ if self.parallel || self.output.is_some() => ProgressReport::hidden(), + _ => ProgressReport::new(file_ids.len() as u64), + }; + + file_ids.sort(); + file_ids.dedup(); + + #[derive(Debug, Default)] + struct Acc { + tail_expr_syntax_hits: u64, + tail_expr_no_term: u64, + total_tail_exprs: u64, + error_codes: FxHashMap, + syntax_errors: u32, + } + + let mut acc: Acc = Default::default(); + bar.tick(); + let mut sw = self.stop_watch(); + + for &file_id in &file_ids { + let sema = hir::Semantics::new(db); + let _ = db.parse(file_id); + + let parse = sema.parse(file_id); + let file_txt = db.file_text(file_id); + let path = vfs.file_path(file_id).as_path().unwrap().to_owned(); + + for node 
in parse.syntax().descendants() { + let expr = match syntax::ast::Expr::cast(node.clone()) { + Some(it) => it, + None => continue, + }; + let block = match syntax::ast::BlockExpr::cast(expr.syntax().clone()) { + Some(it) => it, + None => continue, + }; + let target_ty = match sema.type_of_expr(&expr) { + Some(it) => it.adjusted(), + None => continue, // Failed to infer type + }; + + let expected_tail = match block.tail_expr() { + Some(it) => it, + None => continue, + }; + + if expected_tail.is_block_like() { + continue; + } + + let range = sema.original_range(expected_tail.syntax()).range; + let original_text: String = db + .file_text(file_id) + .chars() + .skip(usize::from(range.start())) + .take(usize::from(range.end()) - usize::from(range.start())) + .collect(); + + let scope = match sema.scope(expected_tail.syntax()) { + Some(it) => it, + None => continue, + }; + + let ctx = hir::term_search::TermSearchCtx { + sema: &sema, + scope: &scope, + goal: target_ty, + config: hir::term_search::TermSearchConfig { + enable_borrowcheck: true, + ..Default::default() + }, + }; + let found_terms = hir::term_search::term_search(&ctx); + + if found_terms.is_empty() { + acc.tail_expr_no_term += 1; + acc.total_tail_exprs += 1; + // println!("\n{}\n", &original_text); + continue; + }; + + fn trim(s: &str) -> String { + s.chars().filter(|c| !c.is_whitespace()).collect() + } + + let todo = syntax::ast::make::ext::expr_todo().to_string(); + let mut formatter = |_: &hir::Type| todo.clone(); + let mut syntax_hit_found = false; + for term in found_terms { + let generated = + term.gen_source_code(&scope, &mut formatter, false, true).unwrap(); + syntax_hit_found |= trim(&original_text) == trim(&generated); + + // Validate if type-checks + let mut txt = file_txt.to_string(); + + let edit = ide::TextEdit::replace(range, generated.clone()); + edit.apply(&mut txt); + + if self.validate_term_search { + std::fs::write(&path, txt).unwrap(); + + let res = ws.run_build_scripts(&cargo_config, &|_| ()).unwrap(); + if let Some(err) = res.error() { + if err.contains("error: could not compile") { + if let Some(mut err_idx) = err.find("error[E") { + err_idx += 7; + let err_code = &err[err_idx..err_idx + 4]; + match err_code { + "0282" => continue, // Byproduct of testing method + "0277" if generated.contains(&todo) => continue, // See https://github.com/rust-lang/rust/issues/69882 + _ => (), + } + bar.println(err); + bar.println(generated); + acc.error_codes + .entry(err_code.to_owned()) + .and_modify(|n| *n += 1) + .or_insert(1); + } else { + acc.syntax_errors += 1; + bar.println(format!("Syntax error: \n{}", err)); + } + } + } + } + } + + if syntax_hit_found { + acc.tail_expr_syntax_hits += 1; + } + acc.total_tail_exprs += 1; + + let msg = move || { + format!( + "processing: {:<50}", + trim(&original_text).chars().take(50).collect::() + ) + }; + if verbosity.is_spammy() { + bar.println(msg()); + } + bar.set_message(msg); + } + // Revert file back to original state + if self.validate_term_search { + std::fs::write(&path, file_txt.to_string()).unwrap(); + } + + bar.inc(1); + } + let term_search_time = sw.elapsed(); + + bar.println(format!( + "Tail Expr syntactic hits: {}/{} ({}%)", + acc.tail_expr_syntax_hits, + acc.total_tail_exprs, + percentage(acc.tail_expr_syntax_hits, acc.total_tail_exprs) + )); + bar.println(format!( + "Tail Exprs found: {}/{} ({}%)", + acc.total_tail_exprs - acc.tail_expr_no_term, + acc.total_tail_exprs, + percentage(acc.total_tail_exprs - acc.tail_expr_no_term, 
acc.total_tail_exprs) + )); + if self.validate_term_search { + bar.println(format!( + "Tail Exprs total errors: {}, syntax errors: {}, error codes:", + acc.error_codes.values().sum::<u32>() + acc.syntax_errors, + acc.syntax_errors, + )); + for (err, count) in acc.error_codes { + bar.println(format!( + " E{err}: {count:>5} (https://doc.rust-lang.org/error_codes/E{err}.html)" + )); + } + } + bar.println(format!( + "Term search avg time: {}ms", + term_search_time.time.as_millis() as u64 / acc.total_tail_exprs + )); + bar.println(format!("{:<20} {}", "Term search:", term_search_time)); + report_metric("term search time", term_search_time.time.as_millis() as u64, "ms"); + + bar.finish_and_clear(); + } + fn run_mir_lowering(&self, db: &RootDatabase, bodies: &[DefWithBody], verbosity: Verbosity) { let mut sw = self.stop_watch(); let mut all = 0; diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 252b1e1a48581..493e614dce682 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -93,6 +93,11 @@ xflags::xflags! { /// and annotations. This is useful for benchmarking the memory usage on a project that has /// been worked on for a bit in a longer running session. optional --run-all-ide-things + /// Run term search on all the tail expressions (of functions, blocks, if statements, etc.) + optional --run-term-search + /// Validate term search by running `cargo check` on every response. + /// Note that this also temporarily modifies the files on disk; use with caution! + optional --validate-term-search } /// Run unit tests of the project using mir interpreter @@ -218,6 +223,8 @@ pub struct AnalysisStats { pub skip_data_layout: bool, pub skip_const_eval: bool, pub run_all_ide_things: bool, + pub run_term_search: bool, + pub validate_term_search: bool, } #[derive(Debug)] diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index f4aec288348f9..2d56830c87f30 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -135,12 +135,11 @@ impl flags::Scip { } if symbols_emitted.insert(id) { - let documentation = token - .hover - .as_ref() - .map(|hover| hover.markup.as_str()) - .filter(|it| !it.is_empty()) - .map(|it| vec![it.to_owned()]); + let documentation = match &token.documentation { + Some(doc) => vec![doc.as_str().to_owned()], + None => vec![], + }; + let position_encoding = scip_types::PositionEncoding::UTF8CodeUnitOffsetFromLineStart.into(); let signature_documentation = @@ -153,7 +152,7 @@ impl flags::Scip { }); let symbol_info = scip_types::SymbolInformation { symbol: symbol.clone(), - documentation: documentation.unwrap_or_default(), + documentation, relationships: Vec::new(), special_fields: Default::default(), kind: symbol_kind(token.kind).into(), @@ -599,4 +598,22 @@ pub mod example_mod { "rust-analyzer cargo main . 
MyTypeAlias#", ); } + + #[test] + fn documentation_matches_doc_comment() { + let s = "/// foo\nfn bar() {}"; + + let mut host = AnalysisHost::default(); + let change_fixture = ChangeFixture::parse(s); + host.raw_database_mut().apply_change(change_fixture.change); + + let analysis = host.analysis(); + let si = StaticIndex::compute(&analysis); + + let file = si.files.first().unwrap(); + let (_, token_id) = file.tokens.first().unwrap(); + let token = si.tokens.get(*token_id).unwrap(); + + assert_eq!(token.documentation.as_ref().map(|d| d.as_str()), Some("foo")); + } } diff --git a/crates/rust-analyzer/src/config.rs b/crates/rust-analyzer/src/config.rs index 7bdd9ec866a5a..16e1a2f544907 100644 --- a/crates/rust-analyzer/src/config.rs +++ b/crates/rust-analyzer/src/config.rs @@ -112,7 +112,7 @@ config_data! { cargo_buildScripts_overrideCommand: Option> = "null", /// Rerun proc-macros building/build-scripts running when proc-macro /// or build-script sources change and are saved. - cargo_buildScripts_rebuildOnSave: bool = "false", + cargo_buildScripts_rebuildOnSave: bool = "true", /// Use `RUSTC_WRAPPER=rust-analyzer` when running build scripts to /// avoid checking unnecessary things. cargo_buildScripts_useRustcWrapper: bool = "true", @@ -209,6 +209,11 @@ config_data! { /// by changing `#rust-analyzer.check.invocationStrategy#` and /// `#rust-analyzer.check.invocationLocation#`. /// + /// If `$saved_file` is part of the command, rust-analyzer will pass + /// the absolute path of the saved file to the provided command. This is + /// intended to be used with non-Cargo build systems. + /// Note that `$saved_file` is experimental and may be removed in the futureg. + /// /// An example command would be: /// /// ```bash @@ -286,6 +291,8 @@ config_data! { "scope": "expr" } }"#, + /// Whether to enable term search based snippets like `Some(foo.bar().baz())`. + completion_termSearch_enable: bool = "false", /// List of rust-analyzer diagnostics to disable. diagnostics_disabled: FxHashSet = "[]", @@ -504,9 +511,6 @@ config_data! { /// Exclude tests from find-all-references. references_excludeTests: bool = "false", - /// Allow renaming of items not belonging to the loaded workspaces. - rename_allowExternalItems: bool = "false", - /// Command to be executed instead of 'cargo' for runnables. runnables_command: Option = "null", @@ -1202,7 +1206,7 @@ impl Config { Some(AbsPathBuf::try_from(path).unwrap_or_else(|path| self.root_path.join(path))) } - pub fn dummy_replacements(&self) -> &FxHashMap, Box<[Box]>> { + pub fn ignored_proc_macros(&self) -> &FxHashMap, Box<[Box]>> { &self.data.procMacro_ignored } @@ -1535,6 +1539,7 @@ impl Config { && completion_item_edit_resolve(&self.caps), enable_self_on_the_fly: self.data.completion_autoself_enable, enable_private_editable: self.data.completion_privateEditable_enable, + enable_term_search: self.data.completion_termSearch_enable, full_function_signatures: self.data.completion_fullFunctionSignatures_enable, callable: match self.data.completion_callable_snippets { CallableCompletionDef::FillArguments => Some(CallableSnippets::FillArguments), @@ -1766,10 +1771,6 @@ impl Config { self.data.typing_autoClosingAngleBrackets_enable } - pub fn rename(&self) -> bool { - self.data.rename_allowExternalItems - } - // FIXME: VSCode seems to work wrong sometimes, see https://github.com/microsoft/vscode/issues/193124 // hence, distinguish it for now. 
pub fn is_visual_studio_code(&self) -> bool { diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index da4422a60a8a9..293807a383baa 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -9,7 +9,7 @@ use crossbeam_channel::{unbounded, Receiver, Sender}; use flycheck::FlycheckHandle; use hir::Change; use ide::{Analysis, AnalysisHost, Cancellable, FileId}; -use ide_db::base_db::{CrateId, FileLoader, ProcMacroPaths, SourceDatabase}; +use ide_db::base_db::{CrateId, ProcMacroPaths}; use load_cargo::SourceRootConfig; use lsp_types::{SemanticTokens, Url}; use nohash_hasher::IntMap; @@ -74,8 +74,8 @@ pub(crate) struct GlobalState { pub(crate) last_reported_status: Option, // proc macros - pub(crate) proc_macro_changed: bool, pub(crate) proc_macro_clients: Arc<[anyhow::Result]>, + pub(crate) build_deps_changed: bool, // Flycheck pub(crate) flycheck: Arc<[FlycheckHandle]>, @@ -203,9 +203,10 @@ impl GlobalState { source_root_config: SourceRootConfig::default(), config_errors: Default::default(), - proc_macro_changed: false, proc_macro_clients: Arc::from_iter([]), + build_deps_changed: false, + flycheck: Arc::from_iter([]), flycheck_sender, flycheck_receiver, @@ -300,12 +301,19 @@ impl GlobalState { if let Some(path) = vfs_path.as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.kind()) { - workspace_structure_change = Some((path.clone(), false)); + workspace_structure_change = Some(( + path.clone(), + false, + AsRef::::as_ref(&path).ends_with("build.rs"), + )); } if file.is_created_or_deleted() { has_structure_changes = true; - workspace_structure_change = - Some((path, self.crate_graph_file_dependencies.contains(vfs_path))); + workspace_structure_change = Some(( + path, + self.crate_graph_file_dependencies.contains(vfs_path), + false, + )); } else if path.extension() == Some("rs".as_ref()) { modified_rust_files.push(file.file_id); } @@ -346,23 +354,28 @@ impl GlobalState { }; self.analysis_host.apply_change(change); + { - let raw_database = self.analysis_host.raw_database(); + if !matches!(&workspace_structure_change, Some((.., true))) { + _ = self + .deferred_task_queue + .sender + .send(crate::main_loop::QueuedTask::CheckProcMacroSources(modified_rust_files)); + } // FIXME: ideally we should only trigger a workspace fetch for non-library changes // but something's going wrong with the source root business when we add a new local // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 - if let Some((path, force_crate_graph_reload)) = workspace_structure_change { + if let Some((path, force_crate_graph_reload, build_scripts_touched)) = + workspace_structure_change + { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); + if build_scripts_touched { + self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ()); + } } - self.proc_macro_changed = modified_rust_files.into_iter().any(|file_id| { - let crates = raw_database.relevant_crates(file_id); - let crate_graph = raw_database.crate_graph(); - - crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) - }); } true diff --git a/crates/rust-analyzer/src/handlers/notification.rs b/crates/rust-analyzer/src/handlers/notification.rs index d3c2073f09d25..b13c709dbfe60 100644 --- a/crates/rust-analyzer/src/handlers/notification.rs +++ b/crates/rust-analyzer/src/handlers/notification.rs @@ -145,11 +145,11 @@ 
pub(crate) fn handle_did_save_text_document( state: &mut GlobalState, params: DidSaveTextDocumentParams, ) -> anyhow::Result<()> { - if state.config.script_rebuild_on_save() && state.proc_macro_changed { - // reset the flag - state.proc_macro_changed = false; - // rebuild the proc macros - state.fetch_build_data_queue.request_op("ScriptRebuildOnSave".to_owned(), ()); + if state.config.script_rebuild_on_save() && state.build_deps_changed { + state.build_deps_changed = false; + state + .fetch_build_data_queue + .request_op("build_deps_changed - save notification".to_owned(), ()); } if let Ok(vfs_path) = from_proto::vfs_path(¶ms.text_document.uri) { @@ -158,7 +158,7 @@ pub(crate) fn handle_did_save_text_document( if reload::should_refresh_for_change(abs_path, ChangeKind::Modify) { state .fetch_workspaces_queue - .request_op(format!("DidSaveTextDocument {abs_path}"), false); + .request_op(format!("workspace vfs file change saved {abs_path}"), false); } } @@ -168,7 +168,7 @@ pub(crate) fn handle_did_save_text_document( } else if state.config.check_on_save() { // No specific flycheck was triggered, so let's trigger all of them. for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(None); } } Ok(()) @@ -314,6 +314,8 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { Some((idx, package)) }); + let saved_file = vfs_path.as_path().map(|p| p.to_owned()); + // Find and trigger corresponding flychecks for flycheck in world.flycheck.iter() { for (id, package) in workspace_ids.clone() { @@ -321,7 +323,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { updated = true; match package.filter(|_| !world.config.flycheck_workspace()) { Some(package) => flycheck.restart_for_package(package), - None => flycheck.restart_workspace(), + None => flycheck.restart_workspace(saved_file.clone()), } continue; } @@ -330,7 +332,7 @@ fn run_flycheck(state: &mut GlobalState, vfs_path: VfsPath) -> bool { // No specific flycheck was triggered, so let's trigger all of them. if !updated { for flycheck in world.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(saved_file.clone()); } } Ok(()) @@ -372,7 +374,7 @@ pub(crate) fn handle_run_flycheck( } // No specific flycheck was triggered, so let's trigger all of them. 
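// No saved file is forwarded on this path, so `None` is passed and flycheck has no
// path to substitute for a configured `$saved_file` placeholder.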
for flycheck in state.flycheck.iter() { - flycheck.restart_workspace(); + flycheck.restart_workspace(None); } Ok(()) } diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index 2a3633a48e9fa..eb9d4bf0f02d7 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -52,7 +52,7 @@ use crate::{ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_workspaces_queue.request_op("reload workspace request".to_owned(), false); Ok(()) @@ -60,7 +60,7 @@ pub(crate) fn handle_workspace_reload(state: &mut GlobalState, _: ()) -> anyhow: pub(crate) fn handle_proc_macros_rebuild(state: &mut GlobalState, _: ()) -> anyhow::Result<()> { state.proc_macro_clients = Arc::from_iter([]); - state.proc_macro_changed = false; + state.build_deps_changed = false; state.fetch_build_data_queue.request_op("rebuild proc macros request".to_owned(), ()); Ok(()) @@ -1017,10 +1017,8 @@ pub(crate) fn handle_rename( let _p = tracing::span!(tracing::Level::INFO, "handle_rename").entered(); let position = from_proto::file_position(&snap, params.text_document_position)?; - let mut change = snap - .analysis - .rename(position, ¶ms.new_name, snap.config.rename())? - .map_err(to_proto::rename_error)?; + let mut change = + snap.analysis.rename(position, ¶ms.new_name)?.map_err(to_proto::rename_error)?; // this is kind of a hack to prevent double edits from happening when moving files // When a module gets renamed by renaming the mod declaration this causes the file to move @@ -1937,6 +1935,7 @@ fn run_rustfmt( let mut command = match snap.config.rustfmt() { RustfmtConfig::Rustfmt { extra_args, enable_range_formatting } => { + // FIXME: Set RUSTUP_TOOLCHAIN let mut cmd = process::Command::new(toolchain::rustfmt()); cmd.envs(snap.config.extra_env()); cmd.args(extra_args); diff --git a/crates/rust-analyzer/src/integrated_benchmarks.rs b/crates/rust-analyzer/src/integrated_benchmarks.rs index acc02d6447c63..f0eee77aff592 100644 --- a/crates/rust-analyzer/src/integrated_benchmarks.rs +++ b/crates/rust-analyzer/src/integrated_benchmarks.rs @@ -132,6 +132,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), @@ -175,6 +176,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), @@ -216,6 +218,7 @@ fn integrated_completion_benchmark() { enable_imports_on_the_fly: true, enable_self_on_the_fly: true, enable_private_editable: true, + enable_term_search: true, full_function_signatures: false, callable: Some(CallableSnippets::FillArguments), snippet_cap: SnippetCap::new(true), diff --git a/crates/rust-analyzer/src/lib.rs b/crates/rust-analyzer/src/lib.rs index b1809f58ae700..473ca991ad9b0 100644 --- a/crates/rust-analyzer/src/lib.rs +++ b/crates/rust-analyzer/src/lib.rs @@ -47,7 +47,9 @@ mod integrated_benchmarks; use serde::de::DeserializeOwned; -pub use crate::{caps::server_capabilities, main_loop::main_loop, 
version::version}; +pub use crate::{ + caps::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph, version::version, +}; pub fn from_json( what: &'static str, diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 64f19f0b32d7e..727007bba083a 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -123,6 +123,7 @@ pub(crate) fn completion_item_kind( CompletionItemKind::Method => lsp_types::CompletionItemKind::METHOD, CompletionItemKind::Snippet => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::UnresolvedReference => lsp_types::CompletionItemKind::REFERENCE, + CompletionItemKind::Expression => lsp_types::CompletionItemKind::SNIPPET, CompletionItemKind::SymbolKind(symbol) => match symbol { SymbolKind::Attribute => lsp_types::CompletionItemKind::FUNCTION, SymbolKind::Const => lsp_types::CompletionItemKind::CONSTANT, @@ -929,6 +930,16 @@ fn merge_text_and_snippet_edits( let mut edits: Vec = vec![]; let mut snippets = snippet_edit.into_edit_ranges().into_iter().peekable(); let text_edits = edit.into_iter(); + // offset to go from the final source location to the original source location + let mut source_text_offset = 0i32; + + let offset_range = |range: TextRange, offset: i32| -> TextRange { + // map the snippet range from the target location into the original source location + let start = u32::from(range.start()).checked_add_signed(offset).unwrap_or(0); + let end = u32::from(range.end()).checked_add_signed(offset).unwrap_or(0); + + TextRange::new(start.into(), end.into()) + }; for current_indel in text_edits { let new_range = { @@ -937,10 +948,17 @@ fn merge_text_and_snippet_edits( TextRange::at(current_indel.delete.start(), insert_len) }; + // figure out how much this Indel will shift future ranges from the initial source + let offset_adjustment = + u32::from(current_indel.delete.len()) as i32 - u32::from(new_range.len()) as i32; + // insert any snippets before the text edit - for (snippet_index, snippet_range) in - snippets.take_while_ref(|(_, range)| range.end() < new_range.start()) - { + for (snippet_index, snippet_range) in snippets.peeking_take_while(|(_, range)| { + offset_range(*range, source_text_offset).end() < new_range.start() + }) { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "placeholder range {:?} is before current text edit range {:?}", @@ -953,22 +971,23 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - edits.push(SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + )) } - if snippets.peek().is_some_and(|(_, range)| new_range.intersect(*range).is_some()) { + if snippets.peek().is_some_and(|(_, range)| { + new_range.intersect(offset_range(*range, source_text_offset)).is_some() + }) { // at least one snippet edit intersects this text edit, // so gather all of the edits that intersect this text edit let mut all_snippets = snippets - .take_while_ref(|(_, range)| new_range.intersect(*range).is_some()) + .peeking_take_while(|(_, range)| { + new_range.intersect(offset_range(*range, 
source_text_offset)).is_some() + }) + .map(|(tabstop, range)| (tabstop, offset_range(range, source_text_offset))) .collect_vec(); // ensure all of the ranges are wholly contained inside of the new range @@ -979,40 +998,59 @@ fn merge_text_and_snippet_edits( ) }); - let mut text_edit = text_edit(line_index, current_indel); + let mut new_text = current_indel.insert; - // escape out snippet text - stdx::replace(&mut text_edit.new_text, '\\', r"\\"); - stdx::replace(&mut text_edit.new_text, '$', r"\$"); + // find which snippet bits need to be escaped + let escape_places = new_text + .rmatch_indices(['\\', '$', '{', '}']) + .map(|(insert, _)| insert) + .collect_vec(); + let mut escape_places = escape_places.into_iter().peekable(); + let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { + for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { + new_text.insert(before, '\\'); + } + }; - // ...and apply! + // insert snippets, and escaping any needed bits along the way for (index, range) in all_snippets.iter().rev() { - let start = (range.start() - new_range.start()).into(); - let end = (range.end() - new_range.start()).into(); + let text_range = range - new_range.start(); + let (start, end) = (text_range.start().into(), text_range.end().into()); if range.is_empty() { - text_edit.new_text.insert_str(start, &format!("${index}")); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${index}")); } else { - text_edit.new_text.insert(end, '}'); - text_edit.new_text.insert_str(start, &format!("${{{index}:")); + escape_prior_bits(&mut new_text, end); + new_text.insert(end, '}'); + escape_prior_bits(&mut new_text, start); + new_text.insert_str(start, &format!("${{{index}:")); } } - edits.push(SnippetTextEdit { - range: text_edit.range, - new_text: text_edit.new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - }) + // escape any remaining bits + escape_prior_bits(&mut new_text, 0); + + edits.push(snippet_text_edit( + line_index, + true, + Indel { insert: new_text, delete: current_indel.delete }, + )) } else { // snippet edit was beyond the current one // since it wasn't consumed, it's available for the next pass edits.push(snippet_text_edit(line_index, false, current_indel)); } + + // update the final source -> initial source mapping offset + source_text_offset += offset_adjustment; } // insert any remaining tabstops edits.extend(snippets.map(|(snippet_index, snippet_range)| { + // adjust the snippet range into the corresponding initial source location + let snippet_range = offset_range(snippet_range, source_text_offset); + let snippet_range = if !stdx::always!( snippet_range.is_empty(), "found placeholder snippet {:?} without a text edit", @@ -1023,15 +1061,11 @@ fn merge_text_and_snippet_edits( snippet_range }; - let range = range(line_index, snippet_range); - let new_text = format!("${snippet_index}"); - - SnippetTextEdit { - range, - new_text, - insert_text_format: Some(lsp_types::InsertTextFormat::SNIPPET), - annotation_id: None, - } + snippet_text_edit( + line_index, + true, + Indel { insert: format!("${snippet_index}"), delete: snippet_range }, + ) })); edits @@ -1658,15 +1692,44 @@ fn bar(_: usize) {} assert!(!docs.contains("use crate::bar")); } + #[track_caller] fn check_rendered_snippets(edit: TextEdit, snippets: SnippetEdit, expect: Expect) { - let text = r#"/* place to put all ranges in */"#; + check_rendered_snippets_in_source( + r"/* place to put all ranges in */", + edit, + 
snippets, + expect, + ); + } + + #[track_caller] + fn check_rendered_snippets_in_source( + ra_fixture: &str, + edit: TextEdit, + snippets: SnippetEdit, + expect: Expect, + ) { + let source = stdx::trim_indent(ra_fixture); + let endings = if source.contains('\r') { LineEndings::Dos } else { LineEndings::Unix }; let line_index = LineIndex { - index: Arc::new(ide::LineIndex::new(text)), - endings: LineEndings::Unix, + index: Arc::new(ide::LineIndex::new(&source)), + endings, encoding: PositionEncoding::Utf8, }; let res = merge_text_and_snippet_edits(&line_index, edit, snippets); + + // Ensure that none of the ranges overlap + { + let mut sorted = res.clone(); + sorted.sort_by_key(|edit| (edit.range.start, edit.range.end)); + let disjoint_ranges = sorted + .iter() + .zip(sorted.iter().skip(1)) + .all(|(l, r)| l.range.end <= r.range.start || l == r); + assert!(disjoint_ranges, "ranges overlap for {res:#?}"); + } + expect.assert_debug_eq(&res); } @@ -1811,7 +1874,8 @@ fn bar(_: usize) {} let mut edit = TextEdit::builder(); edit.insert(0.into(), "abc".to_owned()); let edit = edit.finish(); - let snippets = SnippetEdit::new(vec![Snippet::Tabstop(7.into())]); + // Note: tabstops are positioned in the source where all text edits have been applied + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); check_rendered_snippets( edit, @@ -1928,8 +1992,9 @@ fn bar(_: usize) {} edit.insert(0.into(), "abc".to_owned()); edit.insert(7.into(), "abc".to_owned()); let edit = edit.finish(); + // Note: tabstops are positioned in the source where all text edits have been applied let snippets = - SnippetEdit::new(vec![Snippet::Tabstop(4.into()), Snippet::Tabstop(4.into())]); + SnippetEdit::new(vec![Snippet::Tabstop(7.into()), Snippet::Tabstop(7.into())]); check_rendered_snippets( edit, @@ -2085,13 +2150,502 @@ fn bar(_: usize) {} fn snippet_rendering_escape_snippet_bits() { // only needed for snippet formats let mut edit = TextEdit::builder(); - edit.insert(0.into(), r"abc\def$".to_owned()); - edit.insert(8.into(), r"ghi\jkl$".to_owned()); + edit.insert(0.into(), r"$ab{}$c\def".to_owned()); + edit.insert(8.into(), r"ghi\jk<-check_insert_here$".to_owned()); + edit.insert(10.into(), r"a\\b\\c{}$".to_owned()); let edit = edit.finish(); - let snippets = - SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(0.into(), 3.into()))]); + let snippets = SnippetEdit::new(vec![ + Snippet::Placeholder(TextRange::new(1.into(), 9.into())), + Snippet::Tabstop(25.into()), + ]); check_rendered_snippets( + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 0, + }, + end: Position { + line: 0, + character: 0, + }, + }, + new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 8, + }, + end: Position { + line: 0, + character: 8, + }, + }, + new_text: "ghi\\\\jk$0<-check_insert_here\\$", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 0, + character: 10, + }, + end: Position { + line: 0, + character: 10, + }, + }, + new_text: "a\\\\b\\\\c{}$", + insert_text_format: None, + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), 
"let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(43.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_deleted() { + // negative offset from inserting a smaller range + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(51.into(), 59.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_placeholder_adjust_offset_added() { + // positive offset from inserting a larger range + let mut edit = TextEdit::builder(); + 
edit.replace(TextRange::new(39.into(), 40.into()), "let".to_owned()); + edit.replace( + TextRange::new(41.into(), 73.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = + SnippetEdit::new(vec![Snippet::Placeholder(TextRange::new(43.into(), 51.into()))]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> P { + P { + disabled: false, + } +} + +struct P { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 5, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 6, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] + "#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_between_text_edits() { + // inserting between edits, tabstop should be at (1, 14) + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(58.into(), 90.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(51.into())]); + + // add an extra space between `ProcMacro` and `{` to insert the tabstop at + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + }, + end: Position { + line: 1, + character: 14, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 15, + }, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_tabstop_adjust_offset_after_text_edits() { + // inserting after edits, tabstop should be before the closing curly of the fn + let mut edit = TextEdit::builder(); + edit.replace(TextRange::new(47.into(), 56.into()), "let".to_owned()); + edit.replace( + TextRange::new(57.into(), 89.into()), + "disabled = false;\n ProcMacro {\n disabled,\n }".to_owned(), + ); + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(109.into())]); + + check_rendered_snippets_in_source( + r" +fn expander_to_proc_macro() -> ProcMacro { + ProcMacro { + disabled: false, + } +} + +struct ProcMacro { + disabled: bool, +}", + edit, + snippets, + expect![[r#" + [ + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 4, + }, + end: Position { + line: 1, + character: 13, + }, + }, + new_text: "let", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 1, + character: 14, + 
}, + end: Position { + line: 3, + character: 5, + }, + }, + new_text: "disabled = false;\n ProcMacro {\n disabled,\n }", + insert_text_format: None, + annotation_id: None, + }, + SnippetTextEdit { + range: Range { + start: Position { + line: 4, + character: 0, + }, + end: Position { + line: 4, + character: 0, + }, + }, + new_text: "$0", + insert_text_format: Some( + Snippet, + ), + annotation_id: None, + }, + ] +"#]], + ); + } + + #[test] + fn snippet_rendering_handle_dos_line_endings() { + // unix -> dos conversion should be handled after placing snippets + let mut edit = TextEdit::builder(); + edit.insert(6.into(), "\n\n->".to_owned()); + + let edit = edit.finish(); + let snippets = SnippetEdit::new(vec![Snippet::Tabstop(10.into())]); + + check_rendered_snippets_in_source( + "yeah\r\n<-tabstop here", edit, snippets, expect![[r#" @@ -2099,38 +2653,23 @@ fn bar(_: usize) {} SnippetTextEdit { range: Range { start: Position { - line: 0, + line: 1, character: 0, }, end: Position { - line: 0, + line: 1, character: 0, }, }, - new_text: "${0:abc}\\\\def\\$", + new_text: "\r\n\r\n->$0", insert_text_format: Some( Snippet, ), annotation_id: None, }, - SnippetTextEdit { - range: Range { - start: Position { - line: 0, - character: 8, - }, - end: Position { - line: 0, - character: 8, - }, - }, - new_text: "ghi\\jkl$", - insert_text_format: None, - annotation_id: None, - }, ] "#]], - ); + ) } // `Url` is not able to parse windows paths on unix machines. diff --git a/crates/rust-analyzer/src/main_loop.rs b/crates/rust-analyzer/src/main_loop.rs index 88660db7e93b6..72f6d0fde5fe7 100644 --- a/crates/rust-analyzer/src/main_loop.rs +++ b/crates/rust-analyzer/src/main_loop.rs @@ -8,12 +8,10 @@ use std::{ use always_assert::always; use crossbeam_channel::{select, Receiver}; -use flycheck::FlycheckHandle; -use ide_db::base_db::{SourceDatabaseExt, VfsPath}; +use ide_db::base_db::{SourceDatabase, SourceDatabaseExt, VfsPath}; use lsp_server::{Connection, Notification, Request}; use lsp_types::notification::Notification as _; use stdx::thread::ThreadIntent; -use triomphe::Arc; use vfs::FileId; use crate::{ @@ -77,6 +75,7 @@ impl fmt::Display for Event { #[derive(Debug)] pub(crate) enum QueuedTask { CheckIfIndexed(lsp_types::Url), + CheckProcMacroSources(Vec), } #[derive(Debug)] @@ -89,6 +88,7 @@ pub(crate) enum Task { FetchWorkspace(ProjectWorkspaceProgress), FetchBuildData(BuildDataProgress), LoadProcMacros(ProcMacroProgress), + BuildDepsHaveChanged, } #[derive(Debug)] @@ -337,7 +337,7 @@ impl GlobalState { if became_quiescent { if self.config.check_on_save() { // Project has loaded properly, kick off initial flycheck - self.flycheck.iter().for_each(FlycheckHandle::restart_workspace); + self.flycheck.iter().for_each(|flycheck| flycheck.restart_workspace(None)); } if self.config.prefill_caches() { self.prime_caches_queue.request_op("became quiescent".to_owned(), ()); @@ -358,9 +358,7 @@ impl GlobalState { } // Refresh inlay hints if the client supports it. 
- if (self.send_hint_refresh_query || self.proc_macro_changed) - && self.config.inlay_hints_refresh() - { + if self.send_hint_refresh_query && self.config.inlay_hints_refresh() { self.send_request::((), |_, _| ()); self.send_hint_refresh_query = false; } @@ -555,16 +553,7 @@ impl GlobalState { if let Err(e) = self.fetch_workspace_error() { tracing::error!("FetchWorkspaceError:\n{e}"); } - - let old = Arc::clone(&self.workspaces); self.switch_workspaces("fetched workspace".to_owned()); - let workspaces_updated = !Arc::ptr_eq(&old, &self.workspaces); - - if self.config.run_build_scripts() && workspaces_updated { - self.fetch_build_data_queue - .request_op("workspace updated".to_owned(), ()); - } - (Progress::End, None) } }; @@ -608,6 +597,7 @@ impl GlobalState { self.report_progress("Loading", state, msg, None, None); } } + Task::BuildDepsHaveChanged => self.build_deps_changed = true, } } @@ -686,6 +676,25 @@ impl GlobalState { } }); } + QueuedTask::CheckProcMacroSources(modified_rust_files) => { + let crate_graph = self.analysis_host.raw_database().crate_graph(); + let snap = self.snapshot(); + self.task_pool.handle.spawn_with_sender(stdx::thread::ThreadIntent::Worker, { + move |sender| { + if modified_rust_files.into_iter().any(|file_id| { + // FIXME: Check whether these files could be build script related + match snap.analysis.crates_for(file_id) { + Ok(crates) => { + crates.iter().any(|&krate| crate_graph[krate].is_proc_macro) + } + _ => false, + } + }) { + sender.send(Task::BuildDepsHaveChanged).unwrap(); + } + } + }); + } } } diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 7bd2877b00cba..5895459d1fcf8 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -17,8 +17,9 @@ use std::{iter, mem}; use flycheck::{FlycheckConfig, FlycheckHandle}; use hir::{db::DefDatabase, Change, ProcMacros}; +use ide::CrateId; use ide_db::{ - base_db::{salsa::Durability, CrateGraph, ProcMacroPaths}, + base_db::{salsa::Durability, CrateGraph, ProcMacroPaths, Version}, FxHashMap, }; use itertools::Itertools; @@ -28,7 +29,7 @@ use project_model::{ProjectWorkspace, WorkspaceBuildScripts}; use rustc_hash::FxHashSet; use stdx::{format_to, thread::ThreadIntent}; use triomphe::Arc; -use vfs::{AbsPath, ChangeKind}; +use vfs::{AbsPath, AbsPathBuf, ChangeKind}; use crate::{ config::{Config, FilesWatcher, LinkedProject}, @@ -83,7 +84,7 @@ impl GlobalState { } if self.config.linked_or_discovered_projects() != old_config.linked_or_discovered_projects() { - self.fetch_workspaces_queue.request_op("linked projects changed".to_owned(), false) + self.fetch_workspaces_queue.request_op("discovered projects changed".to_owned(), false) } else if self.config.flycheck() != old_config.flycheck() { self.reload_flycheck(); } @@ -106,9 +107,11 @@ impl GlobalState { }; let mut message = String::new(); - if self.proc_macro_changed { + if self.build_deps_changed { status.health = lsp_ext::Health::Warning; - message.push_str("Proc-macros have changed and need to be rebuilt.\n\n"); + message.push_str( + "Proc-macros and/or build scripts have changed and need to be rebuilt.\n\n", + ); } if self.fetch_build_data_error().is_err() { status.health = lsp_ext::Health::Warning; @@ -234,7 +237,6 @@ impl GlobalState { it.clone(), cargo_config.target.as_deref(), &cargo_config.extra_env, - None, )) } }) @@ -300,13 +302,13 @@ impl GlobalState { pub(crate) fn fetch_proc_macros(&mut self, cause: Cause, paths: Vec) { tracing::info!(%cause, "will load proc macros"); - let 
dummy_replacements = self.config.dummy_replacements().clone(); + let ignored_proc_macros = self.config.ignored_proc_macros().clone(); let proc_macro_clients = self.proc_macro_clients.clone(); self.task_pool.handle.spawn_with_sender(ThreadIntent::Worker, move |sender| { sender.send(Task::LoadProcMacros(ProcMacroProgress::Begin)).unwrap(); - let dummy_replacements = &dummy_replacements; + let ignored_proc_macros = &ignored_proc_macros; let progress = { let sender = sender.clone(); &move |msg| { @@ -334,7 +336,12 @@ impl GlobalState { crate_name .as_deref() .and_then(|crate_name| { - dummy_replacements.get(crate_name).map(|v| &**v) + ignored_proc_macros.iter().find_map( + |(name, macros)| { + eq_ignore_underscore(name, crate_name) + .then_some(&**macros) + }, + ) }) .unwrap_or_default(), ) @@ -404,6 +411,10 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } + if self.build_deps_changed && self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); + } // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -415,6 +426,11 @@ impl GlobalState { // we don't care about build-script results, they are stale. // FIXME: can we abort the build scripts here? self.workspaces = Arc::new(workspaces); + + if self.config.run_build_scripts() { + self.build_deps_changed = false; + self.fetch_build_data_queue.request_op("workspace updated".to_owned(), ()); + } } if let FilesWatcher::Client = self.config.files().watcher { @@ -464,8 +480,23 @@ impl GlobalState { None => ws.find_sysroot_proc_macro_srv()?, }; + let env = + match ws { + ProjectWorkspace::Cargo { cargo_config_extra_env, sysroot, .. 
} => { + cargo_config_extra_env + .iter() + .chain(self.config.extra_env()) + .map(|(a, b)| (a.clone(), b.clone())) + .chain(sysroot.as_ref().map(|it| { + ("RUSTUP_TOOLCHAIN".to_owned(), it.root().to_string()) + })) + .collect() + } + _ => Default::default(), + }; tracing::info!("Using proc-macro server at {path}"); - ProcMacroServer::spawn(path.clone()).map_err(|err| { + + ProcMacroServer::spawn(path.clone(), &env).map_err(|err| { tracing::error!( "Failed to run proc-macro server from path {path}, error: {err:?}", ); @@ -494,15 +525,15 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - // Create crate graph from all the workspaces - let (crate_graph, proc_macro_paths, crate_graph_file_dependencies) = { + { + // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; // crate graph construction relies on these paths, record them so when one of them gets // deleted or created we trigger a reconstruction of the crate graph let mut crate_graph_file_dependencies = FxHashSet::default(); - let mut load = |path: &AbsPath| { + let load = |path: &AbsPath| { let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); let vfs_path = vfs::VfsPath::from(path.to_path_buf()); crate_graph_file_dependencies.insert(vfs_path.clone()); @@ -517,32 +548,26 @@ impl GlobalState { } }; - let mut crate_graph = CrateGraph::default(); - let mut proc_macros = Vec::default(); - for ws in &**self.workspaces { - let (other, mut crate_proc_macros) = - ws.to_crate_graph(&mut load, self.config.extra_env()); - crate_graph.extend(other, &mut crate_proc_macros, |_| {}); - proc_macros.push(crate_proc_macros); - } - (crate_graph, proc_macros, crate_graph_file_dependencies) - }; + let (crate_graph, proc_macro_paths, layouts, toolchains) = + ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); + } + change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = crate_graph_file_dependencies; } - change.set_crate_graph(crate_graph); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; self.process_changes(); - self.reload_flycheck(); } @@ -605,6 +630,7 @@ impl GlobalState { 0, Box::new(move |msg| sender.send(msg).unwrap()), config, + None, self.config.root_path().clone(), )], flycheck::InvocationStrategy::PerWorkspace => { @@ -612,23 +638,32 @@ impl GlobalState { .iter() .enumerate() .filter_map(|(id, w)| match w { - ProjectWorkspace::Cargo { cargo, .. } => Some((id, cargo.workspace_root())), - ProjectWorkspace::Json { project, .. } => { + ProjectWorkspace::Cargo { cargo, sysroot, .. } => Some(( + id, + cargo.workspace_root(), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), + )), + ProjectWorkspace::Json { project, sysroot, .. 
} => { // Enable flychecks for json projects if a custom flycheck command was supplied // in the workspace configuration. match config { - FlycheckConfig::CustomCommand { .. } => Some((id, project.path())), + FlycheckConfig::CustomCommand { .. } => Some(( + id, + project.path(), + sysroot.as_ref().ok().map(|sysroot| sysroot.root().to_owned()), + )), _ => None, } } ProjectWorkspace::DetachedFiles { .. } => None, }) - .map(|(id, root)| { + .map(|(id, root, sysroot_root)| { let sender = sender.clone(); FlycheckHandle::spawn( id, Box::new(move |msg| sender.send(msg).unwrap()), config.clone(), + sysroot_root, root.to_path_buf(), ) }) @@ -639,6 +674,69 @@ impl GlobalState { } } +// FIXME: Move this into load-cargo? +pub fn ws_to_crate_graph( + workspaces: &[ProjectWorkspace], + extra_env: &FxHashMap, + mut load: impl FnMut(&AbsPath) -> Option, +) -> ( + CrateGraph, + Vec, AbsPathBuf), String>>>, + Vec, Arc>>, + Vec>, +) { + let mut crate_graph = CrateGraph::default(); + let mut proc_macro_paths = Vec::default(); + let mut layouts = Vec::default(); + let mut toolchains = Vec::default(); + let e = Err(Arc::from("missing layout")); + for ws in workspaces { + let (other, mut crate_proc_macros) = ws.to_crate_graph(&mut load, extra_env); + let num_layouts = layouts.len(); + let num_toolchains = toolchains.len(); + let (toolchain, layout) = match ws { + ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. } => { + (toolchain.clone(), target_layout.clone()) + } + ProjectWorkspace::DetachedFiles { .. } => { + (None, Err("detached files have no layout".into())) + } + }; + + let mapping = crate_graph.extend( + other, + &mut crate_proc_macros, + |(cg_id, cg_data), (_o_id, o_data)| { + // if the newly created crate graph's layout is equal to the crate of the merged graph, then + // we can merge the crates. + let id = cg_id.into_raw().into_u32() as usize; + layouts[id] == layout && toolchains[id] == toolchain && cg_data == o_data + }, + ); + // Populate the side tables for the newly merged crates + mapping.values().for_each(|val| { + let idx = val.into_raw().into_u32() as usize; + // we only need to consider crates that were not merged and remapped, as the + // ones that were remapped already have the correct layout and toolchain + if idx >= num_layouts { + if layouts.len() <= idx { + layouts.resize(idx + 1, e.clone()); + } + layouts[idx] = layout.clone(); + } + if idx >= num_toolchains { + if toolchains.len() <= idx { + toolchains.resize(idx + 1, None); + } + toolchains[idx] = toolchain.clone(); + } + }); + proc_macro_paths.push(crate_proc_macros); + } + (crate_graph, proc_macro_paths, layouts, toolchains) +} + pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) -> bool { const IMPLICIT_TARGET_FILES: &[&str] = &["build.rs", "src/main.rs", "src/lib.rs"]; const IMPLICIT_TARGET_DIRS: &[&str] = &["src/bin", "examples", "tests", "benches"]; @@ -683,3 +781,18 @@ pub(crate) fn should_refresh_for_change(path: &AbsPath, change_kind: ChangeKind) } false } + +/// Similar to [`str::eq_ignore_ascii_case`] but instead of ignoring +/// case, we say that `-` and `_` are equal. 
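+///
+/// For illustration: `eq_ignore_underscore("serde-derive", "serde_derive")`
+/// is `true` because `-` and `_` compare as equal, while
+/// `eq_ignore_underscore("serde", "serde_json")` is `false` because the
+/// lengths differ.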
+fn eq_ignore_underscore(s1: &str, s2: &str) -> bool { + if s1.len() != s2.len() { + return false; + } + + s1.as_bytes().iter().zip(s2.as_bytes()).all(|(c1, c2)| { + let c1_underscore = c1 == &b'_' || c1 == &b'-'; + let c2_underscore = c2 == &b'_' || c2 == &b'-'; + + c1 == c2 || (c1_underscore && c2_underscore) + }) +} diff --git a/crates/rust-analyzer/tests/crate_graph.rs b/crates/rust-analyzer/tests/crate_graph.rs new file mode 100644 index 0000000000000..efd42fadf7e96 --- /dev/null +++ b/crates/rust-analyzer/tests/crate_graph.rs @@ -0,0 +1,118 @@ +use std::path::PathBuf; + +use project_model::{CargoWorkspace, ProjectWorkspace, Sysroot, WorkspaceBuildScripts}; +use rust_analyzer::ws_to_crate_graph; +use rustc_hash::FxHashMap; +use serde::de::DeserializeOwned; +use vfs::{AbsPathBuf, FileId}; + +fn load_cargo_with_fake_sysroot(file: &str) -> ProjectWorkspace { + let meta = get_test_json_file(file); + let cargo_workspace = CargoWorkspace::new(meta); + ProjectWorkspace::Cargo { + cargo: cargo_workspace, + build_scripts: WorkspaceBuildScripts::default(), + sysroot: Ok(get_fake_sysroot()), + rustc: Err(None), + rustc_cfg: Vec::new(), + cfg_overrides: Default::default(), + toolchain: None, + target_layout: Err("target_data_layout not loaded".into()), + cargo_config_extra_env: Default::default(), + } +} + +fn get_test_json_file(file: &str) -> T { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let file = base.join("tests/test_data").join(file); + let data = std::fs::read_to_string(file).unwrap(); + let mut json = data.parse::().unwrap(); + fixup_paths(&mut json); + return serde_json::from_value(json).unwrap(); + + fn fixup_paths(val: &mut serde_json::Value) { + match val { + serde_json::Value::String(s) => replace_root(s, true), + serde_json::Value::Array(vals) => vals.iter_mut().for_each(fixup_paths), + serde_json::Value::Object(kvals) => kvals.values_mut().for_each(fixup_paths), + serde_json::Value::Null | serde_json::Value::Bool(_) | serde_json::Value::Number(_) => { + } + } + } +} + +fn replace_root(s: &mut String, direction: bool) { + if direction { + let root = if cfg!(windows) { r#"C:\\ROOT\"# } else { "/ROOT/" }; + *s = s.replace("$ROOT$", root) + } else { + let root = if cfg!(windows) { r#"C:\\\\ROOT\\"# } else { "/ROOT/" }; + *s = s.replace(root, "$ROOT$") + } +} + +fn get_fake_sysroot_path() -> PathBuf { + let base = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + base.join("../project-model/test_data/fake-sysroot") +} + +fn get_fake_sysroot() -> Sysroot { + let sysroot_path = get_fake_sysroot_path(); + // there's no `libexec/` directory with a `proc-macro-srv` binary in that + // fake sysroot, so we give them both the same path: + let sysroot_dir = AbsPathBuf::assert(sysroot_path); + let sysroot_src_dir = sysroot_dir.clone(); + Sysroot::load(sysroot_dir, Some(Ok(sysroot_src_dir)), false) +} + +#[test] +fn test_deduplicate_origin_dev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + + let (crate_graph, ..) 
= ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} + +#[test] +fn test_deduplicate_origin_dev_rev() { + let path_map = &mut FxHashMap::default(); + let ws = load_cargo_with_fake_sysroot("deduplication_crate_graph_B.json"); + let ws2 = load_cargo_with_fake_sysroot("deduplication_crate_graph_A.json"); + + let (crate_graph, ..) = ws_to_crate_graph(&[ws, ws2], &Default::default(), |path| { + let len = path_map.len(); + Some(*path_map.entry(path.to_path_buf()).or_insert(FileId::from_raw(len as u32))) + }); + + let mut crates_named_p2 = vec![]; + for id in crate_graph.iter() { + let krate = &crate_graph[id]; + if let Some(name) = krate.display_name.as_ref() { + if name.to_string() == "p2" { + crates_named_p2.push(krate); + } + } + } + + assert!(crates_named_p2.len() == 1); + let p2 = crates_named_p2[0]; + assert!(p2.origin.is_local()); +} diff --git a/crates/rust-analyzer/tests/slow-tests/main.rs b/crates/rust-analyzer/tests/slow-tests/main.rs index 79ae0c30cfc4c..960f5b531d44f 100644 --- a/crates/rust-analyzer/tests/slow-tests/main.rs +++ b/crates/rust-analyzer/tests/slow-tests/main.rs @@ -911,20 +911,18 @@ fn root_contains_symlink_out_dirs_check() { #[cfg(any(feature = "sysroot-abi", rust_analyzer))] fn resolve_proc_macro() { use expect_test::expect; + use vfs::AbsPathBuf; if skip_slow_tests() { return; } - // skip using the sysroot config as to prevent us from loading the sysroot sources - let mut rustc = std::process::Command::new(toolchain::rustc()); - rustc.args(["--print", "sysroot"]); - let output = rustc.output().unwrap(); - let sysroot = - vfs::AbsPathBuf::try_from(std::str::from_utf8(&output.stdout).unwrap().trim()).unwrap(); + let sysroot = project_model::Sysroot::discover_no_source( + &AbsPathBuf::assert(std::env::current_dir().unwrap()), + &Default::default(), + ) + .unwrap(); - let standalone_server_name = - format!("rust-analyzer-proc-macro-srv{}", std::env::consts::EXE_SUFFIX); - let proc_macro_server_path = sysroot.join("libexec").join(&standalone_server_name); + let proc_macro_server_path = sysroot.discover_proc_macro_srv().unwrap(); let server = Project::with_fixture( r###" diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index d02cb45b8e35f..392a71702070e 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -101,8 +101,13 @@ impl Project<'_> { }; }); - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(self.fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse(self.fixture); assert!(proc_macro_names.is_empty()); assert!(mini_core.is_none()); assert!(toolchain.is_none()); diff --git a/crates/rust-analyzer/tests/slow-tests/tidy.rs b/crates/rust-analyzer/tests/slow-tests/tidy.rs index 3e38fc3ebcd7a..78da4487d4c9e 100644 --- a/crates/rust-analyzer/tests/slow-tests/tidy.rs +++ b/crates/rust-analyzer/tests/slow-tests/tidy.rs @@ 
-9,27 +9,6 @@ use xshell::Shell; #[cfg(not(feature = "in-rust-tree"))] use xshell::cmd; -#[cfg(not(feature = "in-rust-tree"))] -#[test] -fn check_code_formatting() { - let sh = &Shell::new().unwrap(); - sh.change_dir(sourcegen::project_root()); - - let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap(); - if !out.contains("stable") { - panic!( - "Failed to run rustfmt from toolchain 'stable'. \ - Please run `rustup component add rustfmt --toolchain stable` to install it.", - ) - } - - let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run(); - if res.is_err() { - let _ = cmd!(sh, "rustup run stable cargo fmt").run(); - } - res.unwrap() -} - #[test] fn check_lsp_extensions_docs() { let sh = &Shell::new().unwrap(); diff --git a/crates/project-model/test_data/deduplication_crate_graph_A.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_A.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_A.json diff --git a/crates/project-model/test_data/deduplication_crate_graph_B.json b/crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json similarity index 100% rename from crates/project-model/test_data/deduplication_crate_graph_B.json rename to crates/rust-analyzer/tests/test_data/deduplication_crate_graph_B.json diff --git a/crates/salsa/src/doctest.rs b/crates/salsa/src/doctest.rs deleted file mode 100644 index 29a80663567fe..0000000000000 --- a/crates/salsa/src/doctest.rs +++ /dev/null @@ -1,115 +0,0 @@ -//! -#![allow(dead_code)] - -/// Test that a database with a key/value that is not `Send` will, -/// indeed, not be `Send`. -/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Rc; -/// fn no_send_sync_key(&self, key: Rc) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Rc { -/// Rc::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Rc) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// storage: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_send_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will not -/// be `Send`. 
-/// -/// ```compile_fail,E0277 -/// use std::rc::Rc; -/// use std::cell::Cell; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_send(_: T) { } -/// -/// fn assert_send() { -/// is_send(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_send() {} - -/// Test that a database with a key/value that is not `Sync` will -/// not be `Sync`. -/// -/// ```compile_fail,E0277 -/// use std::cell::Cell; -/// use std::rc::Rc; -/// -/// #[salsa::query_group(NoSendSyncStorage)] -/// trait NoSendSyncDatabase: salsa::Database { -/// fn no_send_sync_value(&self, key: bool) -> Cell; -/// fn no_send_sync_key(&self, key: Cell) -> bool; -/// } -/// -/// fn no_send_sync_value(_db: &dyn NoSendSyncDatabase, key: bool) -> Cell { -/// Cell::new(key) -/// } -/// -/// fn no_send_sync_key(_db: &dyn NoSendSyncDatabase, key: Cell) -> bool { -/// *key -/// } -/// -/// #[salsa::database(NoSendSyncStorage)] -/// #[derive(Default)] -/// struct DatabaseImpl { -/// runtime: salsa::Storage, -/// } -/// -/// impl salsa::Database for DatabaseImpl { -/// } -/// -/// fn is_sync(_: T) { } -/// -/// fn assert_send() { -/// is_sync(DatabaseImpl::default()); -/// } -/// ``` -fn test_key_not_sync_db_not_sync() {} diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 2d58beafb2a0b..668dcfd925d8d 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -11,7 +11,6 @@ //! from previous invocations as appropriate. mod derived; -mod doctest; mod durability; mod hash; mod input; diff --git a/crates/syntax/rust.ungram b/crates/syntax/rust.ungram index c3010d090c6b9..c3d8e97c436cc 100644 --- a/crates/syntax/rust.ungram +++ b/crates/syntax/rust.ungram @@ -367,6 +367,7 @@ Expr = | RecordExpr | RefExpr | ReturnExpr +| BecomeExpr | TryExpr | TupleExpr | WhileExpr @@ -528,6 +529,9 @@ MatchGuard = ReturnExpr = Attr* 'return' Expr? +BecomeExpr = + Attr* 'become' Expr + YieldExpr = Attr* 'yield' Expr? @@ -610,7 +614,7 @@ TypeBoundList = TypeBound = Lifetime -| ('?' | '~' 'const')? Type +| ('~' 'const' | 'const')? 'async'? '?'? 
Type //************************// // Patterns // diff --git a/crates/syntax/src/ast/edit_in_place.rs b/crates/syntax/src/ast/edit_in_place.rs index bc9c54d0b73ec..41d33c457ce70 100644 --- a/crates/syntax/src/ast/edit_in_place.rs +++ b/crates/syntax/src/ast/edit_in_place.rs @@ -1007,20 +1007,24 @@ impl ast::IdentPat { } pub trait HasVisibilityEdit: ast::HasVisibility { - fn set_visibility(&self, visibility: ast::Visibility) { - match self.visibility() { - Some(current_visibility) => { - ted::replace(current_visibility.syntax(), visibility.syntax()) - } - None => { - let vis_before = self - .syntax() - .children_with_tokens() - .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) - .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); - - ted::insert(ted::Position::before(vis_before), visibility.syntax()); + fn set_visibility(&self, visibility: Option) { + if let Some(visibility) = visibility { + match self.visibility() { + Some(current_visibility) => { + ted::replace(current_visibility.syntax(), visibility.syntax()) + } + None => { + let vis_before = self + .syntax() + .children_with_tokens() + .find(|it| !matches!(it.kind(), WHITESPACE | COMMENT | ATTR)) + .unwrap_or_else(|| self.syntax().first_child_or_token().unwrap()); + + ted::insert(ted::Position::before(vis_before), visibility.syntax()); + } } + } else if let Some(visibility) = self.visibility() { + ted::remove(visibility.syntax()); } } } diff --git a/crates/syntax/src/ast/generated/nodes.rs b/crates/syntax/src/ast/generated/nodes.rs index 6c86e591044a8..75971861aa80e 100644 --- a/crates/syntax/src/ast/generated/nodes.rs +++ b/crates/syntax/src/ast/generated/nodes.rs @@ -1095,6 +1095,16 @@ impl ReturnExpr { pub fn expr(&self) -> Option { support::child(&self.syntax) } } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +pub struct BecomeExpr { + pub(crate) syntax: SyntaxNode, +} +impl ast::HasAttrs for BecomeExpr {} +impl BecomeExpr { + pub fn become_token(&self) -> Option { support::token(&self.syntax, T![become]) } + pub fn expr(&self) -> Option { support::child(&self.syntax) } +} + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TryExpr { pub(crate) syntax: SyntaxNode, @@ -1400,9 +1410,10 @@ pub struct TypeBound { } impl TypeBound { pub fn lifetime(&self) -> Option { support::child(&self.syntax) } - pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn tilde_token(&self) -> Option { support::token(&self.syntax, T![~]) } pub fn const_token(&self) -> Option { support::token(&self.syntax, T![const]) } + pub fn async_token(&self) -> Option { support::token(&self.syntax, T![async]) } + pub fn question_mark_token(&self) -> Option { support::token(&self.syntax, T![?]) } pub fn ty(&self) -> Option { support::child(&self.syntax) } } @@ -1633,6 +1644,7 @@ pub enum Expr { RecordExpr(RecordExpr), RefExpr(RefExpr), ReturnExpr(ReturnExpr), + BecomeExpr(BecomeExpr), TryExpr(TryExpr), TupleExpr(TupleExpr), WhileExpr(WhileExpr), @@ -2792,6 +2804,17 @@ impl AstNode for ReturnExpr { } fn syntax(&self) -> &SyntaxNode { &self.syntax } } +impl AstNode for BecomeExpr { + fn can_cast(kind: SyntaxKind) -> bool { kind == BECOME_EXPR } + fn cast(syntax: SyntaxNode) -> Option { + if Self::can_cast(syntax.kind()) { + Some(Self { syntax }) + } else { + None + } + } + fn syntax(&self) -> &SyntaxNode { &self.syntax } +} impl AstNode for TryExpr { fn can_cast(kind: SyntaxKind) -> bool { kind == TRY_EXPR } fn cast(syntax: SyntaxNode) -> Option { @@ -3540,6 +3563,9 @@ impl From for Expr { impl From for 
Expr { fn from(node: ReturnExpr) -> Expr { Expr::ReturnExpr(node) } } +impl From for Expr { + fn from(node: BecomeExpr) -> Expr { Expr::BecomeExpr(node) } +} impl From for Expr { fn from(node: TryExpr) -> Expr { Expr::TryExpr(node) } } @@ -3593,6 +3619,7 @@ impl AstNode for Expr { | RECORD_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -3632,6 +3659,7 @@ impl AstNode for Expr { RECORD_EXPR => Expr::RecordExpr(RecordExpr { syntax }), REF_EXPR => Expr::RefExpr(RefExpr { syntax }), RETURN_EXPR => Expr::ReturnExpr(ReturnExpr { syntax }), + BECOME_EXPR => Expr::BecomeExpr(BecomeExpr { syntax }), TRY_EXPR => Expr::TryExpr(TryExpr { syntax }), TUPLE_EXPR => Expr::TupleExpr(TupleExpr { syntax }), WHILE_EXPR => Expr::WhileExpr(WhileExpr { syntax }), @@ -3673,6 +3701,7 @@ impl AstNode for Expr { Expr::RecordExpr(it) => &it.syntax, Expr::RefExpr(it) => &it.syntax, Expr::ReturnExpr(it) => &it.syntax, + Expr::BecomeExpr(it) => &it.syntax, Expr::TryExpr(it) => &it.syntax, Expr::TupleExpr(it) => &it.syntax, Expr::WhileExpr(it) => &it.syntax, @@ -4150,6 +4179,7 @@ impl AstNode for AnyHasAttrs { | RANGE_EXPR | REF_EXPR | RETURN_EXPR + | BECOME_EXPR | TRY_EXPR | TUPLE_EXPR | WHILE_EXPR @@ -4851,6 +4881,11 @@ impl std::fmt::Display for ReturnExpr { std::fmt::Display::fmt(self.syntax(), f) } } +impl std::fmt::Display for BecomeExpr { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + std::fmt::Display::fmt(self.syntax(), f) + } +} impl std::fmt::Display for TryExpr { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) diff --git a/crates/syntax/src/ast/make.rs b/crates/syntax/src/ast/make.rs index 120d801c8d17e..02246fc3291d3 100644 --- a/crates/syntax/src/ast/make.rs +++ b/crates/syntax/src/ast/make.rs @@ -1147,7 +1147,7 @@ pub mod tokens { pub(super) static SOURCE_FILE: Lazy> = Lazy::new(|| { SourceFile::parse( - "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\n", + "const C: <()>::Item = ( true && true , true || true , 1 != 1, 2 == 2, 3 < 3, 4 <= 4, 5 > 5, 6 >= 6, !true, *p, &p , &mut p, { let a @ [] })\n;\n\nimpl A for B where: {}", ) }); diff --git a/crates/syntax/src/ast/node_ext.rs b/crates/syntax/src/ast/node_ext.rs index 6e5e4127f4d43..1bc1ef8434fc7 100644 --- a/crates/syntax/src/ast/node_ext.rs +++ b/crates/syntax/src/ast/node_ext.rs @@ -569,6 +569,26 @@ impl fmt::Display for NameOrNameRef { } } +impl ast::AstNode for NameOrNameRef { + fn can_cast(kind: SyntaxKind) -> bool { + matches!(kind, SyntaxKind::NAME | SyntaxKind::NAME_REF) + } + fn cast(syntax: SyntaxNode) -> Option { + let res = match syntax.kind() { + SyntaxKind::NAME => NameOrNameRef::Name(ast::Name { syntax }), + SyntaxKind::NAME_REF => NameOrNameRef::NameRef(ast::NameRef { syntax }), + _ => return None, + }; + Some(res) + } + fn syntax(&self) -> &SyntaxNode { + match self { + NameOrNameRef::NameRef(it) => it.syntax(), + NameOrNameRef::Name(it) => it.syntax(), + } + } +} + impl NameOrNameRef { pub fn text(&self) -> TokenText<'_> { match self { diff --git a/crates/syntax/src/ast/prec.rs b/crates/syntax/src/ast/prec.rs index 9ddf5a0a9804c..9131cd2f17993 100644 --- a/crates/syntax/src/ast/prec.rs +++ b/crates/syntax/src/ast/prec.rs @@ -130,8 +130,8 @@ impl Expr { // ContinueExpr(_) => (0, 0), - ClosureExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | BreakExpr(_) - | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 
1), + ClosureExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | BreakExpr(_) | OffsetOfExpr(_) | FormatArgsExpr(_) | AsmExpr(_) => (0, 1), RangeExpr(_) => (5, 5), @@ -288,6 +288,7 @@ impl Expr { PrefixExpr(e) => e.op_token(), RefExpr(e) => e.amp_token(), ReturnExpr(e) => e.return_token(), + BecomeExpr(e) => e.become_token(), TryExpr(e) => e.question_mark_token(), YieldExpr(e) => e.yield_token(), YeetExpr(e) => e.do_token(), @@ -316,7 +317,8 @@ impl Expr { // For BinExpr and RangeExpr this is technically wrong -- the child can be on the left... BinExpr(_) | RangeExpr(_) | BreakExpr(_) | ContinueExpr(_) | PrefixExpr(_) - | RefExpr(_) | ReturnExpr(_) | YieldExpr(_) | YeetExpr(_) | LetExpr(_) => self + | RefExpr(_) | ReturnExpr(_) | BecomeExpr(_) | YieldExpr(_) | YeetExpr(_) + | LetExpr(_) => self .syntax() .parent() .and_then(Expr::cast) diff --git a/crates/syntax/src/lib.rs b/crates/syntax/src/lib.rs index 960889b742112..b755de86d32c5 100644 --- a/crates/syntax/src/lib.rs +++ b/crates/syntax/src/lib.rs @@ -27,11 +27,6 @@ extern crate ra_ap_rustc_lexer as rustc_lexer; #[cfg(feature = "in-rust-tree")] extern crate rustc_lexer; -#[allow(unused)] -macro_rules! eprintln { - ($($tt:tt)*) => { stdx::eprintln!($($tt)*) }; -} - mod parsing; mod ptr; mod syntax_error; diff --git a/crates/syntax/src/tests/ast_src.rs b/crates/syntax/src/tests/ast_src.rs index 341bda892ba1c..8221c577892de 100644 --- a/crates/syntax/src/tests/ast_src.rs +++ b/crates/syntax/src/tests/ast_src.rs @@ -67,8 +67,9 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { keywords: &[ "as", "async", "await", "box", "break", "const", "continue", "crate", "do", "dyn", "else", "enum", "extern", "false", "fn", "for", "if", "impl", "in", "let", "loop", "macro", - "match", "mod", "move", "mut", "pub", "ref", "return", "self", "Self", "static", "struct", - "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", "yield", + "match", "mod", "move", "mut", "pub", "ref", "return", "become", "self", "Self", "static", + "struct", "super", "trait", "true", "try", "type", "unsafe", "use", "where", "while", + "yield", ], contextual_keywords: &[ "auto", @@ -154,6 +155,7 @@ pub(crate) const KINDS_SRC: KindsSrc<'_> = KindsSrc { "BLOCK_EXPR", "STMT_LIST", "RETURN_EXPR", + "BECOME_EXPR", "YIELD_EXPR", "YEET_EXPR", "LET_EXPR", diff --git a/crates/test-fixture/src/lib.rs b/crates/test-fixture/src/lib.rs index 28e757e81bb2c..e118262b4edd9 100644 --- a/crates/test-fixture/src/lib.rs +++ b/crates/test-fixture/src/lib.rs @@ -1,10 +1,9 @@ //! A set of high-level utility fixture methods to use in tests. 
-use std::{mem, ops::Not, str::FromStr, sync}; +use std::{iter, mem, ops::Not, str::FromStr, sync}; use base_db::{ - CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, DependencyKind, - Edition, Env, FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, - VfsPath, + CrateDisplayName, CrateGraph, CrateId, CrateName, CrateOrigin, Dependency, Edition, Env, + FileChange, FileSet, LangCrateOrigin, SourceDatabaseExt, SourceRoot, Version, VfsPath, }; use cfg::CfgOptions; use hir_expand::{ @@ -118,8 +117,14 @@ impl ChangeFixture { ra_fixture: &str, mut proc_macro_defs: Vec<(String, ProcMacro)>, ) -> ChangeFixture { - let FixtureWithProjectMeta { fixture, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse(ra_fixture); + let FixtureWithProjectMeta { + fixture, + mini_core, + proc_macro_names, + toolchain, + target_data_layout, + } = FixtureWithProjectMeta::parse(ra_fixture); + let target_data_layout = Ok(target_data_layout.into()); let toolchain = Some({ let channel = toolchain.as_deref().unwrap_or("stable"); Version::parse(&format!("1.76.0-{channel}")).unwrap() @@ -131,7 +136,6 @@ impl ChangeFixture { let mut crates = FxHashMap::default(); let mut crate_deps = Vec::new(); let mut default_crate_root: Option = None; - let mut default_target_data_layout: Option = None; let mut default_cfg = CfgOptions::default(); let mut default_env = Env::new_for_test_fixture(); @@ -187,11 +191,6 @@ impl ChangeFixture { meta.env, false, origin, - meta.target_data_layout - .as_deref() - .map(From::from) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); let prev = crates.insert(crate_name.clone(), crate_id); assert!(prev.is_none(), "multiple crates with same name: {}", crate_name); @@ -205,7 +204,6 @@ impl ChangeFixture { default_crate_root = Some(file_id); default_cfg.extend(meta.cfg.into_iter()); default_env.extend(meta.env.iter().map(|(x, y)| (x.to_owned(), y.to_owned()))); - default_target_data_layout = meta.target_data_layout; } source_change.change_file(file_id, Some(text.into())); @@ -228,10 +226,6 @@ impl ChangeFixture { default_env, false, CrateOrigin::Local { repo: None, name: None }, - default_target_data_layout - .map(|it| it.into()) - .ok_or_else(|| "target_data_layout unset".into()), - toolchain.clone(), ); } else { for (from, to, prelude) in crate_deps { @@ -240,20 +234,11 @@ impl ChangeFixture { crate_graph .add_dep( from_id, - Dependency::with_prelude( - CrateName::new(&to).unwrap(), - to_id, - prelude, - DependencyKind::Normal, - ), + Dependency::with_prelude(CrateName::new(&to).unwrap(), to_id, prelude), ) .unwrap(); } } - let target_layout = crate_graph.iter().next().map_or_else( - || Err("target_data_layout unset".into()), - |it| crate_graph[it].target_layout.clone(), - ); if let Some(mini_core) = mini_core { let core_file = file_id; @@ -277,20 +262,11 @@ impl ChangeFixture { Env::new_for_test_fixture(), false, CrateOrigin::Lang(LangCrateOrigin::Core), - target_layout.clone(), - toolchain.clone(), ); for krate in all_crates { crate_graph - .add_dep( - krate, - Dependency::new( - CrateName::new("core").unwrap(), - core_crate, - DependencyKind::Normal, - ), - ) + .add_dep(krate, Dependency::new(CrateName::new("core").unwrap(), core_crate)) .unwrap(); } } @@ -322,8 +298,6 @@ impl ChangeFixture { Env::new_for_test_fixture(), true, CrateOrigin::Local { repo: None, name: None }, - target_layout, - toolchain, ); proc_macros.insert(proc_macros_crate, Ok(proc_macro)); @@ -331,11 +305,7 @@ impl ChangeFixture { 
crate_graph .add_dep( krate, - Dependency::new( - CrateName::new("proc_macros").unwrap(), - proc_macros_crate, - DependencyKind::Normal, - ), + Dependency::new(CrateName::new("proc_macros").unwrap(), proc_macros_crate), ) .unwrap(); } @@ -346,17 +316,20 @@ impl ChangeFixture { SourceRootKind::Library => SourceRoot::new_library(mem::take(&mut file_set)), }; roots.push(root); - source_change.set_roots(roots); - source_change.set_crate_graph(crate_graph); - - ChangeFixture { - file_position, - files, - change: Change { - source_change, - proc_macros: proc_macros.is_empty().not().then_some(proc_macros), - }, - } + + let mut change = Change { + source_change, + proc_macros: proc_macros.is_empty().not().then_some(proc_macros), + toolchains: Some(iter::repeat(toolchain).take(crate_graph.len()).collect()), + target_data_layouts: Some( + iter::repeat(target_data_layout).take(crate_graph.len()).collect(), + ), + }; + + change.source_change.set_roots(roots); + change.source_change.set_crate_graph(crate_graph); + + ChangeFixture { file_position, files, change } } } @@ -374,6 +347,7 @@ pub fn identity(_attr: TokenStream, item: TokenStream) -> TokenStream { name: "identity".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -388,6 +362,7 @@ pub fn derive_identity(item: TokenStream) -> TokenStream { name: "DeriveIdentity".into(), kind: ProcMacroKind::CustomDerive, expander: sync::Arc::new(IdentityProcMacroExpander), + disabled: false, }, ), ( @@ -402,6 +377,7 @@ pub fn input_replace(attr: TokenStream, _item: TokenStream) -> TokenStream { name: "input_replace".into(), kind: ProcMacroKind::Attr, expander: sync::Arc::new(AttributeInputReplaceProcMacroExpander), + disabled: false, }, ), ( @@ -416,6 +392,7 @@ pub fn mirror(input: TokenStream) -> TokenStream { name: "mirror".into(), kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(MirrorProcMacroExpander), + disabled: false, }, ), ( @@ -430,6 +407,7 @@ pub fn shorten(input: TokenStream) -> TokenStream { name: "shorten".into(), kind: ProcMacroKind::FuncLike, expander: sync::Arc::new(ShortenProcMacroExpander), + disabled: false, }, ), ] @@ -470,7 +448,6 @@ struct FileMeta { edition: Edition, env: Env, introduce_new_source_root: Option, - target_data_layout: Option, } impl FileMeta { @@ -502,7 +479,6 @@ impl FileMeta { edition: f.edition.map_or(Edition::CURRENT, |v| Edition::from_str(&v).unwrap()), env: f.env.into_iter().collect(), introduce_new_source_root, - target_data_layout: f.target_data_layout, } } } diff --git a/crates/test-utils/src/fixture.rs b/crates/test-utils/src/fixture.rs index 595281336d582..7e34c36189950 100644 --- a/crates/test-utils/src/fixture.rs +++ b/crates/test-utils/src/fixture.rs @@ -126,11 +126,6 @@ pub struct Fixture { /// /// Syntax: `library` pub library: bool, - /// Specifies LLVM data layout to be used. - /// - /// You probably don't want to manually specify this. See LLVM manual for the - /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout - pub target_data_layout: Option, /// Actual file contents. All meta comments are stripped. pub text: String, } @@ -145,6 +140,11 @@ pub struct FixtureWithProjectMeta { pub mini_core: Option, pub proc_macro_names: Vec, pub toolchain: Option, + /// Specifies LLVM data layout to be used. + /// + /// You probably don't want to manually specify this. 
See LLVM manual for the + /// syntax, if you must: https://llvm.org/docs/LangRef.html#data-layout + pub target_data_layout: String, } impl FixtureWithProjectMeta { @@ -172,6 +172,8 @@ impl FixtureWithProjectMeta { let fixture = trim_indent(ra_fixture); let mut fixture = fixture.as_str(); let mut toolchain = None; + let mut target_data_layout = + "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(); let mut mini_core = None; let mut res: Vec = Vec::new(); let mut proc_macro_names = vec![]; @@ -182,6 +184,12 @@ impl FixtureWithProjectMeta { fixture = remain; } + if let Some(meta) = fixture.strip_prefix("//- target_data_layout:") { + let (meta, remain) = meta.split_once('\n').unwrap(); + target_data_layout = meta.trim().to_owned(); + fixture = remain; + } + if let Some(meta) = fixture.strip_prefix("//- proc_macros:") { let (meta, remain) = meta.split_once('\n').unwrap(); proc_macro_names = meta.split(',').map(|it| it.trim().to_owned()).collect(); @@ -225,7 +233,7 @@ impl FixtureWithProjectMeta { } } - Self { fixture: res, mini_core, proc_macro_names, toolchain } + Self { fixture: res, mini_core, proc_macro_names, toolchain, target_data_layout } } //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b env:OUTDIR=path/to,OTHER=foo @@ -245,9 +253,6 @@ impl FixtureWithProjectMeta { let mut env = FxHashMap::default(); let mut introduce_new_source_root = None; let mut library = false; - let mut target_data_layout = Some( - "e-m:e-p270:32:32-p271:32:32-p272:64:64-i64:64-f80:128-n8:16:32:64-S128".to_owned(), - ); for component in components { if component == "library" { library = true; @@ -284,7 +289,6 @@ impl FixtureWithProjectMeta { } } "new_source_root" => introduce_new_source_root = Some(value.to_owned()), - "target_data_layout" => target_data_layout = Some(value.to_owned()), _ => panic!("bad component: {component:?}"), } } @@ -307,7 +311,6 @@ impl FixtureWithProjectMeta { env, introduce_new_source_root, library, - target_data_layout, } } } @@ -476,16 +479,21 @@ fn parse_fixture_checks_further_indented_metadata() { #[test] fn parse_fixture_gets_full_meta() { - let FixtureWithProjectMeta { fixture: parsed, mini_core, proc_macro_names, toolchain } = - FixtureWithProjectMeta::parse( - r#" + let FixtureWithProjectMeta { + fixture: parsed, + mini_core, + proc_macro_names, + toolchain, + target_data_layout: _, + } = FixtureWithProjectMeta::parse( + r#" //- toolchain: nightly //- proc_macros: identity //- minicore: coerce_unsized //- /lib.rs crate:foo deps:bar,baz cfg:foo=a,bar=b,atom env:OUTDIR=path/to,OTHER=foo mod m; "#, - ); + ); assert_eq!(toolchain, Some("nightly".to_owned())); assert_eq!(proc_macro_names, vec!["identity".to_owned()]); assert_eq!(mini_core.unwrap().activated_flags, vec!["coerce_unsized".to_owned()]); diff --git a/crates/test-utils/src/minicore.rs b/crates/test-utils/src/minicore.rs index 23a3a7e0afa49..f125792d12587 100644 --- a/crates/test-utils/src/minicore.rs +++ b/crates/test-utils/src/minicore.rs @@ -60,6 +60,8 @@ //! try: infallible //! unpin: sized //! unsize: sized +//! todo: panic +//! 
unimplemented: panic #![rustc_coherence_is_core] @@ -927,6 +929,10 @@ pub mod fmt { use crate::mem::transmute; unsafe { Argument { formatter: transmute(f), value: transmute(x) } } } + + pub fn new_display<'b, T: Display>(x: &'b T) -> Argument<'_> { + Self::new(x, Display::fmt) + } } #[lang = "format_alignment"] @@ -1438,6 +1444,33 @@ mod macros { // endregion:fmt + // region:todo + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! todo { + () => { + $crate::panicking::panic("not yet implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not yet implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:todo + + // region:unimplemented + #[macro_export] + #[allow_internal_unstable(core_panic)] + macro_rules! unimplemented { + () => { + $crate::panicking::panic("not implemented") + }; + ($($arg:tt)+) => { + $crate::panic!("not implemented: {}", $crate::format_args!($($arg)+)) + }; + } + // endregion:unimplemented + + // region:derive pub(crate) mod builtin { #[rustc_builtin_macro] diff --git a/crates/toolchain/src/lib.rs b/crates/toolchain/src/lib.rs index 997f339edc4d7..ae71b6700c0b2 100644 --- a/crates/toolchain/src/lib.rs +++ b/crates/toolchain/src/lib.rs @@ -2,7 +2,41 @@ #![warn(rust_2018_idioms, unused_lifetimes)] -use std::{env, iter, path::PathBuf}; +use std::{ + env, iter, + path::{Path, PathBuf}, +}; + +#[derive(Copy, Clone)] +pub enum Tool { + Cargo, + Rustc, + Rustup, + Rustfmt, +} + +impl Tool { + pub fn path(self) -> PathBuf { + get_path_for_executable(self.name()) + } + + pub fn path_in(self, path: &Path) -> Option { + probe_for_binary(path.join(self.name())) + } + + pub fn path_in_or_discover(self, path: &Path) -> PathBuf { + probe_for_binary(path.join(self.name())).unwrap_or_else(|| self.path()) + } + + pub fn name(self) -> &'static str { + match self { + Tool::Cargo => "cargo", + Tool::Rustc => "rustc", + Tool::Rustup => "rustup", + Tool::Rustfmt => "rustfmt", + } + } +} pub fn cargo() -> PathBuf { get_path_for_executable("cargo") @@ -47,7 +81,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { if let Some(mut path) = get_cargo_home() { path.push("bin"); path.push(executable_name); - if let Some(path) = probe(path) { + if let Some(path) = probe_for_binary(path) { return path; } } @@ -57,7 +91,7 @@ fn get_path_for_executable(executable_name: &'static str) -> PathBuf { fn lookup_in_path(exec: &str) -> bool { let paths = env::var_os("PATH").unwrap_or_default(); - env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe).is_some() + env::split_paths(&paths).map(|path| path.join(exec)).find_map(probe_for_binary).is_some() } fn get_cargo_home() -> Option { @@ -73,7 +107,7 @@ fn get_cargo_home() -> Option { None } -fn probe(path: PathBuf) -> Option { +pub fn probe_for_binary(path: PathBuf) -> Option { let with_extension = match env::consts::EXE_EXTENSION { "" => None, it => Some(path.with_extension(it)), diff --git a/crates/tt/src/lib.rs b/crates/tt/src/lib.rs index 9004bff53a808..eec88f80688c7 100644 --- a/crates/tt/src/lib.rs +++ b/crates/tt/src/lib.rs @@ -152,6 +152,7 @@ pub struct Punct { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum Spacing { Alone, + /// Whether the following token is joint to the current one. 
Joint, } diff --git a/docs/user/generated_config.adoc b/docs/user/generated_config.adoc index a86ef709411cc..da7654b0f6447 100644 --- a/docs/user/generated_config.adoc +++ b/docs/user/generated_config.adoc @@ -71,7 +71,7 @@ cargo check --quiet --workspace --message-format=json --all-targets ``` . -- -[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `false`):: +[[rust-analyzer.cargo.buildScripts.rebuildOnSave]]rust-analyzer.cargo.buildScripts.rebuildOnSave (default: `true`):: + -- Rerun proc-macros building/build-scripts running when proc-macro @@ -234,6 +234,11 @@ each of them, with the working directory being the workspace root by changing `#rust-analyzer.check.invocationStrategy#` and `#rust-analyzer.check.invocationLocation#`. +If `$saved_file` is part of the command, rust-analyzer will pass +the absolute path of the saved file to the provided command. This is +intended to be used with non-Cargo build systems. +Note that `$saved_file` is experimental and may be removed in the futureg. + An example command would be: ```bash @@ -343,6 +348,11 @@ Default: ---- Custom completion snippets. +-- +[[rust-analyzer.completion.termSearch.enable]]rust-analyzer.completion.termSearch.enable (default: `false`):: ++ +-- +Whether to enable term search based snippets like `Some(foo.bar().baz())`. -- [[rust-analyzer.diagnostics.disabled]]rust-analyzer.diagnostics.disabled (default: `[]`):: + @@ -793,11 +803,6 @@ Exclude imports from find-all-references. -- Exclude tests from find-all-references. -- -[[rust-analyzer.rename.allowExternalItems]]rust-analyzer.rename.allowExternalItems (default: `false`):: -+ --- -Allow renaming of items not belonging to the loaded workspaces. --- [[rust-analyzer.runnables.command]]rust-analyzer.runnables.command (default: `null`):: + -- diff --git a/editors/code/.vscodeignore b/editors/code/.vscodeignore index 09dc27056b37a..5c48205694fe9 100644 --- a/editors/code/.vscodeignore +++ b/editors/code/.vscodeignore @@ -12,3 +12,6 @@ !ra_syntax_tree.tmGrammar.json !server !README.md +!language-configuration-rustdoc.json +!rustdoc-inject.json +!rustdoc.json diff --git a/editors/code/language-configuration-rustdoc.json b/editors/code/language-configuration-rustdoc.json new file mode 100644 index 0000000000000..c905d3b60674e --- /dev/null +++ b/editors/code/language-configuration-rustdoc.json @@ -0,0 +1,37 @@ +{ + "comments": { + "blockComment": [""] + }, + "brackets": [ + ["{", "}"], + ["[", "]"], + ["(", ")"] + ], + "colorizedBracketPairs": [], + "autoClosingPairs": [ + { "open": "{", "close": "}" }, + { "open": "[", "close": "]" }, + { "open": "(", "close": ")" } + ], + "surroundingPairs": [ + ["(", ")"], + ["[", "]"], + ["`", "`"], + ["_", "_"], + ["*", "*"], + ["{", "}"], + ["'", "'"], + ["\"", "\""] + ], + "folding": { + "offSide": true, + "markers": { + "start": "^\\s*", + "end": "^\\s*" + } + }, + "wordPattern": { + "pattern": "(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})(((\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark})|[_])?(\\p{Alphabetic}|\\p{Number}|\\p{Nonspacing_Mark}))*", + "flags": "ug" + } +} diff --git a/editors/code/package.json b/editors/code/package.json index b474471e5a4b3..3a1df5a2f901c 100644 --- a/editors/code/package.json +++ b/editors/code/package.json @@ -68,7 +68,9 @@ "typescript": "^5.1.6" }, "activationEvents": [ + "workspaceContains:Cargo.toml", "workspaceContains:*/Cargo.toml", + "workspaceContains:rust-project.json", "workspaceContains:*/rust-project.json" ], "main": "./out/main", @@ 
-588,7 +590,7 @@ }, "rust-analyzer.cargo.buildScripts.rebuildOnSave": { "markdownDescription": "Rerun proc-macros building/build-scripts running when proc-macro\nor build-script sources change and are saved.", - "default": false, + "default": true, "type": "boolean" }, "rust-analyzer.cargo.buildScripts.useRustcWrapper": { @@ -775,7 +777,7 @@ ] }, "rust-analyzer.check.overrideCommand": { - "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", + "markdownDescription": "Override the command rust-analyzer uses instead of `cargo check` for\ndiagnostics on save. The command is required to output json and\nshould therefore include `--message-format=json` or a similar option\n(if your client supports the `colorDiagnosticOutput` experimental\ncapability, you can use `--message-format=json-diagnostic-rendered-ansi`).\n\nIf you're changing this because you're using some tool wrapping\nCargo, you might also want to change\n`#rust-analyzer.cargo.buildScripts.overrideCommand#`.\n\nIf there are multiple linked projects/workspaces, this command is invoked for\neach of them, with the working directory being the workspace root\n(i.e., the folder containing the `Cargo.toml`). This can be overwritten\nby changing `#rust-analyzer.check.invocationStrategy#` and\n`#rust-analyzer.check.invocationLocation#`.\n\nIf `$saved_file` is part of the command, rust-analyzer will pass\nthe absolute path of the saved file to the provided command. 
This is\nintended to be used with non-Cargo build systems.\nNote that `$saved_file` is experimental and may be removed in the futureg.\n\nAn example command would be:\n\n```bash\ncargo check --workspace --message-format=json --all-targets\n```\n.", "default": null, "type": [ "null", @@ -902,6 +904,11 @@ }, "type": "object" }, + "rust-analyzer.completion.termSearch.enable": { + "markdownDescription": "Whether to enable term search based snippets like `Some(foo.bar().baz())`.", + "default": false, + "type": "boolean" + }, "rust-analyzer.diagnostics.disabled": { "markdownDescription": "List of rust-analyzer diagnostics to disable.", "default": [], @@ -1520,11 +1527,6 @@ "default": false, "type": "boolean" }, - "rust-analyzer.rename.allowExternalItems": { - "markdownDescription": "Allow renaming of items not belonging to the loaded workspaces.", - "default": false, - "type": "boolean" - }, "rust-analyzer.runnables.command": { "markdownDescription": "Command to be executed instead of 'cargo' for runnables.", "default": null, @@ -1756,6 +1758,13 @@ "rs" ], "configuration": "language-configuration.json" + }, + { + "id": "rustdoc", + "extensions": [ + ".rustdoc" + ], + "configuration": "./language-configuration-rustdoc.json" } ], "grammars": [ @@ -1763,6 +1772,27 @@ "language": "ra_syntax_tree", "scopeName": "source.ra_syntax_tree", "path": "ra_syntax_tree.tmGrammar.json" + }, + { + "language": "rustdoc", + "scopeName": "text.html.markdown.rustdoc", + "path": "rustdoc.json", + "embeddedLanguages": { + "meta.embedded.block.html": "html", + "meta.embedded.block.markdown": "markdown", + "meta.embedded.block.rust": "rust" + } + }, + { + "injectTo": [ + "source.rust" + ], + "scopeName": "comment.markdown-cell-inject.rustdoc", + "path": "rustdoc-inject.json", + "embeddedLanguages": { + "meta.embedded.block.rustdoc": "rustdoc", + "meta.embedded.block.rust": "rust" + } } ], "problemMatchers": [ diff --git a/editors/code/rustdoc-inject.json b/editors/code/rustdoc-inject.json new file mode 100644 index 0000000000000..7a4498fea9d07 --- /dev/null +++ b/editors/code/rustdoc-inject.json @@ -0,0 +1,93 @@ +{ + "injectionSelector": "L:source.rust -string -comment -meta.embedded.block.rustdoc.md", + "patterns": [ + { + "include": "#triple-slash" + }, + { + "include": "#double-slash-exclamation" + }, + { + "include": "#slash-start-exclamation" + }, + { + "include": "#slash-double-start" + } + ], + "repository": { + "triple-slash": { + "begin": "(^|\\G)\\s*(///) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.triple-slash.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(///) ?" + }, + "double-slash-exclamation": { + "begin": "(^|\\G)\\s*(//!) ?", + "captures": { + "2": { + "name": "comment.line.double-slash.rust" + } + }, + "name": "comment.quote_code.double-slash-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(//!) ?" + }, + "slash-start-exclamation": { + "begin": "(^)(/\\*!) 
?$", + "captures": { + "2": { + "name": "comment.block.rust" + } + }, + "name": "comment.quote_code.slash-start-exclamation.rust", + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "end": "( ?)(\\*/)" + }, + "slash-double-start": { + "name": "comment.quote_code.slash-double-start-quote-star.rust", + "begin": "(?:^)\\s*/\\*\\* ?$", + "end": "\\*/", + "patterns": [ + { + "include": "#quote-star" + } + ] + }, + "quote-star": { + "begin": "(^|\\G)\\s*(\\*(?!/)) ?", + "captures": { + "2": { + "name": "comment.punctuation.definition.quote_code.slash-star.MR" + } + }, + "contentName": "meta.embedded.block.rustdoc", + "patterns": [ + { + "include": "text.html.markdown.rustdoc" + } + ], + "while": "(^|\\G)\\s*(\\*(?!/)) ?" + } + }, + "scopeName": "comment.markdown-cell-inject.rustdoc" +} diff --git a/editors/code/rustdoc.json b/editors/code/rustdoc.json new file mode 100644 index 0000000000000..cecfae9d753e2 --- /dev/null +++ b/editors/code/rustdoc.json @@ -0,0 +1,82 @@ +{ + "name": "rustdoc", + "patterns": [ + { + "include": "#fenced_code_block" + }, + { + "include": "#markdown" + } + ], + "scopeName": "text.html.markdown.rustdoc", + "repository": { + "markdown": { + "patterns": [ + { + "include": "text.html.markdown" + } + ] + }, + "fenced_code_block": { + "patterns": [ + { + "include": "#fenced_code_block_rust" + }, + { + "include": "#fenced_code_block_unknown" + } + ] + }, + "fenced_code_block_rust": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?i:(rust|not run|not_run)?((\\s+|:|,|\\{|\\?)[^`~]*)?$)", + "name": "markup.fenced_code.block.markdown", + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language.markdown" + }, + "5": { + "name": "fenced_code.block.language.attributes.markdown" + } + }, + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "patterns": [ + { + "begin": "(^|\\G)(\\s*)(.*)", + "while": "(^|\\G)(?!\\s*([`~]{3,})\\s*$)", + "contentName": "meta.embedded.block.rust", + "patterns": [ + { + "include": "source.rust" + } + ] + } + ] + }, + "fenced_code_block_unknown": { + "begin": "(^|\\G)(\\s*)(`{3,}|~{3,})\\s*(?=([^`~]+)?$)", + "beginCaptures": { + "3": { + "name": "punctuation.definition.markdown" + }, + "4": { + "name": "fenced_code.block.language" + } + }, + "end": "(^|\\G)(\\2|\\s{0,3})(\\3)\\s*$", + "endCaptures": { + "3": { + "name": "punctuation.definition.markdown" + } + }, + "name": "markup.fenced_code.block.markdown" + } + } +} diff --git a/editors/code/src/rust_project.ts b/editors/code/src/rust_project.ts index bf65ad43ba596..c983874fc009f 100644 --- a/editors/code/src/rust_project.ts +++ b/editors/code/src/rust_project.ts @@ -1,7 +1,26 @@ export interface JsonProject { + /// Path to the sysroot directory. + /// + /// The sysroot is where rustc looks for the + /// crates that are built-in to rust, such as + /// std. + /// + /// https://doc.rust-lang.org/rustc/command-line-arguments.html#--sysroot-override-the-system-root + /// + /// To see the current value of sysroot, you + /// can query rustc: + /// + /// ``` + /// $ rustc --print sysroot + /// /Users/yourname/.rustup/toolchains/stable-x86_64-apple-darwin + /// ``` + sysroot?: string; /// Path to the directory with *source code* of /// sysroot crates. /// + /// By default, this is `lib/rustlib/src/rust/library` + /// relative to the sysroot. 
+ /// /// It should point to the directory where std, /// core, and friends can be found: /// diff --git a/lib/lsp-server/LICENSE-APACHE b/lib/lsp-server/LICENSE-APACHE new file mode 120000 index 0000000000000..1cd601d0a3aff --- /dev/null +++ b/lib/lsp-server/LICENSE-APACHE @@ -0,0 +1 @@ +../../LICENSE-APACHE \ No newline at end of file diff --git a/lib/lsp-server/LICENSE-MIT b/lib/lsp-server/LICENSE-MIT new file mode 120000 index 0000000000000..b2cfbdc7b0b46 --- /dev/null +++ b/lib/lsp-server/LICENSE-MIT @@ -0,0 +1 @@ +../../LICENSE-MIT \ No newline at end of file diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 9bd3a661c24de..2efafa10a828e 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -117,8 +117,6 @@ impl Metrics { sh, "./target/release/rust-analyzer -q analysis-stats {path} --query-sysroot-metadata" ) - // the sysroot uses `public-dependency`, so we make cargo think it's a nightly - .env("__CARGO_TEST_CHANNEL_OVERRIDE_DO_NOT_USE_THIS", "nightly") .read()?; for (metric, value, unit) in parse_metrics(&output) { self.report(&format!("analysis-stats/{name}/{metric}"), value, unit.into()); From 6b6da93e51851d098574b5e6f39b6d187e78f34a Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Mon, 19 Feb 2024 05:05:10 +0000 Subject: [PATCH 037/134] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 921a55d48c6c1..715917e0f2fbb 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -1f8e824f111c972c9df8dbb378d87c33f67bbad4 +d5735645753e990a72446094f703df9b5e421555 From 591356738d397dc1d1f8a7b815ac374ede4a5cb1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 08:32:33 +0200 Subject: [PATCH 038/134] Downgrade actions/checkout in release workflow --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index adb1c85051610..88adc3bc757e4 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -59,7 +59,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v4 + uses: actions/checkout@v3 with: fetch-depth: ${{ env.FETCH_DEPTH }} From 8ca834c86b6fc2da539d079d62c5e6baf26f2ca3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 08:55:37 +0200 Subject: [PATCH 039/134] Also downgrade actions/setup-node --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 88adc3bc757e4..147f516db9de9 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -78,7 +78,7 @@ jobs: rustup component add rust-src - name: Install Node.js - uses: actions/setup-node@v4 + uses: actions/setup-node@v3 with: node-version: 18 From 8544e729f1f0de1a35f1d7b87e9ec4ab8c669af2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Mon, 19 Feb 2024 09:25:25 +0200 Subject: [PATCH 040/134] Also downgrade node to 16 --- .github/workflows/release.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 147f516db9de9..ac536d0fddeae 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -80,7 +80,7 @@ jobs: - name: Install Node.js uses: 
actions/setup-node@v3 with: - node-version: 18 + node-version: 16 - name: Update apt repositories if: matrix.target == 'aarch64-unknown-linux-gnu' || matrix.target == 'arm-unknown-linux-gnueabihf' From 059138cd5835bb40decb35aef9909c762f6450d2 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Tue, 30 Jan 2024 08:10:44 +0100 Subject: [PATCH 041/134] enable from_bitmask_vector test on little-endian targets --- src/tools/miri/tests/pass/portable-simd.rs | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/tools/miri/tests/pass/portable-simd.rs b/src/tools/miri/tests/pass/portable-simd.rs index 57d0b6a87b22a..d4b3ab8ac0992 100644 --- a/src/tools/miri/tests/pass/portable-simd.rs +++ b/src/tools/miri/tests/pass/portable-simd.rs @@ -268,15 +268,11 @@ fn simd_mask() { } // This used to cause an ICE. It exercises simd_select_bitmask with an array as input. - if cfg!(target_endian = "little") { - // FIXME this test currently fails on big-endian: - // - let bitmask = u8x4::from_array([0b00001101, 0, 0, 0]); - assert_eq!( - mask32x4::from_bitmask_vector(bitmask), - mask32x4::from_array([true, false, true, true]), - ); - } + let bitmask = u8x4::from_array([0b00001101, 0, 0, 0]); + assert_eq!( + mask32x4::from_bitmask_vector(bitmask), + mask32x4::from_array([true, false, true, true]), + ); let bitmask = u8x8::from_array([0b01000101, 0, 0, 0, 0, 0, 0, 0]); assert_eq!( mask32x8::from_bitmask_vector(bitmask), From af174b9428fd4a80971b7bfc213f108facf62740 Mon Sep 17 00:00:00 2001 From: Young-Flash Date: Mon, 19 Feb 2024 18:09:33 +0800 Subject: [PATCH 042/134] internal: checkout repo before run typos --- .github/workflows/ci.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 62fbd57abc165..5a8b18e3fe1b3 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -226,6 +226,11 @@ jobs: - name: download typos run: curl -LsSf https://github.com/crate-ci/typos/releases/download/$TYPOS_VERSION/typos-$TYPOS_VERSION-x86_64-unknown-linux-musl.tar.gz | tar zxf - -C ${CARGO_HOME:-~/.cargo}/bin + - name: Checkout repository + uses: actions/checkout@v4 + with: + ref: ${{ github.event.pull_request.head.sha }} + - name: check for typos run: typos From f3d84e86c8e5490e2ac96c0fcd0de179d9f4aa22 Mon Sep 17 00:00:00 2001 From: Young-Flash Date: Mon, 19 Feb 2024 18:12:08 +0800 Subject: [PATCH 043/134] minor: fix typo --- crates/hir/src/term_search/tactics.rs | 6 +++--- crates/ide-completion/src/item.rs | 2 +- crates/rust-analyzer/tests/slow-tests/support.rs | 2 +- crates/salsa/src/lib.rs | 4 ++-- crates/salsa/src/lru.rs | 2 +- 5 files changed, 8 insertions(+), 8 deletions(-) diff --git a/crates/hir/src/term_search/tactics.rs b/crates/hir/src/term_search/tactics.rs index 666d63ac1558b..edbf75affe64c 100644 --- a/crates/hir/src/term_search/tactics.rs +++ b/crates/hir/src/term_search/tactics.rs @@ -281,14 +281,14 @@ pub(super) fn type_constructor<'a, DB: HirDatabase>( if ctx.config.enable_borrowcheck && struct_ty.contains_reference(db) { return None; } - let fileds = it.fields(db); + let fields = it.fields(db); // Check if all fields are visible, otherwise we cannot fill them - if fileds.iter().any(|it| !it.is_visible_from(db, module)) { + if fields.iter().any(|it| !it.is_visible_from(db, module)) { return None; } // Early exit if some param cannot be filled from lookup - let param_exprs: Vec> = fileds + let param_exprs: Vec> = fields .into_iter() .map(|field| lookup.find(db, &field.ty(db))) 
.collect::>()?; diff --git a/crates/ide-completion/src/item.rs b/crates/ide-completion/src/item.rs index c2c0641961a6a..4bab2886851a0 100644 --- a/crates/ide-completion/src/item.rs +++ b/crates/ide-completion/src/item.rs @@ -308,7 +308,7 @@ impl CompletionRelevance { // When a fn is bumped due to return type: // Bump Constructor or Builder methods with no arguments, - // over them tha with self arguments + // over them than with self arguments if fn_score > 0 { if !asf.has_params { // bump associated functions diff --git a/crates/rust-analyzer/tests/slow-tests/support.rs b/crates/rust-analyzer/tests/slow-tests/support.rs index 392a71702070e..dfd25abc70f6e 100644 --- a/crates/rust-analyzer/tests/slow-tests/support.rs +++ b/crates/rust-analyzer/tests/slow-tests/support.rs @@ -243,7 +243,7 @@ impl Server { to_string_pretty(actual_part).unwrap(), ); } else { - tracing::debug!("sucessfully matched notification"); + tracing::debug!("successfully matched notification"); return; } } else { diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 668dcfd925d8d..48b5d633bd672 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -456,12 +456,12 @@ pub trait Query: Debug + Default + Sized + for<'d> QueryDb<'d> { /// Name of the query method (e.g., `foo`) const QUERY_NAME: &'static str; - /// Extact storage for this query from the storage for its group. + /// Extract storage for this query from the storage for its group. fn query_storage<'a>( group_storage: &'a >::GroupStorage, ) -> &'a std::sync::Arc; - /// Extact storage for this query from the storage for its group. + /// Extract storage for this query from the storage for its group. fn query_storage_mut<'a>( group_storage: &'a >::GroupStorage, ) -> &'a std::sync::Arc; diff --git a/crates/salsa/src/lru.rs b/crates/salsa/src/lru.rs index c6b9778f20ad0..1ff85a3ea4585 100644 --- a/crates/salsa/src/lru.rs +++ b/crates/salsa/src/lru.rs @@ -40,7 +40,7 @@ pub(crate) trait LruNode: Sized + Debug { #[derive(Debug)] pub(crate) struct LruIndex { - /// Index in the approprate LRU list, or std::usize::MAX if not a + /// Index in the appropriate LRU list, or std::usize::MAX if not a /// member. 
index: AtomicUsize, } From 385479c0cd5519ae2ee5c273a333c704817f5411 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Mon, 19 Feb 2024 11:21:51 +0100 Subject: [PATCH 044/134] tests/pass/concurrency/sync: try to make it less likely for the test to fail on macOS --- src/tools/miri/tests/pass/concurrency/sync.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/tools/miri/tests/pass/concurrency/sync.rs b/src/tools/miri/tests/pass/concurrency/sync.rs index 1d48e5312d496..a6c181098b74b 100644 --- a/src/tools/miri/tests/pass/concurrency/sync.rs +++ b/src/tools/miri/tests/pass/concurrency/sync.rs @@ -63,10 +63,10 @@ fn check_conditional_variables_timed_wait_timeout() { let cvar = Condvar::new(); let guard = lock.lock().unwrap(); let now = Instant::now(); - let (_guard, timeout) = cvar.wait_timeout(guard, Duration::from_millis(100)).unwrap(); + let (_guard, timeout) = cvar.wait_timeout(guard, Duration::from_millis(10)).unwrap(); assert!(timeout.timed_out()); let elapsed_time = now.elapsed().as_millis(); - assert!(100 <= elapsed_time && elapsed_time <= 1000); + assert!(10 <= elapsed_time && elapsed_time <= 1000); } /// Test that signaling a conditional variable when waiting with a timeout works @@ -79,7 +79,7 @@ fn check_conditional_variables_timed_wait_notimeout() { let guard = lock.lock().unwrap(); let handle = thread::spawn(move || { - thread::sleep(Duration::from_millis(100)); // Make sure the other thread is waiting by the time we call `notify`. + thread::sleep(Duration::from_millis(1)); // Make sure the other thread is waiting by the time we call `notify`. let (_lock, cvar) = &*pair2; cvar.notify_one(); }); From 30b992e95a1e437b3e96b0e86373427f0fe2b121 Mon Sep 17 00:00:00 2001 From: Johannes Altmanninger Date: Sun, 14 Jan 2024 08:48:41 +0100 Subject: [PATCH 045/134] Deduplicate references to macro argument Commit 6a06f6f72 (Deduplicate reference search results, 2022-11-07) deduplicates references within each definition. There is an edge case when requesting references of a macro argument. Apparently, our descend_into_macros() stanza in references.rs produces a cartesian product of - references inside the macro times - times references outside the macro. Since the above deduplication only applies to the references within a single definition, we return them all, leading to many redundant references. Work around this by deduplicating definitions as well. Perhaps there is a better fix to not produce this cartesian product in the first place; but I think at least for definitions the problem would remain; a macro can contain multiple definitions of the same name, but since the navigation target will be the unresolved location, it's the same for all of them. We can't use unique() because we don't want to drop references that don't have a declaration (though I dont' have an example for this case). I discovered this working with the "bitflags" macro from the crate of the same name. Fixes #16357 --- crates/rust-analyzer/src/handlers/request.rs | 28 ++++++++++++++++---- crates/stdx/src/lib.rs | 16 +++++++++++ 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/crates/rust-analyzer/src/handlers/request.rs b/crates/rust-analyzer/src/handlers/request.rs index eb9d4bf0f02d7..a677cea31b505 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -2,6 +2,7 @@ //! Protocol. This module specifically handles requests. 
use std::{ + collections::HashSet, fs, io::Write as _, path::PathBuf, @@ -13,7 +14,8 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, - ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, + ReferenceCategory, ReferenceSearchResult, Runnable, RunnableKind, SingleResolve, SourceChange, + TextEdit, }; use ide_db::SymbolKind; use lsp_server::ErrorCode; @@ -28,6 +30,8 @@ use lsp_types::{ }; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; +#[allow(unused_imports)] +use stdx::IsNoneOr; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; use triomphe::Arc; @@ -1055,10 +1059,10 @@ pub(crate) fn handle_references( let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); - let refs = match snap.analysis.find_all_refs(position, None)? { - None => return Ok(None), - Some(refs) => refs, + let Some(mut refs) = snap.analysis.find_all_refs(position, None)? else { + return Ok(None); }; + deduplicate_declarations(&mut refs); let include_declaration = params.context.include_declaration; let locations = refs @@ -1090,6 +1094,17 @@ pub(crate) fn handle_references( Ok(Some(locations)) } +fn deduplicate_declarations(refs: &mut Vec) { + if refs.iter().filter(|decl| decl.declaration.is_some()).take(2).count() > 1 { + let mut seen_navigation_targets = HashSet::new(); + refs.retain(|res| { + res.declaration + .as_ref() + .is_none_or(|decl| seen_navigation_targets.insert(decl.nav.clone())) + }); + } +} + pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, @@ -1794,7 +1809,10 @@ fn show_ref_command_link( position: &FilePosition, ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { + if let Some(mut ref_search_res) = + snap.analysis.find_all_refs(*position, None).unwrap_or(None) + { + deduplicate_declarations(&mut ref_search_res); let uri = to_proto::url(snap, position.file_id); let line_index = snap.file_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); diff --git a/crates/stdx/src/lib.rs b/crates/stdx/src/lib.rs index 9a9ebae74e8c1..0504ca50b8828 100644 --- a/crates/stdx/src/lib.rs +++ b/crates/stdx/src/lib.rs @@ -302,6 +302,22 @@ pub fn slice_tails(this: &[T]) -> impl Iterator { (0..this.len()).map(|i| &this[i..]) } +pub trait IsNoneOr { + type Type; + #[allow(clippy::wrong_self_convention)] + fn is_none_or(self, s: impl FnOnce(Self::Type) -> bool) -> bool; +} +#[allow(unstable_name_collisions)] +impl IsNoneOr for Option { + type Type = T; + fn is_none_or(self, f: impl FnOnce(T) -> bool) -> bool { + match self { + Some(v) => f(v), + None => true, + } + } +} + #[cfg(test)] mod tests { use super::*; From 91a8f34aeed075427ad4f6c0c6f58f247ac7de42 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 12:22:27 +0100 Subject: [PATCH 046/134] Deduplicate lsp locations --- crates/rust-analyzer/src/handlers/request.rs | 33 +++++--------------- crates/rust-analyzer/src/lsp/to_proto.rs | 7 +++-- 2 files changed, 12 insertions(+), 28 deletions(-) diff --git a/crates/rust-analyzer/src/handlers/request.rs 
b/crates/rust-analyzer/src/handlers/request.rs index a677cea31b505..04a043954299a 100644 --- a/crates/rust-analyzer/src/handlers/request.rs +++ b/crates/rust-analyzer/src/handlers/request.rs @@ -2,7 +2,6 @@ //! Protocol. This module specifically handles requests. use std::{ - collections::HashSet, fs, io::Write as _, path::PathBuf, @@ -14,10 +13,10 @@ use anyhow::Context; use ide::{ AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query, RangeInfo, RangeLimit, - ReferenceCategory, ReferenceSearchResult, Runnable, RunnableKind, SingleResolve, SourceChange, - TextEdit, + ReferenceCategory, Runnable, RunnableKind, SingleResolve, SourceChange, TextEdit, }; use ide_db::SymbolKind; +use itertools::Itertools; use lsp_server::ErrorCode; use lsp_types::{ CallHierarchyIncomingCall, CallHierarchyIncomingCallsParams, CallHierarchyItem, @@ -30,8 +29,6 @@ use lsp_types::{ }; use project_model::{ManifestPath, ProjectWorkspace, TargetKind}; use serde_json::json; -#[allow(unused_imports)] -use stdx::IsNoneOr; use stdx::{format_to, never}; use syntax::{algo, ast, AstNode, TextRange, TextSize}; use triomphe::Arc; @@ -1059,10 +1056,9 @@ pub(crate) fn handle_references( let exclude_imports = snap.config.find_all_refs_exclude_imports(); let exclude_tests = snap.config.find_all_refs_exclude_tests(); - let Some(mut refs) = snap.analysis.find_all_refs(position, None)? else { + let Some(refs) = snap.analysis.find_all_refs(position, None)? else { return Ok(None); }; - deduplicate_declarations(&mut refs); let include_declaration = params.context.include_declaration; let locations = refs @@ -1088,23 +1084,13 @@ pub(crate) fn handle_references( }) .chain(decl) }) + .unique() .filter_map(|frange| to_proto::location(&snap, frange).ok()) .collect(); Ok(Some(locations)) } -fn deduplicate_declarations(refs: &mut Vec) { - if refs.iter().filter(|decl| decl.declaration.is_some()).take(2).count() > 1 { - let mut seen_navigation_targets = HashSet::new(); - refs.retain(|res| { - res.declaration - .as_ref() - .is_none_or(|decl| seen_navigation_targets.insert(decl.nav.clone())) - }); - } -} - pub(crate) fn handle_formatting( snap: GlobalStateSnapshot, params: lsp_types::DocumentFormattingParams, @@ -1809,10 +1795,7 @@ fn show_ref_command_link( position: &FilePosition, ) -> Option { if snap.config.hover_actions().references && snap.config.client_commands().show_reference { - if let Some(mut ref_search_res) = - snap.analysis.find_all_refs(*position, None).unwrap_or(None) - { - deduplicate_declarations(&mut ref_search_res); + if let Some(ref_search_res) = snap.analysis.find_all_refs(*position, None).unwrap_or(None) { let uri = to_proto::url(snap, position.file_id); let line_index = snap.file_line_index(position.file_id).ok()?; let position = to_proto::position(&line_index, position.offset); @@ -1820,10 +1803,10 @@ fn show_ref_command_link( .into_iter() .flat_map(|res| res.references) .flat_map(|(file_id, ranges)| { - ranges.into_iter().filter_map(move |(range, _)| { - to_proto::location(snap, FileRange { file_id, range }).ok() - }) + ranges.into_iter().map(move |(range, _)| FileRange { file_id, range }) }) + .unique() + .filter_map(|range| to_proto::location(snap, range).ok()) .collect(); let title = to_proto::reference_title(locations.len()); let command = to_proto::command::show_references(title, &uri, position, locations); diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 
727007bba083a..4101d476cd308 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -904,15 +904,16 @@ pub(crate) fn goto_definition_response( if snap.config.location_link() { let links = targets .into_iter() + .unique_by(|nav| (nav.file_id, nav.full_range, nav.focus_range)) .map(|nav| location_link(snap, src, nav)) .collect::>>()?; Ok(links.into()) } else { let locations = targets .into_iter() - .map(|nav| { - location(snap, FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) - }) + .map(|nav| FileRange { file_id: nav.file_id, range: nav.focus_or_full_range() }) + .unique() + .map(|range| location(snap, range)) .collect::>>()?; Ok(locations.into()) } From 6e16edb3bfc641b2a9a17b2fa66da05fc3d368e8 Mon Sep 17 00:00:00 2001 From: UserIsntAvailable Date: Sun, 4 Feb 2024 08:17:02 -0500 Subject: [PATCH 047/134] feat: append `as ` when renaming inside an "UseTree". test: include `rename_path_inside_use_tree`. Keeps tracks the progress of the changes. 3 other tests broke with the changes of this. feat: rename all other usages within the current file. feat: fix most of the implementation problems. test: `rename_path_inside_use_tree` tests a more complicated scenario. --- crates/ide/src/rename.rs | 120 ++++++++++++++++++++++++++++++++++----- 1 file changed, 106 insertions(+), 14 deletions(-) diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index f2eedfa431693..c128fa5f41a2a 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -9,6 +9,7 @@ use ide_db::{ base_db::{FileId, FileRange}, defs::{Definition, NameClass, NameRefClass}, rename::{bail, format_err, source_edit_from_references, IdentifierKind}, + source_change::SourceChangeBuilder, RootDatabase, }; use itertools::Itertools; @@ -90,24 +91,60 @@ pub(crate) fn rename( let syntax = source_file.syntax(); let defs = find_definitions(&sema, syntax, position)?; + let alias_fallback = alias_fallback(syntax, position, new_name); + + let ops: RenameResult> = match alias_fallback { + Some(_) => defs + // FIXME: This can use the `ide_db::rename_reference` (or def.rename) method once we can + // properly find "direct" usages/references. + .map(|(.., def)| { + match IdentifierKind::classify(new_name)? { + IdentifierKind::Ident => (), + IdentifierKind::Lifetime => { + bail!("Cannot alias reference to a lifetime identifier") + } + IdentifierKind::Underscore => bail!("Cannot alias reference to `_`"), + }; - let ops: RenameResult> = defs - .map(|(.., def)| { - if let Definition::Local(local) = def { - if let Some(self_param) = local.as_self_param(sema.db) { - cov_mark::hit!(rename_self_to_param); - return rename_self_to_param(&sema, local, self_param, new_name); - } - if new_name == "self" { - cov_mark::hit!(rename_to_self); - return rename_to_self(&sema, local); + let mut usages = def.usages(&sema).all(); + + // FIXME: hack - removes the usage that triggered this rename operation. 
+ match usages.references.get_mut(&position.file_id).and_then(|refs| { + refs.iter() + .position(|ref_| ref_.range.contains_inclusive(position.offset)) + .map(|idx| refs.remove(idx)) + }) { + Some(_) => (), + None => never!(), + }; + + let mut source_change = SourceChange::default(); + source_change.extend(usages.iter().map(|(&file_id, refs)| { + (file_id, source_edit_from_references(refs, def, new_name)) + })); + + Ok(source_change) + }) + .collect(), + None => defs + .map(|(.., def)| { + if let Definition::Local(local) = def { + if let Some(self_param) = local.as_self_param(sema.db) { + cov_mark::hit!(rename_self_to_param); + return rename_self_to_param(&sema, local, self_param, new_name); + } + if new_name == "self" { + cov_mark::hit!(rename_to_self); + return rename_to_self(&sema, local); + } } - } - def.rename(&sema, new_name) - }) - .collect(); + def.rename(&sema, new_name, rename_external) + }) + .collect(), + }; ops?.into_iter() + .chain(alias_fallback) .reduce(|acc, elem| acc.merge(elem)) .ok_or_else(|| format_err!("No references found at position")) } @@ -130,6 +167,38 @@ pub(crate) fn will_rename_file( Some(change) } +// FIXME: Should support `extern crate`. +fn alias_fallback( + syntax: &SyntaxNode, + FilePosition { file_id, offset }: FilePosition, + new_name: &str, +) -> Option { + let use_tree = syntax + .token_at_offset(offset) + .flat_map(|syntax| syntax.parent_ancestors()) + .find_map(ast::UseTree::cast)?; + + let last_path_segment = use_tree.path()?.segments().last()?.name_ref()?; + if !last_path_segment.syntax().text_range().contains_inclusive(offset) { + return None; + }; + + let mut builder = SourceChangeBuilder::new(file_id); + + match use_tree.rename() { + Some(rename) => { + let offset = rename.syntax().text_range(); + builder.replace(offset, format!("as {new_name}")); + } + None => { + let offset = use_tree.syntax().text_range().end(); + builder.insert(offset, format!(" as {new_name}")); + } + } + + Some(builder.finish()) +} + fn find_definitions( sema: &Semantics<'_, RootDatabase>, syntax: &SyntaxNode, @@ -2686,4 +2755,27 @@ fn test() { "#, ); } + + #[test] + fn rename_path_inside_use_tree() { + check( + "Baz", + r#" +mod foo { pub struct Foo; } +mod bar { use super::Foo; } + +use foo::Foo$0; + +fn main() { let _: Foo; } +"#, + r#" +mod foo { pub struct Foo; } +mod bar { use super::Baz; } + +use foo::Foo as Baz; + +fn main() { let _: Baz; } +"#, + ) + } } From 69c25327f48aa6e0297bff6249fb027c7b836eea Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Sun, 14 Jan 2024 15:19:20 +0000 Subject: [PATCH 048/134] internal: reduce body lookups in expr diagnostics --- crates/hir-ty/src/diagnostics/expr.rs | 20 ++++++++------------ 1 file changed, 8 insertions(+), 12 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index c4329a7b82bf8..afb80e1f4450e 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -57,7 +57,8 @@ impl BodyValidationDiagnostic { let _p = tracing::span!(tracing::Level::INFO, "BodyValidationDiagnostic::collect").entered(); let infer = db.infer(owner); - let mut validator = ExprValidator::new(owner, infer); + let body = db.body(owner); + let mut validator = ExprValidator { owner, body, infer, diagnostics: Vec::new() }; validator.validate_body(db); validator.diagnostics } @@ -65,18 +66,16 @@ impl BodyValidationDiagnostic { struct ExprValidator { owner: DefWithBodyId, + body: Arc, infer: Arc, pub(super) diagnostics: Vec, } impl ExprValidator { - fn new(owner: 
DefWithBodyId, infer: Arc) -> ExprValidator { - ExprValidator { owner, infer, diagnostics: Vec::new() } - } - fn validate_body(&mut self, db: &dyn HirDatabase) { - let body = db.body(self.owner); let mut filter_map_next_checker = None; + // we'll pass &mut self while iterating over body.exprs, so they need to be disjoint + let body = Arc::clone(&self.body); if matches!(self.owner, DefWithBodyId::FunctionId(_)) { self.check_for_trailing_return(body.body_expr, &body); @@ -162,8 +161,6 @@ impl ExprValidator { arms: &[MatchArm], db: &dyn HirDatabase, ) { - let body = db.body(self.owner); - let scrut_ty = &self.infer[scrutinee_expr]; if scrut_ty.is_unknown() { return; @@ -191,12 +188,12 @@ impl ExprValidator { .as_reference() .map(|(match_expr_ty, ..)| match_expr_ty == pat_ty) .unwrap_or(false)) - && types_of_subpatterns_do_match(arm.pat, &body, &self.infer) + && types_of_subpatterns_do_match(arm.pat, &self.body, &self.infer) { // If we had a NotUsefulMatchArm diagnostic, we could // check the usefulness of each pattern as we added it // to the matrix here. - let pat = self.lower_pattern(&cx, arm.pat, db, &body, &mut has_lowering_errors); + let pat = self.lower_pattern(&cx, arm.pat, db, &mut has_lowering_errors); let m_arm = pat_analysis::MatchArm { pat: pattern_arena.alloc(pat), has_guard: arm.guard.is_some(), @@ -244,10 +241,9 @@ impl ExprValidator { cx: &MatchCheckCtx<'p>, pat: PatId, db: &dyn HirDatabase, - body: &Body, have_errors: &mut bool, ) -> DeconstructedPat<'p> { - let mut patcx = match_check::PatCtxt::new(db, &self.infer, body); + let mut patcx = match_check::PatCtxt::new(db, &self.infer, &self.body); let pattern = patcx.lower_pattern(pat); let pattern = cx.lower_pat(&pattern); if !patcx.errors.is_empty() { From 5390e4ce9bdb822ea4899e2df4383a7076d820cf Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Fri, 5 Jan 2024 17:38:29 +0000 Subject: [PATCH 049/134] feat: add non-exhaustive-let diagnostic --- crates/hir-ty/src/diagnostics/expr.rs | 60 +++++++++++++++++-- crates/hir/src/diagnostics.rs | 23 +++++++ .../src/handlers/mutability_errors.rs | 2 +- .../src/handlers/non_exhaustive_let.rs | 47 +++++++++++++++ crates/ide-diagnostics/src/lib.rs | 2 + 5 files changed, 129 insertions(+), 5 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index afb80e1f4450e..0c5d6399619a6 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -12,6 +12,7 @@ use hir_expand::name; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint}; +use tracing::debug; use triomphe::Arc; use typed_arena::Arena; @@ -44,6 +45,10 @@ pub enum BodyValidationDiagnostic { match_expr: ExprId, uncovered_patterns: String, }, + NonExhaustiveLet { + pat: PatId, + uncovered_patterns: String, + }, RemoveTrailingReturn { return_expr: ExprId, }, @@ -68,7 +73,7 @@ struct ExprValidator { owner: DefWithBodyId, body: Arc, infer: Arc, - pub(super) diagnostics: Vec, + diagnostics: Vec, } impl ExprValidator { @@ -105,6 +110,9 @@ impl ExprValidator { Expr::If { .. } => { self.check_for_unnecessary_else(id, expr, &body); } + Expr::Block { .. 
} => { + self.validate_block(db, expr); + } _ => {} } } @@ -231,11 +239,55 @@ impl ExprValidator { if !witnesses.is_empty() { self.diagnostics.push(BodyValidationDiagnostic::MissingMatchArms { match_expr, - uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, arms), + uncovered_patterns: missing_match_arms(&cx, scrut_ty, witnesses, m_arms.is_empty()), }); } } + fn validate_block(&mut self, db: &dyn HirDatabase, expr: &Expr) { + let Expr::Block { statements, .. } = expr else { return }; + let pattern_arena = Arena::new(); + let cx = MatchCheckCtx::new(self.owner.module(db.upcast()), self.owner, db); + for stmt in &**statements { + let &Statement::Let { pat, initializer, else_branch: None, .. } = stmt else { + continue; + }; + let Some(initializer) = initializer else { continue }; + let ty = &self.infer[initializer]; + + let mut have_errors = false; + let deconstructed_pat = self.lower_pattern(&cx, pat, db, &mut have_errors); + let match_arm = rustc_pattern_analysis::MatchArm { + pat: pattern_arena.alloc(deconstructed_pat), + has_guard: false, + arm_data: (), + }; + if have_errors { + continue; + } + + let report = match compute_match_usefulness( + &cx, + &[match_arm], + ty.clone(), + ValidityConstraint::ValidOnly, + ) { + Ok(v) => v, + Err(e) => { + debug!(?e, "match usefulness error"); + continue; + } + }; + let witnesses = report.non_exhaustiveness_witnesses; + if !witnesses.is_empty() { + self.diagnostics.push(BodyValidationDiagnostic::NonExhaustiveLet { + pat, + uncovered_patterns: missing_match_arms(&cx, ty, witnesses, false), + }); + } + } + } + fn lower_pattern<'p>( &self, cx: &MatchCheckCtx<'p>, @@ -444,7 +496,7 @@ fn missing_match_arms<'p>( cx: &MatchCheckCtx<'p>, scrut_ty: &Ty, witnesses: Vec>, - arms: &[MatchArm], + arms_is_empty: bool, ) -> String { struct DisplayWitness<'a, 'p>(&'a WitnessPat<'p>, &'a MatchCheckCtx<'p>); impl fmt::Display for DisplayWitness<'_, '_> { @@ -459,7 +511,7 @@ fn missing_match_arms<'p>( Some((AdtId::EnumId(e), _)) => !cx.db.enum_data(e).variants.is_empty(), _ => false, }; - if arms.is_empty() && !non_empty_enum { + if arms_is_empty && !non_empty_enum { format!("type `{}` is non-empty", scrut_ty.display(cx.db)) } else { let pat_display = |witness| DisplayWitness(witness, cx); diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index 08843a6c99941..d351e257d2e7c 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -64,6 +64,7 @@ diagnostics![ MissingUnsafe, MovedOutOfRef, NeedMut, + NonExhaustiveLet, NoSuchField, PrivateAssocItem, PrivateField, @@ -280,6 +281,12 @@ pub struct MissingMatchArms { pub uncovered_patterns: String, } +#[derive(Debug)] +pub struct NonExhaustiveLet { + pub pat: InFile>, + pub uncovered_patterns: String, +} + #[derive(Debug)] pub struct TypeMismatch { pub expr_or_pat: InFile>>, @@ -456,6 +463,22 @@ impl AnyDiagnostic { Err(SyntheticSyntax) => (), } } + BodyValidationDiagnostic::NonExhaustiveLet { pat, uncovered_patterns } => { + match source_map.pat_syntax(pat) { + Ok(source_ptr) => { + if let Some(ast_pat) = source_ptr.value.cast::() { + return Some( + NonExhaustiveLet { + pat: InFile::new(source_ptr.file_id, ast_pat), + uncovered_patterns, + } + .into(), + ); + } + } + Err(SyntheticSyntax) => {} + } + } BodyValidationDiagnostic::RemoveTrailingReturn { return_expr } => { if let Ok(source_ptr) = source_map.expr_syntax(return_expr) { // Filters out desugared return expressions (e.g. desugared try operators). 
diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index bdb55a9d98a27..3c71f84dc4852 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -817,7 +817,7 @@ fn f() { //- minicore: option fn f(_: i32) {} fn main() { - let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)); + let ((Some(mut x), None) | (_, Some(mut x))) = (None, Some(7)) else { return }; //^^^^^ 💡 warn: variable does not need to be mutable f(x); } diff --git a/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs new file mode 100644 index 0000000000000..1a4d2877ef25f --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/non_exhaustive_let.rs @@ -0,0 +1,47 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: non-exhaustive-let +// +// This diagnostic is triggered if a `let` statement without an `else` branch has a non-exhaustive +// pattern. +pub(crate) fn non_exhaustive_let( + ctx: &DiagnosticsContext<'_>, + d: &hir::NonExhaustiveLet, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0005"), + format!("non-exhaustive pattern: {}", d.uncovered_patterns), + d.pat.map(Into::into), + ) +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn option_nonexhaustive() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + let None = Some(5); + //^^^^ error: non-exhaustive pattern: `Some(_)` not covered +} +"#, + ); + } + + #[test] + fn option_exhaustive() { + check_diagnostics( + r#" +//- minicore: option +fn main() { + let Some(_) | None = Some(5); +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 9d21bb4cd9fb6..3a3888011d75e 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -41,6 +41,7 @@ mod handlers { pub(crate) mod moved_out_of_ref; pub(crate) mod mutability_errors; pub(crate) mod no_such_field; + pub(crate) mod non_exhaustive_let; pub(crate) mod private_assoc_item; pub(crate) mod private_field; pub(crate) mod remove_trailing_return; @@ -359,6 +360,7 @@ pub fn diagnostics( AnyDiagnostic::MissingUnsafe(d) => handlers::missing_unsafe::missing_unsafe(&ctx, &d), AnyDiagnostic::MovedOutOfRef(d) => handlers::moved_out_of_ref::moved_out_of_ref(&ctx, &d), AnyDiagnostic::NeedMut(d) => handlers::mutability_errors::need_mut(&ctx, &d), + AnyDiagnostic::NonExhaustiveLet(d) => handlers::non_exhaustive_let::non_exhaustive_let(&ctx, &d), AnyDiagnostic::NoSuchField(d) => handlers::no_such_field::no_such_field(&ctx, &d), AnyDiagnostic::PrivateAssocItem(d) => handlers::private_assoc_item::private_assoc_item(&ctx, &d), AnyDiagnostic::PrivateField(d) => handlers::private_field::private_field(&ctx, &d), From 6dd5dc10ef2a7e305045657a9b71662a53884f9f Mon Sep 17 00:00:00 2001 From: UserIsntAvailable Date: Mon, 5 Feb 2024 13:26:47 -0500 Subject: [PATCH 050/134] test: fix `disallow_renaming_for_non_local_definition` to follow PR changes. 
--- crates/ide/src/rename.rs | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/crates/ide/src/rename.rs b/crates/ide/src/rename.rs index c128fa5f41a2a..f78153df38bd6 100644 --- a/crates/ide/src/rename.rs +++ b/crates/ide/src/rename.rs @@ -138,7 +138,7 @@ pub(crate) fn rename( return rename_to_self(&sema, local); } } - def.rename(&sema, new_name, rename_external) + def.rename(&sema, new_name) }) .collect(), }; @@ -2695,7 +2695,8 @@ use qux as frob; //- /lib.rs crate:lib new_source_root:library pub struct S; //- /main.rs crate:main deps:lib new_source_root:local -use lib::S$0; +use lib::S; +fn main() { let _: S$0; } "#, "error: Cannot rename a non-local definition", ); From e9c80a9c256677d85398d93880eec1e54e226d2a Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 15 Feb 2024 00:59:13 +0900 Subject: [PATCH 051/134] fix: False positive diagnostic for necessary `else` --- crates/hir-ty/src/diagnostics/expr.rs | 8 ++++++- .../src/handlers/remove_unnecessary_else.rs | 23 +++++++++++++++++++ 2 files changed, 30 insertions(+), 1 deletion(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 0c5d6399619a6..571f01dde2ef9 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -338,7 +338,13 @@ impl ExprValidator { fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) { if let Expr::If { condition: _, then_branch, else_branch } = expr { - if else_branch.is_none() { + if let Some(else_branch) = else_branch { + // If else branch has a tail, it is an "expression" that produces a value, + // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary + if let Expr::Block { tail: Some(_), .. } = body.exprs[*else_branch] { + return; + } + } else { return; } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index ae8241ec2c695..813c07a505db2 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -384,6 +384,29 @@ fn test() { return bar; } } +"#, + ); + } + + #[test] + fn no_diagnostic_if_tail_exists_in_else_branch() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test1(a: bool) { + let _x = if a { + return; + } else { + 1 + }; +} + +fn test2(a: bool) -> i32 { + if a { + return 1; + } else { + 0 + } +} "#, ); } From d14b22863bd58e78a8e9193db987ae22ba1e57e1 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Thu, 15 Feb 2024 01:29:48 +0900 Subject: [PATCH 052/134] Handle cases for `else if` --- crates/hir-ty/src/diagnostics/expr.rs | 19 +++++++++++++++++-- .../src/handlers/remove_unnecessary_else.rs | 12 ++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 571f01dde2ef9..ff70618ca129c 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -341,8 +341,23 @@ impl ExprValidator { if let Some(else_branch) = else_branch { // If else branch has a tail, it is an "expression" that produces a value, // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary - if let Expr::Block { tail: Some(_), .. 
} = body.exprs[*else_branch] { - return; + let mut branch = *else_branch; + loop { + match body.exprs[branch] { + Expr::Block { tail: Some(_), .. } => return, + Expr::If { then_branch, else_branch, .. } => { + if let Expr::Block { tail: Some(_), .. } = body.exprs[then_branch] { + return; + } + if let Some(else_branch) = else_branch { + // Continue checking for branches like `if { ... } else if { ... } else...` + branch = else_branch; + continue; + } + } + _ => break, + } + break; } } else { return; diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 813c07a505db2..bbc10e96cef84 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -407,6 +407,18 @@ fn test2(a: bool) -> i32 { 0 } } + +fn test3(a: bool, b: bool, c: bool) { + let _x = if a { + return; + } else if b { + return; + } else if c { + 1 + } else { + return; + }; +} "#, ); } From 21f4ff03516eebb3cbdd8947ad3a7a00c980a692 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Fri, 16 Feb 2024 23:53:00 +0900 Subject: [PATCH 053/134] Check for let expr ancestors instead of tail expr --- crates/hir-ty/src/diagnostics/expr.rs | 52 ++++++++++--------- .../src/handlers/remove_unnecessary_else.rs | 10 +--- 2 files changed, 29 insertions(+), 33 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index ff70618ca129c..718409e15997d 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -12,6 +12,7 @@ use hir_expand::name; use itertools::Itertools; use rustc_hash::FxHashSet; use rustc_pattern_analysis::usefulness::{compute_match_usefulness, ValidityConstraint}; +use syntax::{ast, AstNode}; use tracing::debug; use triomphe::Arc; use typed_arena::Arena; @@ -108,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, &body); + self.check_for_unnecessary_else(id, expr, db); } Expr::Block { .. } => { self.validate_block(db, expr); @@ -336,31 +337,34 @@ impl ExprValidator { } } - fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, body: &Body) { + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { if let Expr::If { condition: _, then_branch, else_branch } = expr { - if let Some(else_branch) = else_branch { - // If else branch has a tail, it is an "expression" that produces a value, - // e.g. `let a = if { ... } else { ... };` and this `else` is not unnecessary - let mut branch = *else_branch; - loop { - match body.exprs[branch] { - Expr::Block { tail: Some(_), .. } => return, - Expr::If { then_branch, else_branch, .. } => { - if let Expr::Block { tail: Some(_), .. } = body.exprs[then_branch] { - return; - } - if let Some(else_branch) = else_branch { - // Continue checking for branches like `if { ... } else if { ... 
} else...` - branch = else_branch; - continue; - } - } - _ => break, - } - break; - } - } else { + if else_branch.is_none() { + return; + } + let (body, source_map) = db.body_with_source_map(self.owner); + let Ok(source_ptr) = source_map.expr_syntax(id) else { return; + }; + let root = source_ptr.file_syntax(db.upcast()); + let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_let_stmt = + parent.as_ref().map_or(false, |node| ast::LetStmt::can_cast(node.kind())); + if has_parent_let_stmt { + // Bail if parent or direct ancestor is a let stmt. + return; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Parent is neither an if expr nor a let stmt. + break; + }; + // Check parent if expr. + top_if_expr = parent_if_expr; } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index bbc10e96cef84..351f728747efa 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -400,15 +400,7 @@ fn test1(a: bool) { }; } -fn test2(a: bool) -> i32 { - if a { - return 1; - } else { - 0 - } -} - -fn test3(a: bool, b: bool, c: bool) { +fn test2(a: bool, b: bool, c: bool) { let _x = if a { return; } else if b { From 8f6e2127c3a1537298cbda11a8f261875edd6659 Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Fri, 16 Feb 2024 23:54:01 +0900 Subject: [PATCH 054/134] Fix the remove unnecessary else action to preserve block tail expr --- .../src/handlers/remove_unnecessary_else.rs | 43 +++++++++++++++++-- 1 file changed, 40 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 351f728747efa..289ce64035432 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -41,9 +41,11 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option { - block.statements().map(|stmt| format!("\n{indent}{stmt}")).join("") - } + ast::ElseBranch::Block(ref block) => block + .statements() + .map(|stmt| format!("\n{indent}{stmt}")) + .chain(block.tail_expr().map(|tail| format!("\n{indent}{tail}"))) + .join(""), ast::ElseBranch::IfExpr(ref nested_if_expr) => { format!("\n{indent}{nested_if_expr}") } @@ -171,6 +173,41 @@ fn test() { ); } + #[test] + fn remove_unnecessary_else_for_return3() { + check_diagnostics_with_needless_return_disabled( + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } else { + //^^^^ 💡 weak: remove unnecessary else block + 0 + } +} +"#, + ); + check_fix( + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } else$0 { + 0 + } +} +"#, + r#" +fn test(a: bool) -> i32 { + if a { + return 1; + } + 0 +} +"#, + ); + } + #[test] fn remove_unnecessary_else_for_return_in_child_if_expr() { check_diagnostics_with_needless_return_disabled( From 1205853c3689a69e81578dfd066b17e3ebe376cf Mon Sep 17 00:00:00 2001 From: Shoyu Vanilla Date: Sat, 17 Feb 2024 00:55:45 +0900 Subject: [PATCH 055/134] Apply indent fix in #16575 --- .../src/handlers/remove_unnecessary_else.rs | 47 +++++++++++++++++-- 1 file changed, 44 
insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 289ce64035432..9564807a334e4 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -2,7 +2,10 @@ use hir::{db::ExpandDatabase, diagnostics::RemoveUnnecessaryElse, HirFileIdExt}; use ide_db::{assists::Assist, source_change::SourceChange}; use itertools::Itertools; use syntax::{ - ast::{self, edit::IndentLevel}, + ast::{ + self, + edit::{AstNodeEdit, IndentLevel}, + }, AstNode, SyntaxToken, TextRange, }; use text_edit::TextEdit; @@ -41,12 +44,15 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option block + ast::ElseBranch::Block(block) => block .statements() .map(|stmt| format!("\n{indent}{stmt}")) .chain(block.tail_expr().map(|tail| format!("\n{indent}{tail}"))) .join(""), - ast::ElseBranch::IfExpr(ref nested_if_expr) => { + ast::ElseBranch::IfExpr(mut nested_if_expr) => { + if has_parent_if_expr { + nested_if_expr = nested_if_expr.indent(IndentLevel(1)) + } format!("\n{indent}{nested_if_expr}") } }; @@ -251,6 +257,41 @@ fn test() { ); } + #[test] + fn remove_unnecessary_else_for_return_in_child_if_expr2() { + check_fix( + r#" +fn test() { + if foo { + do_something(); + } else if qux { + return bar; + } else$0 if quux { + do_something_else(); + } else { + do_something_else2(); + } +} +"#, + r#" +fn test() { + if foo { + do_something(); + } else { + if qux { + return bar; + } + if quux { + do_something_else(); + } else { + do_something_else2(); + } + } +} +"#, + ); + } + #[test] fn remove_unnecessary_else_for_break() { check_diagnostics( From ff7031008651021c330b93d4bd502810022b045d Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Fri, 16 Feb 2024 20:39:52 +0300 Subject: [PATCH 056/134] fix: only emit "unnecessary else" diagnostic for expr stmts --- crates/hir-ty/src/diagnostics/expr.rs | 64 +++++++++++-------- .../src/handlers/remove_unnecessary_else.rs | 14 +++- 2 files changed, 49 insertions(+), 29 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 718409e15997d..4fe75f24b80c8 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -109,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, db); + self.check_for_unnecessary_else(id, expr, &body, db); } Expr::Block { .. } => { self.validate_block(db, expr); @@ -337,35 +337,17 @@ impl ExprValidator { } } - fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { + fn check_for_unnecessary_else( + &mut self, + id: ExprId, + expr: &Expr, + body: &Body, + db: &dyn HirDatabase, + ) { if let Expr::If { condition: _, then_branch, else_branch } = expr { if else_branch.is_none() { return; } - let (body, source_map) = db.body_with_source_map(self.owner); - let Ok(source_ptr) = source_map.expr_syntax(id) else { - return; - }; - let root = source_ptr.file_syntax(db.upcast()); - let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { - return; - }; - let mut top_if_expr = if_expr; - loop { - let parent = top_if_expr.syntax().parent(); - let has_parent_let_stmt = - parent.as_ref().map_or(false, |node| ast::LetStmt::can_cast(node.kind())); - if has_parent_let_stmt { - // Bail if parent or direct ancestor is a let stmt. 
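// Illustrative sketch (names invented here, not part of the surrounding diff) of the
// distinction this patch enforces: an `if` in statement position with a diverging
// then-branch makes its `else` removable, while an `if` in expression position still
// needs the `else` to supply a value, so no diagnostic should be emitted there.
fn unnecessary_else_sketch(flag: bool) -> i32 {
    if flag {
        return 1;
    } else {
        // statement position: the diagnostic fires and this block can be un-nested
        side_effect();
    }
    // expression position (let initializer): the `else` supplies the value, no diagnostic
    let fallback = if flag { return 2 } else { 3 };
    fallback
}
fn side_effect() {}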
- return; - } - let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { - // Parent is neither an if expr nor a let stmt. - break; - }; - // Check parent if expr. - top_if_expr = parent_if_expr; - } if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), @@ -374,6 +356,36 @@ impl ExprValidator { if let Some(last_then_expr) = last_then_expr { let last_then_expr_ty = &self.infer[last_then_expr]; if last_then_expr_ty.is_never() { + // Only look at sources if the then branch diverges and we have an else branch. + let (_, source_map) = db.body_with_source_map(self.owner); + let Ok(source_ptr) = source_map.expr_syntax(id) else { + return; + }; + let root = source_ptr.file_syntax(db.upcast()); + let ast::Expr::IfExpr(if_expr) = source_ptr.value.to_node(&root) else { + return; + }; + let mut top_if_expr = if_expr; + loop { + let parent = top_if_expr.syntax().parent(); + let has_parent_expr_stmt_or_stmt_list = + parent.as_ref().map_or(false, |node| { + ast::ExprStmt::can_cast(node.kind()) + | ast::StmtList::can_cast(node.kind()) + }); + if has_parent_expr_stmt_or_stmt_list { + // Only emit diagnostic if parent or direct ancestor is either + // an expr stmt or a stmt list. + break; + } + let Some(parent_if_expr) = parent.and_then(ast::IfExpr::cast) else { + // Bail if parent is neither an if expr, an expr stmt nor a stmt list. + return; + }; + // Check parent if expr. + top_if_expr = parent_if_expr; + } + self.diagnostics .push(BodyValidationDiagnostic::RemoveUnnecessaryElse { if_expr: id }) } diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 9564807a334e4..7bfd64596ed2f 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -467,10 +467,10 @@ fn test() { } #[test] - fn no_diagnostic_if_tail_exists_in_else_branch() { + fn no_diagnostic_if_not_expr_stmt() { check_diagnostics_with_needless_return_disabled( r#" -fn test1(a: bool) { +fn test1() { let _x = if a { return; } else { @@ -478,7 +478,7 @@ fn test1(a: bool) { }; } -fn test2(a: bool, b: bool, c: bool) { +fn test2() { let _x = if a { return; } else if b { @@ -491,5 +491,13 @@ fn test2(a: bool, b: bool, c: bool) { } "#, ); + check_diagnostics_with_disabled( + r#" +fn test3() { + foo(if a { return 1 } else { 0 }) +} +"#, + std::iter::once("E0308".to_owned()), + ); } } From 7dfeb2cdcc2a899f929bb3da1c2db7fe6725fb47 Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Mon, 19 Feb 2024 14:43:43 +0300 Subject: [PATCH 057/134] refactor "unnecessary else" diagnostic test --- .../ide-diagnostics/src/handlers/remove_unnecessary_else.rs | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 7bfd64596ed2f..9c63d79d91033 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -491,13 +491,12 @@ fn test2() { } "#, ); - check_diagnostics_with_disabled( + check_diagnostics( r#" -fn test3() { +fn test3() -> u8 { foo(if a { return 1 } else { 0 }) } "#, - std::iter::once("E0308".to_owned()), ); } } From f2218e727840ec0286d89ecc758322674e1efb6d Mon Sep 17 00:00:00 2001 From: davidsemakula Date: Mon, 19 Feb 
2024 15:35:47 +0300 Subject: [PATCH 058/134] refactor: remove body parameter for "unnecessary else" diagnostic --- crates/hir-ty/src/diagnostics/expr.rs | 12 +++--------- 1 file changed, 3 insertions(+), 9 deletions(-) diff --git a/crates/hir-ty/src/diagnostics/expr.rs b/crates/hir-ty/src/diagnostics/expr.rs index 4fe75f24b80c8..6c8a187516575 100644 --- a/crates/hir-ty/src/diagnostics/expr.rs +++ b/crates/hir-ty/src/diagnostics/expr.rs @@ -109,7 +109,7 @@ impl ExprValidator { self.check_for_trailing_return(*body_expr, &body); } Expr::If { .. } => { - self.check_for_unnecessary_else(id, expr, &body, db); + self.check_for_unnecessary_else(id, expr, db); } Expr::Block { .. } => { self.validate_block(db, expr); @@ -337,18 +337,12 @@ impl ExprValidator { } } - fn check_for_unnecessary_else( - &mut self, - id: ExprId, - expr: &Expr, - body: &Body, - db: &dyn HirDatabase, - ) { + fn check_for_unnecessary_else(&mut self, id: ExprId, expr: &Expr, db: &dyn HirDatabase) { if let Expr::If { condition: _, then_branch, else_branch } = expr { if else_branch.is_none() { return; } - if let Expr::Block { statements, tail, .. } = &body.exprs[*then_branch] { + if let Expr::Block { statements, tail, .. } = &self.body.exprs[*then_branch] { let last_then_expr = tail.or_else(|| match statements.last()? { Statement::Expr { expr, .. } => Some(*expr), _ => None, From d818b531c98d5361310e43127e05f5fe02d88013 Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Fri, 16 Feb 2024 14:54:58 +0000 Subject: [PATCH 059/134] internal: make check_diagnostics_with_disabled more ergonomic --- .../src/handlers/incorrect_case.rs | 2 +- .../src/handlers/mutability_errors.rs | 4 ++-- .../src/handlers/remove_trailing_return.rs | 2 +- .../src/handlers/remove_unnecessary_else.rs | 16 ++++++++-------- .../src/handlers/type_mismatch.rs | 2 +- crates/ide-diagnostics/src/tests.rs | 7 ++----- 6 files changed, 15 insertions(+), 18 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/incorrect_case.rs b/crates/ide-diagnostics/src/handlers/incorrect_case.rs index 5e2541795ca1c..db28928a24ea2 100644 --- a/crates/ide-diagnostics/src/handlers/incorrect_case.rs +++ b/crates/ide-diagnostics/src/handlers/incorrect_case.rs @@ -512,7 +512,7 @@ impl BAD_TRAIT for () { fn BadFunction() {} } "#, - std::iter::once("unused_variables".to_owned()), + &["unused_variables"], ); } diff --git a/crates/ide-diagnostics/src/handlers/mutability_errors.rs b/crates/ide-diagnostics/src/handlers/mutability_errors.rs index 3c71f84dc4852..91f1058d65bbc 100644 --- a/crates/ide-diagnostics/src/handlers/mutability_errors.rs +++ b/crates/ide-diagnostics/src/handlers/mutability_errors.rs @@ -448,7 +448,7 @@ fn main(b: bool) { &mut x; } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); check_diagnostics_with_disabled( r#" @@ -463,7 +463,7 @@ fn main(b: bool) { &mut x; } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs index b7667dc318f0c..7a040e46e3386 100644 --- a/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs +++ b/crates/ide-diagnostics/src/handlers/remove_trailing_return.rs @@ -140,7 +140,7 @@ fn foo(x: usize) -> u8 { } //^^^^^^^^^ 💡 weak: replace return ; with } "#, - std::iter::once("remove-unnecessary-else".to_owned()), + &["remove-unnecessary-else"], ); } diff --git 
a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 9c63d79d91033..8310af0f524c8 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -97,13 +97,9 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option i32 { #[test] fn remove_unnecessary_else_for_return_in_child_if_expr() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -229,6 +227,7 @@ fn test() { } } "#, + &["needless_return"], ); check_fix( r#" @@ -453,7 +452,7 @@ fn test() { #[test] fn no_diagnostic_if_no_divergence_in_else_branch() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -463,6 +462,7 @@ fn test() { } } "#, + &["needless_return"], ); } diff --git a/crates/ide-diagnostics/src/handlers/type_mismatch.rs b/crates/ide-diagnostics/src/handlers/type_mismatch.rs index 8c97281b78328..4c255322280fc 100644 --- a/crates/ide-diagnostics/src/handlers/type_mismatch.rs +++ b/crates/ide-diagnostics/src/handlers/type_mismatch.rs @@ -730,7 +730,7 @@ fn f() -> i32 { } fn g() { return; } "#, - std::iter::once("needless_return".to_owned()), + &["needless_return"], ); } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 4e4a851f67e0a..9e134620ee3f1 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -198,12 +198,9 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { } #[track_caller] -pub(crate) fn check_diagnostics_with_disabled( - ra_fixture: &str, - disabled: impl Iterator, -) { +pub(crate) fn check_diagnostics_with_disabled(ra_fixture: &str, disabled: &[&str]) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.extend(disabled); + config.disabled.extend(disabled.into_iter().map(|&s| s.to_owned())); check_diagnostics_with_config(config, ra_fixture) } From a492d9d16489e2801fc0933f969cf3d3a2f88206 Mon Sep 17 00:00:00 2001 From: Rose Hudson Date: Sun, 11 Feb 2024 15:34:52 +0000 Subject: [PATCH 060/134] feat: add unresolved-ident diagnostic --- crates/hir-ty/src/infer.rs | 3 ++ crates/hir-ty/src/infer/expr.rs | 14 +++++- crates/hir/src/diagnostics.rs | 10 ++++ .../src/handlers/inactive_code.rs | 1 + .../src/handlers/missing_fields.rs | 3 +- .../src/handlers/missing_match_arms.rs | 16 ++++--- .../src/handlers/remove_unnecessary_else.rs | 25 ++++++---- .../src/handlers/undeclared_label.rs | 8 ++-- .../src/handlers/unresolved_field.rs | 7 ++- .../src/handlers/unresolved_ident.rs | 46 +++++++++++++++++++ .../src/handlers/unresolved_method.rs | 4 +- crates/ide-diagnostics/src/lib.rs | 2 + 12 files changed, 113 insertions(+), 26 deletions(-) create mode 100644 crates/ide-diagnostics/src/handlers/unresolved_ident.rs diff --git a/crates/hir-ty/src/infer.rs b/crates/hir-ty/src/infer.rs index 1977f00517cd1..9cea414e1a009 100644 --- a/crates/hir-ty/src/infer.rs +++ b/crates/hir-ty/src/infer.rs @@ -221,6 +221,9 @@ pub enum InferenceDiagnostic { UnresolvedAssocItem { id: ExprOrPatId, }, + UnresolvedIdent { + expr: ExprId, + }, // FIXME: This should be emitted in body lowering BreakOutsideOfLoop { expr: ExprId, diff --git a/crates/hir-ty/src/infer/expr.rs b/crates/hir-ty/src/infer/expr.rs index 428ed6748c6c2..c377a51e7d3b7 100644 --- a/crates/hir-ty/src/infer/expr.rs +++ b/crates/hir-ty/src/infer/expr.rs @@ -13,7 +13,7 @@ use 
hir_def::{ ArithOp, Array, BinaryOp, ClosureKind, Expr, ExprId, LabelId, Literal, Statement, UnaryOp, }, lang_item::{LangItem, LangItemTarget}, - path::{GenericArg, GenericArgs}, + path::{GenericArg, GenericArgs, Path}, BlockId, ConstParamId, FieldId, ItemContainerId, Lookup, TupleFieldId, TupleId, }; use hir_expand::name::{name, Name}; @@ -439,7 +439,17 @@ impl InferenceContext<'_> { } Expr::Path(p) => { let g = self.resolver.update_to_inner_scope(self.db.upcast(), self.owner, tgt_expr); - let ty = self.infer_path(p, tgt_expr.into()).unwrap_or_else(|| self.err_ty()); + let ty = match self.infer_path(p, tgt_expr.into()) { + Some(ty) => ty, + None => { + if matches!(p, Path::Normal { mod_path, .. } if mod_path.is_ident()) { + self.push_diagnostic(InferenceDiagnostic::UnresolvedIdent { + expr: tgt_expr, + }); + } + self.err_ty() + } + }; self.resolver.reset_to_guard(g); ty } diff --git a/crates/hir/src/diagnostics.rs b/crates/hir/src/diagnostics.rs index d351e257d2e7c..80cd0c9c794b6 100644 --- a/crates/hir/src/diagnostics.rs +++ b/crates/hir/src/diagnostics.rs @@ -87,6 +87,7 @@ diagnostics![ UnresolvedMacroCall, UnresolvedMethodCall, UnresolvedModule, + UnresolvedIdent, UnresolvedProcMacro, UnusedMut, UnusedVariable, @@ -242,6 +243,11 @@ pub struct UnresolvedAssocItem { pub expr_or_pat: InFile>>>, } +#[derive(Debug)] +pub struct UnresolvedIdent { + pub expr: InFile>, +} + #[derive(Debug)] pub struct PrivateField { pub expr: InFile>, @@ -588,6 +594,10 @@ impl AnyDiagnostic { }; UnresolvedAssocItem { expr_or_pat }.into() } + &InferenceDiagnostic::UnresolvedIdent { expr } => { + let expr = expr_syntax(expr); + UnresolvedIdent { expr }.into() + } &InferenceDiagnostic::BreakOutsideOfLoop { expr, is_break, bad_value_break } => { let expr = expr_syntax(expr); BreakOutsideOfLoop { expr, is_break, bad_value_break }.into() diff --git a/crates/ide-diagnostics/src/handlers/inactive_code.rs b/crates/ide-diagnostics/src/handlers/inactive_code.rs index 7db5ea04fbd06..785a42352bfab 100644 --- a/crates/ide-diagnostics/src/handlers/inactive_code.rs +++ b/crates/ide-diagnostics/src/handlers/inactive_code.rs @@ -60,6 +60,7 @@ fn f() { #[cfg(a)] let x = 0; // let statement //^^^^^^^^^^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled + fn abc() {} abc(#[cfg(a)] 0); //^^^^^^^^^^^ weak: code is inactive due to #[cfg] directives: a is disabled let x = Struct { diff --git a/crates/ide-diagnostics/src/handlers/missing_fields.rs b/crates/ide-diagnostics/src/handlers/missing_fields.rs index c70f39eb286f9..09daefd084dc2 100644 --- a/crates/ide-diagnostics/src/handlers/missing_fields.rs +++ b/crates/ide-diagnostics/src/handlers/missing_fields.rs @@ -634,7 +634,8 @@ struct TestStruct { one: i32, two: i64 } fn test_fn() { let one = 1; - let s = TestStruct{ ..a }; + let a = TestStruct{ one, two: 2 }; + let _ = TestStruct{ ..a }; } "#, ); diff --git a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs index 7632fdf1d090e..8596f5792e0fb 100644 --- a/crates/ide-diagnostics/src/handlers/missing_match_arms.rs +++ b/crates/ide-diagnostics/src/handlers/missing_match_arms.rs @@ -18,7 +18,9 @@ pub(crate) fn missing_match_arms( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config}, + tests::{ + check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, + }, DiagnosticsConfig, }; @@ -282,7 +284,7 @@ fn main() { cov_mark::check_count!(validate_match_bailed_out, 4); // Match 
statements with arms that don't match the // expression pattern do not fire this diagnostic. - check_diagnostics( + check_diagnostics_with_disabled( r#" enum Either { A, B } enum Either2 { C, D } @@ -307,6 +309,7 @@ fn main() { match Unresolved::Bar { Unresolved::Baz => () } } "#, + &["E0425"], ); } @@ -397,11 +400,11 @@ fn main() { match loop {} { Either::A => (), } - match loop { break Foo::A } { - //^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered + match loop { break Either::A } { + //^^^^^^^^^^^^^^^^^^^^^^^^ error: missing match arm: `B` not covered Either::A => (), } - match loop { break Foo::A } { + match loop { break Either::A } { Either::A => (), Either::B => (), } @@ -977,7 +980,7 @@ fn f(ty: Enum) { #[test] fn unexpected_ty_fndef() { cov_mark::check!(validate_match_bailed_out); - check_diagnostics( + check_diagnostics_with_disabled( r" enum Exp { Tuple(()), @@ -987,6 +990,7 @@ fn f() { Exp::Tuple => {} } }", + &["E0425"], ); } diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index 8310af0f524c8..d5095b754696a 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -95,7 +95,7 @@ fn fixes(ctx: &DiagnosticsContext<'_>, d: &RemoveUnnecessaryElse) -> Option ! { loop {} } "#, + &["E0425"], ); check_fix( r#" @@ -422,7 +425,7 @@ fn never() -> ! { #[test] fn no_diagnostic_if_no_else_branch() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -432,12 +435,13 @@ fn test() { do_something_else(); } "#, + &["E0425"], ); } #[test] fn no_diagnostic_if_no_divergence() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test() { if foo { @@ -447,6 +451,7 @@ fn test() { } } "#, + &["E0425"], ); } @@ -462,7 +467,7 @@ fn test() { } } "#, - &["needless_return"], + &["needless_return", "E0425"], ); } diff --git a/crates/ide-diagnostics/src/handlers/undeclared_label.rs b/crates/ide-diagnostics/src/handlers/undeclared_label.rs index a6a0fdc655fb2..97943b7e8b347 100644 --- a/crates/ide-diagnostics/src/handlers/undeclared_label.rs +++ b/crates/ide-diagnostics/src/handlers/undeclared_label.rs @@ -38,10 +38,12 @@ fn foo() { fn while_let_loop_with_label_in_condition() { check_diagnostics( r#" +//- minicore: option + fn foo() { let mut optional = Some(0); - 'my_label: while let Some(a) = match optional { + 'my_label: while let Some(_) = match optional { None => break 'my_label, Some(val) => Some(val), } { @@ -59,8 +61,8 @@ fn foo() { r#" //- minicore: iterator fn foo() { - 'xxx: for _ in unknown { - 'yyy: for _ in unknown { + 'xxx: for _ in [] { + 'yyy: for _ in [] { break 'xxx; continue 'yyy; break 'zzz; diff --git a/crates/ide-diagnostics/src/handlers/unresolved_field.rs b/crates/ide-diagnostics/src/handlers/unresolved_field.rs index 65abfd8a294b2..4c01a2d155a2f 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_field.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_field.rs @@ -78,7 +78,9 @@ fn method_fix( #[cfg(test)] mod tests { use crate::{ - tests::{check_diagnostics, check_diagnostics_with_config}, + tests::{ + check_diagnostics, check_diagnostics_with_config, check_diagnostics_with_disabled, + }, DiagnosticsConfig, }; @@ -148,7 +150,7 @@ fn foo() { #[test] fn no_diagnostic_on_unknown() { - check_diagnostics( + check_diagnostics_with_disabled( r#" fn foo() { x.foo; @@ -156,6 +158,7 @@ fn foo() { (&((x,),),).foo; } "#, + &["E0425"], ); } diff 
--git a/crates/ide-diagnostics/src/handlers/unresolved_ident.rs b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs new file mode 100644 index 0000000000000..295c8a2c615fd --- /dev/null +++ b/crates/ide-diagnostics/src/handlers/unresolved_ident.rs @@ -0,0 +1,46 @@ +use crate::{Diagnostic, DiagnosticCode, DiagnosticsContext}; + +// Diagnostic: unresolved-ident +// +// This diagnostic is triggered if an expr-position ident is invalid. +pub(crate) fn unresolved_ident( + ctx: &DiagnosticsContext<'_>, + d: &hir::UnresolvedIdent, +) -> Diagnostic { + Diagnostic::new_with_syntax_node_ptr( + ctx, + DiagnosticCode::RustcHardError("E0425"), + "no such value in this scope", + d.expr.map(Into::into), + ) + .experimental() +} + +#[cfg(test)] +mod tests { + use crate::tests::check_diagnostics; + + #[test] + fn missing() { + check_diagnostics( + r#" +fn main() { + let _ = x; + //^ error: no such value in this scope +} +"#, + ); + } + + #[test] + fn present() { + check_diagnostics( + r#" +fn main() { + let x = 5; + let _ = x; +} +"#, + ); + } +} diff --git a/crates/ide-diagnostics/src/handlers/unresolved_method.rs b/crates/ide-diagnostics/src/handlers/unresolved_method.rs index 648d081898ceb..0614fdc5514aa 100644 --- a/crates/ide-diagnostics/src/handlers/unresolved_method.rs +++ b/crates/ide-diagnostics/src/handlers/unresolved_method.rs @@ -335,8 +335,8 @@ fn main() { r#" struct Foo { bar: i32 } fn foo() { - Foo { bar: i32 }.bar(); - // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists + Foo { bar: 0 }.bar(); + // ^^^ error: no method `bar` on type `Foo`, but a field with a similar name exists } "#, ); diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 3a3888011d75e..4428b8baafbdb 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -59,6 +59,7 @@ mod handlers { pub(crate) mod unresolved_assoc_item; pub(crate) mod unresolved_extern_crate; pub(crate) mod unresolved_field; + pub(crate) mod unresolved_ident; pub(crate) mod unresolved_import; pub(crate) mod unresolved_macro_call; pub(crate) mod unresolved_method; @@ -377,6 +378,7 @@ pub fn diagnostics( AnyDiagnostic::UnresolvedAssocItem(d) => handlers::unresolved_assoc_item::unresolved_assoc_item(&ctx, &d), AnyDiagnostic::UnresolvedExternCrate(d) => handlers::unresolved_extern_crate::unresolved_extern_crate(&ctx, &d), AnyDiagnostic::UnresolvedField(d) => handlers::unresolved_field::unresolved_field(&ctx, &d), + AnyDiagnostic::UnresolvedIdent(d) => handlers::unresolved_ident::unresolved_ident(&ctx, &d), AnyDiagnostic::UnresolvedImport(d) => handlers::unresolved_import::unresolved_import(&ctx, &d), AnyDiagnostic::UnresolvedMacroCall(d) => handlers::unresolved_macro_call::unresolved_macro_call(&ctx, &d), AnyDiagnostic::UnresolvedMethodCall(d) => handlers::unresolved_method::unresolved_method(&ctx, &d), From 1e448f84c3a05350af700de7903083572bba34fc Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 13:53:29 +0100 Subject: [PATCH 061/134] Clippy --- .../src/handlers/remove_unnecessary_else.rs | 9 ++++++--- crates/ide-diagnostics/src/tests.rs | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs index d5095b754696a..47844876dc540 100644 --- a/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs +++ b/crates/ide-diagnostics/src/handlers/remove_unnecessary_else.rs @@ -179,7 +179,7 @@ fn 
test() { #[test] fn remove_unnecessary_else_for_return3() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test(a: bool) -> i32 { if a { @@ -190,6 +190,7 @@ fn test(a: bool) -> i32 { } } "#, + &["needless_return", "E0425"], ); check_fix( r#" @@ -473,7 +474,7 @@ fn test() { #[test] fn no_diagnostic_if_not_expr_stmt() { - check_diagnostics_with_needless_return_disabled( + check_diagnostics_with_disabled( r#" fn test1() { let _x = if a { @@ -495,13 +496,15 @@ fn test2() { }; } "#, + &["needless_return", "E0425"], ); - check_diagnostics( + check_diagnostics_with_disabled( r#" fn test3() -> u8 { foo(if a { return 1 } else { 0 }) } "#, + &["E0425"], ); } } diff --git a/crates/ide-diagnostics/src/tests.rs b/crates/ide-diagnostics/src/tests.rs index 9e134620ee3f1..901ceffbb266d 100644 --- a/crates/ide-diagnostics/src/tests.rs +++ b/crates/ide-diagnostics/src/tests.rs @@ -200,7 +200,7 @@ pub(crate) fn check_diagnostics(ra_fixture: &str) { #[track_caller] pub(crate) fn check_diagnostics_with_disabled(ra_fixture: &str, disabled: &[&str]) { let mut config = DiagnosticsConfig::test_sample(); - config.disabled.extend(disabled.into_iter().map(|&s| s.to_owned())); + config.disabled.extend(disabled.iter().map(|&s| s.to_owned())); check_diagnostics_with_config(config, ra_fixture) } From d2b27d09ea075b36bac166e7ac029742510e8662 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 16:46:09 +0100 Subject: [PATCH 062/134] Don't populate rust_ir::AdtVariantDatum::fields for now due to perf --- crates/hir-ty/src/chalk_db.rs | 3 +++ crates/hir-ty/src/tests/coercion.rs | 4 +++- crates/hir-ty/src/tests/traits.rs | 12 ++++++------ 3 files changed, 12 insertions(+), 7 deletions(-) diff --git a/crates/hir-ty/src/chalk_db.rs b/crates/hir-ty/src/chalk_db.rs index 49393f05a1ab9..40a195f7d95a7 100644 --- a/crates/hir-ty/src/chalk_db.rs +++ b/crates/hir-ty/src/chalk_db.rs @@ -742,6 +742,8 @@ pub(crate) fn adt_datum_query( phantom_data, }; + #[cfg(FALSE)] + // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it let variant_id_to_fields = |id: VariantId| { let variant_data = &id.variant_data(db.upcast()); let fields = if variant_data.fields().is_empty() { @@ -757,6 +759,7 @@ pub(crate) fn adt_datum_query( }; rust_ir::AdtVariantDatum { fields } }; + let variant_id_to_fields = |_: VariantId| rust_ir::AdtVariantDatum { fields: vec![] }; let (kind, variants) = match adt_id { hir_def::AdtId::StructId(id) => { diff --git a/crates/hir-ty/src/tests/coercion.rs b/crates/hir-ty/src/tests/coercion.rs index bfb8df61a3330..d56b15b9b741e 100644 --- a/crates/hir-ty/src/tests/coercion.rs +++ b/crates/hir-ty/src/tests/coercion.rs @@ -536,7 +536,7 @@ fn test() { #[test] fn coerce_unsize_generic() { - check_no_mismatches( + check( r#" //- minicore: coerce_unsized struct Foo { t: T }; @@ -544,7 +544,9 @@ struct Bar(Foo); fn test() { let _: &Foo<[usize]> = &Foo { t: [1, 2, 3] }; + //^^^^^^^^^^^^^^^^^^^^^ expected &Foo<[usize]>, got &Foo<[i32; 3]> let _: &Bar<[usize]> = &Bar(Foo { t: [1, 2, 3] }); + //^^^^^^^^^^^^^^^^^^^^^^^^^^ expected &Bar<[usize]>, got &Bar<[i32; 3]> } "#, ); diff --git a/crates/hir-ty/src/tests/traits.rs b/crates/hir-ty/src/tests/traits.rs index 68cd6071ec79b..879c69c758fcd 100644 --- a/crates/hir-ty/src/tests/traits.rs +++ b/crates/hir-ty/src/tests/traits.rs @@ -4583,21 +4583,21 @@ fn f() { Struct::::IS_SEND; //^^^^^^^^^^^^^^^^^^^^Yes Struct::::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^Yes 
Struct::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^^^^^^^^Yes Enum::::IS_SEND; //^^^^^^^^^^^^^^^^^^Yes Enum::::IS_SEND; - //^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^Yes Enum::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^^^^^^Yes Union::::IS_SEND; //^^^^^^^^^^^^^^^^^^^Yes Union::::IS_SEND; - //^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^Yes Union::<*const T>::IS_SEND; - //^^^^^^^^^^^^^^^^^^^^^^^^^^{unknown} + //^^^^^^^^^^^^^^^^^^^^^^^^^^Yes PhantomData::::IS_SEND; //^^^^^^^^^^^^^^^^^^^^^^^^^Yes PhantomData::::IS_SEND; From a822291a025f495aacef9201807fce77971e8097 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 18:00:50 +0100 Subject: [PATCH 063/134] Infallible definition hovers --- crates/ide-db/src/defs.rs | 13 ++-- crates/ide/src/hover.rs | 37 ++++++------ crates/ide/src/hover/render.rs | 14 ++--- crates/ide/src/hover/tests.rs | 105 +++++++++++++++++++++++++++++++-- crates/ide/src/static_index.rs | 4 +- 5 files changed, 135 insertions(+), 38 deletions(-) diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index d95d94ec72e2c..747c90561deea 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -213,8 +213,8 @@ impl Definition { }) } - pub fn label(&self, db: &RootDatabase) -> Option { - let label = match *self { + pub fn label(&self, db: &RootDatabase) -> String { + match *self { Definition::Macro(it) => it.display(db).to_string(), Definition::Field(it) => it.display(db).to_string(), Definition::TupleField(it) => it.display(db).to_string(), @@ -241,7 +241,11 @@ impl Definition { } } Definition::SelfType(impl_def) => { - impl_def.self_ty(db).as_adt().and_then(|adt| Definition::Adt(adt).label(db))? + let self_ty = &impl_def.self_ty(db); + match self_ty.as_adt() { + Some(it) => it.display(db).to_string(), + None => self_ty.display(db).to_string(), + } } Definition::GenericParam(it) => it.display(db).to_string(), Definition::Label(it) => it.name(db).display(db).to_string(), @@ -249,8 +253,7 @@ impl Definition { Definition::BuiltinAttr(it) => format!("#[{}]", it.name(db)), Definition::ToolModule(it) => it.name(db).to_string(), Definition::DeriveHelper(it) => format!("derive_helper {}", it.name(db).display(db)), - }; - Some(label) + } } } diff --git a/crates/ide/src/hover.rs b/crates/ide/src/hover.rs index 19b181ae3b61e..4a7350feb385e 100644 --- a/crates/ide/src/hover.rs +++ b/crates/ide/src/hover.rs @@ -147,7 +147,7 @@ fn hover_simple( if let Some(doc_comment) = token_as_doc_comment(&original_token) { cov_mark::hit!(no_highlight_on_comment_hover); return doc_comment.get_definition_with_descend_at(sema, offset, |def, node, range| { - let res = hover_for_definition(sema, file_id, def, &node, config)?; + let res = hover_for_definition(sema, file_id, def, &node, config); Some(RangeInfo::new(range, res)) }); } @@ -161,7 +161,7 @@ fn hover_simple( Definition::from(resolution?), &original_token.parent()?, config, - )?; + ); return Some(RangeInfo::new(range, res)); } @@ -215,7 +215,7 @@ fn hover_simple( }) .flatten() .unique_by(|&(def, _)| def) - .filter_map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) + .map(|(def, node)| hover_for_definition(sema, file_id, def, &node, config)) .reduce(|mut acc: HoverResult, HoverResult { markup, actions }| { acc.actions.extend(actions); acc.markup = Markup::from(format!("{}\n---\n{markup}", acc.markup)); @@ -373,9 +373,9 @@ pub(crate) fn hover_for_definition( def: Definition, scope_node: 
&SyntaxNode, config: &HoverConfig, -) -> Option { +) -> HoverResult { let famous_defs = match &def { - Definition::BuiltinType(_) => Some(FamousDefs(sema, sema.scope(scope_node)?.krate())), + Definition::BuiltinType(_) => sema.scope(scope_node).map(|it| FamousDefs(sema, it.krate())), _ => None, }; @@ -396,20 +396,19 @@ pub(crate) fn hover_for_definition( }; let notable_traits = def_ty.map(|ty| notable_traits(db, &ty)).unwrap_or_default(); - render::definition(sema.db, def, famous_defs.as_ref(), ¬able_traits, config).map(|markup| { - HoverResult { - markup: render::process_markup(sema.db, def, &markup, config), - actions: [ - show_implementations_action(sema.db, def), - show_fn_references_action(sema.db, def), - runnable_action(sema, def, file_id), - goto_type_action_for_def(sema.db, def, ¬able_traits), - ] - .into_iter() - .flatten() - .collect(), - } - }) + let markup = render::definition(sema.db, def, famous_defs.as_ref(), ¬able_traits, config); + HoverResult { + markup: render::process_markup(sema.db, def, &markup, config), + actions: [ + show_implementations_action(sema.db, def), + show_fn_references_action(sema.db, def), + runnable_action(sema, def, file_id), + goto_type_action_for_def(sema.db, def, ¬able_traits), + ] + .into_iter() + .flatten() + .collect(), + } } fn notable_traits( diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index eff055c959926..42342d94b6d13 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -264,7 +264,7 @@ pub(super) fn keyword( let markup = process_markup( sema.db, Definition::Module(doc_owner), - &markup(Some(docs.into()), description, None)?, + &markup(Some(docs.into()), description, None), config, ); Some(HoverResult { markup, actions }) @@ -396,11 +396,11 @@ pub(super) fn definition( famous_defs: Option<&FamousDefs<'_, '_>>, notable_traits: &[(Trait, Vec<(Option, Name)>)], config: &HoverConfig, -) -> Option { +) -> Markup { let mod_path = definition_mod_path(db, &def); - let label = def.label(db)?; + let label = def.label(db); let docs = def.docs(db, famous_defs); - let value = match def { + let value = (|| match def { Definition::Variant(it) => { if !it.parent_enum(db).is_data_carrying(db) { match it.eval(db) { @@ -436,7 +436,7 @@ pub(super) fn definition( Some(body.to_string()) } _ => None, - }; + })(); let layout_info = match def { Definition::Field(it) => render_memory_layout( @@ -683,7 +683,7 @@ fn definition_mod_path(db: &RootDatabase, def: &Definition) -> Option { def.module(db).map(|module| path(db, module, definition_owner_name(db, def))) } -fn markup(docs: Option, desc: String, mod_path: Option) -> Option { +fn markup(docs: Option, desc: String, mod_path: Option) -> Markup { let mut buf = String::new(); if let Some(mod_path) = mod_path { @@ -696,7 +696,7 @@ fn markup(docs: Option, desc: String, mod_path: Option) -> Optio if let Some(doc) = docs { format_to!(buf, "\n___\n\n{}", doc); } - Some(buf.into()) + buf.into() } fn find_std_module(famous_defs: &FamousDefs<'_, '_>, name: &str) -> Option { diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 69ddc1e45efbd..157f8ff371ef2 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -1279,11 +1279,11 @@ impl Thing { ); check( r#" - enum Thing { A } - impl Thing { - pub fn thing(a: Self$0) {} - } - "#, +enum Thing { A } +impl Thing { + pub fn thing(a: Self$0) {} +} +"#, expect![[r#" *Self* @@ -1298,6 +1298,42 @@ impl Thing { ``` "#]], ); + check( + r#" +impl usize { + pub fn thing(a: 
Self$0) {} +} +"#, + expect![[r#" + *Self* + + ```rust + test + ``` + + ```rust + usize + ``` + "#]], + ); + check( + r#" +impl fn() -> usize { + pub fn thing(a: Self$0) {} +} +"#, + expect![[r#" + *Self* + + ```rust + test + ``` + + ```rust + fn() -> usize + ``` + "#]], + ); } #[test] @@ -7201,6 +7237,65 @@ impl Iterator for S { ); } +#[test] +fn extern_items() { + check( + r#" +extern "C" { + static STATIC$0: (); +} +"#, + expect![[r#" + *STATIC* + + ```rust + test + ``` + + ```rust + static STATIC: () + ``` + "#]], + ); + check( + r#" +extern "C" { + fn fun$0(); +} +"#, + expect![[r#" + *fun* + + ```rust + test + ``` + + ```rust + unsafe fn fun() + ``` + "#]], + ); + check( + r#" +extern "C" { + type Ty$0; +} +"#, + expect![[r#" + *Ty* + + ```rust + test + ``` + + ```rust + // size = 0, align = 1 + type Ty + ``` + "#]], + ); +} + #[test] fn notable_ranged() { check_hover_range( diff --git a/crates/ide/src/static_index.rs b/crates/ide/src/static_index.rs index 5feaf21aa9795..2929a7522e591 100644 --- a/crates/ide/src/static_index.rs +++ b/crates/ide/src/static_index.rs @@ -186,7 +186,7 @@ impl StaticIndex<'_> { } else { let it = self.tokens.insert(TokenStaticData { documentation: documentation_for_definition(&sema, def, &node), - hover: hover_for_definition(&sema, file_id, def, &node, &hover_config), + hover: Some(hover_for_definition(&sema, file_id, def, &node, &hover_config)), definition: def.try_to_nav(self.db).map(UpmappingResult::call_site).map(|it| { FileRange { file_id: it.file_id, range: it.focus_or_full_range() } }), @@ -196,7 +196,7 @@ impl StaticIndex<'_> { enclosing_moniker: current_crate .zip(def.enclosing_definition(self.db)) .and_then(|(cc, enclosing_def)| def_to_moniker(self.db, enclosing_def, cc)), - signature: def.label(self.db), + signature: Some(def.label(self.db)), kind: def_to_kind(self.db, def), }); self.def_map.insert(def, it); From 47b21730c4bf6ff2a93929e58009646fd022e3c8 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Wed, 31 Jan 2024 03:24:24 +0100 Subject: [PATCH 064/134] Factor out unspecialization --- .../rustc_pattern_analysis/src/usefulness.rs | 36 +++++++++++-------- 1 file changed, 21 insertions(+), 15 deletions(-) diff --git a/compiler/rustc_pattern_analysis/src/usefulness.rs b/compiler/rustc_pattern_analysis/src/usefulness.rs index d35b0248e415c..ffea76e4d53f7 100644 --- a/compiler/rustc_pattern_analysis/src/usefulness.rs +++ b/compiler/rustc_pattern_analysis/src/usefulness.rs @@ -1118,6 +1118,25 @@ impl<'p, Cx: TypeCx> Matrix<'p, Cx> { } Ok(matrix) } + + /// Recover row usefulness and intersection information from a processed specialized matrix. + /// `specialized` must come from `self.specialize_constructor`. + fn unspecialize(&mut self, specialized: Self) { + for child_row in specialized.rows() { + let parent_row_id = child_row.parent_row; + let parent_row = &mut self.rows[parent_row_id]; + // A parent row is useful if any of its children is. + parent_row.useful |= child_row.useful; + for child_intersection in child_row.intersects.iter() { + // Convert the intersecting ids into ids for the parent matrix. + let parent_intersection = specialized.rows[child_intersection].parent_row; + // Note: self-intersection can happen with or-patterns. + if parent_intersection != parent_row_id { + parent_row.intersects.insert(parent_intersection); + } + } + } + } } /// Pretty-printer for matrices of patterns, example: @@ -1542,21 +1561,6 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( // Accumulate the found witnesses. 
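// Illustrative sketch of the propagation that the `unspecialize` helper factored out in
// this patch performs: usefulness computed on the specialized child rows is folded back
// into the parent rows they came from. The types here are simplified stand-ins, not the
// real rustc_pattern_analysis ones.
struct RowSketch { parent_row: usize, useful: bool }
fn unspecialize_sketch(parent_rows: &mut [RowSketch], specialized_rows: &[RowSketch]) {
    for child in specialized_rows {
        // A parent row is useful if any of its children is.
        parent_rows[child.parent_row].useful |= child.useful;
    }
}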
ret.extend(witnesses); - for child_row in spec_matrix.rows() { - let parent_row_id = child_row.parent_row; - let parent_row = &mut matrix.rows[parent_row_id]; - // A parent row is useful if any of its children is. - parent_row.useful |= child_row.useful; - for child_intersection in child_row.intersects.iter() { - // Convert the intersecting ids into ids for the parent matrix. - let parent_intersection = spec_matrix.rows[child_intersection].parent_row; - // Note: self-intersection can happen with or-patterns. - if parent_intersection != parent_row_id { - parent_row.intersects.insert(parent_intersection); - } - } - } - // Detect ranges that overlap on their endpoints. if let Constructor::IntRange(overlap_range) = ctor { if overlap_range.is_singleton() @@ -1566,6 +1570,8 @@ fn compute_exhaustiveness_and_usefulness<'a, 'p, Cx: TypeCx>( collect_overlapping_range_endpoints(mcx, overlap_range, matrix, &spec_matrix); } } + + matrix.unspecialize(spec_matrix); } // Record usefulness in the patterns. From d93096ecc0cb530d851ebbd58dce6cd2e68c850f Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 10:40:39 +0100 Subject: [PATCH 065/134] internal: Fetch toolchain and datalayout for DetachedFiles --- crates/hir-def/src/body/pretty.rs | 4 +- crates/hir-def/src/import_map.rs | 4 +- crates/hir-def/src/item_tree.rs | 6 +- crates/hir-def/src/item_tree/lower.rs | 26 ++++- crates/hir-def/src/item_tree/pretty.rs | 9 +- crates/hir-def/src/nameres.rs | 2 +- crates/hir-def/src/nameres/collector.rs | 2 +- crates/hir-def/src/nameres/tests/macros.rs | 3 - .../src/diagnostics/match_check/pat_util.rs | 2 +- crates/hir-ty/src/mir/eval/shim.rs | 8 +- crates/hir-ty/src/mir/eval/shim/simd.rs | 1 + crates/hir-ty/src/mir/lower.rs | 10 +- .../hir-ty/src/mir/lower/pattern_matching.rs | 13 ++- .../src/handlers/generate_delegate_methods.rs | 2 +- .../src/completions/item_list/trait_impl.rs | 2 +- crates/ide-db/src/imports/insert_use/tests.rs | 1 - crates/ide-db/src/symbol_index.rs | 3 +- crates/ide/src/join_lines.rs | 1 - crates/load-cargo/src/lib.rs | 15 ++- crates/project-model/src/workspace.rs | 96 ++++++++++++++----- crates/rust-analyzer/src/cargo_target_spec.rs | 3 +- crates/rust-analyzer/src/cli/lsif.rs | 2 +- crates/rust-analyzer/src/cli/rustc_tests.rs | 22 +++-- crates/rust-analyzer/src/cli/scip.rs | 2 +- crates/salsa/salsa-macros/src/query_group.rs | 1 - crates/salsa/src/debug.rs | 1 - crates/salsa/src/derived.rs | 1 - crates/salsa/src/input.rs | 1 - crates/salsa/src/interned.rs | 1 - 29 files changed, 164 insertions(+), 80 deletions(-) diff --git a/crates/hir-def/src/body/pretty.rs b/crates/hir-def/src/body/pretty.rs index 7007dea638ef9..cd14f7b855a81 100644 --- a/crates/hir-def/src/body/pretty.rs +++ b/crates/hir-def/src/body/pretty.rs @@ -6,8 +6,8 @@ use itertools::Itertools; use crate::{ hir::{ - Array, BindingAnnotation, BindingId, CaptureBy, ClosureKind, Literal, LiteralOrConst, - Movability, Statement, + Array, BindingAnnotation, CaptureBy, ClosureKind, Literal, LiteralOrConst, Movability, + Statement, }, pretty::{print_generic_args, print_path, print_type_ref}, type_ref::TypeRef, diff --git a/crates/hir-def/src/import_map.rs b/crates/hir-def/src/import_map.rs index 98982c7db8406..faa1eed15a45a 100644 --- a/crates/hir-def/src/import_map.rs +++ b/crates/hir-def/src/import_map.rs @@ -3,7 +3,7 @@ use std::{fmt, hash::BuildHasherDefault}; use base_db::CrateId; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir_expand::name::Name; use 
indexmap::IndexMap; use itertools::Itertools; @@ -477,7 +477,7 @@ mod tests { use expect_test::{expect, Expect}; use test_fixture::WithFixture; - use crate::{db::DefDatabase, test_db::TestDB, ItemContainerId, Lookup}; + use crate::{test_db::TestDB, ItemContainerId, Lookup}; use super::*; diff --git a/crates/hir-def/src/item_tree.rs b/crates/hir-def/src/item_tree.rs index be16a5e31a23c..bb36950f95acd 100644 --- a/crates/hir-def/src/item_tree.rs +++ b/crates/hir-def/src/item_tree.rs @@ -44,13 +44,13 @@ use std::{ ops::{Index, Range}, }; -use ast::{AstNode, HasName, StructKind}; +use ast::{AstNode, StructKind}; use base_db::CrateId; use either::Either; use hir_expand::{ ast_id_map::{AstIdNode, FileAstId}, attrs::RawAttrs, - name::{name, AsName, Name}, + name::Name, ExpandTo, HirFileId, InFile, }; use intern::Interned; @@ -67,7 +67,7 @@ use crate::{ attr::Attrs, db::DefDatabase, generics::{GenericParams, LifetimeParamData, TypeOrConstParamData}, - path::{path, AssociatedTypeBinding, GenericArgs, ImportAlias, ModPath, Path, PathKind}, + path::{GenericArgs, ImportAlias, ModPath, Path, PathKind}, type_ref::{Mutability, TraitRef, TypeBound, TypeRef}, visibility::{RawVisibility, VisibilityExplicitness}, BlockId, Lookup, diff --git a/crates/hir-def/src/item_tree/lower.rs b/crates/hir-def/src/item_tree/lower.rs index e0aa3ae612352..37fdece876810 100644 --- a/crates/hir-def/src/item_tree/lower.rs +++ b/crates/hir-def/src/item_tree/lower.rs @@ -2,17 +2,33 @@ use std::collections::hash_map::Entry; -use hir_expand::{ast_id_map::AstIdMap, span_map::SpanMapRef, HirFileId}; -use syntax::ast::{self, HasModuleItem, HasTypeBounds, IsString}; +use hir_expand::{ + ast_id_map::AstIdMap, mod_path::path, name, name::AsName, span_map::SpanMapRef, HirFileId, +}; +use la_arena::Arena; +use syntax::{ + ast::{self, HasModuleItem, HasName, HasTypeBounds, IsString}, + AstNode, +}; +use triomphe::Arc; use crate::{ + db::DefDatabase, generics::{GenericParams, GenericParamsCollector, TypeParamData, TypeParamProvenance}, - type_ref::{LifetimeRef, TraitBoundModifier, TraitRef}, + item_tree::{ + AssocItem, AttrOwner, Const, Either, Enum, ExternBlock, ExternCrate, Field, FieldAstId, + Fields, FileItemTreeId, FnFlags, Function, GenericArgs, Idx, IdxRange, Impl, ImportAlias, + Interned, ItemTree, ItemTreeData, ItemTreeNode, Macro2, MacroCall, MacroRules, Mod, + ModItem, ModKind, ModPath, Mutability, Name, Param, ParamAstId, Path, Range, RawAttrs, + RawIdx, RawVisibilityId, Static, Struct, StructKind, Trait, TraitAlias, TypeAlias, Union, + Use, UseTree, UseTreeKind, Variant, + }, + path::AssociatedTypeBinding, + type_ref::{LifetimeRef, TraitBoundModifier, TraitRef, TypeBound, TypeRef}, + visibility::RawVisibility, LocalLifetimeParamId, LocalTypeOrConstParamId, }; -use super::*; - fn id(index: Idx) -> FileItemTreeId { FileItemTreeId(index) } diff --git a/crates/hir-def/src/item_tree/pretty.rs b/crates/hir-def/src/item_tree/pretty.rs index 0086b7180b2bd..87c90a4c6ab94 100644 --- a/crates/hir-def/src/item_tree/pretty.rs +++ b/crates/hir-def/src/item_tree/pretty.rs @@ -6,12 +6,17 @@ use span::ErasedFileAstId; use crate::{ generics::{TypeOrConstParamData, WherePredicate, WherePredicateTypeTarget}, + item_tree::{ + AttrOwner, Const, DefDatabase, Enum, ExternBlock, ExternCrate, Field, FieldAstId, Fields, + FileItemTreeId, FnFlags, Function, GenericParams, Impl, Interned, ItemTree, Macro2, + MacroCall, MacroRules, Mod, ModItem, ModKind, Param, ParamAstId, Path, RawAttrs, + RawVisibilityId, Static, Struct, Trait, TraitAlias, TypeAlias, 
TypeBound, TypeRef, Union, + Use, UseTree, UseTreeKind, Variant, + }, pretty::{print_path, print_type_bounds, print_type_ref}, visibility::RawVisibility, }; -use super::*; - pub(super) fn print_item_tree(db: &dyn DefDatabase, tree: &ItemTree) -> String { let mut p = Printer { db, tree, buf: String::new(), indent_level: 0, needs_indent: true }; diff --git a/crates/hir-def/src/nameres.rs b/crates/hir-def/src/nameres.rs index 2a9390e797808..a2eca066438af 100644 --- a/crates/hir-def/src/nameres.rs +++ b/crates/hir-def/src/nameres.rs @@ -57,7 +57,7 @@ pub mod proc_macro; #[cfg(test)] mod tests; -use std::{cmp::Ord, ops::Deref}; +use std::ops::Deref; use base_db::{CrateId, Edition, FileId}; use hir_expand::{ diff --git a/crates/hir-def/src/nameres/collector.rs b/crates/hir-def/src/nameres/collector.rs index 88838f58fe787..32825406505de 100644 --- a/crates/hir-def/src/nameres/collector.rs +++ b/crates/hir-def/src/nameres/collector.rs @@ -2446,7 +2446,7 @@ mod tests { use base_db::SourceDatabase; use test_fixture::WithFixture; - use crate::{db::DefDatabase, test_db::TestDB}; + use crate::test_db::TestDB; use super::*; diff --git a/crates/hir-def/src/nameres/tests/macros.rs b/crates/hir-def/src/nameres/tests/macros.rs index bf89ea711a0a1..d278b75e8158a 100644 --- a/crates/hir-def/src/nameres/tests/macros.rs +++ b/crates/hir-def/src/nameres/tests/macros.rs @@ -1,10 +1,7 @@ use expect_test::expect; -use test_fixture::WithFixture; use itertools::Itertools; -use crate::nameres::tests::check; - use super::*; #[test] diff --git a/crates/hir-ty/src/diagnostics/match_check/pat_util.rs b/crates/hir-ty/src/diagnostics/match_check/pat_util.rs index 217454499ef6d..c6a26cdd1d0f8 100644 --- a/crates/hir-ty/src/diagnostics/match_check/pat_util.rs +++ b/crates/hir-ty/src/diagnostics/match_check/pat_util.rs @@ -2,7 +2,7 @@ //! //! 
Originates from `rustc_hir::pat_util` -use std::iter::{Enumerate, ExactSizeIterator}; +use std::iter::Enumerate; pub(crate) struct EnumerateAndAdjust { enumerate: Enumerate, diff --git a/crates/hir-ty/src/mir/eval/shim.rs b/crates/hir-ty/src/mir/eval/shim.rs index d68803fe2801a..cd992d0760204 100644 --- a/crates/hir-ty/src/mir/eval/shim.rs +++ b/crates/hir-ty/src/mir/eval/shim.rs @@ -8,9 +8,13 @@ use hir_def::{ builtin_type::{BuiltinInt, BuiltinUint}, resolver::HasResolver, }; -use hir_expand::mod_path::ModPath; -use super::*; +use crate::mir::eval::{ + name, pad16, static_lifetime, Address, AdtId, Arc, BuiltinType, Evaluator, FunctionId, + HasModule, HirDisplay, Interned, InternedClosure, Interner, Interval, IntervalAndTy, + IntervalOrOwned, ItemContainerId, LangItem, Layout, Locals, Lookup, MirEvalError, MirSpan, + ModPath, Mutability, Result, Substitution, Ty, TyBuilder, TyExt, +}; mod simd; diff --git a/crates/hir-ty/src/mir/eval/shim/simd.rs b/crates/hir-ty/src/mir/eval/shim/simd.rs index eddfd0acfb98c..e229a4ab31727 100644 --- a/crates/hir-ty/src/mir/eval/shim/simd.rs +++ b/crates/hir-ty/src/mir/eval/shim/simd.rs @@ -2,6 +2,7 @@ use std::cmp::Ordering; +use crate::consteval::try_const_usize; use crate::TyKind; use super::*; diff --git a/crates/hir-ty/src/mir/lower.rs b/crates/hir-ty/src/mir/lower.rs index b038900cdacba..ed316f972689f 100644 --- a/crates/hir-ty/src/mir/lower.rs +++ b/crates/hir-ty/src/mir/lower.rs @@ -31,14 +31,20 @@ use crate::{ inhabitedness::is_ty_uninhabited_from, layout::LayoutError, mapping::ToChalk, + mir::{ + intern_const_scalar, return_slot, AggregateKind, Arena, BasicBlock, BasicBlockId, BinOp, + BorrowKind, CastKind, ClosureId, ConstScalar, Either, Expr, FieldId, Idx, InferenceResult, + Interner, Local, LocalId, MemoryMap, MirBody, MirSpan, Mutability, Operand, Place, + PlaceElem, PointerCast, ProjectionElem, ProjectionStore, RawIdx, Rvalue, Statement, + StatementKind, Substitution, SwitchTargets, Terminator, TerminatorKind, TupleFieldId, Ty, + UnOp, VariantId, + }, static_lifetime, traits::FnTrait, utils::{generics, ClosureSubst}, Adjust, Adjustment, AutoBorrow, CallableDefId, TyBuilder, TyExt, }; -use super::*; - mod as_place; mod pattern_matching; diff --git a/crates/hir-ty/src/mir/lower/pattern_matching.rs b/crates/hir-ty/src/mir/lower/pattern_matching.rs index 8202bac532f7a..85c8d1685b874 100644 --- a/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -2,9 +2,16 @@ use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; -use crate::BindingMode; - -use super::*; +use crate::{ + mir::lower::{ + BasicBlockId, BinOp, BindingId, BorrowKind, Either, Expr, FieldId, Idx, Interner, + MemoryMap, MirLowerCtx, MirLowerError, MirSpan, Mutability, Operand, Pat, PatId, Place, + PlaceElem, ProjectionElem, RecordFieldPat, ResolveValueResult, Result, Rvalue, + Substitution, SwitchTargets, TerminatorKind, TupleFieldId, TupleId, TyBuilder, TyKind, + ValueNs, VariantData, VariantId, + }, + BindingMode, +}; macro_rules! 
not_supported { ($x: expr) => { diff --git a/crates/ide-assists/src/handlers/generate_delegate_methods.rs b/crates/ide-assists/src/handlers/generate_delegate_methods.rs index 4f2df5633c3cb..38f40b8d58b4c 100644 --- a/crates/ide-assists/src/handlers/generate_delegate_methods.rs +++ b/crates/ide-assists/src/handlers/generate_delegate_methods.rs @@ -1,4 +1,4 @@ -use hir::{self, HasCrate, HasVisibility}; +use hir::{HasCrate, HasVisibility}; use ide_db::{path_transform::PathTransform, FxHashSet}; use syntax::{ ast::{ diff --git a/crates/ide-completion/src/completions/item_list/trait_impl.rs b/crates/ide-completion/src/completions/item_list/trait_impl.rs index 3c4b89ca742ec..7394d63be5868 100644 --- a/crates/ide-completion/src/completions/item_list/trait_impl.rs +++ b/crates/ide-completion/src/completions/item_list/trait_impl.rs @@ -31,7 +31,7 @@ //! } //! ``` -use hir::{self, HasAttrs}; +use hir::HasAttrs; use ide_db::{ documentation::HasDocs, path_transform::PathTransform, syntax_helpers::insert_whitespace_into_node, traits::get_missing_assoc_items, SymbolKind, diff --git a/crates/ide-db/src/imports/insert_use/tests.rs b/crates/ide-db/src/imports/insert_use/tests.rs index 6b0fecae26758..10c285a13fbcc 100644 --- a/crates/ide-db/src/imports/insert_use/tests.rs +++ b/crates/ide-db/src/imports/insert_use/tests.rs @@ -1,4 +1,3 @@ -use hir::PrefixKind; use stdx::trim_indent; use test_fixture::WithFixture; use test_utils::{assert_eq_text, CURSOR_MARKER}; diff --git a/crates/ide-db/src/symbol_index.rs b/crates/ide-db/src/symbol_index.rs index 92c09089e1f13..c65467a43249b 100644 --- a/crates/ide-db/src/symbol_index.rs +++ b/crates/ide-db/src/symbol_index.rs @@ -31,7 +31,7 @@ use base_db::{ salsa::{self, ParallelDatabase}, SourceDatabaseExt, SourceRootId, Upcast, }; -use fst::{self, raw::IndexedValue, Automaton, Streamer}; +use fst::{raw::IndexedValue, Automaton, Streamer}; use hir::{ db::HirDatabase, import_map::{AssocSearchMode, SearchMode}, @@ -394,7 +394,6 @@ impl Query { mod tests { use expect_test::expect_file; - use hir::symbols::SymbolCollector; use test_fixture::WithFixture; use super::*; diff --git a/crates/ide/src/join_lines.rs b/crates/ide/src/join_lines.rs index fef0ec35ba091..815a4ba7fd702 100644 --- a/crates/ide/src/join_lines.rs +++ b/crates/ide/src/join_lines.rs @@ -303,7 +303,6 @@ fn compute_ws(left: SyntaxKind, right: SyntaxKind) -> &'static str { #[cfg(test)] mod tests { - use syntax::SourceFile; use test_utils::{add_cursor, assert_eq_text, extract_offset, extract_range}; use super::*; diff --git a/crates/load-cargo/src/lib.rs b/crates/load-cargo/src/lib.rs index 8c5592da63ecd..830d19a709c42 100644 --- a/crates/load-cargo/src/lib.rs +++ b/crates/load-cargo/src/lib.rs @@ -309,6 +309,10 @@ fn load_crate_graph( vfs: &mut vfs::Vfs, receiver: &Receiver, ) -> AnalysisHost { + let (ProjectWorkspace::Cargo { toolchain, target_layout, .. } + | ProjectWorkspace::Json { toolchain, target_layout, .. } + | ProjectWorkspace::DetachedFiles { toolchain, target_layout, .. }) = ws; + let lru_cap = std::env::var("RA_LRU_CAP").ok().and_then(|it| it.parse::().ok()); let mut host = AnalysisHost::new(lru_cap); let mut analysis_change = Change::new(); @@ -344,14 +348,9 @@ fn load_crate_graph( let num_crates = crate_graph.len(); analysis_change.set_crate_graph(crate_graph); analysis_change.set_proc_macros(proc_macros); - if let ProjectWorkspace::Cargo { toolchain, target_layout, .. } - | ProjectWorkspace::Json { toolchain, target_layout, .. 
} = ws - { - analysis_change.set_target_data_layouts( - iter::repeat(target_layout.clone()).take(num_crates).collect(), - ); - analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); - } + analysis_change + .set_target_data_layouts(iter::repeat(target_layout.clone()).take(num_crates).collect()); + analysis_change.set_toolchains(iter::repeat(toolchain.clone()).take(num_crates).collect()); host.apply_change(analysis_change); host diff --git a/crates/project-model/src/workspace.rs b/crates/project-model/src/workspace.rs index b7ae76be8cec0..bcb5dcadb5b99 100644 --- a/crates/project-model/src/workspace.rs +++ b/crates/project-model/src/workspace.rs @@ -100,6 +100,8 @@ pub enum ProjectWorkspace { /// Holds cfg flags for the current target. We get those by running /// `rustc --print cfg`. rustc_cfg: Vec, + toolchain: Option, + target_layout: TargetLayoutLoadResult, }, } @@ -145,16 +147,24 @@ impl fmt::Debug for ProjectWorkspace { debug_struct.field("n_sysroot_crates", &sysroot.num_packages()); } debug_struct - .field("toolchain", &toolchain) .field("n_rustc_cfg", &rustc_cfg.len()) + .field("toolchain", &toolchain) .field("data_layout", &data_layout); debug_struct.finish() } - ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => f + ProjectWorkspace::DetachedFiles { + files, + sysroot, + rustc_cfg, + toolchain, + target_layout, + } => f .debug_struct("DetachedFiles") .field("n_files", &files.len()) .field("sysroot", &sysroot.is_ok()) .field("n_rustc_cfg", &rustc_cfg.len()) + .field("toolchain", &toolchain) + .field("data_layout", &target_layout) .finish(), } } @@ -403,32 +413,54 @@ impl ProjectWorkspace { detached_files: Vec, config: &CargoConfig, ) -> anyhow::Result { + let dir = detached_files + .first() + .and_then(|it| it.parent()) + .ok_or_else(|| format_err!("No detached files to load"))?; let sysroot = match &config.sysroot { Some(RustLibSource::Path(path)) => { Sysroot::with_sysroot_dir(path.clone(), config.sysroot_query_metadata) .map_err(|e| Some(format!("Failed to find sysroot at {path}:{e}"))) } - Some(RustLibSource::Discover) => { - let dir = &detached_files - .first() - .and_then(|it| it.parent()) - .ok_or_else(|| format_err!("No detached files to load"))?; - Sysroot::discover(dir, &config.extra_env, config.sysroot_query_metadata).map_err( - |e| { - Some(format!( - "Failed to find sysroot for {dir}. Is rust-src installed? {e}" - )) - }, - ) - } + Some(RustLibSource::Discover) => Sysroot::discover( + dir, + &config.extra_env, + config.sysroot_query_metadata, + ) + .map_err(|e| { + Some(format!("Failed to find sysroot for {dir}. Is rust-src installed? 
{e}")) + }), None => Err(None), }; - let rustc_cfg = rustc_cfg::get( + + let sysroot_ref = sysroot.as_ref().ok(); + let toolchain = match get_toolchain_version( + dir, + sysroot_ref, + toolchain::Tool::Rustc, + &config.extra_env, + "rustc ", + ) { + Ok(it) => it, + Err(e) => { + tracing::error!("{e}"); + None + } + }; + + let rustc_cfg = rustc_cfg::get(None, &config.extra_env, RustcCfgConfig::Rustc(sysroot_ref)); + let data_layout = target_data_layout::get( + RustcDataLayoutConfig::Rustc(sysroot_ref), None, - &FxHashMap::default(), - RustcCfgConfig::Rustc(sysroot.as_ref().ok()), + &config.extra_env, ); - Ok(ProjectWorkspace::DetachedFiles { files: detached_files, sysroot, rustc_cfg }) + Ok(ProjectWorkspace::DetachedFiles { + files: detached_files, + sysroot, + rustc_cfg, + toolchain, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), + }) } /// Runs the build scripts for this [`ProjectWorkspace`]. @@ -724,7 +756,13 @@ impl ProjectWorkspace { cfg_overrides, build_scripts, ), - ProjectWorkspace::DetachedFiles { files, sysroot, rustc_cfg } => { + ProjectWorkspace::DetachedFiles { + files, + sysroot, + rustc_cfg, + toolchain: _, + target_layout: _, + } => { detached_files_to_crate_graph(rustc_cfg.clone(), load, files, sysroot.as_ref().ok()) } }; @@ -786,9 +824,21 @@ impl ProjectWorkspace { && toolchain == o_toolchain } ( - Self::DetachedFiles { files, sysroot, rustc_cfg }, - Self::DetachedFiles { files: o_files, sysroot: o_sysroot, rustc_cfg: o_rustc_cfg }, - ) => files == o_files && sysroot == o_sysroot && rustc_cfg == o_rustc_cfg, + Self::DetachedFiles { files, sysroot, rustc_cfg, toolchain, target_layout }, + Self::DetachedFiles { + files: o_files, + sysroot: o_sysroot, + rustc_cfg: o_rustc_cfg, + toolchain: o_toolchain, + target_layout: o_target_layout, + }, + ) => { + files == o_files + && sysroot == o_sysroot + && rustc_cfg == o_rustc_cfg + && toolchain == o_toolchain + && target_layout == o_target_layout + } _ => false, } } diff --git a/crates/rust-analyzer/src/cargo_target_spec.rs b/crates/rust-analyzer/src/cargo_target_spec.rs index 9a9357a539897..815a98980b93b 100644 --- a/crates/rust-analyzer/src/cargo_target_spec.rs +++ b/crates/rust-analyzer/src/cargo_target_spec.rs @@ -4,7 +4,7 @@ use std::mem; use cfg::{CfgAtom, CfgExpr}; use ide::{Cancellable, CrateId, FileId, RunnableKind, TestId}; -use project_model::{self, CargoFeatures, ManifestPath, TargetKind}; +use project_model::{CargoFeatures, ManifestPath, TargetKind}; use rustc_hash::FxHashSet; use vfs::AbsPathBuf; @@ -208,7 +208,6 @@ fn required_features(cfg_expr: &CfgExpr, features: &mut Vec) { mod tests { use super::*; - use cfg::CfgExpr; use mbe::{syntax_node_to_token_tree, DummyTestSpanMap, DUMMY}; use syntax::{ ast::{self, AstNode}, diff --git a/crates/rust-analyzer/src/cli/lsif.rs b/crates/rust-analyzer/src/cli/lsif.rs index 1424a775777fd..5e810463db6cb 100644 --- a/crates/rust-analyzer/src/cli/lsif.rs +++ b/crates/rust-analyzer/src/cli/lsif.rs @@ -13,7 +13,7 @@ use ide_db::{ LineIndexDatabase, }; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; -use lsp_types::{self, lsif}; +use lsp_types::lsif; use project_model::{CargoConfig, ProjectManifest, ProjectWorkspace, RustLibSource}; use rustc_hash::FxHashMap; use vfs::{AbsPathBuf, Vfs}; diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index 64ea246a45872..25f84d770bfb3 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ 
b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -5,7 +5,8 @@ use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::Path use hir::{Change, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; use profile::StopWatch; -use project_model::{CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; +use project_model::target_data_layout::RustcDataLayoutConfig; +use project_model::{target_data_layout, CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; use load_cargo::{load_workspace, LoadCargoConfig, ProcMacroServerChoice}; use rustc_hash::FxHashMap; @@ -60,15 +61,22 @@ impl Tester { std::fs::write(&tmp_file, "")?; let cargo_config = CargoConfig { sysroot: Some(RustLibSource::Discover), ..Default::default() }; + + let sysroot = + Ok(Sysroot::discover(tmp_file.parent().unwrap(), &cargo_config.extra_env, false) + .unwrap()); + let data_layout = target_data_layout::get( + RustcDataLayoutConfig::Rustc(sysroot.as_ref().ok()), + None, + &cargo_config.extra_env, + ); + let workspace = ProjectWorkspace::DetachedFiles { files: vec![tmp_file.clone()], - sysroot: Ok(Sysroot::discover( - tmp_file.parent().unwrap(), - &cargo_config.extra_env, - false, - ) - .unwrap()), + sysroot, rustc_cfg: vec![], + toolchain: None, + target_layout: data_layout.map(Arc::from).map_err(|it| Arc::from(it.to_string())), }; let load_cargo_config = LoadCargoConfig { load_out_dirs_from_check: false, diff --git a/crates/rust-analyzer/src/cli/scip.rs b/crates/rust-analyzer/src/cli/scip.rs index 2d56830c87f30..27869a5a7e63d 100644 --- a/crates/rust-analyzer/src/cli/scip.rs +++ b/crates/rust-analyzer/src/cli/scip.rs @@ -324,7 +324,7 @@ fn moniker_to_symbol(moniker: &MonikerResult) -> scip_types::Symbol { #[cfg(test)] mod test { use super::*; - use ide::{AnalysisHost, FilePosition, StaticIndex, TextSize}; + use ide::{AnalysisHost, FilePosition, TextSize}; use scip::symbol::format_symbol; use test_fixture::ChangeFixture; diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs index e535d7ed0438a..5d1678ef12006 100644 --- a/crates/salsa/salsa-macros/src/query_group.rs +++ b/crates/salsa/salsa-macros/src/query_group.rs @@ -1,5 +1,4 @@ //! -use std::{convert::TryFrom, iter::FromIterator}; use crate::parenthesized::Parenthesized; use heck::ToUpperCamelCase; diff --git a/crates/salsa/src/debug.rs b/crates/salsa/src/debug.rs index 0925ddb3d85bb..5f113541f04cf 100644 --- a/crates/salsa/src/debug.rs +++ b/crates/salsa/src/debug.rs @@ -5,7 +5,6 @@ use crate::durability::Durability; use crate::plumbing::QueryStorageOps; use crate::Query; use crate::QueryTable; -use std::iter::FromIterator; /// Additional methods on queries that can be used to "peek into" /// their current state. 
These methods are meant for debugging and diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index c381e66e087bc..d631671005816 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -13,7 +13,6 @@ use crate::Runtime; use crate::{Database, DatabaseKeyIndex, QueryDb, Revision}; use parking_lot::RwLock; use std::borrow::Borrow; -use std::convert::TryFrom; use std::hash::Hash; use std::marker::PhantomData; use triomphe::Arc; diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs index 4e8fca6149b7e..c2539570e0f9f 100644 --- a/crates/salsa/src/input.rs +++ b/crates/salsa/src/input.rs @@ -14,7 +14,6 @@ use crate::Runtime; use crate::{DatabaseKeyIndex, QueryDb}; use indexmap::map::Entry; use parking_lot::RwLock; -use std::convert::TryFrom; use std::iter; use tracing::debug; diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs index 731839e9598c0..822219f51859c 100644 --- a/crates/salsa/src/interned.rs +++ b/crates/salsa/src/interned.rs @@ -13,7 +13,6 @@ use crate::{Database, DatabaseKeyIndex, QueryDb}; use parking_lot::RwLock; use rustc_hash::FxHashMap; use std::collections::hash_map::Entry; -use std::convert::From; use std::fmt::Debug; use std::hash::Hash; use triomphe::Arc; From 85203d97216e88f1bc2df9eb5e8d1d0bd9d93118 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Mon, 19 Feb 2024 18:14:48 +0100 Subject: [PATCH 066/134] Render assoc item owner in hover for items other than functions --- crates/hir/src/lib.rs | 88 ++++++++++++++++++++++++++++++++++ crates/ide-db/src/defs.rs | 21 ++++++-- crates/ide/src/hover/render.rs | 22 ++++++--- crates/ide/src/hover/tests.rs | 32 ++++++------- 4 files changed, 135 insertions(+), 28 deletions(-) diff --git a/crates/hir/src/lib.rs b/crates/hir/src/lib.rs index beaa6dd4d67cb..2d8811cf5ebeb 100644 --- a/crates/hir/src/lib.rs +++ b/crates/hir/src/lib.rs @@ -2653,6 +2653,37 @@ impl ItemInNs { } } +/// Invariant: `inner.as_extern_assoc_item(db).is_some()` +/// We do not actively enforce this invariant. +#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +pub enum ExternAssocItem { + Function(Function), + Static(Static), + TypeAlias(TypeAlias), +} + +pub trait AsExternAssocItem { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option; +} + +impl AsExternAssocItem for Function { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::Function, self.id) + } +} + +impl AsExternAssocItem for Static { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::Static, self.id) + } +} + +impl AsExternAssocItem for TypeAlias { + fn as_extern_assoc_item(self, db: &dyn HirDatabase) -> Option { + as_extern_assoc_item(db, ExternAssocItem::TypeAlias, self.id) + } +} + /// Invariant: `inner.as_assoc_item(db).is_some()` /// We do not actively enforce this invariant. 
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] @@ -2727,6 +2758,63 @@ where } } +fn as_extern_assoc_item<'db, ID, DEF, LOC>( + db: &(dyn HirDatabase + 'db), + ctor: impl FnOnce(DEF) -> ExternAssocItem, + id: ID, +) -> Option +where + ID: Lookup = dyn DefDatabase + 'db, Data = AssocItemLoc>, + DEF: From, + LOC: ItemTreeNode, +{ + match id.lookup(db.upcast()).container { + ItemContainerId::ExternBlockId(_) => Some(ctor(DEF::from(id))), + ItemContainerId::TraitId(_) | ItemContainerId::ImplId(_) | ItemContainerId::ModuleId(_) => { + None + } + } +} + +impl ExternAssocItem { + pub fn name(self, db: &dyn HirDatabase) -> Name { + match self { + Self::Function(it) => it.name(db), + Self::Static(it) => it.name(db), + Self::TypeAlias(it) => it.name(db), + } + } + + pub fn module(self, db: &dyn HirDatabase) -> Module { + match self { + Self::Function(f) => f.module(db), + Self::Static(c) => c.module(db), + Self::TypeAlias(t) => t.module(db), + } + } + + pub fn as_function(self) -> Option { + match self { + Self::Function(v) => Some(v), + _ => None, + } + } + + pub fn as_static(self) -> Option { + match self { + Self::Static(v) => Some(v), + _ => None, + } + } + + pub fn as_type_alias(self) -> Option { + match self { + Self::TypeAlias(v) => Some(v), + _ => None, + } + } +} + impl AssocItem { pub fn name(self, db: &dyn HirDatabase) -> Option { match self { diff --git a/crates/ide-db/src/defs.rs b/crates/ide-db/src/defs.rs index 747c90561deea..1b6ff8bad53c5 100644 --- a/crates/ide-db/src/defs.rs +++ b/crates/ide-db/src/defs.rs @@ -8,11 +8,11 @@ use arrayvec::ArrayVec; use either::Either; use hir::{ - Adt, AsAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, Const, Crate, - DefWithBody, DeriveHelper, DocLinkDef, ExternCrateDecl, Field, Function, GenericParam, - HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, ModuleDef, Name, PathResolution, - Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, TypeAlias, Variant, VariantDef, - Visibility, + Adt, AsAssocItem, AsExternAssocItem, AssocItem, AttributeTemplate, BuiltinAttr, BuiltinType, + Const, Crate, DefWithBody, DeriveHelper, DocLinkDef, ExternAssocItem, ExternCrateDecl, Field, + Function, GenericParam, HasVisibility, HirDisplay, Impl, Label, Local, Macro, Module, + ModuleDef, Name, PathResolution, Semantics, Static, ToolModule, Trait, TraitAlias, TupleField, + TypeAlias, Variant, VariantDef, Visibility, }; use stdx::{format_to, impl_from}; use syntax::{ @@ -742,6 +742,17 @@ impl AsAssocItem for Definition { } } +impl AsExternAssocItem for Definition { + fn as_extern_assoc_item(self, db: &dyn hir::db::HirDatabase) -> Option { + match self { + Definition::Function(it) => it.as_extern_assoc_item(db), + Definition::Static(it) => it.as_extern_assoc_item(db), + Definition::TypeAlias(it) => it.as_extern_assoc_item(db), + _ => None, + } + } +} + impl From for Definition { fn from(assoc_item: AssocItem) -> Self { match assoc_item { diff --git a/crates/ide/src/hover/render.rs b/crates/ide/src/hover/render.rs index 42342d94b6d13..563e78253a8ae 100644 --- a/crates/ide/src/hover/render.rs +++ b/crates/ide/src/hover/render.rs @@ -3,8 +3,8 @@ use std::{mem, ops::Not}; use either::Either; use hir::{ - Adt, AsAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout, LayoutError, Name, - Semantics, Trait, Type, TypeInfo, + Adt, AsAssocItem, AsExternAssocItem, CaptureKind, HasCrate, HasSource, HirDisplay, Layout, + LayoutError, Name, Semantics, Trait, Type, TypeInfo, }; use ide_db::{ base_db::SourceDatabase, @@ -369,12 
+369,20 @@ fn definition_owner_name(db: &RootDatabase, def: &Definition) -> Option match def { Definition::Field(f) => Some(f.parent_def(db).name(db)), Definition::Local(l) => l.parent(db).name(db), - Definition::Function(f) => match f.as_assoc_item(db)?.container(db) { - hir::AssocItemContainer::Trait(t) => Some(t.name(db)), - hir::AssocItemContainer::Impl(i) => i.self_ty(db).as_adt().map(|adt| adt.name(db)), - }, Definition::Variant(e) => Some(e.parent_enum(db).name(db)), - _ => None, + + d => { + if let Some(assoc_item) = d.as_assoc_item(db) { + match assoc_item.container(db) { + hir::AssocItemContainer::Trait(t) => Some(t.name(db)), + hir::AssocItemContainer::Impl(i) => { + i.self_ty(db).as_adt().map(|adt| adt.name(db)) + } + } + } else { + return d.as_extern_assoc_item(db).map(|_| "".to_owned()); + } + } } .map(|name| name.display(db).to_string()) } diff --git a/crates/ide/src/hover/tests.rs b/crates/ide/src/hover/tests.rs index 157f8ff371ef2..ead4f91595f0e 100644 --- a/crates/ide/src/hover/tests.rs +++ b/crates/ide/src/hover/tests.rs @@ -1202,7 +1202,7 @@ fn main() { *C* ```rust - test + test::X ``` ```rust @@ -2277,7 +2277,7 @@ fn main() { let foo_test = unsafe { fo$0o(1, 2, 3); } } *foo* ```rust - test + test:: ``` ```rust @@ -4266,7 +4266,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4295,7 +4295,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4327,7 +4327,7 @@ fn main() { *B* ```rust - test + test::T ``` ```rust @@ -4919,7 +4919,7 @@ fn test() { *FOO* ```rust - test + test::S ``` ```rust @@ -5284,7 +5284,7 @@ impl T1 for Foo { *Bar* ```rust - test::t2 + test::t2::T2 ``` ```rust @@ -5306,7 +5306,7 @@ trait A { *Assoc* ```rust - test + test::A ``` ```rust @@ -5327,7 +5327,7 @@ trait A { *Assoc* ```rust - test + test::A ``` ```rust @@ -5346,7 +5346,7 @@ trait A where *Assoc* ```rust - test + test::A ``` ```rust @@ -6632,7 +6632,7 @@ fn test() { *A* ```rust - test + test::S ``` ```rust @@ -6661,7 +6661,7 @@ fn test() { *A* ```rust - test + test::S ``` ```rust @@ -6691,7 +6691,7 @@ mod m { *A* ```rust - test + test::S ``` ```rust @@ -7249,7 +7249,7 @@ extern "C" { *STATIC* ```rust - test + test:: ``` ```rust @@ -7267,7 +7267,7 @@ extern "C" { *fun* ```rust - test + test:: ``` ```rust @@ -7285,7 +7285,7 @@ extern "C" { *Ty* ```rust - test + test:: ``` ```rust From 06d6c62f80c2729f2ba3129c43c2cd3417b50251 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:28:27 +0000 Subject: [PATCH 067/134] Add newtype for raw idents --- compiler/rustc_ast/src/token.rs | 38 +++++++++++++----- compiler/rustc_ast/src/tokenstream.rs | 2 +- compiler/rustc_ast_pretty/src/pprust/state.rs | 8 ++-- compiler/rustc_builtin_macros/src/asm.rs | 3 +- .../src/assert/context.rs | 8 ++-- compiler/rustc_expand/src/mbe/macro_check.rs | 4 +- compiler/rustc_expand/src/mbe/macro_rules.rs | 14 +++++-- compiler/rustc_expand/src/mbe/metavar_expr.rs | 4 +- compiler/rustc_expand/src/mbe/quoted.rs | 4 +- compiler/rustc_expand/src/parse/tests.rs | 40 ++++++++++++++----- .../rustc_expand/src/proc_macro_server.rs | 13 +++--- .../rustc_expand/src/tokenstream/tests.rs | 5 ++- compiler/rustc_lint/src/builtin.rs | 2 +- compiler/rustc_parse/src/lexer/mod.rs | 8 ++-- .../rustc_parse/src/lexer/unicode_chars.rs | 2 +- .../rustc_parse/src/parser/diagnostics.rs | 13 +++--- compiler/rustc_parse/src/parser/expr.rs | 38 ++++++++++-------- compiler/rustc_parse/src/parser/item.rs | 7 ++-- compiler/rustc_parse/src/parser/mod.rs | 9 +++-- .../rustc_parse/src/parser/nonterminal.rs | 2 +- 
compiler/rustc_parse/src/parser/pat.rs | 2 +- compiler/rustc_parse/src/parser/path.rs | 3 +- compiler/rustc_parse/src/parser/stmt.rs | 2 +- src/librustdoc/clean/render_macro_matchers.rs | 4 +- 24 files changed, 148 insertions(+), 87 deletions(-) diff --git a/compiler/rustc_ast/src/token.rs b/compiler/rustc_ast/src/token.rs index 50fe37dcdb6fc..5ccc7d51066d9 100644 --- a/compiler/rustc_ast/src/token.rs +++ b/compiler/rustc_ast/src/token.rs @@ -107,7 +107,7 @@ impl Lit { /// Keep this in sync with `Token::can_begin_literal_or_bool` excluding unary negation. pub fn from_token(token: &Token) -> Option { match token.uninterpolate().kind { - Ident(name, false) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => Some(Lit::new(Bool, name, None)), Literal(token_lit) => Some(token_lit), Interpolated(ref nt) if let NtExpr(expr) | NtLiteral(expr) = &nt.0 @@ -183,7 +183,7 @@ impl LitKind { } } -pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { +pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -214,7 +214,7 @@ pub fn ident_can_begin_expr(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } -fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { +fn ident_can_begin_type(name: Symbol, span: Span, is_raw: IdentIsRaw) -> bool { let ident_token = Token::new(Ident(name, is_raw), span); !ident_token.is_reserved_ident() @@ -223,6 +223,24 @@ fn ident_can_begin_type(name: Symbol, span: Span, is_raw: bool) -> bool { .contains(&name) } +#[derive(PartialEq, Encodable, Decodable, Debug, Copy, Clone, HashStable_Generic)] +pub enum IdentIsRaw { + No, + Yes, +} + +impl From for IdentIsRaw { + fn from(b: bool) -> Self { + if b { Self::Yes } else { Self::No } + } +} + +impl From for bool { + fn from(is_raw: IdentIsRaw) -> bool { + matches!(is_raw, IdentIsRaw::Yes) + } +} + // SAFETY: due to the `Clone` impl below, all fields of all variants other than // `Interpolated` must impl `Copy`. #[derive(PartialEq, Encodable, Decodable, Debug, HashStable_Generic)] @@ -298,7 +316,7 @@ pub enum TokenKind { /// Do not forget about `NtIdent` when you want to match on identifiers. /// It's recommended to use `Token::(ident,uninterpolate,uninterpolated_span)` to /// treat regular and interpolated identifiers in the same way. - Ident(Symbol, /* is_raw */ bool), + Ident(Symbol, IdentIsRaw), /// Lifetime identifier token. /// Do not forget about `NtLifetime` when you want to match on lifetime identifiers. /// It's recommended to use `Token::(lifetime,uninterpolate,uninterpolated_span)` to @@ -411,7 +429,7 @@ impl Token { /// Recovers a `Token` from an `Ident`. This creates a raw identifier if necessary. pub fn from_ast_ident(ident: Ident) -> Self { - Token::new(Ident(ident.name, ident.is_raw_guess()), ident.span) + Token::new(Ident(ident.name, ident.is_raw_guess().into()), ident.span) } /// For interpolated tokens, returns a span of the fragment to which the interpolated @@ -567,7 +585,7 @@ impl Token { pub fn can_begin_literal_maybe_minus(&self) -> bool { match self.uninterpolate().kind { Literal(..) 
| BinOp(Minus) => true, - Ident(name, false) if name.is_bool_lit() => true, + Ident(name, IdentIsRaw::No) if name.is_bool_lit() => true, Interpolated(ref nt) => match &nt.0 { NtLiteral(_) => true, NtExpr(e) => match &e.kind { @@ -602,7 +620,7 @@ impl Token { /// Returns an identifier if this token is an identifier. #[inline] - pub fn ident(&self) -> Option<(Ident, /* is_raw */ bool)> { + pub fn ident(&self) -> Option<(Ident, IdentIsRaw)> { // We avoid using `Token::uninterpolate` here because it's slow. match &self.kind { &Ident(name, is_raw) => Some((Ident::new(name, self.span), is_raw)), @@ -755,7 +773,7 @@ impl Token { /// Returns `true` if the token is a non-raw identifier for which `pred` holds. pub fn is_non_raw_ident_where(&self, pred: impl FnOnce(Ident) -> bool) -> bool { match self.ident() { - Some((id, false)) => pred(id), + Some((id, IdentIsRaw::No)) => pred(id), _ => false, } } @@ -806,7 +824,7 @@ impl Token { _ => return None, }, SingleQuote => match joint.kind { - Ident(name, false) => Lifetime(Symbol::intern(&format!("'{name}"))), + Ident(name, IdentIsRaw::No) => Lifetime(Symbol::intern(&format!("'{name}"))), _ => return None, }, @@ -836,7 +854,7 @@ pub enum Nonterminal { NtPat(P), NtExpr(P), NtTy(P), - NtIdent(Ident, /* is_raw */ bool), + NtIdent(Ident, IdentIsRaw), NtLifetime(Ident), NtLiteral(P), /// Stuff inside brackets for attributes diff --git a/compiler/rustc_ast/src/tokenstream.rs b/compiler/rustc_ast/src/tokenstream.rs index 298c01a456740..adc3056cc2921 100644 --- a/compiler/rustc_ast/src/tokenstream.rs +++ b/compiler/rustc_ast/src/tokenstream.rs @@ -656,7 +656,7 @@ impl TokenStream { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Bracket, [ - TokenTree::token_alone(token::Ident(sym::doc, false), span), + TokenTree::token_alone(token::Ident(sym::doc, token::IdentIsRaw::No), span), TokenTree::token_alone(token::Eq, span), TokenTree::token_alone( TokenKind::lit(token::StrRaw(num_of_hashes), data, None), diff --git a/compiler/rustc_ast_pretty/src/pprust/state.rs b/compiler/rustc_ast_pretty/src/pprust/state.rs index 7ea0078ea3bb9..6e1974f48b26b 100644 --- a/compiler/rustc_ast_pretty/src/pprust/state.rs +++ b/compiler/rustc_ast_pretty/src/pprust/state.rs @@ -185,7 +185,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { // IDENT + `!`: `println!()`, but `if !x { ... }` needs a space after the `if` (Tok(Token { kind: Ident(sym, is_raw), span }, _), Tok(Token { kind: Not, .. 
}, _)) - if !Ident::new(*sym, *span).is_reserved() || *is_raw => + if !Ident::new(*sym, *span).is_reserved() || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -197,7 +197,7 @@ fn space_between(tt1: &TokenTree, tt2: &TokenTree) -> bool { || *sym == kw::Fn || *sym == kw::SelfUpper || *sym == kw::Pub - || *is_raw => + || matches!(is_raw, IdentIsRaw::Yes) => { false } @@ -731,7 +731,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere token::NtBlock(e) => self.block_to_string(e), token::NtStmt(e) => self.stmt_to_string(e), token::NtPat(e) => self.pat_to_string(e), - token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(*e, *is_raw).to_string(), + &token::NtIdent(e, is_raw) => IdentPrinter::for_ast_ident(e, is_raw.into()).to_string(), token::NtLifetime(e) => e.to_string(), token::NtLiteral(e) => self.expr_to_string(e), token::NtVis(e) => self.vis_to_string(e), @@ -795,7 +795,7 @@ pub trait PrintState<'a>: std::ops::Deref + std::ops::Dere /* Name components */ token::Ident(s, is_raw) => { - IdentPrinter::new(s, is_raw, convert_dollar_crate).to_string().into() + IdentPrinter::new(s, is_raw.into(), convert_dollar_crate).to_string().into() } token::Lifetime(s) => s.to_string().into(), diff --git a/compiler/rustc_builtin_macros/src/asm.rs b/compiler/rustc_builtin_macros/src/asm.rs index 0b2e63b403bf5..8489217ad931f 100644 --- a/compiler/rustc_builtin_macros/src/asm.rs +++ b/compiler/rustc_builtin_macros/src/asm.rs @@ -1,3 +1,4 @@ +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter}; @@ -416,7 +417,7 @@ fn parse_reg<'a>( ) -> PResult<'a, ast::InlineAsmRegOrRegClass> { p.expect(&token::OpenDelim(Delimiter::Parenthesis))?; let result = match p.token.uninterpolate().kind { - token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name), + token::Ident(name, IdentIsRaw::No) => ast::InlineAsmRegOrRegClass::RegClass(name), token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => { *explicit_reg = true; ast::InlineAsmRegOrRegClass::Reg(symbol) diff --git a/compiler/rustc_builtin_macros/src/assert/context.rs b/compiler/rustc_builtin_macros/src/assert/context.rs index d244897f8a5de..01821ee833f1b 100644 --- a/compiler/rustc_builtin_macros/src/assert/context.rs +++ b/compiler/rustc_builtin_macros/src/assert/context.rs @@ -1,7 +1,6 @@ use rustc_ast::{ ptr::P, - token, - token::Delimiter, + token::{self, Delimiter, IdentIsRaw}, tokenstream::{DelimSpan, TokenStream, TokenTree}, BinOpKind, BorrowKind, DelimArgs, Expr, ExprKind, ItemKind, MacCall, MethodCall, Mutability, Path, PathSegment, Stmt, StructRest, UnOp, UseTree, UseTreeKind, DUMMY_NODE_ID, @@ -170,7 +169,10 @@ impl<'cx, 'a> Context<'cx, 'a> { ]; let captures = self.capture_decls.iter().flat_map(|cap| { [ - TokenTree::token_joint_hidden(token::Ident(cap.ident.name, false), cap.ident.span), + TokenTree::token_joint_hidden( + token::Ident(cap.ident.name, IdentIsRaw::No), + cap.ident.span, + ), TokenTree::token_alone(token::Comma, self.span), ] }); diff --git a/compiler/rustc_expand/src/mbe/macro_check.rs b/compiler/rustc_expand/src/mbe/macro_check.rs index e66cfbe6fb689..ffc8f782fd344 100644 --- a/compiler/rustc_expand/src/mbe/macro_check.rs +++ b/compiler/rustc_expand/src/mbe/macro_check.rs @@ -107,7 +107,7 @@ use crate::errors; use crate::mbe::{KleeneToken, TokenTree}; -use rustc_ast::token::{Delimiter, Token, TokenKind}; +use rustc_ast::token::{Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::{NodeId, DUMMY_NODE_ID}; use 
rustc_data_structures::fx::FxHashMap; use rustc_errors::{DiagnosticMessage, MultiSpan}; @@ -409,7 +409,7 @@ fn check_nested_occurrences( match (state, tt) { ( NestedMacroState::Empty, - &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }), + &TokenTree::Token(Token { kind: TokenKind::Ident(name, IdentIsRaw::No), .. }), ) => { if name == kw::MacroRules { state = NestedMacroState::MacroRules; diff --git a/compiler/rustc_expand/src/mbe/macro_rules.rs b/compiler/rustc_expand/src/mbe/macro_rules.rs index c82609503c18a..bf99e9e6d5cc8 100644 --- a/compiler/rustc_expand/src/mbe/macro_rules.rs +++ b/compiler/rustc_expand/src/mbe/macro_rules.rs @@ -8,6 +8,7 @@ use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser} use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc}; use crate::mbe::transcribe::transcribe; +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::token::{self, Delimiter, NonterminalKind, Token, TokenKind, TokenKind::*}; use rustc_ast::tokenstream::{DelimSpan, TokenStream}; @@ -1302,7 +1303,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1313,7 +1316,9 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { FatArrow | Comma | Eq => IsInFollow::Yes, - Ident(name, false) if name == kw::If || name == kw::In => IsInFollow::Yes, + Ident(name, IdentIsRaw::No) if name == kw::If || name == kw::In => { + IsInFollow::Yes + } _ => IsInFollow::No(TOKENS), }, _ => IsInFollow::No(TOKENS), @@ -1336,7 +1341,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { | BinOp(token::Shr) | Semi | BinOp(token::Or) => IsInFollow::Yes, - Ident(name, false) if name == kw::As || name == kw::Where => { + Ident(name, IdentIsRaw::No) if name == kw::As || name == kw::Where => { IsInFollow::Yes } _ => IsInFollow::No(TOKENS), @@ -1364,7 +1369,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow { match tok { TokenTree::Token(token) => match token.kind { Comma => IsInFollow::Yes, - Ident(name, is_raw) if is_raw || name != kw::Priv => IsInFollow::Yes, + Ident(_, IdentIsRaw::Yes) => IsInFollow::Yes, + Ident(name, _) if name != kw::Priv => IsInFollow::Yes, _ => { if token.can_begin_type() { IsInFollow::Yes diff --git a/compiler/rustc_expand/src/mbe/metavar_expr.rs b/compiler/rustc_expand/src/mbe/metavar_expr.rs index 3ca0787ce8e1f..84f7dc4771a9e 100644 --- a/compiler/rustc_expand/src/mbe/metavar_expr.rs +++ b/compiler/rustc_expand/src/mbe/metavar_expr.rs @@ -1,4 +1,4 @@ -use rustc_ast::token::{self, Delimiter}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw}; use rustc_ast::tokenstream::{RefTokenTreeCursor, TokenStream, TokenTree}; use rustc_ast::{LitIntType, LitKind}; use rustc_ast_pretty::pprust; @@ -142,7 +142,7 @@ fn parse_ident<'sess>( if let Some(tt) = iter.next() && let TokenTree::Token(token, _) = tt { - if let Some((elem, false)) = token.ident() { + if let Some((elem, IdentIsRaw::No)) = token.ident() { return Ok(elem); } let token_str = pprust::token_to_string(token); diff --git a/compiler/rustc_expand/src/mbe/quoted.rs 
b/compiler/rustc_expand/src/mbe/quoted.rs index 0fdfa5631389e..ec1dd807d1a4f 100644 --- a/compiler/rustc_expand/src/mbe/quoted.rs +++ b/compiler/rustc_expand/src/mbe/quoted.rs @@ -2,7 +2,7 @@ use crate::errors; use crate::mbe::macro_parser::count_metavar_decls; use crate::mbe::{Delimited, KleeneOp, KleeneToken, MetaVarExpr, SequenceRepetition, TokenTree}; -use rustc_ast::token::{self, Delimiter, Token}; +use rustc_ast::token::{self, Delimiter, IdentIsRaw, Token}; use rustc_ast::{tokenstream, NodeId}; use rustc_ast_pretty::pprust; use rustc_feature::Features; @@ -222,7 +222,7 @@ fn parse_tree<'a>( Some(tokenstream::TokenTree::Token(token, _)) if token.is_ident() => { let (ident, is_raw) = token.ident().unwrap(); let span = ident.span.with_lo(span.lo()); - if ident.name == kw::Crate && !is_raw { + if ident.name == kw::Crate && matches!(is_raw, IdentIsRaw::No) { TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span) } else { TokenTree::MetaVar(span, ident) diff --git a/compiler/rustc_expand/src/parse/tests.rs b/compiler/rustc_expand/src/parse/tests.rs index 7a888250ca161..0e07b41b43c7d 100644 --- a/compiler/rustc_expand/src/parse/tests.rs +++ b/compiler/rustc_expand/src/parse/tests.rs @@ -2,6 +2,7 @@ use crate::tests::{ matches_codepattern, string_to_stream, with_error_checking_parse, with_expected_parse_error, }; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token}; use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree}; @@ -74,9 +75,12 @@ fn string_to_tts_macro() { match tts { [ - TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name_macro_rules, IdentIsRaw::No), .. }, + _, + ), TokenTree::Token(Token { kind: token::Not, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }, _), + TokenTree::Token(Token { kind: token::Ident(name_zip, IdentIsRaw::No), .. }, _), TokenTree::Delimited(.., macro_delim, macro_tts), ] if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" => { let tts = ¯o_tts.trees().collect::>(); @@ -90,7 +94,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. }, + _, + ), ] if first_delim == &Delimiter::Parenthesis && name.as_str() == "a" => { } _ => panic!("value 3: {:?} {:?}", first_delim, first_tts), @@ -99,7 +106,10 @@ fn string_to_tts_macro() { match &tts[..] { [ TokenTree::Token(Token { kind: token::Dollar, .. }, _), - TokenTree::Token(Token { kind: token::Ident(name, false), .. }, _), + TokenTree::Token( + Token { kind: token::Ident(name, IdentIsRaw::No), .. 
}, + _, + ), ] if second_delim == &Delimiter::Parenthesis && name.as_str() == "a" => {} _ => panic!("value 4: {:?} {:?}", second_delim, second_tts), @@ -119,8 +129,11 @@ fn string_to_tts_1() { let tts = string_to_stream("fn a(b: i32) { b; }".to_string()); let expected = TokenStream::new(vec![ - TokenTree::token_alone(token::Ident(kw::Fn, false), sp(0, 2)), - TokenTree::token_joint_hidden(token::Ident(Symbol::intern("a"), false), sp(3, 4)), + TokenTree::token_alone(token::Ident(kw::Fn, IdentIsRaw::No), sp(0, 2)), + TokenTree::token_joint_hidden( + token::Ident(Symbol::intern("a"), IdentIsRaw::No), + sp(3, 4), + ), TokenTree::Delimited( DelimSpan::from_pair(sp(4, 5), sp(11, 12)), // `JointHidden` because the `(` is followed immediately by @@ -128,10 +141,16 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::JointHidden, Spacing::Alone), Delimiter::Parenthesis, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(5, 6)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(5, 6), + ), TokenTree::token_alone(token::Colon, sp(6, 7)), // `JointHidden` because the `i32` is immediately followed by the `)`. - TokenTree::token_joint_hidden(token::Ident(sym::i32, false), sp(8, 11)), + TokenTree::token_joint_hidden( + token::Ident(sym::i32, IdentIsRaw::No), + sp(8, 11), + ), ]) .into(), ), @@ -143,7 +162,10 @@ fn string_to_tts_1() { DelimSpacing::new(Spacing::Alone, Spacing::Alone), Delimiter::Brace, TokenStream::new(vec![ - TokenTree::token_joint(token::Ident(Symbol::intern("b"), false), sp(15, 16)), + TokenTree::token_joint( + token::Ident(Symbol::intern("b"), IdentIsRaw::No), + sp(15, 16), + ), // `Alone` because the `;` is followed by whitespace. TokenTree::token_alone(token::Semi, sp(16, 17)), ]) diff --git a/compiler/rustc_expand/src/proc_macro_server.rs b/compiler/rustc_expand/src/proc_macro_server.rs index 8f31b5801da7f..b5595c01b877c 100644 --- a/compiler/rustc_expand/src/proc_macro_server.rs +++ b/compiler/rustc_expand/src/proc_macro_server.rs @@ -1,4 +1,5 @@ use crate::base::ExtCtxt; +use ast::token::IdentIsRaw; use pm::bridge::{ server, DelimSpan, Diagnostic, ExpnGlobals, Group, Ident, LitKind, Literal, Punct, TokenTree, }; @@ -216,7 +217,9 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec op("?"), SingleQuote => op("'"), - Ident(sym, is_raw) => trees.push(TokenTree::Ident(Ident { sym, is_raw, span })), + Ident(sym, is_raw) => { + trees.push(TokenTree::Ident(Ident { sym, is_raw: is_raw.into(), span })) + } Lifetime(name) => { let ident = symbol::Ident::new(name, span).without_first_quote(); trees.extend([ @@ -238,7 +241,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec)> for Vec { trees.push(TokenTree::Ident(Ident { sym: ident.name, - is_raw: *is_raw, + is_raw: matches!(is_raw, IdentIsRaw::Yes), span: ident.span, })) } @@ -352,7 +355,7 @@ impl ToInternal> } TokenTree::Ident(self::Ident { sym, is_raw, span }) => { rustc.sess().symbol_gallery.insert(sym, span); - smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw), span)] + smallvec![tokenstream::TokenTree::token_alone(Ident(sym, is_raw.into()), span)] } TokenTree::Literal(self::Literal { kind: self::LitKind::Integer, @@ -569,7 +572,7 @@ impl server::TokenStream for Rustc<'_, '_> { match &expr.kind { ast::ExprKind::Lit(token_lit) if token_lit.kind == token::Bool => { Ok(tokenstream::TokenStream::token_alone( - token::Ident(token_lit.symbol, false), + token::Ident(token_lit.symbol, IdentIsRaw::No), expr.span, )) } diff --git 
a/compiler/rustc_expand/src/tokenstream/tests.rs b/compiler/rustc_expand/src/tokenstream/tests.rs index 91c4dd732e3a5..78795e86fd5da 100644 --- a/compiler/rustc_expand/src/tokenstream/tests.rs +++ b/compiler/rustc_expand/src/tokenstream/tests.rs @@ -1,6 +1,6 @@ use crate::tests::string_to_stream; -use rustc_ast::token; +use rustc_ast::token::{self, IdentIsRaw}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_span::create_default_session_globals_then; use rustc_span::{BytePos, Span, Symbol}; @@ -86,7 +86,8 @@ fn test_diseq_1() { fn test_is_empty() { create_default_session_globals_then(|| { let test0 = TokenStream::default(); - let test1 = TokenStream::token_alone(token::Ident(Symbol::intern("a"), false), sp(0, 1)); + let test1 = + TokenStream::token_alone(token::Ident(Symbol::intern("a"), IdentIsRaw::No), sp(0, 1)); let test2 = string_to_ts("foo(bar::baz)"); assert_eq!(test0.is_empty(), true); diff --git a/compiler/rustc_lint/src/builtin.rs b/compiler/rustc_lint/src/builtin.rs index f9149f54e926c..30b9e64cb095c 100644 --- a/compiler/rustc_lint/src/builtin.rs +++ b/compiler/rustc_lint/src/builtin.rs @@ -1821,7 +1821,7 @@ impl KeywordIdents { match tt { // Only report non-raw idents. TokenTree::Token(token, _) => { - if let Some((ident, false)) = token.ident() { + if let Some((ident, token::IdentIsRaw::No)) = token.ident() { self.check_ident_token(cx, UnderMacro(true), ident); } } diff --git a/compiler/rustc_parse/src/lexer/mod.rs b/compiler/rustc_parse/src/lexer/mod.rs index c768ea93b5fec..dc9f5bad765fc 100644 --- a/compiler/rustc_parse/src/lexer/mod.rs +++ b/compiler/rustc_parse/src/lexer/mod.rs @@ -4,7 +4,7 @@ use crate::errors; use crate::lexer::unicode_chars::UNICODE_ARRAY; use crate::make_unclosed_delims_error; use rustc_ast::ast::{self, AttrStyle}; -use rustc_ast::token::{self, CommentKind, Delimiter, Token, TokenKind}; +use rustc_ast::token::{self, CommentKind, Delimiter, IdentIsRaw, Token, TokenKind}; use rustc_ast::tokenstream::TokenStream; use rustc_ast::util::unicode::contains_text_flow_control_chars; use rustc_errors::{codes::*, Applicability, DiagCtxt, DiagnosticBuilder, StashKey}; @@ -181,7 +181,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { self.dcx().emit_err(errors::CannotBeRawIdent { span, ident: sym }); } self.sess.raw_identifier_spans.push(span); - token::Ident(sym, true) + token::Ident(sym, IdentIsRaw::Yes) } rustc_lexer::TokenKind::UnknownPrefix => { self.report_unknown_prefix(start); @@ -201,7 +201,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let span = self.mk_sp(start, self.pos); self.sess.bad_unicode_identifiers.borrow_mut().entry(sym).or_default() .push(span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } // split up (raw) c string literals to an ident and a string literal when edition < 2021. 
rustc_lexer::TokenKind::Literal { @@ -339,7 +339,7 @@ impl<'sess, 'src> StringReader<'sess, 'src> { let sym = nfc_normalize(self.str_from(start)); let span = self.mk_sp(start, self.pos); self.sess.symbol_gallery.insert(sym, span); - token::Ident(sym, false) + token::Ident(sym, IdentIsRaw::No) } /// Detect usages of Unicode codepoints changing the direction of the text on screen and loudly diff --git a/compiler/rustc_parse/src/lexer/unicode_chars.rs b/compiler/rustc_parse/src/lexer/unicode_chars.rs index a136abaa28bb8..3b4e05332fac9 100644 --- a/compiler/rustc_parse/src/lexer/unicode_chars.rs +++ b/compiler/rustc_parse/src/lexer/unicode_chars.rs @@ -307,7 +307,7 @@ pub(crate) const UNICODE_ARRAY: &[(char, &str, &str)] = &[ // fancier error recovery to it, as there will be less overall work to do this way. const ASCII_ARRAY: &[(&str, &str, Option)] = &[ (" ", "Space", None), - ("_", "Underscore", Some(token::Ident(kw::Underscore, false))), + ("_", "Underscore", Some(token::Ident(kw::Underscore, token::IdentIsRaw::No))), ("-", "Minus/Hyphen", Some(token::BinOp(token::Minus))), (",", "Comma", Some(token::Comma)), (";", "Semicolon", Some(token::Semi)), diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 659716548d95d..7f5d604050dd0 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -21,6 +21,7 @@ use crate::errors::{ use crate::fluent_generated as fluent; use crate::parser; use crate::parser::attr::InnerAttrPolicy; +use ast::token::IdentIsRaw; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; @@ -264,7 +265,7 @@ impl<'a> Parser<'a> { pub(super) fn expected_ident_found( &mut self, recover: bool, - ) -> PResult<'a, (Ident, /* is_raw */ bool)> { + ) -> PResult<'a, (Ident, IdentIsRaw)> { if let TokenKind::DocComment(..) = self.prev_token.kind { return Err(self.dcx().create_err(DocCommentDoesNotDocumentAnything { span: self.prev_token.span, @@ -290,11 +291,11 @@ impl<'a> Parser<'a> { let bad_token = self.token.clone(); // suggest prepending a keyword in identifier position with `r#` - let suggest_raw = if let Some((ident, false)) = self.token.ident() + let suggest_raw = if let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.is_raw_guess() && self.look_ahead(1, |t| valid_follow.contains(&t.kind)) { - recovered_ident = Some((ident, true)); + recovered_ident = Some((ident, IdentIsRaw::Yes)); // `Symbol::to_string()` is different from `Symbol::into_diagnostic_arg()`, // which uses `Symbol::to_ident_string()` and "helpfully" adds an implicit `r#` @@ -320,7 +321,7 @@ impl<'a> Parser<'a> { let help_cannot_start_number = self.is_lit_bad_ident().map(|(len, valid_portion)| { let (invalid, valid) = self.token.span.split_at(len as u32); - recovered_ident = Some((Ident::new(valid_portion, valid), false)); + recovered_ident = Some((Ident::new(valid_portion, valid), IdentIsRaw::No)); HelpIdentifierStartsWithNumber { num_span: invalid } }); @@ -653,9 +654,9 @@ impl<'a> Parser<'a> { // positive for a `cr#` that wasn't intended to start a c-string literal, but identifying // that in the parser requires unbounded lookahead, so we only add a hint to the existing // error rather than replacing it entirely. 
- if ((self.prev_token.kind == TokenKind::Ident(sym::c, false) + if ((self.prev_token.kind == TokenKind::Ident(sym::c, IdentIsRaw::No) && matches!(&self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }))) - || (self.prev_token.kind == TokenKind::Ident(sym::cr, false) + || (self.prev_token.kind == TokenKind::Ident(sym::cr, IdentIsRaw::No) && matches!( &self.token.kind, TokenKind::Literal(token::Lit { kind: token::Str, .. }) | token::Pound diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 20b9581f2ef27..455d9c3deb365 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -10,6 +10,7 @@ use super::{ use crate::errors; use crate::maybe_recover_from_interpolated_ty_qpath; use ast::mut_visit::{noop_visit_expr, MutVisitor}; +use ast::token::IdentIsRaw; use ast::{CoroutineKind, ForLoopKind, GenBlockKind, Pat, Path, PathSegment}; use core::mem; use rustc_ast::ptr::P; @@ -128,7 +129,7 @@ impl<'a> Parser<'a> { match self.parse_expr_res(restrictions, None) { Ok(expr) => Ok(expr), Err(err) => match self.token.ident() { - Some((Ident { name: kw::Underscore, .. }, false)) + Some((Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) if self.may_recover() && self.look_ahead(1, |t| t == &token::Comma) => { // Special-case handling of `foo(_, _, _)` @@ -459,7 +460,9 @@ impl<'a> Parser<'a> { return None; } (Some(op), _) => (op, self.token.span), - (None, Some((Ident { name: sym::and, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::and, span }, IdentIsRaw::No))) + if self.may_recover() => + { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "and".into(), @@ -467,7 +470,7 @@ impl<'a> Parser<'a> { }); (AssocOp::LAnd, span) } - (None, Some((Ident { name: sym::or, span }, false))) if self.may_recover() => { + (None, Some((Ident { name: sym::or, span }, IdentIsRaw::No))) if self.may_recover() => { self.dcx().emit_err(errors::InvalidLogicalOperator { span: self.token.span, incorrect: "or".into(), @@ -744,7 +747,7 @@ impl<'a> Parser<'a> { ( // `foo: ` ExprKind::Path(None, ast::Path { segments, .. 
}), - token::Ident(kw::For | kw::Loop | kw::While, false), + token::Ident(kw::For | kw::Loop | kw::While, IdentIsRaw::No), ) if segments.len() == 1 => { let snapshot = self.create_snapshot_for_diagnostic(); let label = Label { @@ -957,19 +960,20 @@ impl<'a> Parser<'a> { fn parse_expr_dot_or_call_with_(&mut self, mut e: P, lo: Span) -> PResult<'a, P> { loop { - let has_question = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { - // we are using noexpect here because we don't expect a `?` directly after a `return` - // which could be suggested otherwise - self.eat_noexpect(&token::Question) - } else { - self.eat(&token::Question) - }; + let has_question = + if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { + // we are using noexpect here because we don't expect a `?` directly after a `return` + // which could be suggested otherwise + self.eat_noexpect(&token::Question) + } else { + self.eat(&token::Question) + }; if has_question { // `expr?` e = self.mk_expr(lo.to(self.prev_token.span), ExprKind::Try(e)); continue; } - let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, false) { + let has_dot = if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { // we are using noexpect here because we don't expect a `.` directly after a `return` // which could be suggested otherwise self.eat_noexpect(&token::Dot) @@ -1128,19 +1132,19 @@ impl<'a> Parser<'a> { // 1. DestructuredFloat::TrailingDot(sym, ident_span, dot_span) => { assert!(suffix.is_none()); - self.token = Token::new(token::Ident(sym, false), ident_span); + self.token = Token::new(token::Ident(sym, IdentIsRaw::No), ident_span); let next_token = (Token::new(token::Dot, dot_span), self.token_spacing); self.parse_expr_tuple_field_access(lo, base, sym, None, Some(next_token)) } // 1.2 | 1.2e3 DestructuredFloat::MiddleDot(symbol1, ident1_span, dot_span, symbol2, ident2_span) => { - self.token = Token::new(token::Ident(symbol1, false), ident1_span); + self.token = Token::new(token::Ident(symbol1, IdentIsRaw::No), ident1_span); // This needs to be `Spacing::Alone` to prevent regressions. 
// See issue #76399 and PR #76285 for more details let next_token1 = (Token::new(token::Dot, dot_span), Spacing::Alone); let base1 = self.parse_expr_tuple_field_access(lo, base, symbol1, None, Some(next_token1)); - let next_token2 = Token::new(token::Ident(symbol2, false), ident2_span); + let next_token2 = Token::new(token::Ident(symbol2, IdentIsRaw::No), ident2_span); self.bump_with((next_token2, self.token_spacing)); // `.` self.parse_expr_tuple_field_access(lo, base1, symbol2, suffix, None) } @@ -1948,7 +1952,7 @@ impl<'a> Parser<'a> { self.bump(); // `builtin` self.bump(); // `#` - let Some((ident, false)) = self.token.ident() else { + let Some((ident, IdentIsRaw::No)) = self.token.ident() else { let err = self.dcx().create_err(errors::ExpectedBuiltinIdent { span: self.token.span }); return Err(err); }; @@ -3576,7 +3580,7 @@ impl<'a> Parser<'a> { fn find_struct_error_after_field_looking_code(&self) -> Option { match self.token.ident() { Some((ident, is_raw)) - if (is_raw || !ident.is_reserved()) + if (matches!(is_raw, IdentIsRaw::Yes) || !ident.is_reserved()) && self.look_ahead(1, |t| *t == token::Colon) => { Some(ast::ExprField { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index e7b9076bd3c8f..c6e80f3f07f37 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -3,6 +3,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; +use ast::token::IdentIsRaw; use rustc_ast::ast::*; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, TokenKind}; @@ -1079,7 +1080,7 @@ impl<'a> Parser<'a> { fn parse_ident_or_underscore(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident @ Ident { name: kw::Underscore, .. }, false)) => { + Some((ident @ Ident { name: kw::Underscore, .. }, IdentIsRaw::No)) => { self.bump(); Ok(ident) } @@ -1965,7 +1966,7 @@ impl<'a> Parser<'a> { let (ident, is_raw) = self.ident_or_err(true)?; if ident.name == kw::Underscore { self.sess.gated_spans.gate(sym::unnamed_fields, lo); - } else if !is_raw && ident.is_reserved() { + } else if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let snapshot = self.create_snapshot_for_diagnostic(); let err = if self.check_fn_front_matter(false, Case::Sensitive) { let inherited_vis = Visibility { @@ -2743,7 +2744,7 @@ impl<'a> Parser<'a> { fn parse_self_param(&mut self) -> PResult<'a, Option> { // Extract an identifier *after* having confirmed that the token is one. 
let expect_self_ident = |this: &mut Self| match this.token.ident() { - Some((ident, false)) => { + Some((ident, IdentIsRaw::No)) => { this.bump(); ident } diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index dea2b9e6ca70b..80f6a20b985b9 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -11,6 +11,7 @@ mod stmt; mod ty; use crate::lexer::UnmatchedDelim; +use ast::token::IdentIsRaw; pub use attr_wrapper::AttrWrapper; pub use diagnostics::AttemptLocalParseRecovery; pub(crate) use expr::ForbiddenLetReason; @@ -499,7 +500,7 @@ impl<'a> Parser<'a> { fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> { let (ident, is_raw) = self.ident_or_err(recover)?; - if !is_raw && ident.is_reserved() { + if matches!(is_raw, IdentIsRaw::No) && ident.is_reserved() { let err = self.expected_ident_found_err(); if recover { err.emit(); @@ -511,7 +512,7 @@ impl<'a> Parser<'a> { Ok(ident) } - fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, /* is_raw */ bool)> { + fn ident_or_err(&mut self, recover: bool) -> PResult<'a, (Ident, IdentIsRaw)> { match self.token.ident() { Some(ident) => Ok(ident), None => self.expected_ident_found(recover), @@ -568,7 +569,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { true @@ -598,7 +599,7 @@ impl<'a> Parser<'a> { } if case == Case::Insensitive - && let Some((ident, /* is_raw */ false)) = self.token.ident() + && let Some((ident, IdentIsRaw::No)) = self.token.ident() && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() { self.dcx().emit_err(errors::KwBadCase { span: ident.span, kw: kw.as_str() }); diff --git a/compiler/rustc_parse/src/parser/nonterminal.rs b/compiler/rustc_parse/src/parser/nonterminal.rs index 071d6b72f3b96..f1572a18a8be0 100644 --- a/compiler/rustc_parse/src/parser/nonterminal.rs +++ b/compiler/rustc_parse/src/parser/nonterminal.rs @@ -201,6 +201,6 @@ impl<'a> Parser<'a> { /// The token is an identifier, but not `_`. /// We prohibit passing `_` to macros expecting `ident` for now. -fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> { +fn get_macro_ident(token: &Token) -> Option<(Ident, token::IdentIsRaw)> { token.ident().filter(|(ident, _)| ident.name != kw::Underscore) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 75fc013d3e6fe..072db24265eea 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -311,7 +311,7 @@ impl<'a> Parser<'a> { matches!( &token.uninterpolate().kind, token::FatArrow // e.g. `a | => 0,`. - | token::Ident(kw::If, false) // e.g. `a | if expr`. + | token::Ident(kw::If, token::IdentIsRaw::No) // e.g. `a | if expr`. | token::Eq // e.g. `let a | = 0`. | token::Semi // e.g. `let a |;`. | token::Colon // e.g. `let a | :`. 
diff --git a/compiler/rustc_parse/src/parser/path.rs b/compiler/rustc_parse/src/parser/path.rs index 681039999a652..6e7bbe7e06d42 100644 --- a/compiler/rustc_parse/src/parser/path.rs +++ b/compiler/rustc_parse/src/parser/path.rs @@ -2,6 +2,7 @@ use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{Parser, Restrictions, TokenType}; use crate::errors::PathSingleColon; use crate::{errors, maybe_whole}; +use ast::token::IdentIsRaw; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Token, TokenKind}; use rustc_ast::{ @@ -390,7 +391,7 @@ impl<'a> Parser<'a> { pub(super) fn parse_path_segment_ident(&mut self) -> PResult<'a, Ident> { match self.token.ident() { - Some((ident, false)) if ident.is_path_segment_keyword() => { + Some((ident, IdentIsRaw::No)) if ident.is_path_segment_keyword() => { self.bump(); Ok(ident) } diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 1bae5b3224035..5c2f0967b649d 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -691,7 +691,7 @@ impl<'a> Parser<'a> { token.kind, token::Ident( kw::For | kw::Loop | kw::While, - false + token::IdentIsRaw::No ) | token::OpenDelim(Delimiter::Brace) ) }) diff --git a/src/librustdoc/clean/render_macro_matchers.rs b/src/librustdoc/clean/render_macro_matchers.rs index b736f4a795614..b6880cfc60fbf 100644 --- a/src/librustdoc/clean/render_macro_matchers.rs +++ b/src/librustdoc/clean/render_macro_matchers.rs @@ -1,4 +1,4 @@ -use rustc_ast::token::{self, BinOpToken, Delimiter}; +use rustc_ast::token::{self, BinOpToken, Delimiter, IdentIsRaw}; use rustc_ast::tokenstream::{TokenStream, TokenTree}; use rustc_ast_pretty::pprust::state::State as Printer; use rustc_ast_pretty::pprust::PrintState; @@ -148,7 +148,7 @@ fn print_tts(printer: &mut Printer<'_>, tts: &TokenStream) { (false, Other) } (Pound, token::Not) => (false, PoundBang), - (_, token::Ident(symbol, /* is_raw */ false)) + (_, token::Ident(symbol, IdentIsRaw::No)) if !usually_needs_space_between_keyword_and_open_delim(*symbol, tt.span) => { (true, Ident) From f5d0d087ad310856f9ed32fdef01acc009a91ff7 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:31:06 +0000 Subject: [PATCH 068/134] Add newtype for `IsTuple` --- .../rustc_builtin_macros/src/deriving/decodable.rs | 2 +- .../rustc_builtin_macros/src/deriving/default.rs | 4 ++-- .../src/deriving/generic/mod.rs | 13 +++++++++++-- 3 files changed, 14 insertions(+), 5 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/deriving/decodable.rs b/compiler/rustc_builtin_macros/src/deriving/decodable.rs index 97d6b82de98c6..bf4693cd54198 100644 --- a/compiler/rustc_builtin_macros/src/deriving/decodable.rs +++ b/compiler/rustc_builtin_macros/src/deriving/decodable.rs @@ -198,7 +198,7 @@ where match fields { Unnamed(fields, is_tuple) => { let path_expr = cx.expr_path(outer_pat_path); - if !*is_tuple { + if matches!(is_tuple, IsTuple::No) { path_expr } else { let fields = fields diff --git a/compiler/rustc_builtin_macros/src/deriving/default.rs b/compiler/rustc_builtin_macros/src/deriving/default.rs index d5a42566e1912..0bd2d423a294f 100644 --- a/compiler/rustc_builtin_macros/src/deriving/default.rs +++ b/compiler/rustc_builtin_macros/src/deriving/default.rs @@ -62,8 +62,8 @@ fn default_struct_substructure( let default_call = |span| cx.expr_call_global(span, default_ident.clone(), ThinVec::new()); let expr = match summary { - Unnamed(_, false) => cx.expr_ident(trait_span, substr.type_ident), - 
Unnamed(fields, true) => { + Unnamed(_, IsTuple::No) => cx.expr_ident(trait_span, substr.type_ident), + Unnamed(fields, IsTuple::Yes) => { let exprs = fields.iter().map(|sp| default_call(*sp)).collect(); cx.expr_call_ident(trait_span, substr.type_ident, exprs) } diff --git a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs index 6eeb028728c9d..3ee4fded74999 100644 --- a/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs +++ b/compiler/rustc_builtin_macros/src/deriving/generic/mod.rs @@ -286,10 +286,16 @@ pub struct FieldInfo { pub other_selflike_exprs: Vec>, } +#[derive(Copy, Clone)] +pub enum IsTuple { + No, + Yes, +} + /// Fields for a static method pub enum StaticFields { /// Tuple and unit structs/enum variants like this. - Unnamed(Vec, bool /*is tuple*/), + Unnamed(Vec, IsTuple), /// Normal structs/struct variants. Named(Vec<(Ident, Span)>), } @@ -1439,7 +1445,10 @@ impl<'a> TraitDef<'a> { } } - let is_tuple = matches!(struct_def, ast::VariantData::Tuple(..)); + let is_tuple = match struct_def { + ast::VariantData::Tuple(..) => IsTuple::Yes, + _ => IsTuple::No, + }; match (just_spans.is_empty(), named_idents.is_empty()) { (false, false) => cx .dcx() From 4850ae84422569747901c14169b5ed6dfbfb96a3 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:44:33 +0000 Subject: [PATCH 069/134] Add newtype for parser recovery --- compiler/rustc_builtin_macros/src/format.rs | 6 +-- .../rustc_parse/src/parser/diagnostics.rs | 18 ++++----- compiler/rustc_parse/src/parser/expr.rs | 8 ++-- compiler/rustc_parse/src/parser/item.rs | 26 +++++++------ compiler/rustc_parse/src/parser/mod.rs | 37 +++++++++++++------ compiler/rustc_parse/src/parser/stmt.rs | 6 +-- 6 files changed, 57 insertions(+), 44 deletions(-) diff --git a/compiler/rustc_builtin_macros/src/format.rs b/compiler/rustc_builtin_macros/src/format.rs index b66f7111ff006..3366378d38da8 100644 --- a/compiler/rustc_builtin_macros/src/format.rs +++ b/compiler/rustc_builtin_macros/src/format.rs @@ -10,6 +10,7 @@ use rustc_ast::{ use rustc_data_structures::fx::FxHashSet; use rustc_errors::{Applicability, DiagnosticBuilder, MultiSpan, PResult, SingleLabelManySpans}; use rustc_expand::base::{self, *}; +use rustc_parse::parser::Recovered; use rustc_parse_format as parse; use rustc_span::symbol::{Ident, Symbol}; use rustc_span::{BytePos, InnerSpan, Span}; @@ -111,9 +112,8 @@ fn parse_args<'a>(ecx: &mut ExtCtxt<'a>, sp: Span, tts: TokenStream) -> PResult< _ => return Err(err), } } - Ok(recovered) => { - assert!(recovered); - } + Ok(Recovered::Yes) => (), + Ok(Recovered::No) => unreachable!(), } } first = false; diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index 7f5d604050dd0..f4e7bb413ddff 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -22,6 +22,7 @@ use crate::fluent_generated as fluent; use crate::parser; use crate::parser::attr::InnerAttrPolicy; use ast::token::IdentIsRaw; +use parser::Recovered; use rustc_ast as ast; use rustc_ast::ptr::P; use rustc_ast::token::{self, Delimiter, Lit, LitKind, Token, TokenKind}; @@ -430,7 +431,7 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { debug!("expected_one_of_not_found(edible: {:?}, inedible: {:?})", edible, inedible); fn tokens_to_string(tokens: &[TokenType]) -> String { let mut i = 
tokens.iter(); @@ -533,7 +534,7 @@ impl<'a> Parser<'a> { sugg: ExpectedSemiSugg::ChangeToSemi(self.token.span), }); self.bump(); - return Ok(true); + return Ok(Recovered::Yes); } else if self.look_ahead(0, |t| { t == &token::CloseDelim(Delimiter::Brace) || ((t.can_begin_expr() || t.can_begin_item()) @@ -557,7 +558,7 @@ impl<'a> Parser<'a> { unexpected_token_label: Some(self.token.span), sugg: ExpectedSemiSugg::AddSemi(span), }); - return Ok(true); + return Ok(Recovered::Yes); } } @@ -712,7 +713,7 @@ impl<'a> Parser<'a> { if self.check_too_many_raw_str_terminators(&mut err) { if expected.contains(&TokenType::Token(token::Semi)) && self.eat(&token::Semi) { err.emit(); - return Ok(true); + return Ok(Recovered::Yes); } else { return Err(err); } @@ -1224,7 +1225,7 @@ impl<'a> Parser<'a> { |p| p.parse_generic_arg(None), ); match x { - Ok((_, _, false)) => { + Ok((_, _, Recovered::No)) => { if self.eat(&token::Gt) { // We made sense of it. Improve the error message. e.span_suggestion_verbose( @@ -1248,7 +1249,7 @@ impl<'a> Parser<'a> { } } } - Ok((_, _, true)) => {} + Ok((_, _, Recovered::Yes)) => {} Err(err) => { err.cancel(); } @@ -1841,10 +1842,7 @@ impl<'a> Parser<'a> { /// Creates a `DiagnosticBuilder` for an unexpected token `t` and tries to recover if it is a /// closing delimiter. - pub(super) fn unexpected_try_recover( - &mut self, - t: &TokenKind, - ) -> PResult<'a, bool /* recovered */> { + pub(super) fn unexpected_try_recover(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { let token_str = pprust::token_kind_to_string(t); let this_token_str = super::token_descr(&self.token); let (prev_sp, sp) = match (&self.token.kind, self.subparser_name) { diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 455d9c3deb365..081f30ef11a4c 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -3,7 +3,7 @@ use super::diagnostics::SnapshotParser; use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Restrictions, + AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions, SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, }; @@ -3093,10 +3093,10 @@ impl<'a> Parser<'a> { if !require_comma { arm_body = Some(expr); this.eat(&token::Comma); - Ok(false) + Ok(Recovered::No) } else if let Some(body) = this.parse_arm_body_missing_braces(&expr, arrow_span) { arm_body = Some(body); - Ok(true) + Ok(Recovered::Yes) } else { let expr_span = expr.span; arm_body = Some(expr); @@ -3177,7 +3177,7 @@ impl<'a> Parser<'a> { this.dcx().emit_err(errors::MissingCommaAfterMatchArm { span: arm_span.shrink_to_hi(), }); - return Ok(true); + return Ok(Recovered::Yes); } Err(err) }); diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index c6e80f3f07f37..6e9af1e15d8e6 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,6 +1,8 @@ use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; -use super::{AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, TrailingToken}; +use super::{ + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, TrailingToken, +}; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as 
fluent; use ast::token::IdentIsRaw; @@ -1534,10 +1536,10 @@ impl<'a> Parser<'a> { err.span_label(span, "while parsing this enum"); err.help(help); err.emit(); - (thin_vec![], true) + (thin_vec![], Recovered::Yes) } }; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if this.check(&token::OpenDelim(Delimiter::Parenthesis)) { let body = match this.parse_tuple_struct_body() { Ok(body) => body, @@ -1622,7 +1624,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } // No `where` so: `struct Foo;` } else if self.eat(&token::Semi) { @@ -1634,7 +1636,7 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } // Tuple-style struct definition with optional where-clause. } else if self.token == token::OpenDelim(Delimiter::Parenthesis) { let body = VariantData::Tuple(self.parse_tuple_struct_body()?, DUMMY_NODE_ID); @@ -1663,14 +1665,14 @@ impl<'a> Parser<'a> { class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else if self.token == token::OpenDelim(Delimiter::Brace) { let (fields, recovered) = self.parse_record_struct_body( "union", class_name.span, generics.where_clause.has_where_token, )?; - VariantData::Struct { fields, recovered } + VariantData::Struct { fields, recovered: recovered.into() } } else { let token_str = super::token_descr(&self.token); let msg = format!("expected `where` or `{{` after union name, found {token_str}"); @@ -1687,14 +1689,14 @@ impl<'a> Parser<'a> { adt_ty: &str, ident_span: Span, parsed_where: bool, - ) -> PResult<'a, (ThinVec, /* recovered */ bool)> { + ) -> PResult<'a, (ThinVec, Recovered)> { let mut fields = ThinVec::new(); - let mut recovered = false; + let mut recovered = Recovered::No; if self.eat(&token::OpenDelim(Delimiter::Brace)) { while self.token != token::CloseDelim(Delimiter::Brace) { let field = self.parse_field_def(adt_ty).map_err(|e| { self.consume_block(Delimiter::Brace, ConsumeClosingDelim::No); - recovered = true; + recovered = Recovered::Yes; e }); match field { @@ -2465,8 +2467,8 @@ impl<'a> Parser<'a> { // `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't // account for this. match self.expect_one_of(&[], &[]) { - Ok(true) => {} - Ok(false) => unreachable!(), + Ok(Recovered::Yes) => {} + Ok(Recovered::No) => unreachable!(), Err(mut err) => { // Qualifier keywords ordering check enum WrongKw { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 80f6a20b985b9..27e9fb10a9fcb 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -358,6 +358,19 @@ pub enum FollowedByType { No, } +/// Whether a function performed recovery +#[derive(Copy, Clone, Debug)] +pub enum Recovered { + No, + Yes, +} + +impl From for bool { + fn from(r: Recovered) -> bool { + matches!(r, Recovered::Yes) + } +} + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, @@ -456,11 +469,11 @@ impl<'a> Parser<'a> { } /// Expects and consumes the token `t`. Signals an error if the next token is not `t`. 
- pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> { + pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, Recovered> { if self.expected_tokens.is_empty() { if self.token == *t { self.bump(); - Ok(false) + Ok(Recovered::No) } else { self.unexpected_try_recover(t) } @@ -476,13 +489,13 @@ impl<'a> Parser<'a> { &mut self, edible: &[TokenKind], inedible: &[TokenKind], - ) -> PResult<'a, bool /* recovered */> { + ) -> PResult<'a, Recovered> { if edible.contains(&self.token.kind) { self.bump(); - Ok(false) + Ok(Recovered::No) } else if inedible.contains(&self.token.kind) { // leave it in the input - Ok(false) + Ok(Recovered::No) } else if self.token.kind != token::Eof && self.last_unexpected_token_span == Some(self.token.span) { @@ -784,9 +797,9 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { let mut first = true; - let mut recovered = false; + let mut recovered = Recovered::No; let mut trailing = false; let mut v = ThinVec::new(); @@ -801,12 +814,12 @@ impl<'a> Parser<'a> { } else { // check for separator match self.expect(t) { - Ok(false) /* not recovered */ => { + Ok(Recovered::No) => { self.current_closure.take(); } - Ok(true) /* recovered */ => { + Ok(Recovered::Yes) => { self.current_closure.take(); - recovered = true; + recovered = Recovered::Yes; break; } Err(mut expect_err) => { @@ -979,7 +992,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, bool /* recovered */)> { + ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } @@ -993,7 +1006,7 @@ impl<'a> Parser<'a> { f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, ) -> PResult<'a, (ThinVec, bool /* trailing */)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; - if !recovered { + if matches!(recovered, Recovered::No) { self.eat(ket); } Ok((val, trailing)) diff --git a/compiler/rustc_parse/src/parser/stmt.rs b/compiler/rustc_parse/src/parser/stmt.rs index 5c2f0967b649d..ee02b69c6140d 100644 --- a/compiler/rustc_parse/src/parser/stmt.rs +++ b/compiler/rustc_parse/src/parser/stmt.rs @@ -11,6 +11,7 @@ use crate::errors; use crate::maybe_whole; use crate::errors::MalformedLoopLabel; +use crate::parser::Recovered; use ast::Label; use rustc_ast as ast; use rustc_ast::ptr::P; @@ -661,7 +662,6 @@ impl<'a> Parser<'a> { if self.token != token::Eof && classify::expr_requires_semi_to_be_stmt(expr) => { // Just check for errors and recover; do not eat semicolon yet. - // `expect_one_of` returns PResult<'a, bool /* recovered */> let expect_result = self.expect_one_of(&[], &[token::Semi, token::CloseDelim(Delimiter::Brace)]); @@ -669,7 +669,7 @@ impl<'a> Parser<'a> { let replace_with_err = 'break_recover: { match expect_result { // Recover from parser, skip type error to avoid extra errors. - Ok(true) => true, + Ok(Recovered::Yes) => true, Err(e) => { if self.recover_colon_as_semi() { // recover_colon_as_semi has already emitted a nicer error. 
@@ -735,7 +735,7 @@ impl<'a> Parser<'a> { true } - Ok(false) => false, + Ok(Recovered::No) => false, } }; From acb2cee618bca1be9a1d778a525c07d2792f0f91 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:48:23 +0000 Subject: [PATCH 070/134] Add newtype for trailing in parser --- compiler/rustc_parse/src/parser/expr.rs | 4 ++-- compiler/rustc_parse/src/parser/item.rs | 5 +++-- compiler/rustc_parse/src/parser/mod.rs | 22 ++++++++++++++-------- compiler/rustc_parse/src/parser/pat.rs | 16 +++++++++------- compiler/rustc_parse/src/parser/ty.rs | 4 ++-- 5 files changed, 30 insertions(+), 21 deletions(-) diff --git a/compiler/rustc_parse/src/parser/expr.rs b/compiler/rustc_parse/src/parser/expr.rs index 081f30ef11a4c..cb46eb25fa4c1 100644 --- a/compiler/rustc_parse/src/parser/expr.rs +++ b/compiler/rustc_parse/src/parser/expr.rs @@ -4,7 +4,7 @@ use super::pat::{CommaRecoveryMode, Expected, RecoverColon, RecoverComma}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ AttrWrapper, BlockMode, ClosureSpans, ForceCollect, Parser, PathStyle, Recovered, Restrictions, - SemiColonMode, SeqSep, TokenExpectType, TokenType, TrailingToken, + SemiColonMode, SeqSep, TokenExpectType, TokenType, Trailing, TrailingToken, }; use crate::errors; @@ -1561,7 +1561,7 @@ impl<'a> Parser<'a> { return Ok(self.recover_seq_parse_error(Delimiter::Parenthesis, lo, err)); } }; - let kind = if es.len() == 1 && !trailing_comma { + let kind = if es.len() == 1 && matches!(trailing_comma, Trailing::No) { // `(e)` is parenthesized `e`. ExprKind::Paren(es.into_iter().next().unwrap()) } else { diff --git a/compiler/rustc_parse/src/parser/item.rs b/compiler/rustc_parse/src/parser/item.rs index 6e9af1e15d8e6..b9fbf1c576589 100644 --- a/compiler/rustc_parse/src/parser/item.rs +++ b/compiler/rustc_parse/src/parser/item.rs @@ -1,7 +1,8 @@ use super::diagnostics::{dummy_arg, ConsumeClosingDelim}; use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign}; use super::{ - AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, TrailingToken, + AttrWrapper, FollowedByType, ForceCollect, Parser, PathStyle, Recovered, Trailing, + TrailingToken, }; use crate::errors::{self, MacroExpandsToAdtField}; use crate::fluent_generated as fluent; @@ -1459,7 +1460,7 @@ impl<'a> Parser<'a> { let (variants, _) = if self.token == TokenKind::Semi { self.dcx().emit_err(errors::UseEmptyBlockNotSemi { span: self.token.span }); self.bump(); - (thin_vec![], false) + (thin_vec![], Trailing::No) } else { self.parse_delim_comma_seq(Delimiter::Brace, |p| p.parse_enum_variant(id.span)) .map_err(|mut err| { diff --git a/compiler/rustc_parse/src/parser/mod.rs b/compiler/rustc_parse/src/parser/mod.rs index 27e9fb10a9fcb..29dd2eeb56aba 100644 --- a/compiler/rustc_parse/src/parser/mod.rs +++ b/compiler/rustc_parse/src/parser/mod.rs @@ -371,6 +371,12 @@ impl From for bool { } } +#[derive(Copy, Clone, Debug)] +pub enum Trailing { + No, + Yes, +} + #[derive(Clone, Copy, PartialEq, Eq)] pub enum TokenDescription { ReservedIdentifier, @@ -797,10 +803,10 @@ impl<'a> Parser<'a> { sep: SeqSep, expect: TokenExpectType, mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { + ) -> PResult<'a, (ThinVec, Trailing, Recovered)> { let mut first = true; let mut recovered = Recovered::No; - let mut trailing = false; + let mut trailing = Trailing::No; let mut v = ThinVec::new(); while !self.expect_any_with_type(kets, expect) { @@ -914,7 +920,7 @@ impl<'a> Parser<'a> { } } if 
sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) { - trailing = true; + trailing = Trailing::Yes; break; } @@ -992,7 +998,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */, Recovered)> { + ) -> PResult<'a, (ThinVec, Trailing, Recovered)> { self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f) } @@ -1004,7 +1010,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?; if matches!(recovered, Recovered::No) { self.eat(ket); @@ -1021,7 +1027,7 @@ impl<'a> Parser<'a> { ket: &TokenKind, sep: SeqSep, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.expect(bra)?; self.parse_seq_to_end(ket, sep, f) } @@ -1033,7 +1039,7 @@ impl<'a> Parser<'a> { &mut self, delim: Delimiter, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.parse_unspanned_seq( &token::OpenDelim(delim), &token::CloseDelim(delim), @@ -1048,7 +1054,7 @@ impl<'a> Parser<'a> { fn parse_paren_comma_seq( &mut self, f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>, - ) -> PResult<'a, (ThinVec, bool /* trailing */)> { + ) -> PResult<'a, (ThinVec, Trailing)> { self.parse_delim_comma_seq(Delimiter::Parenthesis, f) } diff --git a/compiler/rustc_parse/src/parser/pat.rs b/compiler/rustc_parse/src/parser/pat.rs index 072db24265eea..2ede19b11e049 100644 --- a/compiler/rustc_parse/src/parser/pat.rs +++ b/compiler/rustc_parse/src/parser/pat.rs @@ -1,4 +1,4 @@ -use super::{ForceCollect, Parser, PathStyle, Restrictions, TrailingToken}; +use super::{ForceCollect, Parser, PathStyle, Restrictions, Trailing, TrailingToken}; use crate::errors::{ self, AmbiguousRangePattern, DotDotDotForRemainingFields, DotDotDotRangeToPatternNotAllowed, DotDotDotRestPattern, EnumPatternInsteadOfIdentifier, ExpectedBindingLeftOfAt, @@ -696,7 +696,9 @@ impl<'a> Parser<'a> { // Here, `(pat,)` is a tuple pattern. // For backward compatibility, `(..)` is a tuple pattern as well. - Ok(if fields.len() == 1 && !(trailing_comma || fields[0].is_rest()) { + let paren_pattern = + fields.len() == 1 && !(matches!(trailing_comma, Trailing::Yes) || fields[0].is_rest()); + if paren_pattern { let pat = fields.into_iter().next().unwrap(); let close_paren = self.prev_token.span; @@ -714,7 +716,7 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(begin.clone(), form)? + self.parse_pat_range_begin_with(begin.clone(), form) } // recover ranges with parentheses around the `(start)..` PatKind::Err(_) @@ -729,15 +731,15 @@ impl<'a> Parser<'a> { }, }); - self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form)? + self.parse_pat_range_begin_with(self.mk_expr(pat.span, ExprKind::Err), form) } // (pat) with optional parentheses - _ => PatKind::Paren(pat), + _ => Ok(PatKind::Paren(pat)), } } else { - PatKind::Tuple(fields) - }) + Ok(PatKind::Tuple(fields)) + } } /// Parse a mutable binding with the `mut` token already eaten. 
diff --git a/compiler/rustc_parse/src/parser/ty.rs b/compiler/rustc_parse/src/parser/ty.rs index f79f2a813b223..d65e06494fc7b 100644 --- a/compiler/rustc_parse/src/parser/ty.rs +++ b/compiler/rustc_parse/src/parser/ty.rs @@ -1,4 +1,4 @@ -use super::{Parser, PathStyle, TokenType}; +use super::{Parser, PathStyle, TokenType, Trailing}; use crate::errors::{ self, DynAfterMut, ExpectedFnPathFoundFnKeyword, ExpectedMutOrConstInRawPointerType, @@ -415,7 +415,7 @@ impl<'a> Parser<'a> { Ok(ty) })?; - if ts.len() == 1 && !trailing { + if ts.len() == 1 && matches!(trailing, Trailing::No) { let ty = ts.into_iter().next().unwrap().into_inner(); let maybe_bounds = allow_plus == AllowPlus::Yes && self.token.is_like_plus(); match ty.kind { From cb51c850232d8b98b37c0cde6090392b9f077939 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:50:50 +0000 Subject: [PATCH 071/134] Use `Recovered` more --- .../rustc_parse/src/parser/diagnostics.rs | 25 ++++++++++--------- 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/compiler/rustc_parse/src/parser/diagnostics.rs b/compiler/rustc_parse/src/parser/diagnostics.rs index f4e7bb413ddff..03a2b47683a14 100644 --- a/compiler/rustc_parse/src/parser/diagnostics.rs +++ b/compiler/rustc_parse/src/parser/diagnostics.rs @@ -1288,7 +1288,7 @@ impl<'a> Parser<'a> { err: &mut ComparisonOperatorsCannotBeChained, inner_op: &Expr, outer_op: &Spanned, - ) -> bool /* advanced the cursor */ { + ) -> Recovered { if let ExprKind::Binary(op, l1, r1) = &inner_op.kind { if let ExprKind::Field(_, ident) = l1.kind && ident.as_str().parse::().is_err() @@ -1296,7 +1296,7 @@ impl<'a> Parser<'a> { { // The parser has encountered `foo.bar Parser<'a> { span: inner_op.span.shrink_to_hi(), middle_term: expr_to_str(r1), }); - false // Keep the current parse behavior, where the AST is `(x < y) < z`. + Recovered::No // Keep the current parse behavior, where the AST is `(x < y) < z`. } // `x == y < z` (BinOpKind::Eq, AssocOp::Less | AssocOp::LessEqual | AssocOp::Greater | AssocOp::GreaterEqual) => { @@ -1329,12 +1329,12 @@ impl<'a> Parser<'a> { left: r1.span.shrink_to_lo(), right: r2.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::Yes } } } @@ -1349,19 +1349,19 @@ impl<'a> Parser<'a> { left: l1.span.shrink_to_lo(), right: r1.span.shrink_to_hi(), }); - true + Recovered::Yes } Err(expr_err) => { expr_err.cancel(); self.restore_snapshot(snapshot); - false + Recovered::No } } } - _ => false, + _ => Recovered::No, }; } - false + Recovered::No } /// Produces an error if comparison operators are chained (RFC #558). @@ -1489,8 +1489,9 @@ impl<'a> Parser<'a> { // If it looks like a genuine attempt to chain operators (as opposed to a // misformatted turbofish, for instance), suggest a correct form. 
- if self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op) - { + let recovered = self + .attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); + if matches!(recovered, Recovered::Yes) { self.dcx().emit_err(err); mk_err_expr(self, inner_op.span.to(self.prev_token.span)) } else { @@ -1502,7 +1503,7 @@ impl<'a> Parser<'a> { let recover = self.attempt_chained_comparison_suggestion(&mut err, inner_op, outer_op); self.dcx().emit_err(err); - if recover { + if matches!(recover, Recovered::Yes) { return mk_err_expr(self, inner_op.span.to(self.prev_token.span)); } } From 9bfc46c5d8cafda9b9fe4a5ce38f134edb37cf6f Mon Sep 17 00:00:00 2001 From: clubby789 Date: Tue, 13 Feb 2024 23:57:43 +0000 Subject: [PATCH 072/134] Add newtype for first input type --- .../src/coherence/orphan.rs | 10 ++++----- .../src/traits/coherence.rs | 21 ++++++++++++++++--- .../rustc_trait_selection/src/traits/mod.rs | 2 +- 3 files changed, 24 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_hir_analysis/src/coherence/orphan.rs b/compiler/rustc_hir_analysis/src/coherence/orphan.rs index 07bbaa1926edf..b46a67d08eb03 100644 --- a/compiler/rustc_hir_analysis/src/coherence/orphan.rs +++ b/compiler/rustc_hir_analysis/src/coherence/orphan.rs @@ -1,14 +1,13 @@ //! Orphan checker: every impl either implements a trait defined in this //! crate or pertains to a type defined in this crate. +use crate::errors; use rustc_errors::ErrorGuaranteed; use rustc_hir as hir; use rustc_middle::ty::{self, AliasKind, Ty, TyCtxt, TypeVisitableExt}; use rustc_span::def_id::LocalDefId; use rustc_span::Span; -use rustc_trait_selection::traits; - -use crate::errors; +use rustc_trait_selection::traits::{self, IsFirstInputType}; #[instrument(skip(tcx), level = "debug")] pub(crate) fn orphan_check_impl( @@ -288,7 +287,7 @@ fn emit_orphan_check_error<'tcx>( (Vec::new(), Vec::new(), Vec::new(), Vec::new(), Vec::new()); let mut sugg = None; for &(mut ty, is_target_ty) in &tys { - let span = if is_target_ty { + let span = if matches!(is_target_ty, IsFirstInputType::Yes) { // Point at `D` in `impl for C in D` self_ty_span } else { @@ -321,7 +320,8 @@ fn emit_orphan_check_error<'tcx>( } } - let is_foreign = !trait_ref.def_id.is_local() && !is_target_ty; + let is_foreign = + !trait_ref.def_id.is_local() && matches!(is_target_ty, IsFirstInputType::No); match &ty.kind() { ty::Slice(_) => { diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index f663f02f87289..0d3169cec143c 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -598,9 +598,24 @@ pub fn trait_ref_is_local_or_fundamental<'tcx>( trait_ref.def_id.krate == LOCAL_CRATE || tcx.has_attr(trait_ref.def_id, sym::fundamental) } +#[derive(Debug, Copy, Clone)] +pub enum IsFirstInputType { + No, + Yes, +} + +impl From for IsFirstInputType { + fn from(b: bool) -> IsFirstInputType { + match b { + false => IsFirstInputType::No, + true => IsFirstInputType::Yes, + } + } +} + #[derive(Debug)] pub enum OrphanCheckErr<'tcx> { - NonLocalInputType(Vec<(Ty<'tcx>, bool /* Is this the first input type? */)>), + NonLocalInputType(Vec<(Ty<'tcx>, IsFirstInputType)>), UncoveredTy(Ty<'tcx>, Option>), } @@ -751,7 +766,7 @@ struct OrphanChecker<'tcx, F> { /// Ignore orphan check failures and exclusively search for the first /// local type. 
search_first_local_ty: bool, - non_local_tys: Vec<(Ty<'tcx>, bool)>, + non_local_tys: Vec<(Ty<'tcx>, IsFirstInputType)>, } impl<'tcx, F, E> OrphanChecker<'tcx, F> @@ -769,7 +784,7 @@ where } fn found_non_local_ty(&mut self, t: Ty<'tcx>) -> ControlFlow> { - self.non_local_tys.push((t, self.in_self_ty)); + self.non_local_tys.push((t, self.in_self_ty.into())); ControlFlow::Continue(()) } diff --git a/compiler/rustc_trait_selection/src/traits/mod.rs b/compiler/rustc_trait_selection/src/traits/mod.rs index 9eec60ea06c21..32447aca390a4 100644 --- a/compiler/rustc_trait_selection/src/traits/mod.rs +++ b/compiler/rustc_trait_selection/src/traits/mod.rs @@ -42,7 +42,7 @@ use std::fmt::Debug; use std::ops::ControlFlow; pub use self::coherence::{add_placeholder_note, orphan_check, overlapping_impls}; -pub use self::coherence::{OrphanCheckErr, OverlapResult}; +pub use self::coherence::{IsFirstInputType, OrphanCheckErr, OverlapResult}; pub use self::engine::{ObligationCtxt, TraitEngineExt}; pub use self::fulfill::{FulfillmentContext, PendingPredicateObligation}; pub use self::normalize::NormalizeExt; From 3377dac31e5990a6fd65660f356806c82a8327c7 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Wed, 14 Feb 2024 00:01:08 +0000 Subject: [PATCH 073/134] Add newtype for signedness in LLVM SIMD --- compiler/rustc_codegen_llvm/src/intrinsic.rs | 55 +++++++++++--------- 1 file changed, 31 insertions(+), 24 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 4415c51acf684..574097e82dcc9 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -2094,9 +2094,16 @@ fn generic_simd_intrinsic<'ll, 'tcx>( return Ok(args[0].immediate()); } + #[derive(Copy, Clone)] + enum Sign { + Unsigned, + Signed, + } + use Sign::*; + enum Style { Float, - Int(/* is signed? */ bool), + Int(Sign), Unsupported, } @@ -2104,11 +2111,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( // vectors of pointer-sized integers should've been // disallowed before here, so this unwrap is safe. 
ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2116,11 +2123,11 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; let (out_style, out_width) = match out_elem.kind() { ty::Int(i) => ( - Style::Int(true), + Style::Int(Signed), i.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Uint(u) => ( - Style::Int(false), + Style::Int(Unsigned), u.normalize(bx.tcx().sess.target.pointer_width).bit_width().unwrap(), ), ty::Float(f) => (Style::Float, f.bit_width()), @@ -2128,31 +2135,31 @@ fn generic_simd_intrinsic<'ll, 'tcx>( }; match (in_style, out_style) { - (Style::Int(in_is_signed), Style::Int(_)) => { + (Style::Int(sign), Style::Int(_)) => { return Ok(match in_width.cmp(&out_width) { Ordering::Greater => bx.trunc(args[0].immediate(), llret_ty), Ordering::Equal => args[0].immediate(), - Ordering::Less => { - if in_is_signed { - bx.sext(args[0].immediate(), llret_ty) - } else { - bx.zext(args[0].immediate(), llret_ty) - } - } + Ordering::Less => match sign { + Sign::Signed => bx.sext(args[0].immediate(), llret_ty), + Sign::Unsigned => bx.zext(args[0].immediate(), llret_ty), + }, }); } - (Style::Int(in_is_signed), Style::Float) => { - return Ok(if in_is_signed { - bx.sitofp(args[0].immediate(), llret_ty) - } else { - bx.uitofp(args[0].immediate(), llret_ty) - }); + (Style::Int(Sign::Signed), Style::Float) => { + return Ok(bx.sitofp(args[0].immediate(), llret_ty)); } - (Style::Float, Style::Int(out_is_signed)) => { - return Ok(match (out_is_signed, name == sym::simd_as) { - (false, false) => bx.fptoui(args[0].immediate(), llret_ty), - (true, false) => bx.fptosi(args[0].immediate(), llret_ty), - (_, true) => bx.cast_float_to_int(out_is_signed, args[0].immediate(), llret_ty), + (Style::Int(Sign::Unsigned), Style::Float) => { + return Ok(bx.uitofp(args[0].immediate(), llret_ty)); + } + (Style::Float, Style::Int(sign)) => { + return Ok(match (sign, name == sym::simd_as) { + (Sign::Unsigned, false) => bx.fptoui(args[0].immediate(), llret_ty), + (Sign::Signed, false) => bx.fptosi(args[0].immediate(), llret_ty), + (_, true) => bx.cast_float_to_int( + matches!(sign, Sign::Signed), + args[0].immediate(), + llret_ty, + ), }); } (Style::Float, Style::Float) => { From 06e77397e18a8743087ef3f2b5546a148c0560ec Mon Sep 17 00:00:00 2001 From: clubby789 Date: Wed, 14 Feb 2024 00:04:54 +0000 Subject: [PATCH 074/134] Add newtype for using the prelude in resolution --- compiler/rustc_resolve/src/diagnostics.rs | 2 +- compiler/rustc_resolve/src/ident.rs | 24 ++++++++++++++++------- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/compiler/rustc_resolve/src/diagnostics.rs b/compiler/rustc_resolve/src/diagnostics.rs index 4b978fefa107a..99ce47254328c 100644 --- a/compiler/rustc_resolve/src/diagnostics.rs +++ b/compiler/rustc_resolve/src/diagnostics.rs @@ -1111,7 +1111,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { suggestions.extend( tmp_suggestions .into_iter() - .filter(|s| use_prelude || this.is_builtin_macro(s.res)), + .filter(|s| use_prelude.into() || this.is_builtin_macro(s.res)), ); } } diff --git a/compiler/rustc_resolve/src/ident.rs b/compiler/rustc_resolve/src/ident.rs index 4583f991cabd5..7e7424be303ac 100644 --- a/compiler/rustc_resolve/src/ident.rs +++ b/compiler/rustc_resolve/src/ident.rs @@ -23,6 +23,18 @@ use 
Namespace::*; type Visibility = ty::Visibility; +#[derive(Copy, Clone)] +pub enum UsePrelude { + No, + Yes, +} + +impl From for bool { + fn from(up: UsePrelude) -> bool { + matches!(up, UsePrelude::Yes) + } +} + impl<'a, 'tcx> Resolver<'a, 'tcx> { /// A generic scope visitor. /// Visits scopes in order to resolve some identifier in them or perform other actions. @@ -32,12 +44,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { scope_set: ScopeSet<'a>, parent_scope: &ParentScope<'a>, ctxt: SyntaxContext, - mut visitor: impl FnMut( - &mut Self, - Scope<'a>, - /*use_prelude*/ bool, - SyntaxContext, - ) -> Option, + mut visitor: impl FnMut(&mut Self, Scope<'a>, UsePrelude, SyntaxContext) -> Option, ) -> Option { // General principles: // 1. Not controlled (user-defined) names should have higher priority than controlled names @@ -133,6 +140,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }; if visit { + let use_prelude = if use_prelude { UsePrelude::Yes } else { UsePrelude::No }; if let break_result @ Some(..) = visitor(self, scope, use_prelude, ctxt) { return break_result; } @@ -579,7 +587,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None, ignore_binding, ) { - if use_prelude || this.is_builtin_macro(binding.res()) { + if matches!(use_prelude, UsePrelude::Yes) + || this.is_builtin_macro(binding.res()) + { result = Ok((binding, Flags::MISC_FROM_PRELUDE)); } } From 9dee352da09f53af244ecb651885dd0e62fc594d Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 15:55:17 +0100 Subject: [PATCH 075/134] fix: server hanging up on build script task --- crates/rust-analyzer/src/global_state.rs | 20 ++++---------------- crates/rust-analyzer/src/lsp/utils.rs | 1 + crates/rust-analyzer/src/reload.rs | 7 ++----- 3 files changed, 7 insertions(+), 21 deletions(-) diff --git a/crates/rust-analyzer/src/global_state.rs b/crates/rust-analyzer/src/global_state.rs index 293807a383baa..b2d507491b177 100644 --- a/crates/rust-analyzer/src/global_state.rs +++ b/crates/rust-analyzer/src/global_state.rs @@ -301,19 +301,12 @@ impl GlobalState { if let Some(path) = vfs_path.as_path() { let path = path.to_path_buf(); if reload::should_refresh_for_change(&path, file.kind()) { - workspace_structure_change = Some(( - path.clone(), - false, - AsRef::::as_ref(&path).ends_with("build.rs"), - )); + workspace_structure_change = Some((path.clone(), false)); } if file.is_created_or_deleted() { has_structure_changes = true; - workspace_structure_change = Some(( - path, - self.crate_graph_file_dependencies.contains(vfs_path), - false, - )); + workspace_structure_change = + Some((path, self.crate_graph_file_dependencies.contains(vfs_path))); } else if path.extension() == Some("rs".as_ref()) { modified_rust_files.push(file.file_id); } @@ -365,16 +358,11 @@ impl GlobalState { // FIXME: ideally we should only trigger a workspace fetch for non-library changes // but something's going wrong with the source root business when we add a new local // crate see https://github.com/rust-lang/rust-analyzer/issues/13029 - if let Some((path, force_crate_graph_reload, build_scripts_touched)) = - workspace_structure_change - { + if let Some((path, force_crate_graph_reload)) = workspace_structure_change { self.fetch_workspaces_queue.request_op( format!("workspace vfs file change: {path}"), force_crate_graph_reload, ); - if build_scripts_touched { - self.fetch_build_data_queue.request_op(format!("build.rs changed: {path}"), ()); - } } } diff --git a/crates/rust-analyzer/src/lsp/utils.rs b/crates/rust-analyzer/src/lsp/utils.rs index 
10335cb145335..800c0eee53a02 100644 --- a/crates/rust-analyzer/src/lsp/utils.rs +++ b/crates/rust-analyzer/src/lsp/utils.rs @@ -134,6 +134,7 @@ impl GlobalState { let token = lsp_types::ProgressToken::String( cancel_token.unwrap_or_else(|| format!("rustAnalyzer/{title}")), ); + tracing::debug!(?token, ?state, "report_progress {message:?}"); let work_done_progress = match state { Progress::Begin => { self.send_request::( diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 5895459d1fcf8..00494ca5ba090 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -411,10 +411,7 @@ impl GlobalState { if *force_reload_crate_graph { self.recreate_crate_graph(cause); } - if self.build_deps_changed && self.config.run_build_scripts() { - self.build_deps_changed = false; - self.fetch_build_data_queue.request_op("build_deps_changed".to_owned(), ()); - } + // Current build scripts do not match the version of the active // workspace, so there's nothing for us to update. return; @@ -424,7 +421,7 @@ impl GlobalState { // Here, we completely changed the workspace (Cargo.toml edit), so // we don't care about build-script results, they are stale. - // FIXME: can we abort the build scripts here? + // FIXME: can we abort the build scripts here if they are already running? self.workspaces = Arc::new(workspaces); if self.config.run_build_scripts() { From e59efe4d7e77bb498d32bc75b46fe7924c3f8270 Mon Sep 17 00:00:00 2001 From: Takashiidobe Date: Tue, 20 Feb 2024 09:02:49 -0500 Subject: [PATCH 076/134] Add examples for some methods on slices --- library/core/src/slice/mod.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/library/core/src/slice/mod.rs b/library/core/src/slice/mod.rs index c948337ba6c2d..ab43499f2689d 100644 --- a/library/core/src/slice/mod.rs +++ b/library/core/src/slice/mod.rs @@ -146,6 +146,9 @@ impl [T] { /// ``` /// let a = [1, 2, 3]; /// assert!(!a.is_empty()); + /// + /// let b: &[i32] = &[]; + /// assert!(b.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_stable(feature = "const_slice_is_empty", since = "1.39.0")] @@ -185,6 +188,9 @@ impl [T] { /// *first = 5; /// } /// assert_eq!(x, &[5, 1, 2]); + /// + /// let y: &mut [i32] = &mut []; + /// assert_eq!(None, y.first_mut()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] @@ -297,7 +303,7 @@ impl [T] { if let [.., last] = self { Some(last) } else { None } } - /// Returns a mutable reference to the last item in the slice. + /// Returns a mutable reference to the last item in the slice, or `None` if it is empty. 
/// /// # Examples /// @@ -308,6 +314,9 @@ impl [T] { /// *last = 10; /// } /// assert_eq!(x, &[0, 1, 10]); + /// + /// let y: &mut [i32] = &mut []; + /// assert_eq!(None, y.last_mut()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[rustc_const_unstable(feature = "const_slice_first_last", issue = "83570")] From 16b15a203ec888714ffbcc7168a259d18555caae Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Tue, 20 Feb 2024 17:02:59 +0100 Subject: [PATCH 077/134] internal: Attempt to add a timeout to rustc-tests --- .../src/handlers/useless_braces.rs | 4 +- crates/ide-diagnostics/src/lib.rs | 20 ++++-- crates/rust-analyzer/src/cli/rustc_tests.rs | 70 +++++++++++++++---- 3 files changed, 71 insertions(+), 23 deletions(-) diff --git a/crates/ide-diagnostics/src/handlers/useless_braces.rs b/crates/ide-diagnostics/src/handlers/useless_braces.rs index 863a7ab783ec8..79bcaa0a9c4c9 100644 --- a/crates/ide-diagnostics/src/handlers/useless_braces.rs +++ b/crates/ide-diagnostics/src/handlers/useless_braces.rs @@ -4,7 +4,7 @@ use ide_db::{ source_change::SourceChange, }; use itertools::Itertools; -use syntax::{ast, AstNode, SyntaxNode}; +use syntax::{ast, AstNode, SyntaxNode, SyntaxNodePtr}; use text_edit::TextEdit; use crate::{fix, Diagnostic, DiagnosticCode}; @@ -43,7 +43,7 @@ pub(crate) fn useless_braces( "Unnecessary braces in use statement".to_owned(), FileRange { file_id, range: use_range }, ) - .with_main_node(InFile::new(file_id.into(), node.clone())) + .with_main_node(InFile::new(file_id.into(), SyntaxNodePtr::new(node))) .with_fixes(Some(vec![fix( "remove_braces", "Remove unnecessary braces", diff --git a/crates/ide-diagnostics/src/lib.rs b/crates/ide-diagnostics/src/lib.rs index 4428b8baafbdb..9f4368b04e79b 100644 --- a/crates/ide-diagnostics/src/lib.rs +++ b/crates/ide-diagnostics/src/lib.rs @@ -142,7 +142,7 @@ pub struct Diagnostic { pub experimental: bool, pub fixes: Option>, // The node that will be affected by `#[allow]` and similar attributes. - pub main_node: Option>, + pub main_node: Option>, } impl Diagnostic { @@ -174,9 +174,8 @@ impl Diagnostic { message: impl Into, node: InFile, ) -> Diagnostic { - let file_id = node.file_id; Diagnostic::new(code, message, ctx.sema.diagnostics_display_range(node)) - .with_main_node(node.map(|x| x.to_node(&ctx.sema.parse_or_expand(file_id)))) + .with_main_node(node) } fn experimental(mut self) -> Diagnostic { @@ -184,7 +183,7 @@ impl Diagnostic { self } - fn with_main_node(mut self, main_node: InFile) -> Diagnostic { + fn with_main_node(mut self, main_node: InFile) -> Diagnostic { self.main_node = Some(main_node); self } @@ -394,8 +393,17 @@ pub fn diagnostics( res.push(d) } - let mut diagnostics_of_range = - res.iter_mut().filter_map(|x| Some((x.main_node.clone()?, x))).collect::>(); + let mut diagnostics_of_range = res + .iter_mut() + .filter_map(|it| { + Some(( + it.main_node + .map(|ptr| ptr.map(|node| node.to_node(&ctx.sema.parse_or_expand(ptr.file_id)))) + .clone()?, + it, + )) + }) + .collect::>(); let mut rustc_stack: FxHashMap> = FxHashMap::default(); let mut clippy_stack: FxHashMap> = FxHashMap::default(); diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index 25f84d770bfb3..b3b6da1f698e8 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -1,9 +1,13 @@ //! Run all tests in a project, similar to `cargo test`, but using the mir interpreter. 
+use std::convert::identity; +use std::thread::Builder; +use std::time::{Duration, Instant}; use std::{cell::RefCell, fs::read_to_string, panic::AssertUnwindSafe, path::PathBuf}; use hir::{Change, Crate}; use ide::{AnalysisHost, DiagnosticCode, DiagnosticsConfig}; +use itertools::Either; use profile::StopWatch; use project_model::target_data_layout::RustcDataLayoutConfig; use project_model::{target_data_layout, CargoConfig, ProjectWorkspace, RustLibSource, Sysroot}; @@ -100,6 +104,7 @@ impl Tester { } fn test(&mut self, p: PathBuf) { + println!("{}", p.display()); if p.parent().unwrap().file_name().unwrap() == "auxiliary" { // These are not tests return; @@ -132,15 +137,44 @@ impl Tester { self.host.apply_change(change); let diagnostic_config = DiagnosticsConfig::test_sample(); + let res = std::thread::scope(|s| { + let worker = Builder::new() + .stack_size(40 * 1024 * 1024) + .spawn_scoped(s, { + let diagnostic_config = &diagnostic_config; + let main = std::thread::current(); + let analysis = self.host.analysis(); + let root_file = self.root_file; + move || { + let res = std::panic::catch_unwind(move || { + analysis.diagnostics( + diagnostic_config, + ide::AssistResolveStrategy::None, + root_file, + ) + }); + main.unpark(); + res + } + }) + .unwrap(); + + let timeout = Duration::from_secs(5); + let now = Instant::now(); + while now.elapsed() <= timeout && !worker.is_finished() { + std::thread::park_timeout(timeout - now.elapsed()); + } + + if !worker.is_finished() { + // attempt to cancel the worker, won't work for chalk hangs unfortunately + self.host.request_cancellation(); + } + worker.join().and_then(identity) + }); let mut actual = FxHashMap::default(); - let panicked = match std::panic::catch_unwind(|| { - self.host - .analysis() - .diagnostics(&diagnostic_config, ide::AssistResolveStrategy::None, self.root_file) - .unwrap() - }) { - Err(e) => Some(e), - Ok(diags) => { + let panicked = match res { + Err(e) => Some(Either::Left(e)), + Ok(Ok(diags)) => { for diag in diags { if !matches!(diag.code, DiagnosticCode::RustcHardError(_)) { continue; @@ -152,6 +186,7 @@ impl Tester { } None } + Ok(Err(e)) => Some(Either::Right(e)), }; // Ignore tests with diagnostics that we don't emit. ignore_test |= expected.keys().any(|k| !SUPPORTED_DIAGNOSTICS.contains(k)); @@ -159,14 +194,19 @@ impl Tester { println!("{p:?} IGNORE"); self.ignore_count += 1; } else if let Some(panic) = panicked { - if let Some(msg) = panic - .downcast_ref::() - .map(String::as_str) - .or_else(|| panic.downcast_ref::<&str>().copied()) - { - println!("{msg:?} ") + match panic { + Either::Left(panic) => { + if let Some(msg) = panic + .downcast_ref::() + .map(String::as_str) + .or_else(|| panic.downcast_ref::<&str>().copied()) + { + println!("{msg:?} ") + } + println!("{p:?} PANIC"); + } + Either::Right(_) => println!("{p:?} CANCELLED"), } - println!("PANIC"); self.fail_count += 1; } else if actual == expected { println!("{p:?} PASS"); From 07421c13d48918984bd51fad2cde529ebf23c5ae Mon Sep 17 00:00:00 2001 From: DropDemBits Date: Tue, 20 Feb 2024 14:01:50 -0500 Subject: [PATCH 078/134] fix: Don't add `\` before `{` The LSP snippet grammar only specifies that `$`, `}`, and `\` can be escaped with backslashes, but not `{`. 
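
As an illustration only (not part of this change), here is a minimal sketch of the escaping rule,
using a hypothetical helper name rather than the actual to_proto.rs code: per the LSP snippet
grammar, only `\`, `$`, and `}` take a leading backslash, while `{` is emitted as-is. This is why
the expected snippets in the tests below drop the backslash before `{` but keep it before `}`.

    // Hypothetical sketch of the rule, not the real implementation.
    fn escape_snippet_text(text: &str) -> String {
        let mut out = String::with_capacity(text.len());
        for ch in text.chars() {
            // Only characters that are meaningful in the snippet grammar get escaped.
            if matches!(ch, '\\' | '$' | '}') {
                out.push('\\');
            }
            out.push(ch);
        }
        out
    }

    fn main() {
        // `{` stays literal; `}` gains a backslash.
        assert_eq!(escape_snippet_text("ProcMacro { disabled }"), "ProcMacro { disabled \\}");
    }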
--- crates/rust-analyzer/src/lsp/to_proto.rs | 16 +++++++--------- 1 file changed, 7 insertions(+), 9 deletions(-) diff --git a/crates/rust-analyzer/src/lsp/to_proto.rs b/crates/rust-analyzer/src/lsp/to_proto.rs index 4101d476cd308..481ebfefd4eed 100644 --- a/crates/rust-analyzer/src/lsp/to_proto.rs +++ b/crates/rust-analyzer/src/lsp/to_proto.rs @@ -1002,10 +1002,8 @@ fn merge_text_and_snippet_edits( let mut new_text = current_indel.insert; // find which snippet bits need to be escaped - let escape_places = new_text - .rmatch_indices(['\\', '$', '{', '}']) - .map(|(insert, _)| insert) - .collect_vec(); + let escape_places = + new_text.rmatch_indices(['\\', '$', '}']).map(|(insert, _)| insert).collect_vec(); let mut escape_places = escape_places.into_iter().peekable(); let mut escape_prior_bits = |new_text: &mut String, up_to: usize| { for before in escape_places.peeking_take_while(|insert| *insert >= up_to) { @@ -2176,7 +2174,7 @@ fn bar(_: usize) {} character: 0, }, }, - new_text: "\\$${1:ab\\{\\}\\$c\\\\d}ef", + new_text: "\\$${1:ab{\\}\\$c\\\\d}ef", insert_text_format: Some( Snippet, ), @@ -2272,7 +2270,7 @@ struct ProcMacro { character: 5, }, }, - new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2336,7 +2334,7 @@ struct P { character: 5, }, }, - new_text: "$0disabled = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "$0disabled = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2401,7 +2399,7 @@ struct ProcMacro { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), @@ -2466,7 +2464,7 @@ struct P { character: 5, }, }, - new_text: "${0:disabled} = false;\n ProcMacro \\{\n disabled,\n \\}", + new_text: "${0:disabled} = false;\n ProcMacro {\n disabled,\n \\}", insert_text_format: Some( Snippet, ), From 344a79c17dd321279897cc287313ff38e0fe255e Mon Sep 17 00:00:00 2001 From: Chase Douglas Date: Tue, 20 Feb 2024 16:42:20 -0800 Subject: [PATCH 079/134] Drop RUSTC_BOOTSTRAP env var when building build scripts Some packages (e.g. thiserror) force a recompile if the value of the `RUSTC_BOOTSTRAP` env var changes. RA sets the variable to 1 in order to enable rustc / cargo unstable options it uses. This causes flapping recompiles when building outside of RA. As of Cargo 1.75 the `--keep-going` flag is stable. This change uses the flag without `RUSTC_BOOTSTRAP` if the Cargo version is >= 1.75, and drops `--keep-going` otherwise. This fixes build script recompilation. --- crates/project-model/src/build_scripts.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/project-model/src/build_scripts.rs b/crates/project-model/src/build_scripts.rs index ab72f1fba09dd..621b6ca3efa43 100644 --- a/crates/project-model/src/build_scripts.rs +++ b/crates/project-model/src/build_scripts.rs @@ -138,7 +138,7 @@ impl WorkspaceBuildScripts { toolchain: &Option, sysroot: Option<&Sysroot>, ) -> io::Result { - const RUST_1_62: Version = Version::new(1, 62, 0); + const RUST_1_75: Version = Version::new(1, 75, 0); let current_dir = match &config.invocation_location { InvocationLocation::Root(root) if config.run_build_script_command.is_some() => { @@ -162,7 +162,7 @@ impl WorkspaceBuildScripts { progress, ) { Ok(WorkspaceBuildScripts { error: Some(error), .. 
}) - if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_62) => + if toolchain.as_ref().map_or(false, |it| *it >= RUST_1_75) => { // building build scripts failed, attempt to build with --keep-going so // that we potentially get more build data @@ -172,7 +172,8 @@ impl WorkspaceBuildScripts { &workspace.workspace_root().to_path_buf(), sysroot, )?; - cmd.args(["-Z", "unstable-options", "--keep-going"]).env("RUSTC_BOOTSTRAP", "1"); + + cmd.args(["--keep-going"]); let mut res = Self::run_per_ws(cmd, workspace, current_dir, progress)?; res.error = Some(error); Ok(res) From 2826eb51aa60e2e73d7f35408d2ee2727ec36334 Mon Sep 17 00:00:00 2001 From: Chase Douglas Date: Tue, 20 Feb 2024 16:49:07 -0800 Subject: [PATCH 080/134] Don't build dependencies when retrieving target data layout `cargo rustc -- ` first builds dependencies then calls `rustc ` for the current package. Here, we don't want to build dependencies, we just want to call `rustc --print`. An unstable `cargo rustc` `--print` command bypasses building dependencies first. This speeds up execution of this code path and ensures RA doesn't recompile dependencies with the `RUSTC_BOOTSRAP=1` env var flag set. Note that we must pass `-Z unstable-options` twice, first to enable the `cargo` unstable `--print` flag, then later to enable the unstable `rustc` `target-spec-json` print request. --- crates/project-model/src/target_data_layout.rs | 11 ++++++++++- 1 file changed, 10 insertions(+), 1 deletion(-) diff --git a/crates/project-model/src/target_data_layout.rs b/crates/project-model/src/target_data_layout.rs index af635dda5782d..98917351c5e88 100644 --- a/crates/project-model/src/target_data_layout.rs +++ b/crates/project-model/src/target_data_layout.rs @@ -32,7 +32,16 @@ pub fn get( Sysroot::set_rustup_toolchain_env(&mut cmd, sysroot); cmd.envs(extra_env); cmd.current_dir(cargo_toml.parent()) - .args(["rustc", "--", "-Z", "unstable-options", "--print", "target-spec-json"]) + .args([ + "rustc", + "-Z", + "unstable-options", + "--print", + "target-spec-json", + "--", + "-Z", + "unstable-options", + ]) .env("RUSTC_BOOTSTRAP", "1"); if let Some(target) = target { cmd.args(["--target", target]); From 893cb760e0cf6af0b60e628bc3adade814c59953 Mon Sep 17 00:00:00 2001 From: Nadrieril Date: Mon, 12 Feb 2024 04:27:37 +0100 Subject: [PATCH 081/134] Split off `test_candidates` into several functions and improve comments --- .../rustc_mir_build/src/build/matches/mod.rs | 369 +++++++++++------- 1 file changed, 218 insertions(+), 151 deletions(-) diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index ccf299649cf8f..fab8f9d2254d9 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -1137,39 +1137,61 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// the value, we will set and generate a branch to the appropriate /// pre-binding block. /// - /// If we find that *NONE* of the candidates apply, we branch to the - /// `otherwise_block`, setting it to `Some` if required. In principle, this - /// means that the input list was not exhaustive, though at present we - /// sometimes are not smart enough to recognize all exhaustive inputs. + /// If we find that *NONE* of the candidates apply, we branch to `otherwise_block`. /// /// It might be surprising that the input can be non-exhaustive. /// Indeed, initially, it is not, because all matches are /// exhaustive in Rust. 
But during processing we sometimes divide /// up the list of candidates and recurse with a non-exhaustive - /// list. This is important to keep the size of the generated code - /// under control. See [`Builder::test_candidates`] for more details. + /// list. This is how our lowering approach (called "backtracking + /// automaton" in the literature) works. + /// See [`Builder::test_candidates`] for more details. /// /// If `fake_borrows` is `Some`, then places which need fake borrows /// will be added to it. /// - /// For an example of a case where we set `otherwise_block`, even for an - /// exhaustive match, consider: - /// + /// For an example of how we use `otherwise_block`, consider: + /// ``` + /// # fn foo((x, y): (bool, bool)) -> u32 { + /// match (x, y) { + /// (true, true) => 1, + /// (_, false) => 2, + /// (false, true) => 3, + /// } + /// # } + /// ``` + /// For this match, we generate something like: /// ``` - /// # fn foo(x: (bool, bool)) { - /// match x { - /// (true, true) => (), - /// (_, false) => (), - /// (false, true) => (), + /// # fn foo((x, y): (bool, bool)) -> u32 { + /// if x { + /// if y { + /// return 1 + /// } else { + /// // continue + /// } + /// } else { + /// // continue /// } + /// if y { + /// if x { + /// // This is actually unreachable because the `(true, true)` case was handled above. + /// // continue + /// } else { + /// return 3 + /// } + /// } else { + /// return 2 + /// } + /// // this is the final `otherwise_block`, which is unreachable because the match was exhaustive. + /// unreachable!() /// # } /// ``` /// - /// For this match, we check if `x.0` matches `true` (for the first - /// arm). If it doesn't match, we check `x.1`. If `x.1` is `true` we check - /// if `x.0` matches `false` (for the third arm). In the (impossible at - /// runtime) case when `x.0` is now `true`, we branch to - /// `otherwise_block`. + /// Every `continue` is an instance of branching to some `otherwise_block` somewhere deep within + /// the algorithm. For more details on why we lower like this, see [`Builder::test_candidates`]. + /// + /// Note how we test `x` twice. This is the tradeoff of backtracking automata: we prefer smaller + /// code size at the expense of non-optimal code paths. #[instrument(skip(self, fake_borrows), level = "debug")] fn match_candidates<'pat>( &mut self, @@ -1533,18 +1555,12 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } - /// This is the most subtle part of the matching algorithm. At - /// this point, the input candidates have been fully simplified, - /// and so we know that all remaining match-pairs require some - /// sort of test. To decide what test to perform, we take the highest - /// priority candidate (the first one in the list, as of January 2021) - /// and extract the first match-pair from the list. From this we decide - /// what kind of test is needed using [`Builder::test`], defined in the - /// [`test` module](mod@test). + /// Pick a test to run. Which test doesn't matter as long as it is guaranteed to fully match at + /// least one match pair. We currently simply pick the test corresponding to the first match + /// pair of the first candidate in the list. /// - /// *Note:* taking the first match pair is somewhat arbitrary, and - /// we might do better here by choosing more carefully what to - /// test. + /// *Note:* taking the first match pair is somewhat arbitrary, and we might do better here by + /// choosing more carefully what to test. 
/// /// For example, consider the following possible match-pairs: /// @@ -1556,121 +1572,19 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { /// [`Switch`]: TestKind::Switch /// [`SwitchInt`]: TestKind::SwitchInt /// [`Range`]: TestKind::Range - /// - /// Once we know what sort of test we are going to perform, this - /// test may also help us winnow down our candidates. So we walk over - /// the candidates (from high to low priority) and check. This - /// gives us, for each outcome of the test, a transformed list of - /// candidates. For example, if we are testing `x.0`'s variant, - /// and we have a candidate `(x.0 @ Some(v), x.1 @ 22)`, - /// then we would have a resulting candidate of `((x.0 as Some).0 @ v, x.1 @ 22)`. - /// Note that the first match-pair is now simpler (and, in fact, irrefutable). - /// - /// But there may also be candidates that the test just doesn't - /// apply to. The classical example involves wildcards: - /// - /// ``` - /// # let (x, y, z) = (true, true, true); - /// match (x, y, z) { - /// (true , _ , true ) => true, // (0) - /// (_ , true , _ ) => true, // (1) - /// (false, false, _ ) => false, // (2) - /// (true , _ , false) => false, // (3) - /// } - /// # ; - /// ``` - /// - /// In that case, after we test on `x`, there are 2 overlapping candidate - /// sets: - /// - /// - If the outcome is that `x` is true, candidates 0, 1, and 3 - /// - If the outcome is that `x` is false, candidates 1 and 2 - /// - /// Here, the traditional "decision tree" method would generate 2 - /// separate code-paths for the 2 separate cases. - /// - /// In some cases, this duplication can create an exponential amount of - /// code. This is most easily seen by noticing that this method terminates - /// with precisely the reachable arms being reachable - but that problem - /// is trivially NP-complete: - /// - /// ```ignore (illustrative) - /// match (var0, var1, var2, var3, ...) { - /// (true , _ , _ , false, true, ...) => false, - /// (_ , true, true , false, _ , ...) => false, - /// (false, _ , false, false, _ , ...) => false, - /// ... - /// _ => true - /// } - /// ``` - /// - /// Here the last arm is reachable only if there is an assignment to - /// the variables that does not match any of the literals. Therefore, - /// compilation would take an exponential amount of time in some cases. - /// - /// That kind of exponential worst-case might not occur in practice, but - /// our simplistic treatment of constants and guards would make it occur - /// in very common situations - for example [#29740]: - /// - /// ```ignore (illustrative) - /// match x { - /// "foo" if foo_guard => ..., - /// "bar" if bar_guard => ..., - /// "baz" if baz_guard => ..., - /// ... - /// } - /// ``` - /// - /// [#29740]: https://github.com/rust-lang/rust/issues/29740 - /// - /// Here we first test the match-pair `x @ "foo"`, which is an [`Eq` test]. - /// - /// [`Eq` test]: TestKind::Eq - /// - /// It might seem that we would end up with 2 disjoint candidate - /// sets, consisting of the first candidate or the other two, but our - /// algorithm doesn't reason about `"foo"` being distinct from the other - /// constants; it considers the latter arms to potentially match after - /// both outcomes, which obviously leads to an exponential number - /// of tests. - /// - /// To avoid these kinds of problems, our algorithm tries to ensure - /// the amount of generated tests is linear. When we do a k-way test, - /// we return an additional "unmatched" set alongside the obvious `k` - /// sets. 
When we encounter a candidate that would be present in more - /// than one of the sets, we put it and all candidates below it into the - /// "unmatched" set. This ensures these `k+1` sets are disjoint. - /// - /// After we perform our test, we branch into the appropriate candidate - /// set and recurse with `match_candidates`. These sub-matches are - /// obviously non-exhaustive - as we discarded our otherwise set - so - /// we set their continuation to do `match_candidates` on the - /// "unmatched" set (which is again non-exhaustive). - /// - /// If you apply this to the above test, you basically wind up - /// with an if-else-if chain, testing each candidate in turn, - /// which is precisely what we want. - /// - /// In addition to avoiding exponential-time blowups, this algorithm - /// also has the nice property that each guard and arm is only generated - /// once. - fn test_candidates<'pat, 'b, 'c>( + fn pick_test( &mut self, - span: Span, - scrutinee_span: Span, - mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], - start_block: BasicBlock, - otherwise_block: BasicBlock, + candidates: &mut [&mut Candidate<'_, 'tcx>], fake_borrows: &mut Option>>, - ) { - // extract the match-pair from the highest priority candidate + ) -> (PlaceBuilder<'tcx>, Test<'tcx>) { + // Extract the match-pair from the highest priority candidate let match_pair = &candidates.first().unwrap().match_pairs[0]; let mut test = self.test(match_pair); let match_place = match_pair.place.clone(); - // most of the time, the test to perform is simply a function - // of the main candidate; but for a test like SwitchInt, we - // may want to add cases based on the candidates that are + debug!("test_candidates: test={:?} match_pair={:?}", test, match_pair); + // Most of the time, the test to perform is simply a function of the main candidate; but for + // a test like SwitchInt, we may want to add cases based on the candidates that are // available match test.kind { TestKind::SwitchInt { switch_ty: _, ref mut options } => { @@ -1697,20 +1611,58 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { fb.insert(resolved_place); } - // perform the test, branching to one of N blocks. For each of - // those N possible outcomes, create a (initially empty) - // vector of candidates. Those are the candidates that still - // apply if the test has that particular outcome. - debug!("test_candidates: test={:?} match_pair={:?}", test, match_pair); + (match_place, test) + } + + /// Given a test, we sort the input candidates into several buckets. If a candidate only matches + /// in one of the branches of `test`, we move it there. If it could match in more than one of + /// the branches of `test`, we stop sorting candidates. + /// + /// This returns a pair of + /// - the candidates that weren't sorted; + /// - for each possible outcome of the test, the candidates that match in that outcome. + /// + /// Moreover, we transform the branched candidates to reflect the fact that we know which + /// outcome of `test` occurred. + /// + /// For example: + /// ``` + /// # let (x, y, z) = (true, true, true); + /// match (x, y, z) { + /// (true , _ , true ) => true, // (0) + /// (false, false, _ ) => false, // (1) + /// (_ , true , _ ) => true, // (2) + /// (true , _ , false) => false, // (3) + /// } + /// # ; + /// ``` + /// + /// Assume we are testing on `x`. 
There are 2 overlapping candidate sets: + /// - If the outcome is that `x` is true, candidates 0, 2, and 3 + /// - If the outcome is that `x` is false, candidates 1 and 2 + /// + /// Following our algorithm, candidate 0 is sorted into outcome `x == true`, candidate 1 goes + /// into outcome `x == false`, and candidate 2 and 3 remain unsorted. + /// + /// The sorted candidates are transformed: + /// - candidate 0 becomes `[z @ true]` since we know that `x` was `true`; + /// - candidate 1 becomes `[y @ false]` since we know that `x` was `false`. + fn sort_candidates<'b, 'c, 'pat>( + &mut self, + match_place: &PlaceBuilder<'tcx>, + test: &Test<'tcx>, + mut candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], + ) -> (&'b mut [&'c mut Candidate<'pat, 'tcx>], Vec>>) { + // For each of the N possible outcomes, create a (initially empty) vector of candidates. + // Those are the candidates that apply if the test has that particular outcome. let mut target_candidates: Vec>> = vec![]; target_candidates.resize_with(test.targets(), Default::default); let total_candidate_count = candidates.len(); - // Sort the candidates into the appropriate vector in - // `target_candidates`. Note that at some point we may - // encounter a candidate where the test is not relevant; at - // that point, we stop sorting. + // Sort the candidates into the appropriate vector in `target_candidates`. Note that at some + // point we may encounter a candidate where the test is not relevant; at that point, we stop + // sorting. while let Some(candidate) = candidates.first_mut() { let Some(idx) = self.sort_candidate(&match_place, &test, candidate) else { break; @@ -1719,7 +1671,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { target_candidates[idx].push(candidate); candidates = rest; } - // at least the first candidate ought to be tested + + // At least the first candidate ought to be tested assert!( total_candidate_count > candidates.len(), "{total_candidate_count}, {candidates:#?}" @@ -1727,16 +1680,130 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("tested_candidates: {}", total_candidate_count - candidates.len()); debug!("untested_candidates: {}", candidates.len()); + (candidates, target_candidates) + } + + /// This is the most subtle part of the match lowering algorithm. At this point, the input + /// candidates have been fully simplified, so all remaining match-pairs require some sort of + /// test. + /// + /// Once we pick what sort of test we are going to perform, this test will help us winnow down + /// our candidates. So we walk over the candidates (from high to low priority) and check. We + /// compute, for each outcome of the test, a transformed list of candidates. If a candidate + /// matches in a single branch of our test, we add it to the corresponding outcome. We also + /// transform it to record the fact that we know which outcome occurred. + /// + /// For example, if we are testing `x.0`'s variant, and we have a candidate `(x.0 @ Some(v), x.1 + /// @ 22)`, then we would have a resulting candidate of `((x.0 as Some).0 @ v, x.1 @ 22)` in the + /// branch corresponding to `Some`. To ensure we make progress, we always pick a test that + /// results in simplifying the first candidate. + /// + /// But there may also be candidates that the test doesn't + /// apply to. 
The classical example is wildcards: + /// + /// ``` + /// # let (x, y, z) = (true, true, true); + /// match (x, y, z) { + /// (true , _ , true ) => true, // (0) + /// (false, false, _ ) => false, // (1) + /// (_ , true , _ ) => true, // (2) + /// (true , _ , false) => false, // (3) + /// } + /// # ; + /// ``` + /// + /// Here, the traditional "decision tree" method would generate 2 separate code-paths for the 2 + /// possible values of `x`. This would however duplicate some candidates, which would need to be + /// lowered several times. + /// + /// In some cases, this duplication can create an exponential amount of + /// code. This is most easily seen by noticing that this method terminates + /// with precisely the reachable arms being reachable - but that problem + /// is trivially NP-complete: + /// + /// ```ignore (illustrative) + /// match (var0, var1, var2, var3, ...) { + /// (true , _ , _ , false, true, ...) => false, + /// (_ , true, true , false, _ , ...) => false, + /// (false, _ , false, false, _ , ...) => false, + /// ... + /// _ => true + /// } + /// ``` + /// + /// Here the last arm is reachable only if there is an assignment to + /// the variables that does not match any of the literals. Therefore, + /// compilation would take an exponential amount of time in some cases. + /// + /// In rustc, we opt instead for the "backtracking automaton" approach. This guarantees we never + /// duplicate a candidate (except in the presence of or-patterns). In fact this guarantee is + /// ensured by the fact that we carry around `&mut Candidate`s which can't be duplicated. + /// + /// To make this work, whenever we decide to perform a test, if we encounter a candidate that + /// could match in more than one branch of the test, we stop. We generate code for the test and + /// for the candidates in its branches; the remaining candidates will be tested if the + /// candidates in the branches fail to match. + /// + /// For example, if we test on `x` in the following: + /// ``` + /// # fn foo((x, y, z): (bool, bool, bool)) -> u32 { + /// match (x, y, z) { + /// (true , _ , true ) => 0, + /// (false, false, _ ) => 1, + /// (_ , true , _ ) => 2, + /// (true , _ , false) => 3, + /// } + /// # } + /// ``` + /// this function generates code that looks more of less like: + /// ``` + /// # fn foo((x, y, z): (bool, bool, bool)) -> u32 { + /// if x { + /// match (y, z) { + /// (_, true) => return 0, + /// _ => {} // continue matching + /// } + /// } else { + /// match (y, z) { + /// (false, _) => return 1, + /// _ => {} // continue matching + /// } + /// } + /// // the block here is `remainder_start` + /// match (x, y, z) { + /// (_ , true , _ ) => 2, + /// (true , _ , false) => 3, + /// _ => unreachable!(), + /// } + /// # } + /// ``` + fn test_candidates<'pat, 'b, 'c>( + &mut self, + span: Span, + scrutinee_span: Span, + candidates: &'b mut [&'c mut Candidate<'pat, 'tcx>], + start_block: BasicBlock, + otherwise_block: BasicBlock, + fake_borrows: &mut Option>>, + ) { + // Extract the match-pair from the highest priority candidate and build a test from it. + let (match_place, test) = self.pick_test(candidates, fake_borrows); + + // For each of the N possible test outcomes, build the vector of candidates that applies if + // the test has that particular outcome. + let (remaining_candidates, target_candidates) = + self.sort_candidates(&match_place, &test, candidates); + // The block that we should branch to if none of the // `target_candidates` match. 
- let remainder_start = if !candidates.is_empty() { + let remainder_start = if !remaining_candidates.is_empty() { let remainder_start = self.cfg.start_new_block(); self.match_candidates( span, scrutinee_span, remainder_start, otherwise_block, - candidates, + remaining_candidates, fake_borrows, ); remainder_start From aa99d954faae0446664f82b0c3c597664845ca90 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Wed, 21 Feb 2024 08:40:02 -0300 Subject: [PATCH 082/134] Test windows random shims --- .../miri/tests/pass/shims/windows-rand.rs | 41 +++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 src/tools/miri/tests/pass/shims/windows-rand.rs diff --git a/src/tools/miri/tests/pass/shims/windows-rand.rs b/src/tools/miri/tests/pass/shims/windows-rand.rs new file mode 100644 index 0000000000000..e2bcb7bd7cb77 --- /dev/null +++ b/src/tools/miri/tests/pass/shims/windows-rand.rs @@ -0,0 +1,41 @@ +//@only-target-windows: this directly tests windows only random functions +use core::ffi::c_void; +use core::mem::size_of_val; +use core::ptr::null_mut; + +// Windows API definitions. +type NTSTATUS = i32; +type BOOLEAN = u8; +const BCRYPT_USE_SYSTEM_PREFERRED_RNG: u32 = 0x00000002; +const BCRYPT_RNG_ALG_HANDLE: *mut c_void = 0x81 as *mut c_void; +#[link(name = "bcrypt")] +extern "system" { + fn BCryptGenRandom( + halgorithm: *mut c_void, + pbbuffer: *mut u8, + cbbuffer: u32, + dwflags: u32, + ) -> NTSTATUS; +} +#[link(name = "advapi32")] +extern "system" { + #[link_name = "SystemFunction036"] + fn RtlGenRandom(RandomBuffer: *mut u8, RandomBufferLength: u32) -> BOOLEAN; +} + +fn main() { + let mut key = [0u8; 24]; + let len: u32 = size_of_val(&key).try_into().unwrap(); + let ret = unsafe { + BCryptGenRandom(null_mut(), key.as_mut_ptr(), len, BCRYPT_USE_SYSTEM_PREFERRED_RNG) + }; + // NTSTATUS codes use the high bit to indicate an error + assert!(ret >= 0); + + let ret = unsafe { BCryptGenRandom(BCRYPT_RNG_ALG_HANDLE, key.as_mut_ptr(), len, 0) }; + assert!(ret >= 0); + + let ret = unsafe { RtlGenRandom(key.as_mut_ptr(), len) }; + // RtlGenRandom returns a BOOLEAN where 0 indicates an error + assert_ne!(ret, 0); +} From 6edbc8d875987b25c7bc64c6c903611841645e5f Mon Sep 17 00:00:00 2001 From: Michael Goulet Date: Wed, 21 Feb 2024 19:05:45 +0000 Subject: [PATCH 083/134] Prevent cycle in implied predicates computation --- .../src/collect/predicates_of.rs | 30 ++++++++++++++----- .../implied-bounds-cycle.rs | 10 +++++++ .../implied-bounds-cycle.stderr | 17 +++++++++++ 3 files changed, 49 insertions(+), 8 deletions(-) create mode 100644 tests/ui/associated-type-bounds/implied-bounds-cycle.rs create mode 100644 tests/ui/associated-type-bounds/implied-bounds-cycle.stderr diff --git a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs index 351ac2eb7702e..f70bb8c4289f6 100644 --- a/compiler/rustc_hir_analysis/src/collect/predicates_of.rs +++ b/compiler/rustc_hir_analysis/src/collect/predicates_of.rs @@ -640,16 +640,30 @@ pub(super) fn implied_predicates_with_filter( // Now require that immediate supertraits are converted, which will, in // turn, reach indirect supertraits, so we detect cycles now instead of - // overflowing during elaboration. 
- if matches!(filter, PredicateFilter::SelfOnly) { - for &(pred, span) in implied_bounds { - debug!("superbound: {:?}", pred); - if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() - && bound.polarity == ty::ImplPolarity::Positive - { - tcx.at(span).super_predicates_of(bound.def_id()); + // overflowing during elaboration. Same for implied predicates, which + // make sure we walk into associated type bounds. + match filter { + PredicateFilter::SelfOnly => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).super_predicates_of(bound.def_id()); + } + } + } + PredicateFilter::SelfAndAssociatedTypeBounds => { + for &(pred, span) in implied_bounds { + debug!("superbound: {:?}", pred); + if let ty::ClauseKind::Trait(bound) = pred.kind().skip_binder() + && bound.polarity == ty::ImplPolarity::Positive + { + tcx.at(span).implied_predicates_of(bound.def_id()); + } } } + _ => {} } ty::GenericPredicates { parent: None, predicates: implied_bounds } diff --git a/tests/ui/associated-type-bounds/implied-bounds-cycle.rs b/tests/ui/associated-type-bounds/implied-bounds-cycle.rs new file mode 100644 index 0000000000000..785d47d479148 --- /dev/null +++ b/tests/ui/associated-type-bounds/implied-bounds-cycle.rs @@ -0,0 +1,10 @@ +#![feature(associated_type_bounds)] + +trait A { + type T; +} + +trait B: A {} +//~^ ERROR cycle detected when computing the implied predicates of `B` + +fn main() {} diff --git a/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr b/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr new file mode 100644 index 0000000000000..1c1c64ea5f5ec --- /dev/null +++ b/tests/ui/associated-type-bounds/implied-bounds-cycle.stderr @@ -0,0 +1,17 @@ +error[E0391]: cycle detected when computing the implied predicates of `B` + --> $DIR/implied-bounds-cycle.rs:7:15 + | +LL | trait B: A {} + | ^ + | + = note: ...which immediately requires computing the implied predicates of `B` again +note: cycle used when computing normalized predicates of `B` + --> $DIR/implied-bounds-cycle.rs:7:1 + | +LL | trait B: A {} + | ^^^^^^^^^^^^^^^^ + = note: see https://rustc-dev-guide.rust-lang.org/overview.html#queries and https://rustc-dev-guide.rust-lang.org/query.html for more information + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0391`. 
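
The cycle reported in the new test above comes from what an associated type bound stands for: a bound like `A<T: B>` is shorthand for the trait bound plus an extra predicate on the projection, so the implied predicates of `B` end up mentioning `B` itself. The sketch below only illustrates that shorthand in the easier-to-check function position; the function `f`, the type `S`, and the `Default` bound are made-up names for illustration and are not taken from the patch, and the sugared line needs `#![feature(associated_type_bounds)]` on nightly.

```rust
trait A {
    type T;
}

// Sugared form (nightly, `#![feature(associated_type_bounds)]`):
//     fn f<X: A<T: Default>>(_: X) {}
// Hand-desugared form (stable): the bound moves onto the projection.
fn f<X: A>(_: X)
where
    <X as A>::T: Default,
{
}

struct S;

impl A for S {
    type T = u8;
}

fn main() {
    f(S); // ok: `<S as A>::T` is `u8`, and `u8: Default` holds
}
```

In supertrait position the same shorthand turns `trait B: A<T: B>` into roughly `Self: A` plus `<Self as A>::T: B`, so computing the implied predicates of `B` has to consult the implied predicates of `B` again; that is exactly the query cycle the added `implied-bounds-cycle.stderr` output reports.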
From 3197aee8b31faa200081143c9d353119400759df Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:08:49 +0300 Subject: [PATCH 084/134] support `no-op` compression profile in rust-installer Signed-off-by: onur-ozkan --- src/tools/rust-installer/src/compression.rs | 11 ++++++++++- src/tools/rust-installer/src/main.rs | 2 +- src/tools/rust-installer/src/tarballer.rs | 4 ++++ 3 files changed, 15 insertions(+), 2 deletions(-) diff --git a/src/tools/rust-installer/src/compression.rs b/src/tools/rust-installer/src/compression.rs index 902b2ec690784..4e840dbfbb441 100644 --- a/src/tools/rust-installer/src/compression.rs +++ b/src/tools/rust-installer/src/compression.rs @@ -1,11 +1,12 @@ use anyhow::{Context, Error}; use flate2::{read::GzDecoder, write::GzEncoder}; use rayon::prelude::*; -use std::{convert::TryFrom, fmt, io::Read, io::Write, path::Path, str::FromStr}; +use std::{fmt, io::Read, io::Write, path::Path, str::FromStr}; use xz2::{read::XzDecoder, write::XzEncoder}; #[derive(Default, Debug, Copy, Clone)] pub enum CompressionProfile { + NoOp, Fast, #[default] Balanced, @@ -20,6 +21,7 @@ impl FromStr for CompressionProfile { "fast" => Self::Fast, "balanced" => Self::Balanced, "best" => Self::Best, + "no-op" => Self::NoOp, other => anyhow::bail!("invalid compression profile: {other}"), }) } @@ -31,6 +33,7 @@ impl fmt::Display for CompressionProfile { CompressionProfile::Fast => f.write_str("fast"), CompressionProfile::Balanced => f.write_str("balanced"), CompressionProfile::Best => f.write_str("best"), + CompressionProfile::NoOp => f.write_str("no-op"), } } } @@ -78,10 +81,16 @@ impl CompressionFormat { CompressionProfile::Fast => flate2::Compression::fast(), CompressionProfile::Balanced => flate2::Compression::new(6), CompressionProfile::Best => flate2::Compression::best(), + CompressionProfile::NoOp => panic!( + "compression profile 'no-op' should not call `CompressionFormat::encode`." + ), }, )), CompressionFormat::Xz => { let encoder = match profile { + CompressionProfile::NoOp => panic!( + "compression profile 'no-op' should not call `CompressionFormat::encode`." + ), CompressionProfile::Fast => { xz2::stream::MtStreamBuilder::new().threads(6).preset(1).encoder().unwrap() } diff --git a/src/tools/rust-installer/src/main.rs b/src/tools/rust-installer/src/main.rs index 99acecdd43c35..efb4c5bcb83f9 100644 --- a/src/tools/rust-installer/src/main.rs +++ b/src/tools/rust-installer/src/main.rs @@ -1,5 +1,5 @@ use anyhow::{Context, Result}; -use clap::{self, Parser}; +use clap::Parser; #[derive(Parser)] struct CommandLine { diff --git a/src/tools/rust-installer/src/tarballer.rs b/src/tools/rust-installer/src/tarballer.rs index 7572dc6dcf885..e5a925b2cbf2d 100644 --- a/src/tools/rust-installer/src/tarballer.rs +++ b/src/tools/rust-installer/src/tarballer.rs @@ -38,6 +38,10 @@ actor! 
{ impl Tarballer { /// Generates the actual tarballs pub fn run(self) -> Result<()> { + if let CompressionProfile::NoOp = self.compression_profile { + return Ok(()); + } + let tarball_name = self.output.clone() + ".tar"; let encoder = CombinedEncoder::new( self.compression_formats From 94597e85cfd2df40b467f087a12c8f727813b947 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:09:12 +0300 Subject: [PATCH 085/134] force dist.compression-profile = "no-op" for `x install` Signed-off-by: onur-ozkan --- src/bootstrap/src/utils/tarball.rs | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/src/bootstrap/src/utils/tarball.rs b/src/bootstrap/src/utils/tarball.rs index 573d923ed8fdd..a14dfd1ca1234 100644 --- a/src/bootstrap/src/utils/tarball.rs +++ b/src/bootstrap/src/utils/tarball.rs @@ -3,8 +3,8 @@ use std::{ process::Command, }; -use crate::core::build_steps::dist::distdir; use crate::core::builder::Builder; +use crate::core::{build_steps::dist::distdir, builder::Kind}; use crate::utils::channel; use crate::utils::helpers::t; @@ -325,7 +325,22 @@ impl<'a> Tarball<'a> { assert!(!formats.is_empty(), "dist.compression-formats can't be empty"); cmd.arg("--compression-formats").arg(formats.join(",")); } - cmd.args(["--compression-profile", &self.builder.config.dist_compression_profile]); + + // For `x install` tarball files aren't needed, so we can speed up the process by not producing them. + let compression_profile = if self.builder.kind == Kind::Install { + self.builder.verbose("Forcing dist.compression-profile = 'no-op' for `x install`."); + // "no-op" indicates that the rust-installer won't produce compressed tarball sources. + "no-op" + } else { + assert!( + self.builder.config.dist_compression_profile != "no-op", + "dist.compression-profile = 'no-op' can only be used for `x install`" + ); + + &self.builder.config.dist_compression_profile + }; + + cmd.args(&["--compression-profile", compression_profile]); self.builder.run(&mut cmd); // Ensure there are no symbolic links in the tarball. 
In particular, From a13ec8d00396ac6f5a3f285f8fcd95a2ab6c8824 Mon Sep 17 00:00:00 2001 From: onur-ozkan Date: Thu, 22 Feb 2024 01:33:06 +0300 Subject: [PATCH 086/134] add changelog entry Signed-off-by: onur-ozkan --- src/bootstrap/src/utils/change_tracker.rs | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/bootstrap/src/utils/change_tracker.rs b/src/bootstrap/src/utils/change_tracker.rs index b813d82ca6f53..9a50ad4437e73 100644 --- a/src/bootstrap/src/utils/change_tracker.rs +++ b/src/bootstrap/src/utils/change_tracker.rs @@ -131,4 +131,9 @@ pub const CONFIG_CHANGE_HISTORY: &[ChangeInfo] = &[ severity: ChangeSeverity::Warning, summary: "The \"codegen\"/\"llvm\" profile has been removed and replaced with \"compiler\", use it instead for the same behavior.", }, + ChangeInfo { + change_id: 118724, + severity: ChangeSeverity::Info, + summary: "`x install` now skips providing tarball sources (under 'build/dist' path) to speed up the installation process.", + }, ]; From f5ec4cb37561743efd869d61e457ae39938c1680 Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Thu, 22 Feb 2024 04:54:42 +0000 Subject: [PATCH 087/134] Preparing for merge from rustc --- src/tools/miri/rust-version | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/miri/rust-version b/src/tools/miri/rust-version index 715917e0f2fbb..02ab748c44722 100644 --- a/src/tools/miri/rust-version +++ b/src/tools/miri/rust-version @@ -1 +1 @@ -d5735645753e990a72446094f703df9b5e421555 +c5f69bdd5173a948e0131f934fa7c4cbf5e0b55f From 6f3bc7d938f5a8a772ba67cf41aad16a03b9fb1c Mon Sep 17 00:00:00 2001 From: The Miri Conjob Bot Date: Thu, 22 Feb 2024 05:03:17 +0000 Subject: [PATCH 088/134] fmt --- src/tools/miri/tests/pass-dep/shims/mmap.rs | 5 +++-- src/tools/miri/tests/pass/slices.rs | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/src/tools/miri/tests/pass-dep/shims/mmap.rs b/src/tools/miri/tests/pass-dep/shims/mmap.rs index 0cbe8d942946d..5acdedc67bf71 100644 --- a/src/tools/miri/tests/pass-dep/shims/mmap.rs +++ b/src/tools/miri/tests/pass-dep/shims/mmap.rs @@ -155,8 +155,9 @@ fn test_mremap() { // Test all of our error conditions // Not aligned - let ptr = - unsafe { libc::mremap(ptr::without_provenance_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) }; + let ptr = unsafe { + libc::mremap(ptr::without_provenance_mut(1), page_size, page_size, libc::MREMAP_MAYMOVE) + }; assert_eq!(ptr, libc::MAP_FAILED); assert_eq!(Error::last_os_error().raw_os_error().unwrap(), libc::EINVAL); diff --git a/src/tools/miri/tests/pass/slices.rs b/src/tools/miri/tests/pass/slices.rs index d30ca96ea41cc..0b9805681b494 100644 --- a/src/tools/miri/tests/pass/slices.rs +++ b/src/tools/miri/tests/pass/slices.rs @@ -29,7 +29,8 @@ fn slice_of_zst() { // In a slice of zero-size elements the pointer is meaningless. // Ensure iteration still works even if the pointer is at the end of the address space. 
- let slice: &[()] = unsafe { slice::from_raw_parts(ptr::without_provenance(-5isize as usize), 10) }; + let slice: &[()] = + unsafe { slice::from_raw_parts(ptr::without_provenance(-5isize as usize), 10) }; assert_eq!(slice.len(), 10); assert_eq!(slice.iter().count(), 10); From f89d17b4269fa5ed589b7a87824382edd0a9eea2 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 10:42:30 +0100 Subject: [PATCH 089/134] Remove ops_salsa_runtime_mut, replace it with direct synthetic_write API --- crates/ide-db/src/apply_change.rs | 2 +- crates/salsa/salsa-macros/src/database_storage.rs | 4 ++-- crates/salsa/src/lib.rs | 13 +++++++++---- crates/salsa/src/plumbing.rs | 11 +++++++++-- crates/salsa/tests/incremental/memoized_volatile.rs | 4 ++-- crates/salsa/tests/on_demand_inputs.rs | 4 ++-- crates/salsa/tests/storage_varieties/tests.rs | 4 ++-- 7 files changed, 27 insertions(+), 15 deletions(-) diff --git a/crates/ide-db/src/apply_change.rs b/crates/ide-db/src/apply_change.rs index 1a214ef0bf564..2b2df144d6dd1 100644 --- a/crates/ide-db/src/apply_change.rs +++ b/crates/ide-db/src/apply_change.rs @@ -17,7 +17,7 @@ impl RootDatabase { pub fn request_cancellation(&mut self) { let _p = tracing::span!(tracing::Level::INFO, "RootDatabase::request_cancellation").entered(); - self.salsa_runtime_mut().synthetic_write(Durability::LOW); + self.synthetic_write(Durability::LOW); } pub fn apply_change(&mut self, change: Change) { diff --git a/crates/salsa/salsa-macros/src/database_storage.rs b/crates/salsa/salsa-macros/src/database_storage.rs index 0ec75bb043dbe..223da9b5290fa 100644 --- a/crates/salsa/salsa-macros/src/database_storage.rs +++ b/crates/salsa/salsa-macros/src/database_storage.rs @@ -154,8 +154,8 @@ pub(crate) fn database(args: TokenStream, input: TokenStream) -> TokenStream { self.#db_storage_field.salsa_runtime() } - fn ops_salsa_runtime_mut(&mut self) -> &mut salsa::Runtime { - self.#db_storage_field.salsa_runtime_mut() + fn synthetic_write(&mut self, durability: salsa::Durability) { + self.#db_storage_field.salsa_runtime_mut().synthetic_write(durability) } fn fmt_index( diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 48b5d633bd672..98b3a48e37ca6 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -96,11 +96,16 @@ pub trait Database: plumbing::DatabaseOps { self.ops_salsa_runtime() } - /// Gives access to the underlying salsa runtime. + /// A "synthetic write" causes the system to act *as though* some + /// input of durability `durability` has changed. This is mostly + /// useful for profiling scenarios. /// - /// This method should not be overridden by `Database` implementors. - fn salsa_runtime_mut(&mut self) -> &mut Runtime { - self.ops_salsa_runtime_mut() + /// **WARNING:** Just like an ordinary write, this method triggers + /// cancellation. If you invoke it while a snapshot exists, it + /// will block until that snapshot is dropped -- if that snapshot + /// is owned by the current thread, this could trigger deadlock. + fn synthetic_write(&mut self, durability: Durability) { + plumbing::DatabaseOps::synthetic_write(self, durability) } } diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs index 71332e39cadbb..b8df87fd5e5ca 100644 --- a/crates/salsa/src/plumbing.rs +++ b/crates/salsa/src/plumbing.rs @@ -38,8 +38,15 @@ pub trait DatabaseOps { /// Gives access to the underlying salsa runtime. fn ops_salsa_runtime(&self) -> &Runtime; - /// Gives access to the underlying salsa runtime. 
- fn ops_salsa_runtime_mut(&mut self) -> &mut Runtime; + /// A "synthetic write" causes the system to act *as though* some + /// input of durability `durability` has changed. This is mostly + /// useful for profiling scenarios. + /// + /// **WARNING:** Just like an ordinary write, this method triggers + /// cancellation. If you invoke it while a snapshot exists, it + /// will block until that snapshot is dropped -- if that snapshot + /// is owned by the current thread, this could trigger deadlock. + fn synthetic_write(&mut self, durability: Durability); /// Formats a database key index in a human readable fashion. fn fmt_index( diff --git a/crates/salsa/tests/incremental/memoized_volatile.rs b/crates/salsa/tests/incremental/memoized_volatile.rs index 6dc5030063b78..3dcc32eece373 100644 --- a/crates/salsa/tests/incremental/memoized_volatile.rs +++ b/crates/salsa/tests/incremental/memoized_volatile.rs @@ -58,7 +58,7 @@ fn revalidate() { // Second generation: volatile will change (to 1) but memoized1 // will not (still 0, as 1/2 = 0) - query.salsa_runtime_mut().synthetic_write(Durability::LOW); + query.synthetic_write(Durability::LOW); query.memoized2(); query.assert_log(&["Volatile invoked", "Memoized1 invoked"]); query.memoized2(); @@ -67,7 +67,7 @@ fn revalidate() { // Third generation: volatile will change (to 2) and memoized1 // will too (to 1). Therefore, after validating that Memoized1 // changed, we now invoke Memoized2. - query.salsa_runtime_mut().synthetic_write(Durability::LOW); + query.synthetic_write(Durability::LOW); query.memoized2(); query.assert_log(&["Volatile invoked", "Memoized1 invoked", "Memoized2 invoked"]); diff --git a/crates/salsa/tests/on_demand_inputs.rs b/crates/salsa/tests/on_demand_inputs.rs index 5d0e4866442e5..677d633ee7cc0 100644 --- a/crates/salsa/tests/on_demand_inputs.rs +++ b/crates/salsa/tests/on_demand_inputs.rs @@ -111,7 +111,7 @@ fn on_demand_input_durability() { } "#]].assert_debug_eq(&events); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); + db.synthetic_write(Durability::LOW); events.replace(vec![]); assert_eq!(db.c(1), 10); assert_eq!(db.c(2), 20); @@ -128,7 +128,7 @@ fn on_demand_input_durability() { } "#]].assert_debug_eq(&events); - db.salsa_runtime_mut().synthetic_write(Durability::HIGH); + db.synthetic_write(Durability::HIGH); events.replace(vec![]); assert_eq!(db.c(1), 10); assert_eq!(db.c(2), 20); diff --git a/crates/salsa/tests/storage_varieties/tests.rs b/crates/salsa/tests/storage_varieties/tests.rs index f75c7c142febe..8e2f9b03cb9a3 100644 --- a/crates/salsa/tests/storage_varieties/tests.rs +++ b/crates/salsa/tests/storage_varieties/tests.rs @@ -20,7 +20,7 @@ fn volatile_twice() { let v2 = db.volatile(); // volatiles are cached, so 2nd read returns the same assert_eq!(v1, v2); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + db.synthetic_write(Durability::LOW); // clears volatile caches let v3 = db.volatile(); // will re-increment the counter let v4 = db.volatile(); // second call will be cached @@ -40,7 +40,7 @@ fn intermingled() { assert_eq!(v1, v3); assert_eq!(v2, v4); - db.salsa_runtime_mut().synthetic_write(Durability::LOW); // clears volatile caches + db.synthetic_write(Durability::LOW); // clears volatile caches let v5 = db.memoized(); // re-executes volatile, caches new result let v6 = db.memoized(); // re-use cached result From 7485392fbffb9acfaaeeb618e32ed83c9ee0be1b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=AE=B8=E6=9D=B0=E5=8F=8B=20Jieyou=20Xu=20=28Joe=29?= Date: Thu, 22 
Feb 2024 18:55:02 +0000 Subject: [PATCH 090/134] Ignore compiletest test directive migration commits --- .git-blame-ignore-revs | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index d23682596fd3f..663ace48e9e80 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -20,3 +20,6 @@ f97fddab91fbf290ea5b691fe355d6f915220b6e cc907f80b95c6ec530c5ee1b05b044a468f07eca # format let-chains b2d2184edea578109a48ec3d8decbee5948e8f35 +# test directives migration +6e48b96692d63a79a14563f27fe5185f122434f8 +ec2cc761bc7067712ecc7734502f703fe3b024c8 From 5e6da720f621308a0f44e7c6bbd13a9fad68b240 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Esteban=20K=C3=BCber?= Date: Thu, 22 Feb 2024 06:15:55 +0000 Subject: [PATCH 091/134] Account for RPITIT in E0310 explicit lifetime constraint suggestion When given ```rust trait Original { fn f() -> impl Fn(); } trait Erased { fn f(&self) -> Box; } impl Erased for T { fn f(&self) -> Box { Box::new(::f()) } } ``` avoid suggestion to restrict the `Trait::{opaque}` type in a `where` clause: ``` error[E0310]: the associated type `::{opaque#0}` may not live long enough --> $DIR/missing-static-bound-from-impl.rs:11:9 | LL | Box::new(::f()) | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | | | the associated type `::{opaque#0}` must be valid for the static lifetime... | ...so that the type `impl Fn()` will meet its required lifetime bounds ``` CC #119773. --- .../rustc_infer/src/infer/error_reporting/mod.rs | 8 ++++++++ .../impl-trait/in-trait/async-and-ret-ref.stderr | 2 -- .../in-trait/missing-static-bound-from-impl.rs | 16 ++++++++++++++++ .../missing-static-bound-from-impl.stderr | 12 ++++++++++++ 4 files changed, 36 insertions(+), 2 deletions(-) create mode 100644 tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.rs create mode 100644 tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.stderr diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index 505d56cf49179..470b97b777835 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -2434,6 +2434,14 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { let suggestion = if has_lifetimes { format!(" + {lt_name}") } else { format!(": {lt_name}") }; suggs.push((sp, suggestion)) + } else if let GenericKind::Alias(ref p) = bound_kind + && let ty::Projection = p.kind(self.tcx) + && let DefKind::AssocTy = self.tcx.def_kind(p.def_id) + && let Some(ty::ImplTraitInTraitData::Trait { .. }) = + self.tcx.opt_rpitit_info(p.def_id) + { + // The lifetime found in the `impl` is longer than the one on the RPITIT. + // Do not suggest `::{opaque}: 'static`. } else if let Some(generics) = self.tcx.hir().get_generics(suggestion_scope) { let pred = format!("{bound_kind}: {lt_name}"); let suggestion = format!("{} {}", generics.add_where_or_trailing_comma(), pred); diff --git a/tests/ui/impl-trait/in-trait/async-and-ret-ref.stderr b/tests/ui/impl-trait/in-trait/async-and-ret-ref.stderr index 79a86b0a3aedf..15aa3cf54bbe5 100644 --- a/tests/ui/impl-trait/in-trait/async-and-ret-ref.stderr +++ b/tests/ui/impl-trait/in-trait/async-and-ret-ref.stderr @@ -6,8 +6,6 @@ LL | async fn foo() -> &'static impl T; | | | the associated type `::{opaque#0}` must be valid for the static lifetime... | ...so that the reference type `&'static impl T` does not outlive the data it points at - | - = help: consider adding an explicit lifetime bound `::{opaque#0}: 'static`... 
error: aborting due to 1 previous error diff --git a/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.rs b/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.rs new file mode 100644 index 0000000000000..a36799c3ebd46 --- /dev/null +++ b/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.rs @@ -0,0 +1,16 @@ +trait Original { + fn f() -> impl Fn(); +} + +trait Erased { + fn f(&self) -> Box; +} + +impl Erased for T { + fn f(&self) -> Box { + Box::new(::f()) + //~^ ERROR the associated type `::{opaque#0}` may not live long enough + } +} + +fn main () {} diff --git a/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.stderr b/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.stderr new file mode 100644 index 0000000000000..5ec0ee38347aa --- /dev/null +++ b/tests/ui/impl-trait/in-trait/missing-static-bound-from-impl.stderr @@ -0,0 +1,12 @@ +error[E0310]: the associated type `::{opaque#0}` may not live long enough + --> $DIR/missing-static-bound-from-impl.rs:11:9 + | +LL | Box::new(::f()) + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + | | + | the associated type `::{opaque#0}` must be valid for the static lifetime... + | ...so that the type `impl Fn()` will meet its required lifetime bounds + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0310`. From cdfb73ab9c702be655a0164d79eb0ca0a8942384 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 22:25:55 +0100 Subject: [PATCH 092/134] fix: Fix proc-macro server not accounting for string delimiters correctly --- crates/proc-macro-srv/src/proc_macros.rs | 6 +-- crates/proc-macro-srv/src/server.rs | 9 +++- .../src/server/rust_analyzer_span.rs | 43 ++++++++++-------- crates/proc-macro-srv/src/server/token_id.rs | 45 +++++++++++-------- .../proc-macro-srv/src/server/token_stream.rs | 11 ++--- crates/proc-macro-srv/src/tests/mod.rs | 26 ++++++++--- 6 files changed, 85 insertions(+), 55 deletions(-) diff --git a/crates/proc-macro-srv/src/proc_macros.rs b/crates/proc-macro-srv/src/proc_macros.rs index 3fe968c81ca12..686d5b0438aa9 100644 --- a/crates/proc-macro-srv/src/proc_macros.rs +++ b/crates/proc-macro-srv/src/proc_macros.rs @@ -64,7 +64,7 @@ impl ProcMacros { &bridge::server::SameThread, S::make_server(call_site, def_site, mixed_site), parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) @@ -75,7 +75,7 @@ impl ProcMacros { &bridge::server::SameThread, S::make_server(call_site, def_site, mixed_site), parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) @@ -87,7 +87,7 @@ impl ProcMacros { S::make_server(call_site, def_site, mixed_site), parsed_attributes, parsed_body, - false, + cfg!(debug_assertions), ); return res .map(|it| it.into_subtree(call_site)) diff --git a/crates/proc-macro-srv/src/server.rs b/crates/proc-macro-srv/src/server.rs index ff8fd295d884a..5a814e23e7af2 100644 --- a/crates/proc-macro-srv/src/server.rs +++ b/crates/proc-macro-srv/src/server.rs @@ -93,7 +93,14 @@ impl LiteralFormatter { let hashes = get_hashes_str(n); f(&["br", hashes, "\"", symbol, "\"", hashes, suffix]) } - _ => f(&[symbol, suffix]), + bridge::LitKind::CStr => f(&["c\"", symbol, "\"", suffix]), + bridge::LitKind::CStrRaw(n) => { + let hashes = get_hashes_str(n); + f(&["cr", hashes, "\"", symbol, "\"", hashes, suffix]) + } + bridge::LitKind::Integer | bridge::LitKind::Float | bridge::LitKind::ErrWithGuar => { + f(&[symbol, suffix]) + } }) } diff --git 
a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs index c6a0a6665553f..e0d708559dbf4 100644 --- a/crates/proc-macro-srv/src/server/rust_analyzer_span.rs +++ b/crates/proc-macro-srv/src/server/rust_analyzer_span.rs @@ -97,22 +97,33 @@ impl server::FreeFunctions for RaSpanServer { } let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; - let kind = match kind { - LiteralKind::Int { .. } => LitKind::Integer, - LiteralKind::Float { .. } => LitKind::Float, - LiteralKind::Char { .. } => LitKind::Char, - LiteralKind::Byte { .. } => LitKind::Byte, - LiteralKind::Str { .. } => LitKind::Str, - LiteralKind::ByteStr { .. } => LitKind::ByteStr, - LiteralKind::CStr { .. } => LitKind::CStr, - LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), - LiteralKind::RawByteStr { n_hashes } => { - LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) - } - LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + let (kind, start_offset, end_offset) = match kind { + LiteralKind::Int { .. } => (LitKind::Integer, 0, 0), + LiteralKind::Float { .. } => (LitKind::Float, 0, 0), + LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize), + LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize), + LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize), + LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize), + LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize), + LiteralKind::RawStr { n_hashes } => ( + LitKind::StrRaw(n_hashes.unwrap_or_default()), + 2 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawByteStr { n_hashes } => ( + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawCStr { n_hashes } => ( + LitKind::CStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), }; let (lit, suffix) = s.split_at(suffix_start as usize); + let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, suffix => Some(Symbol::intern(self.interner, suffix)), @@ -248,12 +259,8 @@ impl server::TokenStream for RaSpanServer { } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, span: lit.span, + ..server::FreeFunctions::literal_from_str(self, &lit.text).unwrap() }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { diff --git a/crates/proc-macro-srv/src/server/token_id.rs b/crates/proc-macro-srv/src/server/token_id.rs index 7e9d8057ac9a5..d1622ab026ba4 100644 --- a/crates/proc-macro-srv/src/server/token_id.rs +++ b/crates/proc-macro-srv/src/server/token_id.rs @@ -89,22 +89,34 @@ impl server::FreeFunctions for TokenIdServer { } let TokenKind::Literal { kind, suffix_start } = lit.kind else { return Err(()) }; - let kind = match kind { - LiteralKind::Int { .. } => LitKind::Integer, - LiteralKind::Float { .. } => LitKind::Float, - LiteralKind::Char { .. } => LitKind::Char, - LiteralKind::Byte { .. } => LitKind::Byte, - LiteralKind::Str { .. } => LitKind::Str, - LiteralKind::ByteStr { .. 
} => LitKind::ByteStr, - LiteralKind::CStr { .. } => LitKind::CStr, - LiteralKind::RawStr { n_hashes } => LitKind::StrRaw(n_hashes.unwrap_or_default()), - LiteralKind::RawByteStr { n_hashes } => { - LitKind::ByteStrRaw(n_hashes.unwrap_or_default()) - } - LiteralKind::RawCStr { n_hashes } => LitKind::CStrRaw(n_hashes.unwrap_or_default()), + + let (kind, start_offset, end_offset) = match kind { + LiteralKind::Int { .. } => (LitKind::Integer, 0, 0), + LiteralKind::Float { .. } => (LitKind::Float, 0, 0), + LiteralKind::Char { terminated } => (LitKind::Char, 1, terminated as usize), + LiteralKind::Byte { terminated } => (LitKind::Byte, 2, terminated as usize), + LiteralKind::Str { terminated } => (LitKind::Str, 1, terminated as usize), + LiteralKind::ByteStr { terminated } => (LitKind::ByteStr, 2, terminated as usize), + LiteralKind::CStr { terminated } => (LitKind::CStr, 2, terminated as usize), + LiteralKind::RawStr { n_hashes } => ( + LitKind::StrRaw(n_hashes.unwrap_or_default()), + 2 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawByteStr { n_hashes } => ( + LitKind::ByteStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), + LiteralKind::RawCStr { n_hashes } => ( + LitKind::CStrRaw(n_hashes.unwrap_or_default()), + 3 + n_hashes.unwrap_or_default() as usize, + 1 + n_hashes.unwrap_or_default() as usize, + ), }; let (lit, suffix) = s.split_at(suffix_start as usize); + let lit = &lit[start_offset..lit.len() - end_offset]; let suffix = match suffix { "" | "_" => None, suffix => Some(Symbol::intern(self.interner, suffix)), @@ -233,12 +245,9 @@ impl server::TokenStream for TokenIdServer { } tt::TokenTree::Leaf(tt::Leaf::Literal(lit)) => { bridge::TokenTree::Literal(bridge::Literal { - // FIXME: handle literal kinds - kind: bridge::LitKind::Integer, // dummy - symbol: Symbol::intern(self.interner, &lit.text), - // FIXME: handle suffixes - suffix: None, span: lit.span, + ..server::FreeFunctions::literal_from_str(self, &lit.text) + .unwrap_or_else(|_| panic!("`{}`", lit.text)) }) } tt::TokenTree::Leaf(tt::Leaf::Punct(punct)) => { diff --git a/crates/proc-macro-srv/src/server/token_stream.rs b/crates/proc-macro-srv/src/server/token_stream.rs index 5edaa720fc7ec..408db60e872be 100644 --- a/crates/proc-macro-srv/src/server/token_stream.rs +++ b/crates/proc-macro-srv/src/server/token_stream.rs @@ -115,8 +115,6 @@ pub(super) mod token_stream { } } - type LexError = String; - /// Attempts to break the string into tokens and parse those tokens into a token stream. /// May fail for a number of reasons, for example, if the string contains unbalanced delimiters /// or characters not existing in the language. @@ -124,13 +122,10 @@ pub(super) mod token_stream { /// /// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to /// change these errors into `LexError`s later. 
- #[rustfmt::skip] - impl /*FromStr for*/ TokenStream { - // type Err = LexError; - - pub(crate) fn from_str(src: &str, call_site: S) -> Result, LexError> { + impl TokenStream { + pub(crate) fn from_str(src: &str, call_site: S) -> Result, String> { let subtree = - mbe::parse_to_token_tree_static_span(call_site, src).ok_or("Failed to parse from mbe")?; + mbe::parse_to_token_tree_static_span(call_site, src).ok_or("lexing error")?; Ok(TokenStream::with_subtree(subtree)) } diff --git a/crates/proc-macro-srv/src/tests/mod.rs b/crates/proc-macro-srv/src/tests/mod.rs index e5bfe5ee92cd8..54a20357d2629 100644 --- a/crates/proc-macro-srv/src/tests/mod.rs +++ b/crates/proc-macro-srv/src/tests/mod.rs @@ -169,7 +169,7 @@ fn test_fn_like_mk_idents() { fn test_fn_like_macro_clone_literals() { assert_expand( "fn_like_clone_tokens", - r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##"###, + r###"1u16, 2_u32, -4i64, 3.14f32, "hello bridge", "suffixed"suffix, r##"raw"##, 'a', b'b', c"null""###, expect![[r###" SUBTREE $$ 1 1 LITERAL 1u16 1 @@ -181,11 +181,17 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] 1 LITERAL 3.14f32 1 PUNCH , [alone] 1 - LITERAL ""hello bridge"" 1 + LITERAL "hello bridge" 1 PUNCH , [alone] 1 - LITERAL ""suffixed""suffix 1 + LITERAL "suffixed"suffix 1 PUNCH , [alone] 1 - LITERAL r##"r##"raw"##"## 1"###]], + LITERAL r##"raw"## 1 + PUNCH , [alone] 1 + LITERAL 'a' 1 + PUNCH , [alone] 1 + LITERAL b'b' 1 + PUNCH , [alone] 1 + LITERAL c"null" 1"###]], expect![[r###" SUBTREE $$ SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } SpanData { range: 0..100, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 1u16 SpanData { range: 0..4, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } @@ -197,11 +203,17 @@ fn test_fn_like_macro_clone_literals() { PUNCH , [alone] SpanData { range: 18..19, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } LITERAL 3.14f32 SpanData { range: 20..27, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 27..28, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL ""hello bridge"" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "hello bridge" SpanData { range: 29..43, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 43..44, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL ""suffixed""suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL "suffixed"suffix SpanData { range: 45..61, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } PUNCH , [alone] SpanData { range: 61..62, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } - LITERAL r##"r##"raw"##"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], + LITERAL r##"raw"## SpanData { range: 63..73, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 73..74, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL 'a' SpanData { range: 75..78, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 78..79, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL b'b' SpanData { range: 80..84, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + PUNCH , [alone] SpanData { range: 84..85, anchor: 
SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) } + LITERAL c"null" SpanData { range: 86..93, anchor: SpanAnchor(FileId(42), 2), ctx: SyntaxContextId(0) }"###]], ); } From efa6948b577f702eda9c85a1adf26ddc327f0261 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 22:32:39 +0100 Subject: [PATCH 093/134] Fix rust-analyzer not enabling rust-analyzer spans on the proc-macro server --- crates/proc-macro-api/src/process.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/crates/proc-macro-api/src/process.rs b/crates/proc-macro-api/src/process.rs index 12eafcea442d3..72f95643c8b5e 100644 --- a/crates/proc-macro-api/src/process.rs +++ b/crates/proc-macro-api/src/process.rs @@ -45,7 +45,7 @@ impl ProcMacroProcessSrv { }) }; let mut srv = create_srv(true)?; - tracing::info!("sending version check"); + tracing::info!("sending proc-macro server version check"); match srv.version_check() { Ok(v) if v > CURRENT_API_VERSION => Err(io::Error::new( io::ErrorKind::Other, @@ -55,14 +55,15 @@ impl ProcMacroProcessSrv { ), )), Ok(v) => { - tracing::info!("got version {v}"); + tracing::info!("Proc-macro server version: {v}"); srv = create_srv(false)?; srv.version = v; - if srv.version > RUST_ANALYZER_SPAN_SUPPORT { + if srv.version >= RUST_ANALYZER_SPAN_SUPPORT { if let Ok(mode) = srv.enable_rust_analyzer_spans() { srv.mode = mode; } } + tracing::info!("Proc-macro server span mode: {:?}", srv.mode); Ok(srv) } Err(e) => { From 9b7284dca7690d3ffdecf3c9eb6e88afe07ec01a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 10:10:19 +0100 Subject: [PATCH 094/134] fix: Fix deadlock in recreate_crate_graph <-> file_line_index --- crates/rust-analyzer/src/reload.rs | 44 +++++++++++++++--------------- 1 file changed, 22 insertions(+), 22 deletions(-) diff --git a/crates/rust-analyzer/src/reload.rs b/crates/rust-analyzer/src/reload.rs index 00494ca5ba090..f6bc032c01986 100644 --- a/crates/rust-analyzer/src/reload.rs +++ b/crates/rust-analyzer/src/reload.rs @@ -522,13 +522,14 @@ impl GlobalState { } fn recreate_crate_graph(&mut self, cause: String) { - { + // crate graph construction relies on these paths, record them so when one of them gets + // deleted or created we trigger a reconstruction of the crate graph + let mut crate_graph_file_dependencies = FxHashSet::default(); + + let (crate_graph, proc_macro_paths, layouts, toolchains) = { // Create crate graph from all the workspaces let vfs = &mut self.vfs.write().0; let loader = &mut self.loader; - // crate graph construction relies on these paths, record them so when one of them gets - // deleted or created we trigger a reconstruction of the crate graph - let mut crate_graph_file_dependencies = FxHashSet::default(); let load = |path: &AbsPath| { let _p = tracing::span!(tracing::Level::DEBUG, "switch_workspaces::load").entered(); @@ -545,25 +546,24 @@ impl GlobalState { } }; - let (crate_graph, proc_macro_paths, layouts, toolchains) = - ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load); - - let mut change = Change::new(); - if self.config.expand_proc_macros() { - change.set_proc_macros( - crate_graph - .iter() - .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) - .collect(), - ); - self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); - } - change.set_crate_graph(crate_graph); - change.set_target_data_layouts(layouts); - change.set_toolchains(toolchains); - self.analysis_host.apply_change(change); - self.crate_graph_file_dependencies = crate_graph_file_dependencies; + 
ws_to_crate_graph(&self.workspaces, self.config.extra_env(), load) + }; + let mut change = Change::new(); + if self.config.expand_proc_macros() { + change.set_proc_macros( + crate_graph + .iter() + .map(|id| (id, Err("Proc-macros have not been built yet".to_owned()))) + .collect(), + ); + self.fetch_proc_macros_queue.request_op(cause, proc_macro_paths); } + change.set_crate_graph(crate_graph); + change.set_target_data_layouts(layouts); + change.set_toolchains(toolchains); + self.analysis_host.apply_change(change); + self.crate_graph_file_dependencies = crate_graph_file_dependencies; + self.process_changes(); self.reload_flycheck(); } From 6dca7948f771cb2e0d3e1461acb03b2268075f02 Mon Sep 17 00:00:00 2001 From: cui fliter Date: Fri, 23 Feb 2024 18:45:03 +0800 Subject: [PATCH 095/134] remove repetitive words Signed-off-by: cui fliter --- crates/hir-ty/src/db.rs | 2 +- crates/salsa/src/runtime/dependency_graph.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/hir-ty/src/db.rs b/crates/hir-ty/src/db.rs index 42313ff52b1fd..f9e8cff55393f 100644 --- a/crates/hir-ty/src/db.rs +++ b/crates/hir-ty/src/db.rs @@ -90,7 +90,7 @@ pub trait HirDatabase: DefDatabase + Upcast { #[salsa::cycle(crate::lower::ty_recover)] fn ty(&self, def: TyDefId) -> Binders; - /// Returns the type of the value of the given constant, or `None` if the the `ValueTyDefId` is + /// Returns the type of the value of the given constant, or `None` if the `ValueTyDefId` is /// a `StructId` or `EnumVariantId` with a record constructor. #[salsa::invoke(crate::lower::value_ty_query)] fn value_ty(&self, def: ValueTyDefId) -> Option>; diff --git a/crates/salsa/src/runtime/dependency_graph.rs b/crates/salsa/src/runtime/dependency_graph.rs index e41eb280deee5..dd223eeeba9f8 100644 --- a/crates/salsa/src/runtime/dependency_graph.rs +++ b/crates/salsa/src/runtime/dependency_graph.rs @@ -12,7 +12,7 @@ type QueryStack = Vec; #[derive(Debug, Default)] pub(super) struct DependencyGraph { - /// A `(K -> V)` pair in this map indicates that the the runtime + /// A `(K -> V)` pair in this map indicates that the runtime /// `K` is blocked on some query executing in the runtime `V`. /// This encodes a graph that must be acyclic (or else deadlock /// will result). 
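
The `dependency_graph.rs` comment in the hunk above describes a map in which each runtime records the single runtime it is blocked on, and that edge set must stay acyclic or the blocked threads deadlock. The toy model below is only a sketch of the check such a structure needs before adding an edge: the `RuntimeId` alias and `would_cycle` function are made-up stand-ins, not salsa's real types or API, which live in `crates/salsa/src/runtime/dependency_graph.rs`.

```rust
use std::collections::HashMap;

type RuntimeId = u32; // stand-in for salsa's runtime id type

// Would recording "`from` is blocked on `to`" close a cycle? Follow the
// blocked-on edges starting at `to`; the walk terminates because the existing
// map is acyclic and every runtime waits on at most one other runtime.
fn would_cycle(edges: &HashMap<RuntimeId, RuntimeId>, from: RuntimeId, to: RuntimeId) -> bool {
    let mut cur = to;
    loop {
        if cur == from {
            return true;
        }
        match edges.get(&cur) {
            Some(&next) => cur = next,
            None => return false,
        }
    }
}

fn main() {
    let mut edges = HashMap::new();
    edges.insert(1, 2); // runtime 1 waits on runtime 2
    assert!(!would_cycle(&edges, 2, 3)); // 2 waiting on 3 is fine
    assert!(would_cycle(&edges, 2, 1)); // 2 waiting on 1 would loop back through 1 -> 2
    edges.insert(2, 3);
    assert!(would_cycle(&edges, 3, 1)); // 3 -> 1 -> 2 -> 3 would deadlock
}
```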
From 7159aed51e7ef4d8459a10e449bf8e9cc09c9b98 Mon Sep 17 00:00:00 2001 From: clubby789 Date: Mon, 5 Feb 2024 16:15:47 +0000 Subject: [PATCH 096/134] Use `br` instead of conditional when branching on constant --- compiler/rustc_codegen_ssa/src/mir/block.rs | 18 ++++-- tests/codegen/constant-branch.rs | 67 +++++++++++++++++++++ 2 files changed, 80 insertions(+), 5 deletions(-) create mode 100644 tests/codegen/constant-branch.rs diff --git a/compiler/rustc_codegen_ssa/src/mir/block.rs b/compiler/rustc_codegen_ssa/src/mir/block.rs index 00007110938e0..9c7aadb81f828 100644 --- a/compiler/rustc_codegen_ssa/src/mir/block.rs +++ b/compiler/rustc_codegen_ssa/src/mir/block.rs @@ -319,7 +319,15 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { targets: &SwitchTargets, ) { let discr = self.codegen_operand(bx, discr); + let discr_value = discr.immediate(); let switch_ty = discr.layout.ty; + // If our discriminant is a constant we can branch directly + if let Some(const_discr) = bx.const_to_opt_u128(discr_value, false) { + let target = targets.target_for_value(const_discr); + bx.br(helper.llbb_with_cleanup(self, target)); + return; + }; + let mut target_iter = targets.iter(); if target_iter.len() == 1 { // If there are two targets (one conditional, one fallback), emit `br` instead of @@ -330,14 +338,14 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { if switch_ty == bx.tcx().types.bool { // Don't generate trivial icmps when switching on bool. match test_value { - 0 => bx.cond_br(discr.immediate(), llfalse, lltrue), - 1 => bx.cond_br(discr.immediate(), lltrue, llfalse), + 0 => bx.cond_br(discr_value, llfalse, lltrue), + 1 => bx.cond_br(discr_value, lltrue, llfalse), _ => bug!(), } } else { let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty)); let llval = bx.const_uint_big(switch_llty, test_value); - let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); + let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval); bx.cond_br(cmp, lltrue, llfalse); } } else if self.cx.sess().opts.optimize == OptLevel::No @@ -362,11 +370,11 @@ impl<'a, 'tcx, Bx: BuilderMethods<'a, 'tcx>> FunctionCx<'a, 'tcx, Bx> { let ll2 = helper.llbb_with_cleanup(self, target2); let switch_llty = bx.immediate_backend_type(bx.layout_of(switch_ty)); let llval = bx.const_uint_big(switch_llty, test_value1); - let cmp = bx.icmp(IntPredicate::IntEQ, discr.immediate(), llval); + let cmp = bx.icmp(IntPredicate::IntEQ, discr_value, llval); bx.cond_br(cmp, ll1, ll2); } else { bx.switch( - discr.immediate(), + discr_value, helper.llbb_with_cleanup(self, targets.otherwise()), target_iter.map(|(value, target)| (value, helper.llbb_with_cleanup(self, target))), ); diff --git a/tests/codegen/constant-branch.rs b/tests/codegen/constant-branch.rs new file mode 100644 index 0000000000000..3328b1eb4a88b --- /dev/null +++ b/tests/codegen/constant-branch.rs @@ -0,0 +1,67 @@ +//@ compile-flags: -Zmir-opt-level=0 -C no-prepopulate-passes -Copt-level=0 +// make sure that branching on a constant does not emit a conditional +// branch or a switch + +#![crate_type = "lib"] + +// CHECK-LABEL: @if_bool +#[no_mangle] +pub fn if_bool() { + // CHECK: br label %{{.+}} + _ = if true { + 0 + } else { + 1 + }; + + // CHECK: br label %{{.+}} + _ = if false { + 0 + } else { + 1 + }; +} + +// CHECK-LABEL: @if_constant_int_eq +#[no_mangle] +pub fn if_constant_int_eq() { + let val = 0; + // CHECK: br label %{{.+}} + _ = if val == 0 { + 0 + } else { + 1 + }; + + // CHECK: br label %{{.+}} + _ = if val 
== 1 { + 0 + } else { + 1 + }; +} + +// CHECK-LABEL: @if_constant_match +#[no_mangle] +pub fn if_constant_match() { + // CHECK: br label %{{.+}} + _ = match 1 { + 1 => 2, + 2 => 3, + _ => 4 + }; + + // CHECK: br label %{{.+}} + _ = match 1 { + 2 => 3, + _ => 4 + }; + + // CHECK: br label %[[MINUS1:.+]] + _ = match -1 { + // CHECK: [[MINUS1]]: + // CHECK: store i32 1 + -1 => 1, + _ => 0, + } +} From cc4d0e1bd1600cb892a25a57bdeeb70ad258c153 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Thu, 22 Feb 2024 21:13:52 +0100 Subject: [PATCH 097/134] Optimize salsa some more --- Cargo.lock | 1 + crates/salsa/Cargo.toml | 1 + crates/salsa/salsa-macros/src/query_group.rs | 4 +- crates/salsa/src/derived.rs | 25 ++-- crates/salsa/src/derived/slot.rs | 124 +++++++++++-------- crates/salsa/src/durability.rs | 4 +- crates/salsa/src/input.rs | 53 ++++---- crates/salsa/src/interned.rs | 16 +-- crates/salsa/src/lib.rs | 2 +- crates/salsa/src/plumbing.rs | 4 +- crates/salsa/src/revision.rs | 2 +- crates/salsa/src/runtime.rs | 52 ++++---- crates/salsa/src/runtime/local_state.rs | 7 +- 13 files changed, 147 insertions(+), 148 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7b29d7bb798df..3c87291dbadb4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1709,6 +1709,7 @@ dependencies = [ "dissimilar", "expect-test", "indexmap", + "itertools", "linked-hash-map", "lock_api", "oorandom", diff --git a/crates/salsa/Cargo.toml b/crates/salsa/Cargo.toml index 4ccbc3de846d5..9eec21f6a15ff 100644 --- a/crates/salsa/Cargo.toml +++ b/crates/salsa/Cargo.toml @@ -21,6 +21,7 @@ rustc-hash = "1.0" smallvec = "1.0.0" oorandom = "11" triomphe = "0.1.11" +itertools.workspace = true salsa-macros = { version = "0.0.0", path = "salsa-macros" } diff --git a/crates/salsa/salsa-macros/src/query_group.rs b/crates/salsa/salsa-macros/src/query_group.rs index 5d1678ef12006..a868d920b6669 100644 --- a/crates/salsa/salsa-macros/src/query_group.rs +++ b/crates/salsa/salsa-macros/src/query_group.rs @@ -526,7 +526,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream fmt_ops.extend(quote! { #query_index => { salsa::plumbing::QueryStorageOps::fmt_index( - &*self.#fn_name, db, input, fmt, + &*self.#fn_name, db, input.key_index(), fmt, ) } }); @@ -537,7 +537,7 @@ pub(crate) fn query_group(args: TokenStream, input: TokenStream) -> TokenStream maybe_changed_ops.extend(quote! 
{ #query_index => { salsa::plumbing::QueryStorageOps::maybe_changed_after( - &*self.#fn_name, db, input, revision + &*self.#fn_name, db, input.key_index(), revision ) } }); diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index d631671005816..bf532bdccf64d 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -102,13 +102,13 @@ where let mut write = self.slot_map.write(); let entry = write.entry(key.clone()); - let key_index = u32::try_from(entry.index()).unwrap(); + let key_index = entry.index() as u32; let database_key_index = DatabaseKeyIndex { group_index: self.group_index, query_index: Q::QUERY_INDEX, key_index, }; - entry.or_insert_with(|| Arc::new(Slot::new(key.clone(), database_key_index))).clone() + entry.or_insert_with(|| Arc::new(Slot::new(database_key_index))).clone() } } @@ -131,34 +131,33 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); let slot_map = self.slot_map.read(); - let key = slot_map.get_index(index.key_index as usize).unwrap().0; + let key = slot_map.get_index(index as usize).unwrap().0; write!(fmt, "{}({:?})", Q::QUERY_NAME, key) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); - let slot = self.slot_map.read().get_index(input.key_index as usize).unwrap().1.clone(); - slot.maybe_changed_after(db, revision) + let read = &self.slot_map.read(); + let Some((key, slot)) = read.get_index(index as usize) else { + return false; + }; + slot.maybe_changed_after(db, revision, key) } fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { db.unwind_if_cancelled(); let slot = self.slot(key); - let StampedValue { value, durability, changed_at } = slot.read(db); + let StampedValue { value, durability, changed_at } = slot.read(db, key); if let Some(evicted) = self.lru_list.record_use(&slot) { evicted.evict(); @@ -182,7 +181,7 @@ where C: std::iter::FromIterator>, { let slot_map = self.slot_map.read(); - slot_map.values().filter_map(|slot| slot.as_table_entry()).collect() + slot_map.iter().filter_map(|(key, slot)| slot.as_table_entry(key)).collect() } } diff --git a/crates/salsa/src/derived/slot.rs b/crates/salsa/src/derived/slot.rs index 4fad791a26aec..75204c8ff6047 100644 --- a/crates/salsa/src/derived/slot.rs +++ b/crates/salsa/src/derived/slot.rs @@ -26,8 +26,8 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - key: Q::Key, - database_key_index: DatabaseKeyIndex, + key_index: u32, + group_index: u16, state: RwLock>, policy: PhantomData, lru_index: LruIndex, @@ -110,10 +110,10 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - pub(super) fn new(key: Q::Key, database_key_index: DatabaseKeyIndex) -> Self { + pub(super) fn new(database_key_index: DatabaseKeyIndex) -> Self { Self { - key, - database_key_index, + key_index: database_key_index.key_index, + group_index: database_key_index.group_index, state: RwLock::new(QueryState::NotComputed), lru_index: LruIndex::default(), policy: PhantomData, @@ -121,10 +121,18 @@ where } pub(super) fn database_key_index(&self) -> DatabaseKeyIndex { - self.database_key_index + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + 
key_index: self.key_index, + } } - pub(super) fn read(&self, db: &>::DynDb) -> StampedValue { + pub(super) fn read( + &self, + db: &>::DynDb, + key: &Q::Key, + ) -> StampedValue { let runtime = db.salsa_runtime(); // NB: We don't need to worry about people modifying the @@ -147,7 +155,7 @@ where } } - self.read_upgrade(db, revision_now) + self.read_upgrade(db, key, revision_now) } /// Second phase of a read operation: acquires an upgradable-read @@ -157,6 +165,7 @@ where fn read_upgrade( &self, db: &>::DynDb, + key: &Q::Key, revision_now: Revision, ) -> StampedValue { let runtime = db.salsa_runtime(); @@ -186,8 +195,8 @@ where } }; - let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); - let active_query = runtime.push_query(self.database_key_index); + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); // If we have an old-value, it *may* now be stale, since there // has been a new revision since the last time we checked. So, @@ -200,7 +209,7 @@ where db.salsa_event(Event { runtime_id: runtime.id(), kind: EventKind::DidValidateMemoizedValue { - database_key: self.database_key_index, + database_key: self.database_key_index(), }, }); @@ -210,7 +219,7 @@ where } } - self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo) + self.execute(db, runtime, revision_now, active_query, panic_guard, old_memo, key) } fn execute( @@ -221,22 +230,23 @@ where active_query: ActiveQueryGuard<'_>, panic_guard: PanicGuard<'_, Q, MP>, old_memo: Option>, + key: &Q::Key, ) -> StampedValue { - tracing::info!("{:?}: executing query", self.database_key_index.debug(db)); + tracing::info!("{:?}: executing query", self.database_key_index().debug(db)); db.salsa_event(Event { runtime_id: db.salsa_runtime().id(), - kind: EventKind::WillExecute { database_key: self.database_key_index }, + kind: EventKind::WillExecute { database_key: self.database_key_index() }, }); // Query was not previously executed, or value is potentially // stale, or value is absent. Let's execute! - let value = match Cycle::catch(|| Q::execute(db, self.key.clone())) { + let value = match Cycle::catch(|| Q::execute(db, key.clone())) { Ok(v) => v, Err(cycle) => { tracing::debug!( "{:?}: caught cycle {:?}, have strategy {:?}", - self.database_key_index.debug(db), + self.database_key_index().debug(db), cycle, Q::CYCLE_STRATEGY, ); @@ -248,12 +258,12 @@ where crate::plumbing::CycleRecoveryStrategy::Fallback => { if let Some(c) = active_query.take_cycle() { assert!(c.is(&cycle)); - Q::cycle_fallback(db, &cycle, &self.key) + Q::cycle_fallback(db, &cycle, key) } else { // we are not a participant in this cycle debug_assert!(!cycle .participant_keys() - .any(|k| k == self.database_key_index)); + .any(|k| k == self.database_key_index())); cycle.throw() } } @@ -303,7 +313,7 @@ where }; let memo_value = - if self.should_memoize_value(&self.key) { Some(new_value.value.clone()) } else { None }; + if self.should_memoize_value(key) { Some(new_value.value.clone()) } else { None }; debug!("read_upgrade({:?}): result.revisions = {:#?}", self, revisions,); @@ -395,13 +405,11 @@ where } } - pub(super) fn as_table_entry(&self) -> Option> { + pub(super) fn as_table_entry(&self, key: &Q::Key) -> Option> { match &*self.state.read() { QueryState::NotComputed => None, - QueryState::InProgress { .. 
} => Some(TableEntry::new(self.key.clone(), None)), - QueryState::Memoized(memo) => { - Some(TableEntry::new(self.key.clone(), memo.value.clone())) - } + QueryState::InProgress { .. } => Some(TableEntry::new(key.clone(), None)), + QueryState::Memoized(memo) => Some(TableEntry::new(key.clone(), memo.value.clone())), } } @@ -436,6 +444,7 @@ where &self, db: &>::DynDb, revision: Revision, + key: &Q::Key, ) -> bool { let runtime = db.salsa_runtime(); let revision_now = runtime.current_revision(); @@ -458,7 +467,7 @@ where MaybeChangedSinceProbeState::ChangedAt(changed_at) => return changed_at > revision, MaybeChangedSinceProbeState::Stale(state) => { drop(state); - return self.maybe_changed_after_upgrade(db, revision); + return self.maybe_changed_after_upgrade(db, revision, key); } } } @@ -495,6 +504,7 @@ where &self, db: &>::DynDb, revision: Revision, + key: &Q::Key, ) -> bool { let runtime = db.salsa_runtime(); let revision_now = runtime.current_revision(); @@ -513,7 +523,9 @@ where // If another thread was active, then the cache line is going to be // either verified or cleared out. Just recurse to figure out which. // Note that we don't need an upgradable read. - MaybeChangedSinceProbeState::Retry => return self.maybe_changed_after(db, revision), + MaybeChangedSinceProbeState::Retry => { + return self.maybe_changed_after(db, revision, key) + } MaybeChangedSinceProbeState::Stale(state) => { type RwLockUpgradableReadGuard<'a, T> = @@ -527,8 +539,8 @@ where } }; - let panic_guard = PanicGuard::new(self.database_key_index, self, runtime); - let active_query = runtime.push_query(self.database_key_index); + let panic_guard = PanicGuard::new(self, runtime); + let active_query = runtime.push_query(self.database_key_index()); if old_memo.verify_revisions(db.ops_database(), revision_now, &active_query) { let maybe_changed = old_memo.revisions.changed_at > revision; @@ -538,8 +550,15 @@ where // We found that this memoized value may have changed // but we have an old value. We can re-run the code and // actually *check* if it has changed. - let StampedValue { changed_at, .. } = - self.execute(db, runtime, revision_now, active_query, panic_guard, Some(old_memo)); + let StampedValue { changed_at, .. } = self.execute( + db, + runtime, + revision_now, + active_query, + panic_guard, + Some(old_memo), + key, + ); changed_at > revision } else { // We found that inputs to this memoized value may have chanced @@ -560,7 +579,7 @@ where ) { runtime.block_on_or_unwind( db.ops_database(), - self.database_key_index, + self.database_key_index(), other_id, mutex_guard, ) @@ -585,7 +604,6 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - database_key_index: DatabaseKeyIndex, slot: &'me Slot, runtime: &'me Runtime, } @@ -595,12 +613,8 @@ where Q: QueryFunction, MP: MemoizationPolicy, { - fn new( - database_key_index: DatabaseKeyIndex, - slot: &'me Slot, - runtime: &'me Runtime, - ) -> Self { - Self { database_key_index, slot, runtime } + fn new(slot: &'me Slot, runtime: &'me Runtime) -> Self { + Self { slot, runtime } } /// Indicates that we have concluded normally (without panicking). @@ -616,17 +630,18 @@ where /// inserted; if others were blocked, waiting for us to finish, /// then notify them. fn overwrite_placeholder(&mut self, wait_result: WaitResult, opt_memo: Option>) { - let mut write = self.slot.state.write(); - - let old_value = match opt_memo { - // Replace the `InProgress` marker that we installed with the new - // memo, thus releasing our unique access to this key. 
- Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), - - // We had installed an `InProgress` marker, but we panicked before - // it could be removed. At this point, we therefore "own" unique - // access to our slot, so we can just remove the key. - None => std::mem::replace(&mut *write, QueryState::NotComputed), + let old_value = { + let mut write = self.slot.state.write(); + match opt_memo { + // Replace the `InProgress` marker that we installed with the new + // memo, thus releasing our unique access to this key. + Some(memo) => std::mem::replace(&mut *write, QueryState::Memoized(memo)), + + // We had installed an `InProgress` marker, but we panicked before + // it could be removed. At this point, we therefore "own" unique + // access to our slot, so we can just remove the key. + None => std::mem::replace(&mut *write, QueryState::NotComputed), + } }; match old_value { @@ -638,7 +653,8 @@ where // acquire a mutex; the mutex will guarantee that all writes // we are interested in are visible. if anyone_waiting.load(Ordering::Relaxed) { - self.runtime.unblock_queries_blocked_on(self.database_key_index, wait_result); + self.runtime + .unblock_queries_blocked_on(self.slot.database_key_index(), wait_result); } } _ => panic!( @@ -692,10 +708,10 @@ where return None; } if self.verify_revisions(db, revision_now, active_query) { - Some(StampedValue { + self.value.clone().map(|value| StampedValue { durability: self.revisions.durability, changed_at: self.revisions.changed_at, - value: self.value.as_ref().unwrap().clone(), + value, }) } else { None @@ -748,7 +764,7 @@ where // input changed *again*. QueryInputs::Tracked { inputs } => { let changed_input = - inputs.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); + inputs.slice.iter().find(|&&input| db.maybe_changed_after(input, verified_at)); if let Some(input) = changed_input { debug!("validate_memoized_value: `{:?}` may have changed", input); @@ -788,7 +804,7 @@ where MP: MemoizationPolicy, { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(fmt, "{:?}({:?})", Q::default(), self.key) + write!(fmt, "{:?}", Q::default()) } } diff --git a/crates/salsa/src/durability.rs b/crates/salsa/src/durability.rs index 0c82f6345ab65..44abae3170f66 100644 --- a/crates/salsa/src/durability.rs +++ b/crates/salsa/src/durability.rs @@ -42,9 +42,9 @@ impl Durability { pub(crate) const MAX: Durability = Self::HIGH; /// Number of durability levels. 
- pub(crate) const LEN: usize = 3; + pub(crate) const LEN: usize = Self::MAX.index() + 1; - pub(crate) fn index(self) -> usize { + pub(crate) const fn index(self) -> usize { self.0 as usize } } diff --git a/crates/salsa/src/input.rs b/crates/salsa/src/input.rs index c2539570e0f9f..922ec5a775218 100644 --- a/crates/salsa/src/input.rs +++ b/crates/salsa/src/input.rs @@ -29,7 +29,7 @@ where } struct Slot { - database_key_index: DatabaseKeyIndex, + key_index: u32, stamped_value: RwLock>, } @@ -54,27 +54,25 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); let slot_map = self.slots.read(); - let key = slot_map.get_index(index.key_index as usize).unwrap().0; + let key = slot_map.get_index(index as usize).unwrap().0; write!(fmt, "{}({:?})", Q::QUERY_NAME, key) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); let slots = &self.slots.read(); - let slot = slots.get_index(input.key_index as usize).unwrap().1; + let Some((_, slot)) = slots.get_index(index as usize) else { + return true; + }; debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); @@ -96,7 +94,11 @@ where let StampedValue { value, durability, changed_at } = slot.stamped_value.read().clone(); db.salsa_runtime().report_query_read_and_unwind_if_cycle_resulted( - slot.database_key_index, + DatabaseKeyIndex { + group_index: self.group_index, + query_index: Q::QUERY_INDEX, + key_index: slot.key_index, + }, durability, changed_at, ); @@ -174,16 +176,8 @@ where } Entry::Vacant(entry) => { - let key_index = u32::try_from(entry.index()).unwrap(); - let database_key_index = DatabaseKeyIndex { - group_index: self.group_index, - query_index: Q::QUERY_INDEX, - key_index, - }; - entry.insert(Slot { - database_key_index, - stamped_value: RwLock::new(stamped_value), - }); + let key_index = entry.index() as u32; + entry.insert(Slot { key_index, stamped_value: RwLock::new(stamped_value) }); None } } @@ -196,7 +190,6 @@ pub struct UnitInputStorage where Q: Query, { - group_index: u16, slot: UnitSlot, } @@ -222,36 +215,32 @@ where fn new(group_index: u16) -> Self { let database_key_index = DatabaseKeyIndex { group_index, query_index: Q::QUERY_INDEX, key_index: 0 }; - UnitInputStorage { - group_index, - slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) }, - } + UnitInputStorage { slot: UnitSlot { database_key_index, stamped_value: RwLock::new(None) } } } fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + _index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); write!(fmt, "{}", Q::QUERY_NAME) } fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + _index: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); debug!("maybe_changed_after(slot={:?}, revision={:?})", Q::default(), revision,); - let changed_at = self.slot.stamped_value.read().as_ref().unwrap().changed_at; + let Some(value) = 
&*self.slot.stamped_value.read() else { + return true; + }; + let changed_at = value.changed_at; debug!("maybe_changed_after: changed_at = {:?}", changed_at); diff --git a/crates/salsa/src/interned.rs b/crates/salsa/src/interned.rs index 822219f51859c..c065e7e2bde57 100644 --- a/crates/salsa/src/interned.rs +++ b/crates/salsa/src/interned.rs @@ -265,12 +265,10 @@ where fn fmt_index( &self, _db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { - assert_eq!(index.group_index, self.group_index); - assert_eq!(index.query_index, Q::QUERY_INDEX); - let intern_id = InternId::from(index.key_index); + let intern_id = InternId::from(index); let slot = self.lookup_value(intern_id); write!(fmt, "{}({:?})", Q::QUERY_NAME, slot.value) } @@ -278,13 +276,11 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + input: u32, revision: Revision, ) -> bool { - assert_eq!(input.group_index, self.group_index); - assert_eq!(input.query_index, Q::QUERY_INDEX); debug_assert!(revision < db.salsa_runtime().current_revision()); - let intern_id = InternId::from(input.key_index); + let intern_id = InternId::from(input); let slot = self.lookup_value(intern_id); slot.maybe_changed_after(revision) } @@ -388,7 +384,7 @@ where fn fmt_index( &self, db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result { let group_storage = @@ -400,7 +396,7 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + input: u32, revision: Revision, ) -> bool { let group_storage = diff --git a/crates/salsa/src/lib.rs b/crates/salsa/src/lib.rs index 98b3a48e37ca6..fe80759887303 100644 --- a/crates/salsa/src/lib.rs +++ b/crates/salsa/src/lib.rs @@ -54,7 +54,7 @@ pub trait Database: plumbing::DatabaseOps { /// runtime. It permits the database to be customized and to /// inject logging or other custom behavior. fn salsa_event(&self, event_fn: Event) { - #![allow(unused_variables)] + _ = event_fn; } /// Starts unwinding the stack if the current revision is cancelled. 
diff --git a/crates/salsa/src/plumbing.rs b/crates/salsa/src/plumbing.rs index b8df87fd5e5ca..1a8ff33b2efcc 100644 --- a/crates/salsa/src/plumbing.rs +++ b/crates/salsa/src/plumbing.rs @@ -173,7 +173,7 @@ where fn fmt_index( &self, db: &>::DynDb, - index: DatabaseKeyIndex, + index: u32, fmt: &mut std::fmt::Formatter<'_>, ) -> std::fmt::Result; @@ -186,7 +186,7 @@ where fn maybe_changed_after( &self, db: &>::DynDb, - input: DatabaseKeyIndex, + index: u32, revision: Revision, ) -> bool; // ANCHOR_END:maybe_changed_after diff --git a/crates/salsa/src/revision.rs b/crates/salsa/src/revision.rs index d97aaf9debabd..559b03386087f 100644 --- a/crates/salsa/src/revision.rs +++ b/crates/salsa/src/revision.rs @@ -46,7 +46,7 @@ pub(crate) struct AtomicRevision { } impl AtomicRevision { - pub(crate) fn start() -> Self { + pub(crate) const fn start() -> Self { Self { data: AtomicU32::new(START) } } diff --git a/crates/salsa/src/runtime.rs b/crates/salsa/src/runtime.rs index 40b8856991f9c..a7d5a2457823f 100644 --- a/crates/salsa/src/runtime.rs +++ b/crates/salsa/src/runtime.rs @@ -4,13 +4,14 @@ use crate::hash::FxIndexSet; use crate::plumbing::CycleRecoveryStrategy; use crate::revision::{AtomicRevision, Revision}; use crate::{Cancelled, Cycle, Database, DatabaseKeyIndex, Event, EventKind}; +use itertools::Itertools; use parking_lot::lock_api::{RawRwLock, RawRwLockRecursive}; use parking_lot::{Mutex, RwLock}; use std::hash::Hash; use std::panic::panic_any; -use std::sync::atomic::{AtomicUsize, Ordering}; +use std::sync::atomic::{AtomicU32, Ordering}; use tracing::debug; -use triomphe::Arc; +use triomphe::{Arc, ThinArc}; mod dependency_graph; use dependency_graph::DependencyGraph; @@ -297,8 +298,7 @@ impl Runtime { // (at least for this execution, not necessarily across executions), // no matter where it started on the stack. Find the minimum // key and rotate it to the front. - let min = v.iter().min().unwrap(); - let index = v.iter().position(|p| p == min).unwrap(); + let index = v.iter().position_min().unwrap_or_default(); v.rotate_left(index); // No need to store extra memory. @@ -440,7 +440,7 @@ impl Runtime { /// State that will be common to all threads (when we support multiple threads) struct SharedState { /// Stores the next id to use for a snapshotted runtime (starts at 1). - next_id: AtomicUsize, + next_id: AtomicU32, /// Whenever derived queries are executing, they acquire this lock /// in read mode. Mutating inputs (and thus creating a new @@ -457,50 +457,46 @@ struct SharedState { /// revision is cancelled). pending_revision: AtomicRevision, - /// Stores the "last change" revision for values of each duration. + /// Stores the "last change" revision for values of each Durability. /// This vector is always of length at least 1 (for Durability 0) - /// but its total length depends on the number of durations. The + /// but its total length depends on the number of Durabilities. The /// element at index 0 is special as it represents the "current /// revision". In general, we have the invariant that revisions /// in here are *declining* -- that is, `revisions[i] >= /// revisions[i + 1]`, for all `i`. This is because when you /// modify a value with durability D, that implies that values /// with durability less than D may have changed too. - revisions: Vec, + revisions: [AtomicRevision; Durability::LEN], /// The dependency graph tracks which runtimes are blocked on one /// another, waiting for queries to terminate. 
dependency_graph: Mutex, } -impl SharedState { - fn with_durabilities(durabilities: usize) -> Self { - SharedState { - next_id: AtomicUsize::new(1), - query_lock: Default::default(), - revisions: (0..durabilities).map(|_| AtomicRevision::start()).collect(), - pending_revision: AtomicRevision::start(), - dependency_graph: Default::default(), - } - } -} - impl std::panic::RefUnwindSafe for SharedState {} impl Default for SharedState { fn default() -> Self { - Self::with_durabilities(Durability::LEN) + #[allow(clippy::declare_interior_mutable_const)] + const START: AtomicRevision = AtomicRevision::start(); + SharedState { + next_id: AtomicU32::new(1), + query_lock: Default::default(), + revisions: [START; Durability::LEN], + pending_revision: START, + dependency_graph: Default::default(), + } } } impl std::fmt::Debug for SharedState { fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - let query_lock = if self.query_lock.try_write().is_some() { - "" - } else if self.query_lock.try_read().is_some() { + let query_lock = if self.query_lock.is_locked_exclusive() { + "" + } else if self.query_lock.is_locked() { "" } else { - "" + "" }; fmt.debug_struct("SharedState") .field("query_lock", &query_lock) @@ -570,7 +566,9 @@ impl ActiveQuery { if dependencies.is_empty() { QueryInputs::NoInputs } else { - QueryInputs::Tracked { inputs: dependencies.iter().copied().collect() } + QueryInputs::Tracked { + inputs: ThinArc::from_header_and_iter((), dependencies.iter().copied()), + } } } }; @@ -616,7 +614,7 @@ impl ActiveQuery { /// complete, its `RuntimeId` may potentially be re-used. #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)] pub struct RuntimeId { - counter: usize, + counter: u32, } #[derive(Clone, Debug)] diff --git a/crates/salsa/src/runtime/local_state.rs b/crates/salsa/src/runtime/local_state.rs index 91b95dffe78a4..7ac21dec1a89e 100644 --- a/crates/salsa/src/runtime/local_state.rs +++ b/crates/salsa/src/runtime/local_state.rs @@ -1,5 +1,6 @@ //! use tracing::debug; +use triomphe::ThinArc; use crate::durability::Durability; use crate::runtime::ActiveQuery; @@ -7,7 +8,6 @@ use crate::runtime::Revision; use crate::Cycle; use crate::DatabaseKeyIndex; use std::cell::RefCell; -use triomphe::Arc; /// State that is specific to a single execution thread. /// @@ -43,7 +43,7 @@ pub(crate) struct QueryRevisions { #[derive(Debug, Clone)] pub(crate) enum QueryInputs { /// Non-empty set of inputs, fully known - Tracked { inputs: Arc<[DatabaseKeyIndex]> }, + Tracked { inputs: ThinArc<(), DatabaseKeyIndex> }, /// Empty set of inputs, fully known. NoInputs, @@ -145,8 +145,7 @@ impl LocalState { /// the current thread is blocking. The stack must be restored /// with [`Self::restore_query_stack`] when the thread unblocks. 
pub(super) fn take_query_stack(&self) -> Vec { - assert!(self.query_stack.borrow().is_some(), "query stack already taken"); - self.query_stack.take().unwrap() + self.query_stack.take().expect("query stack already taken") } /// Restores a query stack taken with [`Self::take_query_stack`] once From e656844833e0e71416a84bf692240a7a8b809e0a Mon Sep 17 00:00:00 2001 From: NikitaShyrei Date: Fri, 23 Feb 2024 16:35:07 +0100 Subject: [PATCH 098/134] moved tests file --- library/std/src/sys/{pal/sgx/rwlock => locks/rwlock/sgx}/tests.rs | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename library/std/src/sys/{pal/sgx/rwlock => locks/rwlock/sgx}/tests.rs (100%) diff --git a/library/std/src/sys/pal/sgx/rwlock/tests.rs b/library/std/src/sys/locks/rwlock/sgx/tests.rs similarity index 100% rename from library/std/src/sys/pal/sgx/rwlock/tests.rs rename to library/std/src/sys/locks/rwlock/sgx/tests.rs From c6a6e63a458064e8f7cdbe6f5992a195274eab0a Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 17:24:29 +0100 Subject: [PATCH 099/134] internal: Pin commit of rust-lang/rust for rustc-test metrics --- crates/rust-analyzer/src/cli/rustc_tests.rs | 1 + xtask/src/metrics.rs | 6 +++++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/cli/rustc_tests.rs b/crates/rust-analyzer/src/cli/rustc_tests.rs index b3b6da1f698e8..7062b60cbfc18 100644 --- a/crates/rust-analyzer/src/cli/rustc_tests.rs +++ b/crates/rust-analyzer/src/cli/rustc_tests.rs @@ -276,6 +276,7 @@ impl flags::RustcTests { pub fn run(self) -> Result<()> { let mut tester = Tester::new()?; let walk_dir = WalkDir::new(self.rustc_repo.join("tests/ui")); + eprintln!("Running tests for tests/ui"); for i in walk_dir { let i = i?; let p = i.into_path(); diff --git a/xtask/src/metrics.rs b/xtask/src/metrics.rs index 2efafa10a828e..285abb9efcb4d 100644 --- a/xtask/src/metrics.rs +++ b/xtask/src/metrics.rs @@ -86,7 +86,11 @@ impl Metrics { fn measure_rustc_tests(&mut self, sh: &Shell) -> anyhow::Result<()> { eprintln!("\nMeasuring rustc tests"); - cmd!(sh, "git clone --depth=1 https://github.com/rust-lang/rust").run()?; + cmd!( + sh, + "git clone --depth=1 --branch 1.76.0 https://github.com/rust-lang/rust.git --single-branch" + ) + .run()?; let output = cmd!(sh, "./target/release/rust-analyzer rustc-tests ./rust").read()?; for (metric, value, unit) in parse_metrics(&output) { From 62cb9d1a978a760dad4f56951922b8b533c98d08 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 23 Feb 2024 19:07:42 +0100 Subject: [PATCH 100/134] delay cloning of iterator items --- .../src/traits/coherence.rs | 35 ++++++++++--------- 1 file changed, 19 insertions(+), 16 deletions(-) diff --git a/compiler/rustc_trait_selection/src/traits/coherence.rs b/compiler/rustc_trait_selection/src/traits/coherence.rs index 3619d02438da1..ac0685994eabf 100644 --- a/compiler/rustc_trait_selection/src/traits/coherence.rs +++ b/compiler/rustc_trait_selection/src/traits/coherence.rs @@ -320,22 +320,25 @@ fn impl_intersection_has_impossible_obligation<'a, 'cx, 'tcx>( let mut errors = fulfill_cx.select_where_possible(infcx); errors.pop().map(|err| err.obligation) } else { - obligations.iter().cloned().find(|obligation| { - // We use `evaluate_root_obligation` to correctly track intercrate - // ambiguity clauses. We cannot use this in the new solver. 
- let evaluation_result = selcx.evaluate_root_obligation(obligation); - - match evaluation_result { - Ok(result) => !result.may_apply(), - // If overflow occurs, we need to conservatively treat the goal as possibly holding, - // since there can be instantiations of this goal that don't overflow and result in - // success. This isn't much of a problem in the old solver, since we treat overflow - // fatally (this still can be encountered: ), - // but in the new solver, this is very important for correctness, since overflow - // *must* be treated as ambiguity for completeness. - Err(_overflow) => false, - } - }) + obligations + .iter() + .find(|obligation| { + // We use `evaluate_root_obligation` to correctly track intercrate + // ambiguity clauses. We cannot use this in the new solver. + let evaluation_result = selcx.evaluate_root_obligation(obligation); + + match evaluation_result { + Ok(result) => !result.may_apply(), + // If overflow occurs, we need to conservatively treat the goal as possibly holding, + // since there can be instantiations of this goal that don't overflow and result in + // success. This isn't much of a problem in the old solver, since we treat overflow + // fatally (this still can be encountered: ), + // but in the new solver, this is very important for correctness, since overflow + // *must* be treated as ambiguity for completeness. + Err(_overflow) => false, + } + }) + .cloned() } } From d9a08624aad55a91f839e6ee3acf7117d197cda9 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 19:31:53 +0100 Subject: [PATCH 101/134] internal: Disable rustc test metrics --- .github/workflows/metrics.yaml | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index be9f504e59966..87a1bd53a5c54 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -67,7 +67,7 @@ jobs: other_metrics: strategy: matrix: - names: [self, rustc_tests, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] + names: [self, ripgrep-13.0.0, webrender-2022, diesel-1.4.8, hyper-0.14.18] runs-on: ubuntu-latest needs: [setup_cargo, build_metrics] @@ -118,11 +118,6 @@ jobs: with: name: self-${{ github.sha }} - - name: Download rustc_tests metrics - uses: actions/download-artifact@v3 - with: - name: rustc_tests-${{ github.sha }} - - name: Download ripgrep-13.0.0 metrics uses: actions/download-artifact@v3 with: @@ -151,7 +146,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json rustc_tests.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . 
git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 From 8e0dd993d63d179402b0f5330aadfe589069b387 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 23 Feb 2024 19:22:27 +0100 Subject: [PATCH 102/134] check that simd_insert/extract indices are in-bounds --- compiler/rustc_codegen_llvm/src/intrinsic.rs | 53 ++++++++++++------- compiler/rustc_codegen_ssa/messages.ftl | 6 +-- compiler/rustc_codegen_ssa/src/errors.rs | 12 +---- ...-out-of-bounds.rs => not-out-of-bounds.rs} | 15 +++--- ...bounds.stderr => not-out-of-bounds.stderr} | 42 +++++++++------ 5 files changed, 73 insertions(+), 55 deletions(-) rename tests/ui/simd/{shuffle-not-out-of-bounds.rs => not-out-of-bounds.rs} (88%) rename tests/ui/simd/{shuffle-not-out-of-bounds.stderr => not-out-of-bounds.stderr} (71%) diff --git a/compiler/rustc_codegen_llvm/src/intrinsic.rs b/compiler/rustc_codegen_llvm/src/intrinsic.rs index 23e6f054a7c1e..1eac2157cac3c 100644 --- a/compiler/rustc_codegen_llvm/src/intrinsic.rs +++ b/compiler/rustc_codegen_llvm/src/intrinsic.rs @@ -1079,7 +1079,7 @@ fn generic_simd_intrinsic<'ll, 'tcx>( .map(|(arg_idx, val)| { let idx = val.unwrap_leaf().try_to_i32().unwrap(); if idx >= i32::try_from(total_len).unwrap() { - bx.sess().dcx().emit_err(InvalidMonomorphization::ShuffleIndexOutOfBounds { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { span, name, arg_idx: arg_idx as u64, @@ -1138,24 +1138,15 @@ fn generic_simd_intrinsic<'ll, 'tcx>( let val = bx.const_get_elt(vector, i as u64); match bx.const_to_opt_u128(val, true) { None => { - bx.sess().dcx().emit_err( - InvalidMonomorphization::ShuffleIndexNotConstant { - span, - name, - arg_idx, - }, - ); - None + bug!("typeck should have already ensured that these are const") } Some(idx) if idx >= total_len => { - bx.sess().dcx().emit_err( - InvalidMonomorphization::ShuffleIndexOutOfBounds { - span, - name, - arg_idx, - total_len, - }, - ); + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx, + total_len, + }); None } Some(idx) => Some(bx.const_i32(idx as i32)), @@ -1184,10 +1175,22 @@ fn generic_simd_intrinsic<'ll, 'tcx>( out_ty: arg_tys[2] } ); + let idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + return Ok(bx.const_null(llret_ty)); + } return Ok(bx.insert_element( args[0].immediate(), args[2].immediate(), - args[1].immediate(), + bx.const_i32(idx as i32), )); } if name == sym::simd_extract { @@ -1195,7 +1198,19 @@ fn generic_simd_intrinsic<'ll, 'tcx>( ret_ty == in_elem, InvalidMonomorphization::ReturnType { span, name, in_elem, in_ty, ret_ty } ); - return Ok(bx.extract_element(args[0].immediate(), args[1].immediate())); + let idx = bx + .const_to_opt_u128(args[1].immediate(), false) + .expect("typeck should have ensure that this is a const"); + if idx >= in_len.into() { + bx.sess().dcx().emit_err(InvalidMonomorphization::SimdIndexOutOfBounds { + span, + name, + arg_idx: 1, + total_len: in_len.into(), + }); + return Ok(bx.const_null(llret_ty)); + } + return Ok(bx.extract_element(args[0].immediate(), bx.const_i32(idx as i32))); } if name == sym::simd_select { diff --git a/compiler/rustc_codegen_ssa/messages.ftl b/compiler/rustc_codegen_ssa/messages.ftl index fa7719d89716e..5ba66d1be4329 100644 --- a/compiler/rustc_codegen_ssa/messages.ftl +++ 
b/compiler/rustc_codegen_ssa/messages.ftl @@ -106,14 +106,12 @@ codegen_ssa_invalid_monomorphization_return_type = invalid monomorphization of ` codegen_ssa_invalid_monomorphization_second_argument_length = invalid monomorphization of `{$name}` intrinsic: expected second argument with length {$in_len} (same as input type `{$in_ty}`), found `{$arg_ty}` with length {$out_len} -codegen_ssa_invalid_monomorphization_shuffle_index_not_constant = invalid monomorphization of `{$name}` intrinsic: shuffle index #{$arg_idx} is not a constant - -codegen_ssa_invalid_monomorphization_shuffle_index_out_of_bounds = invalid monomorphization of `{$name}` intrinsic: shuffle index #{$arg_idx} is out of bounds (limit {$total_len}) - codegen_ssa_invalid_monomorphization_simd_argument = invalid monomorphization of `{$name}` intrinsic: expected SIMD argument type, found non-SIMD `{$ty}` codegen_ssa_invalid_monomorphization_simd_first = invalid monomorphization of `{$name}` intrinsic: expected SIMD first type, found non-SIMD `{$ty}` +codegen_ssa_invalid_monomorphization_simd_index_out_of_bounds = invalid monomorphization of `{$name}` intrinsic: SIMD index #{$arg_idx} is out of bounds (limit {$total_len}) + codegen_ssa_invalid_monomorphization_simd_input = invalid monomorphization of `{$name}` intrinsic: expected SIMD input type, found non-SIMD `{$ty}` codegen_ssa_invalid_monomorphization_simd_return = invalid monomorphization of `{$name}` intrinsic: expected SIMD return type, found non-SIMD `{$ty}` diff --git a/compiler/rustc_codegen_ssa/src/errors.rs b/compiler/rustc_codegen_ssa/src/errors.rs index e42a8bd9ed98d..a7ac502b24837 100644 --- a/compiler/rustc_codegen_ssa/src/errors.rs +++ b/compiler/rustc_codegen_ssa/src/errors.rs @@ -797,16 +797,8 @@ pub enum InvalidMonomorphization<'tcx> { out_ty: Ty<'tcx>, }, - #[diag(codegen_ssa_invalid_monomorphization_shuffle_index_not_constant, code = E0511)] - ShuffleIndexNotConstant { - #[primary_span] - span: Span, - name: Symbol, - arg_idx: u64, - }, - - #[diag(codegen_ssa_invalid_monomorphization_shuffle_index_out_of_bounds, code = E0511)] - ShuffleIndexOutOfBounds { + #[diag(codegen_ssa_invalid_monomorphization_simd_index_out_of_bounds, code = E0511)] + SimdIndexOutOfBounds { #[primary_span] span: Span, name: Symbol, diff --git a/tests/ui/simd/shuffle-not-out-of-bounds.rs b/tests/ui/simd/not-out-of-bounds.rs similarity index 88% rename from tests/ui/simd/shuffle-not-out-of-bounds.rs rename to tests/ui/simd/not-out-of-bounds.rs index 158e9956435da..36d7a5865bc54 100644 --- a/tests/ui/simd/shuffle-not-out-of-bounds.rs +++ b/tests/ui/simd/not-out-of-bounds.rs @@ -1,6 +1,6 @@ //@ build-fail #![allow(non_camel_case_types)] -#![feature(repr_simd, platform_intrinsics)] +#![feature(repr_simd, core_intrinsics)] // Test for #73542 to verify out-of-bounds shuffle vectors do not compile. @@ -28,9 +28,7 @@ struct u8x32([u8; 32]); #[derive(Copy, Clone)] struct u8x64([u8; 64]); -extern "platform-intrinsic" { - pub fn simd_shuffle(x: T, y: T, idx: I) -> U; -} +use std::intrinsics::simd::*; // Test vectors by lane size. 
Since LLVM does not distinguish between a shuffle // over two f32s and a shuffle over two u64s, or any other such combination, @@ -70,13 +68,16 @@ fn main() { test_shuffle_lanes!(32, u8x32, simd_shuffle); test_shuffle_lanes!(64, u8x64, simd_shuffle); - extern "platform-intrinsic" { - fn simd_shuffle(a: T, b: T, i: I) -> U; - } let v = u8x2([0, 0]); const I: [u32; 2] = [4, 4]; unsafe { let _: u8x2 = simd_shuffle(v, v, I); //~^ ERROR invalid monomorphization of `simd_shuffle` intrinsic } + + // also check insert/extract + unsafe { + simd_insert(v, 2, 0); //~ ERROR invalid monomorphization of `simd_insert` intrinsic + let _val: u8 = simd_extract(v, 2); //~ ERROR invalid monomorphization of `simd_extract` intrinsic + } } diff --git a/tests/ui/simd/shuffle-not-out-of-bounds.stderr b/tests/ui/simd/not-out-of-bounds.stderr similarity index 71% rename from tests/ui/simd/shuffle-not-out-of-bounds.stderr rename to tests/ui/simd/not-out-of-bounds.stderr index 59e5ab858664a..5682935c1f1ae 100644 --- a/tests/ui/simd/shuffle-not-out-of-bounds.stderr +++ b/tests/ui/simd/not-out-of-bounds.stderr @@ -1,5 +1,5 @@ -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 4) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 4) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -9,8 +9,8 @@ LL | test_shuffle_lanes!(2, u8x2, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 8) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 8) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -20,8 +20,8 @@ LL | test_shuffle_lanes!(4, u8x4, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 16) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 16) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -31,8 +31,8 @@ LL | test_shuffle_lanes!(8, u8x8, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 32) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 32) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -42,8 +42,8 @@ LL | test_shuffle_lanes!(16, u8x16, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 64) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: 
SIMD index #0 is out of bounds (limit 64) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -53,8 +53,8 @@ LL | test_shuffle_lanes!(32, u8x32, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 128) - --> $DIR/shuffle-not-out-of-bounds.rs:51:21 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 128) + --> $DIR/not-out-of-bounds.rs:49:21 | LL | $y(vec1, vec2, ARR) | ^^^^^^^^^^^^^^^^^^^ @@ -64,12 +64,24 @@ LL | test_shuffle_lanes!(64, u8x64, simd_shuffle); | = note: this error originates in the macro `test_shuffle_lanes` (in Nightly builds, run with -Z macro-backtrace for more info) -error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: shuffle index #0 is out of bounds (limit 4) - --> $DIR/shuffle-not-out-of-bounds.rs:79:23 +error[E0511]: invalid monomorphization of `simd_shuffle` intrinsic: SIMD index #0 is out of bounds (limit 4) + --> $DIR/not-out-of-bounds.rs:74:23 | LL | let _: u8x2 = simd_shuffle(v, v, I); | ^^^^^^^^^^^^^^^^^^^^^ -error: aborting due to 7 previous errors +error[E0511]: invalid monomorphization of `simd_insert` intrinsic: expected inserted type `u8` (element of input `u8x2`), found `i32` + --> $DIR/not-out-of-bounds.rs:80:9 + | +LL | simd_insert(v, 2, 0); + | ^^^^^^^^^^^^^^^^^^^^ + +error[E0511]: invalid monomorphization of `simd_extract` intrinsic: SIMD index #1 is out of bounds (limit 2) + --> $DIR/not-out-of-bounds.rs:81:24 + | +LL | let _val: u8 = simd_extract(v, 2); + | ^^^^^^^^^^^^^^^^^^ + +error: aborting due to 9 previous errors For more information about this error, try `rustc --explain E0511`. From 134e2b2056d3d5519dc3b294ceb1558cf7af276c Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 23 Feb 2024 19:34:17 +0100 Subject: [PATCH 103/134] interpret: do no ICE on OOB shuffle/insert/extract indices --- .../src/interpret/intrinsics.rs | 20 +++++++++++-------- src/tools/miri/src/shims/intrinsics/simd.rs | 10 ++++------ .../tests/fail/intrinsics/simd-extract.rs | 8 ++++++++ .../tests/fail/intrinsics/simd-extract.stderr | 15 ++++++++++++++ 4 files changed, 39 insertions(+), 14 deletions(-) create mode 100644 src/tools/miri/tests/fail/intrinsics/simd-extract.rs create mode 100644 src/tools/miri/tests/fail/intrinsics/simd-extract.stderr diff --git a/compiler/rustc_const_eval/src/interpret/intrinsics.rs b/compiler/rustc_const_eval/src/interpret/intrinsics.rs index f020616f6d8c8..1cb991b38f7e9 100644 --- a/compiler/rustc_const_eval/src/interpret/intrinsics.rs +++ b/compiler/rustc_const_eval/src/interpret/intrinsics.rs @@ -379,10 +379,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { let (input, input_len) = self.operand_to_simd(&args[0])?; let (dest, dest_len) = self.place_to_simd(dest)?; assert_eq!(input_len, dest_len, "Return vector length must match input length"); - assert!( - index < dest_len, - "Index `{index}` must be in bounds of vector with length {dest_len}" - ); + // Bounds are not checked by typeck so we have to do it ourselves. 
+ if index >= input_len { + throw_ub_format!( + "`simd_insert` index {index} is out-of-bounds of vector with length {input_len}" + ); + } for i in 0..dest_len { let place = self.project_index(&dest, i)?; @@ -397,10 +399,12 @@ impl<'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> InterpCx<'mir, 'tcx, M> { sym::simd_extract => { let index = u64::from(self.read_scalar(&args[1])?.to_u32()?); let (input, input_len) = self.operand_to_simd(&args[0])?; - assert!( - index < input_len, - "index `{index}` must be in bounds of vector with length {input_len}" - ); + // Bounds are not checked by typeck so we have to do it ourselves. + if index >= input_len { + throw_ub_format!( + "`simd_extract` index {index} is out-of-bounds of vector with length {input_len}" + ); + } self.copy_op(&self.project_index(&input, index)?, dest)?; } sym::likely | sym::unlikely | sym::black_box => { diff --git a/src/tools/miri/src/shims/intrinsics/simd.rs b/src/tools/miri/src/shims/intrinsics/simd.rs index ea2d104694af7..ca8773cac14b5 100644 --- a/src/tools/miri/src/shims/intrinsics/simd.rs +++ b/src/tools/miri/src/shims/intrinsics/simd.rs @@ -563,9 +563,8 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { let right_idx = src_index.checked_sub(left_len).unwrap(); this.read_immediate(&this.project_index(&right, right_idx)?)? } else { - span_bug!( - this.cur_span(), - "simd_shuffle index {src_index} is out of bounds for 2 vectors of size {left_len}", + throw_ub_format!( + "`simd_shuffle_generic` index {src_index} is out-of-bounds for 2 vectors with length {dest_len}" ); }; this.write_immediate(*val, &dest)?; @@ -604,9 +603,8 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { let right_idx = src_index.checked_sub(left_len).unwrap(); this.read_immediate(&this.project_index(&right, right_idx)?)? 
} else { - span_bug!( - this.cur_span(), - "simd_shuffle index {src_index} is out of bounds for 2 vectors of size {left_len}", + throw_ub_format!( + "`simd_shuffle` index {src_index} is out-of-bounds for 2 vectors with length {dest_len}" ); }; this.write_immediate(*val, &dest)?; diff --git a/src/tools/miri/tests/fail/intrinsics/simd-extract.rs b/src/tools/miri/tests/fail/intrinsics/simd-extract.rs new file mode 100644 index 0000000000000..02b9d30df5e97 --- /dev/null +++ b/src/tools/miri/tests/fail/intrinsics/simd-extract.rs @@ -0,0 +1,8 @@ +#![feature(portable_simd, core_intrinsics)] +use std::simd::*; + +fn main() { + let v = i32x4::splat(0); + let _x: i32 = unsafe { std::intrinsics::simd::simd_extract(v, 4) }; + //~^ERROR: index 4 is out-of-bounds +} diff --git a/src/tools/miri/tests/fail/intrinsics/simd-extract.stderr b/src/tools/miri/tests/fail/intrinsics/simd-extract.stderr new file mode 100644 index 0000000000000..dc6b22de4925a --- /dev/null +++ b/src/tools/miri/tests/fail/intrinsics/simd-extract.stderr @@ -0,0 +1,15 @@ +error: Undefined Behavior: `simd_extract` index 4 is out-of-bounds of vector with length 4 + --> $DIR/simd-extract.rs:LL:CC + | +LL | let _x: i32 = unsafe { std::intrinsics::simd::simd_extract(v, 4) }; + | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `simd_extract` index 4 is out-of-bounds of vector with length 4 + | + = help: this indicates a bug in the program: it performed an invalid operation, and caused Undefined Behavior + = help: see https://doc.rust-lang.org/nightly/reference/behavior-considered-undefined.html for further information + = note: BACKTRACE: + = note: inside `main` at $DIR/simd-extract.rs:LL:CC + +note: some details are omitted, run with `MIRIFLAGS=-Zmiri-backtrace=full` for a verbose backtrace + +error: aborting due to 1 previous error + From 86a7fc840f11cd1f20fdf26d3071e34a2d4bc313 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Matthias=20Kr=C3=BCger?= Date: Fri, 23 Feb 2024 19:56:35 +0100 Subject: [PATCH 104/134] compiler: clippy::complexity fixes --- compiler/rustc_ast_lowering/src/delegation.rs | 2 +- .../src/diagnostics/conflict_errors.rs | 2 +- .../src/diagnostics/outlives_suggestion.rs | 5 ++-- .../src/diagnostics/region_errors.rs | 2 +- .../rustc_const_eval/src/interpret/intern.rs | 4 ++-- compiler/rustc_errors/src/lib.rs | 2 +- compiler/rustc_expand/src/mbe/transcribe.rs | 15 +++--------- compiler/rustc_hir_typeck/src/_match.rs | 4 ++-- .../rustc_hir_typeck/src/fn_ctxt/_impl.rs | 4 ++-- .../rustc_hir_typeck/src/fn_ctxt/checks.rs | 5 ++-- .../src/fn_ctxt/suggestions.rs | 23 +++++++----------- .../rustc_hir_typeck/src/method/suggest.rs | 8 ++----- compiler/rustc_hir_typeck/src/pat.rs | 2 +- .../src/infer/error_reporting/mod.rs | 2 +- compiler/rustc_middle/src/ty/print/mod.rs | 2 +- .../rustc_mir_build/src/thir/pattern/mod.rs | 19 ++++++--------- compiler/rustc_parse/src/validate_attr.rs | 2 +- .../rustc_pattern_analysis/src/constructor.rs | 16 +++++-------- .../rustc_resolve/src/late/diagnostics.rs | 2 +- compiler/rustc_session/src/session.rs | 2 +- compiler/rustc_smir/src/rustc_smir/context.rs | 3 +-- .../src/traits/error_reporting/suggestions.rs | 24 +++++++++---------- .../src/traits/project.rs | 7 ++---- 23 files changed, 62 insertions(+), 95 deletions(-) diff --git a/compiler/rustc_ast_lowering/src/delegation.rs b/compiler/rustc_ast_lowering/src/delegation.rs index d1ba93f067553..77dd03d15f520 100644 --- a/compiler/rustc_ast_lowering/src/delegation.rs +++ b/compiler/rustc_ast_lowering/src/delegation.rs @@ -138,7 +138,7 
@@ impl<'hir> LoweringContext<'_, 'hir> { } else { self.tcx.fn_arg_names(sig_id).len() }; - let inputs = self.arena.alloc_from_iter((0..args_count).into_iter().map(|arg| hir::Ty { + let inputs = self.arena.alloc_from_iter((0..args_count).map(|arg| hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::InferDelegation(sig_id, hir::InferDelegationKind::Input(arg)), span: self.lower_span(param_span), diff --git a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs index e1509da913a56..3c6bd1d36fd37 100644 --- a/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/conflict_errors.rs @@ -1559,7 +1559,7 @@ impl<'cx, 'tcx> MirBorrowckCtxt<'cx, 'tcx> { // A bare path doesn't need a `let` assignment, it's already a simple // binding access. // As a new binding wasn't added, we don't need to modify the advancing call. - sugg.push((loop_span.with_hi(pat_span.lo()), format!("while let Some("))); + sugg.push((loop_span.with_hi(pat_span.lo()), "while let Some(".to_string())); sugg.push(( pat_span.shrink_to_hi().with_hi(head.span.lo()), ") = ".to_string(), diff --git a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs index b2c7a98142eec..6beae61ca7f09 100644 --- a/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs +++ b/compiler/rustc_borrowck/src/diagnostics/outlives_suggestion.rs @@ -134,14 +134,13 @@ impl OutlivesSuggestionBuilder { for (r, bound) in unified.into_iter() { if !unified_already.contains(fr) { - suggested.push(SuggestedConstraint::Equal(fr_name.clone(), bound)); + suggested.push(SuggestedConstraint::Equal(fr_name, bound)); unified_already.insert(r); } } if !other.is_empty() { - let other = - other.iter().map(|(_, rname)| rname.clone()).collect::>(); + let other = other.iter().map(|(_, rname)| *rname).collect::>(); suggested.push(SuggestedConstraint::Outlives(fr_name, other)) } } diff --git a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs index 50d22881c3e61..e586c58781cf1 100644 --- a/compiler/rustc_borrowck/src/diagnostics/region_errors.rs +++ b/compiler/rustc_borrowck/src/diagnostics/region_errors.rs @@ -280,7 +280,7 @@ impl<'a, 'tcx> MirBorrowckCtxt<'a, 'tcx> { .iter() .rfind(|param| param.def_id.to_def_id() == defid) .is_some() { - suggestions.push((bounded_span.shrink_to_hi(), format!(" + 'static"))); + suggestions.push((bounded_span.shrink_to_hi(), " + 'static".to_string())); } }); }); diff --git a/compiler/rustc_const_eval/src/interpret/intern.rs b/compiler/rustc_const_eval/src/interpret/intern.rs index 959ec2ca86558..82ce9ecd21d18 100644 --- a/compiler/rustc_const_eval/src/interpret/intern.rs +++ b/compiler/rustc_const_eval/src/interpret/intern.rs @@ -140,7 +140,7 @@ pub fn intern_const_alloc_recursive< alloc.1.mutability = base_mutability; alloc.1.provenance().ptrs().iter().map(|&(_, prov)| prov).collect() } else { - intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().map(|prov| prov).collect() + intern_shallow(ecx, base_alloc_id, base_mutability).unwrap().collect() }; // We need to distinguish "has just been interned" from "was already in `tcx`", // so we track this in a separate set. @@ -277,7 +277,7 @@ impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>> // We are not doing recursive interning, so we don't currently support provenance. 
// (If this assertion ever triggers, we should just implement a // proper recursive interning loop -- or just call `intern_const_alloc_recursive`. - if !self.tcx.try_get_global_alloc(prov.alloc_id()).is_some() { + if self.tcx.try_get_global_alloc(prov.alloc_id()).is_none() { panic!("`intern_with_temp_alloc` with nested allocations"); } } diff --git a/compiler/rustc_errors/src/lib.rs b/compiler/rustc_errors/src/lib.rs index ada8fa2e96544..3f667e264e85f 100644 --- a/compiler/rustc_errors/src/lib.rs +++ b/compiler/rustc_errors/src/lib.rs @@ -844,7 +844,7 @@ impl DiagCtxt { .emitted_diagnostic_codes .iter() .filter_map(|&code| { - if registry.try_find_description(code).is_ok().clone() { + if registry.try_find_description(code).is_ok() { Some(code.to_string()) } else { None diff --git a/compiler/rustc_expand/src/mbe/transcribe.rs b/compiler/rustc_expand/src/mbe/transcribe.rs index 519e4a634d8ab..4a18055d4ca0a 100644 --- a/compiler/rustc_expand/src/mbe/transcribe.rs +++ b/compiler/rustc_expand/src/mbe/transcribe.rs @@ -555,23 +555,14 @@ fn count_repetitions<'a>( ) -> PResult<'a, usize> { // Recursively count the number of matches in `matched` at given depth // (or at the top-level of `matched` if no depth is given). - fn count<'a>( - cx: &ExtCtxt<'a>, - depth_curr: usize, - depth_max: usize, - matched: &NamedMatch, - sp: &DelimSpan, - ) -> PResult<'a, usize> { + fn count<'a>(depth_curr: usize, depth_max: usize, matched: &NamedMatch) -> PResult<'a, usize> { match matched { MatchedTokenTree(_) | MatchedNonterminal(_) => Ok(1), MatchedSeq(named_matches) => { if depth_curr == depth_max { Ok(named_matches.len()) } else { - named_matches - .iter() - .map(|elem| count(cx, depth_curr + 1, depth_max, elem, sp)) - .sum() + named_matches.iter().map(|elem| count(depth_curr + 1, depth_max, elem)).sum() } } } @@ -612,7 +603,7 @@ fn count_repetitions<'a>( return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() })); } - count(cx, depth_user, depth_max, matched, sp) + count(depth_user, depth_max, matched) } /// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident] diff --git a/compiler/rustc_hir_typeck/src/_match.rs b/compiler/rustc_hir_typeck/src/_match.rs index cb131f1d1669e..e852ee0f049b6 100644 --- a/compiler/rustc_hir_typeck/src/_match.rs +++ b/compiler/rustc_hir_typeck/src/_match.rs @@ -401,12 +401,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { // check that the `if` expr without `else` is the fn body's expr if expr.span == sp { - return self.get_fn_decl(hir_id).and_then(|(_, fn_decl, _)| { + return self.get_fn_decl(hir_id).map(|(_, fn_decl, _)| { let (ty, span) = match fn_decl.output { hir::FnRetTy::DefaultReturn(span) => ("()".to_string(), span), hir::FnRetTy::Return(ty) => (ty_to_string(ty), ty.span), }; - Some((span, format!("expected `{ty}` because of this return type"))) + (span, format!("expected `{ty}` because of this return type")) }); } } diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs index 9303e437a968e..7d448820cebe9 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/_impl.rs @@ -846,7 +846,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let item_name = item_segment.ident; let result = self .resolve_fully_qualified_call(span, item_name, ty.normalized, qself.span, hir_id) - .and_then(|r| { + .map(|r| { // lint bare trait if the method is found in the trait if span.edition().at_least_rust_2021() && let Some(diag) = @@ -854,7 +854,7 @@ impl<'a, 'tcx> 
FnCtxt<'a, 'tcx> { { diag.emit(); } - Ok(r) + r }) .or_else(|error| { let guar = self diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs index 4bc86435482fb..75e4dd5a61c19 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/checks.rs @@ -555,9 +555,8 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { let args = self.infcx.fresh_args_for_item(call_name.span, assoc.def_id); let fn_sig = tcx.fn_sig(assoc.def_id).instantiate(tcx, args); - let fn_sig = - self.instantiate_binder_with_fresh_vars(call_name.span, FnCall, fn_sig); - Some((assoc, fn_sig)); + + self.instantiate_binder_with_fresh_vars(call_name.span, FnCall, fn_sig); } None }; diff --git a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs index e57717c25d93d..f09af99995711 100644 --- a/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs +++ b/compiler/rustc_hir_typeck/src/fn_ctxt/suggestions.rs @@ -1061,20 +1061,15 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return; } - let scope = self - .tcx - .hir() - .parent_iter(id) - .filter(|(_, node)| { - matches!( - node, - Node::Expr(Expr { kind: ExprKind::Closure(..), .. }) - | Node::Item(_) - | Node::TraitItem(_) - | Node::ImplItem(_) - ) - }) - .next(); + let scope = self.tcx.hir().parent_iter(id).find(|(_, node)| { + matches!( + node, + Node::Expr(Expr { kind: ExprKind::Closure(..), .. }) + | Node::Item(_) + | Node::TraitItem(_) + | Node::ImplItem(_) + ) + }); let in_closure = matches!(scope, Some((_, Node::Expr(Expr { kind: ExprKind::Closure(..), .. })))); diff --git a/compiler/rustc_hir_typeck/src/method/suggest.rs b/compiler/rustc_hir_typeck/src/method/suggest.rs index babdbce000515..f0586328835ce 100644 --- a/compiler/rustc_hir_typeck/src/method/suggest.rs +++ b/compiler/rustc_hir_typeck/src/method/suggest.rs @@ -370,9 +370,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; if let Some(file) = file { err.note(format!("the full type name has been written to '{}'", file.display())); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note("consider using `--verbose` to print the full type name to the console"); } err @@ -497,9 +495,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { if let Some(file) = ty_file { err.note(format!("the full type name has been written to '{}'", file.display(),)); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note("consider using `--verbose` to print the full type name to the console"); } if rcvr_ty.references_error() { err.downgrade_to_delayed_bug(); diff --git a/compiler/rustc_hir_typeck/src/pat.rs b/compiler/rustc_hir_typeck/src/pat.rs index b15c9ef901877..c28c1c7760303 100644 --- a/compiler/rustc_hir_typeck/src/pat.rs +++ b/compiler/rustc_hir_typeck/src/pat.rs @@ -2035,7 +2035,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { slice: Option<&'tcx Pat<'tcx>>, span: Span, ) -> Option> { - if !slice.is_none() { + if slice.is_some() { return None; } diff --git a/compiler/rustc_infer/src/infer/error_reporting/mod.rs b/compiler/rustc_infer/src/infer/error_reporting/mod.rs index d40f3f501f58a..e245dee4dafd7 100644 --- a/compiler/rustc_infer/src/infer/error_reporting/mod.rs +++ b/compiler/rustc_infer/src/infer/error_reporting/mod.rs @@ -1938,7 +1938,7 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", path.display(), )); - diag.note(format!("consider using `--verbose` to print the full type name to 
the console")); + diag.note("consider using `--verbose` to print the full type name to the console"); } } } diff --git a/compiler/rustc_middle/src/ty/print/mod.rs b/compiler/rustc_middle/src/ty/print/mod.rs index 19f8ba124f143..520fc1dd7aa4d 100644 --- a/compiler/rustc_middle/src/ty/print/mod.rs +++ b/compiler/rustc_middle/src/ty/print/mod.rs @@ -139,7 +139,7 @@ pub trait Printer<'tcx>: Sized { _, hir::CoroutineSource::Closure, )) = self.tcx().coroutine_kind(def_id) - && args.len() >= parent_args.len() + 1 + && args.len() > parent_args.len() { return self.path_generic_args( |cx| cx.print_def_path(def_id, parent_args), diff --git a/compiler/rustc_mir_build/src/thir/pattern/mod.rs b/compiler/rustc_mir_build/src/thir/pattern/mod.rs index 0329e1d3096dc..0b03cb52373a4 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/mod.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/mod.rs @@ -223,19 +223,14 @@ impl<'a, 'tcx> PatCtxt<'a, 'tcx> { // If we are handling a range with associated constants (e.g. // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated // constants somewhere. Have them on the range pattern. - for ascr in [lo_ascr, hi_ascr] { - if let Some(ascription) = ascr { - kind = PatKind::AscribeUserType { - ascription, - subpattern: Box::new(Pat { span, ty, kind }), - }; - } + for ascription in [lo_ascr, hi_ascr].into_iter().flatten() { + kind = PatKind::AscribeUserType { + ascription, + subpattern: Box::new(Pat { span, ty, kind }), + }; } - for inline_const in [lo_inline, hi_inline] { - if let Some(def) = inline_const { - kind = - PatKind::InlineConstant { def, subpattern: Box::new(Pat { span, ty, kind }) }; - } + for def in [lo_inline, hi_inline].into_iter().flatten() { + kind = PatKind::InlineConstant { def, subpattern: Box::new(Pat { span, ty, kind }) }; } Ok(kind) } diff --git a/compiler/rustc_parse/src/validate_attr.rs b/compiler/rustc_parse/src/validate_attr.rs index b0982029657bc..5d46581f646cc 100644 --- a/compiler/rustc_parse/src/validate_attr.rs +++ b/compiler/rustc_parse/src/validate_attr.rs @@ -88,7 +88,7 @@ pub fn parse_meta<'a>(sess: &'a ParseSess, attr: &Attribute) -> PResult<'a, Meta // results in `ast::ExprKind::Err`. In that case we delay // the error because an earlier error will have already // been reported. 
- let msg = format!("attribute value must be a literal"); + let msg = "attribute value must be a literal"; let mut err = sess.dcx.struct_span_err(expr.span, msg); if let ast::ExprKind::Err = expr.kind { err.downgrade_to_delayed_bug(); diff --git a/compiler/rustc_pattern_analysis/src/constructor.rs b/compiler/rustc_pattern_analysis/src/constructor.rs index 24824682b74cd..483986969d167 100644 --- a/compiler/rustc_pattern_analysis/src/constructor.rs +++ b/compiler/rustc_pattern_analysis/src/constructor.rs @@ -694,18 +694,14 @@ impl Clone for Constructor { fn clone(&self) -> Self { match self { Constructor::Struct => Constructor::Struct, - Constructor::Variant(idx) => Constructor::Variant(idx.clone()), + Constructor::Variant(idx) => Constructor::Variant(*idx), Constructor::Ref => Constructor::Ref, - Constructor::Slice(slice) => Constructor::Slice(slice.clone()), + Constructor::Slice(slice) => Constructor::Slice(*slice), Constructor::UnionField => Constructor::UnionField, - Constructor::Bool(b) => Constructor::Bool(b.clone()), - Constructor::IntRange(range) => Constructor::IntRange(range.clone()), - Constructor::F32Range(lo, hi, end) => { - Constructor::F32Range(lo.clone(), hi.clone(), end.clone()) - } - Constructor::F64Range(lo, hi, end) => { - Constructor::F64Range(lo.clone(), hi.clone(), end.clone()) - } + Constructor::Bool(b) => Constructor::Bool(*b), + Constructor::IntRange(range) => Constructor::IntRange(*range), + Constructor::F32Range(lo, hi, end) => Constructor::F32Range(lo.clone(), *hi, *end), + Constructor::F64Range(lo, hi, end) => Constructor::F64Range(lo.clone(), *hi, *end), Constructor::Str(value) => Constructor::Str(value.clone()), Constructor::Opaque(inner) => Constructor::Opaque(inner.clone()), Constructor::Or => Constructor::Or, diff --git a/compiler/rustc_resolve/src/late/diagnostics.rs b/compiler/rustc_resolve/src/late/diagnostics.rs index 335bf0949d62e..51723fc81a01f 100644 --- a/compiler/rustc_resolve/src/late/diagnostics.rs +++ b/compiler/rustc_resolve/src/late/diagnostics.rs @@ -1582,7 +1582,7 @@ impl<'a: 'ast, 'ast, 'tcx> LateResolutionVisitor<'a, '_, 'ast, 'tcx> { None => ("/* fields */".to_string(), Applicability::HasPlaceholders), }; let pad = match field_ids { - Some(field_ids) if field_ids.is_empty() => "", + Some([]) => "", _ => " ", }; err.span_suggestion( diff --git a/compiler/rustc_session/src/session.rs b/compiler/rustc_session/src/session.rs index 02c7a0c6371f9..b6c1948689892 100644 --- a/compiler/rustc_session/src/session.rs +++ b/compiler/rustc_session/src/session.rs @@ -1264,7 +1264,7 @@ fn validate_commandline_args_with_session_available(sess: &Session) { // LLVM CFI using rustc LTO requires a single codegen unit. 
if sess.is_sanitizer_cfi_enabled() && sess.lto() == config::Lto::Fat - && !(sess.codegen_units().as_usize() == 1) + && (sess.codegen_units().as_usize() != 1) { sess.dcx().emit_err(errors::SanitizerCfiRequiresSingleCodegenUnit); } diff --git a/compiler/rustc_smir/src/rustc_smir/context.rs b/compiler/rustc_smir/src/rustc_smir/context.rs index b95186b0a1c5b..540bc48354866 100644 --- a/compiler/rustc_smir/src/rustc_smir/context.rs +++ b/compiler/rustc_smir/src/rustc_smir/context.rs @@ -208,11 +208,10 @@ impl<'tcx> Context for TablesWrapper<'tcx> { let crates: Vec = [LOCAL_CRATE] .iter() .chain(tables.tcx.crates(()).iter()) - .map(|crate_num| { + .filter_map(|crate_num| { let crate_name = tables.tcx.crate_name(*crate_num).to_string(); (name == crate_name).then(|| smir_crate(tables.tcx, *crate_num)) }) - .flatten() .collect(); crates } diff --git a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs index 8ae31392b40e6..85f6da0d6cc82 100644 --- a/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs +++ b/compiler/rustc_trait_selection/src/traits/error_reporting/suggestions.rs @@ -1283,9 +1283,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display() )); - err.note(format!( - "consider using `--verbose` to print full type name to the console" - )); + err.note( + "consider using `--verbose` to print full type name to the console", + ); } if imm_ref_self_ty_satisfies_pred && mut_ref_self_ty_satisfies_pred { @@ -2869,9 +2869,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full name for the type has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } } ObligationCauseCode::RepeatElementCopy { @@ -3339,9 +3339,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } let mut parent_predicate = parent_trait_pred; let mut data = &data.derived; @@ -3395,9 +3395,9 @@ impl<'tcx> TypeErrCtxt<'_, 'tcx> { "the full type name has been written to '{}'", file.display(), )); - err.note(format!( - "consider using `--verbose` to print the full type name to the console" - )); + err.note( + "consider using `--verbose` to print the full type name to the console", + ); } } // #74711: avoid a stack overflow diff --git a/compiler/rustc_trait_selection/src/traits/project.rs b/compiler/rustc_trait_selection/src/traits/project.rs index f8de19043e1bc..68c03e3c73e74 100644 --- a/compiler/rustc_trait_selection/src/traits/project.rs +++ b/compiler/rustc_trait_selection/src/traits/project.rs @@ -1031,12 +1031,9 @@ fn assemble_candidates_from_impls<'cx, 'tcx>( { candidate_set.mark_ambiguous(); true - } else if obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() - && obligation.predicate.args.type_at(1).to_opt_closure_kind().is_some() - { - true } else { - false + obligation.predicate.args.type_at(0).to_opt_closure_kind().is_some() + && obligation.predicate.args.type_at(1).to_opt_closure_kind().is_some() } } else if lang_items.discriminant_kind_trait() == Some(trait_ref.def_id) { match self_ty.kind() { From 
7bc0435c9a966e491d7cc8c9c083ad9ea1ea6569 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 23 Feb 2024 20:17:54 +0100 Subject: [PATCH 105/134] cargo update --- src/tools/miri/Cargo.lock | 244 +++++++++----------------- src/tools/miri/cargo-miri/Cargo.lock | 176 ++++++++++++------- src/tools/miri/miri-script/Cargo.lock | 100 +++++------ 3 files changed, 248 insertions(+), 272 deletions(-) diff --git a/src/tools/miri/Cargo.lock b/src/tools/miri/Cargo.lock index 87dc51bd61255..1adae2b7a2275 100644 --- a/src/tools/miri/Cargo.lock +++ b/src/tools/miri/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "aes" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac1f845298e95f983ff1944b728ae08b8cebab80d684f0a832ed0fc74dfa27e2" +checksum = "b169f7a6d4742236a0a00c541b845991d0ac43e546831af1249753ab4c3aa3a0" dependencies = [ "cfg-if", "cipher", @@ -58,9 +58,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "autocfg" @@ -91,9 +91,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bstr" @@ -117,9 +117,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceed8ef69d8518a5dda55c07425450b58a4e1946f4951eab6d7191ee86c2443d" +checksum = "694c8807f2ae16faecc43dc17d74b3eb042482789fd0eb64b39a2e04e087053f" dependencies = [ "serde", ] @@ -140,12 +140,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.0.83" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" -dependencies = [ - "libc", -] +checksum = "7f9fa1897e4325be0d68d48df6aa1a71ac2ed4d27723887e7754192705350730" [[package]] name = "cfg-if" @@ -208,15 +205,15 @@ checksum = "55b672471b4e9f9e95499ea597ff64941a309b2cdbffcc46f2cc5e2d971fd335" [[package]] name = "console" -version = "0.15.7" +version = "0.15.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +checksum = "0e1f83fc076bd6dd27517eacdf25fef6c4dfe5f1d7448bafaaf3a26f13b5e4eb" dependencies = [ "encode_unicode", "lazy_static", "libc", "unicode-width", - "windows-sys 0.45.0", + "windows-sys 0.52.0", ] [[package]] @@ -230,22 +227,18 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.10" +version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82a9b73a36529d9c47029b9fb3a6f0ea3cc916a261195352ba19e770fc1748b2" +checksum = "176dc175b78f56c0f321911d9c8eb2b77a78a4860b9c19db83835fea1a46649b" dependencies = [ - "cfg-if", 
"crossbeam-utils", ] [[package]] name = "crossbeam-utils" -version = "0.8.18" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3a430a770ebd84726f584a90ee7f020d28db52c6d02138900f22341f866d39c" -dependencies = [ - "cfg-if", -] +checksum = "248e3bacc7dc6baa3b21e405ee045c3047101a49145e7e9eca583ab4c2ca5345" [[package]] name = "crypto-common" @@ -285,9 +278,9 @@ dependencies = [ [[package]] name = "eyre" -version = "0.6.11" +version = "0.6.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6267a1fa6f59179ea4afc8e50fd8612a3cc60bc858f786ff877a4a8cb042799" +checksum = "7cd915d99f24784cdc19fd37ef22b97e3ff0ae756c7e492e9fbfe897d61e2aec" dependencies = [ "indenter", "once_cell", @@ -311,9 +304,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -334,9 +327,9 @@ checksum = "ce23b50ad8242c51a442f3ff322d56b02f08852c77e4c0b4d3fd684abc89c683" [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" dependencies = [ "console", "instant", @@ -393,9 +386,9 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "libc" -version = "0.2.151" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libffi" @@ -428,9 +421,9 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "lock_api" @@ -479,9 +472,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7810e0be55b428ada41041c41f32c9f1a42817901b4ccf45fa3d4b6561e74c7" +checksum = "9d811f3e15f28568be3407c8e7fdb6514c1cda3cb30683f15b6a1a1dc4ea14a7" dependencies = [ "adler", ] @@ -513,7 +506,7 @@ version = "0.27.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "cfg-if", "libc", ] @@ -616,9 +609,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -673,9 +666,9 @@ dependencies 
= [ [[package]] name = "regex" -version = "1.10.2" +version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", @@ -685,9 +678,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", @@ -735,11 +728,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", @@ -748,9 +741,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "scopeguard" @@ -760,27 +753,27 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" dependencies = [ "serde", ] [[package]] name = "serde" -version = "1.0.195" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.195" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", @@ -789,9 +782,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.111" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -809,15 +802,15 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.11.2" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" [[package]] name = "syn" -version = "2.0.48" +version = "2.0.50" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" dependencies = [ "proc-macro2", "quote", @@ -826,31 +819,30 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.9.0" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", "rustix", "windows-sys 0.52.0", ] [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", @@ -859,9 +851,9 @@ dependencies = [ [[package]] name = "thread_local" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c" dependencies = [ "cfg-if", "once_cell", @@ -993,15 +985,6 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" -[[package]] -name = "windows-sys" -version = "0.45.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" -dependencies = [ - "windows-targets 0.42.2", -] - [[package]] name = "windows-sys" version = "0.48.0" @@ -1017,22 +1000,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", -] - -[[package]] -name = "windows-targets" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" -dependencies = [ - "windows_aarch64_gnullvm 0.42.2", - "windows_aarch64_msvc 0.42.2", - "windows_i686_gnu 0.42.2", - "windows_i686_msvc 0.42.2", - "windows_x86_64_gnu 0.42.2", - "windows_x86_64_gnullvm 0.42.2", - "windows_x86_64_msvc 0.42.2", + "windows-targets 0.52.3", ] [[package]] @@ -1052,25 +1020,19 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - 
"windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.3", + "windows_aarch64_msvc 0.52.3", + "windows_i686_gnu 0.52.3", + "windows_i686_msvc 0.52.3", + "windows_x86_64_gnu 0.52.3", + "windows_x86_64_gnullvm 0.52.3", + "windows_x86_64_msvc 0.52.3", ] -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -1079,15 +1041,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - -[[package]] -name = "windows_aarch64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" +checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6" [[package]] name = "windows_aarch64_msvc" @@ -1097,15 +1053,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - -[[package]] -name = "windows_i686_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" +checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f" [[package]] name = "windows_i686_gnu" @@ -1115,15 +1065,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - -[[package]] -name = "windows_i686_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" +checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb" [[package]] name = "windows_i686_msvc" @@ -1133,15 +1077,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - -[[package]] -name = "windows_x86_64_gnu" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" +checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58" [[package]] name = "windows_x86_64_gnu" @@ -1151,15 +1089,9 @@ checksum = 
"53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" +checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614" [[package]] name = "windows_x86_64_gnullvm" @@ -1169,15 +1101,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - -[[package]] -name = "windows_x86_64_msvc" -version = "0.42.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" +checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c" [[package]] name = "windows_x86_64_msvc" @@ -1187,9 +1113,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6" [[package]] name = "yansi-term" diff --git a/src/tools/miri/cargo-miri/Cargo.lock b/src/tools/miri/cargo-miri/Cargo.lock index fe384c9d497e1..6841a345ce198 100644 --- a/src/tools/miri/cargo-miri/Cargo.lock +++ b/src/tools/miri/cargo-miri/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "bitflags" @@ -16,9 +16,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "camino" @@ -44,9 +44,9 @@ dependencies = [ [[package]] name = "cargo-platform" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ceed8ef69d8518a5dda55c07425450b58a4e1946f4951eab6d7191ee86c2443d" +checksum = "694c8807f2ae16faecc43dc17d74b3eb042482789fd0eb64b39a2e04e087053f" dependencies = [ "serde", ] @@ -110,9 +110,9 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -127,9 +127,9 @@ checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "libc" -version = "0.2.151" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libredox" @@ -137,16 +137,16 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "libc", "redox_syscall", ] [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "option-ext" @@ -156,9 +156,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -194,13 +194,14 @@ dependencies = [ [[package]] name = "rustc-build-sysroot" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39dcf8d82b1f79a179bdb284dc44db440a9666eefa5a6df5ef282d6db930d544" +checksum = "a26170e1d79ea32f7ccec3188dd13cfc1f18c82764a9cbc1071667c0f865a4ea" dependencies = [ "anyhow", "rustc_version", "tempfile", + "walkdir", ] [[package]] @@ -220,11 +221,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", @@ -233,33 +234,42 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" + +[[package]] +name = "same-file" +version = "1.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502" +dependencies = [ + "winapi-util", +] [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" dependencies = [ "serde", ] 
[[package]] name = "serde" -version = "1.0.195" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63261df402c67811e9ac6def069e4786148c4563f4b50fd4bf30aa370d626b02" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.195" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46fe8f8603d81ba86327b23a2e9cdf49e1255fb94a4c5f297f6ee0547178ea2c" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2", "quote", @@ -268,9 +278,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.111" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "176e46fa42316f18edd598015a5166857fc835ec732f5215eac6b7bdbf0a84f4" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "itoa", "ryu", @@ -279,9 +289,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" dependencies = [ "proc-macro2", "quote", @@ -290,31 +300,30 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.9.0" +version = "3.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" +checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", "rustix", "windows-sys 0.52.0", ] [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", @@ -327,12 +336,53 @@ version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +[[package]] +name = "walkdir" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +dependencies = [ + "same-file", + "winapi-util", +] + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-util" +version = "0.1.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +dependencies = [ + "winapi", +] + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + [[package]] name = "windows-sys" version = "0.48.0" @@ -348,7 +398,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.3", ] [[package]] @@ -368,17 +418,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.3", + "windows_aarch64_msvc 0.52.3", + "windows_i686_gnu 0.52.3", + "windows_i686_msvc 0.52.3", + "windows_x86_64_gnu 0.52.3", + "windows_x86_64_gnullvm 0.52.3", + "windows_x86_64_msvc 0.52.3", ] [[package]] @@ -389,9 +439,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6" [[package]] name = "windows_aarch64_msvc" @@ -401,9 +451,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f" [[package]] name = "windows_i686_gnu" @@ -413,9 +463,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb" [[package]] name = "windows_i686_msvc" @@ -425,9 +475,9 @@ checksum = 
"8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58" [[package]] name = "windows_x86_64_gnu" @@ -437,9 +487,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614" [[package]] name = "windows_x86_64_gnullvm" @@ -449,9 +499,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c" [[package]] name = "windows_x86_64_msvc" @@ -461,6 +511,6 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6" diff --git a/src/tools/miri/miri-script/Cargo.lock b/src/tools/miri/miri-script/Cargo.lock index 04615f3d8c165..a6f7467f0a2f7 100644 --- a/src/tools/miri/miri-script/Cargo.lock +++ b/src/tools/miri/miri-script/Cargo.lock @@ -4,9 +4,9 @@ version = 3 [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "bitflags" @@ -16,9 +16,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "cfg-if" @@ -55,9 +55,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" [[package]] name = "errno" @@ -71,9 +71,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = 
"190092ea657667030ac6a35e305e62fc4dd69fd98ac98631e5d3a2b1575a12b5" dependencies = [ "cfg-if", "libc", @@ -100,9 +100,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.151" +version = "0.2.153" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "9c198f91728a82281a64e1f4f9eeb25d82cb32a5de251c6bd1b5154d63a8e7bd" [[package]] name = "libredox" @@ -110,16 +110,16 @@ version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "libc", "redox_syscall", ] [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "miri-script" @@ -157,9 +157,9 @@ checksum = "a6e819bbd49d5939f682638fa54826bf1650abddcd65d000923de8ad63cc7d15" [[package]] name = "proc-macro2" -version = "1.0.76" +version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "95fc56cda0b5c3325f5fbbd7ff9fda9e02bb00bb3dac51252d2f1bfa1cb8cc8c" +checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] @@ -204,11 +204,11 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.38.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "6ea3e1a662af26cd7a3ba09c0297a31af215563ecf42817c98df621387f4e949" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", @@ -226,9 +226,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" [[package]] name = "shell-words" @@ -238,9 +238,9 @@ checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" [[package]] name = "syn" -version = "2.0.48" +version = "2.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" dependencies = [ "proc-macro2", "quote", @@ -249,18 +249,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = 
"a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", @@ -347,7 +347,7 @@ version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ - "windows-targets 0.52.0", + "windows-targets 0.52.3", ] [[package]] @@ -367,17 +367,17 @@ dependencies = [ [[package]] name = "windows-targets" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +checksum = "d380ba1dc7187569a8a9e91ed34b8ccfc33123bbacb8c0aed2d1ad7f3ef2dc5f" dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", + "windows_aarch64_gnullvm 0.52.3", + "windows_aarch64_msvc 0.52.3", + "windows_i686_gnu 0.52.3", + "windows_i686_msvc 0.52.3", + "windows_x86_64_gnu 0.52.3", + "windows_x86_64_gnullvm 0.52.3", + "windows_x86_64_msvc 0.52.3", ] [[package]] @@ -388,9 +388,9 @@ checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" [[package]] name = "windows_aarch64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" +checksum = "68e5dcfb9413f53afd9c8f86e56a7b4d86d9a2fa26090ea2dc9e40fba56c6ec6" [[package]] name = "windows_aarch64_msvc" @@ -400,9 +400,9 @@ checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" [[package]] name = "windows_aarch64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" +checksum = "8dab469ebbc45798319e69eebf92308e541ce46760b49b18c6b3fe5e8965b30f" [[package]] name = "windows_i686_gnu" @@ -412,9 +412,9 @@ checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" [[package]] name = "windows_i686_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" +checksum = "2a4e9b6a7cac734a8b4138a4e1044eac3404d8326b6c0f939276560687a033fb" [[package]] name = "windows_i686_msvc" @@ -424,9 +424,9 @@ checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" [[package]] name = "windows_i686_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" +checksum = "28b0ec9c422ca95ff34a78755cfa6ad4a51371da2a5ace67500cf7ca5f232c58" [[package]] name = "windows_x86_64_gnu" @@ -436,9 +436,9 @@ checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" [[package]] name = "windows_x86_64_gnu" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" +checksum = "704131571ba93e89d7cd43482277d6632589b18ecf4468f591fbae0a8b101614" [[package]] name = 
"windows_x86_64_gnullvm" @@ -448,9 +448,9 @@ checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" [[package]] name = "windows_x86_64_gnullvm" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" +checksum = "42079295511643151e98d61c38c0acc444e52dd42ab456f7ccfd5152e8ecf21c" [[package]] name = "windows_x86_64_msvc" @@ -460,9 +460,9 @@ checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" [[package]] name = "windows_x86_64_msvc" -version = "0.52.0" +version = "0.52.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" +checksum = "0770833d60a970638e989b3fa9fd2bb1aaadcf88963d1659fd7d9990196ed2d6" [[package]] name = "xshell" From 0c3a524acbbaa1ddf6e3dc014bd51da570db7c79 Mon Sep 17 00:00:00 2001 From: Lukas Wirth Date: Fri, 23 Feb 2024 20:22:01 +0100 Subject: [PATCH 106/134] Fix: Fix metrics CI failing --- .github/workflows/metrics.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/metrics.yaml b/.github/workflows/metrics.yaml index 87a1bd53a5c54..de61b2389ae27 100644 --- a/.github/workflows/metrics.yaml +++ b/.github/workflows/metrics.yaml @@ -146,7 +146,7 @@ jobs: chmod 700 ~/.ssh git clone --depth 1 git@github.com:rust-analyzer/metrics.git - jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5] * .[6]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json + jq -s ".[0] * .[1] * .[2] * .[3] * .[4] * .[5]" build.json self.json ripgrep-13.0.0.json webrender-2022.json diesel-1.4.8.json hyper-0.14.18.json -c >> metrics/metrics.json cd metrics git add . git -c user.name=Bot -c user.email=dummy@example.com commit --message 📈 From 64779737db21f9e506f4562d9923e7037e15ecfb Mon Sep 17 00:00:00 2001 From: David Barsky Date: Fri, 23 Feb 2024 15:34:23 -0500 Subject: [PATCH 107/134] internal: fix deadlock introduced by #16643 --- crates/salsa/src/derived.rs | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/crates/salsa/src/derived.rs b/crates/salsa/src/derived.rs index bf532bdccf64d..153df999f5349 100644 --- a/crates/salsa/src/derived.rs +++ b/crates/salsa/src/derived.rs @@ -146,11 +146,14 @@ where revision: Revision, ) -> bool { debug_assert!(revision < db.salsa_runtime().current_revision()); - let read = &self.slot_map.read(); + let read = self.slot_map.read(); let Some((key, slot)) = read.get_index(index as usize) else { return false; }; - slot.maybe_changed_after(db, revision, key) + let (key, slot) = (key.clone(), slot.clone()); + // note: this drop is load-bearing. removing it would causes deadlocks. + drop(read); + slot.maybe_changed_after(db, revision, &key) } fn fetch(&self, db: &>::DynDb, key: &Q::Key) -> Q::Value { From 7234c9893ded40e746877be7d5e4c53a606b36a1 Mon Sep 17 00:00:00 2001 From: Wojciech Geisler Date: Sat, 24 Feb 2024 00:34:40 +0200 Subject: [PATCH 108/134] Fix incorrect doc of ScopedJoinHandle::is_finished Fixes the explanation how to use is_finished to achieve a non-blocking join. The updated version matches the documentation of the non-scoped JoinHandle::is_finished. 
--- library/std/src/thread/scoped.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/std/src/thread/scoped.rs b/library/std/src/thread/scoped.rs index ada69aa8269f6..7b11e7c17b187 100644 --- a/library/std/src/thread/scoped.rs +++ b/library/std/src/thread/scoped.rs @@ -311,7 +311,7 @@ impl<'scope, T> ScopedJoinHandle<'scope, T> { /// Checks if the associated thread has finished running its main function. /// /// `is_finished` supports implementing a non-blocking join operation, by checking - /// `is_finished`, and calling `join` if it returns `false`. This function does not block. To + /// `is_finished`, and calling `join` if it returns `true`. This function does not block. To /// block while waiting on the thread to finish, use [`join`][Self::join]. /// /// This might return `true` for a brief moment after the thread's main From 2f3c0b98593044c00492f4287e27ef9733121c97 Mon Sep 17 00:00:00 2001 From: Ben Kimock Date: Fri, 23 Feb 2024 18:04:01 -0500 Subject: [PATCH 109/134] Ignore less tests in debug builds --- src/tools/compiletest/src/runtest.rs | 7 +++++-- tests/assembly/option-nonzero-eq.rs | 1 - tests/assembly/slice-is_ascii.rs | 1 - tests/assembly/static-relocation-model.rs | 1 - tests/codegen/align-offset.rs | 1 - tests/codegen/array-map.rs | 1 - tests/codegen/ascii-char.rs | 1 - tests/codegen/binary-search-index-no-bound-check.rs | 1 - tests/codegen/infallible-unwrap-in-opt-z.rs | 1 - tests/codegen/issue-97217.rs | 1 - tests/codegen/issues/issue-101082.rs | 1 - tests/codegen/issues/issue-101814.rs | 1 - tests/codegen/issues/issue-106369.rs | 1 - tests/codegen/issues/issue-116878.rs | 1 - tests/codegen/issues/issue-37945.rs | 1 - tests/codegen/issues/issue-45222.rs | 1 - tests/codegen/issues/issue-45466.rs | 1 - tests/codegen/issues/issue-45964-bounds-check-slice-pos.rs | 1 - tests/codegen/issues/issue-69101-bounds-check.rs | 1 - tests/codegen/issues/issue-73258.rs | 1 - .../issues/issue-73396-bounds-check-after-position.rs | 1 - .../issues/issue-98294-get-mut-copy-from-slice-opt.rs | 1 - tests/codegen/iter-repeat-n-trivial-drop.rs | 1 - tests/codegen/layout-size-checks.rs | 1 - tests/codegen/lib-optimizations/iter-sum.rs | 1 - tests/codegen/mem-replace-big-type.rs | 2 +- tests/codegen/mem-replace-simple-type.rs | 2 +- tests/codegen/ptr-arithmetic.rs | 1 - tests/codegen/ptr-read-metadata.rs | 1 - tests/codegen/simd/simd-wide-sum.rs | 1 - tests/codegen/simd/swap-simd-types.rs | 1 - tests/codegen/slice-as_chunks.rs | 1 - tests/codegen/slice-indexing.rs | 1 - tests/codegen/slice-iter-fold.rs | 1 - tests/codegen/slice-iter-len-eq-zero.rs | 1 - tests/codegen/slice-iter-nonnull.rs | 1 - tests/codegen/slice-ref-equality.rs | 1 - tests/codegen/slice-reverse.rs | 2 +- tests/codegen/slice_as_from_ptr_range.rs | 1 - tests/codegen/swap-large-types.rs | 1 - tests/codegen/swap-small-types.rs | 1 - tests/codegen/transmute-optimized.rs | 1 - tests/codegen/unchecked_shifts.rs | 1 - tests/codegen/unwind-landingpad-inline.rs | 1 - tests/codegen/vec-calloc.rs | 1 - tests/codegen/vec-in-place.rs | 2 +- tests/codegen/vec-iter-collect-len.rs | 1 - tests/codegen/vec-iter.rs | 1 - tests/codegen/vec-optimizes-away.rs | 1 - tests/codegen/vec-reserve-extend.rs | 2 -- tests/codegen/vec-shrink-panik.rs | 2 +- tests/codegen/vec_pop_push_noop.rs | 2 -- tests/codegen/vecdeque-drain.rs | 2 +- tests/codegen/vecdeque-nonempty-get-no-panic.rs | 1 - tests/codegen/vecdeque_no_panic.rs | 2 +- tests/codegen/virtual-function-elimination.rs | 1 - tests/mir-opt/pre-codegen/slice_filter.rs | 1 - 
...slice_filter.variant_a-{closure#0}.PreCodegen.after.mir | 2 +- ...slice_filter.variant_b-{closure#0}.PreCodegen.after.mir | 2 +- 59 files changed, 14 insertions(+), 62 deletions(-) diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index 27a8079d893fa..61211a7d67538 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -2503,8 +2503,11 @@ impl<'test> TestCx<'test> { // overridden by `compile-flags`. rustc.arg("-Copt-level=2"); } - RunPassValgrind | Pretty | DebugInfo | Codegen | Rustdoc | RustdocJson | RunMake - | CodegenUnits | JsDocTest | Assembly => { + Assembly | Codegen => { + rustc.arg("-Cdebug-assertions=no"); + } + RunPassValgrind | Pretty | DebugInfo | Rustdoc | RustdocJson | RunMake + | CodegenUnits | JsDocTest => { // do not use JSON output } } diff --git a/tests/assembly/option-nonzero-eq.rs b/tests/assembly/option-nonzero-eq.rs index d6ec586d938b0..b04cf63fd7837 100644 --- a/tests/assembly/option-nonzero-eq.rs +++ b/tests/assembly/option-nonzero-eq.rs @@ -5,7 +5,6 @@ //@ compile-flags: --crate-type=lib -O -C llvm-args=-x86-asm-syntax=intel //@ only-x86_64 //@ ignore-sgx -//@ ignore-debug use std::cmp::Ordering; diff --git a/tests/assembly/slice-is_ascii.rs b/tests/assembly/slice-is_ascii.rs index 0b7643952149c..3a050347d8981 100644 --- a/tests/assembly/slice-is_ascii.rs +++ b/tests/assembly/slice-is_ascii.rs @@ -5,7 +5,6 @@ //@ compile-flags: --crate-type=lib -O -C llvm-args=-x86-asm-syntax=intel //@ only-x86_64 //@ ignore-sgx -//@ ignore-debug #![feature(str_internals)] diff --git a/tests/assembly/static-relocation-model.rs b/tests/assembly/static-relocation-model.rs index 975818bf94fb0..50527b8534513 100644 --- a/tests/assembly/static-relocation-model.rs +++ b/tests/assembly/static-relocation-model.rs @@ -6,7 +6,6 @@ //@ [A64] needs-llvm-components: aarch64 //@ [ppc64le] compile-flags: --target powerpc64le-unknown-linux-gnu -Crelocation-model=static //@ [ppc64le] needs-llvm-components: powerpc -//@ ignore-debug: alignment checks insert panics that we don't have a lang item for #![feature(no_core, lang_items)] #![no_core] diff --git a/tests/codegen/align-offset.rs b/tests/codegen/align-offset.rs index 9819dc209668e..15b11f413cb56 100644 --- a/tests/codegen/align-offset.rs +++ b/tests/codegen/align-offset.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug (debug assertions in `slice::from_raw_parts` block optimizations) #![crate_type = "lib"] diff --git a/tests/codegen/array-map.rs b/tests/codegen/array-map.rs index b356f8f33f9f1..743a15989f78e 100644 --- a/tests/codegen/array-map.rs +++ b/tests/codegen/array-map.rs @@ -1,6 +1,5 @@ //@ compile-flags: -C opt-level=3 -C target-cpu=x86-64-v3 //@ only-x86_64 -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/ascii-char.rs b/tests/codegen/ascii-char.rs index 30f285096ed8d..fab9f8632fcad 100644 --- a/tests/codegen/ascii-char.rs +++ b/tests/codegen/ascii-char.rs @@ -1,5 +1,4 @@ //@ compile-flags: -C opt-level=1 -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] #![feature(ascii_char)] diff --git a/tests/codegen/binary-search-index-no-bound-check.rs b/tests/codegen/binary-search-index-no-bound-check.rs index d2627d67142f1..96f6bb54b3fe0 100644 --- a/tests/codegen/binary-search-index-no-bound-check.rs +++ b/tests/codegen/binary-search-index-no-bound-check.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = 
"lib"] // Make sure no bounds checks are emitted when slicing or indexing diff --git a/tests/codegen/infallible-unwrap-in-opt-z.rs b/tests/codegen/infallible-unwrap-in-opt-z.rs index cbcba03ad0a98..3756fafe3840c 100644 --- a/tests/codegen/infallible-unwrap-in-opt-z.rs +++ b/tests/codegen/infallible-unwrap-in-opt-z.rs @@ -1,5 +1,4 @@ //@ compile-flags: -C opt-level=z --edition=2021 -//@ ignore-debug #![crate_type = "lib"] diff --git a/tests/codegen/issue-97217.rs b/tests/codegen/issue-97217.rs index 93dd1228ce123..a0dfff2ef2eb9 100644 --- a/tests/codegen/issue-97217.rs +++ b/tests/codegen/issue-97217.rs @@ -1,5 +1,4 @@ //@ compile-flags: -C opt-level=3 -//@ ignore-debug: the debug assertions get in the way //@ min-llvm-version: 17.0.2 #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-101082.rs b/tests/codegen/issues/issue-101082.rs index 7c96f9a34f8cb..550d267a98fa2 100644 --- a/tests/codegen/issues/issue-101082.rs +++ b/tests/codegen/issues/issue-101082.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-101814.rs b/tests/codegen/issues/issue-101814.rs index 6175d80c9cde1..e3843e9edb0bd 100644 --- a/tests/codegen/issues/issue-101814.rs +++ b/tests/codegen/issues/issue-101814.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-106369.rs b/tests/codegen/issues/issue-106369.rs index 5120c5f4e4903..fd375e4e60584 100644 --- a/tests/codegen/issues/issue-106369.rs +++ b/tests/codegen/issues/issue-106369.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-116878.rs b/tests/codegen/issues/issue-116878.rs index 2c561d7be799b..a09fac42c0182 100644 --- a/tests/codegen/issues/issue-116878.rs +++ b/tests/codegen/issues/issue-116878.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] /// Make sure no bounds checks are emitted after a `get_unchecked`. diff --git a/tests/codegen/issues/issue-37945.rs b/tests/codegen/issues/issue-37945.rs index 3f750157a8182..756a75e2f0ed3 100644 --- a/tests/codegen/issues/issue-37945.rs +++ b/tests/codegen/issues/issue-37945.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O -Zmerge-functions=disabled //@ ignore-32bit LLVM has a bug with them -//@ ignore-debug // Check that LLVM understands that `Iter` pointer is not null. Issue #37945. 
diff --git a/tests/codegen/issues/issue-45222.rs b/tests/codegen/issues/issue-45222.rs index 8fa9d87f4970f..d2c1ba421c45e 100644 --- a/tests/codegen/issues/issue-45222.rs +++ b/tests/codegen/issues/issue-45222.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-45466.rs b/tests/codegen/issues/issue-45466.rs index 165bc3ca41126..fc714247dfb6d 100644 --- a/tests/codegen/issues/issue-45466.rs +++ b/tests/codegen/issues/issue-45466.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type="rlib"] diff --git a/tests/codegen/issues/issue-45964-bounds-check-slice-pos.rs b/tests/codegen/issues/issue-45964-bounds-check-slice-pos.rs index c9bc7fc316e94..b7568bea4d0a3 100644 --- a/tests/codegen/issues/issue-45964-bounds-check-slice-pos.rs +++ b/tests/codegen/issues/issue-45964-bounds-check-slice-pos.rs @@ -2,7 +2,6 @@ // prevent optimizing away bounds checks //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type="rlib"] diff --git a/tests/codegen/issues/issue-69101-bounds-check.rs b/tests/codegen/issues/issue-69101-bounds-check.rs index f96a8e9da4b4e..c014a1c1b1d43 100644 --- a/tests/codegen/issues/issue-69101-bounds-check.rs +++ b/tests/codegen/issues/issue-69101-bounds-check.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] // Make sure no bounds checks are emitted in the loop when upfront slicing diff --git a/tests/codegen/issues/issue-73258.rs b/tests/codegen/issues/issue-73258.rs index bc71e15a41fe2..48f14fe2dfe3e 100644 --- a/tests/codegen/issues/issue-73258.rs +++ b/tests/codegen/issues/issue-73258.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/issues/issue-73396-bounds-check-after-position.rs b/tests/codegen/issues/issue-73396-bounds-check-after-position.rs index db91a85474d2b..ef4538ac84e1b 100644 --- a/tests/codegen/issues/issue-73396-bounds-check-after-position.rs +++ b/tests/codegen/issues/issue-73396-bounds-check-after-position.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] // Make sure no bounds checks are emitted when slicing or indexing diff --git a/tests/codegen/issues/issue-98294-get-mut-copy-from-slice-opt.rs b/tests/codegen/issues/issue-98294-get-mut-copy-from-slice-opt.rs index 9f65222b38697..40827e32a0124 100644 --- a/tests/codegen/issues/issue-98294-get-mut-copy-from-slice-opt.rs +++ b/tests/codegen/issues/issue-98294-get-mut-copy-from-slice-opt.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: The debug assertions get in the way //@ compile-flags: -O #![crate_type = "lib"] diff --git a/tests/codegen/iter-repeat-n-trivial-drop.rs b/tests/codegen/iter-repeat-n-trivial-drop.rs index d0838a3e860d4..31020b77984ba 100644 --- a/tests/codegen/iter-repeat-n-trivial-drop.rs +++ b/tests/codegen/iter-repeat-n-trivial-drop.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-x86_64 -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] #![feature(iter_repeat_n)] diff --git a/tests/codegen/layout-size-checks.rs b/tests/codegen/layout-size-checks.rs index 55c2e86b40bbd..901f8f822f320 100644 --- a/tests/codegen/layout-size-checks.rs +++ b/tests/codegen/layout-size-checks.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-x86_64 -//@ ignore-debug: the debug 
assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/lib-optimizations/iter-sum.rs b/tests/codegen/lib-optimizations/iter-sum.rs index 6b6d61a30660a..b563a6debb52b 100644 --- a/tests/codegen/lib-optimizations/iter-sum.rs +++ b/tests/codegen/lib-optimizations/iter-sum.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: the debug assertions get in the way //@ compile-flags: -O //@ only-x86_64 (vectorization varies between architectures) #![crate_type = "lib"] diff --git a/tests/codegen/mem-replace-big-type.rs b/tests/codegen/mem-replace-big-type.rs index 0234b63aba5e1..c71cbbd08f9ac 100644 --- a/tests/codegen/mem-replace-big-type.rs +++ b/tests/codegen/mem-replace-big-type.rs @@ -4,7 +4,7 @@ // known to be `1` after inlining). //@ compile-flags: -C no-prepopulate-passes -Zinline-mir=no -//@ ignore-debug: the debug assertions get in the way +//@ ignore-debug: precondition checks in ptr::read make them a bad candidate for MIR inlining #![crate_type = "lib"] diff --git a/tests/codegen/mem-replace-simple-type.rs b/tests/codegen/mem-replace-simple-type.rs index b6885aad9e402..b00fbad05d9c5 100644 --- a/tests/codegen/mem-replace-simple-type.rs +++ b/tests/codegen/mem-replace-simple-type.rs @@ -1,6 +1,6 @@ //@ compile-flags: -O -C no-prepopulate-passes //@ only-x86_64 (to not worry about usize differing) -//@ ignore-debug (the debug assertions get in the way) +//@ ignore-debug: precondition checks make mem::replace not a candidate for MIR inlining #![crate_type = "lib"] diff --git a/tests/codegen/ptr-arithmetic.rs b/tests/codegen/ptr-arithmetic.rs index 3a8bfee84ecfb..6f115d33d8ddf 100644 --- a/tests/codegen/ptr-arithmetic.rs +++ b/tests/codegen/ptr-arithmetic.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -Z merge-functions=disabled -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/ptr-read-metadata.rs b/tests/codegen/ptr-read-metadata.rs index 622a1cec4ac69..4c623dee5e1e9 100644 --- a/tests/codegen/ptr-read-metadata.rs +++ b/tests/codegen/ptr-read-metadata.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -Z merge-functions=disabled -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/simd/simd-wide-sum.rs b/tests/codegen/simd/simd-wide-sum.rs index 010500139e595..2edee552ca69a 100644 --- a/tests/codegen/simd/simd-wide-sum.rs +++ b/tests/codegen/simd/simd-wide-sum.rs @@ -1,7 +1,6 @@ //@ revisions: llvm mir-opt3 //@ compile-flags: -C opt-level=3 -Z merge-functions=disabled --edition=2021 //@ only-x86_64 -//@ ignore-debug: the debug assertions get in the way //@ [mir-opt3]compile-flags: -Zmir-opt-level=3 //@ [mir-opt3]build-pass diff --git a/tests/codegen/simd/swap-simd-types.rs b/tests/codegen/simd/swap-simd-types.rs index e03e2d4ff8dfd..32e75220d696a 100644 --- a/tests/codegen/simd/swap-simd-types.rs +++ b/tests/codegen/simd/swap-simd-types.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O -C target-feature=+avx //@ only-x86_64 -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/slice-as_chunks.rs b/tests/codegen/slice-as_chunks.rs index c9cd482a9a4ea..631d18d780951 100644 --- a/tests/codegen/slice-as_chunks.rs +++ b/tests/codegen/slice-as_chunks.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-64bit (because the LLVM type of i64 for usize shows up) -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] #![feature(slice_as_chunks)] diff --git a/tests/codegen/slice-indexing.rs b/tests/codegen/slice-indexing.rs index 
52714a76a8dd2..ecce920107197 100644 --- a/tests/codegen/slice-indexing.rs +++ b/tests/codegen/slice-indexing.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-64bit (because the LLVM type of i64 for usize shows up) -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/slice-iter-fold.rs b/tests/codegen/slice-iter-fold.rs index 5a9d789b98464..1770cd4a11994 100644 --- a/tests/codegen/slice-iter-fold.rs +++ b/tests/codegen/slice-iter-fold.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: the debug assertions get in the way //@ compile-flags: -O #![crate_type = "lib"] diff --git a/tests/codegen/slice-iter-len-eq-zero.rs b/tests/codegen/slice-iter-len-eq-zero.rs index 43c64511e527b..b2a4b2495b6a2 100644 --- a/tests/codegen/slice-iter-len-eq-zero.rs +++ b/tests/codegen/slice-iter-len-eq-zero.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug: the debug assertions add extra comparisons #![crate_type = "lib"] type Demo = [u8; 3]; diff --git a/tests/codegen/slice-iter-nonnull.rs b/tests/codegen/slice-iter-nonnull.rs index c82ae3b61b412..c960688b00c18 100644 --- a/tests/codegen/slice-iter-nonnull.rs +++ b/tests/codegen/slice-iter-nonnull.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug (these add extra checks that make it hard to verify) #![crate_type = "lib"] #![feature(exact_size_is_empty)] diff --git a/tests/codegen/slice-ref-equality.rs b/tests/codegen/slice-ref-equality.rs index d34aecd1903a7..371e685ec6c92 100644 --- a/tests/codegen/slice-ref-equality.rs +++ b/tests/codegen/slice-ref-equality.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -Zmerge-functions=disabled -//@ ignore-debug (the extra assertions get in the way) #![crate_type = "lib"] diff --git a/tests/codegen/slice-reverse.rs b/tests/codegen/slice-reverse.rs index 40bc89bc9d0cf..21add929f051f 100644 --- a/tests/codegen/slice-reverse.rs +++ b/tests/codegen/slice-reverse.rs @@ -1,6 +1,6 @@ //@ compile-flags: -O //@ only-x86_64 -//@ ignore-debug: the debug assertions in from_raw_parts get in the way +//@ ignore-debug: debug assertions prevent generating shufflevector #![crate_type = "lib"] diff --git a/tests/codegen/slice_as_from_ptr_range.rs b/tests/codegen/slice_as_from_ptr_range.rs index cc811e8f5894c..47c60461c0e81 100644 --- a/tests/codegen/slice_as_from_ptr_range.rs +++ b/tests/codegen/slice_as_from_ptr_range.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-64bit (because we're using [ui]size) -//@ ignore-debug (because the assertions get in the way) #![crate_type = "lib"] #![feature(slice_from_ptr_range)] diff --git a/tests/codegen/swap-large-types.rs b/tests/codegen/swap-large-types.rs index 2069789081403..b182f3ed94798 100644 --- a/tests/codegen/swap-large-types.rs +++ b/tests/codegen/swap-large-types.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O //@ only-x86_64 -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/swap-small-types.rs b/tests/codegen/swap-small-types.rs index 8d7f9f49eef64..5fdf4a5804a9c 100644 --- a/tests/codegen/swap-small-types.rs +++ b/tests/codegen/swap-small-types.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O -Z merge-functions=disabled //@ only-x86_64 -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/transmute-optimized.rs b/tests/codegen/transmute-optimized.rs index 43d2a55c9955e..9217def76b519 100644 --- a/tests/codegen/transmute-optimized.rs +++ b/tests/codegen/transmute-optimized.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -Z merge-functions=disabled -//@ 
ignore-debug #![crate_type = "lib"] diff --git a/tests/codegen/unchecked_shifts.rs b/tests/codegen/unchecked_shifts.rs index 4e351d8d33356..9cf2f2b0cb673 100644 --- a/tests/codegen/unchecked_shifts.rs +++ b/tests/codegen/unchecked_shifts.rs @@ -1,5 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug (because unchecked is checked in debug) #![crate_type = "lib"] #![feature(unchecked_shifts)] diff --git a/tests/codegen/unwind-landingpad-inline.rs b/tests/codegen/unwind-landingpad-inline.rs index 953ba5e901a20..77ef8d2a5fe75 100644 --- a/tests/codegen/unwind-landingpad-inline.rs +++ b/tests/codegen/unwind-landingpad-inline.rs @@ -1,6 +1,5 @@ //@ min-llvm-version: 17.0.2 //@ compile-flags: -Copt-level=3 -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] // This test checks that we can inline drop_in_place in diff --git a/tests/codegen/vec-calloc.rs b/tests/codegen/vec-calloc.rs index bae344ab01d62..f88ed7ae8a555 100644 --- a/tests/codegen/vec-calloc.rs +++ b/tests/codegen/vec-calloc.rs @@ -1,6 +1,5 @@ //@ compile-flags: -O -Z merge-functions=disabled //@ only-x86_64 -//@ ignore-debug #![crate_type = "lib"] diff --git a/tests/codegen/vec-in-place.rs b/tests/codegen/vec-in-place.rs index 3ac2ec13d4764..7a175dc4f7e1c 100644 --- a/tests/codegen/vec-in-place.rs +++ b/tests/codegen/vec-in-place.rs @@ -1,4 +1,4 @@ -//@ ignore-debug: the debug assertions get in the way +//@ ignore-debug: FIXME: checks for call detect scoped noalias metadata //@ compile-flags: -O -Z merge-functions=disabled #![crate_type = "lib"] diff --git a/tests/codegen/vec-iter-collect-len.rs b/tests/codegen/vec-iter-collect-len.rs index 0c225abd326d8..e4242c5740239 100644 --- a/tests/codegen/vec-iter-collect-len.rs +++ b/tests/codegen/vec-iter-collect-len.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: the debug assertions get in the way //@ compile-flags: -O #![crate_type="lib"] diff --git a/tests/codegen/vec-iter.rs b/tests/codegen/vec-iter.rs index 47e11af5bc3d1..310680969c4fe 100644 --- a/tests/codegen/vec-iter.rs +++ b/tests/codegen/vec-iter.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: the debug assertions get in the way //@ compile-flags: -O #![crate_type = "lib"] #![feature(exact_size_is_empty)] diff --git a/tests/codegen/vec-optimizes-away.rs b/tests/codegen/vec-optimizes-away.rs index 9cbfbc115b046..77a94b0b4294a 100644 --- a/tests/codegen/vec-optimizes-away.rs +++ b/tests/codegen/vec-optimizes-away.rs @@ -1,4 +1,3 @@ -//@ ignore-debug: the debug assertions get in the way //@ compile-flags: -O #![crate_type = "lib"] diff --git a/tests/codegen/vec-reserve-extend.rs b/tests/codegen/vec-reserve-extend.rs index 12795937bc817..1f00f7d206339 100644 --- a/tests/codegen/vec-reserve-extend.rs +++ b/tests/codegen/vec-reserve-extend.rs @@ -1,6 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug -// (with debug assertions turned on, `assert_unchecked` generates a real assertion) #![crate_type = "lib"] diff --git a/tests/codegen/vec-shrink-panik.rs b/tests/codegen/vec-shrink-panik.rs index 33b70300bf415..4e996b234f98d 100644 --- a/tests/codegen/vec-shrink-panik.rs +++ b/tests/codegen/vec-shrink-panik.rs @@ -4,7 +4,7 @@ //@ [old]ignore-llvm-version: 17 - 99 //@ [new]min-llvm-version: 17 //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way +//@ ignore-debug: plain old debug assertions //@ needs-unwind #![crate_type = "lib"] #![feature(shrink_to)] diff --git a/tests/codegen/vec_pop_push_noop.rs b/tests/codegen/vec_pop_push_noop.rs index 5a2009b01d7ce..83765d1085419 100644 --- a/tests/codegen/vec_pop_push_noop.rs 
+++ b/tests/codegen/vec_pop_push_noop.rs @@ -1,6 +1,4 @@ //@ compile-flags: -O -//@ ignore-debug -// (with debug assertions turned on, `assert_unchecked` generates a real assertion) #![crate_type = "lib"] diff --git a/tests/codegen/vecdeque-drain.rs b/tests/codegen/vecdeque-drain.rs index cd549f8ebd4a6..31fcf035f1154 100644 --- a/tests/codegen/vecdeque-drain.rs +++ b/tests/codegen/vecdeque-drain.rs @@ -1,7 +1,7 @@ // Check that draining at the front or back doesn't copy memory. //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way +//@ ignore-debug: FIXME: checks for call detect scoped noalias metadata #![crate_type = "lib"] diff --git a/tests/codegen/vecdeque-nonempty-get-no-panic.rs b/tests/codegen/vecdeque-nonempty-get-no-panic.rs index 1128b4ed7a442..3f802de9eeed7 100644 --- a/tests/codegen/vecdeque-nonempty-get-no-panic.rs +++ b/tests/codegen/vecdeque-nonempty-get-no-panic.rs @@ -1,7 +1,6 @@ // Guards against regression for optimization discussed in issue #80836 //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way #![crate_type = "lib"] diff --git a/tests/codegen/vecdeque_no_panic.rs b/tests/codegen/vecdeque_no_panic.rs index 57b6b2abbea9a..be2c4810ebc44 100644 --- a/tests/codegen/vecdeque_no_panic.rs +++ b/tests/codegen/vecdeque_no_panic.rs @@ -1,7 +1,7 @@ // This test checks that `VecDeque::front[_mut]()` and `VecDeque::back[_mut]()` can't panic. //@ compile-flags: -O -//@ ignore-debug: the debug assertions get in the way +//@ ignore-debug: plain old debug assertions #![crate_type = "lib"] diff --git a/tests/codegen/virtual-function-elimination.rs b/tests/codegen/virtual-function-elimination.rs index bf4a74085ed80..6c391d9114b42 100644 --- a/tests/codegen/virtual-function-elimination.rs +++ b/tests/codegen/virtual-function-elimination.rs @@ -1,6 +1,5 @@ //@ compile-flags: -Zvirtual-function-elimination -Clto -O -Csymbol-mangling-version=v0 //@ ignore-32bit -//@ ignore-debug // CHECK: @vtable.0 = {{.*}}, !type ![[TYPE0:[0-9]+]], !vcall_visibility ![[VCALL_VIS0:[0-9]+]] // CHECK: @vtable.1 = {{.*}}, !type ![[TYPE1:[0-9]+]], !vcall_visibility ![[VCALL_VIS0:[0-9]+]] diff --git a/tests/mir-opt/pre-codegen/slice_filter.rs b/tests/mir-opt/pre-codegen/slice_filter.rs index 45686f0bf8806..35881ff2b18c6 100644 --- a/tests/mir-opt/pre-codegen/slice_filter.rs +++ b/tests/mir-opt/pre-codegen/slice_filter.rs @@ -1,6 +1,5 @@ // skip-filecheck //@ compile-flags: -O -Zmir-opt-level=2 -Cdebuginfo=2 -//@ ignore-debug: standard library debug assertions add a panic that breaks this optimization #![crate_type = "lib"] diff --git a/tests/mir-opt/pre-codegen/slice_filter.variant_a-{closure#0}.PreCodegen.after.mir b/tests/mir-opt/pre-codegen/slice_filter.variant_a-{closure#0}.PreCodegen.after.mir index 7370da5629c18..65cac0a81ef13 100644 --- a/tests/mir-opt/pre-codegen/slice_filter.variant_a-{closure#0}.PreCodegen.after.mir +++ b/tests/mir-opt/pre-codegen/slice_filter.variant_a-{closure#0}.PreCodegen.after.mir @@ -1,6 +1,6 @@ // MIR for `variant_a::{closure#0}` after PreCodegen -fn variant_a::{closure#0}(_1: &mut {closure@$DIR/slice_filter.rs:8:25: 8:39}, _2: &&(usize, usize, usize, usize)) -> bool { +fn variant_a::{closure#0}(_1: &mut {closure@$DIR/slice_filter.rs:7:25: 7:39}, _2: &&(usize, usize, usize, usize)) -> bool { let mut _0: bool; let mut _3: &(usize, usize, usize, usize); let _4: &usize; diff --git a/tests/mir-opt/pre-codegen/slice_filter.variant_b-{closure#0}.PreCodegen.after.mir 
b/tests/mir-opt/pre-codegen/slice_filter.variant_b-{closure#0}.PreCodegen.after.mir index 5477796512c57..d9e118d879a6a 100644 --- a/tests/mir-opt/pre-codegen/slice_filter.variant_b-{closure#0}.PreCodegen.after.mir +++ b/tests/mir-opt/pre-codegen/slice_filter.variant_b-{closure#0}.PreCodegen.after.mir @@ -1,6 +1,6 @@ // MIR for `variant_b::{closure#0}` after PreCodegen -fn variant_b::{closure#0}(_1: &mut {closure@$DIR/slice_filter.rs:12:25: 12:41}, _2: &&(usize, usize, usize, usize)) -> bool { +fn variant_b::{closure#0}(_1: &mut {closure@$DIR/slice_filter.rs:11:25: 11:41}, _2: &&(usize, usize, usize, usize)) -> bool { let mut _0: bool; let mut _3: &(usize, usize, usize, usize); let _4: usize; From 8aaa04b5c5c1d47ee4c4622908f303b8b9632979 Mon Sep 17 00:00:00 2001 From: zachs18 <8355914+zachs18@users.noreply.github.com> Date: Fri, 23 Feb 2024 20:18:04 -0600 Subject: [PATCH 110/134] Apply suggestions from code review Co-authored-by: Amanieu d'Antras --- library/std/src/sync/mutex.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/library/std/src/sync/mutex.rs b/library/std/src/sync/mutex.rs index 1b05247bb7c7c..7b4b53e83dbab 100644 --- a/library/std/src/sync/mutex.rs +++ b/library/std/src/sync/mutex.rs @@ -612,7 +612,7 @@ impl<'a, T: ?Sized> MutexGuard<'a, T> { F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard // was created, and have been upheld throughout `map` and/or `try_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. @@ -720,7 +720,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { F: FnOnce(&mut T) -> &mut U, U: ?Sized, { - // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard // was created, and have been upheld throughout `map` and/or `try_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. @@ -751,7 +751,7 @@ impl<'a, T: ?Sized> MappedMutexGuard<'a, T> { F: FnOnce(&mut T) -> Option<&mut U>, U: ?Sized, { - // SAFETY: the conditions of `MutedGuard::new` were satisfied when the original guard + // SAFETY: the conditions of `MutexGuard::new` were satisfied when the original guard // was created, and have been upheld throughout `map` and/or `try_map`. // The signature of the closure guarantees that it will not "leak" the lifetime of the reference // passed to it. If the closure panics, the guard will be dropped. From e0bfa5c2940eb926c3b7d8d85f6f351a767d6664 Mon Sep 17 00:00:00 2001 From: Gurinder Singh Date: Sat, 24 Feb 2024 10:34:09 +0530 Subject: [PATCH 111/134] Rustdoc: include crate name in links for local primitives It makes the link easier to use in cases in which the path of the page where it will be embedded is not known beforehand, such as when we generate impls dynamically from the `register_type_impls` method in `main.js`. Earlier, for local primitives, we would generate a path that was relative to the current page depth passed in `cx.current`. For example, if the current page was `std::simd::prelude::Simd` the generated path would be `../../primitive..html`. After this change the path will first take you to the website root and add the crate name.
e.g. for `std::simd::prelude::Simd` the path now will be `../../../std/primitive..html` --- src/librustdoc/html/format.rs | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index bb68c84f529a7..973036a40982c 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -879,11 +879,16 @@ fn primitive_link_fragment( match m.primitive_locations.get(&prim) { Some(&def_id) if def_id.is_local() => { let len = cx.current.len(); - let len = if len == 0 { 0 } else { len - 1 }; + let path = if len == 0 { + let cname_sym = ExternalCrate { crate_num: def_id.krate }.name(cx.tcx()); + format!("{cname_sym}/") + } else { + "../".repeat(len - 1) + }; write!( f, "", - "../".repeat(len), + path, prim.as_sym() )?; needs_termination = true; From 30429f8ece19d701cff8f5c547c2069fbf651cfd Mon Sep 17 00:00:00 2001 From: Graeme Read Date: Sat, 24 Feb 2024 06:45:00 +0000 Subject: [PATCH 112/134] feat: Add short flag -V for consistency with other rust tooling --- crates/rust-analyzer/src/cli/flags.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rust-analyzer/src/cli/flags.rs b/crates/rust-analyzer/src/cli/flags.rs index 493e614dce682..3f68c5d053b7b 100644 --- a/crates/rust-analyzer/src/cli/flags.rs +++ b/crates/rust-analyzer/src/cli/flags.rs @@ -30,7 +30,7 @@ xflags::xflags! { default cmd lsp-server { /// Print version. - optional --version + optional -V, --version /// Dump a LSP config JSON schema. optional --print-config-schema From de4efa5e46d219f7622659c7a7551411520adf76 Mon Sep 17 00:00:00 2001 From: Guillaume Boisseau Date: Sat, 24 Feb 2024 09:17:26 +0100 Subject: [PATCH 113/134] Tweak debug!() call Co-authored-by: matthewjasper <20113453+matthewjasper@users.noreply.github.com> --- compiler/rustc_mir_build/src/build/matches/mod.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_mir_build/src/build/matches/mod.rs b/compiler/rustc_mir_build/src/build/matches/mod.rs index fab8f9d2254d9..60245a8c4b66c 100644 --- a/compiler/rustc_mir_build/src/build/matches/mod.rs +++ b/compiler/rustc_mir_build/src/build/matches/mod.rs @@ -1582,7 +1582,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { let mut test = self.test(match_pair); let match_place = match_pair.place.clone(); - debug!("test_candidates: test={:?} match_pair={:?}", test, match_pair); + debug!(?test, ?match_pair); // Most of the time, the test to perform is simply a function of the main candidate; but for // a test like SwitchInt, we may want to add cases based on the candidates that are // available From ff930d4fed085dcff75c7b9df6af3a4e099d2f21 Mon Sep 17 00:00:00 2001 From: Martin Nordholts Date: Sat, 24 Feb 2024 09:46:18 +0100 Subject: [PATCH 114/134] compiler/rustc_target/src/spec/base/apple/tests.rs: Avoid unnecessary large move Fixes: $ MAGIC_EXTRA_RUSTFLAGS=-Zmove-size-limit=4096 ./x test compiler/rustc_target error: moving 6216 bytes --> compiler/rustc_target/src/spec/base/apple/tests.rs:17:19 | 17 | for target in all_sim_targets { | ^^^^^^^^^^^^^^^ value moved from here | = note: The current maximum size is 4096, but it can be customized with the move_size_limit attribute: `#![move_size_limit = "..."]` = note: `-D large-assignments` implied by `-D warnings` = help: to override `-D warnings` add `#[allow(large_assignments)]` --- compiler/rustc_target/src/spec/base/apple/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/compiler/rustc_target/src/spec/base/apple/tests.rs 
b/compiler/rustc_target/src/spec/base/apple/tests.rs index f13058ebc82ad..097039d6c7382 100644 --- a/compiler/rustc_target/src/spec/base/apple/tests.rs +++ b/compiler/rustc_target/src/spec/base/apple/tests.rs @@ -14,7 +14,7 @@ fn simulator_targets_set_abi() { aarch64_apple_watchos_sim::target(), ]; - for target in all_sim_targets { + for target in &all_sim_targets { assert_eq!(target.abi, "sim") } } From 55ee14005f7f5798c7cf4c99e2d127c389c23fa7 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 24 Feb 2024 12:15:21 +0100 Subject: [PATCH 115/134] update stdarch --- library/stdarch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/library/stdarch b/library/stdarch index d5fab978fe1c2..56087ea170d87 160000 --- a/library/stdarch +++ b/library/stdarch @@ -1 +1 @@ -Subproject commit d5fab978fe1c2f0043db0451e9f4857eeba17437 +Subproject commit 56087ea170d878a7a57b3a5725e0c00f5f5cad70 From f32095cd8d43de470c2d3daa5b3cb8f494cfa770 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Fri, 23 Feb 2024 18:00:51 +0100 Subject: [PATCH 116/134] promotion: don't promote int::MIN / -1 --- .../rustc_mir_transform/src/promote_consts.rs | 29 +- tests/ui/consts/promote-not.rs | 4 + tests/ui/consts/promote-not.stderr | 64 +++- tests/ui/consts/promotion.rs | 5 +- .../ui/lint/lint-overflowing-ops.noopt.stderr | 263 +++---------- tests/ui/lint/lint-overflowing-ops.opt.stderr | 359 +++--------------- ...lowing-ops.opt_with_overflow_checks.stderr | 263 +++---------- tests/ui/lint/lint-overflowing-ops.rs | 12 - 8 files changed, 265 insertions(+), 734 deletions(-) diff --git a/compiler/rustc_mir_transform/src/promote_consts.rs b/compiler/rustc_mir_transform/src/promote_consts.rs index 577b8f2080fcd..2e11da4d585ef 100644 --- a/compiler/rustc_mir_transform/src/promote_consts.rs +++ b/compiler/rustc_mir_transform/src/promote_consts.rs @@ -482,17 +482,40 @@ impl<'tcx> Validator<'_, 'tcx> { match op { BinOp::Div | BinOp::Rem => { if lhs_ty.is_integral() { + let sz = lhs_ty.primitive_size(self.tcx); // Integer division: the RHS must be a non-zero const. - let const_val = match rhs { + let rhs_val = match rhs { Operand::Constant(c) => { - c.const_.try_eval_bits(self.tcx, self.param_env) + c.const_.try_eval_scalar_int(self.tcx, self.param_env) } _ => None, }; - match const_val { + match rhs_val.map(|x| x.try_to_uint(sz).unwrap()) { + // for the zero test, int vs uint does not matter Some(x) if x != 0 => {} // okay _ => return Err(Unpromotable), // value not known or 0 -- not okay } + // Furthermore, for signed divison, we also have to exclude `int::MIN / -1`. + if lhs_ty.is_signed() { + match rhs_val.map(|x| x.try_to_int(sz).unwrap()) { + Some(-1) | None => { + // The RHS is -1 or unknown, so we have to be careful. + // But is the LHS int::MIN? + let lhs_val = match lhs { + Operand::Constant(c) => c + .const_ + .try_eval_scalar_int(self.tcx, self.param_env), + _ => None, + }; + let lhs_min = sz.signed_int_min(); + match lhs_val.map(|x| x.try_to_int(sz).unwrap()) { + Some(x) if x != lhs_min => {} // okay + _ => return Err(Unpromotable), // value not known or int::MIN -- not okay + } + } + _ => {} + } + } } } // The remaining operations can never fail. diff --git a/tests/ui/consts/promote-not.rs b/tests/ui/consts/promote-not.rs index 907617052f119..47a06e8a72b71 100644 --- a/tests/ui/consts/promote-not.rs +++ b/tests/ui/consts/promote-not.rs @@ -49,6 +49,10 @@ fn main() { // No promotion of fallible operations. 
let _val: &'static _ = &(1/0); //~ ERROR temporary value dropped while borrowed let _val: &'static _ = &(1/(1-1)); //~ ERROR temporary value dropped while borrowed + let _val: &'static _ = &((1+1)/(1-1)); //~ ERROR temporary value dropped while borrowed + let _val: &'static _ = &(i32::MIN/-1); //~ ERROR temporary value dropped while borrowed + let _val: &'static _ = &(i32::MIN/(0-1)); //~ ERROR temporary value dropped while borrowed + let _val: &'static _ = &(-128i8/-1); //~ ERROR temporary value dropped while borrowed let _val: &'static _ = &(1%0); //~ ERROR temporary value dropped while borrowed let _val: &'static _ = &(1%(1-1)); //~ ERROR temporary value dropped while borrowed let _val: &'static _ = &([1,2,3][4]+1); //~ ERROR temporary value dropped while borrowed diff --git a/tests/ui/consts/promote-not.stderr b/tests/ui/consts/promote-not.stderr index 524d69817217d..67ac5922efd96 100644 --- a/tests/ui/consts/promote-not.stderr +++ b/tests/ui/consts/promote-not.stderr @@ -105,6 +105,50 @@ LL | } error[E0716]: temporary value dropped while borrowed --> $DIR/promote-not.rs:52:29 | +LL | let _val: &'static _ = &((1+1)/(1-1)); + | ---------- ^^^^^^^^^^^^^ creates a temporary value which is freed while still in use + | | + | type annotation requires that borrow lasts for `'static` +... +LL | } + | - temporary value is freed at the end of this statement + +error[E0716]: temporary value dropped while borrowed + --> $DIR/promote-not.rs:53:29 + | +LL | let _val: &'static _ = &(i32::MIN/-1); + | ---------- ^^^^^^^^^^^^^ creates a temporary value which is freed while still in use + | | + | type annotation requires that borrow lasts for `'static` +... +LL | } + | - temporary value is freed at the end of this statement + +error[E0716]: temporary value dropped while borrowed + --> $DIR/promote-not.rs:54:29 + | +LL | let _val: &'static _ = &(i32::MIN/(0-1)); + | ---------- ^^^^^^^^^^^^^^^^ creates a temporary value which is freed while still in use + | | + | type annotation requires that borrow lasts for `'static` +... +LL | } + | - temporary value is freed at the end of this statement + +error[E0716]: temporary value dropped while borrowed + --> $DIR/promote-not.rs:55:29 + | +LL | let _val: &'static _ = &(-128i8/-1); + | ---------- ^^^^^^^^^^^ creates a temporary value which is freed while still in use + | | + | type annotation requires that borrow lasts for `'static` +... 
+LL | } + | - temporary value is freed at the end of this statement + +error[E0716]: temporary value dropped while borrowed + --> $DIR/promote-not.rs:56:29 + | LL | let _val: &'static _ = &(1%0); | ---------- ^^^^^ creates a temporary value which is freed while still in use | | @@ -114,7 +158,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:53:29 + --> $DIR/promote-not.rs:57:29 | LL | let _val: &'static _ = &(1%(1-1)); | ---------- ^^^^^^^^^ creates a temporary value which is freed while still in use @@ -125,7 +169,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:54:29 + --> $DIR/promote-not.rs:58:29 | LL | let _val: &'static _ = &([1,2,3][4]+1); | ---------- ^^^^^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -136,7 +180,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:57:29 + --> $DIR/promote-not.rs:61:29 | LL | let _val: &'static _ = &TEST_DROP; | ---------- ^^^^^^^^^ creates a temporary value which is freed while still in use @@ -147,7 +191,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:59:29 + --> $DIR/promote-not.rs:63:29 | LL | let _val: &'static _ = &&TEST_DROP; | ---------- ^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -158,7 +202,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:59:30 + --> $DIR/promote-not.rs:63:30 | LL | let _val: &'static _ = &&TEST_DROP; | ---------- ^^^^^^^^^ creates a temporary value which is freed while still in use @@ -169,7 +213,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:62:29 + --> $DIR/promote-not.rs:66:29 | LL | let _val: &'static _ = &(&TEST_DROP,); | ---------- ^^^^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -180,7 +224,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:62:31 + --> $DIR/promote-not.rs:66:31 | LL | let _val: &'static _ = &(&TEST_DROP,); | ---------- ^^^^^^^^^ creates a temporary value which is freed while still in use @@ -191,7 +235,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:65:29 + --> $DIR/promote-not.rs:69:29 | LL | let _val: &'static _ = &[&TEST_DROP; 1]; | ---------- ^^^^^^^^^^^^^^^ creates a temporary value which is freed while still in use @@ -202,7 +246,7 @@ LL | } | - temporary value is freed at the end of this statement error[E0716]: temporary value dropped while borrowed - --> $DIR/promote-not.rs:65:31 + --> $DIR/promote-not.rs:69:31 | LL | let _val: &'static _ = &[&TEST_DROP; 1]; | ---------- ^^^^^^^^^ - temporary value is freed at the end of this statement @@ -210,6 +254,6 @@ LL | let _val: &'static _ = &[&TEST_DROP; 1]; | | creates a temporary value which is freed while still in use | type annotation requires that borrow lasts for `'static` -error: aborting due to 20 previous errors +error: aborting due to 24 previous errors For more 
information about this error, try `rustc --explain E0716`. diff --git a/tests/ui/consts/promotion.rs b/tests/ui/consts/promotion.rs index 211dcf8a4e8f2..b18495a4a6bf5 100644 --- a/tests/ui/consts/promotion.rs +++ b/tests/ui/consts/promotion.rs @@ -28,8 +28,11 @@ fn main() { // make sure that this does not cause trouble despite overflowing assert_static(&(0-1)); - // div-by-non-0 is okay + // div-by-non-0 (and also not MIN/-1) is okay assert_static(&(1/1)); + assert_static(&(0/1)); + assert_static(&(1/-1)); + assert_static(&(i32::MIN/1)); assert_static(&(1%1)); // in-bounds array access is okay diff --git a/tests/ui/lint/lint-overflowing-ops.noopt.stderr b/tests/ui/lint/lint-overflowing-ops.noopt.stderr index f89ee8569c66f..1b7b73cec38d5 100644 --- a/tests/ui/lint/lint-overflowing-ops.noopt.stderr +++ b/tests/ui/lint/lint-overflowing-ops.noopt.stderr @@ -876,498 +876,353 @@ error: this operation will panic at runtime LL | let _n = &(i8::MIN / -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN / -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:251:15 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:251:14 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:254:14 + --> $DIR/lint-overflowing-ops.rs:253:14 | LL | let _n = 1i16 / 0; | ^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:255:15 + --> $DIR/lint-overflowing-ops.rs:254:15 | LL | let _n = &(1i16 / 0); | ^^^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:256:14 + --> $DIR/lint-overflowing-ops.rs:255:14 | LL | let _n = i16::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:257:15 + --> $DIR/lint-overflowing-ops.rs:256:15 | LL | let _n = &(i16::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:257:15 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:257:14 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:260:14 + --> $DIR/lint-overflowing-ops.rs:258:14 | LL | let _n = 1i32 / 0; | ^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:261:15 + --> $DIR/lint-overflowing-ops.rs:259:15 | LL | let _n = &(1i32 / 0); | ^^^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:262:14 + --> $DIR/lint-overflowing-ops.rs:260:14 | LL | let _n = i32::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN / -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:263:15 + --> $DIR/lint-overflowing-ops.rs:261:15 | LL | let _n = &(i32::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i32::MIN / -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> 
$DIR/lint-overflowing-ops.rs:263:15 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:263:14 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:266:14 + --> $DIR/lint-overflowing-ops.rs:263:14 | LL | let _n = 1i64 / 0; | ^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:267:15 + --> $DIR/lint-overflowing-ops.rs:264:15 | LL | let _n = &(1i64 / 0); | ^^^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:268:14 + --> $DIR/lint-overflowing-ops.rs:265:14 | LL | let _n = i64::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:269:15 + --> $DIR/lint-overflowing-ops.rs:266:15 | LL | let _n = &(i64::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:269:15 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:269:14 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:272:14 + --> $DIR/lint-overflowing-ops.rs:268:14 | LL | let _n = 1i128 / 0; | ^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:273:15 + --> $DIR/lint-overflowing-ops.rs:269:15 | LL | let _n = &(1i128 / 0); | ^^^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:274:14 + --> $DIR/lint-overflowing-ops.rs:270:14 | LL | let _n = i128::MIN / -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:275:15 + --> $DIR/lint-overflowing-ops.rs:271:15 | LL | let _n = &(i128::MIN / -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:275:15 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:275:14 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:278:14 + --> $DIR/lint-overflowing-ops.rs:273:14 | LL | let _n = 1isize / 0; | ^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:279:15 + --> $DIR/lint-overflowing-ops.rs:274:15 | LL | let _n = &(1isize / 0); | ^^^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:280:14 + --> $DIR/lint-overflowing-ops.rs:275:14 | LL | let _n = isize::MIN / -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:281:15 + --> $DIR/lint-overflowing-ops.rs:276:15 | LL | let _n = 
&(isize::MIN / -1); | ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:281:15 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:281:14 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:286:14 + --> $DIR/lint-overflowing-ops.rs:280:14 | LL | let _n = 1u8 % 0; | ^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:287:15 + --> $DIR/lint-overflowing-ops.rs:281:15 | LL | let _n = &(1u8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:289:14 + --> $DIR/lint-overflowing-ops.rs:283:14 | LL | let _n = 1u16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:290:15 + --> $DIR/lint-overflowing-ops.rs:284:15 | LL | let _n = &(1u16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:292:14 + --> $DIR/lint-overflowing-ops.rs:286:14 | LL | let _n = 1u32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:293:15 + --> $DIR/lint-overflowing-ops.rs:287:15 | LL | let _n = &(1u32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:295:14 + --> $DIR/lint-overflowing-ops.rs:289:14 | LL | let _n = 1u64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:296:15 + --> $DIR/lint-overflowing-ops.rs:290:15 | LL | let _n = &(1u64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:298:14 + --> $DIR/lint-overflowing-ops.rs:292:14 | LL | let _n = 1u128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:299:15 + --> $DIR/lint-overflowing-ops.rs:293:15 | LL | let _n = &(1u128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:301:14 + --> $DIR/lint-overflowing-ops.rs:295:14 | LL | let _n = 1usize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:302:15 + --> $DIR/lint-overflowing-ops.rs:296:15 | LL | let _n = &(1usize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:304:14 + --> $DIR/lint-overflowing-ops.rs:298:14 | LL | let _n = 1i8 % 0; | ^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this 
operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:305:15 + --> $DIR/lint-overflowing-ops.rs:299:15 | LL | let _n = &(1i8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:306:14 + --> $DIR/lint-overflowing-ops.rs:300:14 | LL | let _n = i8::MIN % -1; | ^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:307:15 + --> $DIR/lint-overflowing-ops.rs:301:15 | LL | let _n = &(i8::MIN % -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:307:15 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:307:14 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:310:14 + --> $DIR/lint-overflowing-ops.rs:303:14 | LL | let _n = 1i16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:311:15 + --> $DIR/lint-overflowing-ops.rs:304:15 | LL | let _n = &(1i16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:312:14 + --> $DIR/lint-overflowing-ops.rs:305:14 | LL | let _n = i16::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:313:15 + --> $DIR/lint-overflowing-ops.rs:306:15 | LL | let _n = &(i16::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:313:15 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:313:14 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:316:14 + --> $DIR/lint-overflowing-ops.rs:308:14 | LL | let _n = 1i32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:317:15 + --> $DIR/lint-overflowing-ops.rs:309:15 | LL | let _n = &(1i32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:318:14 + --> $DIR/lint-overflowing-ops.rs:310:14 | LL | let _n = i32::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:319:15 + --> $DIR/lint-overflowing-ops.rs:311:15 | LL | let _n = &(i32::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:319:15 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:319:14 
- | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:322:14 + --> $DIR/lint-overflowing-ops.rs:313:14 | LL | let _n = 1i64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:323:15 + --> $DIR/lint-overflowing-ops.rs:314:15 | LL | let _n = &(1i64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:324:14 + --> $DIR/lint-overflowing-ops.rs:315:14 | LL | let _n = i64::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:325:15 + --> $DIR/lint-overflowing-ops.rs:316:15 | LL | let _n = &(i64::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:325:15 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:325:14 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:328:14 + --> $DIR/lint-overflowing-ops.rs:318:14 | LL | let _n = 1i128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:329:15 + --> $DIR/lint-overflowing-ops.rs:319:15 | LL | let _n = &(1i128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:330:14 + --> $DIR/lint-overflowing-ops.rs:320:14 | LL | let _n = i128::MIN % -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:331:15 + --> $DIR/lint-overflowing-ops.rs:321:15 | LL | let _n = &(i128::MIN % -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:331:15 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:331:14 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:334:14 + --> $DIR/lint-overflowing-ops.rs:323:14 | LL | let _n = 1isize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:335:15 + --> $DIR/lint-overflowing-ops.rs:324:15 | LL | let _n = &(1isize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:336:14 + --> $DIR/lint-overflowing-ops.rs:325:14 | LL | let _n = isize::MIN % -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:337:15 + --> $DIR/lint-overflowing-ops.rs:326:15 | LL | let _n = &(isize::MIN % -1); 
| ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:337:15 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:337:14 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:341:14 + --> $DIR/lint-overflowing-ops.rs:329:14 | LL | let _n = [1, 2, 3][4]; | ^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:342:15 + --> $DIR/lint-overflowing-ops.rs:330:15 | LL | let _n = &([1, 2, 3][4]); | ^^^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 -error: aborting due to 215 previous errors +error: aborting due to 203 previous errors -For more information about this error, try `rustc --explain E0080`. diff --git a/tests/ui/lint/lint-overflowing-ops.opt.stderr b/tests/ui/lint/lint-overflowing-ops.opt.stderr index 7ac5c4e0d76ff..1b7b73cec38d5 100644 --- a/tests/ui/lint/lint-overflowing-ops.opt.stderr +++ b/tests/ui/lint/lint-overflowing-ops.opt.stderr @@ -876,594 +876,353 @@ error: this operation will panic at runtime LL | let _n = &(i8::MIN / -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN / -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:251:15 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:251:14 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:251:14 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:254:14 + --> $DIR/lint-overflowing-ops.rs:253:14 | LL | let _n = 1i16 / 0; | ^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:255:15 + --> $DIR/lint-overflowing-ops.rs:254:15 | LL | let _n = &(1i16 / 0); | ^^^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:256:14 + --> $DIR/lint-overflowing-ops.rs:255:14 | LL | let _n = i16::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:257:15 + --> $DIR/lint-overflowing-ops.rs:256:15 | LL | let _n = &(i16::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:257:15 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:257:14 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:257:14 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> 
$DIR/lint-overflowing-ops.rs:260:14 + --> $DIR/lint-overflowing-ops.rs:258:14 | LL | let _n = 1i32 / 0; | ^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:261:15 + --> $DIR/lint-overflowing-ops.rs:259:15 | LL | let _n = &(1i32 / 0); | ^^^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:262:14 + --> $DIR/lint-overflowing-ops.rs:260:14 | LL | let _n = i32::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN / -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:263:15 + --> $DIR/lint-overflowing-ops.rs:261:15 | LL | let _n = &(i32::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i32::MIN / -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:263:15 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:263:14 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:263:14 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:266:14 + --> $DIR/lint-overflowing-ops.rs:263:14 | LL | let _n = 1i64 / 0; | ^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:267:15 + --> $DIR/lint-overflowing-ops.rs:264:15 | LL | let _n = &(1i64 / 0); | ^^^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:268:14 + --> $DIR/lint-overflowing-ops.rs:265:14 | LL | let _n = i64::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:269:15 + --> $DIR/lint-overflowing-ops.rs:266:15 | LL | let _n = &(i64::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:269:15 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:269:14 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:269:14 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:272:14 + --> $DIR/lint-overflowing-ops.rs:268:14 | LL | let _n = 1i128 / 0; | ^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:273:15 + --> $DIR/lint-overflowing-ops.rs:269:15 | LL | let _n = &(1i128 / 0); | ^^^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:274:14 + --> $DIR/lint-overflowing-ops.rs:270:14 | LL | let _n = i128::MIN / -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow error: this operation will panic at runtime - --> 
$DIR/lint-overflowing-ops.rs:275:15 + --> $DIR/lint-overflowing-ops.rs:271:15 | LL | let _n = &(i128::MIN / -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:275:15 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:275:14 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:275:14 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:278:14 + --> $DIR/lint-overflowing-ops.rs:273:14 | LL | let _n = 1isize / 0; | ^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:279:15 + --> $DIR/lint-overflowing-ops.rs:274:15 | LL | let _n = &(1isize / 0); | ^^^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:280:14 + --> $DIR/lint-overflowing-ops.rs:275:14 | LL | let _n = isize::MIN / -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:281:15 + --> $DIR/lint-overflowing-ops.rs:276:15 | LL | let _n = &(isize::MIN / -1); | ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:281:15 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:281:14 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:281:14 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:286:14 + --> $DIR/lint-overflowing-ops.rs:280:14 | LL | let _n = 1u8 % 0; | ^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:287:15 + --> $DIR/lint-overflowing-ops.rs:281:15 | LL | let _n = &(1u8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:289:14 + --> $DIR/lint-overflowing-ops.rs:283:14 | LL | let _n = 1u16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:290:15 + --> $DIR/lint-overflowing-ops.rs:284:15 | LL | let _n = &(1u16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:292:14 + --> $DIR/lint-overflowing-ops.rs:286:14 | LL | let _n = 1u32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:293:15 + --> $DIR/lint-overflowing-ops.rs:287:15 | LL 
| let _n = &(1u32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:295:14 + --> $DIR/lint-overflowing-ops.rs:289:14 | LL | let _n = 1u64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:296:15 + --> $DIR/lint-overflowing-ops.rs:290:15 | LL | let _n = &(1u64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:298:14 + --> $DIR/lint-overflowing-ops.rs:292:14 | LL | let _n = 1u128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:299:15 + --> $DIR/lint-overflowing-ops.rs:293:15 | LL | let _n = &(1u128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:301:14 + --> $DIR/lint-overflowing-ops.rs:295:14 | LL | let _n = 1usize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:302:15 + --> $DIR/lint-overflowing-ops.rs:296:15 | LL | let _n = &(1usize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:304:14 + --> $DIR/lint-overflowing-ops.rs:298:14 | LL | let _n = 1i8 % 0; | ^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:305:15 + --> $DIR/lint-overflowing-ops.rs:299:15 | LL | let _n = &(1i8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:306:14 + --> $DIR/lint-overflowing-ops.rs:300:14 | LL | let _n = i8::MIN % -1; | ^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:307:15 + --> $DIR/lint-overflowing-ops.rs:301:15 | LL | let _n = &(i8::MIN % -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:307:15 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:307:14 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:307:14 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:310:14 + --> $DIR/lint-overflowing-ops.rs:303:14 | LL | let _n = 1i16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:311:15 + --> $DIR/lint-overflowing-ops.rs:304:15 | LL | let _n = &(1i16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime 
- --> $DIR/lint-overflowing-ops.rs:312:14 + --> $DIR/lint-overflowing-ops.rs:305:14 | LL | let _n = i16::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:313:15 + --> $DIR/lint-overflowing-ops.rs:306:15 | LL | let _n = &(i16::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:313:15 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:313:14 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:313:14 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:316:14 + --> $DIR/lint-overflowing-ops.rs:308:14 | LL | let _n = 1i32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:317:15 + --> $DIR/lint-overflowing-ops.rs:309:15 | LL | let _n = &(1i32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:318:14 + --> $DIR/lint-overflowing-ops.rs:310:14 | LL | let _n = i32::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:319:15 + --> $DIR/lint-overflowing-ops.rs:311:15 | LL | let _n = &(i32::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:319:15 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:319:14 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:319:14 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:322:14 + --> $DIR/lint-overflowing-ops.rs:313:14 | LL | let _n = 1i64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:323:15 + --> $DIR/lint-overflowing-ops.rs:314:15 | LL | let _n = &(1i64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:324:14 + --> $DIR/lint-overflowing-ops.rs:315:14 | LL | let _n = i64::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:325:15 + --> $DIR/lint-overflowing-ops.rs:316:15 | LL | let _n = &(i64::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:325:15 
- | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:325:14 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:325:14 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:328:14 + --> $DIR/lint-overflowing-ops.rs:318:14 | LL | let _n = 1i128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:329:15 + --> $DIR/lint-overflowing-ops.rs:319:15 | LL | let _n = &(1i128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:330:14 + --> $DIR/lint-overflowing-ops.rs:320:14 | LL | let _n = i128::MIN % -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:331:15 + --> $DIR/lint-overflowing-ops.rs:321:15 | LL | let _n = &(i128::MIN % -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:331:15 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:331:14 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:331:14 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:334:14 + --> $DIR/lint-overflowing-ops.rs:323:14 | LL | let _n = 1isize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:335:15 + --> $DIR/lint-overflowing-ops.rs:324:15 | LL | let _n = &(1isize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:336:14 + --> $DIR/lint-overflowing-ops.rs:325:14 | LL | let _n = isize::MIN % -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:337:15 + --> $DIR/lint-overflowing-ops.rs:326:15 | LL | let _n = &(isize::MIN % -1); | ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:337:15 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:337:14 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^^ - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:337:14 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^^ - | - = note: duplicate diagnostic emitted due to `-Z 
deduplicate-diagnostics=no` - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:341:14 + --> $DIR/lint-overflowing-ops.rs:329:14 | LL | let _n = [1, 2, 3][4]; | ^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:342:15 + --> $DIR/lint-overflowing-ops.rs:330:15 | LL | let _n = &([1, 2, 3][4]); | ^^^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 -error: aborting due to 215 previous errors +error: aborting due to 203 previous errors -For more information about this error, try `rustc --explain E0080`. diff --git a/tests/ui/lint/lint-overflowing-ops.opt_with_overflow_checks.stderr b/tests/ui/lint/lint-overflowing-ops.opt_with_overflow_checks.stderr index f89ee8569c66f..1b7b73cec38d5 100644 --- a/tests/ui/lint/lint-overflowing-ops.opt_with_overflow_checks.stderr +++ b/tests/ui/lint/lint-overflowing-ops.opt_with_overflow_checks.stderr @@ -876,498 +876,353 @@ error: this operation will panic at runtime LL | let _n = &(i8::MIN / -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN / -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:251:15 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:251:14 - | -LL | let _n = &(i8::MIN / -1); - | ^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:254:14 + --> $DIR/lint-overflowing-ops.rs:253:14 | LL | let _n = 1i16 / 0; | ^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:255:15 + --> $DIR/lint-overflowing-ops.rs:254:15 | LL | let _n = &(1i16 / 0); | ^^^^^^^^^^ attempt to divide `1_i16` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:256:14 + --> $DIR/lint-overflowing-ops.rs:255:14 | LL | let _n = i16::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:257:15 + --> $DIR/lint-overflowing-ops.rs:256:15 | LL | let _n = &(i16::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN / -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:257:15 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:257:14 - | -LL | let _n = &(i16::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:260:14 + --> $DIR/lint-overflowing-ops.rs:258:14 | LL | let _n = 1i32 / 0; | ^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:261:15 + --> $DIR/lint-overflowing-ops.rs:259:15 | LL | let _n = &(1i32 / 0); | ^^^^^^^^^^ attempt to divide `1_i32` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:262:14 + --> $DIR/lint-overflowing-ops.rs:260:14 | LL | let _n = i32::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN / -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:263:15 + --> $DIR/lint-overflowing-ops.rs:261:15 | LL | let _n = &(i32::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to 
compute `i32::MIN / -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:263:15 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:263:14 - | -LL | let _n = &(i32::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:266:14 + --> $DIR/lint-overflowing-ops.rs:263:14 | LL | let _n = 1i64 / 0; | ^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:267:15 + --> $DIR/lint-overflowing-ops.rs:264:15 | LL | let _n = &(1i64 / 0); | ^^^^^^^^^^ attempt to divide `1_i64` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:268:14 + --> $DIR/lint-overflowing-ops.rs:265:14 | LL | let _n = i64::MIN / -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:269:15 + --> $DIR/lint-overflowing-ops.rs:266:15 | LL | let _n = &(i64::MIN / -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN / -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:269:15 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:269:14 - | -LL | let _n = &(i64::MIN / -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:272:14 + --> $DIR/lint-overflowing-ops.rs:268:14 | LL | let _n = 1i128 / 0; | ^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:273:15 + --> $DIR/lint-overflowing-ops.rs:269:15 | LL | let _n = &(1i128 / 0); | ^^^^^^^^^^^ attempt to divide `1_i128` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:274:14 + --> $DIR/lint-overflowing-ops.rs:270:14 | LL | let _n = i128::MIN / -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:275:15 + --> $DIR/lint-overflowing-ops.rs:271:15 | LL | let _n = &(i128::MIN / -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN / -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:275:15 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:275:14 - | -LL | let _n = &(i128::MIN / -1); - | ^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:278:14 + --> $DIR/lint-overflowing-ops.rs:273:14 | LL | let _n = 1isize / 0; | ^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:279:15 + --> $DIR/lint-overflowing-ops.rs:274:15 | LL | let _n = &(1isize / 0); | ^^^^^^^^^^^^ attempt to divide `1_isize` by zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:280:14 + --> $DIR/lint-overflowing-ops.rs:275:14 | LL | let _n = isize::MIN / -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow error: this operation will panic at runtime 
- --> $DIR/lint-overflowing-ops.rs:281:15 + --> $DIR/lint-overflowing-ops.rs:276:15 | LL | let _n = &(isize::MIN / -1); | ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN / -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:281:15 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed division (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:281:14 - | -LL | let _n = &(isize::MIN / -1); - | ^^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:286:14 + --> $DIR/lint-overflowing-ops.rs:280:14 | LL | let _n = 1u8 % 0; | ^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:287:15 + --> $DIR/lint-overflowing-ops.rs:281:15 | LL | let _n = &(1u8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_u8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:289:14 + --> $DIR/lint-overflowing-ops.rs:283:14 | LL | let _n = 1u16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:290:15 + --> $DIR/lint-overflowing-ops.rs:284:15 | LL | let _n = &(1u16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:292:14 + --> $DIR/lint-overflowing-ops.rs:286:14 | LL | let _n = 1u32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:293:15 + --> $DIR/lint-overflowing-ops.rs:287:15 | LL | let _n = &(1u32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:295:14 + --> $DIR/lint-overflowing-ops.rs:289:14 | LL | let _n = 1u64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:296:15 + --> $DIR/lint-overflowing-ops.rs:290:15 | LL | let _n = &(1u64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_u64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:298:14 + --> $DIR/lint-overflowing-ops.rs:292:14 | LL | let _n = 1u128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:299:15 + --> $DIR/lint-overflowing-ops.rs:293:15 | LL | let _n = &(1u128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_u128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:301:14 + --> $DIR/lint-overflowing-ops.rs:295:14 | LL | let _n = 1usize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:302:15 + --> $DIR/lint-overflowing-ops.rs:296:15 | LL | let _n = &(1usize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_usize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:304:14 + --> $DIR/lint-overflowing-ops.rs:298:14 | LL | let _n = 1i8 % 
0; | ^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:305:15 + --> $DIR/lint-overflowing-ops.rs:299:15 | LL | let _n = &(1i8 % 0); | ^^^^^^^^^ attempt to calculate the remainder of `1_i8` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:306:14 + --> $DIR/lint-overflowing-ops.rs:300:14 | LL | let _n = i8::MIN % -1; | ^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:307:15 + --> $DIR/lint-overflowing-ops.rs:301:15 | LL | let _n = &(i8::MIN % -1); | ^^^^^^^^^^^^^^ attempt to compute `i8::MIN % -1_i8`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:307:15 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:307:14 - | -LL | let _n = &(i8::MIN % -1); - | ^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:310:14 + --> $DIR/lint-overflowing-ops.rs:303:14 | LL | let _n = 1i16 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:311:15 + --> $DIR/lint-overflowing-ops.rs:304:15 | LL | let _n = &(1i16 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i16` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:312:14 + --> $DIR/lint-overflowing-ops.rs:305:14 | LL | let _n = i16::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:313:15 + --> $DIR/lint-overflowing-ops.rs:306:15 | LL | let _n = &(i16::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i16::MIN % -1_i16`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:313:15 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:313:14 - | -LL | let _n = &(i16::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:316:14 + --> $DIR/lint-overflowing-ops.rs:308:14 | LL | let _n = 1i32 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:317:15 + --> $DIR/lint-overflowing-ops.rs:309:15 | LL | let _n = &(1i32 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i32` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:318:14 + --> $DIR/lint-overflowing-ops.rs:310:14 | LL | let _n = i32::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:319:15 + --> $DIR/lint-overflowing-ops.rs:311:15 | LL | let _n = &(i32::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i32::MIN % -1_i32`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:319:15 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing 
MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:319:14 - | -LL | let _n = &(i32::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:322:14 + --> $DIR/lint-overflowing-ops.rs:313:14 | LL | let _n = 1i64 % 0; | ^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:323:15 + --> $DIR/lint-overflowing-ops.rs:314:15 | LL | let _n = &(1i64 % 0); | ^^^^^^^^^^ attempt to calculate the remainder of `1_i64` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:324:14 + --> $DIR/lint-overflowing-ops.rs:315:14 | LL | let _n = i64::MIN % -1; | ^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:325:15 + --> $DIR/lint-overflowing-ops.rs:316:15 | LL | let _n = &(i64::MIN % -1); | ^^^^^^^^^^^^^^^ attempt to compute `i64::MIN % -1_i64`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:325:15 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:325:14 - | -LL | let _n = &(i64::MIN % -1); - | ^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:328:14 + --> $DIR/lint-overflowing-ops.rs:318:14 | LL | let _n = 1i128 % 0; | ^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:329:15 + --> $DIR/lint-overflowing-ops.rs:319:15 | LL | let _n = &(1i128 % 0); | ^^^^^^^^^^^ attempt to calculate the remainder of `1_i128` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:330:14 + --> $DIR/lint-overflowing-ops.rs:320:14 | LL | let _n = i128::MIN % -1; | ^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:331:15 + --> $DIR/lint-overflowing-ops.rs:321:15 | LL | let _n = &(i128::MIN % -1); | ^^^^^^^^^^^^^^^^ attempt to compute `i128::MIN % -1_i128`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:331:15 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:331:14 - | -LL | let _n = &(i128::MIN % -1); - | ^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:334:14 + --> $DIR/lint-overflowing-ops.rs:323:14 | LL | let _n = 1isize % 0; | ^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:335:15 + --> $DIR/lint-overflowing-ops.rs:324:15 | LL | let _n = &(1isize % 0); | ^^^^^^^^^^^^ attempt to calculate the remainder of `1_isize` with a divisor of zero error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:336:14 + --> $DIR/lint-overflowing-ops.rs:325:14 | LL | let _n = isize::MIN % -1; | ^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow error: this operation will panic at runtime - --> 
$DIR/lint-overflowing-ops.rs:337:15 + --> $DIR/lint-overflowing-ops.rs:326:15 | LL | let _n = &(isize::MIN % -1); | ^^^^^^^^^^^^^^^^^ attempt to compute `isize::MIN % -1_isize`, which would overflow -error[E0080]: evaluation of constant value failed - --> $DIR/lint-overflowing-ops.rs:337:15 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^ overflow in signed remainder (dividing MIN by -1) - -note: erroneous constant encountered - --> $DIR/lint-overflowing-ops.rs:337:14 - | -LL | let _n = &(isize::MIN % -1); - | ^^^^^^^^^^^^^^^^^^ - error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:341:14 + --> $DIR/lint-overflowing-ops.rs:329:14 | LL | let _n = [1, 2, 3][4]; | ^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 error: this operation will panic at runtime - --> $DIR/lint-overflowing-ops.rs:342:15 + --> $DIR/lint-overflowing-ops.rs:330:15 | LL | let _n = &([1, 2, 3][4]); | ^^^^^^^^^^^^^^ index out of bounds: the length is 3 but the index is 4 -error: aborting due to 215 previous errors +error: aborting due to 203 previous errors -For more information about this error, try `rustc --explain E0080`. diff --git a/tests/ui/lint/lint-overflowing-ops.rs b/tests/ui/lint/lint-overflowing-ops.rs index 4ef99f6c5fa18..3aadf77324377 100644 --- a/tests/ui/lint/lint-overflowing-ops.rs +++ b/tests/ui/lint/lint-overflowing-ops.rs @@ -249,37 +249,31 @@ fn main() { let _n = &(1i8 / 0); //~ ERROR: this operation will panic at runtime let _n = i8::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(i8::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i16 / 0; //~ ERROR: this operation will panic at runtime let _n = &(1i16 / 0); //~ ERROR: this operation will panic at runtime let _n = i16::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(i16::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i32 / 0; //~ ERROR: this operation will panic at runtime let _n = &(1i32 / 0); //~ ERROR: this operation will panic at runtime let _n = i32::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(i32::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i64 / 0; //~ ERROR: this operation will panic at runtime let _n = &(1i64 / 0); //~ ERROR: this operation will panic at runtime let _n = i64::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(i64::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i128 / 0; //~ ERROR: this operation will panic at runtime let _n = &(1i128 / 0); //~ ERROR: this operation will panic at runtime let _n = i128::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(i128::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1isize / 0; //~ ERROR: this operation will panic at runtime let _n = &(1isize / 0); //~ ERROR: this operation will panic at runtime let _n = isize::MIN / -1; //~ ERROR: this operation will panic at runtime let _n = &(isize::MIN / -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed // Modulus @@ -305,37 +299,31 @@ fn main() { let _n = &(1i8 % 0); //~ ERROR: this operation will panic at runtime let _n = i8::MIN % -1; //~ ERROR: this operation will panic at runtime let 
_n = &(i8::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i16 % 0; //~ ERROR: this operation will panic at runtime let _n = &(1i16 % 0); //~ ERROR: this operation will panic at runtime let _n = i16::MIN % -1; //~ ERROR: this operation will panic at runtime let _n = &(i16::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i32 % 0; //~ ERROR: this operation will panic at runtime let _n = &(1i32 % 0); //~ ERROR: this operation will panic at runtime let _n = i32::MIN % -1; //~ ERROR: this operation will panic at runtime let _n = &(i32::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i64 % 0; //~ ERROR: this operation will panic at runtime let _n = &(1i64 % 0); //~ ERROR: this operation will panic at runtime let _n = i64::MIN % -1; //~ ERROR: this operation will panic at runtime let _n = &(i64::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1i128 % 0; //~ ERROR: this operation will panic at runtime let _n = &(1i128 % 0); //~ ERROR: this operation will panic at runtime let _n = i128::MIN % -1; //~ ERROR: this operation will panic at runtime let _n = &(i128::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed let _n = 1isize % 0; //~ ERROR: this operation will panic at runtime let _n = &(1isize % 0); //~ ERROR: this operation will panic at runtime let _n = isize::MIN % -1; //~ ERROR: this operation will panic at runtime let _n = &(isize::MIN % -1); //~ ERROR: this operation will panic at runtime - //~^ERROR: evaluation of constant value failed // Out of bounds access let _n = [1, 2, 3][4]; //~ ERROR: this operation will panic at runtime From ff187a92d84f352670a9a63e8519eac114456d38 Mon Sep 17 00:00:00 2001 From: Pavel Grigorenko Date: Sat, 24 Feb 2024 16:02:17 +0300 Subject: [PATCH 117/134] library: use `addr_of!` --- library/alloc/src/boxed/thin.rs | 2 +- library/alloc/src/rc.rs | 4 ++-- library/alloc/src/sync.rs | 4 ++-- library/core/src/ffi/c_str.rs | 3 ++- library/core/src/iter/adapters/filter_map.rs | 6 +++--- library/core/src/ptr/mod.rs | 2 +- library/std/src/os/unix/net/addr.rs | 4 ++-- library/std/src/os/unix/net/ancillary.rs | 4 ++-- library/std/src/os/unix/net/datagram.rs | 12 ++++++------ library/std/src/os/unix/net/listener.rs | 10 +++++++--- library/std/src/os/unix/net/stream.rs | 4 ++-- library/std/src/os/unix/ucred.rs | 4 ++-- library/std/src/panicking.rs | 2 +- library/std/src/sync/mpmc/zero.rs | 8 ++++++-- library/std/src/sys/pal/hermit/net.rs | 4 ++-- library/std/src/sys/pal/hermit/time.rs | 4 ++-- library/std/src/sys/pal/sgx/abi/tls/mod.rs | 2 +- library/std/src/sys/pal/unix/fs.rs | 6 +++--- library/std/src/sys/pal/unix/mod.rs | 2 +- library/std/src/sys/pal/unix/net.rs | 2 +- .../std/src/sys/pal/unix/process/process_fuchsia.rs | 4 ++-- library/std/src/sys/pal/unix/process/process_unix.rs | 12 ++++++------ library/std/src/sys/pal/unix/thread.rs | 8 ++++---- library/std/src/sys/pal/unix/thread_local_dtor.rs | 2 +- library/std/src/sys/pal/wasi/mod.rs | 2 +- library/std/src/sys/pal/windows/fs.rs | 8 ++++---- library/std/src/sys/pal/windows/io.rs | 2 +- library/std/src/sys/pal/windows/net.rs | 2 +- library/std/src/sys/pal/windows/pipe.rs | 2 +- library/std/src/sys/pal/windows/process.rs | 6 +++--- library/std/src/sys/pal/windows/rand.rs | 4 ++-- 
library/std/src/sys/pal/windows/thread_parking.rs | 2 +- library/std/src/sys_common/net.rs | 8 ++++---- library/std/src/sys_common/once/queue.rs | 2 +- library/unwind/src/libunwind.rs | 4 ++-- 35 files changed, 83 insertions(+), 74 deletions(-) diff --git a/library/alloc/src/boxed/thin.rs b/library/alloc/src/boxed/thin.rs index 3b29c144a89f8..0421a12b3a952 100644 --- a/library/alloc/src/boxed/thin.rs +++ b/library/alloc/src/boxed/thin.rs @@ -176,7 +176,7 @@ impl ThinBox { fn with_header(&self) -> &WithHeader<::Metadata> { // SAFETY: both types are transparent to `NonNull` - unsafe { &*((&self.ptr) as *const WithOpaqueHeader as *const WithHeader<_>) } + unsafe { &*(core::ptr::addr_of!(self.ptr) as *const WithHeader<_>) } } } diff --git a/library/alloc/src/rc.rs b/library/alloc/src/rc.rs index c3d0019be3975..084157b97ab41 100644 --- a/library/alloc/src/rc.rs +++ b/library/alloc/src/rc.rs @@ -1969,7 +1969,7 @@ impl Rc { // Copy value as bytes ptr::copy_nonoverlapping( - &*src as *const T as *const u8, + core::ptr::addr_of!(*src) as *const u8, ptr::addr_of_mut!((*ptr).value) as *mut u8, value_size, ); @@ -2440,7 +2440,7 @@ impl fmt::Debug for Rc { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Pointer for Rc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&**self as *const T), f) + fmt::Pointer::fmt(&core::ptr::addr_of!(**self), f) } } diff --git a/library/alloc/src/sync.rs b/library/alloc/src/sync.rs index 524aa35e04529..00f47f5c6e0ed 100644 --- a/library/alloc/src/sync.rs +++ b/library/alloc/src/sync.rs @@ -1914,7 +1914,7 @@ impl Arc { // Copy value as bytes ptr::copy_nonoverlapping( - &*src as *const T as *const u8, + core::ptr::addr_of!(*src) as *const u8, ptr::addr_of_mut!((*ptr).data) as *mut u8, value_size, ); @@ -3265,7 +3265,7 @@ impl fmt::Debug for Arc { #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Pointer for Arc { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - fmt::Pointer::fmt(&(&**self as *const T), f) + fmt::Pointer::fmt(&core::ptr::addr_of!(**self), f) } } diff --git a/library/core/src/ffi/c_str.rs b/library/core/src/ffi/c_str.rs index 248943cf02260..20186a2de0fd8 100644 --- a/library/core/src/ffi/c_str.rs +++ b/library/core/src/ffi/c_str.rs @@ -4,6 +4,7 @@ use crate::ffi::c_char; use crate::fmt; use crate::intrinsics; use crate::ops; +use crate::ptr::addr_of; use crate::slice; use crate::slice::memchr; use crate::str; @@ -603,7 +604,7 @@ impl CStr { pub const fn to_bytes_with_nul(&self) -> &[u8] { // SAFETY: Transmuting a slice of `c_char`s to a slice of `u8`s // is safe on all supported targets. - unsafe { &*(&self.inner as *const [c_char] as *const [u8]) } + unsafe { &*(addr_of!(self.inner) as *const [u8]) } } /// Yields a &[str] slice if the `CStr` contains valid UTF-8. diff --git a/library/core/src/iter/adapters/filter_map.rs b/library/core/src/iter/adapters/filter_map.rs index 64bd5b3e2b668..1a5f9e6265454 100644 --- a/library/core/src/iter/adapters/filter_map.rs +++ b/library/core/src/iter/adapters/filter_map.rs @@ -2,6 +2,7 @@ use crate::iter::{adapters::SourceIter, FusedIterator, InPlaceIterable, TrustedF use crate::mem::{ManuallyDrop, MaybeUninit}; use crate::num::NonZero; use crate::ops::{ControlFlow, Try}; +use crate::ptr::addr_of; use crate::{array, fmt}; /// An iterator that uses `f` to both filter and map elements from `iter`. @@ -98,9 +99,8 @@ where // SAFETY: Loop conditions ensure the index is in bounds. 
unsafe { - let opt_payload_at: *const MaybeUninit = (&val as *const Option) - .byte_add(core::mem::offset_of!(Option, Some.0)) - .cast(); + let opt_payload_at: *const MaybeUninit = + addr_of!(val).byte_add(core::mem::offset_of!(Option, Some.0)).cast(); let dst = guard.array.as_mut_ptr().add(idx); crate::ptr::copy_nonoverlapping(opt_payload_at, dst, 1); crate::mem::forget(val); diff --git a/library/core/src/ptr/mod.rs b/library/core/src/ptr/mod.rs index 90b3341f0ad4d..fc5b08c9801a8 100644 --- a/library/core/src/ptr/mod.rs +++ b/library/core/src/ptr/mod.rs @@ -1553,7 +1553,7 @@ pub const unsafe fn write_unaligned(dst: *mut T, src: T) { // `dst` cannot overlap `src` because the caller has mutable access // to `dst` while `src` is owned by this function. unsafe { - copy_nonoverlapping(&src as *const T as *const u8, dst as *mut u8, mem::size_of::()); + copy_nonoverlapping(addr_of!(src) as *const u8, dst as *mut u8, mem::size_of::()); // We are calling the intrinsic directly to avoid function calls in the generated code. intrinsics::forget(src); } diff --git a/library/std/src/os/unix/net/addr.rs b/library/std/src/os/unix/net/addr.rs index 6c99e8c36203a..9757653e02c06 100644 --- a/library/std/src/os/unix/net/addr.rs +++ b/library/std/src/os/unix/net/addr.rs @@ -21,7 +21,7 @@ mod libc { fn sun_path_offset(addr: &libc::sockaddr_un) -> usize { // Work with an actual instance of the type since using a null pointer is UB let base = (addr as *const libc::sockaddr_un).addr(); - let path = (&addr.sun_path as *const libc::c_char).addr(); + let path = core::ptr::addr_of!(addr.sun_path).addr(); path - base } @@ -98,7 +98,7 @@ impl SocketAddr { unsafe { let mut addr: libc::sockaddr_un = mem::zeroed(); let mut len = mem::size_of::() as libc::socklen_t; - cvt(f(&mut addr as *mut _ as *mut _, &mut len))?; + cvt(f(core::ptr::addr_of_mut!(addr) as *mut _, &mut len))?; SocketAddr::from_parts(addr, len) } } diff --git a/library/std/src/os/unix/net/ancillary.rs b/library/std/src/os/unix/net/ancillary.rs index 218536689fdbe..1d279d6adbc25 100644 --- a/library/std/src/os/unix/net/ancillary.rs +++ b/library/std/src/os/unix/net/ancillary.rs @@ -37,7 +37,7 @@ pub(super) fn recv_vectored_with_ancillary_from( unsafe { let mut msg_name: libc::sockaddr_un = zeroed(); let mut msg: libc::msghdr = zeroed(); - msg.msg_name = &mut msg_name as *mut _ as *mut _; + msg.msg_name = core::ptr::addr_of_mut!(msg_name) as *mut _; msg.msg_namelen = size_of::() as libc::socklen_t; msg.msg_iov = bufs.as_mut_ptr().cast(); msg.msg_iovlen = bufs.len() as _; @@ -70,7 +70,7 @@ pub(super) fn send_vectored_with_ancillary_to( if let Some(path) = path { sockaddr_un(path)? 
} else { (zeroed(), 0) }; let mut msg: libc::msghdr = zeroed(); - msg.msg_name = &mut msg_name as *mut _ as *mut _; + msg.msg_name = core::ptr::addr_of_mut!(msg_name) as *mut _; msg.msg_namelen = msg_namelen; msg.msg_iov = bufs.as_ptr() as *mut _; msg.msg_iovlen = bufs.len() as _; diff --git a/library/std/src/os/unix/net/datagram.rs b/library/std/src/os/unix/net/datagram.rs index 3b7b610fdf96c..0b4d955294ca5 100644 --- a/library/std/src/os/unix/net/datagram.rs +++ b/library/std/src/os/unix/net/datagram.rs @@ -91,7 +91,7 @@ impl UnixDatagram { let socket = UnixDatagram::unbound()?; let (addr, len) = sockaddr_un(path.as_ref())?; - cvt(libc::bind(socket.as_raw_fd(), &addr as *const _ as *const _, len as _))?; + cvt(libc::bind(socket.as_raw_fd(), core::ptr::addr_of!(addr) as *const _, len as _))?; Ok(socket) } @@ -124,7 +124,7 @@ impl UnixDatagram { let socket = UnixDatagram::unbound()?; cvt(libc::bind( socket.as_raw_fd(), - &socket_addr.addr as *const _ as *const _, + core::ptr::addr_of!(socket_addr.addr) as *const _, socket_addr.len as _, ))?; Ok(socket) @@ -206,7 +206,7 @@ impl UnixDatagram { unsafe { let (addr, len) = sockaddr_un(path.as_ref())?; - cvt(libc::connect(self.as_raw_fd(), &addr as *const _ as *const _, len))?; + cvt(libc::connect(self.as_raw_fd(), core::ptr::addr_of!(addr) as *const _, len))?; } Ok(()) } @@ -238,7 +238,7 @@ impl UnixDatagram { unsafe { cvt(libc::connect( self.as_raw_fd(), - &socket_addr.addr as *const _ as *const _, + core::ptr::addr_of!(socket_addr.addr) as *const _, socket_addr.len, ))?; } @@ -505,7 +505,7 @@ impl UnixDatagram { buf.as_ptr() as *const _, buf.len(), MSG_NOSIGNAL, - &addr as *const _ as *const _, + core::ptr::addr_of!(addr) as *const _, len, ))?; Ok(count as usize) @@ -540,7 +540,7 @@ impl UnixDatagram { buf.as_ptr() as *const _, buf.len(), MSG_NOSIGNAL, - &socket_addr.addr as *const _ as *const _, + core::ptr::addr_of!(socket_addr.addr) as *const _, socket_addr.len, ))?; Ok(count as usize) diff --git a/library/std/src/os/unix/net/listener.rs b/library/std/src/os/unix/net/listener.rs index d64a43bc20bab..31286a906ea99 100644 --- a/library/std/src/os/unix/net/listener.rs +++ b/library/std/src/os/unix/net/listener.rs @@ -99,7 +99,11 @@ impl UnixListener { )))] const backlog: libc::c_int = libc::SOMAXCONN; - cvt(libc::bind(inner.as_inner().as_raw_fd(), &addr as *const _ as *const _, len as _))?; + cvt(libc::bind( + inner.as_inner().as_raw_fd(), + core::ptr::addr_of!(addr) as *const _, + len as _, + ))?; cvt(libc::listen(inner.as_inner().as_raw_fd(), backlog))?; Ok(UnixListener(inner)) @@ -139,7 +143,7 @@ impl UnixListener { const backlog: libc::c_int = 128; cvt(libc::bind( inner.as_raw_fd(), - &socket_addr.addr as *const _ as *const _, + core::ptr::addr_of!(socket_addr.addr) as *const _, socket_addr.len as _, ))?; cvt(libc::listen(inner.as_raw_fd(), backlog))?; @@ -174,7 +178,7 @@ impl UnixListener { pub fn accept(&self) -> io::Result<(UnixStream, SocketAddr)> { let mut storage: libc::sockaddr_un = unsafe { mem::zeroed() }; let mut len = mem::size_of_val(&storage) as libc::socklen_t; - let sock = self.0.accept(&mut storage as *mut _ as *mut _, &mut len)?; + let sock = self.0.accept(core::ptr::addr_of_mut!(storage) as *mut _, &mut len)?; let addr = SocketAddr::from_parts(storage, len)?; Ok((UnixStream(sock), addr)) } diff --git a/library/std/src/os/unix/net/stream.rs b/library/std/src/os/unix/net/stream.rs index e117f616cafd4..b1cd504e21939 100644 --- a/library/std/src/os/unix/net/stream.rs +++ b/library/std/src/os/unix/net/stream.rs @@ -96,7 +96,7 
@@ impl UnixStream { let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?; let (addr, len) = sockaddr_un(path.as_ref())?; - cvt(libc::connect(inner.as_raw_fd(), &addr as *const _ as *const _, len))?; + cvt(libc::connect(inner.as_raw_fd(), core::ptr::addr_of!(addr) as *const _, len))?; Ok(UnixStream(inner)) } } @@ -130,7 +130,7 @@ impl UnixStream { let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_STREAM)?; cvt(libc::connect( inner.as_raw_fd(), - &socket_addr.addr as *const _ as *const _, + core::ptr::addr_of!(socket_addr.addr) as *const _, socket_addr.len, ))?; Ok(UnixStream(inner)) diff --git a/library/std/src/os/unix/ucred.rs b/library/std/src/os/unix/ucred.rs index 6a0cc2d2c48ff..6efa74182cc0b 100644 --- a/library/std/src/os/unix/ucred.rs +++ b/library/std/src/os/unix/ucred.rs @@ -62,7 +62,7 @@ pub mod impl_linux { socket.as_raw_fd(), SOL_SOCKET, SO_PEERCRED, - &mut ucred as *mut ucred as *mut c_void, + core::ptr::addr_of_mut!(ucred) as *mut c_void, &mut ucred_size, ); @@ -122,7 +122,7 @@ pub mod impl_mac { socket.as_raw_fd(), SOL_LOCAL, LOCAL_PEERPID, - &mut pid as *mut pid_t as *mut c_void, + core::ptr::addr_of_mut!(pid) as *mut c_void, &mut pid_size, ); diff --git a/library/std/src/panicking.rs b/library/std/src/panicking.rs index 66b4ec37c8ec5..ef701d3867a1b 100644 --- a/library/std/src/panicking.rs +++ b/library/std/src/panicking.rs @@ -502,7 +502,7 @@ pub unsafe fn r#try R>(f: F) -> Result> // method of calling a catch panic whilst juggling ownership. let mut data = Data { f: ManuallyDrop::new(f) }; - let data_ptr = &mut data as *mut _ as *mut u8; + let data_ptr = core::ptr::addr_of_mut!(data) as *mut u8; // SAFETY: // // Access to the union's fields: this is `std` and we know that the `r#try` diff --git a/library/std/src/sync/mpmc/zero.rs b/library/std/src/sync/mpmc/zero.rs index 33f768dcbe902..1b82713edc748 100644 --- a/library/std/src/sync/mpmc/zero.rs +++ b/library/std/src/sync/mpmc/zero.rs @@ -182,7 +182,11 @@ impl Channel { // Prepare for blocking until a receiver wakes us up. 
let oper = Operation::hook(token); let mut packet = Packet::::message_on_stack(msg); - inner.senders.register_with_packet(oper, &mut packet as *mut Packet as *mut (), cx); + inner.senders.register_with_packet( + oper, + core::ptr::addr_of_mut!(packet) as *mut (), + cx, + ); inner.receivers.notify(); drop(inner); @@ -251,7 +255,7 @@ impl Channel { let mut packet = Packet::::empty_on_stack(); inner.receivers.register_with_packet( oper, - &mut packet as *mut Packet as *mut (), + core::ptr::addr_of_mut!(packet) as *mut (), cx, ); inner.senders.notify(); diff --git a/library/std/src/sys/pal/hermit/net.rs b/library/std/src/sys/pal/hermit/net.rs index 871a2ccdfa49c..1c53796f5d49c 100644 --- a/library/std/src/sys/pal/hermit/net.rs +++ b/library/std/src/sys/pal/hermit/net.rs @@ -207,7 +207,7 @@ impl Socket { buf.as_mut_ptr(), buf.len(), flags, - &mut storage as *mut _ as *mut _, + core::ptr::addr_of_mut!(storage) as *mut _, &mut addrlen, ) })?; @@ -323,7 +323,7 @@ impl Socket { netc::ioctl( self.as_raw_fd(), netc::FIONBIO, - &mut nonblocking as *mut _ as *mut core::ffi::c_void, + core::ptr::addr_of_mut!(nonblocking) as *mut core::ffi::c_void, ) }) .map(drop) diff --git a/library/std/src/sys/pal/hermit/time.rs b/library/std/src/sys/pal/hermit/time.rs index b0e9634d2299c..f289dafd8bc56 100644 --- a/library/std/src/sys/pal/hermit/time.rs +++ b/library/std/src/sys/pal/hermit/time.rs @@ -100,7 +100,7 @@ pub struct Instant(Timespec); impl Instant { pub fn now() -> Instant { let mut time: Timespec = Timespec::zero(); - let _ = unsafe { abi::clock_gettime(CLOCK_MONOTONIC, &mut time.t as *mut timespec) }; + let _ = unsafe { abi::clock_gettime(CLOCK_MONOTONIC, core::ptr::addr_of_mut!(time.t)) }; Instant(time) } @@ -197,7 +197,7 @@ pub const UNIX_EPOCH: SystemTime = SystemTime(Timespec::zero()); impl SystemTime { pub fn now() -> SystemTime { let mut time: Timespec = Timespec::zero(); - let _ = unsafe { abi::clock_gettime(CLOCK_REALTIME, &mut time.t as *mut timespec) }; + let _ = unsafe { abi::clock_gettime(CLOCK_REALTIME, core::ptr::addr_of_mut!(time.t)) }; SystemTime(time) } diff --git a/library/std/src/sys/pal/sgx/abi/tls/mod.rs b/library/std/src/sys/pal/sgx/abi/tls/mod.rs index 6762a43b483a4..8a9ea4ac00df0 100644 --- a/library/std/src/sys/pal/sgx/abi/tls/mod.rs +++ b/library/std/src/sys/pal/sgx/abi/tls/mod.rs @@ -95,7 +95,7 @@ impl Tls { #[allow(unused)] pub unsafe fn activate_persistent(self: Box) { // FIXME: Needs safety information. See entry.S for `set_tls_ptr` definition. 
- unsafe { set_tls_ptr((&*self) as *const Tls as _) }; + unsafe { set_tls_ptr(core::ptr::addr_of!(*self) as _) }; mem::forget(self); } diff --git a/library/std/src/sys/pal/unix/fs.rs b/library/std/src/sys/pal/unix/fs.rs index c75323ef7757a..086cdfe6e9434 100644 --- a/library/std/src/sys/pal/unix/fs.rs +++ b/library/std/src/sys/pal/unix/fs.rs @@ -1344,7 +1344,7 @@ impl File { } cvt(unsafe { libc::fsetattrlist( self.as_raw_fd(), - (&attrlist as *const libc::attrlist).cast::().cast_mut(), + core::ptr::addr_of!(attrlist).cast::().cast_mut(), buf.as_ptr().cast::().cast_mut(), num_times * mem::size_of::(), 0 @@ -1744,7 +1744,7 @@ fn open_from(from: &Path) -> io::Result<(crate::fs::File, crate::fs::Metadata)> #[cfg(target_os = "espidf")] fn open_to_and_set_permissions( to: &Path, - reader_metadata: crate::fs::Metadata, + _reader_metadata: crate::fs::Metadata, ) -> io::Result<(crate::fs::File, crate::fs::Metadata)> { use crate::fs::OpenOptions; let writer = OpenOptions::new().open(to)?; @@ -1918,7 +1918,7 @@ pub fn copy(from: &Path, to: &Path) -> io::Result { copyfile_state_get( state.0, COPYFILE_STATE_COPIED, - &mut bytes_copied as *mut libc::off_t as *mut libc::c_void, + core::ptr::addr_of_mut!(bytes_copied) as *mut libc::c_void, ) })?; Ok(bytes_copied as u64) diff --git a/library/std/src/sys/pal/unix/mod.rs b/library/std/src/sys/pal/unix/mod.rs index 04b8c5ca91604..23287258f2f87 100644 --- a/library/std/src/sys/pal/unix/mod.rs +++ b/library/std/src/sys/pal/unix/mod.rs @@ -38,7 +38,7 @@ pub mod thread_parking; pub mod time; #[cfg(target_os = "espidf")] -pub fn init(argc: isize, argv: *const *const u8, _sigpipe: u8) {} +pub fn init(_argc: isize, _argv: *const *const u8, _sigpipe: u8) {} #[cfg(not(target_os = "espidf"))] // SAFETY: must be called only once during runtime initialization. 
diff --git a/library/std/src/sys/pal/unix/net.rs b/library/std/src/sys/pal/unix/net.rs index 1b6a6bb2c5c77..f4ae7d21781a6 100644 --- a/library/std/src/sys/pal/unix/net.rs +++ b/library/std/src/sys/pal/unix/net.rs @@ -316,7 +316,7 @@ impl Socket { buf.as_mut_ptr() as *mut c_void, buf.len(), flags, - &mut storage as *mut _ as *mut _, + core::ptr::addr_of_mut!(storage) as *mut _, &mut addrlen, ) })?; diff --git a/library/std/src/sys/pal/unix/process/process_fuchsia.rs b/library/std/src/sys/pal/unix/process/process_fuchsia.rs index b6a74fb48318c..23c2be6adf9ee 100644 --- a/library/std/src/sys/pal/unix/process/process_fuchsia.rs +++ b/library/std/src/sys/pal/unix/process/process_fuchsia.rs @@ -182,7 +182,7 @@ impl Process { zx_cvt(zx_object_get_info( self.handle.raw(), ZX_INFO_PROCESS, - &mut proc_info as *mut _ as *mut libc::c_void, + core::ptr::addr_of_mut!(proc_info) as *mut libc::c_void, mem::size_of::(), &mut actual, &mut avail, @@ -219,7 +219,7 @@ impl Process { zx_cvt(zx_object_get_info( self.handle.raw(), ZX_INFO_PROCESS, - &mut proc_info as *mut _ as *mut libc::c_void, + core::ptr::addr_of_mut!(proc_info) as *mut libc::c_void, mem::size_of::(), &mut actual, &mut avail, diff --git a/library/std/src/sys/pal/unix/process/process_unix.rs b/library/std/src/sys/pal/unix/process/process_unix.rs index d5a77085725c5..97cbd1929d329 100644 --- a/library/std/src/sys/pal/unix/process/process_unix.rs +++ b/library/std/src/sys/pal/unix/process/process_unix.rs @@ -694,15 +694,15 @@ impl Command { let mut iov = [IoSlice::new(b"")]; let mut msg: libc::msghdr = mem::zeroed(); - msg.msg_iov = &mut iov as *mut _ as *mut _; + msg.msg_iov = core::ptr::addr_of_mut!(iov) as *mut _; msg.msg_iovlen = 1; // only attach cmsg if we successfully acquired the pidfd if pidfd >= 0 { msg.msg_controllen = mem::size_of_val(&cmsg.buf) as _; - msg.msg_control = &mut cmsg.buf as *mut _ as *mut _; + msg.msg_control = core::ptr::addr_of_mut!(cmsg.buf) as *mut _; - let hdr = CMSG_FIRSTHDR(&mut msg as *mut _ as *mut _); + let hdr = CMSG_FIRSTHDR(core::ptr::addr_of_mut!(msg) as *mut _); (*hdr).cmsg_level = SOL_SOCKET; (*hdr).cmsg_type = SCM_RIGHTS; (*hdr).cmsg_len = CMSG_LEN(SCM_MSG_LEN as _) as _; @@ -744,17 +744,17 @@ impl Command { let mut msg: libc::msghdr = mem::zeroed(); - msg.msg_iov = &mut iov as *mut _ as *mut _; + msg.msg_iov = core::ptr::addr_of_mut!(iov) as *mut _; msg.msg_iovlen = 1; msg.msg_controllen = mem::size_of::() as _; - msg.msg_control = &mut cmsg as *mut _ as *mut _; + msg.msg_control = core::ptr::addr_of_mut!(cmsg) as *mut _; match cvt_r(|| libc::recvmsg(sock.as_raw(), &mut msg, libc::MSG_CMSG_CLOEXEC)) { Err(_) => return -1, Ok(_) => {} } - let hdr = CMSG_FIRSTHDR(&mut msg as *mut _ as *mut _); + let hdr = CMSG_FIRSTHDR(core::ptr::addr_of_mut!(msg) as *mut _); if hdr.is_null() || (*hdr).cmsg_level != SOL_SOCKET || (*hdr).cmsg_type != SCM_RIGHTS diff --git a/library/std/src/sys/pal/unix/thread.rs b/library/std/src/sys/pal/unix/thread.rs index 97976407bb40f..864de31c6ebfc 100644 --- a/library/std/src/sys/pal/unix/thread.rs +++ b/library/std/src/sys/pal/unix/thread.rs @@ -239,7 +239,7 @@ impl Thread { tv_nsec: nsecs, }; secs -= ts.tv_sec as u64; - let ts_ptr = &mut ts as *mut _; + let ts_ptr = core::ptr::addr_of_mut!(ts); if libc::nanosleep(ts_ptr, ts_ptr) == -1 { assert_eq!(os::errno(), libc::EINTR); secs += ts.tv_sec as u64; @@ -418,8 +418,8 @@ pub fn available_parallelism() -> io::Result> { libc::sysctl( mib.as_mut_ptr(), 2, - &mut cpus as *mut _ as *mut _, - &mut cpus_size as *mut _ as *mut _, + 
core::ptr::addr_of_mut!(cpus) as *mut _, + core::ptr::addr_of_mut!(cpus_size) as *mut _, ptr::null_mut(), 0, ) @@ -864,7 +864,7 @@ pub mod guard { .unwrap(); match sysctlbyname.get() { Some(fcn) => { - if fcn(oid.as_ptr(), &mut guard as *mut _ as *mut _, &mut size as *mut _ as *mut _, crate::ptr::null_mut(), 0) == 0 { + if fcn(oid.as_ptr(), core::ptr::addr_of_mut!(guard) as *mut _, core::ptr::addr_of_mut!(size) as *mut _, crate::ptr::null_mut(), 0) == 0 { return guard; } return 1; diff --git a/library/std/src/sys/pal/unix/thread_local_dtor.rs b/library/std/src/sys/pal/unix/thread_local_dtor.rs index 8857f96501c19..79b152cece945 100644 --- a/library/std/src/sys/pal/unix/thread_local_dtor.rs +++ b/library/std/src/sys/pal/unix/thread_local_dtor.rs @@ -58,7 +58,7 @@ pub unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern "C" fn(*mut u8)) { unsafe extern "C" fn(*mut libc::c_void), >(dtor), t.cast(), - &__dso_handle as *const _ as *mut _, + core::ptr::addr_of!(__dso_handle) as *mut _, ); } return; diff --git a/library/std/src/sys/pal/wasi/mod.rs b/library/std/src/sys/pal/wasi/mod.rs index 084b8e0e21639..e6cbd51e768a7 100644 --- a/library/std/src/sys/pal/wasi/mod.rs +++ b/library/std/src/sys/pal/wasi/mod.rs @@ -166,7 +166,7 @@ pub fn abort_internal() -> ! { pub fn hashmap_random_keys() -> (u64, u64) { let mut ret = (0u64, 0u64); unsafe { - let base = &mut ret as *mut (u64, u64) as *mut u8; + let base = core::ptr::addr_of_mut!(ret) as *mut u8; let len = mem::size_of_val(&ret); wasi::random_get(base, len).expect("random_get failure"); } diff --git a/library/std/src/sys/pal/windows/fs.rs b/library/std/src/sys/pal/windows/fs.rs index b82a83ae7a3e8..3a9e7b4660b36 100644 --- a/library/std/src/sys/pal/windows/fs.rs +++ b/library/std/src/sys/pal/windows/fs.rs @@ -394,7 +394,7 @@ impl File { cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileBasicInfo, - &mut info as *mut _ as *mut c_void, + core::ptr::addr_of_mut!(info) as *mut c_void, size as c::DWORD, ))?; let mut attr = FileAttr { @@ -422,7 +422,7 @@ impl File { cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileStandardInfo, - &mut info as *mut _ as *mut c_void, + core::ptr::addr_of_mut!(info) as *mut c_void, size as c::DWORD, ))?; attr.file_size = info.AllocationSize as u64; @@ -638,7 +638,7 @@ impl File { cvt(c::GetFileInformationByHandleEx( self.handle.as_raw_handle(), c::FileBasicInfo, - &mut info as *mut _ as *mut c_void, + core::ptr::addr_of_mut!(info) as *mut c_void, size as c::DWORD, ))?; Ok(info) @@ -1438,7 +1438,7 @@ pub fn copy(from: &Path, to: &Path) -> io::Result { pfrom.as_ptr(), pto.as_ptr(), Some(callback), - &mut size as *mut _ as *mut _, + core::ptr::addr_of_mut!(size) as *mut _, ptr::null_mut(), 0, ) diff --git a/library/std/src/sys/pal/windows/io.rs b/library/std/src/sys/pal/windows/io.rs index b73d9f3ff4c4a..77b8f3c410eb8 100644 --- a/library/std/src/sys/pal/windows/io.rs +++ b/library/std/src/sys/pal/windows/io.rs @@ -122,7 +122,7 @@ unsafe fn msys_tty_on(handle: c::HANDLE) -> bool { let res = c::GetFileInformationByHandleEx( handle, c::FileNameInfo, - &mut name_info as *mut _ as *mut c_void, + core::ptr::addr_of_mut!(name_info) as *mut c_void, size_of::() as u32, ); if res == 0 { diff --git a/library/std/src/sys/pal/windows/net.rs b/library/std/src/sys/pal/windows/net.rs index e37fbe9ef83e4..1e6169ea8ece0 100644 --- a/library/std/src/sys/pal/windows/net.rs +++ b/library/std/src/sys/pal/windows/net.rs @@ -310,7 +310,7 @@ impl Socket { buf.as_mut_ptr() as *mut _, length, flags, - 
&mut storage as *mut _ as *mut _, + core::ptr::addr_of_mut!(storage) as *mut _, &mut addrlen, ) }; diff --git a/library/std/src/sys/pal/windows/pipe.rs b/library/std/src/sys/pal/windows/pipe.rs index fd10df82d8b47..013f588676ae8 100644 --- a/library/std/src/sys/pal/windows/pipe.rs +++ b/library/std/src/sys/pal/windows/pipe.rs @@ -375,7 +375,7 @@ impl AnonPipe { let mut overlapped: c::OVERLAPPED = crate::mem::zeroed(); // `hEvent` is unused by `ReadFileEx` and `WriteFileEx`. // Therefore the documentation suggests using it to smuggle a pointer to the callback. - overlapped.hEvent = &mut async_result as *mut _ as *mut _; + overlapped.hEvent = core::ptr::addr_of_mut!(async_result) as *mut _; // Asynchronous read of the pipe. // If successful, `callback` will be called once it completes. diff --git a/library/std/src/sys/pal/windows/process.rs b/library/std/src/sys/pal/windows/process.rs index 6a94d37714038..e4ab2ca7da1ce 100644 --- a/library/std/src/sys/pal/windows/process.rs +++ b/library/std/src/sys/pal/windows/process.rs @@ -350,10 +350,10 @@ impl Command { StartupInfo: si, lpAttributeList: proc_thread_attribute_list.0.as_mut_ptr() as _, }; - si_ptr = &mut si_ex as *mut _ as _; + si_ptr = core::ptr::addr_of_mut!(si_ex) as _; } else { si.cb = mem::size_of::() as c::DWORD; - si_ptr = &mut si as *mut _ as _; + si_ptr = core::ptr::addr_of_mut!(si) as _; } unsafe { @@ -935,7 +935,7 @@ fn make_proc_thread_attribute_list( // It's theoretically possible for the attribute count to exceed a u32 value. // Therefore, we ensure that we don't add more attributes than the buffer was initialized for. for (&attribute, value) in attributes.iter().take(attribute_count as usize) { - let value_ptr = &*value.data as *const (dyn Send + Sync) as _; + let value_ptr = core::ptr::addr_of!(*value.data) as _; cvt(unsafe { c::UpdateProcThreadAttribute( proc_thread_attribute_list.0.as_mut_ptr() as _, diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs index 5d8fd13785a09..bd1ae6b06076e 100644 --- a/library/std/src/sys/pal/windows/rand.rs +++ b/library/std/src/sys/pal/windows/rand.rs @@ -7,7 +7,7 @@ pub fn hashmap_random_keys() -> (u64, u64) { let ret = unsafe { c::BCryptGenRandom( ptr::null_mut(), - &mut v as *mut _ as *mut u8, + core::ptr::addr_of_mut!(v) as *mut u8, mem::size_of_val(&v) as c::ULONG, c::BCRYPT_USE_SYSTEM_PREFERRED_RNG, ) @@ -28,7 +28,7 @@ fn fallback_rng() -> (u64, u64) { let mut v = (0, 0); let ret = unsafe { - c::RtlGenRandom(&mut v as *mut _ as *mut c_void, mem::size_of_val(&v) as c::ULONG) + c::RtlGenRandom(core::ptr::addr_of_mut!(v) as *mut c_void, mem::size_of_val(&v) as c::ULONG) }; if ret != 0 { v } else { panic!("fallback RNG broken: {}", io::Error::last_os_error()) } diff --git a/library/std/src/sys/pal/windows/thread_parking.rs b/library/std/src/sys/pal/windows/thread_parking.rs index 343b530b15ef9..ea485d71f5adc 100644 --- a/library/std/src/sys/pal/windows/thread_parking.rs +++ b/library/std/src/sys/pal/windows/thread_parking.rs @@ -215,7 +215,7 @@ impl Parker { } fn ptr(&self) -> c::LPVOID { - &self.state as *const _ as c::LPVOID + core::ptr::addr_of!(self.state) as c::LPVOID } } diff --git a/library/std/src/sys_common/net.rs b/library/std/src/sys_common/net.rs index de7d31baaaf8c..581c46af0eacf 100644 --- a/library/std/src/sys_common/net.rs +++ b/library/std/src/sys_common/net.rs @@ -70,7 +70,7 @@ pub fn setsockopt( sock.as_raw(), level, option_name, - &option_value as *const T as *const _, + core::ptr::addr_of!(option_value) as *const _, 
mem::size_of::() as c::socklen_t, ))?; Ok(()) @@ -85,7 +85,7 @@ pub fn getsockopt(sock: &Socket, level: c_int, option_name: c_int) -> i sock.as_raw(), level, option_name, - &mut option_value as *mut T as *mut _, + core::ptr::addr_of_mut!(option_value) as *mut _, &mut option_len, ))?; Ok(option_value) @@ -99,7 +99,7 @@ where unsafe { let mut storage: c::sockaddr_storage = mem::zeroed(); let mut len = mem::size_of_val(&storage) as c::socklen_t; - cvt(f(&mut storage as *mut _ as *mut _, &mut len))?; + cvt(f(core::ptr::addr_of_mut!(storage) as *mut _, &mut len))?; sockaddr_to_addr(&storage, len as usize) } } @@ -444,7 +444,7 @@ impl TcpListener { pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> { let mut storage: c::sockaddr_storage = unsafe { mem::zeroed() }; let mut len = mem::size_of_val(&storage) as c::socklen_t; - let sock = self.inner.accept(&mut storage as *mut _ as *mut _, &mut len)?; + let sock = self.inner.accept(core::ptr::addr_of_mut!(storage) as *mut _, &mut len)?; let addr = sockaddr_to_addr(&storage, len as usize)?; Ok((TcpStream { inner: sock }, addr)) } diff --git a/library/std/src/sys_common/once/queue.rs b/library/std/src/sys_common/once/queue.rs index 3cc1df113e3f1..730cdb768bd27 100644 --- a/library/std/src/sys_common/once/queue.rs +++ b/library/std/src/sys_common/once/queue.rs @@ -212,7 +212,7 @@ fn wait(state_and_queue: &AtomicPtr, mut current_state: *mut Masked) { signaled: AtomicBool::new(false), next: current_state.with_addr(current_state.addr() & !STATE_MASK) as *const Waiter, }; - let me = &node as *const Waiter as *const Masked as *mut Masked; + let me = core::ptr::addr_of!(node) as *const Masked as *mut Masked; // Try to slide in the node at the head of the linked list, making sure // that another thread didn't just replace the head of the linked list. diff --git a/library/unwind/src/libunwind.rs b/library/unwind/src/libunwind.rs index 1b5f6f9dde36c..527c408c89edd 100644 --- a/library/unwind/src/libunwind.rs +++ b/library/unwind/src/libunwind.rs @@ -219,14 +219,14 @@ if #[cfg(any(target_os = "ios", target_os = "tvos", target_os = "watchos", targe pub unsafe fn _Unwind_GetGR(ctx: *mut _Unwind_Context, reg_index: c_int) -> _Unwind_Word { let mut val: _Unwind_Word = core::ptr::null(); _Unwind_VRS_Get(ctx, _UVRSC_CORE, reg_index as _Unwind_Word, _UVRSD_UINT32, - &mut val as *mut _ as *mut c_void); + core::ptr::addr_of_mut!(val) as *mut c_void); val } pub unsafe fn _Unwind_SetGR(ctx: *mut _Unwind_Context, reg_index: c_int, value: _Unwind_Word) { let mut value = value; _Unwind_VRS_Set(ctx, _UVRSC_CORE, reg_index as _Unwind_Word, _UVRSD_UINT32, - &mut value as *mut _ as *mut c_void); + core::ptr::addr_of_mut!(value) as *mut c_void); } pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) From f08e2d4137bdf0e443f4061c8b1289f0a92513be Mon Sep 17 00:00:00 2001 From: Gary Guo Date: Sat, 16 Dec 2023 01:49:01 +0000 Subject: [PATCH 118/134] Forbid use of `extern "C-unwind"` inside standard library Those libraries are build with `-C panic=unwind` and is expected to be linkable to `-C panic=abort` library. To ensure unsoundness compiler needs to prevent a `C-unwind` call to exist, as doing so may leak foreign exceptions into `-C panic=abort`. 
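As a hedged illustration (not part of this patch), the lint these crate attributes now deny fires on exactly the kind of call the message describes: a call to a foreign function, or through a function pointer, whose ABI is `C-unwind` (or another `-unwind` ABI). Denying such calls keeps the libraries sound, since they are the route by which a foreign exception could propagate into code later linked with `-C panic=abort`. A minimal sketch of what `#![deny(ffi_unwind_calls)]` rejects follows; the foreign function is hypothetical, and on older nightlies the snippet would additionally need `#![feature(c_unwind)]`:

#![deny(ffi_unwind_calls)]

extern "C-unwind" {
    // Hypothetical C function that may raise a foreign exception.
    fn may_unwind();
}

fn call_across_ffi() {
    // Denied by `ffi_unwind_calls`: a call with an FFI-unwind ABI could let
    // a foreign exception cross into a `-C panic=abort` build.
    unsafe { may_unwind() }
}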
--- library/alloc/src/lib.rs | 1 + library/core/src/lib.rs | 1 + library/proc_macro/src/lib.rs | 1 + 3 files changed, 3 insertions(+) diff --git a/library/alloc/src/lib.rs b/library/alloc/src/lib.rs index b84273848ee95..45e93feb6c5b3 100644 --- a/library/alloc/src/lib.rs +++ b/library/alloc/src/lib.rs @@ -92,6 +92,7 @@ #![warn(multiple_supertrait_upcastable)] #![allow(internal_features)] #![allow(rustdoc::redundant_explicit_links)] +#![deny(ffi_unwind_calls)] // // Library features: // tidy-alphabetical-start diff --git a/library/core/src/lib.rs b/library/core/src/lib.rs index 456d88122af64..49cead680e33b 100644 --- a/library/core/src/lib.rs +++ b/library/core/src/lib.rs @@ -106,6 +106,7 @@ #![allow(incomplete_features)] #![warn(multiple_supertrait_upcastable)] #![allow(internal_features)] +#![deny(ffi_unwind_calls)] // Do not check link redundancy on bootstraping phase #![allow(rustdoc::redundant_explicit_links)] // diff --git a/library/proc_macro/src/lib.rs b/library/proc_macro/src/lib.rs index d05458a6944ac..610966625b535 100644 --- a/library/proc_macro/src/lib.rs +++ b/library/proc_macro/src/lib.rs @@ -36,6 +36,7 @@ #![feature(strict_provenance)] #![recursion_limit = "256"] #![allow(internal_features)] +#![deny(ffi_unwind_calls)] #[unstable(feature = "proc_macro_internals", issue = "27812")] #[doc(hidden)] From 613cb3262dfca935cac5a198f835f2aaa73faf18 Mon Sep 17 00:00:00 2001 From: Pavel Grigorenko Date: Sat, 24 Feb 2024 16:47:34 +0300 Subject: [PATCH 119/134] compiler: use `addr_of!` --- compiler/rustc_codegen_llvm/src/back/archive.rs | 2 +- compiler/rustc_codegen_llvm/src/lib.rs | 4 ++-- compiler/rustc_codegen_llvm/src/llvm_util.rs | 2 +- compiler/rustc_data_structures/src/sync.rs | 2 +- compiler/rustc_middle/src/query/on_disk_cache.rs | 4 ++-- compiler/rustc_middle/src/ty/list.rs | 2 +- compiler/stable_mir/src/compiler_interface.rs | 2 +- 7 files changed, 9 insertions(+), 9 deletions(-) diff --git a/compiler/rustc_codegen_llvm/src/back/archive.rs b/compiler/rustc_codegen_llvm/src/back/archive.rs index 67bc86e4c9026..b6bbc81732ee2 100644 --- a/compiler/rustc_codegen_llvm/src/back/archive.rs +++ b/compiler/rustc_codegen_llvm/src/back/archive.rs @@ -313,7 +313,7 @@ fn get_llvm_object_symbols( llvm::LLVMRustGetSymbols( buf.as_ptr(), buf.len(), - &mut *state as *mut &mut _ as *mut c_void, + std::ptr::addr_of_mut!(*state) as *mut c_void, callback, error_callback, ) diff --git a/compiler/rustc_codegen_llvm/src/lib.rs b/compiler/rustc_codegen_llvm/src/lib.rs index 35210b0b2e86d..c84461e53eb1b 100644 --- a/compiler/rustc_codegen_llvm/src/lib.rs +++ b/compiler/rustc_codegen_llvm/src/lib.rs @@ -169,7 +169,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { fn print_pass_timings(&self) { unsafe { let mut size = 0; - let cstr = llvm::LLVMRustPrintPassTimings(&mut size as *mut usize); + let cstr = llvm::LLVMRustPrintPassTimings(std::ptr::addr_of_mut!(size)); if cstr.is_null() { println!("failed to get pass timings"); } else { @@ -182,7 +182,7 @@ impl WriteBackendMethods for LlvmCodegenBackend { fn print_statistics(&self) { unsafe { let mut size = 0; - let cstr = llvm::LLVMRustPrintStatistics(&mut size as *mut usize); + let cstr = llvm::LLVMRustPrintStatistics(std::ptr::addr_of_mut!(size)); if cstr.is_null() { println!("failed to get pass stats"); } else { diff --git a/compiler/rustc_codegen_llvm/src/llvm_util.rs b/compiler/rustc_codegen_llvm/src/llvm_util.rs index 54e8ed85e3250..1b2beac56a20b 100644 --- a/compiler/rustc_codegen_llvm/src/llvm_util.rs +++ 
b/compiler/rustc_codegen_llvm/src/llvm_util.rs @@ -435,7 +435,7 @@ pub(crate) fn print(req: &PrintRequest, mut out: &mut dyn PrintBackendInfo, sess &tm, cpu_cstring.as_ptr(), callback, - &mut out as *mut &mut dyn PrintBackendInfo as *mut c_void, + std::ptr::addr_of_mut!(out) as *mut c_void, ); } } diff --git a/compiler/rustc_data_structures/src/sync.rs b/compiler/rustc_data_structures/src/sync.rs index adcb6ceaebf89..32202ac3eded1 100644 --- a/compiler/rustc_data_structures/src/sync.rs +++ b/compiler/rustc_data_structures/src/sync.rs @@ -429,7 +429,7 @@ impl RwLock { #[inline(always)] pub fn leak(&self) -> &T { let guard = self.read(); - let ret = unsafe { &*(&*guard as *const T) }; + let ret = unsafe { &*std::ptr::addr_of!(*guard) }; std::mem::forget(guard); ret } diff --git a/compiler/rustc_middle/src/query/on_disk_cache.rs b/compiler/rustc_middle/src/query/on_disk_cache.rs index f4dfbe059ebcd..9c7c46f2ad24b 100644 --- a/compiler/rustc_middle/src/query/on_disk_cache.rs +++ b/compiler/rustc_middle/src/query/on_disk_cache.rs @@ -233,7 +233,7 @@ impl<'sess> OnDiskCache<'sess> { for (index, file) in files.iter().enumerate() { let index = SourceFileIndex(index as u32); - let file_ptr: *const SourceFile = &**file as *const _; + let file_ptr: *const SourceFile = std::ptr::addr_of!(**file); file_to_file_index.insert(file_ptr, index); let source_file_id = EncodedSourceFileId::new(tcx, file); file_index_to_stable_id.insert(index, source_file_id); @@ -835,7 +835,7 @@ pub struct CacheEncoder<'a, 'tcx> { impl<'a, 'tcx> CacheEncoder<'a, 'tcx> { #[inline] fn source_file_index(&mut self, source_file: Lrc) -> SourceFileIndex { - self.file_to_file_index[&(&*source_file as *const SourceFile)] + self.file_to_file_index[&std::ptr::addr_of!(*source_file)] } /// Encode something with additional information that allows to do some diff --git a/compiler/rustc_middle/src/ty/list.rs b/compiler/rustc_middle/src/ty/list.rs index 4f9c9d85763a2..336c2dce1141c 100644 --- a/compiler/rustc_middle/src/ty/list.rs +++ b/compiler/rustc_middle/src/ty/list.rs @@ -61,7 +61,7 @@ impl List { // length) that is 64-byte aligned, thus featuring the necessary // trailing padding for elements with up to 64-byte alignment. static EMPTY_SLICE: InOrder = InOrder(0, MaxAlign); - unsafe { &*(&EMPTY_SLICE as *const _ as *const List) } + unsafe { &*(std::ptr::addr_of!(EMPTY_SLICE) as *const List) } } pub fn len(&self) -> usize { diff --git a/compiler/stable_mir/src/compiler_interface.rs b/compiler/stable_mir/src/compiler_interface.rs index 6272f793f40cc..0f7d8d7e083bf 100644 --- a/compiler/stable_mir/src/compiler_interface.rs +++ b/compiler/stable_mir/src/compiler_interface.rs @@ -208,7 +208,7 @@ where if TLV.is_set() { Err(Error::from("StableMIR already running")) } else { - let ptr: *const () = &context as *const &_ as _; + let ptr: *const () = std::ptr::addr_of!(context) as _; TLV.set(&Cell::new(ptr), || Ok(f())) } } From c7be4439ade7cd4b1e506bd87f2c9b131c6d594e Mon Sep 17 00:00:00 2001 From: Ross Smyth Date: Fri, 23 Feb 2024 19:09:46 -0500 Subject: [PATCH 120/134] Windows miri-script execution egronomics This allows for Windows users to use miri-script without pain --- src/tools/miri/miri.bat | 10 ++++++++++ 1 file changed, 10 insertions(+) create mode 100644 src/tools/miri/miri.bat diff --git a/src/tools/miri/miri.bat b/src/tools/miri/miri.bat new file mode 100644 index 0000000000000..91e8a6e8f3b25 --- /dev/null +++ b/src/tools/miri/miri.bat @@ -0,0 +1,10 @@ +:: This makes execution of ./miri on Linux and Windows the same. 
+:: Windows will not execute the bash script, and select this. +@echo off +set MIRI_SCRIPT_TARGET_DIR=%0\..\miri-script\target +cargo build %CARGO_EXTRA_FLAGS% -q --target-dir %MIRI_SCRIPT_TARGET_DIR% --manifest-path %0\..\miri-script\Cargo.toml + +:: Forwards all arguments to this file to the executable. +:: We invoke the binary directly to avoid going through rustup, which would set some extra +:: env vars that we do not want. +%MIRI_SCRIPT_TARGET_DIR%\debug\miri-script %* From 3e7c2e1f6dcfb2d1b1c81885c274a4d73f91b0f3 Mon Sep 17 00:00:00 2001 From: Ross Smyth Date: Fri, 23 Feb 2024 12:03:01 -0500 Subject: [PATCH 121/134] Add "cargo miri clean" command --- src/tools/miri/README.md | 4 +- src/tools/miri/cargo-miri/src/phases.rs | 23 ++++++--- src/tools/miri/cargo-miri/src/setup.rs | 9 +--- src/tools/miri/cargo-miri/src/util.rs | 64 +++++++++++++++++++++++++ 4 files changed, 84 insertions(+), 16 deletions(-) diff --git a/src/tools/miri/README.md b/src/tools/miri/README.md index 60bf07b1736e4..944d2bbe87941 100644 --- a/src/tools/miri/README.md +++ b/src/tools/miri/README.md @@ -279,7 +279,7 @@ Miri builds and vice-versa. You may be running `cargo miri` with a different compiler version than the one used to build the custom libstd that Miri uses, and Miri failed to detect that. -Try deleting `~/.cache/miri`. +Try running `cargo miri clean`. #### "no mir for `std::rt::lang_start_internal`" @@ -465,7 +465,7 @@ Moreover, Miri recognizes some environment variables: must point to the `library` subdirectory of a `rust-lang/rust` repository checkout. Note that changing files in that directory does not automatically trigger a re-build of the standard library; you have to clear the Miri build - cache manually (on Linux, `rm -rf ~/.cache/miri`; + cache with `cargo miri clean` or deleting it manually (on Linux, `rm -rf ~/.cache/miri`; on Windows, `rmdir /S "%LOCALAPPDATA%\rust-lang\miri\cache"`; and on macOS, `rm -rf ~/Library/Caches/org.rust-lang.miri`). * `MIRI_SYSROOT` (recognized by `cargo miri` and the Miri driver) indicates the sysroot to use. When diff --git a/src/tools/miri/cargo-miri/src/phases.rs b/src/tools/miri/cargo-miri/src/phases.rs index 79ce1f4ca3226..315f7a23a912a 100644 --- a/src/tools/miri/cargo-miri/src/phases.rs +++ b/src/tools/miri/cargo-miri/src/phases.rs @@ -20,6 +20,7 @@ Subcommands: test, t Run tests nextest Run tests with nextest (requires cargo-nextest installed) setup Only perform automatic setup, but without asking questions (for getting a proper libstd) + clean Clean the Miri cache & target directory The cargo options are exactly the same as for `cargo run` and `cargo test`, respectively. @@ -74,14 +75,15 @@ pub fn phase_cargo_miri(mut args: impl Iterator) { // We cannot know which of those flags take arguments and which do not, // so we cannot detect subcommands later. let Some(subcommand) = args.next() else { - show_error!("`cargo miri` needs to be called with a subcommand (`run`, `test`)"); + show_error!("`cargo miri` needs to be called with a subcommand (`run`, `test`, `clean`)"); }; let subcommand = match &*subcommand { "setup" => MiriCommand::Setup, "test" | "t" | "run" | "r" | "nextest" => MiriCommand::Forward(subcommand), + "clean" => MiriCommand::Clean, _ => show_error!( - "`cargo miri` supports the following subcommands: `run`, `test`, `nextest`, and `setup`." + "`cargo miri` supports the following subcommands: `run`, `test`, `nextest`, `clean`, and `setup`." 
), }; let verbose = num_arg_flag("-v"); @@ -93,6 +95,16 @@ pub fn phase_cargo_miri(mut args: impl Iterator) { let target = get_arg_flag_value("--target"); let target = target.as_ref().unwrap_or(host); + // If cleaning the the target directory & sysroot cache, + // delete them then exit. There is no reason to setup a new + // sysroot in this execution. + if let MiriCommand::Clean = subcommand { + let metadata = get_cargo_metadata(); + clean_target_dir(&metadata); + clean_sysroot(); + return; + } + // We always setup. let miri_sysroot = setup(&subcommand, target, &rustc_version, verbose); @@ -110,6 +122,7 @@ pub fn phase_cargo_miri(mut args: impl Iterator) { let cargo_cmd = match subcommand { MiriCommand::Forward(s) => s, MiriCommand::Setup => return, // `cargo miri setup` stops here. + MiriCommand::Clean => unreachable!(), }; let metadata = get_cargo_metadata(); let mut cmd = cargo(); @@ -142,11 +155,7 @@ pub fn phase_cargo_miri(mut args: impl Iterator) { .arg(format!("target.'cfg(all())'.runner=[{cargo_miri_path_for_toml}, 'runner']")); // Set `--target-dir` to `miri` inside the original target directory. - let mut target_dir = match get_arg_flag_value("--target-dir") { - Some(dir) => PathBuf::from(dir), - None => metadata.target_directory.clone().into_std_path_buf(), - }; - target_dir.push("miri"); + let target_dir = get_target_dir(&metadata); cmd.arg("--target-dir").arg(target_dir); // *After* we set all the flags that need setting, forward everything else. Make sure to skip diff --git a/src/tools/miri/cargo-miri/src/setup.rs b/src/tools/miri/cargo-miri/src/setup.rs index 8ae5b8c3e82cc..a98e1fcd485ea 100644 --- a/src/tools/miri/cargo-miri/src/setup.rs +++ b/src/tools/miri/cargo-miri/src/setup.rs @@ -67,13 +67,8 @@ pub fn setup( } // Determine where to put the sysroot. - let sysroot_dir = match std::env::var_os("MIRI_SYSROOT") { - Some(dir) => PathBuf::from(dir), - None => { - let user_dirs = directories::ProjectDirs::from("org", "rust-lang", "miri").unwrap(); - user_dirs.cache_dir().to_owned() - } - }; + let sysroot_dir = get_sysroot_dir(); + // Sysroot configuration and build details. let no_std = match std::env::var_os("MIRI_NO_STD") { None => diff --git a/src/tools/miri/cargo-miri/src/util.rs b/src/tools/miri/cargo-miri/src/util.rs index 3c5912684556c..6c1a074cd8c6e 100644 --- a/src/tools/miri/cargo-miri/src/util.rs +++ b/src/tools/miri/cargo-miri/src/util.rs @@ -74,6 +74,8 @@ pub enum MiriCommand { Setup, /// A command to be forwarded to cargo. Forward(String), + /// Clean the miri cache + Clean, } /// Escapes `s` in a way that is suitable for using it as a string literal in TOML syntax. @@ -249,3 +251,65 @@ pub fn debug_cmd(prefix: &str, verbose: usize, cmd: &Command) { } eprintln!("{prefix} running command: {cmd:?}"); } + +/// Get the target directory for miri output. +/// +/// Either in an argument passed-in, or from cargo metadata. +pub fn get_target_dir(meta: &Metadata) -> PathBuf { + let mut output = match get_arg_flag_value("--target-dir") { + Some(dir) => PathBuf::from(dir), + None => meta.target_directory.clone().into_std_path_buf(), + }; + output.push("miri"); + output +} + +/// Determines where the sysroot of this exeuction is +/// +/// Either in a user-specified spot by an envar, or in a default cache location. 
+pub fn get_sysroot_dir() -> PathBuf { + match std::env::var_os("MIRI_SYSROOT") { + Some(dir) => PathBuf::from(dir), + None => { + let user_dirs = directories::ProjectDirs::from("org", "rust-lang", "miri").unwrap(); + user_dirs.cache_dir().to_owned() + } + } +} + +/// An idempotent version of the stdlib's remove_dir_all +/// it is considered a success if the directory was not there. +fn remove_dir_all_idem(dir: &Path) -> std::io::Result<()> { + match std::fs::remove_dir_all(dir) { + Ok(_) => Ok(()), + // If the directory doesn't exist, it is still a success. + Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(()), + Err(err) => Err(err), + } +} + +/// Deletes the Miri sysroot cache +/// Returns an error if the MIRI_SYSROOT env var is set. +pub fn clean_sysroot() { + if std::env::var_os("MIRI_SYSROOT").is_some() { + show_error!( + "MIRI_SYSROOT is set. Please clean your custom sysroot cache directory manually." + ) + } + + let sysroot_dir = get_sysroot_dir(); + + eprintln!("Cleaning sysroot cache at {}", sysroot_dir.display()); + + // Keep it simple, just remove the directory. + remove_dir_all_idem(&sysroot_dir).unwrap_or_else(|err| show_error!("{}", err)); +} + +/// Deletes the Miri target directory +pub fn clean_target_dir(meta: &Metadata) { + let target_dir = get_target_dir(meta); + + eprintln!("Cleaning target directory at {}", target_dir.display()); + + remove_dir_all_idem(&target_dir).unwrap_or_else(|err| show_error!("{}", err)) +} From cd36cda60c99cc17183a206c93713c375838af81 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sat, 24 Feb 2024 18:26:07 +0100 Subject: [PATCH 122/134] compiletest: call cargo-miri directly rather than via 'cargo run' --- src/tools/miri/tests/compiletest.rs | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/tools/miri/tests/compiletest.rs b/src/tools/miri/tests/compiletest.rs index 8bed321e6551c..d8f7cafe3b2ed 100644 --- a/src/tools/miri/tests/compiletest.rs +++ b/src/tools/miri/tests/compiletest.rs @@ -99,18 +99,15 @@ fn test_config(target: &str, path: &str, mode: Mode, with_dependencies: bool) -> }; if with_dependencies { + // Set the `cargo-miri` binary, which we expect to be in the same folder as the `miri` binary. + // (It's a separate crate, so we don't get an env var from cargo.) + let mut prog = miri_path(); + prog.set_file_name("cargo-miri"); + config.dependency_builder.program = prog; + let builder_args = ["miri", "run"]; // There is no `cargo miri build` so we just use `cargo miri run`. + config.dependency_builder.args = builder_args.into_iter().map(Into::into).collect(); config.dependencies_crate_manifest_path = Some(Path::new("test_dependencies").join("Cargo.toml")); - let mut builder_args = vec!["run".into()]; - builder_args.extend(flagsplit(&env::var("CARGO_EXTRA_FLAGS").unwrap_or_default())); - builder_args.extend([ - "--manifest-path".into(), - "cargo-miri/Cargo.toml".into(), - "--".into(), - "miri".into(), - "run".into(), // There is no `cargo miri build` so we just use `cargo miri run`. - ]); - config.dependency_builder.args = builder_args.into_iter().map(Into::into).collect(); // Reset `RUSTFLAGS` to work around . 
config.dependency_builder.envs.push(("RUSTFLAGS".into(), None)); } From 81d7069e342fe506baa2a8431a975ec63e9b9713 Mon Sep 17 00:00:00 2001 From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com> Date: Wed, 14 Feb 2024 22:08:03 +0100 Subject: [PATCH 123/134] Add `#[rustc_no_mir_inline]` for standard library UB checks Co-authored-by: Ben Kimock --- compiler/rustc_feature/src/builtin_attrs.rs | 4 ++++ compiler/rustc_mir_transform/src/inline.rs | 4 ++++ compiler/rustc_span/src/symbol.rs | 1 + library/core/src/intrinsics.rs | 18 +++++++++++++++--- ..._mir_inline.caller.Inline.panic-abort.diff | 19 +++++++++++++++++++ ...mir_inline.caller.Inline.panic-unwind.diff | 19 +++++++++++++++++++ ...ne.caller.PreCodegen.after.panic-abort.mir | 14 ++++++++++++++ ...e.caller.PreCodegen.after.panic-unwind.mir | 14 ++++++++++++++ tests/mir-opt/inline/rustc_no_mir_inline.rs | 17 +++++++++++++++++ 9 files changed, 107 insertions(+), 3 deletions(-) create mode 100644 tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-abort.diff create mode 100644 tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-unwind.diff create mode 100644 tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-abort.mir create mode 100644 tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-unwind.mir create mode 100644 tests/mir-opt/inline/rustc_no_mir_inline.rs diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 99875ec540545..003baf071b8c8 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -792,6 +792,10 @@ pub const BUILTIN_ATTRIBUTES: &[BuiltinAttribute] = &[ rustc_intrinsic, Normal, template!(Word), ErrorFollowing, "the `#[rustc_intrinsic]` attribute is used to declare intrinsics with function bodies", ), + rustc_attr!( + rustc_no_mir_inline, Normal, template!(Word), WarnFollowing, + "#[rustc_no_mir_inline] prevents the MIR inliner from inlining a function while not affecting codegen" + ), // ========================================================================== // Internal attributes, Testing: diff --git a/compiler/rustc_mir_transform/src/inline.rs b/compiler/rustc_mir_transform/src/inline.rs index 2009539d4d084..36546a03cdfc5 100644 --- a/compiler/rustc_mir_transform/src/inline.rs +++ b/compiler/rustc_mir_transform/src/inline.rs @@ -421,6 +421,10 @@ impl<'tcx> Inliner<'tcx> { callee_attrs: &CodegenFnAttrs, cross_crate_inlinable: bool, ) -> Result<(), &'static str> { + if self.tcx.has_attr(callsite.callee.def_id(), sym::rustc_no_mir_inline) { + return Err("#[rustc_no_mir_inline]"); + } + if let InlineAttr::Never = callee_attrs.inline { return Err("never inline hint"); } diff --git a/compiler/rustc_span/src/symbol.rs b/compiler/rustc_span/src/symbol.rs index 46472a131ff4b..609ab054da21e 100644 --- a/compiler/rustc_span/src/symbol.rs +++ b/compiler/rustc_span/src/symbol.rs @@ -1441,6 +1441,7 @@ symbols! { rustc_mir, rustc_must_implement_one_of, rustc_never_returns_null_ptr, + rustc_no_mir_inline, rustc_nonnull_optimization_guaranteed, rustc_nounwind, rustc_object_lifetime_default, diff --git a/library/core/src/intrinsics.rs b/library/core/src/intrinsics.rs index e42b2315ea5d7..f9d89795a9988 100644 --- a/library/core/src/intrinsics.rs +++ b/library/core/src/intrinsics.rs @@ -2706,13 +2706,25 @@ pub const unsafe fn const_deallocate(_ptr: *mut u8, _size: usize, _align: usize) macro_rules! 
assert_unsafe_precondition { ($message:expr, ($($name:ident:$ty:ty = $arg:expr),*$(,)?) => $e:expr $(,)?) => { { + // #[cfg(bootstrap)] (this comment) // When the standard library is compiled with debug assertions, we want the check to inline for better performance. // This is important when working on the compiler, which is compiled with debug assertions locally. // When not compiled with debug assertions (so the precompiled std) we outline the check to minimize the compile // time impact when debug assertions are disabled. - // It is not clear whether that is the best solution, see #120848. - #[cfg_attr(debug_assertions, inline(always))] - #[cfg_attr(not(debug_assertions), inline(never))] + // The proper solution to this is the `#[rustc_no_mir_inline]` below, but we still want decent performance for cfg(bootstrap). + #[cfg_attr(all(debug_assertions, bootstrap), inline(always))] + #[cfg_attr(all(not(debug_assertions), bootstrap), inline(never))] + + // This check is inlineable, but not by the MIR inliner. + // The reason for this is that the MIR inliner is in an exceptionally bad position + // to think about whether or not to inline this. In MIR, this call is gated behind `debug_assertions`, + // which will codegen to `false` in release builds. Inlining the check would be wasted work in that case and + // would be bad for compile times. + // + // LLVM on the other hand sees the constant branch, so if it's `false`, it can immediately delete it without + // inlining the check. If it's `true`, it can inline it and get significantly better performance. + #[cfg_attr(not(bootstrap), rustc_no_mir_inline)] + #[cfg_attr(not(bootstrap), inline)] #[rustc_nounwind] fn precondition_check($($name:$ty),*) { if !$e { diff --git a/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-abort.diff b/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-abort.diff new file mode 100644 index 0000000000000..dd79cff1dcf80 --- /dev/null +++ b/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-abort.diff @@ -0,0 +1,19 @@ +- // MIR for `caller` before Inline ++ // MIR for `caller` after Inline + + fn caller() -> () { + let mut _0: (); + let _1: (); + + bb0: { + StorageLive(_1); + _1 = callee() -> [return: bb1, unwind unreachable]; + } + + bb1: { + StorageDead(_1); + _0 = const (); + return; + } + } + diff --git a/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-unwind.diff b/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-unwind.diff new file mode 100644 index 0000000000000..4506a338edd7a --- /dev/null +++ b/tests/mir-opt/inline/rustc_no_mir_inline.caller.Inline.panic-unwind.diff @@ -0,0 +1,19 @@ +- // MIR for `caller` before Inline ++ // MIR for `caller` after Inline + + fn caller() -> () { + let mut _0: (); + let _1: (); + + bb0: { + StorageLive(_1); + _1 = callee() -> [return: bb1, unwind continue]; + } + + bb1: { + StorageDead(_1); + _0 = const (); + return; + } + } + diff --git a/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-abort.mir b/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-abort.mir new file mode 100644 index 0000000000000..d0772e51a07fe --- /dev/null +++ b/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-abort.mir @@ -0,0 +1,14 @@ +// MIR for `caller` after PreCodegen + +fn caller() -> () { + let mut _0: (); + let _1: (); + + bb0: { + _1 = callee() -> [return: bb1, unwind unreachable]; + } + + bb1: { + return; + } +} diff --git 
a/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-unwind.mir b/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-unwind.mir new file mode 100644 index 0000000000000..39ad4f1010b79 --- /dev/null +++ b/tests/mir-opt/inline/rustc_no_mir_inline.caller.PreCodegen.after.panic-unwind.mir @@ -0,0 +1,14 @@ +// MIR for `caller` after PreCodegen + +fn caller() -> () { + let mut _0: (); + let _1: (); + + bb0: { + _1 = callee() -> [return: bb1, unwind continue]; + } + + bb1: { + return; + } +} diff --git a/tests/mir-opt/inline/rustc_no_mir_inline.rs b/tests/mir-opt/inline/rustc_no_mir_inline.rs new file mode 100644 index 0000000000000..b008df32726b5 --- /dev/null +++ b/tests/mir-opt/inline/rustc_no_mir_inline.rs @@ -0,0 +1,17 @@ +// EMIT_MIR_FOR_EACH_PANIC_STRATEGY +#![crate_type = "lib"] +#![feature(rustc_attrs)] + +//@ compile-flags: -Zmir-opt-level=2 -Zinline-mir + +#[inline] +#[rustc_no_mir_inline] +pub fn callee() {} + +// EMIT_MIR rustc_no_mir_inline.caller.Inline.diff +// EMIT_MIR rustc_no_mir_inline.caller.PreCodegen.after.mir +pub fn caller() { + // CHECK-LABEL: fn caller( + // CHECK: callee() + callee(); +} From 93ec0e6299e31e6857e8ad741750034f35762b11 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Thu, 4 Jan 2024 19:06:37 +0000 Subject: [PATCH 124/134] Stabilize `cfg_target_abi` --- compiler/rustc_feature/src/accepted.rs | 2 + compiler/rustc_feature/src/builtin_attrs.rs | 1 - compiler/rustc_feature/src/unstable.rs | 2 - .../src/spec/targets/armv6_unknown_freebsd.rs | 3 +- .../targets/armv6_unknown_netbsd_eabihf.rs | 2 - .../src/spec/targets/armv7_unknown_freebsd.rs | 3 +- .../targets/armv7_unknown_netbsd_eabihf.rs | 2 - library/unwind/src/lib.rs | 2 +- tests/ui/cfg/cfg-target-abi.rs | 1 - tests/ui/check-cfg/well-known-values.rs | 1 - tests/ui/check-cfg/well-known-values.stderr | 56 +++++++++---------- .../feature-gate-cfg-target-abi.rs | 13 ----- .../feature-gate-cfg-target-abi.stderr | 43 -------------- 13 files changed, 33 insertions(+), 98 deletions(-) delete mode 100644 tests/ui/feature-gates/feature-gate-cfg-target-abi.rs delete mode 100644 tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr diff --git a/compiler/rustc_feature/src/accepted.rs b/compiler/rustc_feature/src/accepted.rs index 18f6ae35054b4..1b2993dabdb83 100644 --- a/compiler/rustc_feature/src/accepted.rs +++ b/compiler/rustc_feature/src/accepted.rs @@ -84,6 +84,8 @@ declare_features! ( (accepted, cfg_doctest, "1.40.0", Some(62210)), /// Enables `#[cfg(panic = "...")]` config key. (accepted, cfg_panic, "1.60.0", Some(77443)), + /// Allows `cfg(target_abi = "...")`. + (accepted, cfg_target_abi, "CURRENT_RUSTC_VERSION", Some(80970)), /// Allows `cfg(target_feature = "...")`. (accepted, cfg_target_feature, "1.27.0", Some(29717)), /// Allows `cfg(target_vendor = "...")`. 
diff --git a/compiler/rustc_feature/src/builtin_attrs.rs b/compiler/rustc_feature/src/builtin_attrs.rs index 99875ec540545..5b4221a9f400d 100644 --- a/compiler/rustc_feature/src/builtin_attrs.rs +++ b/compiler/rustc_feature/src/builtin_attrs.rs @@ -25,7 +25,6 @@ pub type GatedCfg = (Symbol, Symbol, GateFn); const GATED_CFGS: &[GatedCfg] = &[ // (name in cfg, feature, function to check if the feature is enabled) (sym::overflow_checks, sym::cfg_overflow_checks, cfg_fn!(cfg_overflow_checks)), - (sym::target_abi, sym::cfg_target_abi, cfg_fn!(cfg_target_abi)), (sym::target_thread_local, sym::cfg_target_thread_local, cfg_fn!(cfg_target_thread_local)), ( sym::target_has_atomic_equal_alignment, diff --git a/compiler/rustc_feature/src/unstable.rs b/compiler/rustc_feature/src/unstable.rs index 93c183a65ef3e..8eea4b7d8a8d3 100644 --- a/compiler/rustc_feature/src/unstable.rs +++ b/compiler/rustc_feature/src/unstable.rs @@ -373,8 +373,6 @@ declare_features! ( (unstable, cfg_sanitize, "1.41.0", Some(39699)), /// Allows `cfg(sanitizer_cfi_generalize_pointers)` and `cfg(sanitizer_cfi_normalize_integers)`. (unstable, cfg_sanitizer_cfi, "1.77.0", Some(89653)), - /// Allows `cfg(target_abi = "...")`. - (unstable, cfg_target_abi, "1.55.0", Some(80970)), /// Allows `cfg(target(abi = "..."))`. (unstable, cfg_target_compact, "1.63.0", Some(96901)), /// Allows `cfg(target_has_atomic_load_store = "...")`. diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs index 22f6ee8105576..70e40f60f22a0 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs index 84d9ceac04d46..ca0db5e564001 100644 --- a/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv6_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v6,+vfp2,-d32".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs index 9f4a432c6fa5f..61b6d7a63e358 100644 --- a/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_freebsd.rs @@ -8,8 +8,7 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: change env to "gnu" when cfg_target_abi becomes stable - env: "gnueabihf".into(), + env: "gnu".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "\u{1}__gnu_mcount_nc".into(), diff --git a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs index e5518c6daecf0..7afdb87b62eee 100644 
--- a/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs +++ b/compiler/rustc_target/src/spec/targets/armv7_unknown_netbsd_eabihf.rs @@ -8,8 +8,6 @@ pub fn target() -> Target { arch: "arm".into(), options: TargetOptions { abi: "eabihf".into(), - // FIXME: remove env when cfg_target_abi becomes stable - env: "eabihf".into(), features: "+v7,+vfp3,-d32,+thumb2,-neon".into(), max_atomic_width: Some(64), mcount: "__mcount".into(), diff --git a/library/unwind/src/lib.rs b/library/unwind/src/lib.rs index f5988a4df1364..a64f290463374 100644 --- a/library/unwind/src/lib.rs +++ b/library/unwind/src/lib.rs @@ -3,7 +3,7 @@ #![feature(link_cfg)] #![feature(staged_api)] #![feature(c_unwind)] -#![feature(cfg_target_abi)] +#![cfg_attr(bootstrap, feature(cfg_target_abi))] #![feature(strict_provenance)] #![cfg_attr(not(target_env = "msvc"), feature(libc))] #![allow(internal_features)] diff --git a/tests/ui/cfg/cfg-target-abi.rs b/tests/ui/cfg/cfg-target-abi.rs index 5d13337c1c303..306ae077325b4 100644 --- a/tests/ui/cfg/cfg-target-abi.rs +++ b/tests/ui/cfg/cfg-target-abi.rs @@ -1,5 +1,4 @@ //@ run-pass -#![feature(cfg_target_abi)] #[cfg(target_abi = "eabihf")] pub fn main() { diff --git a/tests/ui/check-cfg/well-known-values.rs b/tests/ui/check-cfg/well-known-values.rs index 0c55e35a993fb..859a36c604c6f 100644 --- a/tests/ui/check-cfg/well-known-values.rs +++ b/tests/ui/check-cfg/well-known-values.rs @@ -10,7 +10,6 @@ #![feature(cfg_overflow_checks)] #![feature(cfg_relocation_model)] #![feature(cfg_sanitize)] -#![feature(cfg_target_abi)] #![feature(cfg_target_has_atomic)] #![feature(cfg_target_has_atomic_equal_alignment)] #![feature(cfg_target_thread_local)] diff --git a/tests/ui/check-cfg/well-known-values.stderr b/tests/ui/check-cfg/well-known-values.stderr index 6535cd9a1a1f0..5f52421fef5b2 100644 --- a/tests/ui/check-cfg/well-known-values.stderr +++ b/tests/ui/check-cfg/well-known-values.stderr @@ -1,5 +1,5 @@ warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:26:5 + --> $DIR/well-known-values.rs:25:5 | LL | clippy = "_UNEXPECTED_VALUE", | ^^^^^^---------------------- @@ -11,7 +11,7 @@ LL | clippy = "_UNEXPECTED_VALUE", = note: `#[warn(unexpected_cfgs)]` on by default warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:28:5 + --> $DIR/well-known-values.rs:27:5 | LL | debug_assertions = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^---------------------- @@ -22,7 +22,7 @@ LL | debug_assertions = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:30:5 + --> $DIR/well-known-values.rs:29:5 | LL | doc = "_UNEXPECTED_VALUE", | ^^^---------------------- @@ -33,7 +33,7 @@ LL | doc = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:32:5 + --> $DIR/well-known-values.rs:31:5 | LL | doctest = "_UNEXPECTED_VALUE", | ^^^^^^^---------------------- @@ -44,7 +44,7 @@ LL | doctest = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:34:5 + --> $DIR/well-known-values.rs:33:5 | LL | miri = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -55,7 +55,7 @@ LL | miri = "_UNEXPECTED_VALUE", = note: see for more 
information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:36:5 + --> $DIR/well-known-values.rs:35:5 | LL | overflow_checks = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^---------------------- @@ -66,7 +66,7 @@ LL | overflow_checks = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:38:5 + --> $DIR/well-known-values.rs:37:5 | LL | panic = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -75,7 +75,7 @@ LL | panic = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:40:5 + --> $DIR/well-known-values.rs:39:5 | LL | proc_macro = "_UNEXPECTED_VALUE", | ^^^^^^^^^^---------------------- @@ -86,7 +86,7 @@ LL | proc_macro = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:42:5 + --> $DIR/well-known-values.rs:41:5 | LL | relocation_model = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -95,7 +95,7 @@ LL | relocation_model = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:44:5 + --> $DIR/well-known-values.rs:43:5 | LL | sanitize = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -104,7 +104,7 @@ LL | sanitize = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:46:5 + --> $DIR/well-known-values.rs:45:5 | LL | target_abi = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -113,7 +113,7 @@ LL | target_abi = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:48:5 + --> $DIR/well-known-values.rs:47:5 | LL | target_arch = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -122,7 +122,7 @@ LL | target_arch = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:50:5 + --> $DIR/well-known-values.rs:49:5 | LL | target_endian = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -131,16 +131,16 @@ LL | target_endian = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:52:5 + --> $DIR/well-known-values.rs:51:5 | LL | target_env = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ | - = note: expected values for `target_env` are: ``, `eabihf`, `gnu`, `gnueabihf`, `msvc`, `musl`, `newlib`, `nto70`, `nto71`, `ohos`, `psx`, `relibc`, `sgx`, `uclibc` + = note: expected values for `target_env` are: ``, `gnu`, `msvc`, `musl`, `newlib`, `nto70`, `nto71`, `ohos`, `psx`, `relibc`, `sgx`, `uclibc` = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:54:5 + --> 
$DIR/well-known-values.rs:53:5 | LL | target_family = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -149,7 +149,7 @@ LL | target_family = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:56:5 + --> $DIR/well-known-values.rs:55:5 | LL | target_feature = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -158,7 +158,7 @@ LL | target_feature = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:58:5 + --> $DIR/well-known-values.rs:57:5 | LL | target_has_atomic = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -167,7 +167,7 @@ LL | target_has_atomic = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:60:5 + --> $DIR/well-known-values.rs:59:5 | LL | target_has_atomic_equal_alignment = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -176,7 +176,7 @@ LL | target_has_atomic_equal_alignment = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:62:5 + --> $DIR/well-known-values.rs:61:5 | LL | target_has_atomic_load_store = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -185,7 +185,7 @@ LL | target_has_atomic_load_store = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:64:5 + --> $DIR/well-known-values.rs:63:5 | LL | target_os = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -194,7 +194,7 @@ LL | target_os = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:66:5 + --> $DIR/well-known-values.rs:65:5 | LL | target_pointer_width = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -203,7 +203,7 @@ LL | target_pointer_width = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:68:5 + --> $DIR/well-known-values.rs:67:5 | LL | target_thread_local = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^---------------------- @@ -214,7 +214,7 @@ LL | target_thread_local = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:70:5 + --> $DIR/well-known-values.rs:69:5 | LL | target_vendor = "_UNEXPECTED_VALUE", | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -223,7 +223,7 @@ LL | target_vendor = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:72:5 + --> $DIR/well-known-values.rs:71:5 | LL | test = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -234,7 +234,7 @@ LL | test = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional 
configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:74:5 + --> $DIR/well-known-values.rs:73:5 | LL | unix = "_UNEXPECTED_VALUE", | ^^^^---------------------- @@ -245,7 +245,7 @@ LL | unix = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `_UNEXPECTED_VALUE` - --> $DIR/well-known-values.rs:76:5 + --> $DIR/well-known-values.rs:75:5 | LL | windows = "_UNEXPECTED_VALUE", | ^^^^^^^---------------------- @@ -256,7 +256,7 @@ LL | windows = "_UNEXPECTED_VALUE", = note: see for more information about checking conditional configuration warning: unexpected `cfg` condition value: `linuz` - --> $DIR/well-known-values.rs:82:7 + --> $DIR/well-known-values.rs:81:7 | LL | #[cfg(target_os = "linuz")] // testing that we suggest `linux` | ^^^^^^^^^^^^------- diff --git a/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs b/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs deleted file mode 100644 index d005dc3ad456d..0000000000000 --- a/tests/ui/feature-gates/feature-gate-cfg-target-abi.rs +++ /dev/null @@ -1,13 +0,0 @@ -#[cfg(target_abi = "x")] //~ ERROR `cfg(target_abi)` is experimental -struct Foo(u64, u64); - -#[cfg_attr(target_abi = "x", x)] //~ ERROR `cfg(target_abi)` is experimental -struct Bar(u64, u64); - -#[cfg(not(any(all(target_abi = "x"))))] //~ ERROR `cfg(target_abi)` is experimental -fn foo() {} - -fn main() { - cfg!(target_abi = "x"); - //~^ ERROR `cfg(target_abi)` is experimental and subject to change -} diff --git a/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr b/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr deleted file mode 100644 index 4829f8572cc5f..0000000000000 --- a/tests/ui/feature-gates/feature-gate-cfg-target-abi.stderr +++ /dev/null @@ -1,43 +0,0 @@ -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:1:7 - | -LL | #[cfg(target_abi = "x")] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:4:12 - | -LL | #[cfg_attr(target_abi = "x", x)] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:7:19 - | -LL | #[cfg(not(any(all(target_abi = "x"))))] - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error[E0658]: `cfg(target_abi)` is experimental and subject to change - --> $DIR/feature-gate-cfg-target-abi.rs:11:10 - | -LL | cfg!(target_abi = "x"); - | ^^^^^^^^^^^^^^^^ - | - = note: see issue #80970 for more information - = help: add `#![feature(cfg_target_abi)]` to the crate attributes to enable - = note: this compiler was built on YYYY-MM-DD; consider upgrading it if it is out of date - -error: aborting due to 4 previous errors - -For more information about 
this error, try `rustc --explain E0658`. From 2401ae1896bf47d0c393dd21596817cf83153320 Mon Sep 17 00:00:00 2001 From: Nilstrieb <48135649+Nilstrieb@users.noreply.github.com> Date: Sat, 24 Feb 2024 22:44:21 +0100 Subject: [PATCH 125/134] Make most bootstrap step types !Copy This makes all bootstrap types except for `Compiler` and `TargetSelection` `!Copy`. This makes it easier to modify them by adding !Copy types in the future and comes at no cost of code clarity, the impls were completely unused. --- src/bootstrap/src/core/build_steps/check.rs | 4 +- src/bootstrap/src/core/build_steps/clean.rs | 2 +- src/bootstrap/src/core/build_steps/compile.rs | 6 +- src/bootstrap/src/core/build_steps/dist.rs | 38 +++++------ src/bootstrap/src/core/build_steps/doc.rs | 20 +++--- src/bootstrap/src/core/build_steps/install.rs | 4 +- src/bootstrap/src/core/build_steps/llvm.rs | 10 +-- src/bootstrap/src/core/build_steps/run.rs | 18 ++--- src/bootstrap/src/core/build_steps/setup.rs | 6 +- .../src/core/build_steps/synthetic_targets.rs | 2 +- src/bootstrap/src/core/build_steps/test.rs | 66 +++++++++---------- src/bootstrap/src/core/build_steps/tool.rs | 20 +++--- src/bootstrap/src/core/builder.rs | 2 +- 13 files changed, 99 insertions(+), 99 deletions(-) diff --git a/src/bootstrap/src/core/build_steps/check.rs b/src/bootstrap/src/core/build_steps/check.rs index 3ac60f15ef67e..f6f4253a36435 100644 --- a/src/bootstrap/src/core/build_steps/check.rs +++ b/src/bootstrap/src/core/build_steps/check.rs @@ -367,7 +367,7 @@ impl Step for CodegenBackend { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustAnalyzer { pub target: TargetSelection, } @@ -441,7 +441,7 @@ impl Step for RustAnalyzer { macro_rules! tool_check_step { ($name:ident, $path:literal, $($alias:literal, )* $source_type:path $(, $default:literal )?) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/clean.rs b/src/bootstrap/src/core/build_steps/clean.rs index 4b993945f19fc..17ca92f25a928 100644 --- a/src/bootstrap/src/core/build_steps/clean.rs +++ b/src/bootstrap/src/core/build_steps/clean.rs @@ -14,7 +14,7 @@ use crate::utils::cache::Interned; use crate::utils::helpers::t; use crate::{Build, Compiler, Mode, Subcommand}; -#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CleanAll {} impl Step for CleanAll { diff --git a/src/bootstrap/src/core/build_steps/compile.rs b/src/bootstrap/src/core/build_steps/compile.rs index d349cd67fed2c..938813044217b 100644 --- a/src/bootstrap/src/core/build_steps/compile.rs +++ b/src/bootstrap/src/core/build_steps/compile.rs @@ -727,7 +727,7 @@ fn apple_darwin_sign_file(file_path: &Path) { assert!(status.success()); } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct StartupObjects { pub compiler: Compiler, pub target: TargetSelection, @@ -1491,7 +1491,7 @@ pub fn compiler_file( PathBuf::from(out.trim()) } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Sysroot { pub compiler: Compiler, /// See [`Std::force_recompile`]. 
@@ -1653,7 +1653,7 @@ impl Step for Sysroot { } } -#[derive(Debug, Copy, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, PartialOrd, Ord, Clone, PartialEq, Eq, Hash)] pub struct Assemble { /// The compiler which we will produce in this step. Assemble itself will /// take care of ensuring that the necessary prerequisites to do so exist, diff --git a/src/bootstrap/src/core/build_steps/dist.rs b/src/bootstrap/src/core/build_steps/dist.rs index d9ab18e7250b0..fe50a787f9f05 100644 --- a/src/bootstrap/src/core/build_steps/dist.rs +++ b/src/bootstrap/src/core/build_steps/dist.rs @@ -50,7 +50,7 @@ fn should_build_extended_tool(builder: &Builder<'_>, tool: &str) -> bool { builder.config.tools.as_ref().map_or(true, |tools| tools.contains(tool)) } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Docs { pub host: TargetSelection, } @@ -83,7 +83,7 @@ impl Step for Docs { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct JsonDocs { pub host: TargetSelection, } @@ -121,7 +121,7 @@ impl Step for JsonDocs { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustcDocs { pub host: TargetSelection, } @@ -308,7 +308,7 @@ fn make_win_dist( } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Mingw { pub host: TargetSelection, } @@ -348,7 +348,7 @@ impl Step for Mingw { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rustc { pub compiler: Compiler, } @@ -476,7 +476,7 @@ impl Step for Rustc { let man_src = builder.src.join("src/doc/man"); let man_dst = image.join("share/man/man1"); - // don't use our `bootstrap::{copy, cp_r}`, because those try + // don't use our `bootstrap::{copy_internal, cp_r}`, because those try // to hardlink, and we don't want to edit the source templates for file_entry in builder.read_dir(&man_src) { let page_src = file_entry.path(); @@ -617,7 +617,7 @@ fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Std { pub compiler: Compiler, pub target: TargetSelection, @@ -664,7 +664,7 @@ impl Step for Std { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustcDev { pub compiler: Compiler, pub target: TargetSelection, @@ -723,7 +723,7 @@ impl Step for RustcDev { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Analysis { pub compiler: Compiler, pub target: TargetSelection, @@ -870,7 +870,7 @@ fn copy_src_dirs( } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Src; impl Step for Src { @@ -931,7 +931,7 @@ impl Step for Src { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct PlainSourceTarball; impl Step for PlainSourceTarball { @@ -1031,7 +1031,7 @@ impl Step for PlainSourceTarball { } } -#[derive(Debug, PartialOrd, Ord, 
Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Cargo { pub compiler: Compiler, pub target: TargetSelection, @@ -1080,7 +1080,7 @@ impl Step for Cargo { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rls { pub compiler: Compiler, pub target: TargetSelection, @@ -1122,7 +1122,7 @@ impl Step for Rls { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { pub compiler: Compiler, pub target: TargetSelection, @@ -1164,7 +1164,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Clippy { pub compiler: Compiler, pub target: TargetSelection, @@ -1212,7 +1212,7 @@ impl Step for Clippy { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Miri { pub compiler: Compiler, pub target: TargetSelection, @@ -1359,7 +1359,7 @@ impl Step for CodegenBackend { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Rustfmt { pub compiler: Compiler, pub target: TargetSelection, @@ -1404,7 +1404,7 @@ impl Step for Rustfmt { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct RustDemangler { pub compiler: Compiler, pub target: TargetSelection, @@ -1460,7 +1460,7 @@ impl Step for RustDemangler { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct Extended { stage: u32, host: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/doc.rs b/src/bootstrap/src/core/build_steps/doc.rs index 7a122a8676ba7..a4903ce235365 100644 --- a/src/bootstrap/src/core/build_steps/doc.rs +++ b/src/bootstrap/src/core/build_steps/doc.rs @@ -32,7 +32,7 @@ macro_rules! submodule_helper { macro_rules! book { ($($name:ident, $path:expr, $book_name:expr $(, submodule $(= $submodule:literal)? )? ;)+) => { $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { target: TargetSelection, } @@ -86,7 +86,7 @@ book!( StyleGuide, "src/doc/style-guide", "style-guide"; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct UnstableBook { target: TargetSelection, } @@ -160,7 +160,7 @@ impl Step for RustbookSrc

{ } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct TheBook { compiler: Compiler, target: TargetSelection, @@ -286,7 +286,7 @@ fn invoke_rustdoc( builder.run(&mut cmd); } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Standalone { compiler: Compiler, target: TargetSelection, @@ -389,7 +389,7 @@ impl Step for Standalone { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Releases { compiler: Compiler, target: TargetSelection, @@ -492,7 +492,7 @@ pub struct SharedAssetsPaths { pub version_info: PathBuf, } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct SharedAssets { target: TargetSelection, } @@ -872,7 +872,7 @@ macro_rules! tool_doc { $(is_library = $is_library:expr,)? $(crates = $crates:expr)? ) => { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $tool { target: TargetSelection, } @@ -1021,7 +1021,7 @@ tool_doc!( crates = ["bootstrap"] ); -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)] pub struct ErrorIndex { pub target: TargetSelection, } @@ -1056,7 +1056,7 @@ impl Step for ErrorIndex { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct UnstableBookGen { target: TargetSelection, } @@ -1112,7 +1112,7 @@ fn symlink_dir_force(config: &Config, original: &Path, link: &Path) { ); } -#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Ord, PartialOrd, Debug, Clone, Hash, PartialEq, Eq)] pub struct RustcBook { pub compiler: Compiler, pub target: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/install.rs b/src/bootstrap/src/core/build_steps/install.rs index 29238b90225af..6726671ddd9b3 100644 --- a/src/bootstrap/src/core/build_steps/install.rs +++ b/src/bootstrap/src/core/build_steps/install.rs @@ -159,7 +159,7 @@ macro_rules! 
install { only_hosts: $only_hosts:expr, $run_item:block $(, $c:ident)*;)+) => { $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -303,7 +303,7 @@ install!((self, builder, _config), }; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Src { pub stage: u32, } diff --git a/src/bootstrap/src/core/build_steps/llvm.rs b/src/bootstrap/src/core/build_steps/llvm.rs index 9622321a74e7c..4a92acfa3d90d 100644 --- a/src/bootstrap/src/core/build_steps/llvm.rs +++ b/src/bootstrap/src/core/build_steps/llvm.rs @@ -242,7 +242,7 @@ pub(crate) fn is_ci_llvm_modified(config: &Config) -> bool { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Llvm { pub target: TargetSelection, } @@ -815,7 +815,7 @@ fn get_var(var_base: &str, host: &str, target: &str) -> Option { .or_else(|| env::var_os(var_base)) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Lld { pub target: TargetSelection, } @@ -937,7 +937,7 @@ impl Step for Lld { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Sanitizers { pub target: TargetSelection, } @@ -1147,7 +1147,7 @@ impl HashStamp { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrtBeginEnd { pub target: TargetSelection, } @@ -1215,7 +1215,7 @@ impl Step for CrtBeginEnd { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Libunwind { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/run.rs b/src/bootstrap/src/core/build_steps/run.rs index 27b0c7760f078..61ee2fc1f6f3d 100644 --- a/src/bootstrap/src/core/build_steps/run.rs +++ b/src/bootstrap/src/core/build_steps/run.rs @@ -10,7 +10,7 @@ use crate::core::config::TargetSelection; use crate::utils::helpers::output; use crate::Mode; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ExpandYamlAnchors; impl Step for ExpandYamlAnchors { @@ -36,7 +36,7 @@ impl Step for ExpandYamlAnchors { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct BuildManifest; impl Step for BuildManifest { @@ -75,7 +75,7 @@ impl Step for BuildManifest { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct BumpStage0; impl Step for BumpStage0 { @@ -97,7 +97,7 @@ impl Step for BumpStage0 { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct ReplaceVersionPlaceholder; impl Step for ReplaceVersionPlaceholder { @@ -119,7 +119,7 @@ impl Step for ReplaceVersionPlaceholder { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Miri { stage: u32, host: TargetSelection, @@ -178,7 +178,7 @@ impl Step for Miri { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct CollectLicenseMetadata; impl Step for CollectLicenseMetadata { @@ -210,7 +210,7 @@ impl Step for 
CollectLicenseMetadata { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct GenerateCopyright; impl Step for GenerateCopyright { @@ -240,7 +240,7 @@ impl Step for GenerateCopyright { } } -#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, PartialOrd, Ord, Clone, Hash, PartialEq, Eq)] pub struct GenerateWindowsSys; impl Step for GenerateWindowsSys { @@ -262,7 +262,7 @@ impl Step for GenerateWindowsSys { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct GenerateCompletions; macro_rules! generate_completions { diff --git a/src/bootstrap/src/core/build_steps/setup.rs b/src/bootstrap/src/core/build_steps/setup.rs index 74a5578b43ec1..7bc68b5aec11f 100644 --- a/src/bootstrap/src/core/build_steps/setup.rs +++ b/src/bootstrap/src/core/build_steps/setup.rs @@ -233,7 +233,7 @@ fn setup_config_toml(path: &PathBuf, profile: Profile, config: &Config) { } /// Creates a toolchain link for stage1 using `rustup` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Link; impl Step for Link { type Output = (); @@ -444,7 +444,7 @@ fn prompt_user(prompt: &str) -> io::Result> { } /// Installs `src/etc/pre-push.sh` as a Git hook -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Hook; impl Step for Hook { @@ -516,7 +516,7 @@ undesirable, simply delete the `pre-push` file from .git/hooks." } /// Sets up or displays `src/etc/rust_analyzer_settings.json` -#[derive(Clone, Copy, Debug, Eq, PartialEq, Hash)] +#[derive(Clone, Debug, Eq, PartialEq, Hash)] pub struct Vscode; impl Step for Vscode { diff --git a/src/bootstrap/src/core/build_steps/synthetic_targets.rs b/src/bootstrap/src/core/build_steps/synthetic_targets.rs index a00835402ec8b..89d50b5ffffa4 100644 --- a/src/bootstrap/src/core/build_steps/synthetic_targets.rs +++ b/src/bootstrap/src/core/build_steps/synthetic_targets.rs @@ -12,7 +12,7 @@ use crate::core::config::TargetSelection; use crate::Compiler; use std::process::{Command, Stdio}; -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub(crate) struct MirOptPanicAbortSyntheticTarget { pub(crate) compiler: Compiler, pub(crate) base: TargetSelection, diff --git a/src/bootstrap/src/core/build_steps/test.rs b/src/bootstrap/src/core/build_steps/test.rs index 791f847a8661a..a2c6e79d5e25c 100644 --- a/src/bootstrap/src/core/build_steps/test.rs +++ b/src/bootstrap/src/core/build_steps/test.rs @@ -86,7 +86,7 @@ impl Step for CrateBootstrap { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Linkcheck { host: TargetSelection, } @@ -179,7 +179,7 @@ fn check_if_tidy_is_installed() -> bool { .map_or(false, |status| status.success()) } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct HtmlCheck { target: TargetSelection, } @@ -220,7 +220,7 @@ impl Step for HtmlCheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Cargotest { stage: u32, host: TargetSelection, @@ -266,7 +266,7 @@ impl Step for Cargotest { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Cargo { stage: u32, host: TargetSelection, @@ -327,7 +327,7 @@ impl Step 
for Cargo { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustAnalyzer { stage: u32, host: TargetSelection, @@ -386,7 +386,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Rustfmt { stage: u32, host: TargetSelection, @@ -433,7 +433,7 @@ impl Step for Rustfmt { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustDemangler { stage: u32, host: TargetSelection, @@ -492,7 +492,7 @@ impl Step for RustDemangler { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Miri { stage: u32, host: TargetSelection, @@ -699,7 +699,7 @@ impl Step for Miri { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CompiletestTest { host: TargetSelection, } @@ -747,7 +747,7 @@ impl Step for CompiletestTest { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Clippy { stage: u32, host: TargetSelection, @@ -815,7 +815,7 @@ fn path_for_cargo(builder: &Builder<'_>, compiler: Compiler) -> OsString { env::join_paths(iter::once(path).chain(env::split_paths(&old_path))).expect("") } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocTheme { pub compiler: Compiler, } @@ -852,7 +852,7 @@ impl Step for RustdocTheme { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocJSStd { pub target: TargetSelection, } @@ -912,7 +912,7 @@ impl Step for RustdocJSStd { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocJSNotStd { pub target: TargetSelection, pub compiler: Compiler, @@ -966,7 +966,7 @@ fn get_browser_ui_test_version(npm: &Path) -> Option { .or_else(|| get_browser_ui_test_version_inner(npm, true)) } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustdocGUI { pub target: TargetSelection, pub compiler: Compiler, @@ -1060,7 +1060,7 @@ impl Step for RustdocGUI { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Tidy; impl Step for Tidy { @@ -1151,7 +1151,7 @@ HELP: to skip test's attempt to check tidiness, pass `--skip src/tools/tidy` to } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ExpandYamlAnchors; impl Step for ExpandYamlAnchors { @@ -1251,7 +1251,7 @@ macro_rules! test_definitions { host: $host:expr, compare_mode: $compare_mode:expr }) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -1294,7 +1294,7 @@ macro_rules! coverage_test_alias { default: $default:expr, only_hosts: $only_hosts:expr $(,)? }) => { - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -1376,7 +1376,7 @@ default_test!(Assembly { path: "tests/assembly", mode: "assembly", suite: "assem /// /// Each individual mode also has its own alias that will run the tests in /// just that mode. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Coverage { pub compiler: Compiler, pub target: TargetSelection, @@ -1438,7 +1438,7 @@ host_test!(CoverageRunRustdoc { }); // For the mir-opt suite we do not use macros, as we need custom behavior when blessing. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct MirOpt { pub compiler: Compiler, pub target: TargetSelection, @@ -1494,7 +1494,7 @@ impl Step for MirOpt { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] struct Compiletest { compiler: Compiler, target: TargetSelection, @@ -2142,7 +2142,7 @@ impl BookTest { macro_rules! test_book { ($($name:ident, $path:expr, $book_name:expr, default=$default:expr;)+) => { $( - #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct $name { compiler: Compiler, } @@ -2187,7 +2187,7 @@ test_book!( EditionGuide, "src/doc/edition-guide", "edition-guide", default=false; ); -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct ErrorIndex { compiler: Compiler, } @@ -2264,7 +2264,7 @@ fn markdown_test(builder: &Builder<'_>, compiler: Compiler, markdown: &Path) -> } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustcGuide; impl Step for RustcGuide { @@ -2537,7 +2537,7 @@ impl Step for Crate { } /// Rustdoc is special in various ways, which is why this step is different from `Crate`. -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateRustdoc { host: TargetSelection, } @@ -2638,7 +2638,7 @@ impl Step for CrateRustdoc { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct CrateRustdocJsonTypes { host: TargetSelection, } @@ -2708,7 +2708,7 @@ impl Step for CrateRustdocJsonTypes { /// QEMU we have to build our own tools so we've got conditional dependencies /// on those programs as well. Note that the remote test client is built for /// the build target (us) and the server is built for the target. 
-#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RemoteCopyLibs { compiler: Compiler, target: TargetSelection, @@ -2754,7 +2754,7 @@ impl Step for RemoteCopyLibs { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Distcheck; impl Step for Distcheck { @@ -2824,7 +2824,7 @@ impl Step for Distcheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Bootstrap; impl Step for Bootstrap { @@ -2876,7 +2876,7 @@ impl Step for Bootstrap { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TierCheck { pub compiler: Compiler, } @@ -2926,7 +2926,7 @@ impl Step for TierCheck { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct LintDocs { pub compiler: Compiler, pub target: TargetSelection, @@ -2959,7 +2959,7 @@ impl Step for LintDocs { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct RustInstaller; impl Step for RustInstaller { @@ -3020,7 +3020,7 @@ impl Step for RustInstaller { } } -#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct TestHelpers { pub target: TargetSelection, } diff --git a/src/bootstrap/src/core/build_steps/tool.rs b/src/bootstrap/src/core/build_steps/tool.rs index ba867a04ec55f..889876f461df2 100644 --- a/src/bootstrap/src/core/build_steps/tool.rs +++ b/src/bootstrap/src/core/build_steps/tool.rs @@ -15,7 +15,7 @@ use crate::Compiler; use crate::Mode; use crate::{gha, Kind}; -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub enum SourceType { InTree, Submodule, @@ -220,7 +220,7 @@ macro_rules! bootstrap_tool { $(,allow_features = $allow_features:expr)? ; )+) => { - #[derive(Copy, PartialEq, Eq, Clone)] + #[derive(PartialEq, Eq, Clone)] pub enum Tool { $( $name, @@ -241,7 +241,7 @@ macro_rules! bootstrap_tool { } $( - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct $name { pub compiler: Compiler, pub target: TargetSelection, @@ -315,7 +315,7 @@ bootstrap_tool!( CoverageDump, "src/tools/coverage-dump", "coverage-dump"; ); -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct ErrorIndex { pub compiler: Compiler, } @@ -369,7 +369,7 @@ impl Step for ErrorIndex { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RemoteTestServer { pub compiler: Compiler, pub target: TargetSelection, @@ -403,7 +403,7 @@ impl Step for RemoteTestServer { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] +#[derive(Debug, Clone, Hash, PartialEq, Eq, Ord, PartialOrd)] pub struct Rustdoc { /// This should only ever be 0 or 2. /// We sometimes want to reference the "bootstrap" rustdoc, which is why this option is here. 
@@ -515,7 +515,7 @@ impl Step for Rustdoc { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct Cargo { pub compiler: Compiler, pub target: TargetSelection, @@ -560,7 +560,7 @@ impl Step for Cargo { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct LldWrapper { pub compiler: Compiler, pub target: TargetSelection, @@ -589,7 +589,7 @@ impl Step for LldWrapper { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzer { pub compiler: Compiler, pub target: TargetSelection, @@ -637,7 +637,7 @@ impl Step for RustAnalyzer { } } -#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] +#[derive(Debug, Clone, Hash, PartialEq, Eq)] pub struct RustAnalyzerProcMacroSrv { pub compiler: Compiler, pub target: TargetSelection, diff --git a/src/bootstrap/src/core/builder.rs b/src/bootstrap/src/core/builder.rs index 0ec5e16de1da5..e169cba3c13ae 100644 --- a/src/bootstrap/src/core/builder.rs +++ b/src/bootstrap/src/core/builder.rs @@ -1069,7 +1069,7 @@ impl<'a> Builder<'a> { /// Returns the libdir where the standard library and other artifacts are /// found for a compiler's sysroot. pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct Libdir { compiler: Compiler, target: TargetSelection, From 24aa34858645e96316bb61f4a095d13486a56892 Mon Sep 17 00:00:00 2001 From: Michael Howell Date: Sat, 24 Feb 2024 15:38:55 -0700 Subject: [PATCH 126/134] Add test cases for inlining compiler-private items Closes #106421 This was already fixed by f5d43a052b9eb464e54af819143467954d814a24, but now the test cases are added. --- tests/rustdoc/auxiliary/issue-106421-force-unstable.rs | 9 +++++++++ tests/rustdoc/issue-106421-not-internal.rs | 8 ++++++++ tests/rustdoc/issue-106421.rs | 8 ++++++++ 3 files changed, 25 insertions(+) create mode 100644 tests/rustdoc/auxiliary/issue-106421-force-unstable.rs create mode 100644 tests/rustdoc/issue-106421-not-internal.rs create mode 100644 tests/rustdoc/issue-106421.rs diff --git a/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs b/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs new file mode 100644 index 0000000000000..589d44c1f774d --- /dev/null +++ b/tests/rustdoc/auxiliary/issue-106421-force-unstable.rs @@ -0,0 +1,9 @@ +//@ compile-flags: -Zforce-unstable-if-unmarked +#![crate_name="foo"] +pub struct FatalError; + +impl FatalError { + pub fn raise(self) -> ! { + loop {} + } +} diff --git a/tests/rustdoc/issue-106421-not-internal.rs b/tests/rustdoc/issue-106421-not-internal.rs new file mode 100644 index 0000000000000..1d744fba53f8a --- /dev/null +++ b/tests/rustdoc/issue-106421-not-internal.rs @@ -0,0 +1,8 @@ +//@ aux-build:issue-106421-force-unstable.rs +//@ ignore-cross-compile +// This is the version where a non-compiler-internal crate inlines a compiler-internal one. +// In this case, the item shouldn't be documented, because regular users can't get at it. 
+extern crate foo; + +// @!has issue_106421_not_internal/struct.FatalError.html '//*[@id="method.raise"]' 'fn raise' +pub use foo::FatalError; diff --git a/tests/rustdoc/issue-106421.rs b/tests/rustdoc/issue-106421.rs new file mode 100644 index 0000000000000..d4141a4ab0c70 --- /dev/null +++ b/tests/rustdoc/issue-106421.rs @@ -0,0 +1,8 @@ +//@ aux-build:issue-106421-force-unstable.rs +//@ ignore-cross-compile +//@ compile-flags: -Zforce-unstable-if-unmarked + +extern crate foo; + +// @has issue_106421/struct.FatalError.html '//*[@id="method.raise"]' 'fn raise' +pub use foo::FatalError; From 9577051174b9c77ffc4c5b427d90ba49608d5697 Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Sun, 25 Feb 2024 08:17:20 +0100 Subject: [PATCH 127/134] bump rustc-build-sysroot --- Cargo.lock | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cc2c7dd7e16be..a6e016afd82d6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3332,13 +3332,14 @@ dependencies = [ [[package]] name = "rustc-build-sysroot" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39dcf8d82b1f79a179bdb284dc44db440a9666eefa5a6df5ef282d6db930d544" +checksum = "a26170e1d79ea32f7ccec3188dd13cfc1f18c82764a9cbc1071667c0f865a4ea" dependencies = [ "anyhow", "rustc_version", "tempfile", + "walkdir", ] [[package]] From 9caeabe2e2e9dab5356b06d2b1a46ae11cc59ea1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 25 Feb 2024 09:56:19 +0200 Subject: [PATCH 128/134] Add missing imports --- .../crates/hir-ty/src/mir/lower/pattern_matching.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs index a6d5ce723e31d..85c8d1685b874 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/mir/lower/pattern_matching.rs @@ -1,6 +1,6 @@ //! 
MIR lowering for patterns -use hir_def::AssocItemId; +use hir_def::{hir::LiteralOrConst, resolver::HasResolver, AssocItemId}; use crate::{ mir::lower::{ From 5ee6a5d704fe9259e9719dd5a31fa3f69b68489c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lauren=C8=9Biu=20Nicola?= Date: Sun, 25 Feb 2024 09:58:11 +0200 Subject: [PATCH 129/134] Avoid using cfg(FALSE) --- src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs index 40a195f7d95a7..e678a2fee1321 100644 --- a/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs +++ b/src/tools/rust-analyzer/crates/hir-ty/src/chalk_db.rs @@ -742,9 +742,8 @@ pub(crate) fn adt_datum_query( phantom_data, }; - #[cfg(FALSE)] // this slows down rust-analyzer by quite a bit unfortunately, so enabling this is currently not worth it - let variant_id_to_fields = |id: VariantId| { + let _variant_id_to_fields = |id: VariantId| { let variant_data = &id.variant_data(db.upcast()); let fields = if variant_data.fields().is_empty() { vec![] From 5b7786cd1d246fd0bbe748c8690fb99b1134d2be Mon Sep 17 00:00:00 2001 From: Ralf Jung Date: Thu, 8 Feb 2024 20:08:19 +0100 Subject: [PATCH 130/134] make non-PartialEq-typed consts as patterns a hard error --- compiler/rustc_lint/src/lib.rs | 5 ++ compiler/rustc_lint_defs/src/builtin.rs | 52 ------------------- compiler/rustc_mir_build/src/errors.rs | 14 ++--- .../src/thir/pattern/const_to_pat.rs | 15 +++--- .../ui/consts/const_in_pattern/issue-65466.rs | 7 +-- .../const_in_pattern/issue-65466.stderr | 21 ++------ .../match/issue-72896-non-partial-eq-const.rs | 4 +- .../issue-72896-non-partial-eq-const.stderr | 21 ++------ 8 files changed, 27 insertions(+), 112 deletions(-) diff --git a/compiler/rustc_lint/src/lib.rs b/compiler/rustc_lint/src/lib.rs index d8e12c04f7536..844f87c3f5055 100644 --- a/compiler/rustc_lint/src/lib.rs +++ b/compiler/rustc_lint/src/lib.rs @@ -527,6 +527,11 @@ fn register_builtins(store: &mut LintStore) { "no longer needed, see #93367 \ for more information", ); + store.register_removed( + "const_patterns_without_partial_eq", + "converted into hard error, see RFC #3535 \ + for more information", + ); } fn register_internals(store: &mut LintStore) { diff --git a/compiler/rustc_lint_defs/src/builtin.rs b/compiler/rustc_lint_defs/src/builtin.rs index 84a050a242a69..1cddb45428c8b 100644 --- a/compiler/rustc_lint_defs/src/builtin.rs +++ b/compiler/rustc_lint_defs/src/builtin.rs @@ -32,7 +32,6 @@ declare_lint_pass! { CONFLICTING_REPR_HINTS, CONST_EVALUATABLE_UNCHECKED, CONST_ITEM_MUTATION, - CONST_PATTERNS_WITHOUT_PARTIAL_EQ, DEAD_CODE, DEPRECATED, DEPRECATED_CFG_ATTR_CRATE_TYPE_NAME, @@ -2342,57 +2341,6 @@ declare_lint! { }; } -declare_lint! { - /// The `const_patterns_without_partial_eq` lint detects constants that are used in patterns, - /// whose type does not implement `PartialEq`. 
- /// - /// ### Example - /// - /// ```rust,compile_fail - /// #![deny(const_patterns_without_partial_eq)] - /// - /// trait EnumSetType { - /// type Repr; - /// } - /// - /// enum Enum8 { } - /// impl EnumSetType for Enum8 { - /// type Repr = u8; - /// } - /// - /// #[derive(PartialEq, Eq)] - /// struct EnumSet { - /// __enumset_underlying: T::Repr, - /// } - /// - /// const CONST_SET: EnumSet = EnumSet { __enumset_underlying: 3 }; - /// - /// fn main() { - /// match CONST_SET { - /// CONST_SET => { /* ok */ } - /// _ => panic!("match fell through?"), - /// } - /// } - /// ``` - /// - /// {{produces}} - /// - /// ### Explanation - /// - /// Previous versions of Rust accepted constants in patterns, even if those constants' types - /// did not have `PartialEq` implemented. The compiler falls back to comparing the value - /// field-by-field. In the future we'd like to ensure that pattern matching always - /// follows `PartialEq` semantics, so that trait bound will become a requirement for - /// matching on constants. - pub CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - Warn, - "constant in pattern does not implement `PartialEq`", - @future_incompatible = FutureIncompatibleInfo { - reason: FutureIncompatibilityReason::FutureReleaseErrorReportInDeps, - reference: "issue #116122 ", - }; -} - declare_lint! { /// The `ambiguous_associated_items` lint detects ambiguity between /// [associated items] and [enum variants]. diff --git a/compiler/rustc_mir_build/src/errors.rs b/compiler/rustc_mir_build/src/errors.rs index 48b93ce0ac5cb..101f1cb9f2f5f 100644 --- a/compiler/rustc_mir_build/src/errors.rs +++ b/compiler/rustc_mir_build/src/errors.rs @@ -767,6 +767,14 @@ pub struct TypeNotStructural<'tcx> { pub non_sm_ty: Ty<'tcx>, } +#[derive(Diagnostic)] +#[diag(mir_build_non_partial_eq_match)] +pub struct TypeNotPartialEq<'tcx> { + #[primary_span] + pub span: Span, + pub non_peq_ty: Ty<'tcx>, +} + #[derive(Diagnostic)] #[diag(mir_build_invalid_pattern)] pub struct InvalidPattern<'tcx> { @@ -822,12 +830,6 @@ pub struct NontrivialStructuralMatch<'tcx> { pub non_sm_ty: Ty<'tcx>, } -#[derive(LintDiagnostic)] -#[diag(mir_build_non_partial_eq_match)] -pub struct NonPartialEqMatch<'tcx> { - pub non_peq_ty: Ty<'tcx>, -} - #[derive(Diagnostic)] #[diag(mir_build_pattern_not_covered, code = E0005)] pub(crate) struct PatternNotCovered<'s, 'tcx> { diff --git a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs index c77c80d9f4b29..09727f9b71bef 100644 --- a/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs +++ b/compiler/rustc_mir_build/src/thir/pattern/const_to_pat.rs @@ -16,7 +16,7 @@ use std::cell::Cell; use super::PatCtxt; use crate::errors::{ - IndirectStructuralMatch, InvalidPattern, NaNPattern, NonPartialEqMatch, PointerPattern, + IndirectStructuralMatch, InvalidPattern, NaNPattern, PointerPattern, TypeNotPartialEq, TypeNotStructural, UnionPattern, UnsizedPattern, }; @@ -208,15 +208,12 @@ impl<'tcx> ConstToPat<'tcx> { ); } - // Always check for `PartialEq`, even if we emitted other lints. (But not if there were - // any errors.) This ensures it shows up in cargo's future-compat reports as well. + // Always check for `PartialEq` if we had no other errors yet. 
if !self.type_has_partial_eq_impl(cv.ty()) { - self.tcx().emit_node_span_lint( - lint::builtin::CONST_PATTERNS_WITHOUT_PARTIAL_EQ, - self.id, - self.span, - NonPartialEqMatch { non_peq_ty: cv.ty() }, - ); + let err = TypeNotPartialEq { span: self.span, non_peq_ty: cv.ty() }; + let e = self.tcx().dcx().emit_err(err); + let kind = PatKind::Error(e); + return Box::new(Pat { span: self.span, ty: cv.ty(), kind }); } } diff --git a/tests/ui/consts/const_in_pattern/issue-65466.rs b/tests/ui/consts/const_in_pattern/issue-65466.rs index 048fca762d5a1..62efce648761a 100644 --- a/tests/ui/consts/const_in_pattern/issue-65466.rs +++ b/tests/ui/consts/const_in_pattern/issue-65466.rs @@ -1,7 +1,3 @@ -#![deny(indirect_structural_match)] - -//@ check-pass - #[derive(PartialEq, Eq)] enum O { Some(*const T), // Can also use PhantomData @@ -15,8 +11,7 @@ const C: &[O] = &[O::None]; fn main() { let x = O::None; match &[x][..] { - C => (), //~WARN: the type must implement `PartialEq` - //~| previously accepted + C => (), //~ERROR: the type must implement `PartialEq` _ => (), } } diff --git a/tests/ui/consts/const_in_pattern/issue-65466.stderr b/tests/ui/consts/const_in_pattern/issue-65466.stderr index 9c80cb3a849c0..7d5e5b5b0c64a 100644 --- a/tests/ui/consts/const_in_pattern/issue-65466.stderr +++ b/tests/ui/consts/const_in_pattern/issue-65466.stderr @@ -1,23 +1,8 @@ -warning: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-65466.rs:18:9 +error: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` + --> $DIR/issue-65466.rs:14:9 | LL | C => (), | ^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default - -warning: 1 warning emitted -Future incompatibility report: Future breakage diagnostic: -warning: to use a constant of type `&[O]` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-65466.rs:18:9 - | -LL | C => (), - | ^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! 
- = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default +error: aborting due to 1 previous error diff --git a/tests/ui/match/issue-72896-non-partial-eq-const.rs b/tests/ui/match/issue-72896-non-partial-eq-const.rs index d497271460849..f15eae83896d1 100644 --- a/tests/ui/match/issue-72896-non-partial-eq-const.rs +++ b/tests/ui/match/issue-72896-non-partial-eq-const.rs @@ -1,4 +1,3 @@ -//@ run-pass trait EnumSetType { type Repr; } @@ -17,8 +16,7 @@ const CONST_SET: EnumSet = EnumSet { __enumset_underlying: 3 }; fn main() { match CONST_SET { - CONST_SET => { /* ok */ } //~WARN: must implement `PartialEq` - //~| previously accepted + CONST_SET => { /* ok */ } //~ERROR: must implement `PartialEq` _ => panic!("match fell through?"), } } diff --git a/tests/ui/match/issue-72896-non-partial-eq-const.stderr b/tests/ui/match/issue-72896-non-partial-eq-const.stderr index a7fc0cfc05457..4155586c16060 100644 --- a/tests/ui/match/issue-72896-non-partial-eq-const.stderr +++ b/tests/ui/match/issue-72896-non-partial-eq-const.stderr @@ -1,23 +1,8 @@ -warning: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-72896-non-partial-eq-const.rs:20:9 +error: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` + --> $DIR/issue-72896-non-partial-eq-const.rs:19:9 | LL | CONST_SET => { /* ok */ } | ^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default - -warning: 1 warning emitted -Future incompatibility report: Future breakage diagnostic: -warning: to use a constant of type `EnumSet` in a pattern, the type must implement `PartialEq` - --> $DIR/issue-72896-non-partial-eq-const.rs:20:9 - | -LL | CONST_SET => { /* ok */ } - | ^^^^^^^^^ - | - = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release! - = note: for more information, see issue #116122 - = note: `#[warn(const_patterns_without_partial_eq)]` on by default +error: aborting due to 1 previous error From fa7557181f371d88fb67f17b85827954f79cdf94 Mon Sep 17 00:00:00 2001 From: Gurinder Singh Date: Sun, 25 Feb 2024 17:51:56 +0530 Subject: [PATCH 131/134] Don't use `unwrap()` in `ArrayIntoIter` lint when typeck fails --- compiler/rustc_lint/src/array_into_iter.rs | 14 +++++++++----- .../lint/ice-array-into-iter-lint-issue-121532.rs | 11 +++++++++++ .../ice-array-into-iter-lint-issue-121532.stderr | 9 +++++++++ 3 files changed, 29 insertions(+), 5 deletions(-) create mode 100644 tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs create mode 100644 tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr diff --git a/compiler/rustc_lint/src/array_into_iter.rs b/compiler/rustc_lint/src/array_into_iter.rs index 3a5c585366a31..993b1d739a13d 100644 --- a/compiler/rustc_lint/src/array_into_iter.rs +++ b/compiler/rustc_lint/src/array_into_iter.rs @@ -70,11 +70,15 @@ impl<'tcx> LateLintPass<'tcx> for ArrayIntoIter { // Check if the method call actually calls the libcore // `IntoIterator::into_iter`. 
- let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap(); - match cx.tcx.trait_of_item(def_id) { - Some(trait_id) if cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id) => {} - _ => return, - }; + let trait_id = cx + .typeck_results() + .type_dependent_def_id(expr.hir_id) + .and_then(|did| cx.tcx.trait_of_item(did)); + if trait_id.is_none() + || !cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id.unwrap()) + { + return; + } // As this is a method call expression, we have at least one argument. let receiver_ty = cx.typeck_results().expr_ty(receiver_arg); diff --git a/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs new file mode 100644 index 0000000000000..92cab01fe48c8 --- /dev/null +++ b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.rs @@ -0,0 +1,11 @@ +// Regression test for #121532 +// Checks the we don't ICE in ArrayIntoIter +// lint when typeck has failed + + // Typeck fails for the arg type as + // `Self` makes no sense here +fn func(a: Self::ItemsIterator) { //~ ERROR failed to resolve: `Self` is only available in impls, traits, and type definitions + a.into_iter(); +} + +fn main() {} diff --git a/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr new file mode 100644 index 0000000000000..73ceddae940b2 --- /dev/null +++ b/tests/ui/lint/ice-array-into-iter-lint-issue-121532.stderr @@ -0,0 +1,9 @@ +error[E0433]: failed to resolve: `Self` is only available in impls, traits, and type definitions + --> $DIR/ice-array-into-iter-lint-issue-121532.rs:7:12 + | +LL | fn func(a: Self::ItemsIterator) { + | ^^^^ `Self` is only available in impls, traits, and type definitions + +error: aborting due to 1 previous error + +For more information about this error, try `rustc --explain E0433`. From 08caefbb103d1809113172d16eaf8f66c2edc2f1 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Wed, 20 Dec 2023 11:57:19 +0000 Subject: [PATCH 132/134] Windows: Use ProcessPrng for random keys --- library/std/src/sys/pal/windows/c.rs | 15 +++++++ .../std/src/sys/pal/windows/c/bindings.txt | 4 -- .../std/src/sys/pal/windows/c/windows_sys.rs | 12 ------ library/std/src/sys/pal/windows/rand.rs | 41 ++++++------------- 4 files changed, 28 insertions(+), 44 deletions(-) diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs index 6b12d7db8b03a..ad8e01bfa9b03 100644 --- a/library/std/src/sys/pal/windows/c.rs +++ b/library/std/src/sys/pal/windows/c.rs @@ -321,6 +321,21 @@ pub unsafe fn NtWriteFile( } } +// Use raw-dylib to import ProcessPrng as we can't rely on there being an import library. +cfg_if::cfg_if! { +if #[cfg(not(target_vendor = "win7"))] { + #[cfg(target_arch = "x86")] + #[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")] + extern "system" { + pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL; + } + #[cfg(not(target_arch = "x86"))] + #[link(name = "bcryptprimitives", kind = "raw-dylib")] + extern "system" { + pub fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL; + } +}} + // Functions that aren't available on every version of Windows that we support, // but we still use them and just provide some form of a fallback implementation. compat_fn_with_fallback! 
{ diff --git a/library/std/src/sys/pal/windows/c/bindings.txt b/library/std/src/sys/pal/windows/c/bindings.txt index 726f1c3df8294..ab2a8caf5dfd9 100644 --- a/library/std/src/sys/pal/windows/c/bindings.txt +++ b/library/std/src/sys/pal/windows/c/bindings.txt @@ -2180,10 +2180,6 @@ Windows.Win32.Networking.WinSock.WSATRY_AGAIN Windows.Win32.Networking.WinSock.WSATYPE_NOT_FOUND Windows.Win32.Networking.WinSock.WSAVERNOTSUPPORTED Windows.Win32.Security.Authentication.Identity.RtlGenRandom -Windows.Win32.Security.Cryptography.BCRYPT_ALG_HANDLE -Windows.Win32.Security.Cryptography.BCRYPT_USE_SYSTEM_PREFERRED_RNG -Windows.Win32.Security.Cryptography.BCryptGenRandom -Windows.Win32.Security.Cryptography.BCRYPTGENRANDOM_FLAGS Windows.Win32.Security.SECURITY_ATTRIBUTES Windows.Win32.Security.TOKEN_ACCESS_MASK Windows.Win32.Security.TOKEN_ACCESS_PSEUDO_HANDLE diff --git a/library/std/src/sys/pal/windows/c/windows_sys.rs b/library/std/src/sys/pal/windows/c/windows_sys.rs index c386b66a722df..8eb779373f7e4 100644 --- a/library/std/src/sys/pal/windows/c/windows_sys.rs +++ b/library/std/src/sys/pal/windows/c/windows_sys.rs @@ -15,15 +15,6 @@ extern "system" { pub fn RtlGenRandom(randombuffer: *mut ::core::ffi::c_void, randombufferlength: u32) -> BOOLEAN; } -#[link(name = "bcrypt")] -extern "system" { - pub fn BCryptGenRandom( - halgorithm: BCRYPT_ALG_HANDLE, - pbbuffer: *mut u8, - cbbuffer: u32, - dwflags: BCRYPTGENRANDOM_FLAGS, - ) -> NTSTATUS; -} #[link(name = "kernel32")] extern "system" { pub fn AcquireSRWLockExclusive(srwlock: *mut SRWLOCK) -> (); @@ -889,9 +880,6 @@ impl ::core::clone::Clone for ARM64_NT_NEON128_0 { *self } } -pub type BCRYPTGENRANDOM_FLAGS = u32; -pub type BCRYPT_ALG_HANDLE = *mut ::core::ffi::c_void; -pub const BCRYPT_USE_SYSTEM_PREFERRED_RNG: BCRYPTGENRANDOM_FLAGS = 2u32; pub const BELOW_NORMAL_PRIORITY_CLASS: PROCESS_CREATION_FLAGS = 16384u32; pub type BOOL = i32; pub type BOOLEAN = u8; diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs index bd1ae6b06076e..e427546222aea 100644 --- a/library/std/src/sys/pal/windows/rand.rs +++ b/library/std/src/sys/pal/windows/rand.rs @@ -1,42 +1,27 @@ -use crate::mem; -use crate::ptr; use crate::sys::c; +use core::mem; +use core::ptr; +#[cfg(not(target_vendor = "win7"))] +#[inline] pub fn hashmap_random_keys() -> (u64, u64) { let mut v = (0, 0); - let ret = unsafe { - c::BCryptGenRandom( - ptr::null_mut(), - core::ptr::addr_of_mut!(v) as *mut u8, - mem::size_of_val(&v) as c::ULONG, - c::BCRYPT_USE_SYSTEM_PREFERRED_RNG, - ) - }; - if c::nt_success(ret) { v } else { fallback_rng() } + let ret = unsafe { c::ProcessPrng(ptr::addr_of_mut!(v).cast::(), mem::size_of_val(&v)) }; + // ProcessPrng is documented as always returning `TRUE`. 
+ // https://learn.microsoft.com/en-us/windows/win32/seccng/processprng#return-value + debug_assert_eq!(ret, c::TRUE); + v } -/// Generate random numbers using the fallback RNG function (RtlGenRandom) -/// -/// This is necessary because of a failure to load the SysWOW64 variant of the -/// bcryptprimitives.dll library from code that lives in bcrypt.dll -/// See -#[cfg(not(target_vendor = "uwp"))] -#[inline(never)] -fn fallback_rng() -> (u64, u64) { +#[cfg(target_vendor = "win7")] +pub fn hashmap_random_keys() -> (u64, u64) { use crate::ffi::c_void; use crate::io; let mut v = (0, 0); let ret = unsafe { - c::RtlGenRandom(core::ptr::addr_of_mut!(v) as *mut c_void, mem::size_of_val(&v) as c::ULONG) + c::RtlGenRandom(ptr::addr_of_mut!(v).cast::(), mem::size_of_val(&v) as c::ULONG) }; - if ret != 0 { v } else { panic!("fallback RNG broken: {}", io::Error::last_os_error()) } -} - -/// We can't use RtlGenRandom with UWP, so there is no fallback -#[cfg(target_vendor = "uwp")] -#[inline(never)] -fn fallback_rng() -> (u64, u64) { - panic!("fallback RNG broken: RtlGenRandom() not supported on UWP"); + if ret != 0 { v } else { panic!("RNG broken: {}", io::Error::last_os_error()) } } From 8f89cbd796fe54b3454175193d6c2be245376161 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Tue, 20 Feb 2024 08:51:24 -0300 Subject: [PATCH 133/134] Add ProcessPrng shim to Miri This is essentially the same as SystemFunction036 (aka RtlGenRandom) except that the given length is a usize instead of a u32 --- src/tools/miri/src/shims/windows/foreign_items.rs | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/tools/miri/src/shims/windows/foreign_items.rs b/src/tools/miri/src/shims/windows/foreign_items.rs index bddc30b8379b2..fdd7fc5fad4e8 100644 --- a/src/tools/miri/src/shims/windows/foreign_items.rs +++ b/src/tools/miri/src/shims/windows/foreign_items.rs @@ -427,6 +427,14 @@ pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriInterpCxExt<'mir, 'tcx> { this.gen_random(ptr, len.into())?; this.write_scalar(Scalar::from_bool(true), dest)?; } + "ProcessPrng" => { + let [ptr, len] = + this.check_shim(abi, Abi::System { unwind: false }, link_name, args)?; + let ptr = this.read_pointer(ptr)?; + let len = this.read_target_usize(len)?; + this.gen_random(ptr, len.into())?; + this.write_scalar(Scalar::from_i32(1), dest)?; + } "BCryptGenRandom" => { let [algorithm, ptr, len, flags] = this.check_shim(abi, Abi::System { unwind: false }, link_name, args)?; From 843eaf2e71f9a1ae564c5d24ec69c7a0c29e4e53 Mon Sep 17 00:00:00 2001 From: Chris Denton Date: Tue, 20 Feb 2024 16:09:49 -0300 Subject: [PATCH 134/134] Cranelift: Don't use raw-dylib in std --- .../0029-stdlib-rawdylib-processprng.patch | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch diff --git a/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch new file mode 100644 index 0000000000000..6af11e54d88af --- /dev/null +++ b/compiler/rustc_codegen_cranelift/patches/0029-stdlib-rawdylib-processprng.patch @@ -0,0 +1,47 @@ +From 9f65e742ba3e41474e6126c6c4469c48eaa6ca7e Mon Sep 17 00:00:00 2001 +From: Chris Denton +Date: Tue, 20 Feb 2024 16:01:40 -0300 +Subject: [PATCH] Don't use raw-dylib in std + +--- + library/std/src/sys/pal/windows/c.rs | 2 +- + library/std/src/sys/pal/windows/rand.rs | 3 +-- + 2 files changed, 2 insertions(+), 3 
deletions(-) + +diff --git a/library/std/src/sys/pal/windows/c.rs b/library/std/src/sys/pal/windows/c.rs +index ad8e01bfa9b..9ca8e4c16ce 100644 +--- a/library/std/src/sys/pal/windows/c.rs ++++ b/library/std/src/sys/pal/windows/c.rs +@@ -323,7 +323,7 @@ pub unsafe fn NtWriteFile( + + // Use raw-dylib to import ProcessPrng as we can't rely on there being an import library. + cfg_if::cfg_if! { +-if #[cfg(not(target_vendor = "win7"))] { ++if #[cfg(any())] { + #[cfg(target_arch = "x86")] + #[link(name = "bcryptprimitives", kind = "raw-dylib", import_name_type = "undecorated")] + extern "system" { +diff --git a/library/std/src/sys/pal/windows/rand.rs b/library/std/src/sys/pal/windows/rand.rs +index e427546222a..f2fe42a4d51 100644 +--- a/library/std/src/sys/pal/windows/rand.rs ++++ b/library/std/src/sys/pal/windows/rand.rs +@@ -2,7 +2,7 @@ + use core::mem; + use core::ptr; + +-#[cfg(not(target_vendor = "win7"))] ++#[cfg(any())] + #[inline] + pub fn hashmap_random_keys() -> (u64, u64) { + let mut v = (0, 0); +@@ -13,7 +13,6 @@ pub fn hashmap_random_keys() -> (u64, u64) { + v + } + +-#[cfg(target_vendor = "win7")] + pub fn hashmap_random_keys() -> (u64, u64) { + use crate::ffi::c_void; + use crate::io; +-- +2.42.0.windows.2 +
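
The three ProcessPrng patches above all rely on one API shape: a raw-dylib import of `ProcessPrng(pbdata: *mut u8, cbdata: usize) -> BOOL` from bcryptprimitives.dll, called on a caller-supplied buffer and documented to always return TRUE. As a rough standalone sketch of that shape — Windows 10+ only, assuming a toolchain where `kind = "raw-dylib"` is supported for the target (as the std patch itself assumes); the `fill_random` helper is hypothetical and not part of std or of these patches:

// Same import the std patch adds, minus the win7/x86 special-casing.
#[link(name = "bcryptprimitives", kind = "raw-dylib")]
extern "system" {
    fn ProcessPrng(pbdata: *mut u8, cbdata: usize) -> i32; // BOOL
}

// Hypothetical helper: fill an arbitrary-length buffer. The length parameter
// is a `usize`, unlike RtlGenRandom's u32 length -- the difference the Miri
// shim commit calls out.
fn fill_random(buf: &mut [u8]) {
    let ret = unsafe { ProcessPrng(buf.as_mut_ptr(), buf.len()) };
    // ProcessPrng is documented as always returning TRUE (1).
    debug_assert_eq!(ret, 1);
}

fn main() {
    let mut keys = [0u8; 16]; // same size as the (u64, u64) that std fills
    fill_random(&mut keys);
    println!("{keys:02x?}");
}

Under Miri, a call like this should be routed to the new "ProcessPrng" shim, which reuses gen_random and writes 1 to the return place, so the debug_assert is expected to hold there as well.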