diff --git a/compiler/rustc_data_structures/src/functor.rs b/compiler/rustc_data_structures/src/functor.rs
deleted file mode 100644
index e3fcaccb1bd5f..0000000000000
--- a/compiler/rustc_data_structures/src/functor.rs
+++ /dev/null
@@ -1,116 +0,0 @@
-use rustc_index::{Idx, IndexVec};
-use std::{mem, rc::Rc, sync::Arc};
-
-pub trait IdFunctor: Sized {
-    type Inner;
-
-    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
-    where
-        F: FnMut(Self::Inner) -> Result<Self::Inner, E>;
-}
-
-impl<T> IdFunctor for Box<T> {
-    type Inner = T;
-
-    #[inline]
-    fn try_map_id<F, E>(self, mut f: F) -> Result<Self, E>
-    where
-        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
-    {
-        let raw = Box::into_raw(self);
-        Ok(unsafe {
-            // SAFETY: The raw pointer points to a valid value of type `T`.
-            let value = raw.read();
-            // SAFETY: Converts `Box<T>` to `Box<MaybeUninit<T>>` which is the
-            // inverse of `Box::assume_init()` and should be safe.
-            let raw: Box<mem::MaybeUninit<T>> = Box::from_raw(raw.cast());
-            // SAFETY: Write the mapped value back into the `Box`.
-            Box::write(raw, f(value)?)
-        })
-    }
-}
-
-impl<T> IdFunctor for Vec<T> {
-    type Inner = T;
-
-    #[inline]
-    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
-    where
-        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
-    {
-        self.into_iter().map(f).collect()
-    }
-}
-
-impl<T> IdFunctor for Box<[T]> {
-    type Inner = T;
-
-    #[inline]
-    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
-    where
-        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
-    {
-        Vec::from(self).try_map_id(f).map(Into::into)
-    }
-}
-
-impl<I: Idx, T> IdFunctor for IndexVec<I, T> {
-    type Inner = T;
-
-    #[inline]
-    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
-    where
-        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
-    {
-        self.raw.try_map_id(f).map(IndexVec::from_raw)
-    }
-}
-
-macro_rules! rc {
-    ($($rc:ident),+) => {$(
-        impl<T: Clone> IdFunctor for $rc<T> {
-            type Inner = T;
-
-            #[inline]
-            fn try_map_id<F, E>(mut self, mut f: F) -> Result<Self, E>
-            where
-                F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
-            {
-                // We merely want to replace the contained `T`, if at all possible,
-                // so that we don't needlessly allocate a new `$rc` or indeed clone
-                // the contained type.
-                unsafe {
-                    // First step is to ensure that we have a unique reference to
-                    // the contained type, which `$rc::make_mut` will accomplish (by
-                    // allocating a new `$rc` and cloning the `T` only if required).
-                    // This is done *before* casting to `$rc<ManuallyDrop<T>>` so that
-                    // panicking during `make_mut` does not leak the `T`.
-                    $rc::make_mut(&mut self);
-
-                    // Casting to `$rc<ManuallyDrop<T>>` is safe because `ManuallyDrop`
-                    // is `repr(transparent)`.
-                    let ptr = $rc::into_raw(self).cast::<mem::ManuallyDrop<T>>();
-                    let mut unique = $rc::from_raw(ptr);
-
-                    // Call to `$rc::make_mut` above guarantees that `unique` is the
-                    // sole reference to the contained value, so we can avoid doing
-                    // a checked `get_mut` here.
-                    let slot = $rc::get_mut_unchecked(&mut unique);
-
-                    // Semantically move the contained type out from `unique`, fold
-                    // it, then move the folded value back into `unique`. Should
-                    // folding fail, `ManuallyDrop` ensures that the "moved-out"
-                    // value is not re-dropped.
-                    let owned = mem::ManuallyDrop::take(slot);
-                    let folded = f(owned)?;
-                    *slot = mem::ManuallyDrop::new(folded);
-
-                    // Cast back to `$rc<T>`.
-                    Ok($rc::from_raw($rc::into_raw(unique).cast()))
-                }
-            }
-        }
-    )+};
-}
-
-rc! { Rc, Arc }
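
For reference, the deleted `IdFunctor` trait was just a fallible, shape-preserving `map` over single-value containers. Below is a minimal safe-Rust sketch of those semantics; the trait name `IdFunctorSketch` and the `main` usage are made up for illustration, and the `Rc` impl here clones out of a shared pointer instead of folding in place the way the deleted `rc!` impls (and the new `Lrc` impl further down) do.

use std::rc::Rc;

// Fallible map that keeps the container's shape and propagates the first error.
pub trait IdFunctorSketch: Sized {
    type Inner;
    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
    where
        F: FnMut(Self::Inner) -> Result<Self::Inner, E>;
}

impl<T> IdFunctorSketch for Vec<T> {
    type Inner = T;
    fn try_map_id<F, E>(self, f: F) -> Result<Self, E>
    where
        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
    {
        // Collecting an iterator of `Result`s stops at the first `Err`.
        self.into_iter().map(f).collect()
    }
}

impl<T: Clone> IdFunctorSketch for Rc<T> {
    type Inner = T;
    fn try_map_id<F, E>(self, mut f: F) -> Result<Self, E>
    where
        F: FnMut(Self::Inner) -> Result<Self::Inner, E>,
    {
        // Take the value out: cheap when this is the only handle, a clone
        // otherwise. The real impl avoided both the clone and the fresh
        // allocation via `make_mut` + `ManuallyDrop`.
        let value = Rc::try_unwrap(self).unwrap_or_else(|shared| (*shared).clone());
        Ok(Rc::new(f(value)?))
    }
}

fn main() {
    let v = vec![1, 2, 3].try_map_id(|n| Ok::<_, ()>(n * 2)).unwrap();
    assert_eq!(v, vec![2, 4, 6]);

    let rc = Rc::new(String::from("ty")).try_map_id(|s| Ok::<_, ()>(s + "!")).unwrap();
    assert_eq!(*rc, "ty!");
}
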
diff --git a/compiler/rustc_data_structures/src/lib.rs b/compiler/rustc_data_structures/src/lib.rs
index 461ec3a90ed97..4dd4ade4e6bef 100644
--- a/compiler/rustc_data_structures/src/lib.rs
+++ b/compiler/rustc_data_structures/src/lib.rs
@@ -8,7 +8,6 @@
 
 #![doc(html_root_url = "https://doc.rust-lang.org/nightly/nightly-rustc/")]
 #![feature(array_windows)]
-#![feature(associated_type_bounds)]
 #![feature(auto_traits)]
 #![feature(cell_leak)]
 #![feature(core_intrinsics)]
@@ -19,15 +18,12 @@
 #![feature(min_specialization)]
 #![feature(never_type)]
 #![feature(type_alias_impl_trait)]
-#![feature(new_uninit)]
 #![feature(lazy_cell)]
 #![feature(rustc_attrs)]
 #![feature(negative_impls)]
 #![feature(test)]
 #![feature(thread_id_value)]
-#![feature(vec_into_raw_parts)]
 #![feature(allocator_api)]
-#![feature(get_mut_unchecked)]
 #![feature(lint_reasons)]
 #![feature(unwrap_infallible)]
 #![feature(strict_provenance)]
@@ -63,7 +59,6 @@ pub mod binary_search_util;
 pub mod captures;
 pub mod flat_map_in_place;
 pub mod flock;
-pub mod functor;
 pub mod fx;
 pub mod graph;
 pub mod intern;
diff --git a/compiler/rustc_type_ir/src/lib.rs b/compiler/rustc_type_ir/src/lib.rs
index 5df068de1f849..9f8d9f02ec24c 100644
--- a/compiler/rustc_type_ir/src/lib.rs
+++ b/compiler/rustc_type_ir/src/lib.rs
@@ -1,7 +1,9 @@
 #![feature(associated_type_defaults)]
 #![feature(fmt_helpers_for_derive)]
+#![feature(get_mut_unchecked)]
 #![feature(min_specialization)]
 #![feature(never_type)]
+#![feature(new_uninit)]
 #![feature(rustc_attrs)]
 #![feature(unwrap_infallible)]
 #![deny(rustc::untranslatable_diagnostic)]
diff --git a/compiler/rustc_type_ir/src/structural_impls.rs b/compiler/rustc_type_ir/src/structural_impls.rs
index f1037fe0bafa5..08af96ea15f0f 100644
--- a/compiler/rustc_type_ir/src/structural_impls.rs
+++ b/compiler/rustc_type_ir/src/structural_impls.rs
@@ -5,12 +5,12 @@
 use crate::fold::{FallibleTypeFolder, TypeFoldable};
 use crate::visit::{TypeVisitable, TypeVisitor};
 use crate::{ConstKind, FloatTy, InferTy, IntTy, Interner, UintTy, UniverseIndex};
-use rustc_data_structures::functor::IdFunctor;
 use rustc_data_structures::sync::Lrc;
 use rustc_index::{Idx, IndexVec};
 
 use core::fmt;
 use std::marker::PhantomData;
+use std::mem;
 use std::ops::ControlFlow;
 
 ///////////////////////////////////////////////////////////////////////////
@@ -108,8 +108,39 @@ impl<I: Interner, T: TypeVisitable<I>, E: TypeVisitable<I>> TypeVisitable<I> for
 }
 
 impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Lrc<T> {
-    fn try_fold_with<F: FallibleTypeFolder<I>>(self, folder: &mut F) -> Result<Self, F::Error> {
-        self.try_map_id(|value| value.try_fold_with(folder))
+    fn try_fold_with<F: FallibleTypeFolder<I>>(mut self, folder: &mut F) -> Result<Self, F::Error> {
+        // We merely want to replace the contained `T`, if at all possible,
+        // so that we don't needlessly allocate a new `Lrc` or indeed clone
+        // the contained type.
+        unsafe {
+            // First step is to ensure that we have a unique reference to
+            // the contained type, which `Lrc::make_mut` will accomplish (by
+            // allocating a new `Lrc` and cloning the `T` only if required).
+            // This is done *before* casting to `Lrc<ManuallyDrop<T>>` so that
+            // panicking during `make_mut` does not leak the `T`.
+            Lrc::make_mut(&mut self);
+
+            // Casting to `Lrc<ManuallyDrop<T>>` is safe because `ManuallyDrop`
+            // is `repr(transparent)`.
+            let ptr = Lrc::into_raw(self).cast::<mem::ManuallyDrop<T>>();
+            let mut unique = Lrc::from_raw(ptr);
+
+            // Call to `Lrc::make_mut` above guarantees that `unique` is the
+            // sole reference to the contained value, so we can avoid doing
+            // a checked `get_mut` here.
+            let slot = Lrc::get_mut_unchecked(&mut unique);
+
+            // Semantically move the contained type out from `unique`, fold
+            // it, then move the folded value back into `unique`. Should
+            // folding fail, `ManuallyDrop` ensures that the "moved-out"
+            // value is not re-dropped.
+            let owned = mem::ManuallyDrop::take(slot);
+            let folded = owned.try_fold_with(folder)?;
+            *slot = mem::ManuallyDrop::new(folded);
+
+            // Cast back to `Lrc<T>`.
+            Ok(Lrc::from_raw(Lrc::into_raw(unique).cast()))
+        }
     }
 }
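
The same fold-in-place pattern, pulled out of the trait impl into a standalone form, may make the safety argument easier to follow. The sketch below is a hypothetical `try_fold_arc` helper written directly against `std::sync::Arc` (what `Lrc` aliases in the parallel compiler); it needs a nightly toolchain for `Arc::get_mut_unchecked`, mirroring the feature gate added to `rustc_type_ir` above. The check in `main` shows the point of the exercise: when the `Arc` is unique, the original allocation is reused.

#![feature(get_mut_unchecked)]

use std::mem::ManuallyDrop;
use std::sync::Arc;

fn try_fold_arc<T: Clone, E>(
    mut this: Arc<T>,
    f: impl FnOnce(T) -> Result<T, E>,
) -> Result<Arc<T>, E> {
    unsafe {
        // Ensure uniqueness up front; this clones the `T` only if other handles
        // exist, and doing it before the `ManuallyDrop` cast keeps a panic here
        // from leaking the value.
        Arc::make_mut(&mut this);

        // `ManuallyDrop<T>` is `repr(transparent)`, so the pointer cast is sound.
        let mut unique = Arc::from_raw(Arc::into_raw(this).cast::<ManuallyDrop<T>>());

        // `make_mut` above made us the sole owner, so the unchecked access is fine.
        let slot = Arc::get_mut_unchecked(&mut unique);

        // Move the value out, fold it, move the result back. If `f` fails,
        // `ManuallyDrop` keeps the moved-out value from being dropped again.
        let folded = f(ManuallyDrop::take(slot))?;
        *slot = ManuallyDrop::new(folded);

        // Cast back to `Arc<T>`.
        Ok(Arc::from_raw(Arc::into_raw(unique).cast()))
    }
}

fn main() {
    let unique = Arc::new(41_i32);
    let before = Arc::as_ptr(&unique);
    let folded = try_fold_arc(unique, |n| Ok::<_, ()>(n + 1)).unwrap();
    assert_eq!(*folded, 42);
    // No other handles existed, so the allocation was reused, not re-created.
    assert_eq!(Arc::as_ptr(&folded), before);
}
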
 
@@ -120,8 +151,9 @@ impl<I: Interner, T: TypeVisitable<I>> TypeVisitable<I> for Lrc<T> {
 }
 
 impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Box<T> {
-    fn try_fold_with<F: FallibleTypeFolder<I>>(self, folder: &mut F) -> Result<Self, F::Error> {
-        self.try_map_id(|value| value.try_fold_with(folder))
+    fn try_fold_with<F: FallibleTypeFolder<I>>(mut self, folder: &mut F) -> Result<Self, F::Error> {
+        *self = (*self).try_fold_with(folder)?;
+        Ok(self)
     }
 }
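
The new `Box` impl leans on a box-specific feature of the language rather than on unsafe code: `(*self)` performs a "deref move" out of the box, and the subsequent assignment through `*self` re-initializes the same heap allocation, so no new box is allocated. A standalone sketch, with a hypothetical `try_map_box` helper standing in for `try_fold_with`:

fn try_map_box<T, E>(mut boxed: Box<T>, f: impl FnOnce(T) -> Result<T, E>) -> Result<Box<T>, E> {
    // Move the value out of the box, map it, and write the result back into
    // the same allocation. On the error path the `?` returns while the box is
    // empty, which is fine: the allocation is freed without dropping a `T`.
    *boxed = f(*boxed)?;
    Ok(boxed)
}

fn main() {
    let doubled: Result<Box<u32>, ()> = try_map_box(Box::new(21), |n| Ok(n * 2));
    assert_eq!(*doubled.unwrap(), 42);
}
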
 
@@ -133,7 +165,7 @@ impl<I: Interner, T: TypeVisitable<I>> TypeVisitable<I> for Box<T> {
 
 impl<I: Interner, T: TypeFoldable<I>> TypeFoldable<I> for Vec<T> {
     fn try_fold_with<F: FallibleTypeFolder<I>>(self, folder: &mut F) -> Result<Self, F::Error> {
-        self.try_map_id(|t| t.try_fold_with(folder))
+        self.into_iter().map(|t| t.try_fold_with(folder)).collect()
     }
 }
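
The `Vec` impl now spells out what `IdFunctor::try_map_id` used to do for it: `collect` on an iterator of `Result`s uses `Result`'s `FromIterator` impl, which builds the `Vec` only if every element folds successfully and short-circuits on the first error. A small stand-alone demonstration (values and error text are arbitrary):

fn main() {
    // Every element maps successfully: the whole collection is `Ok`.
    let all_ok: Result<Vec<i32>, &str> =
        vec![1, 2, 3].into_iter().map(|n| Ok(n * 10)).collect();
    assert_eq!(all_ok, Ok(vec![10, 20, 30]));

    // The first `Err` wins and later elements are never visited.
    let first_err: Result<Vec<i32>, &str> = vec![1, 2, 3]
        .into_iter()
        .map(|n| if n == 2 { Err("fold failed") } else { Ok(n) })
        .collect();
    assert_eq!(first_err, Err("fold failed"));
}
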
 
@@ -161,7 +193,7 @@ impl<I: Interner, T: TypeVisitable<I>> TypeVisitable<I> for Box<[T]> {
 
 impl<I: Interner, T: TypeFoldable<I>, Ix: Idx> TypeFoldable<I> for IndexVec<Ix, T> {
     fn try_fold_with<F: FallibleTypeFolder<I>>(self, folder: &mut F) -> Result<Self, F::Error> {
-        self.try_map_id(|x| x.try_fold_with(folder))
+        self.raw.try_fold_with(folder).map(IndexVec::from_raw)
     }
 }
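
`IndexVec<Ix, T>` is a newtype around a `Vec<T>` plus a phantom index type, which is why the impl above can fold the public `raw` vector and rebuild the wrapper with `IndexVec::from_raw`. Since `rustc_index` is an internal crate, the sketch below uses a hypothetical `Wrapper` type of the same shape to show the delegate-and-rewrap pattern:

use std::marker::PhantomData;

// Stand-in for `IndexVec<Ix, T>`: the data plus a zero-sized index-type tag.
struct Wrapper<Ix, T> {
    raw: Vec<T>,
    _index: PhantomData<Ix>,
}

impl<Ix, T> Wrapper<Ix, T> {
    fn from_raw(raw: Vec<T>) -> Self {
        Wrapper { raw, _index: PhantomData }
    }

    // Fold the backing `Vec`, then re-wrap it; the index type is untouched.
    fn try_map<F, E>(self, f: F) -> Result<Self, E>
    where
        F: FnMut(T) -> Result<T, E>,
    {
        self.raw.into_iter().map(f).collect::<Result<Vec<_>, _>>().map(Self::from_raw)
    }
}

fn main() {
    struct RowIdx; // hypothetical index tag
    let rows: Wrapper<RowIdx, u8> = Wrapper::from_raw(vec![1, 2, 3]);
    let rows = rows.try_map(|n| Ok::<_, ()>(n + 1)).unwrap();
    assert_eq!(rows.raw, vec![2, 3, 4]);
}
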