perf: remove unnecessary Arcs

This commit is contained in:
daimond113 2025-04-21 13:21:14 +02:00
parent 74f364ee32
commit fbcc836064
No known key found for this signature in database
GPG key ID: 640DC95EC1190354
15 changed files with 55 additions and 68 deletions

View file

@ -135,7 +135,7 @@ async fn run() -> std::io::Result<()> {
tracing::info!("auth: {auth}");
auth
},
-source: Arc::new(tokio::sync::RwLock::new(source)),
+source: tokio::sync::RwLock::new(source).into(),
project,
search_reader,

View file

@ -134,7 +134,7 @@ impl ExecuteCommand {
&DownloadOptions {
project: project.clone(),
reqwest: reqwest.clone(),
-reporter: Arc::new(()),
+reporter: ().into(),
id: id.clone(),
},
)
@ -150,7 +150,7 @@ impl ExecuteCommand {
&pkg_ref,
&GetTargetOptions {
project: project.clone(),
-path: Arc::from(tempdir.path()),
+path: tempdir.path().into(),
id: id.clone(),
},
)
@ -174,7 +174,7 @@ impl ExecuteCommand {
project
.download_and_link(
-&Arc::new(graph),
+&graph,
DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
.reporter(reporter)
.refreshed_sources(refreshed_sources)

View file

@ -223,7 +223,7 @@ impl InitCommand {
&GetTargetOptions {
project: project.clone(),
// HACK: the pesde package source doesn't use the path, so we can just use an empty one
-path: Arc::from(Path::new("")),
+path: Path::new("").into(),
id: id.clone(),
},
)

View file

@ -1,5 +1,3 @@
-use std::sync::Arc;
use crate::cli::{
style::{CLI_STYLE, INFO_STYLE, WARN_PREFIX},
up_to_date_lockfile, VersionedPackageName,
@ -59,8 +57,8 @@ impl PatchCommand {
&DownloadOptions {
project: project.clone(),
reqwest,
-reporter: Arc::new(()),
-id: Arc::new(id),
+reporter: ().into(),
+id: id.into(),
},
)
.await?

View file

@ -159,7 +159,7 @@ impl PublishCommand {
&node.pkg_ref,
&GetTargetOptions {
project,
-path: Arc::from(container_folder),
+path: container_folder.into(),
id,
},
)

View file

@ -14,7 +14,7 @@ use pesde::{
use relative_path::RelativePathBuf;
use std::{
collections::HashSet, env::current_dir, ffi::OsString, io::Write as _, path::Path,
-process::Command, sync::Arc,
+process::Command,
};
#[derive(Debug, Args)]
@ -128,8 +128,8 @@ impl RunCommand {
&node.pkg_ref,
&GetTargetOptions {
project,
-path: Arc::from(container_folder.as_path()),
-id: Arc::new(id),
+path: container_folder.as_path().into(),
+id: id.into(),
},
)
.await?;

View file

@ -267,8 +267,6 @@ pub async fn install(
});
}
-let graph = Arc::new(graph);
if options.write {
root_progress.reset();
root_progress.set_length(0);
@ -414,7 +412,7 @@ pub async fn install(
target: manifest.target.kind(),
overrides,
-graph: Arc::into_inner(graph).unwrap(),
+graph,
workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
};

View file

@ -44,12 +44,13 @@ where
root_progress.set_style(root_progress_style());
root_progress.enable_steady_tick(Duration::from_millis(100));
-let reporter = Arc::new(CliReporter::with_writer(
-	writer,
-	multi_progress.clone(),
-	root_progress.clone(),
-));
-let result = f(multi_progress.clone(), root_progress.clone(), reporter).await;
+let reporter = CliReporter::with_writer(writer, multi_progress.clone(), root_progress.clone());
+let result = f(
+	multi_progress.clone(),
+	root_progress.clone(),
+	reporter.into(),
+)
+.await;
root_progress.finish();
multi_progress.clear().unwrap();

View file

@ -29,7 +29,6 @@ use std::{
collections::BTreeSet,
env::current_exe,
path::{Path, PathBuf},
-sync::Arc,
};
use tracing::instrument;
@ -192,7 +191,7 @@ pub async fn get_or_download_engine(
&engine_ref,
&DownloadOptions {
reqwest: reqwest.clone(),
-reporter: Arc::new(reporter),
+reporter: reporter.into(),
version: version.clone(),
},
)

View file

@ -141,7 +141,7 @@ impl Project {
project: project.clone(),
reqwest,
id: package_id.clone(),
-reporter: Arc::new(progress_reporter),
+reporter: progress_reporter.into(),
},
)
.await
@ -154,7 +154,7 @@ impl Project {
project: project.clone(),
reqwest,
id: package_id.clone(),
-reporter: Arc::new(()),
+reporter: ().into(),
},
)
.await

View file

@ -16,6 +16,7 @@ use crate::{
use fs_err::tokio as fs;
use futures::TryStreamExt as _;
use std::{
+borrow::Cow,
collections::HashMap,
convert::Infallible,
future::{self, Future},
@ -164,7 +165,7 @@ impl Project {
#[instrument(skip_all, fields(prod = options.prod), level = "debug")]
pub async fn download_and_link<Reporter, Hooks>(
&self,
-graph: &Arc<DependencyGraph>,
+graph: &DependencyGraph,
options: DownloadAndLinkOptions<Reporter, Hooks>,
) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
where
@ -181,7 +182,6 @@ impl Project {
force,
} = options;
-let graph = graph.clone();
let reqwest = reqwest.clone();
let manifest = self.deser_manifest().await?;
@ -222,7 +222,7 @@ impl Project {
let mut downloaded_graph = DependencyGraph::new();
let graph_to_download = if force {
-graph.clone()
+Cow::Borrowed(graph)
} else {
let mut tasks = graph
.iter()
@ -249,7 +249,7 @@ impl Project {
graph_to_download.insert(id, node);
}
-Arc::new(graph_to_download)
+Cow::Owned(graph_to_download)
};
let downloaded = self
@ -285,10 +285,10 @@ impl Project {
.into_iter()
.partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());
-let mut graph = Arc::new(DependencyGraphWithTarget::new());
+let mut graph = DependencyGraphWithTarget::new();
async fn get_graph_targets<Hooks: DownloadAndLinkHooks>(
-graph: &mut Arc<DependencyGraphWithTarget>,
+graph: &mut DependencyGraphWithTarget,
project: &Project,
manifest_target_kind: TargetKind,
downloaded_graph: HashMap<PackageId, DependencyGraphNode>,
@ -297,10 +297,10 @@ impl Project {
.into_iter()
.map(|(id, node)| {
let source = node.pkg_ref.source();
-let path = Arc::from(
-	node.container_folder_from_project(&id, project, manifest_target_kind)
-		.as_path(),
-);
+let path = node
+	.container_folder_from_project(&id, project, manifest_target_kind)
+	.as_path()
+	.into();
let id = Arc::new(id);
let project = project.clone();
@ -326,7 +326,7 @@ impl Project {
while let Some(task) = tasks.join_next().await {
let (id, node) = task.unwrap()?;
-Arc::get_mut(graph).unwrap().insert(id, node);
+graph.insert(id, node);
}
Ok(())
@ -342,7 +342,7 @@ impl Project {
.instrument(tracing::debug_span!("get targets (non-wally)"))
.await?;
-self.link_dependencies(graph.clone(), false)
+self.link_dependencies(&graph, false)
.instrument(tracing::debug_span!("link (non-wally)"))
.await?;
@ -394,7 +394,7 @@ impl Project {
.await
}
None => {
-apply_patch(&id, container_folder, &patch_path, Arc::new(())).await
+apply_patch(&id, container_folder, &patch_path, ().into()).await
}
}
}
@ -407,7 +407,7 @@ impl Project {
}
// step 4. link ALL dependencies. do so with types
-self.link_dependencies(graph.clone(), true)
+self.link_dependencies(&graph, true)
.instrument(tracing::debug_span!("link (all)"))
.await?;
@ -418,8 +418,6 @@ impl Project {
.map_err(errors::DownloadAndLinkError::Hook)?;
}
-let mut graph = Arc::into_inner(graph).unwrap();
if prod {
graph.retain(|_, node| node.node.resolved_ty != DependencyType::Dev);
}

View file

@ -152,13 +152,14 @@ impl Project {
auth_config: AuthConfig,
) -> Self {
Project {
-shared: Arc::new(ProjectShared {
+shared: ProjectShared {
package_dir: package_dir.as_ref().to_path_buf(),
workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
data_dir: data_dir.as_ref().to_path_buf(),
cas_dir: cas_dir.as_ref().to_path_buf(),
auth_config,
-}),
+}
+.into(),
}
}

View file

@ -15,8 +15,8 @@ fn index_entry(
entry: &fs::DirEntry,
packages_index_dir: &Path,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-used_paths: &Arc<HashSet<PathBuf>>,
-#[cfg(feature = "patches")] patched_packages: &Arc<HashSet<PathBuf>>,
+used_paths: Arc<HashSet<PathBuf>>,
+#[cfg(feature = "patches")] patched_packages: Arc<HashSet<PathBuf>>,
) {
fn get_package_name_from_container(container: &Path) -> (bool, String) {
let Component::Normal(first_component) = container.components().next().unwrap() else {
@ -40,9 +40,6 @@ fn index_entry(
#[cfg_attr(not(feature = "patches"), allow(unused_variables))]
let (is_wally, package_name) = get_package_name_from_container(&path_relative);
-let used_paths = used_paths.clone();
-#[cfg(feature = "patches")]
-let patched_packages = patched_packages.clone();
tasks.spawn(async move {
if is_wally {
#[cfg(not(feature = "wally-compat"))]
@ -100,9 +97,8 @@ fn index_entry(
fn packages_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-expected_aliases: &Arc<HashSet<Alias>>,
+expected_aliases: Arc<HashSet<Alias>>,
) {
-let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if entry.file_type().await?.is_dir() {
return Ok(());
@ -134,9 +130,8 @@ fn packages_entry(
fn scripts_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-expected_aliases: &Arc<HashSet<Alias>>,
+expected_aliases: Arc<HashSet<Alias>>,
) {
-let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_dir() {
return Ok(());
@ -244,16 +239,16 @@ impl Project {
&entry?,
&packages_index_dir,
&mut tasks,
-&used_paths,
+used_paths.clone(),
#[cfg(feature = "patches")]
-&patched_packages,
+patched_packages.clone(),
);
}
Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
packages_entry(
entry?,
&mut tasks,
-&expected_aliases,
+expected_aliases.clone(),
);
}
else => break,
@ -288,7 +283,7 @@ impl Project {
let expected_aliases = Arc::new(expected_aliases);
while let Some(entry) = entries.next_entry().await? {
-scripts_entry(entry, &mut tasks, &expected_aliases);
+scripts_entry(entry, &mut tasks, expected_aliases.clone());
}
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}

View file

@ -15,7 +15,6 @@ use std::{
collections::HashMap,
ffi::OsStr,
path::{Path, PathBuf},
-sync::Arc,
};
use tokio::task::{spawn_blocking, JoinSet};
use tracing::{instrument, Instrument as _};
@ -64,16 +63,15 @@ impl Project {
#[instrument(skip(self, graph), level = "debug")]
pub(crate) async fn link_dependencies(
&self,
-graph: Arc<DependencyGraphWithTarget>,
+graph: &DependencyGraphWithTarget,
with_types: bool,
) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?;
let manifest_target_kind = manifest.target.kind();
-let manifest = Arc::new(manifest);
// step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed
-self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
+self.link(graph, &manifest, &PackageTypes::default(), false)
.await?;
if !with_types {
@ -155,15 +153,14 @@ impl Project {
}
// step 3. link all packages (and their dependencies), this time with types
-self.link(&graph, &manifest, &Arc::new(package_types), true)
-	.await
+self.link(graph, &manifest, &package_types, true).await
}
async fn link(
&self,
-graph: &Arc<DependencyGraphWithTarget>,
-manifest: &Arc<Manifest>,
-package_types: &Arc<PackageTypes>,
+graph: &DependencyGraphWithTarget,
+manifest: &Manifest,
+package_types: &PackageTypes,
is_complete: bool,
) -> Result<(), errors::LinkingError> {
let package_dir_canonical = fs::canonicalize(self.package_dir()).await?;
@ -308,7 +305,7 @@ impl Project {
for (dep_id, dep_alias) in &node.node.dependencies {
let dep_id = dep_id.clone();
let dep_alias = dep_alias.clone();
-let graph = graph.clone();
+let dep_node = graph.get(&dep_id).cloned();
let node = node.clone();
let package_id = package_id.clone();
let node_container_folder = node_container_folder.clone();
@ -316,7 +313,7 @@ impl Project {
let package_dir = self.package_dir().to_path_buf();
dependency_tasks.spawn(async move {
-let Some(dep_node) = graph.get(&dep_id) else {
+let Some(dep_node) = dep_node else {
return if is_complete {
Err(errors::LinkingError::DependencyNotFound(
dep_id.to_string(),

View file

@ -128,8 +128,8 @@ impl io::Write for IndicatifWriter {
Self::suspend(|| io::stderr().write_all(buf))
}
-fn write_fmt(&mut self, args: std::fmt::Arguments<'_>) -> io::Result<()> {
-	Self::suspend(|| io::stderr().write_fmt(args))
+fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> io::Result<()> {
+	Self::suspend(|| io::stderr().write_fmt(fmt))
}
}