Merge branch 'main' into dev-only

Stefan 2025-04-24 00:17:24 +01:00, committed by GitHub
Commit 09ec3735ca
Signed by: DevComp (GPG key ID: B5690EEEBB952194)
18 changed files with 974 additions and 835 deletions

Cargo.lock (generated, 1502 lines changed): file diff suppressed because it is too large.

@@ -41,6 +41,80 @@ name = "pesde"
 path = "src/main.rs"
 required-features = ["bin"]
+[lints]
+workspace = true
+[dependencies]
+serde = { version = "1.0.219", features = ["derive"] }
+toml = "0.8.20"
+gix = { version = "0.71.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
+semver = { version = "1.0.26", features = ["serde"] }
+reqwest = { version = "0.12.15", default-features = false, features = ["rustls-tls", "stream", "json"] }
+tokio-tar = "0.3.1"
+async-compression = { version = "0.4.22", features = ["tokio", "gzip"] }
+pathdiff = "0.2.3"
+relative-path = { version = "1.9.3", features = ["serde"] }
+tracing = { version = "0.1.41", features = ["attributes"] }
+thiserror = "2.0.12"
+tokio = { version = "1.44.2", features = ["process", "macros"] }
+tokio-util = "0.7.14"
+async-stream = "0.3.6"
+futures = "0.3.31"
+full_moon = { version = "1.2.0", features = ["luau"] }
+url = { version = "2.5.4", features = ["serde"] }
+jiff = { version = "0.2.9", default-features = false, features = ["serde", "std"] }
+sha2 = "0.10.8"
+tempfile = "3.19.1"
+wax = { version = "0.6.0", default-features = false }
+fs-err = { version = "3.1.0", features = ["tokio"] }
+urlencoding = "2.1.3"
+async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
+# TODO: remove this when gitoxide adds support for: committing, pushing, adding
+git2 = { version = "0.20.1", optional = true }
+serde_json = { version = "1.0.140", optional = true }
+anyhow = { version = "1.0.98", optional = true }
+open = { version = "5.3.2", optional = true }
+keyring = { version = "3.6.2", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
+console = { version = "0.15.11", optional = true }
+toml_edit = { version = "0.22.24", optional = true }
+clap = { version = "4.5.37", features = ["derive"], optional = true }
+dirs = { version = "6.0.0", optional = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
+indicatif = { version = "0.17.11", optional = true }
+inquire = { version = "0.7.5", default-features = false, features = ["console", "one-liners"], optional = true }
+paste = { version = "1.0.15", optional = true }
+[target.'cfg(target_os = "windows")'.dependencies]
+windows-registry = { version = "0.5.1", optional = true }
+windows = { version = "0.61.1", features = ["Win32_Storage", "Win32_Storage_FileSystem", "Win32_Security"], optional = true }
+[dev-dependencies]
+schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"] }
+[workspace]
+resolver = "2"
+members = ["registry"]
+[profile.dev.package.full_moon]
+opt-level = 3
+[profile.dev.package.miniz_oxide]
+opt-level = 3
+[profile.release]
+opt-level = "s"
+lto = true
+incremental = true
+codegen-units = 1
+panic = "abort"
+[profile.release.package.pesde-registry]
+# add debug symbols for Sentry stack traces
+debug = "full"
 [workspace.lints.clippy]
 zero_sized_map_values = "warn"
 while_float = "deny"
@@ -187,77 +261,3 @@ branches_sharing_code = "warn"
 bool_to_int_with_if = "warn"
 assigning_clones = "warn"
 as_underscore = "warn"
-[lints]
-workspace = true
-[dependencies]
-serde = { version = "1.0.217", features = ["derive"] }
-toml = "0.8.20"
-gix = { version = "0.70.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
-semver = { version = "1.0.25", features = ["serde"] }
-reqwest = { version = "0.12.12", default-features = false, features = ["rustls-tls", "stream", "json"] }
-tokio-tar = "0.3.1"
-async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
-pathdiff = "0.2.3"
-relative-path = { version = "1.9.3", features = ["serde"] }
-tracing = { version = "0.1.41", features = ["attributes"] }
-thiserror = "2.0.11"
-tokio = { version = "1.43.0", features = ["process", "macros"] }
-tokio-util = "0.7.13"
-async-stream = "0.3.6"
-futures = "0.3.31"
-full_moon = { version = "1.2.0", features = ["luau"] }
-url = { version = "2.5.4", features = ["serde"] }
-jiff = { version = "0.1.29", default-features = false, features = ["serde", "std"] }
-sha2 = "0.10.8"
-tempfile = "3.16.0"
-wax = { version = "0.6.0", default-features = false }
-fs-err = { version = "3.1.0", features = ["tokio"] }
-urlencoding = "2.1.3"
-async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
-# TODO: remove this when gitoxide adds support for: committing, pushing, adding
-git2 = { version = "0.20.0", optional = true }
-serde_json = { version = "1.0.138", optional = true }
-anyhow = { version = "1.0.95", optional = true }
-open = { version = "5.3.2", optional = true }
-keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
-console = { version = "0.15.10", optional = true }
-toml_edit = { version = "0.22.23", optional = true }
-clap = { version = "4.5.28", features = ["derive"], optional = true }
-dirs = { version = "6.0.0", optional = true }
-tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
-indicatif = { version = "0.17.11", optional = true }
-inquire = { version = "0.7.5", default-features = false, features = ["console", "one-liners"], optional = true }
-paste = { version = "1.0.15", optional = true }
-[target.'cfg(target_os = "windows")'.dependencies]
-windows-registry = { version = "0.4.0", optional = true }
-windows = { version = "0.59.0", features = ["Win32_Storage", "Win32_Storage_FileSystem", "Win32_Security"], optional = true }
-[dev-dependencies]
-schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"] }
-[workspace]
-resolver = "2"
-members = ["registry"]
-[profile.dev.package.full_moon]
-opt-level = 3
-[profile.dev.package.miniz_oxide]
-opt-level = 3
-[profile.release]
-opt-level = "s"
-lto = true
-incremental = true
-codegen-units = 1
-panic = "abort"
-[profile.release.package.pesde-registry]
-# add debug symbols for Sentry stack traces
-debug = "full"


@@ -9,46 +9,46 @@ publish = false
 workspace = true
 [dependencies]
-actix-web = "4.9.0"
+actix-web = "4.10.2"
-actix-cors = "0.7.0"
+actix-cors = "0.7.1"
 actix-governor = "0.8.0"
 dotenvy = "0.15.7"
-thiserror = "2.0.11"
+thiserror = "2.0.12"
-tantivy = "0.22.0"
+tantivy = "0.24.0"
-semver = "1.0.25"
+semver = "1.0.26"
-jiff = { version = "0.1.29", features = ["serde"] }
+jiff = { version = "0.2.9", features = ["serde"] }
 futures = "0.3.31"
-tokio = "1.43.0"
+tokio = "1.44.2"
-tokio-util = "0.7.13"
+tokio-util = "0.7.14"
-tempfile = "3.16.0"
+tempfile = "3.19.1"
 fs-err = { version = "3.1.0", features = ["tokio"] }
 async-stream = "0.3.6"
-git2 = "0.20.0"
+git2 = "0.20.1"
-gix = { version = "0.70.0", default-features = false, features = [
+gix = { version = "0.71.0", default-features = false, features = [
 "blocking-http-transport-reqwest-rust-tls",
 "credentials",
 ] }
-serde = "1.0.217"
+serde = "1.0.219"
-serde_json = "1.0.138"
+serde_json = "1.0.140"
 serde_yaml = "0.9.34"
 toml = "0.8.20"
-convert_case = "0.7.1"
+convert_case = "0.8.0"
 sha2 = "0.10.8"
 rusty-s3 = "0.7.0"
-reqwest = { version = "0.12.12", default-features = false, features = ["json", "rustls-tls"] }
+reqwest = { version = "0.12.15", default-features = false, features = ["json", "rustls-tls"] }
-constant_time_eq = "0.3.1"
+constant_time_eq = "0.4.2"
 tokio-tar = "0.3.1"
-async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
+async-compression = { version = "0.4.22", features = ["tokio", "gzip"] }
 tracing = { version = "0.1.41", features = ["attributes"] }
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
-tracing-actix-web = "0.7.15"
+tracing-actix-web = "0.7.18"
-sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
+sentry = { version = "0.37.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
-sentry-actix = "0.36.0"
+sentry-actix = "0.37.0"
 pesde = { path = "..", default-features = false, features = ["wally-compat"] }


@@ -135,7 +135,7 @@ async fn run() -> std::io::Result<()> {
 tracing::info!("auth: {auth}");
 auth
 },
-source: Arc::new(tokio::sync::RwLock::new(source)),
+source: tokio::sync::RwLock::new(source).into(),
 project,
 search_reader,


@@ -113,7 +113,7 @@ impl AddCommand {
 PackageSources::Git(GitPackageSource::new(url.clone())),
 DependencySpecifiers::Git(GitDependencySpecifier {
 repo: url.clone(),
-rev: rev.to_string(),
+rev: rev.clone(),
 path: None,
 }),
 ),


@@ -134,7 +134,7 @@ impl ExecuteCommand {
 &DownloadOptions {
 project: project.clone(),
 reqwest: reqwest.clone(),
-reporter: Arc::new(()),
+reporter: ().into(),
 id: id.clone(),
 },
 )
@@ -150,7 +150,7 @@ impl ExecuteCommand {
 &pkg_ref,
 &GetTargetOptions {
 project: project.clone(),
-path: Arc::from(tempdir.path()),
+path: tempdir.path().into(),
 id: id.clone(),
 },
 )
@@ -174,7 +174,7 @@ impl ExecuteCommand {
 project
 .download_and_link(
-&Arc::new(graph),
+&graph,
 DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
 .reporter(reporter)
 .refreshed_sources(refreshed_sources)
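The Arc::new(...) → .into() rewrites that recur throughout this commit rely on the standard library's From impls for Arc (Arc<T>: From<T>, and Arc<Path>: From<&Path>), so the struct field's type drives the conversion. A minimal standalone sketch of the pattern, with a made-up Options struct rather than the real pesde types:

    use std::path::Path;
    use std::sync::Arc;

    // Hypothetical options struct shaped like the DownloadOptions/GetTargetOptions fields above.
    struct Options {
        reporter: Arc<()>,
        path: Arc<Path>,
    }

    fn main() {
        let opts = Options {
            // Arc<T> implements From<T>, so `().into()` replaces Arc::new(()).
            reporter: ().into(),
            // Arc<Path> implements From<&Path>, so `.into()` replaces Arc::from(path).
            path: Path::new("some/container/folder").into(),
        };
        assert_eq!(opts.path.to_str(), Some("some/container/folder"));
        let _ = opts.reporter;
    }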


@@ -223,7 +223,7 @@ impl InitCommand {
 &GetTargetOptions {
 project: project.clone(),
 // HACK: the pesde package source doesn't use the path, so we can just use an empty one
-path: Arc::from(Path::new("")),
+path: Path::new("").into(),
 id: id.clone(),
 },
 )


@@ -1,5 +1,3 @@
-use std::sync::Arc;
 use crate::cli::{
 style::{CLI_STYLE, INFO_STYLE, WARN_PREFIX},
 up_to_date_lockfile, VersionedPackageName,
@@ -59,8 +57,8 @@ impl PatchCommand {
 &DownloadOptions {
 project: project.clone(),
 reqwest,
-reporter: Arc::new(()),
+reporter: ().into(),
-id: Arc::new(id),
+id: id.into(),
 },
 )
 .await?


@@ -159,7 +159,7 @@ impl PublishCommand {
 &node.pkg_ref,
 &GetTargetOptions {
 project,
-path: Arc::from(container_folder),
+path: container_folder.into(),
 id,
 },
 )


@@ -14,7 +14,7 @@ use pesde::{
 use relative_path::RelativePathBuf;
 use std::{
 collections::HashSet, env::current_dir, ffi::OsString, io::Write as _, path::Path,
-process::Command, sync::Arc,
+process::Command,
 };
 #[derive(Debug, Args)]
@@ -128,8 +128,8 @@ impl RunCommand {
 &node.pkg_ref,
 &GetTargetOptions {
 project,
-path: Arc::from(container_folder.as_path()),
+path: container_folder.as_path().into(),
-id: Arc::new(id),
+id: id.into(),
 },
 )
 .await?;


@@ -267,8 +267,6 @@ pub async fn install(
 });
 }
-let graph = Arc::new(graph);
 if options.write {
 root_progress.reset();
 root_progress.set_length(0);
@@ -414,7 +412,7 @@ pub async fn install(
 target: manifest.target.kind(),
 overrides,
-graph: Arc::into_inner(graph).unwrap(),
+graph,
 workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
 };


@@ -44,12 +44,13 @@ where
 root_progress.set_style(root_progress_style());
 root_progress.enable_steady_tick(Duration::from_millis(100));
-let reporter = Arc::new(CliReporter::with_writer(
-writer,
-multi_progress.clone(),
-root_progress.clone(),
-));
-let result = f(multi_progress.clone(), root_progress.clone(), reporter).await;
+let reporter = CliReporter::with_writer(writer, multi_progress.clone(), root_progress.clone());
+let result = f(
+multi_progress.clone(),
+root_progress.clone(),
+reporter.into(),
+)
+.await;
 root_progress.finish();
 multi_progress.clear().unwrap();


@@ -29,7 +29,6 @@ use std::{
 collections::BTreeSet,
 env::current_exe,
 path::{Path, PathBuf},
-sync::Arc,
 };
 use tracing::instrument;
@@ -192,7 +191,7 @@ pub async fn get_or_download_engine(
 &engine_ref,
 &DownloadOptions {
 reqwest: reqwest.clone(),
-reporter: Arc::new(reporter),
+reporter: reporter.into(),
 version: version.clone(),
 },
 )


@@ -141,7 +141,7 @@ impl Project {
 project: project.clone(),
 reqwest,
 id: package_id.clone(),
-reporter: Arc::new(progress_reporter),
+reporter: progress_reporter.into(),
 },
 )
 .await
@@ -154,7 +154,7 @@ impl Project {
 project: project.clone(),
 reqwest,
 id: package_id.clone(),
-reporter: Arc::new(()),
+reporter: ().into(),
 },
 )
 .await


@@ -16,6 +16,7 @@ use crate::{
 use fs_err::tokio as fs;
 use futures::TryStreamExt as _;
 use std::{
+borrow::Cow,
 collections::HashMap,
 convert::Infallible,
 future::{self, Future},
@@ -179,7 +180,7 @@ impl Project {
 #[instrument(skip_all, fields(install_dependencies = debug(options.install_dependencies_mode)), level = "debug")]
 pub async fn download_and_link<Reporter, Hooks>(
 &self,
-graph: &Arc<DependencyGraph>,
+graph: &DependencyGraph,
 options: DownloadAndLinkOptions<Reporter, Hooks>,
 ) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
 where
@@ -196,7 +197,6 @@ impl Project {
 force,
 } = options;
-let graph = graph.clone();
 let reqwest = reqwest.clone();
 let manifest = self.deser_manifest().await?;
@@ -237,7 +237,7 @@ impl Project {
 let mut downloaded_graph = DependencyGraph::new();
 let graph_to_download = if force {
-graph.clone()
+Cow::Borrowed(graph)
 } else {
 let mut tasks = graph
 .iter()
@@ -279,7 +279,7 @@ impl Project {
 graph_to_download.insert(id, node);
 }
-Arc::new(graph_to_download)
+Cow::Owned(graph_to_download)
 };
 let downloaded = self
@@ -315,10 +315,10 @@ impl Project {
 .into_iter()
 .partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());
-let mut graph = Arc::new(DependencyGraphWithTarget::new());
+let mut graph = DependencyGraphWithTarget::new();
 async fn get_graph_targets<Hooks: DownloadAndLinkHooks>(
-graph: &mut Arc<DependencyGraphWithTarget>,
+graph: &mut DependencyGraphWithTarget,
 project: &Project,
 manifest_target_kind: TargetKind,
 downloaded_graph: HashMap<PackageId, DependencyGraphNode>,
@@ -327,10 +327,10 @@ impl Project {
 .into_iter()
 .map(|(id, node)| {
 let source = node.pkg_ref.source();
-let path = Arc::from(
-node.container_folder_from_project(&id, project, manifest_target_kind)
-.as_path(),
-);
+let path = node
+.container_folder_from_project(&id, project, manifest_target_kind)
+.as_path()
+.into();
 let id = Arc::new(id);
 let project = project.clone();
@@ -356,7 +356,7 @@ impl Project {
 while let Some(task) = tasks.join_next().await {
 let (id, node) = task.unwrap()?;
-Arc::get_mut(graph).unwrap().insert(id, node);
+graph.insert(id, node);
 }
 Ok(())
@@ -372,7 +372,7 @@ impl Project {
 .instrument(tracing::debug_span!("get targets (non-wally)"))
 .await?;
-self.link_dependencies(graph.clone(), false)
+self.link_dependencies(&graph, false)
 .instrument(tracing::debug_span!("link (non-wally)"))
 .await?;
@@ -424,7 +424,7 @@ impl Project {
 .await
 }
 None => {
-apply_patch(&id, container_folder, &patch_path, Arc::new(())).await
+apply_patch(&id, container_folder, &patch_path, ().into()).await
 }
 }
 }
@@ -437,7 +437,7 @@ impl Project {
 }
 // step 4. link ALL dependencies. do so with types
-self.link_dependencies(graph.clone(), true)
+self.link_dependencies(&graph, true)
 .instrument(tracing::debug_span!("link (all)"))
 .await?;
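The graph_to_download change above trades an Arc clone of the whole dependency graph for a Cow that borrows when force is set and only allocates a filtered copy otherwise. A small sketch of that borrow-or-own shape, using a stand-in map type instead of the actual DependencyGraph:

    use std::borrow::Cow;
    use std::collections::HashMap;

    type Graph = HashMap<String, u32>;

    // Borrow the whole graph when forced; otherwise build and own a filtered copy.
    fn graph_to_download(graph: &Graph, force: bool) -> Cow<'_, Graph> {
        if force {
            Cow::Borrowed(graph)
        } else {
            let filtered: Graph = graph
                .iter()
                .filter(|(_, count)| **count > 0)
                .map(|(id, count)| (id.clone(), *count))
                .collect();
            Cow::Owned(filtered)
        }
    }

    fn main() {
        let graph: Graph = HashMap::from([("a".into(), 1), ("b".into(), 0)]);
        // Either variant derefs to &Graph, so downstream code stays unchanged.
        assert_eq!(graph_to_download(&graph, true).len(), 2);
        assert_eq!(graph_to_download(&graph, false).len(), 1);
    }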


@@ -152,13 +152,14 @@ impl Project {
 auth_config: AuthConfig,
 ) -> Self {
 Project {
-shared: Arc::new(ProjectShared {
+shared: ProjectShared {
 package_dir: package_dir.as_ref().to_path_buf(),
 workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
 data_dir: data_dir.as_ref().to_path_buf(),
 cas_dir: cas_dir.as_ref().to_path_buf(),
 auth_config,
-}),
+}
+.into(),
 }
 }


@@ -15,8 +15,8 @@ fn index_entry(
 entry: &fs::DirEntry,
 packages_index_dir: &Path,
 tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-used_paths: &Arc<HashSet<PathBuf>>,
+used_paths: Arc<HashSet<PathBuf>>,
-#[cfg(feature = "patches")] patched_packages: &Arc<HashSet<PathBuf>>,
+#[cfg(feature = "patches")] patched_packages: Arc<HashSet<PathBuf>>,
 ) {
 fn get_package_name_from_container(container: &Path) -> (bool, String) {
 let Component::Normal(first_component) = container.components().next().unwrap() else {
@@ -40,9 +40,6 @@ fn index_entry(
 #[cfg_attr(not(feature = "patches"), allow(unused_variables))]
 let (is_wally, package_name) = get_package_name_from_container(&path_relative);
-let used_paths = used_paths.clone();
-#[cfg(feature = "patches")]
-let patched_packages = patched_packages.clone();
 tasks.spawn(async move {
 if is_wally {
 #[cfg(not(feature = "wally-compat"))]
@@ -100,9 +97,8 @@ fn index_entry(
 fn packages_entry(
 entry: fs::DirEntry,
 tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-expected_aliases: &Arc<HashSet<Alias>>,
+expected_aliases: Arc<HashSet<Alias>>,
 ) {
-let expected_aliases = expected_aliases.clone();
 tasks.spawn(async move {
 if entry.file_type().await?.is_dir() {
 return Ok(());
@@ -134,9 +130,8 @@ fn packages_entry(
 fn scripts_entry(
 entry: fs::DirEntry,
 tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
-expected_aliases: &Arc<HashSet<Alias>>,
+expected_aliases: Arc<HashSet<Alias>>,
 ) {
-let expected_aliases = expected_aliases.clone();
 tasks.spawn(async move {
 if !entry.file_type().await?.is_dir() {
 return Ok(());
@@ -244,16 +239,16 @@ impl Project {
 &entry?,
 &packages_index_dir,
 &mut tasks,
-&used_paths,
+used_paths.clone(),
 #[cfg(feature = "patches")]
-&patched_packages,
+patched_packages.clone(),
 );
 }
 Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
 packages_entry(
 entry?,
 &mut tasks,
-&expected_aliases,
+expected_aliases.clone(),
 );
 }
 else => break,
@@ -288,7 +283,7 @@ impl Project {
 let expected_aliases = Arc::new(expected_aliases);
 while let Some(entry) = entries.next_entry().await? {
-scripts_entry(entry, &mut tasks, &expected_aliases);
+scripts_entry(entry, &mut tasks, expected_aliases.clone());
 }
 }
 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
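The signature changes above move the Arc clone out of index_entry/packages_entry/scripts_entry and up to the call site, so each spawned task's ownership of the shared set is visible where the task is created. A dependency-free sketch of the same shape, using std::thread in place of the tokio JoinSet and made-up names:

    use std::collections::HashSet;
    use std::sync::Arc;
    use std::thread;

    // Takes an Arc handle by value; the caller decides where the clone happens.
    fn spawn_check(used_paths: Arc<HashSet<String>>, candidate: String) -> thread::JoinHandle<bool> {
        thread::spawn(move || used_paths.contains(&candidate))
    }

    fn main() {
        let used_paths = Arc::new(HashSet::from(["kept".to_string()]));
        let handles: Vec<_> = ["kept", "unused"]
            .into_iter()
            .map(|name| spawn_check(used_paths.clone(), name.to_string()))
            .collect();
        for handle in handles {
            println!("{}", handle.join().unwrap());
        }
    }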


@@ -15,7 +15,6 @@ use std::{
 collections::HashMap,
 ffi::OsStr,
 path::{Path, PathBuf},
-sync::Arc,
 };
 use tokio::task::{spawn_blocking, JoinSet};
 use tracing::{instrument, Instrument as _};
@@ -64,16 +63,15 @@ impl Project {
 #[instrument(skip(self, graph), level = "debug")]
 pub(crate) async fn link_dependencies(
 &self,
-graph: Arc<DependencyGraphWithTarget>,
+graph: &DependencyGraphWithTarget,
 with_types: bool,
 ) -> Result<(), errors::LinkingError> {
 let manifest = self.deser_manifest().await?;
 let manifest_target_kind = manifest.target.kind();
-let manifest = Arc::new(manifest);
 // step 1. link all non-wally packages (and their dependencies) temporarily without types
 // we do this separately to allow the required tools for the scripts to be installed
-self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
+self.link(graph, &manifest, &PackageTypes::default(), false)
 .await?;
 if !with_types {
@@ -155,15 +153,14 @@ impl Project {
 }
 // step 3. link all packages (and their dependencies), this time with types
-self.link(&graph, &manifest, &Arc::new(package_types), true)
-.await
+self.link(graph, &manifest, &package_types, true).await
 }
 async fn link(
 &self,
-graph: &Arc<DependencyGraphWithTarget>,
+graph: &DependencyGraphWithTarget,
-manifest: &Arc<Manifest>,
+manifest: &Manifest,
-package_types: &Arc<PackageTypes>,
+package_types: &PackageTypes,
 is_complete: bool,
 ) -> Result<(), errors::LinkingError> {
 let package_dir_canonical = fs::canonicalize(self.package_dir()).await?;
@@ -308,7 +305,7 @@ impl Project {
 for (dep_id, dep_alias) in &node.node.dependencies {
 let dep_id = dep_id.clone();
 let dep_alias = dep_alias.clone();
-let graph = graph.clone();
+let dep_node = graph.get(&dep_id).cloned();
 let node = node.clone();
 let package_id = package_id.clone();
 let node_container_folder = node_container_folder.clone();
@@ -316,7 +313,7 @@ impl Project {
 let package_dir = self.package_dir().to_path_buf();
 dependency_tasks.spawn(async move {
-let Some(dep_node) = graph.get(&dep_id) else {
+let Some(dep_node) = dep_node else {
 return if is_complete {
 Err(errors::LinkingError::DependencyNotFound(
 dep_id.to_string(),
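The last two hunks stop moving a handle to the whole graph into every spawned task and instead clone just the one dependency node before the spawn. A minimal sketch of that lookup-before-spawn shape, with stand-in types and std::thread standing in for the async task set:

    use std::collections::HashMap;
    use std::thread;

    #[derive(Clone)]
    struct Node {
        version: String,
    }

    fn main() {
        let graph: HashMap<String, Node> = HashMap::from([(
            "dep-a".to_string(),
            Node { version: "1.2.3".to_string() },
        )]);

        // Clone only the node this task needs instead of capturing the whole graph.
        let dep_node = graph.get("dep-a").cloned();

        let handle = thread::spawn(move || match dep_node {
            Some(node) => format!("linking dep-a@{}", node.version),
            None => "dependency not found".to_string(),
        });
        println!("{}", handle.join().unwrap());
    }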