mirror of https://github.com/pesde-pkg/pesde.git (synced 2025-04-19 03:13:51 +01:00)

feat: store dependency over downloaded graphs

parent 6a8dfe0ba3, commit d0169976cd
26 changed files with 829 additions and 562 deletions
@@ -16,6 +16,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 ### Changed
 - Change handling of graphs to a flat structure by @daimond113
+- Store dependency over downloaded graphs in the lockfile by @daimond113
 
 ### Removed
 - Remove old includes format compatibility by @daimond113
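For readers skimming only the changelog: the two "Changed" entries above correspond to the new src/graph.rs module added later in this commit. A minimal sketch of the shape it introduces (abbreviated; the full definitions appear in the src/graph.rs hunk further down, and PackageId here is a stand-in for pesde's real key type):

    use std::collections::BTreeMap;

    // Sketch only, abbreviated from the src/graph.rs hunk below.
    type PackageId = String;
    struct Target;
    struct DependencyGraphNode { /* direct, dependencies, resolved_ty, is_peer, pkg_ref */ }

    // The lockfile now stores a plain DependencyGraph; the Target produced by a
    // download lives separately in DownloadedDependencyGraphNode and is None when
    // nothing was written to disk (write = false, or a dev dependency in prod).
    type Graph<Node> = BTreeMap<PackageId, Node>;
    type DependencyGraph = Graph<DependencyGraphNode>;

    struct DownloadedDependencyGraphNode {
        target: Option<Target>,
        node: DependencyGraphNode,
    }
    type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;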
@@ -29,6 +29,7 @@ bin = [
 "tokio/rt",
 "tokio/rt-multi-thread",
 "tokio/macros",
+"dep:tempfile",
 ]
 wally-compat = ["dep:async_zip", "dep:serde_json"]
 patches = ["dep:git2"]
@@ -56,7 +57,7 @@ pathdiff = "0.2.3"
 relative-path = { version = "1.9.3", features = ["serde"] }
 tracing = { version = "0.1.41", features = ["attributes"] }
 thiserror = "2.0.7"
-tokio = { version = "1.42.0", features = ["process"] }
+tokio = { version = "1.42.0", features = ["process", "macros"] }
 tokio-util = "0.7.13"
 async-stream = "0.3.6"
 futures = "0.3.31"
@@ -64,7 +65,6 @@ full_moon = { version = "1.1.2", features = ["luau"] }
 url = { version = "2.5.4", features = ["serde"] }
 chrono = { version = "0.4.39", features = ["serde"] }
 sha2 = "0.10.8"
-tempfile = "3.14.0"
 wax = { version = "0.6.0", default-features = false }
 fs-err = { version = "3.0.0", features = ["tokio"] }
 
@@ -86,6 +86,7 @@ dirs = { version = "5.0.1", optional = true }
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
 indicatif = { version = "0.17.9", optional = true }
 inquire = { version = "0.7.5", optional = true }
+tempfile = { version = "3.14.0", optional = true }
 
 [target.'cfg(target_os = "windows")'.dependencies]
 winreg = { version = "0.52.0", optional = true }
@@ -15,7 +15,9 @@ use pesde::{
 names::PackageName,
 source::{
 pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
-traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
+traits::{
+DownloadOptions, GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions,
+},
 PackageSources,
 },
 Project, RefreshedSources,
@@ -139,7 +141,7 @@ impl ExecuteCommand {
 project.auth_config().clone(),
 );
 
-let (fs, target) = source
+let fs = source
 .download(
 &pkg_ref,
 &DownloadOptions {
@@ -150,12 +152,24 @@ impl ExecuteCommand {
 )
 .await
 .context("failed to download package")?;
-let bin_path = target.bin_path().context("package has no binary export")?;
 
 fs.write_to(tempdir.path(), project.cas_dir(), true)
 .await
 .context("failed to write package contents")?;
 
+let target = source
+.get_target(
+&pkg_ref,
+&GetTargetOptions {
+project: project.clone(),
+path: Arc::from(tempdir.path()),
+},
+)
+.await
+.context("failed to get target")?;
+
+let bin_path = target.bin_path().context("package has no binary export")?;
+
 let graph = project
 .dependency_graph(None, refreshed_sources.clone(), true)
 .await
@@ -1,7 +1,6 @@
 use crate::cli::up_to_date_lockfile;
 use anyhow::Context;
 use clap::Args;
-use futures::future::try_join_all;
 use pesde::{
 source::{
 specifiers::DependencySpecifiers,
@@ -10,6 +9,7 @@ use pesde::{
 Project, RefreshedSources,
 };
 use semver::VersionReq;
+use tokio::task::JoinSet;
 
 #[derive(Debug, Args)]
 pub struct OutdatedCommand {
@@ -38,11 +38,13 @@ impl OutdatedCommand {
 
 let refreshed_sources = RefreshedSources::new();
 
-if try_join_all(graph.into_iter().map(|(current_id, node)| {
+let mut tasks = graph
+.into_iter()
+.map(|(current_id, node)| {
 let project = project.clone();
 let refreshed_sources = refreshed_sources.clone();
 async move {
-let Some((alias, mut specifier, _)) = node.node.direct else {
+let Some((alias, mut specifier, _)) = node.direct else {
 return Ok::<bool, anyhow::Error>(true);
 };
 
@@ -55,7 +57,7 @@ impl OutdatedCommand {
 return Ok(true);
 }
 
-let source = node.node.pkg_ref.source();
+let source = node.pkg_ref.source();
 refreshed_sources
 .refresh(
 &source,
@@ -108,11 +110,18 @@ impl OutdatedCommand {
 
 Ok(true)
 }
-}))
-.await?
-.into_iter()
-.all(|b| b)
-{
+})
+.collect::<JoinSet<_>>();
+
+let mut all_up_to_date = true;
+
+while let Some(task) = tasks.join_next().await {
+if !task.unwrap()? {
+all_up_to_date = false;
+}
+}
+
+if all_up_to_date {
 println!("all packages are up to date");
 }
 
@@ -33,11 +33,14 @@ impl PatchCommand {
 
 let node = graph.get(&id).context("package not found in graph")?;
 
-if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) {
-anyhow::bail!("cannot patch a workspace package")
+if matches!(
+node.pkg_ref,
+PackageRefs::Workspace(_) | PackageRefs::Path(_)
+) {
+anyhow::bail!("cannot patch a workspace or a path package")
 }
 
-let source = node.node.pkg_ref.source();
+let source = node.pkg_ref.source();
 
 let directory = project
 .data_dir()
@@ -49,7 +52,7 @@ impl PatchCommand {
 
 source
 .download(
-&node.node.pkg_ref,
+&node.pkg_ref,
 &DownloadOptions {
 project: project.clone(),
 reqwest,
@@ -57,7 +60,6 @@ impl PatchCommand {
 },
 )
 .await?
-.0
 .write_to(&directory, project.cas_dir(), false)
 .await
 .context("failed to write package contents")?;
@@ -12,20 +12,23 @@ use pesde::{
 git_index::GitBasedSource,
 pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
 specifiers::DependencySpecifiers,
-traits::{PackageSource, RefreshOptions, ResolveOptions},
+traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions, ResolveOptions},
 workspace::{
 specifier::{VersionType, VersionTypeOrReq},
 WorkspacePackageSource,
 },
 PackageSources, IGNORED_DIRS, IGNORED_FILES,
 },
-Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
+Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
 };
 use reqwest::{header::AUTHORIZATION, StatusCode};
 use semver::VersionReq;
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};
 use tempfile::Builder;
-use tokio::io::{AsyncSeekExt, AsyncWriteExt};
+use tokio::{
+io::{AsyncSeekExt, AsyncWriteExt},
+task::JoinSet,
+};
 
 #[derive(Debug, Args, Clone)]
 pub struct PublishCommand {
@@ -73,6 +76,7 @@ impl PublishCommand {
 project: &Project,
 reqwest: reqwest::Client,
 is_root: bool,
+refreshed_sources: &RefreshedSources,
 ) -> anyhow::Result<()> {
 let mut manifest = project
 .deser_manifest()
@@ -111,18 +115,65 @@ impl PublishCommand {
 
 match up_to_date_lockfile(project).await? {
 Some(lockfile) => {
-if lockfile
+let mut tasks = lockfile
 .graph
-.values()
-.filter_map(|node| node.node.direct.as_ref().map(|_| node))
-.any(|node| {
-node.target.build_files().is_none()
-&& !matches!(node.node.resolved_ty, DependencyType::Dev)
+.iter()
+.filter(|(_, node)| node.direct.is_some())
+.map(|(id, node)| {
+let project = project.clone();
+let base_folder = manifest
+.target
+.kind()
+.packages_folder(id.version_id().target());
+let container_folder = node.container_folder(
+&project
+.package_dir()
+.join(base_folder)
+.join(PACKAGES_CONTAINER_NAME),
+id,
+);
+
+let node = node.clone();
+let refreshed_sources = refreshed_sources.clone();
+
+async move {
+let source = node.pkg_ref.source();
+refreshed_sources
+.refresh(
+&source,
+&RefreshOptions {
+project: project.clone(),
+},
+)
+.await
+.context("failed to refresh source")?;
+let target = source
+.get_target(
+&node.pkg_ref,
+&GetTargetOptions {
+project,
+path: Arc::from(container_folder),
+},
+)
+.await?;
+
+Ok::<_, anyhow::Error>(
+target.build_files().is_none()
+&& !matches!(node.resolved_ty, DependencyType::Dev),
+)
+}
 })
-{
+.collect::<JoinSet<_>>();
+
+while let Some(result) = tasks.join_next().await {
+let result = result
+.unwrap()
+.context("failed to get target of dependency node")?;
+if result {
 anyhow::bail!("roblox packages may not depend on non-roblox packages");
 }
 }
+}
 None => {
 anyhow::bail!("outdated lockfile, please run the install command first")
 }
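The hunk above follows the same migration pattern used throughout this commit: the old try_join_all(iter.map(...)).await style is replaced by collecting futures into a tokio::task::JoinSet and draining it with join_next. A hedged, self-contained sketch of that pattern (names are illustrative, not from the codebase):

    use tokio::task::JoinSet;

    // Illustrative only: run a batch of checks concurrently, mirroring the
    // JoinSet usage in the publish and outdated commands above.
    async fn all_even(items: Vec<u64>) -> anyhow::Result<bool> {
        let mut tasks = items
            .into_iter()
            .map(|n| async move { Ok::<bool, anyhow::Error>(n % 2 == 0) })
            .collect::<JoinSet<_>>();

        let mut all_ok = true;
        while let Some(joined) = tasks.join_next().await {
            // join_next yields Result<T, JoinError>: unwrap the join,
            // then propagate the task's own error with `?`.
            if !joined.unwrap()? {
                all_ok = false;
            }
        }
        Ok(all_ok)
    }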
@@ -376,8 +427,6 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
 }
 }
 
-let refreshed_sources = RefreshedSources::new();
-
 for specifier in manifest
 .dependencies
 .values_mut()
@@ -693,7 +742,12 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
 }
 
 pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
-let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
+let refreshed_sources = RefreshedSources::new();
+
+let result = self
+.clone()
+.run_impl(&project, reqwest.clone(), true, &refreshed_sources)
+.await;
 if project.workspace_dir().is_some() {
 return result;
 } else {
@@ -703,7 +757,11 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
 run_on_workspace_members(&project, |project| {
 let reqwest = reqwest.clone();
 let this = self.clone();
-async move { this.run_impl(&project, reqwest, false).await }
+let refreshed_sources = refreshed_sources.clone();
+async move {
+this.run_impl(&project, reqwest, false, &refreshed_sources)
+.await
+}
 })
 .await
 .map(|_| ())
@@ -6,11 +6,13 @@ use pesde::{
 errors::{ManifestReadError, WorkspaceMembersError},
 linking::generator::generate_bin_linking_module,
 names::{PackageName, PackageNames},
+source::traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
 Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
 };
 use relative_path::RelativePathBuf;
 use std::{
 collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::Path, process::Command,
+sync::Arc,
 };
 
 #[derive(Debug, Args)]
@@ -75,7 +77,7 @@ impl RunCommand {
 
 let mut versions = graph
 .into_iter()
-.filter(|(id, node)| *id.name() == pkg_name && node.node.direct.is_some())
+.filter(|(id, node)| *id.name() == pkg_name && node.direct.is_some())
 .collect::<Vec<_>>();
 
 let (id, node) = match versions.len() {
@@ -84,17 +86,13 @@ impl RunCommand {
 _ => anyhow::bail!("multiple versions found. use the package's alias instead."),
 };
 
-let Some(bin_path) = node.target.bin_path() else {
-anyhow::bail!("package has no bin path");
-};
-
 let base_folder = project
 .deser_manifest()
 .await?
 .target
 .kind()
 .packages_folder(id.version_id().target());
-let container_folder = node.node.container_folder(
+let container_folder = node.container_folder(
 &project
 .package_dir()
 .join(base_folder)
@@ -102,6 +100,27 @@ impl RunCommand {
 &id,
 );
 
+let source = node.pkg_ref.source();
+source
+.refresh(&RefreshOptions {
+project: project.clone(),
+})
+.await
+.context("failed to refresh source")?;
+let target = source
+.get_target(
+&node.pkg_ref,
+&GetTargetOptions {
+project,
+path: Arc::from(container_folder.as_path()),
+},
+)
+.await?;
+
+let Some(bin_path) = target.bin_path() else {
+anyhow::bail!("package has no bin path");
+};
+
 let path = bin_path.to_path(&container_folder);
 
 run(&path, &path);
@@ -5,24 +5,24 @@ use std::{
 time::Instant,
 };
 
+use crate::cli::{
+bin_dir,
+reporters::{self, CliReporter},
+resolve_overrides, run_on_workspace_members, up_to_date_lockfile,
+};
 use anyhow::Context;
 use colored::Colorize;
 use fs_err::tokio as fs;
 use futures::future::try_join_all;
 use pesde::{
 download_and_link::{filter_graph, DownloadAndLinkHooks, DownloadAndLinkOptions},
-lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
+graph::{ConvertableGraph, DependencyGraph, DownloadedGraph},
+lockfile::Lockfile,
 manifest::{target::TargetKind, DependencyType},
 Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
 };
 use tokio::task::JoinSet;
 
-use crate::cli::{
-bin_dir,
-reporters::{self, CliReporter},
-resolve_overrides, run_on_workspace_members, up_to_date_lockfile,
-};
-
 use super::files::make_executable;
 
 fn bin_link_file(alias: &str) -> String {
@@ -68,7 +68,7 @@ impl DownloadAndLinkHooks for InstallHooks {
 ) -> Result<(), Self::Error> {
 let mut tasks = downloaded_graph
 .values()
-.filter(|node| node.target.bin_path().is_some())
+.filter(|node| node.target.as_ref().is_some_and(|t| t.bin_path().is_some()))
 .filter_map(|node| node.node.direct.as_ref())
 .map(|(alias, _, _)| alias)
 .filter(|alias| {
@@ -237,13 +237,7 @@ pub async fn install(
 root_progress.reset();
 root_progress.set_message("resolve");
 
-let old_graph = lockfile.map(|lockfile| {
-lockfile
-.graph
-.into_iter()
-.map(|(id, node)| (id, node.node))
-.collect()
-});
+let old_graph = lockfile.map(|lockfile| lockfile.graph);
 
 let graph = project
 .dependency_graph(
@@ -285,7 +279,12 @@ pub async fn install(
 root_progress.set_message("patch");
 
 project
-.apply_patches(&filter_graph(&downloaded_graph, options.prod), reporter)
+.apply_patches(
+&Arc::into_inner(filter_graph(&downloaded_graph, options.prod))
+.unwrap()
+.convert(),
+reporter,
+)
 .await?;
 }
 
@@ -297,7 +296,7 @@ pub async fn install(
 target: manifest.target.kind(),
 overrides,
 
-graph: downloaded_graph,
+graph: Arc::into_inner(graph).unwrap(),
 
 workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
 };
@@ -330,7 +329,7 @@ pub async fn install(
 }
 
 /// Prints the difference between two graphs.
-pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DownloadedGraph) {
+pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DependencyGraph) {
 let mut old_pkg_map = BTreeMap::new();
 let mut old_direct_pkg_map = BTreeMap::new();
 let mut new_pkg_map = BTreeMap::new();
@@ -344,9 +343,9 @@ pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: D
 }
 
 for (id, node) in &new_graph {
-new_pkg_map.insert(id, &node.node);
-if node.node.direct.is_some() {
-new_direct_pkg_map.insert(id, &node.node);
+new_pkg_map.insert(id, node);
+if node.direct.is_some() {
+new_direct_pkg_map.insert(id, node);
 }
 }
 
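One detail worth noting from the install hunks above: the graph is taken back out of its shared Arc with Arc::into_inner(...).unwrap(). An illustrative sketch of that std API (not pesde code): it returns Some only when no other strong references remain, which is why the call sits after all download and link tasks have completed.

    use std::sync::Arc;

    // Illustrative only: reclaiming the inner value once no clones remain.
    fn reclaim<T>(shared: Arc<T>) -> Option<T> {
        Arc::into_inner(shared)
    }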
@@ -118,8 +118,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
 .graph
 .iter()
 .filter_map(|(_, node)| {
-node.node
-.direct
+node.direct
 .as_ref()
 .map(|(_, spec, source_ty)| (spec, source_ty))
 })
@@ -166,7 +165,7 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
 
 impl VersionedPackageName {
 #[cfg(feature = "patches")]
-fn get(self, graph: &pesde::lockfile::DownloadedGraph) -> anyhow::Result<PackageId> {
+fn get(self, graph: &pesde::graph::DependencyGraph) -> anyhow::Result<PackageId> {
 let version_id = match self.1 {
 Some(version) => version,
 None => {
@@ -1,10 +1,10 @@
 use crate::{
-lockfile::{DependencyGraph, DownloadedDependencyGraphNode},
+graph::{DependencyGraph, DownloadedDependencyGraphNode},
 manifest::DependencyType,
 reporters::{DownloadProgressReporter, DownloadsReporter},
 source::{
 ids::PackageId,
-traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
+traits::{DownloadOptions, GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
 },
 Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
 };
@@ -17,7 +17,7 @@ use tracing::{instrument, Instrument};
 
 /// Options for downloading.
 #[derive(Debug)]
-pub struct DownloadGraphOptions<Reporter> {
+pub(crate) struct DownloadGraphOptions<Reporter> {
 /// The reqwest client.
 pub reqwest: reqwest::Client,
 /// The downloads reporter.
@@ -39,7 +39,7 @@ where
 Reporter: for<'a> DownloadsReporter<'a> + Send + Sync + 'static,
 {
 /// Creates a new download options with the given reqwest client and reporter.
-pub fn new(reqwest: reqwest::Client) -> Self {
+pub(crate) fn new(reqwest: reqwest::Client) -> Self {
 Self {
 reqwest,
 reporter: None,
@@ -52,37 +52,37 @@ where
 }
 
 /// Sets the downloads reporter.
-pub fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
+pub(crate) fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
 self.reporter.replace(reporter.into());
 self
 }
 
 /// Sets the refreshed sources.
-pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
+pub(crate) fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
 self.refreshed_sources = refreshed_sources;
 self
 }
 
 /// Sets whether to skip dev dependencies.
-pub fn prod(mut self, prod: bool) -> Self {
+pub(crate) fn prod(mut self, prod: bool) -> Self {
 self.prod = prod;
 self
 }
 
 /// Sets whether to write the downloaded packages to disk.
-pub fn write(mut self, write: bool) -> Self {
+pub(crate) fn write(mut self, write: bool) -> Self {
 self.write = write;
 self
 }
 
 /// Sets whether to download Wally packages.
-pub fn wally(mut self, wally: bool) -> Self {
+pub(crate) fn wally(mut self, wally: bool) -> Self {
 self.wally = wally;
 self
 }
 
 /// Sets the max number of concurrent network requests.
-pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
+pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
 self.network_concurrency = network_concurrency;
 self
 }
@@ -105,7 +105,7 @@ impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
 impl Project {
 /// Downloads a graph of dependencies.
 #[instrument(skip_all, fields(prod = options.prod, wally = options.wally, write = options.write), level = "debug")]
-pub async fn download_graph<Reporter>(
+pub(crate) async fn download_graph<Reporter>(
 &self,
 graph: &DependencyGraph,
 options: DownloadGraphOptions<Reporter>,
@@ -138,7 +138,7 @@ impl Project {
 // we need to download pesde packages first, since scripts (for target finding for example) can depend on them
 .filter(|(_, node)| node.pkg_ref.like_wally() == wally)
 .map(|(package_id, node)| {
-let span = tracing::info_span!("download", package_id = package_id.to_string(),);
+let span = tracing::info_span!("download", package_id = package_id.to_string());
 
 let project = self.clone();
 let reqwest = reqwest.clone();
@@ -184,7 +184,7 @@ impl Project {
 
 tracing::debug!("downloading");
 
-let (fs, target) = match progress_reporter {
+let fs = match progress_reporter {
 Some(progress_reporter) => {
 source
 .download(
@@ -214,10 +214,25 @@ impl Project {
 
 tracing::debug!("downloaded");
 
+let mut target = None;
+
 if write {
 if !prod || node.resolved_ty != DependencyType::Dev {
-fs.write_to(container_folder, project.cas_dir(), true)
+fs.write_to(&container_folder, project.cas_dir(), true)
 .await?;
+
+target = Some(
+source
+.get_target(
+&node.pkg_ref,
+&GetTargetOptions {
+project,
+path: Arc::from(container_folder),
+},
+)
+.await
+.map_err(Box::new)?,
+);
 } else {
 tracing::debug!("skipping write to disk, dev dependency in prod mode");
 }
@@ -264,6 +279,10 @@ pub mod errors {
 #[error("failed to download package")]
 DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
 
+/// Error getting target
+#[error("failed to get target")]
+GetTargetFailed(#[from] Box<crate::source::errors::GetTargetError>),
+
 /// Error writing package contents
 #[error("failed to write package contents")]
 WriteFailed(#[source] std::io::Error),
@@ -1,6 +1,6 @@
 use crate::{
 download::DownloadGraphOptions,
-lockfile::{DependencyGraph, DownloadedGraph},
+graph::{DependencyGraph, DownloadedGraph},
 manifest::DependencyType,
 reporters::DownloadsReporter,
 Project, RefreshedSources,
@@ -29,10 +29,6 @@ pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> Arc<DownloadedGraph>
 )
 }
 
-/// Receiver for dependencies downloaded and linked
-pub type DownloadAndLinkReceiver =
-tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
-
 /// Hooks to perform actions after certain events during download and linking.
 #[allow(unused_variables)]
 pub trait DownloadAndLinkHooks {
src/graph.rs (new file, 98 lines)
@@ -0,0 +1,98 @@
+use crate::{
+manifest::{
+target::{Target, TargetKind},
+DependencyType,
+},
+source::{
+ids::{PackageId, VersionId},
+refs::PackageRefs,
+specifiers::DependencySpecifiers,
+traits::PackageRef,
+},
+};
+use serde::{Deserialize, Serialize};
+use std::{
+collections::BTreeMap,
+path::{Path, PathBuf},
+};
+
+/// A graph of dependencies
+pub type Graph<Node> = BTreeMap<PackageId, Node>;
+
+/// A dependency graph node
+#[derive(Serialize, Deserialize, Debug, Clone)]
+pub struct DependencyGraphNode {
+/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
+#[serde(default, skip_serializing_if = "Option::is_none")]
+pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
+/// The dependencies of the package
+#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
+pub dependencies: BTreeMap<PackageId, String>,
+/// The resolved (transformed, for example Peer -> Standard) type of the dependency
+pub resolved_ty: DependencyType,
+/// Whether the resolved type should be Peer if this isn't depended on
+#[serde(default, skip_serializing_if = "std::ops::Not::not")]
+pub is_peer: bool,
+/// The package reference
+pub pkg_ref: PackageRefs,
+}
+
+impl DependencyGraphNode {
+pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
+if self.pkg_ref.use_new_structure() {
+version_id.target().packages_folder(&project_target)
+} else {
+"..".to_string()
+}
+}
+
+/// Returns the folder to store the contents of the package in
+pub fn container_folder<P: AsRef<Path>>(&self, path: &P, package_id: &PackageId) -> PathBuf {
+let (name, version) = package_id.parts();
+
+if self.pkg_ref.like_wally() {
+return path
+.as_ref()
+.join(format!(
+"{}_{}@{}",
+package_id.name().as_str().0,
+name.as_str().1,
+version
+))
+.join(name.as_str().1);
+}
+
+path.as_ref()
+.join(name.escaped())
+.join(version.to_string())
+.join(name.as_str().1)
+}
+}
+
+/// A graph of `DependencyGraphNode`s
+pub type DependencyGraph = Graph<DependencyGraphNode>;
+
+/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
+#[derive(Debug, Clone)]
+pub struct DownloadedDependencyGraphNode {
+/// The target of the package
+/// None only if download was called with write = false or is a dev dependency in a prod install
+pub target: Option<Target>,
+/// The node
+pub node: DependencyGraphNode,
+}
+
+/// A graph of `DownloadedDependencyGraphNode`s
+pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;
+
+/// A trait for converting a graph to a different type of graph
+pub trait ConvertableGraph<Node> {
+/// Converts the graph to a different type of graph
+fn convert(self) -> Graph<Node>;
+}
+
+impl ConvertableGraph<DependencyGraphNode> for DownloadedGraph {
+fn convert(self) -> Graph<DependencyGraphNode> {
+self.into_iter().map(|(id, node)| (id, node.node)).collect()
+}
+}
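A small usage sketch for the module above, assuming the crate re-export paths match this diff (pesde::graph): the install flow shown earlier converts a DownloadedGraph back into the plain DependencyGraph stored in the lockfile by dropping each node's Option<Target>.

    // Hedged sketch, not part of the diff: illustrating ConvertableGraph::convert.
    use pesde::graph::{ConvertableGraph, DependencyGraph, DownloadedGraph};

    fn lockfile_graph(downloaded: DownloadedGraph) -> DependencyGraph {
        // keeps only the DependencyGraphNode metadata from each downloaded node
        downloaded.convert()
    }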
@@ -29,6 +29,8 @@ use wax::Pattern;
 pub mod download;
 /// Utility for downloading and linking in the correct order
 pub mod download_and_link;
+/// Graphs
+pub mod graph;
 /// Linking packages
 pub mod linking;
 /// Lockfile
@@ -1,6 +1,6 @@
 use crate::{
+graph::{DownloadedDependencyGraphNode, DownloadedGraph},
 linking::generator::get_file_types,
-lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
 manifest::Manifest,
 scripts::{execute_script, ExecuteScriptHooks, ScriptName},
 source::{
@@ -30,7 +30,7 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<Pat
 }
 
 async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
-let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;
+let hash = store_in_cas(cas_dir, contents.as_bytes()).await?;
 
 match fs::remove_file(&destination).await {
 Ok(_) => {}
@@ -52,10 +52,12 @@ impl ExecuteScriptHooks for LinkingExecuteScriptHooks {
 }
 }
 
+type PackageTypes = HashMap<PackageId, Vec<String>>;
+
 impl Project {
 /// Links the dependencies of the project
 #[instrument(skip(self, graph), level = "debug")]
-pub async fn link_dependencies(
+pub(crate) async fn link_dependencies(
 &self,
 graph: Arc<DownloadedGraph>,
 with_types: bool,
@@ -66,7 +68,7 @@ impl Project {
 
 // step 1. link all non-wally packages (and their dependencies) temporarily without types
 // we do this separately to allow the required tools for the scripts to be installed
-self.link(&graph, &manifest, &Arc::new(Default::default()), false)
+self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
 .await?;
 
 if !with_types {
@@ -78,14 +80,14 @@ impl Project {
 .iter()
 .map(|(package_id, node)| {
 let span =
-tracing::info_span!("extract types", package_id = package_id.to_string(),);
+tracing::info_span!("extract types", package_id = package_id.to_string());
 
 let package_id = package_id.clone();
 let node = node.clone();
 let project = self.clone();
 
 async move {
-let Some(lib_file) = node.target.lib_path() else {
+let Some(lib_file) = node.target.as_ref().and_then(|t| t.lib_path()) else {
 return Ok((package_id, vec![]));
 };
 
@@ -124,7 +126,9 @@ impl Project {
 vec![]
 };
 
-if let Some(build_files) = Some(&node.target)
+if let Some(build_files) = node
+.target
+.as_ref()
 .filter(|_| !node.node.pkg_ref.like_wally())
 .and_then(|t| t.build_files())
 {
@@ -146,7 +150,7 @@ impl Project {
 })
 .collect::<JoinSet<_>>();
 
-let mut package_types = HashMap::new();
+let mut package_types = PackageTypes::new();
 while let Some(task) = tasks.join_next().await {
 let (version_id, types) = task.unwrap()?;
 package_types.insert(version_id, types);
@@ -167,15 +171,19 @@ impl Project {
 node: &DownloadedDependencyGraphNode,
 package_id: &PackageId,
 alias: &str,
-package_types: &HashMap<PackageId, Vec<String>>,
+package_types: &PackageTypes,
 manifest: &Manifest,
 ) -> Result<(), errors::LinkingError> {
 static NO_TYPES: Vec<String> = Vec::new();
 
-if let Some(lib_file) = node.target.lib_path() {
+let Some(target) = &node.target else {
+return Ok(());
+};
+
+if let Some(lib_file) = target.lib_path() {
 let lib_module = generator::generate_lib_linking_module(
 &generator::get_lib_require_path(
-&node.target.kind(),
+&target.kind(),
 base_folder,
 lib_file,
 container_folder,
@@ -195,7 +203,7 @@ impl Project {
 .await?;
 }
 
-if let Some(bin_file) = node.target.bin_path() {
+if let Some(bin_file) = target.bin_path() {
 let bin_module = generator::generate_bin_linking_module(
 container_folder,
 &generator::get_bin_require_path(base_folder, bin_file, container_folder),
@@ -209,7 +217,7 @@ impl Project {
 .await?;
 }
 
-if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
+if let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) {
 let scripts_base =
 create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
 .await?;
@@ -238,7 +246,7 @@ impl Project {
 &self,
 graph: &Arc<DownloadedGraph>,
 manifest: &Arc<Manifest>,
-package_types: &Arc<HashMap<PackageId, Vec<String>>>,
+package_types: &Arc<PackageTypes>,
 is_complete: bool,
 ) -> Result<(), errors::LinkingError> {
 let mut tasks = graph
@@ -319,7 +327,10 @@ impl Project {
 let linker_folder = create_and_canonicalize(node_container_folder.join(
 node.node.base_folder(
 package_id.version_id(),
-dependency_node.target.kind(),
+match &dependency_node.target {
+Some(t) => t.kind(),
+None => continue,
+},
 ),
 ))
 .await?;
@@ -1,94 +1,14 @@
 #![allow(deprecated)]
 use crate::{
-manifest::{
-overrides::OverrideKey,
-target::{Target, TargetKind},
-DependencyType,
-},
+graph::DependencyGraph,
+manifest::{overrides::OverrideKey, target::TargetKind},
 names::PackageName,
-source::{
-ids::{PackageId, VersionId},
-refs::PackageRefs,
-specifiers::DependencySpecifiers,
-traits::PackageRef,
-},
+source::specifiers::DependencySpecifiers,
 };
 use relative_path::RelativePathBuf;
 use semver::Version;
 use serde::{Deserialize, Serialize};
-use std::{
-collections::BTreeMap,
-path::{Path, PathBuf},
-};
+use std::collections::BTreeMap;
-
-/// A graph of dependencies
-pub type Graph<Node> = BTreeMap<PackageId, Node>;
-
-/// A dependency graph node
-#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct DependencyGraphNode {
-/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
-#[serde(default, skip_serializing_if = "Option::is_none")]
-pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
-/// The dependencies of the package
-#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
-pub dependencies: BTreeMap<PackageId, String>,
-/// The resolved (transformed, for example Peer -> Standard) type of the dependency
-pub resolved_ty: DependencyType,
-/// Whether the resolved type should be Peer if this isn't depended on
-#[serde(default, skip_serializing_if = "std::ops::Not::not")]
-pub is_peer: bool,
-/// The package reference
-pub pkg_ref: PackageRefs,
-}
-
-impl DependencyGraphNode {
-pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
-if self.pkg_ref.use_new_structure() {
-version_id.target().packages_folder(&project_target)
-} else {
-"..".to_string()
-}
-}
-
-/// Returns the folder to store the contents of the package in
-pub fn container_folder<P: AsRef<Path>>(&self, path: &P, package_id: &PackageId) -> PathBuf {
-let (name, version) = package_id.parts();
-
-if self.pkg_ref.like_wally() {
-return path
-.as_ref()
-.join(format!(
-"{}_{}@{}",
-package_id.name().as_str().0,
-name.as_str().1,
-version
-))
-.join(name.as_str().1);
-}
-
-path.as_ref()
-.join(name.escaped())
-.join(version.to_string())
-.join(name.as_str().1)
-}
-}
-
-/// A graph of `DependencyGraphNode`s
-pub type DependencyGraph = Graph<DependencyGraphNode>;
-
-/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
-#[derive(Serialize, Deserialize, Debug, Clone)]
-pub struct DownloadedDependencyGraphNode {
-/// The target of the package
-pub target: Target,
-/// The node
-#[serde(flatten)]
-pub node: DependencyGraphNode,
-}
-
-/// A graph of `DownloadedDependencyGraphNode`s
-pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;
-
 /// A lockfile
 #[derive(Serialize, Deserialize, Debug, Clone)]
@@ -108,8 +28,8 @@ pub struct Lockfile {
 pub workspace: BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>,
 
 /// The graph of dependencies
-#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
-pub graph: DownloadedGraph,
+#[serde(default, skip_serializing_if = "DependencyGraph::is_empty")]
+pub graph: DependencyGraph,
 }
 
 /// Old lockfile stuff. Will be removed in a future version.
@@ -201,9 +121,7 @@ pub mod old {
 versions.into_iter().map(move |(version, node)| {
 (
 PackageId(name.clone(), version),
-super::DownloadedDependencyGraphNode {
-target: node.target,
-node: super::DependencyGraphNode {
+crate::graph::DependencyGraphNode {
 direct: node.node.direct,
 dependencies: node
 .node
@@ -217,7 +135,6 @@ pub mod old {
 is_peer: node.node.is_peer,
 pkg_ref: node.node.pkg_ref,
 },
-},
 )
 })
 })
@@ -1,5 +1,5 @@
 use crate::{
-lockfile::DownloadedGraph,
+graph::DependencyGraph,
 reporters::{PatchProgressReporter, PatchesReporter},
 source::ids::PackageId,
 Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
@@ -80,7 +80,7 @@ impl Project {
 #[instrument(skip(self, graph, reporter), level = "debug")]
 pub async fn apply_patches<Reporter>(
 &self,
-graph: &DownloadedGraph,
+graph: &DependencyGraph,
 reporter: Arc<Reporter>,
 ) -> Result<(), errors::ApplyPatchesError>
 where
@@ -102,7 +102,7 @@ impl Project {
 continue;
 };
 
-let container_folder = node.node.container_folder(
+let container_folder = node.container_folder(
 &self
 .package_dir()
 .join(
@@ -116,7 +116,7 @@ impl Project {
 );
 
 let reporter = reporter.clone();
-let span = tracing::info_span!("apply patch", package_id = package_id.to_string(),);
+let span = tracing::info_span!("apply patch", package_id = package_id.to_string());
 
 tasks.spawn(
 async move {
@@ -1,5 +1,5 @@
 use crate::{
-lockfile::{DependencyGraph, DependencyGraphNode},
+graph::{DependencyGraph, DependencyGraphNode},
 manifest::{overrides::OverrideSpecifier, DependencyType},
 source::{
 ids::PackageId,
@@ -10,7 +10,6 @@ use sha2::{Digest, Sha256};
 use std::{
 collections::BTreeMap,
 fmt::Debug,
-future::Future,
 path::{Path, PathBuf},
 };
 use tempfile::Builder;
@@ -22,7 +21,7 @@ use tracing::instrument;
 
 /// A file system entry
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum FSEntry {
+pub enum FsEntry {
 /// A file with the given hash
 #[serde(rename = "f")]
 File(String),
@@ -35,9 +34,9 @@ pub enum FSEntry {
 #[derive(Debug, Clone, Serialize, Deserialize)]
 // don't need to differentiate between CAS and non-CAS, since non-CAS won't be serialized
 #[serde(untagged)]
-pub enum PackageFS {
+pub enum PackageFs {
 /// A package stored in the CAS
-CAS(BTreeMap<RelativePathBuf, FSEntry>),
+CAS(BTreeMap<RelativePathBuf, FsEntry>),
 /// A package that's to be copied
 Copy(PathBuf, TargetKind),
 }
@@ -74,15 +73,9 @@ pub(crate) fn cas_path(hash: &str, cas_dir: &Path) -> PathBuf {
 cas_dir.join(prefix).join(rest)
 }
 
-pub(crate) async fn store_in_cas<
-R: tokio::io::AsyncRead + Unpin,
-P: AsRef<Path>,
-C: FnMut(Vec<u8>) -> F,
-F: Future<Output = std::io::Result<()>>,
->(
+pub(crate) async fn store_in_cas<R: tokio::io::AsyncRead + Unpin, P: AsRef<Path>>(
 cas_dir: P,
 mut contents: R,
-mut bytes_cb: C,
 ) -> std::io::Result<String> {
 let tmp_dir = cas_dir.as_ref().join(".tmp");
 fs::create_dir_all(&tmp_dir).await?;
@@ -105,7 +98,6 @@ pub(crate) async fn store_in_cas<
 
 let bytes = &buf[..bytes_read];
 hasher.update(bytes);
-bytes_cb(bytes.to_vec()).await?;
 file_writer.write_all(bytes).await?;
 }
 
@ -125,7 +117,7 @@ pub(crate) async fn store_in_cas<
|
||||||
Ok(hash)
|
Ok(hash)
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PackageFS {
|
impl PackageFs {
|
||||||
/// Write the package to the given destination
|
/// Write the package to the given destination
|
||||||
#[instrument(skip(self), level = "debug")]
|
#[instrument(skip(self), level = "debug")]
|
||||||
pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
|
pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
|
||||||
|
@ -135,7 +127,7 @@ impl PackageFS {
|
||||||
link: bool,
|
link: bool,
|
||||||
) -> std::io::Result<()> {
|
) -> std::io::Result<()> {
|
||||||
match self {
|
match self {
|
||||||
PackageFS::CAS(entries) => {
|
PackageFs::CAS(entries) => {
|
||||||
try_join_all(entries.iter().map(|(path, entry)| {
|
try_join_all(entries.iter().map(|(path, entry)| {
|
||||||
let destination = destination.as_ref().to_path_buf();
|
let destination = destination.as_ref().to_path_buf();
|
||||||
let cas_path = cas_path.as_ref().to_path_buf();
|
let cas_path = cas_path.as_ref().to_path_buf();
|
||||||
|
@ -144,7 +136,7 @@ impl PackageFS {
|
||||||
let path = path.to_path(destination);
|
let path = path.to_path(destination);
|
||||||
|
|
||||||
match entry {
|
match entry {
|
||||||
FSEntry::File(hash) => {
|
FsEntry::File(hash) => {
|
||||||
if let Some(parent) = path.parent() {
|
if let Some(parent) = path.parent() {
|
||||||
fs::create_dir_all(parent).await?;
|
fs::create_dir_all(parent).await?;
|
||||||
}
|
}
|
||||||
|
@ -159,7 +151,7 @@ impl PackageFS {
|
||||||
set_readonly(&path, false).await?;
|
set_readonly(&path, false).await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
FSEntry::Directory => {
|
FsEntry::Directory => {
|
||||||
fs::create_dir_all(path).await?;
|
fs::create_dir_all(path).await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -169,7 +161,7 @@ impl PackageFS {
|
||||||
}))
|
}))
|
||||||
.await?;
|
.await?;
|
||||||
}
|
}
|
||||||
PackageFS::Copy(src, target) => {
|
PackageFs::Copy(src, target) => {
|
||||||
fs::create_dir_all(destination.as_ref()).await?;
|
fs::create_dir_all(destination.as_ref()).await?;
|
||||||
|
|
||||||
let mut read_dir = fs::read_dir(src).await?;
|
let mut read_dir = fs::read_dir(src).await?;
|
||||||
|
@ -220,7 +212,7 @@ impl PackageFS {
|
||||||
file_hash: H,
|
file_hash: H,
|
||||||
cas_path: P,
|
cas_path: P,
|
||||||
) -> Option<String> {
|
) -> Option<String> {
|
||||||
if !matches!(self, PackageFS::CAS(_)) {
|
if !matches!(self, PackageFs::CAS(_)) {
|
||||||
return None;
|
return None;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
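The hunks above narrow store_in_cas from a callback-driven helper to a plain two-argument function that hashes a byte stream and returns the content hash. A minimal standalone sketch of the same idea follows; it is not pesde's internal API, and it assumes the sha2 crate plus tokio with the fs, io-util, macros and rt-multi-thread features.

// Standalone sketch of content-addressed storage: hash a stream with SHA-256,
// then write it under a path derived from the hash (first two hex characters
// as a shard directory). Hypothetical helper, not pesde's store_in_cas itself.
use sha2::{Digest, Sha256};
use std::path::{Path, PathBuf};
use tokio::io::{AsyncRead, AsyncReadExt};

async fn store_in_cas_sketch<R: AsyncRead + Unpin, P: AsRef<Path>>(
    cas_dir: P,
    mut contents: R,
) -> std::io::Result<String> {
    let mut hasher = Sha256::new();
    let mut data = Vec::new();
    let mut buf = [0u8; 8192];

    // Read the stream once, hashing and buffering as we go.
    loop {
        let read = contents.read(&mut buf).await?;
        if read == 0 {
            break;
        }
        hasher.update(&buf[..read]);
        data.extend_from_slice(&buf[..read]);
    }

    let hash: String = hasher
        .finalize()
        .iter()
        .map(|b| format!("{b:02x}"))
        .collect();
    let (prefix, rest) = hash.split_at(2);
    let path: PathBuf = cas_dir.as_ref().join(prefix).join(rest);

    if let Some(parent) = path.parent() {
        tokio::fs::create_dir_all(parent).await?;
    }
    tokio::fs::write(&path, data).await?;
    Ok(hash)
}

#[tokio::main]
async fn main() -> std::io::Result<()> {
    // &[u8] implements AsyncRead, so an in-memory buffer works as input here.
    let hash = store_in_cas_sketch("cas", &b"hello world"[..]).await?;
    println!("stored under {hash}");
    Ok(())
}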
@ -1,4 +1,5 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
|
deser_manifest,
|
||||||
manifest::{
|
manifest::{
|
||||||
target::{Target, TargetKind},
|
target::{Target, TargetKind},
|
||||||
Manifest,
|
Manifest,
|
||||||
|
@ -6,22 +7,22 @@ use crate::{
|
||||||
names::PackageNames,
|
names::PackageNames,
|
||||||
reporters::DownloadProgressReporter,
|
reporters::DownloadProgressReporter,
|
||||||
source::{
|
source::{
|
||||||
fs::{store_in_cas, FSEntry, PackageFS},
|
fs::{store_in_cas, FsEntry, PackageFs},
|
||||||
git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
|
git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
|
||||||
git_index::{read_file, GitBasedSource},
|
git_index::{read_file, GitBasedSource},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::{DownloadOptions, PackageRef, RefreshOptions, ResolveOptions},
|
traits::{DownloadOptions, GetTargetOptions, PackageRef, RefreshOptions, ResolveOptions},
|
||||||
|
wally::compat_util::get_target,
|
||||||
PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
|
PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
|
||||||
},
|
},
|
||||||
util::hash,
|
util::hash,
|
||||||
Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
|
Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::future::try_join_all;
|
|
||||||
use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
|
use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf, sync::Arc};
|
use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf};
|
||||||
use tokio::{sync::Mutex, task::spawn_blocking};
|
use tokio::task::{spawn_blocking, JoinSet};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
/// The Git package reference
|
/// The Git package reference
|
||||||
|
@ -65,6 +66,7 @@ impl PackageSource for GitPackageSource {
|
||||||
type RefreshError = crate::source::git_index::errors::RefreshError;
|
type RefreshError = crate::source::git_index::errors::RefreshError;
|
||||||
type ResolveError = errors::ResolveError;
|
type ResolveError = errors::ResolveError;
|
||||||
type DownloadError = errors::DownloadError;
|
type DownloadError = errors::DownloadError;
|
||||||
|
type GetTargetError = errors::GetTargetError;
|
||||||
|
|
||||||
#[instrument(skip_all, level = "debug")]
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
|
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
|
||||||
|
@ -337,8 +339,10 @@ impl PackageSource for GitPackageSource {
|
||||||
&self,
|
&self,
|
||||||
pkg_ref: &Self::Ref,
|
pkg_ref: &Self::Ref,
|
||||||
options: &DownloadOptions<R>,
|
options: &DownloadOptions<R>,
|
||||||
) -> Result<(PackageFS, Target), Self::DownloadError> {
|
) -> Result<PackageFs, Self::DownloadError> {
|
||||||
let DownloadOptions { project, .. } = options;
|
let DownloadOptions {
|
||||||
|
project, reporter, ..
|
||||||
|
} = options;
|
||||||
|
|
||||||
let index_file = project
|
let index_file = project
|
||||||
.cas_dir()
|
.cas_dir()
|
||||||
|
@ -353,52 +357,10 @@ impl PackageSource for GitPackageSource {
|
||||||
pkg_ref.repo,
|
pkg_ref.repo,
|
||||||
pkg_ref.tree_id
|
pkg_ref.tree_id
|
||||||
);
|
);
|
||||||
|
reporter.report_done();
|
||||||
let fs = toml::from_str::<PackageFS>(&s).map_err(|e| {
|
return toml::from_str::<PackageFs>(&s).map_err(|e| {
|
||||||
errors::DownloadError::DeserializeFile(Box::new(self.repo_url.clone()), e)
|
errors::DownloadError::DeserializeFile(Box::new(self.repo_url.clone()), e)
|
||||||
})?;
|
});
|
||||||
|
|
||||||
let manifest = match &fs {
|
|
||||||
PackageFS::CAS(entries) => {
|
|
||||||
match entries.get(&RelativePathBuf::from(MANIFEST_FILE_NAME)) {
|
|
||||||
Some(FSEntry::File(hash)) => match fs
|
|
||||||
.read_file(hash, project.cas_dir())
|
|
||||||
.await
|
|
||||||
.map(|m| toml::de::from_str::<Manifest>(&m))
|
|
||||||
{
|
|
||||||
Some(Ok(m)) => Some(m),
|
|
||||||
Some(Err(e)) => {
|
|
||||||
return Err(errors::DownloadError::DeserializeFile(
|
|
||||||
Box::new(self.repo_url.clone()),
|
|
||||||
e,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
None => None,
|
|
||||||
},
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
_ => unreachable!("the package fs should be CAS"),
|
|
||||||
};
|
|
||||||
|
|
||||||
let target = match manifest {
|
|
||||||
Some(manifest) => manifest.target,
|
|
||||||
#[cfg(feature = "wally-compat")]
|
|
||||||
None if !pkg_ref.new_structure => {
|
|
||||||
let tempdir = tempfile::tempdir()?;
|
|
||||||
fs.write_to(tempdir.path(), project.cas_dir(), false)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
crate::source::wally::compat_util::get_target(project, &tempdir).await?
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
return Err(errors::DownloadError::NoManifest(Box::new(
|
|
||||||
self.repo_url.clone(),
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
return Ok((fs, target));
|
|
||||||
}
|
}
|
||||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
Err(e) => return Err(errors::DownloadError::Io(e)),
|
Err(e) => return Err(errors::DownloadError::Io(e)),
|
||||||
|
@ -453,9 +415,10 @@ impl PackageSource for GitPackageSource {
|
||||||
.await
|
.await
|
||||||
.unwrap()?;
|
.unwrap()?;
|
||||||
|
|
||||||
|
let records = {
|
||||||
let repo = repo.to_thread_local();
|
let repo = repo.to_thread_local();
|
||||||
|
|
||||||
let records = records
|
records
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|entry| {
|
.map(|entry| {
|
||||||
let object = repo.find_object(entry.oid).map_err(|e| {
|
let object = repo.find_object(entry.oid).map_err(|e| {
|
||||||
|
@ -475,11 +438,10 @@ impl PackageSource for GitPackageSource {
|
||||||
},
|
},
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
.collect::<Result<Vec<_>, _>>()?;
|
.collect::<Result<Vec<_>, _>>()?
|
||||||
|
};
|
||||||
|
|
||||||
let manifest = Arc::new(Mutex::new(None::<Vec<u8>>));
|
let mut tasks = records
|
||||||
let entries = try_join_all(
|
|
||||||
records
|
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.filter(|(path, contents)| {
|
.filter(|(path, contents)| {
|
||||||
let name = path.file_name().unwrap_or("");
|
let name = path.file_name().unwrap_or("");
|
||||||
|
@ -503,62 +465,28 @@ impl PackageSource for GitPackageSource {
|
||||||
true
|
true
|
||||||
})
|
})
|
||||||
.map(|(path, contents)| {
|
.map(|(path, contents)| {
|
||||||
let manifest = manifest.clone();
|
let project = project.clone();
|
||||||
|
|
||||||
async move {
|
async move {
|
||||||
let Some(contents) = contents else {
|
let Some(contents) = contents else {
|
||||||
return Ok::<_, errors::DownloadError>((path, FSEntry::Directory));
|
return Ok::<_, errors::DownloadError>((path, FsEntry::Directory));
|
||||||
};
|
};
|
||||||
|
|
||||||
let hash =
|
let hash = store_in_cas(project.cas_dir(), contents.as_slice()).await?;
|
||||||
store_in_cas(project.cas_dir(), contents.as_slice(), |_| async { Ok(()) })
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
if path == MANIFEST_FILE_NAME {
|
Ok((path, FsEntry::File(hash)))
|
||||||
manifest.lock().await.replace(contents);
|
}
|
||||||
|
})
|
||||||
|
.collect::<JoinSet<_>>();
|
||||||
|
|
||||||
|
let mut entries = BTreeMap::new();
|
||||||
|
|
||||||
|
while let Some(res) = tasks.join_next().await {
|
||||||
|
let (path, entry) = res.unwrap()?;
|
||||||
|
entries.insert(path, entry);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok((path, FSEntry::File(hash)))
|
let fs = PackageFs::CAS(entries);
|
||||||
}
|
|
||||||
}),
|
|
||||||
)
|
|
||||||
.await?
|
|
||||||
.into_iter()
|
|
||||||
.collect::<BTreeMap<_, _>>();
|
|
||||||
|
|
||||||
let manifest = match Arc::into_inner(manifest).unwrap().into_inner() {
|
|
||||||
Some(data) => match String::from_utf8(data.to_vec()) {
|
|
||||||
Ok(s) => match toml::from_str::<Manifest>(&s) {
|
|
||||||
Ok(m) => Some(m),
|
|
||||||
Err(e) => {
|
|
||||||
return Err(errors::DownloadError::DeserializeFile(
|
|
||||||
Box::new(self.repo_url.clone()),
|
|
||||||
e,
|
|
||||||
))
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(e) => return Err(errors::DownloadError::ParseManifest(e)),
|
|
||||||
},
|
|
||||||
None => None,
|
|
||||||
};
|
|
||||||
|
|
||||||
let fs = PackageFS::CAS(entries);
|
|
||||||
|
|
||||||
let target = match manifest {
|
|
||||||
Some(manifest) => manifest.target,
|
|
||||||
#[cfg(feature = "wally-compat")]
|
|
||||||
None if !pkg_ref.new_structure => {
|
|
||||||
let tempdir = tempfile::tempdir()?;
|
|
||||||
fs.write_to(tempdir.path(), project.cas_dir(), false)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
crate::source::wally::compat_util::get_target(project, &tempdir).await?
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
return Err(errors::DownloadError::NoManifest(Box::new(
|
|
||||||
self.repo_url.clone(),
|
|
||||||
)))
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
if let Some(parent) = index_file.parent() {
|
if let Some(parent) = index_file.parent() {
|
||||||
fs::create_dir_all(parent).await?;
|
fs::create_dir_all(parent).await?;
|
||||||
|
@ -573,7 +501,27 @@ impl PackageSource for GitPackageSource {
|
||||||
.await
|
.await
|
||||||
.map_err(errors::DownloadError::Io)?;
|
.map_err(errors::DownloadError::Io)?;
|
||||||
|
|
||||||
Ok((fs, target))
|
reporter.report_done();
|
||||||
|
|
||||||
|
Ok(fs)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all, level = "debug")]
|
||||||
|
async fn get_target(
|
||||||
|
&self,
|
||||||
|
_pkg_ref: &Self::Ref,
|
||||||
|
options: &GetTargetOptions,
|
||||||
|
) -> Result<Target, Self::GetTargetError> {
|
||||||
|
match deser_manifest(&options.path).await {
|
||||||
|
Ok(manifest) => Ok(manifest.target),
|
||||||
|
#[cfg(feature = "wally-compat")]
|
||||||
|
Err(crate::errors::ManifestReadError::Io(e))
|
||||||
|
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||||
|
{
|
||||||
|
get_target(options).await.map_err(Into::into)
|
||||||
|
}
|
||||||
|
Err(e) => Err(e.into()),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -734,4 +682,18 @@ pub mod errors {
|
||||||
#[error("error parsing tree_id to ObjectId for repository {0}")]
|
#[error("error parsing tree_id to ObjectId for repository {0}")]
|
||||||
ParseTreeId(Box<gix::Url>, #[source] gix::hash::decode::Error),
|
ParseTreeId(Box<gix::Url>, #[source] gix::hash::decode::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when getting a target from a Git package source
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum GetTargetError {
|
||||||
|
/// Reading the manifest failed
|
||||||
|
#[error("error reading manifest")]
|
||||||
|
ManifestRead(#[from] crate::errors::ManifestReadError),
|
||||||
|
|
||||||
|
/// An error occurred while creating a Wally target
|
||||||
|
#[cfg(feature = "wally-compat")]
|
||||||
|
#[error("error creating Wally target")]
|
||||||
|
GetTarget(#[from] crate::source::wally::compat_util::errors::GetTargetError),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -3,7 +3,7 @@ use crate::{
 names::PackageNames,
 reporters::DownloadProgressReporter,
 source::{
-fs::PackageFS, ids::VersionId, refs::PackageRefs, specifiers::DependencySpecifiers,
+fs::PackageFs, ids::VersionId, refs::PackageRefs, specifiers::DependencySpecifiers,
 traits::*,
 },
 };
@@ -64,6 +64,7 @@ impl PackageSource for PackageSources {
 type RefreshError = errors::RefreshError;
 type ResolveError = errors::ResolveError;
 type DownloadError = errors::DownloadError;
+type GetTargetError = errors::GetTargetError;
 
 async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
 match self {
@@ -174,7 +175,7 @@ impl PackageSource for PackageSources {
 &self,
 pkg_ref: &Self::Ref,
 options: &DownloadOptions<R>,
-) -> Result<(PackageFS, Target), Self::DownloadError> {
+) -> Result<PackageFs, Self::DownloadError> {
 match (self, pkg_ref) {
 (PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => {
 source.download(pkg_ref, options).await.map_err(Into::into)
@@ -200,6 +201,42 @@ impl PackageSource for PackageSources {
 _ => Err(errors::DownloadError::Mismatch),
 }
 }
+
+async fn get_target(
+&self,
+pkg_ref: &Self::Ref,
+options: &GetTargetOptions,
+) -> Result<Target, Self::GetTargetError> {
+match (self, pkg_ref) {
+(PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => source
+.get_target(pkg_ref, options)
+.await
+.map_err(Into::into),
+
+#[cfg(feature = "wally-compat")]
+(PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => source
+.get_target(pkg_ref, options)
+.await
+.map_err(Into::into),
+
+(PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => source
+.get_target(pkg_ref, options)
+.await
+.map_err(Into::into),
+
+(PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => source
+.get_target(pkg_ref, options)
+.await
+.map_err(Into::into),
+
+(PackageSources::Path(source), PackageRefs::Path(pkg_ref)) => source
+.get_target(pkg_ref, options)
+.await
+.map_err(Into::into),
+
+_ => Err(errors::GetTargetError::Mismatch),
+}
+}
 }
 
 /// Errors that can occur when interacting with a package source
@@ -291,4 +328,34 @@ pub mod errors {
 #[error("error downloading path package")]
 Path(#[from] crate::source::path::errors::DownloadError),
 }
+
+/// Errors that can occur when getting a package's target
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum GetTargetError {
+/// The package ref does not match the source (if using the CLI, this is a bug - file an issue)
+#[error("mismatched package ref for source")]
+Mismatch,
+
+/// A pesde package source failed to get the target
+#[error("error getting target for pesde package")]
+Pesde(#[from] crate::source::pesde::errors::GetTargetError),
+
+/// A Wally package source failed to get the target
+#[cfg(feature = "wally-compat")]
+#[error("error getting target for wally package")]
+Wally(#[from] crate::source::wally::errors::GetTargetError),
+
+/// A Git package source failed to get the target
+#[error("error getting target for git package")]
+Git(#[from] crate::source::git::errors::GetTargetError),
+
+/// A workspace package source failed to get the target
+#[error("error getting target for workspace package")]
+Workspace(#[from] crate::source::workspace::errors::GetTargetError),
+
+/// A path package source failed to get the target
+#[error("error getting target for path package")]
+Path(#[from] crate::source::path::errors::GetTargetError),
+}
 }
@@ -4,11 +4,11 @@ use crate::{
 names::PackageNames,
 reporters::DownloadProgressReporter,
 source::{
-fs::PackageFS,
+fs::PackageFs,
 ids::VersionId,
 path::pkg_ref::PathPackageRef,
 specifiers::DependencySpecifiers,
-traits::{DownloadOptions, PackageSource, ResolveOptions},
+traits::{DownloadOptions, GetTargetOptions, PackageSource, ResolveOptions},
 ResolveResult,
 },
 DEFAULT_INDEX_NAME,
@@ -31,6 +31,7 @@ impl PackageSource for PathPackageSource {
 type RefreshError = errors::RefreshError;
 type ResolveError = errors::ResolveError;
 type DownloadError = errors::DownloadError;
+type GetTargetError = errors::GetTargetError;
 
 #[instrument(skip_all, level = "debug")]
 async fn resolve(
@@ -103,15 +104,29 @@ impl PackageSource for PathPackageSource {
 async fn download<R: DownloadProgressReporter>(
 &self,
 pkg_ref: &Self::Ref,
-_options: &DownloadOptions<R>,
+options: &DownloadOptions<R>,
-) -> Result<(PackageFS, Target), Self::DownloadError> {
+) -> Result<PackageFs, Self::DownloadError> {
+let DownloadOptions { reporter, .. } = options;
 let manifest = deser_manifest(&pkg_ref.path).await?;
 
-Ok((
+reporter.report_done();
-PackageFS::Copy(pkg_ref.path.clone(), manifest.target.kind()),
-manifest.target,
+Ok(PackageFs::Copy(
+pkg_ref.path.clone(),
+manifest.target.kind(),
 ))
 }
+
+#[instrument(skip_all, level = "debug")]
+async fn get_target(
+&self,
+pkg_ref: &Self::Ref,
+_options: &GetTargetOptions,
+) -> Result<Target, Self::GetTargetError> {
+let manifest = deser_manifest(&pkg_ref.path).await?;
+
+Ok(manifest.target)
+}
 }
 
 /// Errors that can occur when using a path package source
@@ -149,4 +164,13 @@ pub mod errors {
 #[error("error reading manifest")]
 ManifestRead(#[from] crate::errors::ManifestReadError),
 }
+
+/// Errors that can occur when getting the target of a path package
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum GetTargetError {
+/// Reading the manifest failed
+#[error("error reading manifest")]
+ManifestRead(#[from] crate::errors::ManifestReadError),
+}
 }
@@ -18,9 +18,9 @@ use crate::{
 names::{PackageName, PackageNames},
 reporters::DownloadProgressReporter,
 source::{
-fs::{store_in_cas, FSEntry, PackageFS},
+fs::{store_in_cas, FsEntry, PackageFs},
 git_index::{read_file, root_tree, GitBasedSource},
-traits::{DownloadOptions, RefreshOptions, ResolveOptions},
+traits::{DownloadOptions, GetTargetOptions, RefreshOptions, ResolveOptions},
 DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
 },
 util::hash,
@@ -102,6 +102,7 @@ impl PackageSource for PesdePackageSource {
 type RefreshError = crate::source::git_index::errors::RefreshError;
 type ResolveError = errors::ResolveError;
 type DownloadError = errors::DownloadError;
+type GetTargetError = errors::GetTargetError;
 
 #[instrument(skip_all, level = "debug")]
 async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
@@ -170,7 +171,7 @@ impl PackageSource for PesdePackageSource {
 &self,
 pkg_ref: &Self::Ref,
 options: &DownloadOptions<R>,
-) -> Result<(PackageFS, Target), Self::DownloadError> {
+) -> Result<PackageFs, Self::DownloadError> {
 let DownloadOptions {
 project,
 reporter,
@@ -193,7 +194,10 @@ impl PackageSource for PesdePackageSource {
 pkg_ref.version,
 pkg_ref.target
 );
-return Ok((toml::from_str::<PackageFS>(&s)?, pkg_ref.target.clone()));
+
+reporter.report_done();
+
+return toml::from_str::<PackageFs>(&s).map_err(Into::into);
 }
 Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
 Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
@@ -255,7 +259,7 @@ impl PackageSource for PesdePackageSource {
 continue;
 }
 
-entries.insert(path, FSEntry::Directory);
+entries.insert(path, FsEntry::Directory);
 
 continue;
 }
@@ -264,13 +268,13 @@ impl PackageSource for PesdePackageSource {
 continue;
 }
 
-let hash = store_in_cas(project.cas_dir(), entry, |_| async { Ok(()) })
+let hash = store_in_cas(project.cas_dir(), entry)
 .await
 .map_err(errors::DownloadError::Store)?;
-entries.insert(path, FSEntry::File(hash));
+entries.insert(path, FsEntry::File(hash));
 }
 
-let fs = PackageFS::CAS(entries);
+let fs = PackageFs::CAS(entries);
 
 if let Some(parent) = index_file.parent() {
 fs::create_dir_all(parent)
@@ -284,7 +288,16 @@ impl PackageSource for PesdePackageSource {
 
 reporter.report_done();
 
-Ok((fs, pkg_ref.target.clone()))
+Ok(fs)
+}
+
+#[instrument(skip_all, level = "debug")]
+async fn get_target(
+&self,
+pkg_ref: &Self::Ref,
+_options: &GetTargetOptions,
+) -> Result<Target, Self::GetTargetError> {
+Ok(pkg_ref.target.clone())
 }
 }
@@ -569,4 +582,9 @@ pub mod errors {
 #[error("error reading index file")]
 ReadIndex(#[source] std::io::Error),
 }
+
+/// Errors that can occur when getting the target for a package from a pesde package source
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum GetTargetError {}
 }
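The cached-index fast path above stores the per-package CAS listing as TOML on disk and reads it back with toml::from_str. A tiny round-trip illustration follows, using a plain string map rather than the real PackageFs type, and assuming only the toml crate.

// Round-trip sketch: serialize a path -> hash map to TOML and read it back.
// A plain BTreeMap stands in for the real PackageFs::CAS map.
use std::collections::BTreeMap;

fn main() {
    let mut entries: BTreeMap<String, String> = BTreeMap::new();
    entries.insert(
        "lib.luau".to_string(),
        "0f343b0931126a20f133d67c2b018a3b".to_string(),
    );

    let text = toml::to_string(&entries).expect("serialize to TOML");
    let back: BTreeMap<String, String> = toml::from_str(&text).expect("parse TOML back");

    assert_eq!(entries, back);
    println!("{text}");
}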
@@ -4,13 +4,14 @@ use crate::{
 DependencyType,
 },
 reporters::DownloadProgressReporter,
-source::{DependencySpecifiers, PackageFS, PackageSources, ResolveResult},
+source::{DependencySpecifiers, PackageFs, PackageSources, ResolveResult},
 Project, RefreshedSources,
 };
 use std::{
 collections::BTreeMap,
 fmt::{Debug, Display},
 future::Future,
+path::Path,
 sync::Arc,
 };
@@ -56,6 +57,15 @@ pub struct DownloadOptions<R: DownloadProgressReporter> {
 pub reporter: Arc<R>,
 }
+
+/// Options for getting a package's Target
+#[derive(Debug, Clone)]
+pub struct GetTargetOptions {
+/// The project to get the target for
+pub project: Project,
+/// The path the package has been written to
+pub path: Arc<Path>,
+}
 
 /// A source of packages
 pub trait PackageSource: Debug {
 /// The specifier type for this source
@@ -68,6 +78,8 @@ pub trait PackageSource: Debug {
 type ResolveError: std::error::Error + Send + Sync + 'static;
 /// The error type for downloading a package from this source
 type DownloadError: std::error::Error + Send + Sync + 'static;
+/// The error type for getting a package's target from this source
+type GetTargetError: std::error::Error + Send + Sync + 'static;
 
 /// Refreshes the source
 fn refresh(
@@ -82,12 +94,19 @@ pub trait PackageSource: Debug {
 &self,
 specifier: &Self::Specifier,
 options: &ResolveOptions,
-) -> impl Future<Output = Result<ResolveResult<Self::Ref>, Self::ResolveError>>;
+) -> impl Future<Output = Result<ResolveResult<Self::Ref>, Self::ResolveError>> + Send + Sync;
 
 /// Downloads a package
 fn download<R: DownloadProgressReporter>(
 &self,
 pkg_ref: &Self::Ref,
 options: &DownloadOptions<R>,
-) -> impl Future<Output = Result<(PackageFS, Target), Self::DownloadError>>;
+) -> impl Future<Output = Result<PackageFs, Self::DownloadError>> + Send + Sync;
+
+/// Gets the target of a package
+fn get_target(
+&self,
+pkg_ref: &Self::Ref,
+options: &GetTargetOptions,
+) -> impl Future<Output = Result<Target, Self::GetTargetError>> + Send + Sync;
 }
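With the trait change above, downloading a package and computing its Target are now separate steps: download returns only the PackageFs, and get_target is called afterwards against the path the package was written to. The sketch below shows the shape of that split on a deliberately simplified trait; the names and the reduced options struct are illustrative stand-ins, not the real pesde definitions (which also carry the Project handle, progress reporting, and more associated types), and it assumes Rust 1.75+ for impl-Trait returns in traits.

// Simplified sketch of the download / get_target split. Stand-in types only.
use std::{future::Future, path::Path, sync::Arc};

struct PackageFs; // stand-in for the real package file-system listing
struct Target; // stand-in for the resolved build target

/// Reduced version of GetTargetOptions: just the path the package was written to.
struct GetTargetOptions {
    path: Arc<Path>,
}

trait SourceSketch {
    type DownloadError: std::error::Error + Send + Sync + 'static;
    type GetTargetError: std::error::Error + Send + Sync + 'static;

    /// Produce the package contents only; no Target is computed here.
    fn download(&self) -> impl Future<Output = Result<PackageFs, Self::DownloadError>> + Send;

    /// Compute the Target later, from wherever the contents were written.
    fn get_target(
        &self,
        options: &GetTargetOptions,
    ) -> impl Future<Output = Result<Target, Self::GetTargetError>> + Send;
}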
@@ -2,12 +2,14 @@ use std::path::Path;
 
 use relative_path::RelativePathBuf;
 use serde::Deserialize;
-use tempfile::TempDir;
 
 use crate::{
 manifest::target::Target,
 scripts::{execute_script, ExecuteScriptHooks, ScriptName},
-source::wally::manifest::{Realm, WallyManifest},
+source::{
+traits::GetTargetOptions,
+wally::manifest::{Realm, WallyManifest},
+},
 Project, LINK_LIB_NO_FILE_FOUND,
 };
 use fs_err::tokio as fs;
@@ -54,17 +56,18 @@ async fn find_lib_path(
 
 pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";
 
-#[instrument(skip(project, tempdir), level = "debug")]
+#[instrument(skip_all, level = "debug")]
 pub(crate) async fn get_target(
-project: &Project,
+options: &GetTargetOptions,
-tempdir: &TempDir,
 ) -> Result<Target, errors::GetTargetError> {
-let lib = find_lib_path(project, tempdir.path())
+let GetTargetOptions { project, path } = options;
+
+let lib = find_lib_path(project, path)
 .await?
 .or_else(|| Some(RelativePathBuf::from(LINK_LIB_NO_FILE_FOUND)));
 let build_files = Default::default();
 
-let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
+let manifest = path.join(WALLY_MANIFEST_FILE_NAME);
 let manifest = fs::read_to_string(&manifest).await?;
 let manifest: WallyManifest = toml::from_str(&manifest)?;
@ -3,10 +3,12 @@ use crate::{
|
||||||
names::PackageNames,
|
names::PackageNames,
|
||||||
reporters::DownloadProgressReporter,
|
reporters::DownloadProgressReporter,
|
||||||
source::{
|
source::{
|
||||||
fs::{store_in_cas, FSEntry, PackageFS},
|
fs::{store_in_cas, FsEntry, PackageFs},
|
||||||
git_index::{read_file, root_tree, GitBasedSource},
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
ids::VersionId,
|
ids::VersionId,
|
||||||
traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
|
traits::{
|
||||||
|
DownloadOptions, GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions,
|
||||||
|
},
|
||||||
wally::{
|
wally::{
|
||||||
compat_util::get_target,
|
compat_util::get_target,
|
||||||
manifest::{Realm, WallyManifest},
|
manifest::{Realm, WallyManifest},
|
||||||
|
@ -23,13 +25,8 @@ use gix::Url;
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use reqwest::header::AUTHORIZATION;
|
use reqwest::header::AUTHORIZATION;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
|
use std::{collections::BTreeMap, path::PathBuf};
|
||||||
use tempfile::tempdir;
|
use tokio::{io::AsyncReadExt, task::spawn_blocking};
|
||||||
use tokio::{
|
|
||||||
io::{AsyncReadExt, AsyncWriteExt},
|
|
||||||
sync::Mutex,
|
|
||||||
task::spawn_blocking,
|
|
||||||
};
|
|
||||||
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::StreamReader};
|
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::StreamReader};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
|
@ -96,6 +93,7 @@ impl PackageSource for WallyPackageSource {
|
||||||
type RefreshError = crate::source::git_index::errors::RefreshError;
|
type RefreshError = crate::source::git_index::errors::RefreshError;
|
||||||
type ResolveError = errors::ResolveError;
|
type ResolveError = errors::ResolveError;
|
||||||
type DownloadError = errors::DownloadError;
|
type DownloadError = errors::DownloadError;
|
||||||
|
type GetTargetError = errors::GetTargetError;
|
||||||
|
|
||||||
#[instrument(skip_all, level = "debug")]
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
|
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
|
||||||
|
@ -108,24 +106,36 @@ impl PackageSource for WallyPackageSource {
|
||||||
specifier: &Self::Specifier,
|
specifier: &Self::Specifier,
|
||||||
options: &ResolveOptions,
|
options: &ResolveOptions,
|
||||||
) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
|
) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
|
||||||
|
async fn inner(
|
||||||
|
source: &WallyPackageSource,
|
||||||
|
specifier: &specifier::WallyDependencySpecifier,
|
||||||
|
options: &ResolveOptions,
|
||||||
|
) -> Result<ResolveResult<WallyPackageRef>, errors::ResolveError> {
|
||||||
let ResolveOptions {
|
let ResolveOptions {
|
||||||
project,
|
project,
|
||||||
refreshed_sources,
|
refreshed_sources,
|
||||||
..
|
..
|
||||||
} = options;
|
} = options;
|
||||||
|
|
||||||
let repo = gix::open(self.path(project)).map_err(Box::new)?;
|
let Some(string) = ({
|
||||||
|
let repo = gix::open(source.path(project)).map_err(Box::new)?;
|
||||||
let tree = root_tree(&repo).map_err(Box::new)?;
|
let tree = root_tree(&repo).map_err(Box::new)?;
|
||||||
let (scope, name) = specifier.name.as_str();
|
let (scope, name) = specifier.name.as_str();
|
||||||
let string = match read_file(&tree, [scope, name]) {
|
match read_file(&tree, [scope, name]) {
|
||||||
Ok(Some(s)) => s,
|
Ok(string) => string,
|
||||||
Ok(None) => {
|
Err(e) => {
|
||||||
|
return Err(errors::ResolveError::Read(
|
||||||
|
specifier.name.to_string(),
|
||||||
|
Box::new(e),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}) else {
|
||||||
tracing::debug!(
|
tracing::debug!(
|
||||||
"{} not found in wally registry. searching in backup registries",
|
"{} not found in wally registry. searching in backup registries",
|
||||||
specifier.name
|
specifier.name
|
||||||
);
|
);
|
||||||
|
let config = source.config(project).await.map_err(Box::new)?;
|
||||||
let config = self.config(project).await.map_err(Box::new)?;
|
|
||||||
for registry in config.fallback_registries {
|
for registry in config.fallback_registries {
|
||||||
let source = WallyPackageSource::new(registry);
|
let source = WallyPackageSource::new(registry);
|
||||||
match refreshed_sources
|
match refreshed_sources
|
||||||
|
@ -139,12 +149,12 @@ impl PackageSource for WallyPackageSource {
|
||||||
{
|
{
|
||||||
Ok(()) => {}
|
Ok(()) => {}
|
||||||
Err(super::errors::RefreshError::Wally(e)) => {
|
Err(super::errors::RefreshError::Wally(e)) => {
|
||||||
return Err(Self::ResolveError::Refresh(Box::new(e)));
|
return Err(errors::ResolveError::Refresh(Box::new(e)));
|
||||||
}
|
}
|
||||||
Err(e) => unreachable!("unexpected error: {e:?}"),
|
Err(e) => panic!("unexpected error: {e:?}"),
|
||||||
}
|
}
|
||||||
|
|
||||||
match Box::pin(source.resolve(specifier, options)).await {
|
match Box::pin(inner(&source, specifier, options)).await {
|
||||||
Ok((name, results)) => {
|
Ok((name, results)) => {
|
||||||
tracing::debug!("found {name} in backup registry {}", source.repo_url);
|
tracing::debug!("found {name} in backup registry {}", source.repo_url);
|
||||||
return Ok((name, results));
|
return Ok((name, results));
|
||||||
|
@ -158,21 +168,14 @@ impl PackageSource for WallyPackageSource {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return Err(Self::ResolveError::NotFound(specifier.name.to_string()));
|
return Err(errors::ResolveError::NotFound(specifier.name.to_string()));
|
||||||
}
|
|
||||||
Err(e) => {
|
|
||||||
return Err(Self::ResolveError::Read(
|
|
||||||
specifier.name.to_string(),
|
|
||||||
Box::new(e),
|
|
||||||
))
|
|
||||||
}
|
|
||||||
};
|
};
|
||||||
|
|
||||||
let entries: Vec<WallyManifest> = string
|
let entries: Vec<WallyManifest> = string
|
||||||
.lines()
|
.lines()
|
||||||
.map(serde_json::from_str)
|
.map(serde_json::from_str)
|
||||||
.collect::<Result<_, _>>()
|
.collect::<Result<_, _>>()
|
||||||
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
|
.map_err(|e| errors::ResolveError::Parse(specifier.name.to_string(), e))?;
|
||||||
|
|
||||||
tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
|
tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
|
||||||
|
|
||||||
|
@ -192,24 +195,27 @@ impl PackageSource for WallyPackageSource {
|
||||||
),
|
),
|
||||||
WallyPackageRef {
|
WallyPackageRef {
|
||||||
name: specifier.name.clone(),
|
name: specifier.name.clone(),
|
||||||
index_url: self.repo_url.clone(),
|
index_url: source.repo_url.clone(),
|
||||||
dependencies: manifest.all_dependencies().map_err(|e| {
|
dependencies: manifest.all_dependencies().map_err(|e| {
|
||||||
Self::ResolveError::AllDependencies(specifier.to_string(), e)
|
errors::ResolveError::AllDependencies(specifier.to_string(), e)
|
||||||
})?,
|
})?,
|
||||||
version: manifest.package.version,
|
version: manifest.package.version,
|
||||||
},
|
},
|
||||||
))
|
))
|
||||||
})
|
})
|
||||||
.collect::<Result<_, Self::ResolveError>>()?,
|
.collect::<Result<_, errors::ResolveError>>()?,
|
||||||
))
|
))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
inner(self, specifier, options).await
|
||||||
|
}
|
||||||
|
|
||||||
#[instrument(skip_all, level = "debug")]
|
#[instrument(skip_all, level = "debug")]
|
||||||
async fn download<R: DownloadProgressReporter>(
|
async fn download<R: DownloadProgressReporter>(
|
||||||
&self,
|
&self,
|
||||||
pkg_ref: &Self::Ref,
|
pkg_ref: &Self::Ref,
|
||||||
options: &DownloadOptions<R>,
|
options: &DownloadOptions<R>,
|
||||||
) -> Result<(PackageFS, Target), Self::DownloadError> {
|
) -> Result<PackageFs, Self::DownloadError> {
|
||||||
let DownloadOptions {
|
let DownloadOptions {
|
||||||
project,
|
project,
|
||||||
reqwest,
|
reqwest,
|
||||||
|
@ -223,7 +229,7 @@ impl PackageSource for WallyPackageSource {
|
||||||
.join(pkg_ref.name.escaped())
|
.join(pkg_ref.name.escaped())
|
||||||
.join(pkg_ref.version.to_string());
|
.join(pkg_ref.version.to_string());
|
||||||
|
|
||||||
let tempdir = match fs::read_to_string(&index_file).await {
|
match fs::read_to_string(&index_file).await {
|
||||||
Ok(s) => {
|
Ok(s) => {
|
||||||
tracing::debug!(
|
tracing::debug!(
|
||||||
"using cached index file for package {}@{}",
|
"using cached index file for package {}@{}",
|
||||||
|
@ -231,14 +237,11 @@ impl PackageSource for WallyPackageSource {
|
||||||
pkg_ref.version
|
pkg_ref.version
|
||||||
);
|
);
|
||||||
|
|
||||||
let tempdir = tempdir()?;
|
reporter.report_done();
|
||||||
let fs = toml::from_str::<PackageFS>(&s)?;
|
|
||||||
|
|
||||||
fs.write_to(&tempdir, project.cas_dir(), false).await?;
|
return toml::from_str::<PackageFs>(&s).map_err(Into::into);
|
||||||
|
|
||||||
return Ok((fs, get_target(project, &tempdir).await?));
|
|
||||||
}
|
}
|
||||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => tempdir()?,
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
|
Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -308,30 +311,19 @@ impl PackageSource for WallyPackageSource {
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let path = relative_path.to_path(tempdir.path());
|
|
||||||
|
|
||||||
if is_dir {
|
if is_dir {
|
||||||
fs::create_dir_all(&path).await?;
|
entries.insert(relative_path, FsEntry::Directory);
|
||||||
entries.insert(relative_path, FSEntry::Directory);
|
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
let entry_reader = archive.reader_without_entry(index).await?;
|
let entry_reader = archive.reader_without_entry(index).await?;
|
||||||
if let Some(parent) = path.parent() {
|
|
||||||
fs::create_dir_all(parent).await?;
|
let hash = store_in_cas(project.cas_dir(), entry_reader.compat()).await?;
|
||||||
|
|
||||||
|
entries.insert(relative_path, FsEntry::File(hash));
|
||||||
}
|
}
|
||||||
|
|
||||||
let writer = Arc::new(Mutex::new(fs::File::create(&path).await?));
|
let fs = PackageFs::CAS(entries);
|
||||||
let hash = store_in_cas(project.cas_dir(), entry_reader.compat(), |bytes| {
|
|
||||||
let writer = writer.clone();
|
|
||||||
async move { writer.lock().await.write_all(&bytes).await }
|
|
||||||
})
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
entries.insert(relative_path, FSEntry::File(hash));
|
|
||||||
}
|
|
||||||
|
|
||||||
let fs = PackageFS::CAS(entries);
|
|
||||||
|
|
||||||
if let Some(parent) = index_file.parent() {
|
if let Some(parent) = index_file.parent() {
|
||||||
fs::create_dir_all(parent)
|
fs::create_dir_all(parent)
|
||||||
|
@ -345,7 +337,16 @@ impl PackageSource for WallyPackageSource {
|
||||||
|
|
||||||
reporter.report_done();
|
reporter.report_done();
|
||||||
|
|
||||||
Ok((fs, get_target(project, &tempdir).await?))
|
Ok(fs)
|
||||||
|
}
|
||||||
|
|
||||||
|
#[instrument(skip_all, level = "debug")]
|
||||||
|
async fn get_target(
|
||||||
|
&self,
|
||||||
|
_pkg_ref: &Self::Ref,
|
||||||
|
options: &GetTargetOptions,
|
||||||
|
) -> Result<Target, Self::GetTargetError> {
|
||||||
|
get_target(options).await.map_err(Into::into)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -472,4 +473,13 @@ pub mod errors {
|
||||||
#[error("error writing index file")]
|
#[error("error writing index file")]
|
||||||
WriteIndex(#[source] std::io::Error),
|
WriteIndex(#[source] std::io::Error),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when getting a target from a Wally package source
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
#[non_exhaustive]
|
||||||
|
pub enum GetTargetError {
|
||||||
|
/// Error getting target
|
||||||
|
#[error("error getting target")]
|
||||||
|
GetTarget(#[from] crate::source::wally::compat_util::errors::GetTargetError),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@@ -4,10 +4,10 @@ use crate::{
 names::PackageNames,
 reporters::DownloadProgressReporter,
 source::{
-fs::PackageFS,
+fs::PackageFs,
 ids::VersionId,
 specifiers::DependencySpecifiers,
-traits::{DownloadOptions, PackageSource, ResolveOptions},
+traits::{DownloadOptions, GetTargetOptions, PackageSource, ResolveOptions},
 workspace::pkg_ref::WorkspacePackageRef,
 ResolveResult,
 },
@@ -34,6 +34,7 @@ impl PackageSource for WorkspacePackageSource {
 type RefreshError = errors::RefreshError;
 type ResolveError = errors::ResolveError;
 type DownloadError = errors::DownloadError;
+type GetTargetError = errors::GetTargetError;
 
 #[instrument(skip_all, level = "debug")]
 async fn resolve(
@@ -133,18 +134,35 @@ impl PackageSource for WorkspacePackageSource {
 &self,
 pkg_ref: &Self::Ref,
 options: &DownloadOptions<R>,
-) -> Result<(PackageFS, Target), Self::DownloadError> {
+) -> Result<PackageFs, Self::DownloadError> {
-let DownloadOptions { project, .. } = options;
+let DownloadOptions {
+project, reporter, ..
+} = options;
 
 let path = pkg_ref
 .path
 .to_path(project.workspace_dir().unwrap_or(project.package_dir()));
 let manifest = deser_manifest(&path).await?;
 
-Ok((
+reporter.report_done();
-PackageFS::Copy(path, manifest.target.kind()),
-manifest.target,
+Ok(PackageFs::Copy(path, manifest.target.kind()))
-))
+}
+
+#[instrument(skip_all, level = "debug")]
+async fn get_target(
+&self,
+pkg_ref: &Self::Ref,
+options: &GetTargetOptions,
+) -> Result<Target, Self::GetTargetError> {
+let GetTargetOptions { project, .. } = options;
+
+let path = pkg_ref
+.path
+.to_path(project.workspace_dir().unwrap_or(project.package_dir()));
+let manifest = deser_manifest(&path).await?;
+
+Ok(manifest.target)
 }
 }
@@ -187,4 +205,13 @@ pub mod errors {
 #[error("error reading manifest")]
 ManifestRead(#[from] crate::errors::ManifestReadError),
 }
+
+/// Errors that can occur when getting the target of a workspace package
+#[derive(Debug, Error)]
+#[non_exhaustive]
+pub enum GetTargetError {
+/// Reading the manifest failed
+#[error("error reading manifest")]
+ManifestRead(#[from] crate::errors::ManifestReadError),
+}
 }