Mirror of https://github.com/pesde-pkg/pesde.git (synced 2025-04-08 21:00:56 +01:00)

commit d0169976cd (parent 6a8dfe0ba3)
feat: store dependency over downloaded graphs

26 changed files with 829 additions and 562 deletions
CHANGELOG.md

@@ -12,10 +12,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 - Support ignoring parse errors in Luau files by @daimond113
 - Add path dependencies by @daimond113
 - Inherit pesde-managed scripts from workspace root by @daimond113
 - Allow using binaries from workspace root in member packages by @daimond113
 
 ### Changed
 - Change handling of graphs to a flat structure by @daimond113
+- Store dependency over downloaded graphs in the lockfile by @daimond113
 
 ### Removed
 - Remove old includes format compatibility by @daimond113
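The two "Changed" entries above are the core of this commit: the lockfile now stores the plain dependency graph instead of the downloaded graph, so per-package targets are no longer persisted and are re-derived from the written package contents when needed. A minimal sketch of that reduction, assuming the `pesde` crate with the `graph` module this commit introduces (see `src/graph.rs` further down):

```rust
use pesde::graph::{ConvertableGraph, DependencyGraph, DownloadedGraph};

/// Reduce a downloaded graph to the node-only graph the lockfile stores.
/// Each node's `Option<Target>` is dropped; consumers recover targets later
/// through `PackageSource::get_target` on the written package contents.
fn graph_for_lockfile(downloaded: DownloadedGraph) -> DependencyGraph {
	downloaded.convert()
}
```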

Cargo.toml

@@ -29,6 +29,7 @@ bin = [
 	"tokio/rt",
 	"tokio/rt-multi-thread",
 	"tokio/macros",
+	"dep:tempfile",
 ]
 wally-compat = ["dep:async_zip", "dep:serde_json"]
 patches = ["dep:git2"]
@@ -56,7 +57,7 @@ pathdiff = "0.2.3"
 relative-path = { version = "1.9.3", features = ["serde"] }
 tracing = { version = "0.1.41", features = ["attributes"] }
 thiserror = "2.0.7"
-tokio = { version = "1.42.0", features = ["process"] }
+tokio = { version = "1.42.0", features = ["process", "macros"] }
 tokio-util = "0.7.13"
 async-stream = "0.3.6"
 futures = "0.3.31"
@@ -64,7 +65,6 @@ full_moon = { version = "1.1.2", features = ["luau"] }
 url = { version = "2.5.4", features = ["serde"] }
 chrono = { version = "0.4.39", features = ["serde"] }
 sha2 = "0.10.8"
-tempfile = "3.14.0"
 wax = { version = "0.6.0", default-features = false }
 fs-err = { version = "3.0.0", features = ["tokio"] }
 
@@ -86,6 +86,7 @@ dirs = { version = "5.0.1", optional = true }
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
 indicatif = { version = "0.17.9", optional = true }
 inquire = { version = "0.7.5", optional = true }
+tempfile = { version = "3.14.0", optional = true }
 
 [target.'cfg(target_os = "windows")'.dependencies]
 winreg = { version = "0.52.0", optional = true }
@ -15,7 +15,9 @@ use pesde::{
|
|||
names::PackageName,
|
||||
source::{
|
||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||
traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
|
||||
traits::{
|
||||
DownloadOptions, GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions,
|
||||
},
|
||||
PackageSources,
|
||||
},
|
||||
Project, RefreshedSources,
|
||||
|
@ -139,7 +141,7 @@ impl ExecuteCommand {
|
|||
project.auth_config().clone(),
|
||||
);
|
||||
|
||||
let (fs, target) = source
|
||||
let fs = source
|
||||
.download(
|
||||
&pkg_ref,
|
||||
&DownloadOptions {
|
||||
|
@ -150,12 +152,24 @@ impl ExecuteCommand {
|
|||
)
|
||||
.await
|
||||
.context("failed to download package")?;
|
||||
let bin_path = target.bin_path().context("package has no binary export")?;
|
||||
|
||||
fs.write_to(tempdir.path(), project.cas_dir(), true)
|
||||
.await
|
||||
.context("failed to write package contents")?;
|
||||
|
||||
let target = source
|
||||
.get_target(
|
||||
&pkg_ref,
|
||||
&GetTargetOptions {
|
||||
project: project.clone(),
|
||||
path: Arc::from(tempdir.path()),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("failed to get target")?;
|
||||
|
||||
let bin_path = target.bin_path().context("package has no binary export")?;
|
||||
|
||||
let graph = project
|
||||
.dependency_graph(None, refreshed_sources.clone(), true)
|
||||
.await
|
||||
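The execute flow above illustrates the new split: `download` returns only the package filesystem, the contents are written to a temporary directory, and the target is then obtained separately through `get_target`. A hedged sketch of that last step as a standalone helper; it assumes `GetTargetOptions` has exactly the two fields constructed in this diff (`project` and `path`) and that `get_target` can be called generically over `PackageSource`:

```rust
use std::{path::Path, sync::Arc};

use pesde::{
	manifest::target::Target,
	source::traits::{GetTargetOptions, PackageSource},
	Project,
};

/// Derive a package's target from contents already written at `path`.
async fn target_of<S: PackageSource>(
	source: &S,
	pkg_ref: &S::Ref,
	project: Project,
	path: Arc<Path>,
) -> Result<Target, S::GetTargetError> {
	source
		.get_target(pkg_ref, &GetTargetOptions { project, path })
		.await
}
```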
|
|
|
@ -1,7 +1,6 @@
|
|||
use crate::cli::up_to_date_lockfile;
|
||||
use anyhow::Context;
|
||||
use clap::Args;
|
||||
use futures::future::try_join_all;
|
||||
use pesde::{
|
||||
source::{
|
||||
specifiers::DependencySpecifiers,
|
||||
|
@ -10,6 +9,7 @@ use pesde::{
|
|||
Project, RefreshedSources,
|
||||
};
|
||||
use semver::VersionReq;
|
||||
use tokio::task::JoinSet;
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
pub struct OutdatedCommand {
|
||||
|
@ -38,81 +38,90 @@ impl OutdatedCommand {
|
|||
|
||||
let refreshed_sources = RefreshedSources::new();
|
||||
|
||||
if try_join_all(graph.into_iter().map(|(current_id, node)| {
|
||||
let project = project.clone();
|
||||
let refreshed_sources = refreshed_sources.clone();
|
||||
async move {
|
||||
let Some((alias, mut specifier, _)) = node.node.direct else {
|
||||
return Ok::<bool, anyhow::Error>(true);
|
||||
};
|
||||
|
||||
if matches!(
|
||||
specifier,
|
||||
DependencySpecifiers::Git(_)
|
||||
| DependencySpecifiers::Workspace(_)
|
||||
| DependencySpecifiers::Path(_)
|
||||
) {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let source = node.node.pkg_ref.source();
|
||||
refreshed_sources
|
||||
.refresh(
|
||||
&source,
|
||||
&RefreshOptions {
|
||||
project: project.clone(),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
if !self.strict {
|
||||
match &mut specifier {
|
||||
DependencySpecifiers::Pesde(spec) => {
|
||||
spec.version = VersionReq::STAR;
|
||||
}
|
||||
#[cfg(feature = "wally-compat")]
|
||||
DependencySpecifiers::Wally(spec) => {
|
||||
spec.version = VersionReq::STAR;
|
||||
}
|
||||
DependencySpecifiers::Git(_) => {}
|
||||
DependencySpecifiers::Workspace(_) => {}
|
||||
DependencySpecifiers::Path(_) => {}
|
||||
let mut tasks = graph
|
||||
.into_iter()
|
||||
.map(|(current_id, node)| {
|
||||
let project = project.clone();
|
||||
let refreshed_sources = refreshed_sources.clone();
|
||||
async move {
|
||||
let Some((alias, mut specifier, _)) = node.direct else {
|
||||
return Ok::<bool, anyhow::Error>(true);
|
||||
};
|
||||
|
||||
if matches!(
|
||||
specifier,
|
||||
DependencySpecifiers::Git(_)
|
||||
| DependencySpecifiers::Workspace(_)
|
||||
| DependencySpecifiers::Path(_)
|
||||
) {
|
||||
return Ok(true);
|
||||
}
|
||||
|
||||
let source = node.pkg_ref.source();
|
||||
refreshed_sources
|
||||
.refresh(
|
||||
&source,
|
||||
&RefreshOptions {
|
||||
project: project.clone(),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
if !self.strict {
|
||||
match &mut specifier {
|
||||
DependencySpecifiers::Pesde(spec) => {
|
||||
spec.version = VersionReq::STAR;
|
||||
}
|
||||
#[cfg(feature = "wally-compat")]
|
||||
DependencySpecifiers::Wally(spec) => {
|
||||
spec.version = VersionReq::STAR;
|
||||
}
|
||||
DependencySpecifiers::Git(_) => {}
|
||||
DependencySpecifiers::Workspace(_) => {}
|
||||
DependencySpecifiers::Path(_) => {}
|
||||
};
|
||||
}
|
||||
|
||||
let version_id = source
|
||||
.resolve(
|
||||
&specifier,
|
||||
&ResolveOptions {
|
||||
project: project.clone(),
|
||||
target: manifest_target_kind,
|
||||
refreshed_sources: refreshed_sources.clone(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("failed to resolve package versions")?
|
||||
.1
|
||||
.pop_last()
|
||||
.map(|(v_id, _)| v_id)
|
||||
.with_context(|| format!("no versions of {specifier} found"))?;
|
||||
|
||||
if version_id != *current_id.version_id() {
|
||||
println!(
|
||||
"{} ({alias}) {} -> {version_id}",
|
||||
current_id.name(),
|
||||
current_id.version_id(),
|
||||
);
|
||||
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
}
|
||||
})
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
let version_id = source
|
||||
.resolve(
|
||||
&specifier,
|
||||
&ResolveOptions {
|
||||
project: project.clone(),
|
||||
target: manifest_target_kind,
|
||||
refreshed_sources: refreshed_sources.clone(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("failed to resolve package versions")?
|
||||
.1
|
||||
.pop_last()
|
||||
.map(|(v_id, _)| v_id)
|
||||
.with_context(|| format!("no versions of {specifier} found"))?;
|
||||
let mut all_up_to_date = true;
|
||||
|
||||
if version_id != *current_id.version_id() {
|
||||
println!(
|
||||
"{} ({alias}) {} -> {version_id}",
|
||||
current_id.name(),
|
||||
current_id.version_id(),
|
||||
);
|
||||
|
||||
return Ok(false);
|
||||
}
|
||||
|
||||
Ok(true)
|
||||
while let Some(task) = tasks.join_next().await {
|
||||
if !task.unwrap()? {
|
||||
all_up_to_date = false;
|
||||
}
|
||||
}))
|
||||
.await?
|
||||
.into_iter()
|
||||
.all(|b| b)
|
||||
{
|
||||
}
|
||||
|
||||
if all_up_to_date {
|
||||
println!("all packages are up to date");
|
||||
}
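The outdated check now collects its per-package futures into a tokio `JoinSet` and drains results with `join_next`, instead of awaiting `try_join_all`. A self-contained sketch of that pattern; `is_up_to_date` is a hypothetical stand-in for the real per-package resolution:

```rust
use tokio::task::JoinSet;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
	// Hypothetical per-item check standing in for the real per-package future.
	async fn is_up_to_date(n: u32) -> anyhow::Result<bool> {
		Ok(n % 2 == 0)
	}

	// Collecting an iterator of futures spawns each one onto the JoinSet.
	let mut tasks = (0..4u32).map(is_up_to_date).collect::<JoinSet<_>>();

	let mut all_up_to_date = true;
	while let Some(res) = tasks.join_next().await {
		// join_next() wraps the task output in a join result; the unwrap
		// mirrors the CLI code, which treats a panicked task as a bug.
		if !res.unwrap()? {
			all_up_to_date = false;
		}
	}

	println!("all packages are up to date: {all_up_to_date}");
	Ok(())
}
```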
|
||||
|
||||
|
|
|
@ -33,11 +33,14 @@ impl PatchCommand {
|
|||
|
||||
let node = graph.get(&id).context("package not found in graph")?;
|
||||
|
||||
if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) {
|
||||
anyhow::bail!("cannot patch a workspace package")
|
||||
if matches!(
|
||||
node.pkg_ref,
|
||||
PackageRefs::Workspace(_) | PackageRefs::Path(_)
|
||||
) {
|
||||
anyhow::bail!("cannot patch a workspace or a path package")
|
||||
}
|
||||
|
||||
let source = node.node.pkg_ref.source();
|
||||
let source = node.pkg_ref.source();
|
||||
|
||||
let directory = project
|
||||
.data_dir()
|
||||
|
@ -49,7 +52,7 @@ impl PatchCommand {
|
|||
|
||||
source
|
||||
.download(
|
||||
&node.node.pkg_ref,
|
||||
&node.pkg_ref,
|
||||
&DownloadOptions {
|
||||
project: project.clone(),
|
||||
reqwest,
|
||||
|
@ -57,7 +60,6 @@ impl PatchCommand {
|
|||
},
|
||||
)
|
||||
.await?
|
||||
.0
|
||||
.write_to(&directory, project.cas_dir(), false)
|
||||
.await
|
||||
.context("failed to write package contents")?;
|
||||
|
|
|
@ -12,20 +12,23 @@ use pesde::{
|
|||
git_index::GitBasedSource,
|
||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||
specifiers::DependencySpecifiers,
|
||||
traits::{PackageSource, RefreshOptions, ResolveOptions},
|
||||
traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions, ResolveOptions},
|
||||
workspace::{
|
||||
specifier::{VersionType, VersionTypeOrReq},
|
||||
WorkspacePackageSource,
|
||||
},
|
||||
PackageSources, IGNORED_DIRS, IGNORED_FILES,
|
||||
},
|
||||
Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
|
||||
Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
|
||||
};
|
||||
use reqwest::{header::AUTHORIZATION, StatusCode};
|
||||
use semver::VersionReq;
|
||||
use std::path::PathBuf;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use tempfile::Builder;
|
||||
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
|
||||
use tokio::{
|
||||
io::{AsyncSeekExt, AsyncWriteExt},
|
||||
task::JoinSet,
|
||||
};
|
||||
|
||||
#[derive(Debug, Args, Clone)]
|
||||
pub struct PublishCommand {
|
||||
|
@ -73,6 +76,7 @@ impl PublishCommand {
|
|||
project: &Project,
|
||||
reqwest: reqwest::Client,
|
||||
is_root: bool,
|
||||
refreshed_sources: &RefreshedSources,
|
||||
) -> anyhow::Result<()> {
|
||||
let mut manifest = project
|
||||
.deser_manifest()
|
||||
|
@ -111,16 +115,63 @@ impl PublishCommand {
|
|||
|
||||
match up_to_date_lockfile(project).await? {
|
||||
Some(lockfile) => {
|
||||
if lockfile
|
||||
let mut tasks = lockfile
|
||||
.graph
|
||||
.values()
|
||||
.filter_map(|node| node.node.direct.as_ref().map(|_| node))
|
||||
.any(|node| {
|
||||
node.target.build_files().is_none()
|
||||
&& !matches!(node.node.resolved_ty, DependencyType::Dev)
|
||||
.iter()
|
||||
.filter(|(_, node)| node.direct.is_some())
|
||||
.map(|(id, node)| {
|
||||
let project = project.clone();
|
||||
let base_folder = manifest
|
||||
.target
|
||||
.kind()
|
||||
.packages_folder(id.version_id().target());
|
||||
let container_folder = node.container_folder(
|
||||
&project
|
||||
.package_dir()
|
||||
.join(base_folder)
|
||||
.join(PACKAGES_CONTAINER_NAME),
|
||||
id,
|
||||
);
|
||||
|
||||
let node = node.clone();
|
||||
let refreshed_sources = refreshed_sources.clone();
|
||||
|
||||
async move {
|
||||
let source = node.pkg_ref.source();
|
||||
refreshed_sources
|
||||
.refresh(
|
||||
&source,
|
||||
&RefreshOptions {
|
||||
project: project.clone(),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("failed to refresh source")?;
|
||||
let target = source
|
||||
.get_target(
|
||||
&node.pkg_ref,
|
||||
&GetTargetOptions {
|
||||
project,
|
||||
path: Arc::from(container_folder),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok::<_, anyhow::Error>(
|
||||
target.build_files().is_none()
|
||||
&& !matches!(node.resolved_ty, DependencyType::Dev),
|
||||
)
|
||||
}
|
||||
})
|
||||
{
|
||||
anyhow::bail!("roblox packages may not depend on non-roblox packages");
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
while let Some(result) = tasks.join_next().await {
|
||||
let result = result
|
||||
.unwrap()
|
||||
.context("failed to get target of dependency node")?;
|
||||
if result {
|
||||
anyhow::bail!("roblox packages may not depend on non-roblox packages");
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
|
@ -376,8 +427,6 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
|||
}
|
||||
}
|
||||
|
||||
let refreshed_sources = RefreshedSources::new();
|
||||
|
||||
for specifier in manifest
|
||||
.dependencies
|
||||
.values_mut()
|
||||
|
@ -693,7 +742,12 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
|||
}
|
||||
|
||||
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
|
||||
let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
|
||||
let refreshed_sources = RefreshedSources::new();
|
||||
|
||||
let result = self
|
||||
.clone()
|
||||
.run_impl(&project, reqwest.clone(), true, &refreshed_sources)
|
||||
.await;
|
||||
if project.workspace_dir().is_some() {
|
||||
return result;
|
||||
} else {
|
||||
|
@ -703,7 +757,11 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
|||
run_on_workspace_members(&project, |project| {
|
||||
let reqwest = reqwest.clone();
|
||||
let this = self.clone();
|
||||
async move { this.run_impl(&project, reqwest, false).await }
|
||||
let refreshed_sources = refreshed_sources.clone();
|
||||
async move {
|
||||
this.run_impl(&project, reqwest, false, &refreshed_sources)
|
||||
.await
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map(|_| ())
|
||||
|
|
|
@ -6,11 +6,13 @@ use pesde::{
|
|||
errors::{ManifestReadError, WorkspaceMembersError},
|
||||
linking::generator::generate_bin_linking_module,
|
||||
names::{PackageName, PackageNames},
|
||||
source::traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
|
||||
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
|
||||
};
|
||||
use relative_path::RelativePathBuf;
|
||||
use std::{
|
||||
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::Path, process::Command,
|
||||
sync::Arc,
|
||||
};
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
|
@ -75,7 +77,7 @@ impl RunCommand {
|
|||
|
||||
let mut versions = graph
|
||||
.into_iter()
|
||||
.filter(|(id, node)| *id.name() == pkg_name && node.node.direct.is_some())
|
||||
.filter(|(id, node)| *id.name() == pkg_name && node.direct.is_some())
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let (id, node) = match versions.len() {
|
||||
|
@ -84,17 +86,13 @@ impl RunCommand {
|
|||
_ => anyhow::bail!("multiple versions found. use the package's alias instead."),
|
||||
};
|
||||
|
||||
let Some(bin_path) = node.target.bin_path() else {
|
||||
anyhow::bail!("package has no bin path");
|
||||
};
|
||||
|
||||
let base_folder = project
|
||||
.deser_manifest()
|
||||
.await?
|
||||
.target
|
||||
.kind()
|
||||
.packages_folder(id.version_id().target());
|
||||
let container_folder = node.node.container_folder(
|
||||
let container_folder = node.container_folder(
|
||||
&project
|
||||
.package_dir()
|
||||
.join(base_folder)
|
||||
|
@ -102,6 +100,27 @@ impl RunCommand {
|
|||
&id,
|
||||
);
|
||||
|
||||
let source = node.pkg_ref.source();
|
||||
source
|
||||
.refresh(&RefreshOptions {
|
||||
project: project.clone(),
|
||||
})
|
||||
.await
|
||||
.context("failed to refresh source")?;
|
||||
let target = source
|
||||
.get_target(
|
||||
&node.pkg_ref,
|
||||
&GetTargetOptions {
|
||||
project,
|
||||
path: Arc::from(container_folder.as_path()),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let Some(bin_path) = target.bin_path() else {
|
||||
anyhow::bail!("package has no bin path");
|
||||
};
|
||||
|
||||
let path = bin_path.to_path(&container_folder);
|
||||
|
||||
run(&path, &path);
|
||||
|
|
|
@ -5,24 +5,24 @@ use std::{
|
|||
time::Instant,
|
||||
};
|
||||
|
||||
use crate::cli::{
|
||||
bin_dir,
|
||||
reporters::{self, CliReporter},
|
||||
resolve_overrides, run_on_workspace_members, up_to_date_lockfile,
|
||||
};
|
||||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use fs_err::tokio as fs;
|
||||
use futures::future::try_join_all;
|
||||
use pesde::{
|
||||
download_and_link::{filter_graph, DownloadAndLinkHooks, DownloadAndLinkOptions},
|
||||
lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
|
||||
graph::{ConvertableGraph, DependencyGraph, DownloadedGraph},
|
||||
lockfile::Lockfile,
|
||||
manifest::{target::TargetKind, DependencyType},
|
||||
Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
|
||||
};
|
||||
use tokio::task::JoinSet;
|
||||
|
||||
use crate::cli::{
|
||||
bin_dir,
|
||||
reporters::{self, CliReporter},
|
||||
resolve_overrides, run_on_workspace_members, up_to_date_lockfile,
|
||||
};
|
||||
|
||||
use super::files::make_executable;
|
||||
|
||||
fn bin_link_file(alias: &str) -> String {
|
||||
|
@ -68,7 +68,7 @@ impl DownloadAndLinkHooks for InstallHooks {
|
|||
) -> Result<(), Self::Error> {
|
||||
let mut tasks = downloaded_graph
|
||||
.values()
|
||||
.filter(|node| node.target.bin_path().is_some())
|
||||
.filter(|node| node.target.as_ref().is_some_and(|t| t.bin_path().is_some()))
|
||||
.filter_map(|node| node.node.direct.as_ref())
|
||||
.map(|(alias, _, _)| alias)
|
||||
.filter(|alias| {
|
||||
|
@ -237,13 +237,7 @@ pub async fn install(
|
|||
root_progress.reset();
|
||||
root_progress.set_message("resolve");
|
||||
|
||||
let old_graph = lockfile.map(|lockfile| {
|
||||
lockfile
|
||||
.graph
|
||||
.into_iter()
|
||||
.map(|(id, node)| (id, node.node))
|
||||
.collect()
|
||||
});
|
||||
let old_graph = lockfile.map(|lockfile| lockfile.graph);
|
||||
|
||||
let graph = project
|
||||
.dependency_graph(
|
||||
|
@ -285,7 +279,12 @@ pub async fn install(
|
|||
root_progress.set_message("patch");
|
||||
|
||||
project
|
||||
.apply_patches(&filter_graph(&downloaded_graph, options.prod), reporter)
|
||||
.apply_patches(
|
||||
&Arc::into_inner(filter_graph(&downloaded_graph, options.prod))
|
||||
.unwrap()
|
||||
.convert(),
|
||||
reporter,
|
||||
)
|
||||
.await?;
|
||||
}
|
||||
|
||||
|
@ -297,7 +296,7 @@ pub async fn install(
|
|||
target: manifest.target.kind(),
|
||||
overrides,
|
||||
|
||||
graph: downloaded_graph,
|
||||
graph: Arc::into_inner(graph).unwrap(),
|
||||
|
||||
workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
|
||||
};
|
||||
|
@ -330,7 +329,7 @@ pub async fn install(
|
|||
}
|
||||
|
||||
/// Prints the difference between two graphs.
|
||||
pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DownloadedGraph) {
|
||||
pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DependencyGraph) {
|
||||
let mut old_pkg_map = BTreeMap::new();
|
||||
let mut old_direct_pkg_map = BTreeMap::new();
|
||||
let mut new_pkg_map = BTreeMap::new();
|
||||
|
@ -344,9 +343,9 @@ pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: D
|
|||
}
|
||||
|
||||
for (id, node) in &new_graph {
|
||||
new_pkg_map.insert(id, &node.node);
|
||||
if node.node.direct.is_some() {
|
||||
new_direct_pkg_map.insert(id, &node.node);
|
||||
new_pkg_map.insert(id, node);
|
||||
if node.direct.is_some() {
|
||||
new_direct_pkg_map.insert(id, node);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -118,8 +118,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
|
|||
.graph
|
||||
.iter()
|
||||
.filter_map(|(_, node)| {
|
||||
node.node
|
||||
.direct
|
||||
node.direct
|
||||
.as_ref()
|
||||
.map(|(_, spec, source_ty)| (spec, source_ty))
|
||||
})
|
||||
|
@ -166,7 +165,7 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
|
|||
|
||||
impl VersionedPackageName {
|
||||
#[cfg(feature = "patches")]
|
||||
fn get(self, graph: &pesde::lockfile::DownloadedGraph) -> anyhow::Result<PackageId> {
|
||||
fn get(self, graph: &pesde::graph::DependencyGraph) -> anyhow::Result<PackageId> {
|
||||
let version_id = match self.1 {
|
||||
Some(version) => version,
|
||||
None => {
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
use crate::{
|
||||
lockfile::{DependencyGraph, DownloadedDependencyGraphNode},
|
||||
graph::{DependencyGraph, DownloadedDependencyGraphNode},
|
||||
manifest::DependencyType,
|
||||
reporters::{DownloadProgressReporter, DownloadsReporter},
|
||||
source::{
|
||||
ids::PackageId,
|
||||
traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
|
||||
traits::{DownloadOptions, GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
|
||||
},
|
||||
Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
|
||||
};
|
||||
|
@ -17,7 +17,7 @@ use tracing::{instrument, Instrument};
|
|||
|
||||
/// Options for downloading.
|
||||
#[derive(Debug)]
|
||||
pub struct DownloadGraphOptions<Reporter> {
|
||||
pub(crate) struct DownloadGraphOptions<Reporter> {
|
||||
/// The reqwest client.
|
||||
pub reqwest: reqwest::Client,
|
||||
/// The downloads reporter.
|
||||
|
@ -39,7 +39,7 @@ where
|
|||
Reporter: for<'a> DownloadsReporter<'a> + Send + Sync + 'static,
|
||||
{
|
||||
/// Creates a new download options with the given reqwest client and reporter.
|
||||
pub fn new(reqwest: reqwest::Client) -> Self {
|
||||
pub(crate) fn new(reqwest: reqwest::Client) -> Self {
|
||||
Self {
|
||||
reqwest,
|
||||
reporter: None,
|
||||
|
@ -52,37 +52,37 @@ where
|
|||
}
|
||||
|
||||
/// Sets the downloads reporter.
|
||||
pub fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
|
||||
pub(crate) fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
|
||||
self.reporter.replace(reporter.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the refreshed sources.
|
||||
pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
|
||||
pub(crate) fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
|
||||
self.refreshed_sources = refreshed_sources;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets whether to skip dev dependencies.
|
||||
pub fn prod(mut self, prod: bool) -> Self {
|
||||
pub(crate) fn prod(mut self, prod: bool) -> Self {
|
||||
self.prod = prod;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets whether to write the downloaded packages to disk.
|
||||
pub fn write(mut self, write: bool) -> Self {
|
||||
pub(crate) fn write(mut self, write: bool) -> Self {
|
||||
self.write = write;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets whether to download Wally packages.
|
||||
pub fn wally(mut self, wally: bool) -> Self {
|
||||
pub(crate) fn wally(mut self, wally: bool) -> Self {
|
||||
self.wally = wally;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the max number of concurrent network requests.
|
||||
pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
|
||||
pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
|
||||
self.network_concurrency = network_concurrency;
|
||||
self
|
||||
}
|
||||
|
@ -105,7 +105,7 @@ impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
|
|||
impl Project {
|
||||
/// Downloads a graph of dependencies.
|
||||
#[instrument(skip_all, fields(prod = options.prod, wally = options.wally, write = options.write), level = "debug")]
|
||||
pub async fn download_graph<Reporter>(
|
||||
pub(crate) async fn download_graph<Reporter>(
|
||||
&self,
|
||||
graph: &DependencyGraph,
|
||||
options: DownloadGraphOptions<Reporter>,
|
||||
|
@ -138,7 +138,7 @@ impl Project {
|
|||
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
|
||||
.filter(|(_, node)| node.pkg_ref.like_wally() == wally)
|
||||
.map(|(package_id, node)| {
|
||||
let span = tracing::info_span!("download", package_id = package_id.to_string(),);
|
||||
let span = tracing::info_span!("download", package_id = package_id.to_string());
|
||||
|
||||
let project = self.clone();
|
||||
let reqwest = reqwest.clone();
|
||||
|
@ -184,7 +184,7 @@ impl Project {
|
|||
|
||||
tracing::debug!("downloading");
|
||||
|
||||
let (fs, target) = match progress_reporter {
|
||||
let fs = match progress_reporter {
|
||||
Some(progress_reporter) => {
|
||||
source
|
||||
.download(
|
||||
|
@ -214,10 +214,25 @@ impl Project {
|
|||
|
||||
tracing::debug!("downloaded");
|
||||
|
||||
let mut target = None;
|
||||
|
||||
if write {
|
||||
if !prod || node.resolved_ty != DependencyType::Dev {
|
||||
fs.write_to(container_folder, project.cas_dir(), true)
|
||||
fs.write_to(&container_folder, project.cas_dir(), true)
|
||||
.await?;
|
||||
|
||||
target = Some(
|
||||
source
|
||||
.get_target(
|
||||
&node.pkg_ref,
|
||||
&GetTargetOptions {
|
||||
project,
|
||||
path: Arc::from(container_folder),
|
||||
},
|
||||
)
|
||||
.await
|
||||
.map_err(Box::new)?,
|
||||
);
|
||||
} else {
|
||||
tracing::debug!("skipping write to disk, dev dependency in prod mode");
|
||||
}
|
||||
|
@ -264,6 +279,10 @@ pub mod errors {
|
|||
#[error("failed to download package")]
|
||||
DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
|
||||
|
||||
/// Error getting target
|
||||
#[error("failed to get target")]
|
||||
GetTargetFailed(#[from] Box<crate::source::errors::GetTargetError>),
|
||||
|
||||
/// Error writing package contents
|
||||
#[error("failed to write package contents")]
|
||||
WriteFailed(#[source] std::io::Error),
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::{
|
||||
download::DownloadGraphOptions,
|
||||
lockfile::{DependencyGraph, DownloadedGraph},
|
||||
graph::{DependencyGraph, DownloadedGraph},
|
||||
manifest::DependencyType,
|
||||
reporters::DownloadsReporter,
|
||||
Project, RefreshedSources,
|
||||
|
@ -29,10 +29,6 @@ pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> Arc<DownloadedGraph>
|
|||
)
|
||||
}
|
||||
|
||||
/// Receiver for dependencies downloaded and linked
|
||||
pub type DownloadAndLinkReceiver =
|
||||
tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
|
||||
|
||||
/// Hooks to perform actions after certain events during download and linking.
|
||||
#[allow(unused_variables)]
|
||||
pub trait DownloadAndLinkHooks {
|
||||
|
|
src/graph.rs (new file, 98 lines)

@@ -0,0 +1,98 @@
use crate::{
	manifest::{
		target::{Target, TargetKind},
		DependencyType,
	},
	source::{
		ids::{PackageId, VersionId},
		refs::PackageRefs,
		specifiers::DependencySpecifiers,
		traits::PackageRef,
	},
};
use serde::{Deserialize, Serialize};
use std::{
	collections::BTreeMap,
	path::{Path, PathBuf},
};

/// A graph of dependencies
pub type Graph<Node> = BTreeMap<PackageId, Node>;

/// A dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNode {
	/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
	#[serde(default, skip_serializing_if = "Option::is_none")]
	pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
	/// The dependencies of the package
	#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
	pub dependencies: BTreeMap<PackageId, String>,
	/// The resolved (transformed, for example Peer -> Standard) type of the dependency
	pub resolved_ty: DependencyType,
	/// Whether the resolved type should be Peer if this isn't depended on
	#[serde(default, skip_serializing_if = "std::ops::Not::not")]
	pub is_peer: bool,
	/// The package reference
	pub pkg_ref: PackageRefs,
}

impl DependencyGraphNode {
	pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
		if self.pkg_ref.use_new_structure() {
			version_id.target().packages_folder(&project_target)
		} else {
			"..".to_string()
		}
	}

	/// Returns the folder to store the contents of the package in
	pub fn container_folder<P: AsRef<Path>>(&self, path: &P, package_id: &PackageId) -> PathBuf {
		let (name, version) = package_id.parts();

		if self.pkg_ref.like_wally() {
			return path
				.as_ref()
				.join(format!(
					"{}_{}@{}",
					package_id.name().as_str().0,
					name.as_str().1,
					version
				))
				.join(name.as_str().1);
		}

		path.as_ref()
			.join(name.escaped())
			.join(version.to_string())
			.join(name.as_str().1)
	}
}

/// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>;

/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Debug, Clone)]
pub struct DownloadedDependencyGraphNode {
	/// The target of the package
	/// None only if download was called with write = false or is a dev dependency in a prod install
	pub target: Option<Target>,
	/// The node
	pub node: DependencyGraphNode,
}

/// A graph of `DownloadedDependencyGraphNode`s
pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;

/// A trait for converting a graph to a different type of graph
pub trait ConvertableGraph<Node> {
	/// Converts the graph to a different type of graph
	fn convert(self) -> Graph<Node>;
}

impl ConvertableGraph<DependencyGraphNode> for DownloadedGraph {
	fn convert(self) -> Graph<DependencyGraphNode> {
		self.into_iter().map(|(id, node)| (id, node.node)).collect()
	}
}
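Because `target` on a downloaded node is now optional, call sites in this diff switch from `node.target.bin_path()` to guarded access. A small sketch of that pattern, assuming the `pesde` crate exposes the `graph` module added above:

```rust
use pesde::graph::DownloadedDependencyGraphNode;

/// Whether the node was written to disk and exposes a binary export.
/// `target` is `None` when download ran with `write = false`, or for a dev
/// dependency during a production install (per the doc comment above).
fn has_bin(node: &DownloadedDependencyGraphNode) -> bool {
	node.target.as_ref().is_some_and(|t| t.bin_path().is_some())
}
```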
|
@ -29,6 +29,8 @@ use wax::Pattern;
|
|||
pub mod download;
|
||||
/// Utility for downloading and linking in the correct order
|
||||
pub mod download_and_link;
|
||||
/// Graphs
|
||||
pub mod graph;
|
||||
/// Linking packages
|
||||
pub mod linking;
|
||||
/// Lockfile
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
use crate::{
|
||||
graph::{DownloadedDependencyGraphNode, DownloadedGraph},
|
||||
linking::generator::get_file_types,
|
||||
lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
|
||||
manifest::Manifest,
|
||||
scripts::{execute_script, ExecuteScriptHooks, ScriptName},
|
||||
source::{
|
||||
|
@ -30,7 +30,7 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<Pat
|
|||
}
|
||||
|
||||
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
|
||||
let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;
|
||||
let hash = store_in_cas(cas_dir, contents.as_bytes()).await?;
|
||||
|
||||
match fs::remove_file(&destination).await {
|
||||
Ok(_) => {}
|
||||
|
@ -52,10 +52,12 @@ impl ExecuteScriptHooks for LinkingExecuteScriptHooks {
|
|||
}
|
||||
}
|
||||
|
||||
type PackageTypes = HashMap<PackageId, Vec<String>>;
|
||||
|
||||
impl Project {
|
||||
/// Links the dependencies of the project
|
||||
#[instrument(skip(self, graph), level = "debug")]
|
||||
pub async fn link_dependencies(
|
||||
pub(crate) async fn link_dependencies(
|
||||
&self,
|
||||
graph: Arc<DownloadedGraph>,
|
||||
with_types: bool,
|
||||
|
@ -66,7 +68,7 @@ impl Project {
|
|||
|
||||
// step 1. link all non-wally packages (and their dependencies) temporarily without types
|
||||
// we do this separately to allow the required tools for the scripts to be installed
|
||||
self.link(&graph, &manifest, &Arc::new(Default::default()), false)
|
||||
self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
|
||||
.await?;
|
||||
|
||||
if !with_types {
|
||||
|
@ -78,14 +80,14 @@ impl Project {
|
|||
.iter()
|
||||
.map(|(package_id, node)| {
|
||||
let span =
|
||||
tracing::info_span!("extract types", package_id = package_id.to_string(),);
|
||||
tracing::info_span!("extract types", package_id = package_id.to_string());
|
||||
|
||||
let package_id = package_id.clone();
|
||||
let node = node.clone();
|
||||
let project = self.clone();
|
||||
|
||||
async move {
|
||||
let Some(lib_file) = node.target.lib_path() else {
|
||||
let Some(lib_file) = node.target.as_ref().and_then(|t| t.lib_path()) else {
|
||||
return Ok((package_id, vec![]));
|
||||
};
|
||||
|
||||
|
@ -124,7 +126,9 @@ impl Project {
|
|||
vec![]
|
||||
};
|
||||
|
||||
if let Some(build_files) = Some(&node.target)
|
||||
if let Some(build_files) = node
|
||||
.target
|
||||
.as_ref()
|
||||
.filter(|_| !node.node.pkg_ref.like_wally())
|
||||
.and_then(|t| t.build_files())
|
||||
{
|
||||
|
@ -146,7 +150,7 @@ impl Project {
|
|||
})
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
let mut package_types = HashMap::new();
|
||||
let mut package_types = PackageTypes::new();
|
||||
while let Some(task) = tasks.join_next().await {
|
||||
let (version_id, types) = task.unwrap()?;
|
||||
package_types.insert(version_id, types);
|
||||
|
@ -167,15 +171,19 @@ impl Project {
|
|||
node: &DownloadedDependencyGraphNode,
|
||||
package_id: &PackageId,
|
||||
alias: &str,
|
||||
package_types: &HashMap<PackageId, Vec<String>>,
|
||||
package_types: &PackageTypes,
|
||||
manifest: &Manifest,
|
||||
) -> Result<(), errors::LinkingError> {
|
||||
static NO_TYPES: Vec<String> = Vec::new();
|
||||
|
||||
if let Some(lib_file) = node.target.lib_path() {
|
||||
let Some(target) = &node.target else {
|
||||
return Ok(());
|
||||
};
|
||||
|
||||
if let Some(lib_file) = target.lib_path() {
|
||||
let lib_module = generator::generate_lib_linking_module(
|
||||
&generator::get_lib_require_path(
|
||||
&node.target.kind(),
|
||||
&target.kind(),
|
||||
base_folder,
|
||||
lib_file,
|
||||
container_folder,
|
||||
|
@ -195,7 +203,7 @@ impl Project {
|
|||
.await?;
|
||||
}
|
||||
|
||||
if let Some(bin_file) = node.target.bin_path() {
|
||||
if let Some(bin_file) = target.bin_path() {
|
||||
let bin_module = generator::generate_bin_linking_module(
|
||||
container_folder,
|
||||
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
|
||||
|
@ -209,7 +217,7 @@ impl Project {
|
|||
.await?;
|
||||
}
|
||||
|
||||
if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
|
||||
if let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) {
|
||||
let scripts_base =
|
||||
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
|
||||
.await?;
|
||||
|
@ -238,7 +246,7 @@ impl Project {
|
|||
&self,
|
||||
graph: &Arc<DownloadedGraph>,
|
||||
manifest: &Arc<Manifest>,
|
||||
package_types: &Arc<HashMap<PackageId, Vec<String>>>,
|
||||
package_types: &Arc<PackageTypes>,
|
||||
is_complete: bool,
|
||||
) -> Result<(), errors::LinkingError> {
|
||||
let mut tasks = graph
|
||||
|
@ -319,7 +327,10 @@ impl Project {
|
|||
let linker_folder = create_and_canonicalize(node_container_folder.join(
|
||||
node.node.base_folder(
|
||||
package_id.version_id(),
|
||||
dependency_node.target.kind(),
|
||||
match &dependency_node.target {
|
||||
Some(t) => t.kind(),
|
||||
None => continue,
|
||||
},
|
||||
),
|
||||
))
|
||||
.await?;
|
||||
|
|
src/lockfile.rs (121 lines changed)

@@ -1,94 +1,14 @@
|
|||
#![allow(deprecated)]
|
||||
use crate::{
|
||||
manifest::{
|
||||
overrides::OverrideKey,
|
||||
target::{Target, TargetKind},
|
||||
DependencyType,
|
||||
},
|
||||
graph::DependencyGraph,
|
||||
manifest::{overrides::OverrideKey, target::TargetKind},
|
||||
names::PackageName,
|
||||
source::{
|
||||
ids::{PackageId, VersionId},
|
||||
refs::PackageRefs,
|
||||
specifiers::DependencySpecifiers,
|
||||
traits::PackageRef,
|
||||
},
|
||||
source::specifiers::DependencySpecifiers,
|
||||
};
|
||||
use relative_path::RelativePathBuf;
|
||||
use semver::Version;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
collections::BTreeMap,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
|
||||
/// A graph of dependencies
|
||||
pub type Graph<Node> = BTreeMap<PackageId, Node>;
|
||||
|
||||
/// A dependency graph node
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct DependencyGraphNode {
|
||||
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
|
||||
#[serde(default, skip_serializing_if = "Option::is_none")]
|
||||
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
|
||||
/// The dependencies of the package
|
||||
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||
pub dependencies: BTreeMap<PackageId, String>,
|
||||
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
|
||||
pub resolved_ty: DependencyType,
|
||||
/// Whether the resolved type should be Peer if this isn't depended on
|
||||
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
|
||||
pub is_peer: bool,
|
||||
/// The package reference
|
||||
pub pkg_ref: PackageRefs,
|
||||
}
|
||||
|
||||
impl DependencyGraphNode {
|
||||
pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
|
||||
if self.pkg_ref.use_new_structure() {
|
||||
version_id.target().packages_folder(&project_target)
|
||||
} else {
|
||||
"..".to_string()
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the folder to store the contents of the package in
|
||||
pub fn container_folder<P: AsRef<Path>>(&self, path: &P, package_id: &PackageId) -> PathBuf {
|
||||
let (name, version) = package_id.parts();
|
||||
|
||||
if self.pkg_ref.like_wally() {
|
||||
return path
|
||||
.as_ref()
|
||||
.join(format!(
|
||||
"{}_{}@{}",
|
||||
package_id.name().as_str().0,
|
||||
name.as_str().1,
|
||||
version
|
||||
))
|
||||
.join(name.as_str().1);
|
||||
}
|
||||
|
||||
path.as_ref()
|
||||
.join(name.escaped())
|
||||
.join(version.to_string())
|
||||
.join(name.as_str().1)
|
||||
}
|
||||
}
|
||||
|
||||
/// A graph of `DependencyGraphNode`s
|
||||
pub type DependencyGraph = Graph<DependencyGraphNode>;
|
||||
|
||||
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
pub struct DownloadedDependencyGraphNode {
|
||||
/// The target of the package
|
||||
pub target: Target,
|
||||
/// The node
|
||||
#[serde(flatten)]
|
||||
pub node: DependencyGraphNode,
|
||||
}
|
||||
|
||||
/// A graph of `DownloadedDependencyGraphNode`s
|
||||
pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;
|
||||
use std::collections::BTreeMap;
|
||||
|
||||
/// A lockfile
|
||||
#[derive(Serialize, Deserialize, Debug, Clone)]
|
||||
|
@ -108,8 +28,8 @@ pub struct Lockfile {
|
|||
pub workspace: BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>,
|
||||
|
||||
/// The graph of dependencies
|
||||
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||
pub graph: DownloadedGraph,
|
||||
#[serde(default, skip_serializing_if = "DependencyGraph::is_empty")]
|
||||
pub graph: DependencyGraph,
|
||||
}
|
||||
|
||||
/// Old lockfile stuff. Will be removed in a future version.
|
||||
|
@ -201,22 +121,19 @@ pub mod old {
|
|||
versions.into_iter().map(move |(version, node)| {
|
||||
(
|
||||
PackageId(name.clone(), version),
|
||||
super::DownloadedDependencyGraphNode {
|
||||
target: node.target,
|
||||
node: super::DependencyGraphNode {
|
||||
direct: node.node.direct,
|
||||
dependencies: node
|
||||
.node
|
||||
.dependencies
|
||||
.into_iter()
|
||||
.map(|(name, (version, alias))| {
|
||||
(PackageId(name, version), alias)
|
||||
})
|
||||
.collect(),
|
||||
resolved_ty: node.node.resolved_ty,
|
||||
is_peer: node.node.is_peer,
|
||||
pkg_ref: node.node.pkg_ref,
|
||||
},
|
||||
crate::graph::DependencyGraphNode {
|
||||
direct: node.node.direct,
|
||||
dependencies: node
|
||||
.node
|
||||
.dependencies
|
||||
.into_iter()
|
||||
.map(|(name, (version, alias))| {
|
||||
(PackageId(name, version), alias)
|
||||
})
|
||||
.collect(),
|
||||
resolved_ty: node.node.resolved_ty,
|
||||
is_peer: node.node.is_peer,
|
||||
pkg_ref: node.node.pkg_ref,
|
||||
},
|
||||
)
|
||||
})
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::{
|
||||
lockfile::DownloadedGraph,
|
||||
graph::DependencyGraph,
|
||||
reporters::{PatchProgressReporter, PatchesReporter},
|
||||
source::ids::PackageId,
|
||||
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
|
||||
|
@ -80,7 +80,7 @@ impl Project {
|
|||
#[instrument(skip(self, graph, reporter), level = "debug")]
|
||||
pub async fn apply_patches<Reporter>(
|
||||
&self,
|
||||
graph: &DownloadedGraph,
|
||||
graph: &DependencyGraph,
|
||||
reporter: Arc<Reporter>,
|
||||
) -> Result<(), errors::ApplyPatchesError>
|
||||
where
|
||||
|
@ -102,7 +102,7 @@ impl Project {
|
|||
continue;
|
||||
};
|
||||
|
||||
let container_folder = node.node.container_folder(
|
||||
let container_folder = node.container_folder(
|
||||
&self
|
||||
.package_dir()
|
||||
.join(
|
||||
|
@ -116,7 +116,7 @@ impl Project {
|
|||
);
|
||||
|
||||
let reporter = reporter.clone();
|
||||
let span = tracing::info_span!("apply patch", package_id = package_id.to_string(),);
|
||||
let span = tracing::info_span!("apply patch", package_id = package_id.to_string());
|
||||
|
||||
tasks.spawn(
|
||||
async move {
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
use crate::{
|
||||
lockfile::{DependencyGraph, DependencyGraphNode},
|
||||
graph::{DependencyGraph, DependencyGraphNode},
|
||||
manifest::{overrides::OverrideSpecifier, DependencyType},
|
||||
source::{
|
||||
ids::PackageId,
|
||||
|
|
|
@ -10,7 +10,6 @@ use sha2::{Digest, Sha256};
|
|||
use std::{
|
||||
collections::BTreeMap,
|
||||
fmt::Debug,
|
||||
future::Future,
|
||||
path::{Path, PathBuf},
|
||||
};
|
||||
use tempfile::Builder;
|
||||
|
@ -22,7 +21,7 @@ use tracing::instrument;
|
|||
|
||||
/// A file system entry
|
||||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
pub enum FSEntry {
|
||||
pub enum FsEntry {
|
||||
/// A file with the given hash
|
||||
#[serde(rename = "f")]
|
||||
File(String),
|
||||
|
@ -35,9 +34,9 @@ pub enum FSEntry {
|
|||
#[derive(Debug, Clone, Serialize, Deserialize)]
|
||||
// don't need to differentiate between CAS and non-CAS, since non-CAS won't be serialized
|
||||
#[serde(untagged)]
|
||||
pub enum PackageFS {
|
||||
pub enum PackageFs {
|
||||
/// A package stored in the CAS
|
||||
CAS(BTreeMap<RelativePathBuf, FSEntry>),
|
||||
CAS(BTreeMap<RelativePathBuf, FsEntry>),
|
||||
/// A package that's to be copied
|
||||
Copy(PathBuf, TargetKind),
|
||||
}
|
||||
|
@ -74,15 +73,9 @@ pub(crate) fn cas_path(hash: &str, cas_dir: &Path) -> PathBuf {
|
|||
cas_dir.join(prefix).join(rest)
|
||||
}
|
||||
|
||||
pub(crate) async fn store_in_cas<
|
||||
R: tokio::io::AsyncRead + Unpin,
|
||||
P: AsRef<Path>,
|
||||
C: FnMut(Vec<u8>) -> F,
|
||||
F: Future<Output = std::io::Result<()>>,
|
||||
>(
|
||||
pub(crate) async fn store_in_cas<R: tokio::io::AsyncRead + Unpin, P: AsRef<Path>>(
|
||||
cas_dir: P,
|
||||
mut contents: R,
|
||||
mut bytes_cb: C,
|
||||
) -> std::io::Result<String> {
|
||||
let tmp_dir = cas_dir.as_ref().join(".tmp");
|
||||
fs::create_dir_all(&tmp_dir).await?;
|
||||
|
@ -105,7 +98,6 @@ pub(crate) async fn store_in_cas<
|
|||
|
||||
let bytes = &buf[..bytes_read];
|
||||
hasher.update(bytes);
|
||||
bytes_cb(bytes.to_vec()).await?;
|
||||
file_writer.write_all(bytes).await?;
|
||||
}
|
||||
|
||||
|
@ -125,7 +117,7 @@ pub(crate) async fn store_in_cas<
|
|||
Ok(hash)
|
||||
}
|
||||
|
||||
impl PackageFS {
|
||||
impl PackageFs {
|
||||
/// Write the package to the given destination
|
||||
#[instrument(skip(self), level = "debug")]
|
||||
pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
|
||||
|
@ -135,7 +127,7 @@ impl PackageFS {
|
|||
link: bool,
|
||||
) -> std::io::Result<()> {
|
||||
match self {
|
||||
PackageFS::CAS(entries) => {
|
||||
PackageFs::CAS(entries) => {
|
||||
try_join_all(entries.iter().map(|(path, entry)| {
|
||||
let destination = destination.as_ref().to_path_buf();
|
||||
let cas_path = cas_path.as_ref().to_path_buf();
|
||||
|
@ -144,7 +136,7 @@ impl PackageFS {
|
|||
let path = path.to_path(destination);
|
||||
|
||||
match entry {
|
||||
FSEntry::File(hash) => {
|
||||
FsEntry::File(hash) => {
|
||||
if let Some(parent) = path.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
}
|
||||
|
@ -159,7 +151,7 @@ impl PackageFS {
|
|||
set_readonly(&path, false).await?;
|
||||
}
|
||||
}
|
||||
FSEntry::Directory => {
|
||||
FsEntry::Directory => {
|
||||
fs::create_dir_all(path).await?;
|
||||
}
|
||||
}
|
||||
|
@ -169,7 +161,7 @@ impl PackageFS {
|
|||
}))
|
||||
.await?;
|
||||
}
|
||||
PackageFS::Copy(src, target) => {
|
||||
PackageFs::Copy(src, target) => {
|
||||
fs::create_dir_all(destination.as_ref()).await?;
|
||||
|
||||
let mut read_dir = fs::read_dir(src).await?;
|
||||
|
@ -220,7 +212,7 @@ impl PackageFS {
|
|||
file_hash: H,
|
||||
cas_path: P,
|
||||
) -> Option<String> {
|
||||
if !matches!(self, PackageFS::CAS(_)) {
|
||||
if !matches!(self, PackageFs::CAS(_)) {
|
||||
return None;
|
||||
}
|
||||
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
use crate::{
|
||||
deser_manifest,
|
||||
manifest::{
|
||||
target::{Target, TargetKind},
|
||||
Manifest,
|
||||
|
@ -6,22 +7,22 @@ use crate::{
|
|||
names::PackageNames,
|
||||
reporters::DownloadProgressReporter,
|
||||
source::{
|
||||
fs::{store_in_cas, FSEntry, PackageFS},
|
||||
fs::{store_in_cas, FsEntry, PackageFs},
|
||||
git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
|
||||
git_index::{read_file, GitBasedSource},
|
||||
specifiers::DependencySpecifiers,
|
||||
traits::{DownloadOptions, PackageRef, RefreshOptions, ResolveOptions},
|
||||
traits::{DownloadOptions, GetTargetOptions, PackageRef, RefreshOptions, ResolveOptions},
|
||||
wally::compat_util::get_target,
|
||||
PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
|
||||
},
|
||||
util::hash,
|
||||
Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
|
||||
};
|
||||
use fs_err::tokio as fs;
|
||||
use futures::future::try_join_all;
|
||||
use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
|
||||
use relative_path::RelativePathBuf;
|
||||
use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf, sync::Arc};
|
||||
use tokio::{sync::Mutex, task::spawn_blocking};
|
||||
use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf};
|
||||
use tokio::task::{spawn_blocking, JoinSet};
|
||||
use tracing::instrument;
|
||||
|
||||
/// The Git package reference
|
||||
|
@ -65,6 +66,7 @@ impl PackageSource for GitPackageSource {
|
|||
type RefreshError = crate::source::git_index::errors::RefreshError;
|
||||
type ResolveError = errors::ResolveError;
|
||||
type DownloadError = errors::DownloadError;
|
||||
type GetTargetError = errors::GetTargetError;
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
|
||||
|
@ -337,8 +339,10 @@ impl PackageSource for GitPackageSource {
|
|||
&self,
|
||||
pkg_ref: &Self::Ref,
|
||||
options: &DownloadOptions<R>,
|
||||
) -> Result<(PackageFS, Target), Self::DownloadError> {
|
||||
let DownloadOptions { project, .. } = options;
|
||||
) -> Result<PackageFs, Self::DownloadError> {
|
||||
let DownloadOptions {
|
||||
project, reporter, ..
|
||||
} = options;
|
||||
|
||||
let index_file = project
|
||||
.cas_dir()
|
||||
|
@ -353,52 +357,10 @@ impl PackageSource for GitPackageSource {
|
|||
pkg_ref.repo,
|
||||
pkg_ref.tree_id
|
||||
);
|
||||
|
||||
let fs = toml::from_str::<PackageFS>(&s).map_err(|e| {
|
||||
reporter.report_done();
|
||||
return toml::from_str::<PackageFs>(&s).map_err(|e| {
|
||||
errors::DownloadError::DeserializeFile(Box::new(self.repo_url.clone()), e)
|
||||
})?;
|
||||
|
||||
let manifest = match &fs {
|
||||
PackageFS::CAS(entries) => {
|
||||
match entries.get(&RelativePathBuf::from(MANIFEST_FILE_NAME)) {
|
||||
Some(FSEntry::File(hash)) => match fs
|
||||
.read_file(hash, project.cas_dir())
|
||||
.await
|
||||
.map(|m| toml::de::from_str::<Manifest>(&m))
|
||||
{
|
||||
Some(Ok(m)) => Some(m),
|
||||
Some(Err(e)) => {
|
||||
return Err(errors::DownloadError::DeserializeFile(
|
||||
Box::new(self.repo_url.clone()),
|
||||
e,
|
||||
))
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
_ => unreachable!("the package fs should be CAS"),
|
||||
};
|
||||
|
||||
let target = match manifest {
|
||||
Some(manifest) => manifest.target,
|
||||
#[cfg(feature = "wally-compat")]
|
||||
None if !pkg_ref.new_structure => {
|
||||
let tempdir = tempfile::tempdir()?;
|
||||
fs.write_to(tempdir.path(), project.cas_dir(), false)
|
||||
.await?;
|
||||
|
||||
crate::source::wally::compat_util::get_target(project, &tempdir).await?
|
||||
}
|
||||
None => {
|
||||
return Err(errors::DownloadError::NoManifest(Box::new(
|
||||
self.repo_url.clone(),
|
||||
)))
|
||||
}
|
||||
};
|
||||
|
||||
return Ok((fs, target));
|
||||
});
|
||||
}
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||
Err(e) => return Err(errors::DownloadError::Io(e)),
|
||||
|
@ -453,112 +415,78 @@ impl PackageSource for GitPackageSource {
|
|||
.await
|
||||
.unwrap()?;
|
||||
|
||||
let repo = repo.to_thread_local();
|
||||
let records = {
|
||||
let repo = repo.to_thread_local();
|
||||
|
||||
let records = records
|
||||
.into_iter()
|
||||
.map(|entry| {
|
||||
let object = repo.find_object(entry.oid).map_err(|e| {
|
||||
errors::DownloadError::ParseOidToObject(
|
||||
entry.oid,
|
||||
Box::new(self.repo_url.clone()),
|
||||
e,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok::<_, errors::DownloadError>((
|
||||
RelativePathBuf::from(entry.filepath.to_string()),
|
||||
if matches!(object.kind, gix::object::Kind::Tree) {
|
||||
None
|
||||
} else {
|
||||
Some(object.data.clone())
|
||||
},
|
||||
))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?;
|
||||
|
||||
let manifest = Arc::new(Mutex::new(None::<Vec<u8>>));
|
||||
let entries = try_join_all(
|
||||
records
|
||||
.into_iter()
|
||||
.filter(|(path, contents)| {
|
||||
let name = path.file_name().unwrap_or("");
|
||||
if contents.is_none() {
|
||||
return !IGNORED_DIRS.contains(&name);
|
||||
}
|
||||
|
||||
if IGNORED_FILES.contains(&name) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if pkg_ref.use_new_structure() && name == "default.project.json" {
|
||||
tracing::debug!(
|
||||
"removing default.project.json from {}#{} at {path} - using new structure",
|
||||
pkg_ref.repo,
|
||||
pkg_ref.tree_id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
.map(|(path, contents)| {
|
||||
let manifest = manifest.clone();
|
||||
async move {
|
||||
let Some(contents) = contents else {
|
||||
return Ok::<_, errors::DownloadError>((path, FSEntry::Directory));
|
||||
};
|
||||
|
||||
let hash =
|
||||
store_in_cas(project.cas_dir(), contents.as_slice(), |_| async { Ok(()) })
|
||||
.await?;
|
||||
|
||||
if path == MANIFEST_FILE_NAME {
|
||||
manifest.lock().await.replace(contents);
|
||||
}
|
||||
|
||||
Ok((path, FSEntry::File(hash)))
|
||||
}
|
||||
}),
|
||||
)
|
||||
.await?
|
||||
.into_iter()
|
||||
.collect::<BTreeMap<_, _>>();
|
||||
|
||||
let manifest = match Arc::into_inner(manifest).unwrap().into_inner() {
|
||||
Some(data) => match String::from_utf8(data.to_vec()) {
|
||||
Ok(s) => match toml::from_str::<Manifest>(&s) {
|
||||
Ok(m) => Some(m),
|
||||
Err(e) => {
|
||||
return Err(errors::DownloadError::DeserializeFile(
|
||||
.map(|entry| {
|
||||
let object = repo.find_object(entry.oid).map_err(|e| {
|
||||
errors::DownloadError::ParseOidToObject(
|
||||
entry.oid,
|
||||
Box::new(self.repo_url.clone()),
|
||||
e,
|
||||
))
|
||||
}
|
||||
},
|
||||
Err(e) => return Err(errors::DownloadError::ParseManifest(e)),
|
||||
},
|
||||
None => None,
|
||||
)
|
||||
})?;
|
||||
|
||||
Ok::<_, errors::DownloadError>((
|
||||
RelativePathBuf::from(entry.filepath.to_string()),
|
||||
if matches!(object.kind, gix::object::Kind::Tree) {
|
||||
None
|
||||
} else {
|
||||
Some(object.data.clone())
|
||||
},
|
||||
))
|
||||
})
|
||||
.collect::<Result<Vec<_>, _>>()?
|
||||
};
|
||||
|
||||
let fs = PackageFS::CAS(entries);
|
||||
let mut tasks = records
|
||||
.into_iter()
|
||||
.filter(|(path, contents)| {
|
||||
let name = path.file_name().unwrap_or("");
|
||||
if contents.is_none() {
|
||||
return !IGNORED_DIRS.contains(&name);
|
||||
}
|
||||
|
||||
let target = match manifest {
|
||||
Some(manifest) => manifest.target,
|
||||
#[cfg(feature = "wally-compat")]
|
||||
None if !pkg_ref.new_structure => {
|
||||
let tempdir = tempfile::tempdir()?;
|
||||
fs.write_to(tempdir.path(), project.cas_dir(), false)
|
||||
.await?;
|
||||
if IGNORED_FILES.contains(&name) {
|
||||
return false;
|
||||
}
|
||||
|
||||
crate::source::wally::compat_util::get_target(project, &tempdir).await?
|
||||
}
|
||||
None => {
|
||||
return Err(errors::DownloadError::NoManifest(Box::new(
|
||||
self.repo_url.clone(),
|
||||
)))
|
||||
}
|
||||
};
|
||||
if pkg_ref.use_new_structure() && name == "default.project.json" {
|
||||
tracing::debug!(
|
||||
"removing default.project.json from {}#{} at {path} - using new structure",
|
||||
pkg_ref.repo,
|
||||
pkg_ref.tree_id
|
||||
);
|
||||
return false;
|
||||
}
|
||||
|
||||
true
|
||||
})
|
||||
.map(|(path, contents)| {
|
||||
let project = project.clone();
|
||||
|
||||
async move {
|
||||
let Some(contents) = contents else {
|
||||
return Ok::<_, errors::DownloadError>((path, FsEntry::Directory));
|
||||
};
|
||||
|
||||
let hash = store_in_cas(project.cas_dir(), contents.as_slice()).await?;
|
||||
|
||||
Ok((path, FsEntry::File(hash)))
|
||||
}
|
||||
})
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
let mut entries = BTreeMap::new();
|
||||
|
||||
while let Some(res) = tasks.join_next().await {
|
||||
let (path, entry) = res.unwrap()?;
|
||||
entries.insert(path, entry);
|
||||
}
|
||||
|
||||
let fs = PackageFs::CAS(entries);
|
||||
|
||||
if let Some(parent) = index_file.parent() {
|
||||
fs::create_dir_all(parent).await?;
|
||||
|
@ -573,7 +501,27 @@ impl PackageSource for GitPackageSource {
|
|||
.await
|
||||
.map_err(errors::DownloadError::Io)?;
|
||||
|
||||
Ok((fs, target))
|
||||
reporter.report_done();
|
||||
|
||||
Ok(fs)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
async fn get_target(
|
||||
&self,
|
||||
_pkg_ref: &Self::Ref,
|
||||
options: &GetTargetOptions,
|
||||
) -> Result<Target, Self::GetTargetError> {
|
||||
match deser_manifest(&options.path).await {
|
||||
Ok(manifest) => Ok(manifest.target),
|
||||
#[cfg(feature = "wally-compat")]
|
||||
Err(crate::errors::ManifestReadError::Io(e))
|
||||
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||
{
|
||||
get_target(options).await.map_err(Into::into)
|
||||
}
|
||||
Err(e) => Err(e.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -734,4 +682,18 @@ pub mod errors {
|
|||
#[error("error parsing tree_id to ObjectId for repository {0}")]
|
||||
ParseTreeId(Box<gix::Url>, #[source] gix::hash::decode::Error),
|
||||
}
|
||||
|
||||
/// Errors that can occur when getting a target from a Git package source
|
||||
#[derive(Debug, Error)]
|
||||
#[non_exhaustive]
|
||||
pub enum GetTargetError {
|
||||
/// Reading the manifest failed
|
||||
#[error("error reading manifest")]
|
||||
ManifestRead(#[from] crate::errors::ManifestReadError),
|
||||
|
||||
/// An error occurred while creating a Wally target
|
||||
#[cfg(feature = "wally-compat")]
|
||||
#[error("error creating Wally target")]
|
||||
GetTarget(#[from] crate::source::wally::compat_util::errors::GetTargetError),
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -3,7 +3,7 @@ use crate::{
names::PackageNames,
reporters::DownloadProgressReporter,
source::{
fs::PackageFS, ids::VersionId, refs::PackageRefs, specifiers::DependencySpecifiers,
fs::PackageFs, ids::VersionId, refs::PackageRefs, specifiers::DependencySpecifiers,
traits::*,
},
};
@@ -64,6 +64,7 @@ impl PackageSource for PackageSources {
type RefreshError = errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
type GetTargetError = errors::GetTargetError;

async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
match self {
@@ -174,7 +175,7 @@ impl PackageSource for PackageSources {
&self,
pkg_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<(PackageFS, Target), Self::DownloadError> {
) -> Result<PackageFs, Self::DownloadError> {
match (self, pkg_ref) {
(PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => {
source.download(pkg_ref, options).await.map_err(Into::into)
@@ -200,6 +201,42 @@ impl PackageSource for PackageSources {
_ => Err(errors::DownloadError::Mismatch),
}
}

async fn get_target(
&self,
pkg_ref: &Self::Ref,
options: &GetTargetOptions,
) -> Result<Target, Self::GetTargetError> {
match (self, pkg_ref) {
(PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => source
.get_target(pkg_ref, options)
.await
.map_err(Into::into),

#[cfg(feature = "wally-compat")]
(PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => source
.get_target(pkg_ref, options)
.await
.map_err(Into::into),

(PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => source
.get_target(pkg_ref, options)
.await
.map_err(Into::into),

(PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => source
.get_target(pkg_ref, options)
.await
.map_err(Into::into),

(PackageSources::Path(source), PackageRefs::Path(pkg_ref)) => source
.get_target(pkg_ref, options)
.await
.map_err(Into::into),

_ => Err(errors::GetTargetError::Mismatch),
}
}
}

/// Errors that can occur when interacting with a package source
@@ -291,4 +328,34 @@ pub mod errors {
#[error("error downloading path package")]
Path(#[from] crate::source::path::errors::DownloadError),
}

/// Errors that can occur when getting a package's target
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetTargetError {
/// The package ref does not match the source (if using the CLI, this is a bug - file an issue)
#[error("mismatched package ref for source")]
Mismatch,

/// A pesde package source failed to get the target
#[error("error getting target for pesde package")]
Pesde(#[from] crate::source::pesde::errors::GetTargetError),

/// A Wally package source failed to get the target
#[cfg(feature = "wally-compat")]
#[error("error getting target for wally package")]
Wally(#[from] crate::source::wally::errors::GetTargetError),

/// A Git package source failed to get the target
#[error("error getting target for git package")]
Git(#[from] crate::source::git::errors::GetTargetError),

/// A workspace package source failed to get the target
#[error("error getting target for workspace package")]
Workspace(#[from] crate::source::workspace::errors::GetTargetError),

/// A path package source failed to get the target
#[error("error getting target for path package")]
Path(#[from] crate::source::path::errors::GetTargetError),
}
}

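Worth noting for the `GetTargetError` aggregation above: every per-source variant is a `#[from]` conversion, which is what lets the dispatcher arms end in `.map_err(Into::into)`. Below is a stripped-down, self-contained illustration of that thiserror pattern; the names are invented for the example and are not part of this commit.

```rust
use thiserror::Error;

#[derive(Debug, Error)]
enum AggregateError {
    /// `#[from]` generates a `From<std::io::Error>` impl for this variant.
    #[error("io failed")]
    Io(#[from] std::io::Error),
}

fn read_manifest() -> Result<String, AggregateError> {
    // `?` uses the generated From impl to lift io::Error into AggregateError.
    Ok(std::fs::read_to_string("pesde.toml")?)
}
```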
@@ -4,11 +4,11 @@ use crate::{
names::PackageNames,
reporters::DownloadProgressReporter,
source::{
fs::PackageFS,
fs::PackageFs,
ids::VersionId,
path::pkg_ref::PathPackageRef,
specifiers::DependencySpecifiers,
traits::{DownloadOptions, PackageSource, ResolveOptions},
traits::{DownloadOptions, GetTargetOptions, PackageSource, ResolveOptions},
ResolveResult,
},
DEFAULT_INDEX_NAME,
@@ -31,6 +31,7 @@ impl PackageSource for PathPackageSource {
type RefreshError = errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
type GetTargetError = errors::GetTargetError;

#[instrument(skip_all, level = "debug")]
async fn resolve(
@@ -103,15 +104,29 @@ impl PackageSource for PathPackageSource {
async fn download<R: DownloadProgressReporter>(
&self,
pkg_ref: &Self::Ref,
_options: &DownloadOptions<R>,
) -> Result<(PackageFS, Target), Self::DownloadError> {
options: &DownloadOptions<R>,
) -> Result<PackageFs, Self::DownloadError> {
let DownloadOptions { reporter, .. } = options;
let manifest = deser_manifest(&pkg_ref.path).await?;

Ok((
PackageFS::Copy(pkg_ref.path.clone(), manifest.target.kind()),
manifest.target,
reporter.report_done();

Ok(PackageFs::Copy(
pkg_ref.path.clone(),
manifest.target.kind(),
))
}

#[instrument(skip_all, level = "debug")]
async fn get_target(
&self,
pkg_ref: &Self::Ref,
_options: &GetTargetOptions,
) -> Result<Target, Self::GetTargetError> {
let manifest = deser_manifest(&pkg_ref.path).await?;

Ok(manifest.target)
}
}

/// Errors that can occur when using a path package source
@@ -149,4 +164,13 @@ pub mod errors {
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
}

/// Errors that can occur when getting the target of a path package
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetTargetError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
}
}

@@ -18,9 +18,9 @@ use crate::{
names::{PackageName, PackageNames},
reporters::DownloadProgressReporter,
source::{
fs::{store_in_cas, FSEntry, PackageFS},
fs::{store_in_cas, FsEntry, PackageFs},
git_index::{read_file, root_tree, GitBasedSource},
traits::{DownloadOptions, RefreshOptions, ResolveOptions},
traits::{DownloadOptions, GetTargetOptions, RefreshOptions, ResolveOptions},
DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
},
util::hash,
@@ -102,6 +102,7 @@ impl PackageSource for PesdePackageSource {
type RefreshError = crate::source::git_index::errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
type GetTargetError = errors::GetTargetError;

#[instrument(skip_all, level = "debug")]
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
@@ -170,7 +171,7 @@ impl PackageSource for PesdePackageSource {
&self,
pkg_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<(PackageFS, Target), Self::DownloadError> {
) -> Result<PackageFs, Self::DownloadError> {
let DownloadOptions {
project,
reporter,
@@ -193,7 +194,10 @@ impl PackageSource for PesdePackageSource {
pkg_ref.version,
pkg_ref.target
);
return Ok((toml::from_str::<PackageFS>(&s)?, pkg_ref.target.clone()));

reporter.report_done();

return toml::from_str::<PackageFs>(&s).map_err(Into::into);
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
@@ -255,7 +259,7 @@ impl PackageSource for PesdePackageSource {
continue;
}

entries.insert(path, FSEntry::Directory);
entries.insert(path, FsEntry::Directory);

continue;
}
@@ -264,13 +268,13 @@ impl PackageSource for PesdePackageSource {
continue;
}

let hash = store_in_cas(project.cas_dir(), entry, |_| async { Ok(()) })
let hash = store_in_cas(project.cas_dir(), entry)
.await
.map_err(errors::DownloadError::Store)?;
entries.insert(path, FSEntry::File(hash));
entries.insert(path, FsEntry::File(hash));
}

let fs = PackageFS::CAS(entries);
let fs = PackageFs::CAS(entries);

if let Some(parent) = index_file.parent() {
fs::create_dir_all(parent)
@@ -284,7 +288,16 @@ impl PackageSource for PesdePackageSource {

reporter.report_done();

Ok((fs, pkg_ref.target.clone()))
Ok(fs)
}

#[instrument(skip_all, level = "debug")]
async fn get_target(
&self,
pkg_ref: &Self::Ref,
_options: &GetTargetOptions,
) -> Result<Target, Self::GetTargetError> {
Ok(pkg_ref.target.clone())
}
}

@@ -569,4 +582,9 @@ pub mod errors {
#[error("error reading index file")]
ReadIndex(#[source] std::io::Error),
}

/// Errors that can occur when getting the target for a package from a pesde package source
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetTargetError {}
}

@@ -4,13 +4,14 @@ use crate::{
DependencyType,
},
reporters::DownloadProgressReporter,
source::{DependencySpecifiers, PackageFS, PackageSources, ResolveResult},
source::{DependencySpecifiers, PackageFs, PackageSources, ResolveResult},
Project, RefreshedSources,
};
use std::{
collections::BTreeMap,
fmt::{Debug, Display},
future::Future,
path::Path,
sync::Arc,
};

@@ -56,6 +57,15 @@ pub struct DownloadOptions<R: DownloadProgressReporter> {
pub reporter: Arc<R>,
}

/// Options for getting a package's Target
#[derive(Debug, Clone)]
pub struct GetTargetOptions {
/// The project to get the target for
pub project: Project,
/// The path the package has been written to
pub path: Arc<Path>,
}

/// A source of packages
pub trait PackageSource: Debug {
/// The specifier type for this source
@@ -68,6 +78,8 @@ pub trait PackageSource: Debug {
type ResolveError: std::error::Error + Send + Sync + 'static;
/// The error type for downloading a package from this source
type DownloadError: std::error::Error + Send + Sync + 'static;
/// The error type for getting a package's target from this source
type GetTargetError: std::error::Error + Send + Sync + 'static;

/// Refreshes the source
fn refresh(
@@ -82,12 +94,19 @@ pub trait PackageSource: Debug {
&self,
specifier: &Self::Specifier,
options: &ResolveOptions,
) -> impl Future<Output = Result<ResolveResult<Self::Ref>, Self::ResolveError>>;
) -> impl Future<Output = Result<ResolveResult<Self::Ref>, Self::ResolveError>> + Send + Sync;

/// Downloads a package
fn download<R: DownloadProgressReporter>(
&self,
pkg_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> impl Future<Output = Result<(PackageFS, Target), Self::DownloadError>>;
) -> impl Future<Output = Result<PackageFs, Self::DownloadError>> + Send + Sync;

/// Gets the target of a package
fn get_target(
&self,
pkg_ref: &Self::Ref,
options: &GetTargetOptions,
) -> impl Future<Output = Result<Target, Self::GetTargetError>> + Send + Sync;
}

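For orientation, here is a minimal sketch of how a consumer of the reworked trait might use the two-step flow defined above: `download` now yields only the `PackageFs`, and the `Target` is queried separately once the contents have been written somewhere. The fragment is assumed to live inside an async function; `source`, `pkg_ref`, `dl_opts`, `project` and `dest` are assumptions for the example, not part of this commit.

```rust
// Sketch only: `source`, `pkg_ref`, `dl_opts` (a DownloadOptions built as
// elsewhere in this diff), `project` and `dest` are assumed to exist.
let fs: PackageFs = source.download(&pkg_ref, &dl_opts).await?;

// The contents have to exist on disk before the target can be determined.
fs.write_to(&dest, project.cas_dir(), true).await?;

// Only now is the Target resolved, from the written path.
let target: Target = source
    .get_target(
        &pkg_ref,
        &GetTargetOptions {
            project: project.clone(),
            path: Arc::from(dest.as_path()),
        },
    )
    .await?;
```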
@@ -2,12 +2,14 @@ use std::path::Path;

use relative_path::RelativePathBuf;
use serde::Deserialize;
use tempfile::TempDir;

use crate::{
manifest::target::Target,
scripts::{execute_script, ExecuteScriptHooks, ScriptName},
source::wally::manifest::{Realm, WallyManifest},
source::{
traits::GetTargetOptions,
wally::manifest::{Realm, WallyManifest},
},
Project, LINK_LIB_NO_FILE_FOUND,
};
use fs_err::tokio as fs;
@@ -54,17 +56,18 @@ async fn find_lib_path(

pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";

#[instrument(skip(project, tempdir), level = "debug")]
#[instrument(skip_all, level = "debug")]
pub(crate) async fn get_target(
project: &Project,
tempdir: &TempDir,
options: &GetTargetOptions,
) -> Result<Target, errors::GetTargetError> {
let lib = find_lib_path(project, tempdir.path())
let GetTargetOptions { project, path } = options;

let lib = find_lib_path(project, path)
.await?
.or_else(|| Some(RelativePathBuf::from(LINK_LIB_NO_FILE_FOUND)));
let build_files = Default::default();

let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
let manifest = path.join(WALLY_MANIFEST_FILE_NAME);
let manifest = fs::read_to_string(&manifest).await?;
let manifest: WallyManifest = toml::from_str(&manifest)?;

@@ -3,10 +3,12 @@ use crate::{
names::PackageNames,
reporters::DownloadProgressReporter,
source::{
fs::{store_in_cas, FSEntry, PackageFS},
fs::{store_in_cas, FsEntry, PackageFs},
git_index::{read_file, root_tree, GitBasedSource},
ids::VersionId,
traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
traits::{
DownloadOptions, GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions,
},
wally::{
compat_util::get_target,
manifest::{Realm, WallyManifest},
@@ -23,13 +25,8 @@ use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
use tempfile::tempdir;
use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
sync::Mutex,
task::spawn_blocking,
};
use std::{collections::BTreeMap, path::PathBuf};
use tokio::{io::AsyncReadExt, task::spawn_blocking};
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::StreamReader};
use tracing::instrument;

@@ -96,6 +93,7 @@ impl PackageSource for WallyPackageSource {
type RefreshError = crate::source::git_index::errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
type GetTargetError = errors::GetTargetError;

#[instrument(skip_all, level = "debug")]
async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
@@ -108,24 +106,36 @@ impl PackageSource for WallyPackageSource {
specifier: &Self::Specifier,
options: &ResolveOptions,
) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
let ResolveOptions {
project,
refreshed_sources,
..
} = options;
async fn inner(
source: &WallyPackageSource,
specifier: &specifier::WallyDependencySpecifier,
options: &ResolveOptions,
) -> Result<ResolveResult<WallyPackageRef>, errors::ResolveError> {
let ResolveOptions {
project,
refreshed_sources,
..
} = options;

let repo = gix::open(self.path(project)).map_err(Box::new)?;
let tree = root_tree(&repo).map_err(Box::new)?;
let (scope, name) = specifier.name.as_str();
let string = match read_file(&tree, [scope, name]) {
Ok(Some(s)) => s,
Ok(None) => {
let Some(string) = ({
let repo = gix::open(source.path(project)).map_err(Box::new)?;
let tree = root_tree(&repo).map_err(Box::new)?;
let (scope, name) = specifier.name.as_str();
match read_file(&tree, [scope, name]) {
Ok(string) => string,
Err(e) => {
return Err(errors::ResolveError::Read(
specifier.name.to_string(),
Box::new(e),
))
}
}
}) else {
tracing::debug!(
"{} not found in wally registry. searching in backup registries",
specifier.name
);

let config = self.config(project).await.map_err(Box::new)?;
let config = source.config(project).await.map_err(Box::new)?;
for registry in config.fallback_registries {
let source = WallyPackageSource::new(registry);
match refreshed_sources
@@ -139,12 +149,12 @@ impl PackageSource for WallyPackageSource {
{
Ok(()) => {}
Err(super::errors::RefreshError::Wally(e)) => {
return Err(Self::ResolveError::Refresh(Box::new(e)));
return Err(errors::ResolveError::Refresh(Box::new(e)));
}
Err(e) => unreachable!("unexpected error: {e:?}"),
Err(e) => panic!("unexpected error: {e:?}"),
}

match Box::pin(source.resolve(specifier, options)).await {
match Box::pin(inner(&source, specifier, options)).await {
Ok((name, results)) => {
tracing::debug!("found {name} in backup registry {}", source.repo_url);
return Ok((name, results));
@@ -158,50 +168,46 @@ impl PackageSource for WallyPackageSource {
}
}

return Err(Self::ResolveError::NotFound(specifier.name.to_string()));
}
Err(e) => {
return Err(Self::ResolveError::Read(
specifier.name.to_string(),
Box::new(e),
))
}
};
return Err(errors::ResolveError::NotFound(specifier.name.to_string()));
};

let entries: Vec<WallyManifest> = string
.lines()
.map(serde_json::from_str)
.collect::<Result<_, _>>()
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
let entries: Vec<WallyManifest> = string
.lines()
.map(serde_json::from_str)
.collect::<Result<_, _>>()
.map_err(|e| errors::ResolveError::Parse(specifier.name.to_string(), e))?;

tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
tracing::debug!("{} has {} possible entries", specifier.name, entries.len());

Ok((
PackageNames::Wally(specifier.name.clone()),
entries
.into_iter()
.filter(|manifest| specifier.version.matches(&manifest.package.version))
.map(|manifest| {
Ok((
VersionId(
manifest.package.version.clone(),
match manifest.package.realm {
Realm::Server => TargetKind::RobloxServer,
_ => TargetKind::Roblox,
Ok((
PackageNames::Wally(specifier.name.clone()),
entries
.into_iter()
.filter(|manifest| specifier.version.matches(&manifest.package.version))
.map(|manifest| {
Ok((
VersionId(
manifest.package.version.clone(),
match manifest.package.realm {
Realm::Server => TargetKind::RobloxServer,
_ => TargetKind::Roblox,
},
),
WallyPackageRef {
name: specifier.name.clone(),
index_url: source.repo_url.clone(),
dependencies: manifest.all_dependencies().map_err(|e| {
errors::ResolveError::AllDependencies(specifier.to_string(), e)
})?,
version: manifest.package.version,
},
),
WallyPackageRef {
name: specifier.name.clone(),
index_url: self.repo_url.clone(),
dependencies: manifest.all_dependencies().map_err(|e| {
Self::ResolveError::AllDependencies(specifier.to_string(), e)
})?,
version: manifest.package.version,
},
))
})
.collect::<Result<_, Self::ResolveError>>()?,
))
))
})
.collect::<Result<_, errors::ResolveError>>()?,
))
}

inner(self, specifier, options).await
}

#[instrument(skip_all, level = "debug")]
@@ -209,7 +215,7 @@ impl PackageSource for WallyPackageSource {
&self,
pkg_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<(PackageFS, Target), Self::DownloadError> {
) -> Result<PackageFs, Self::DownloadError> {
let DownloadOptions {
project,
reqwest,
@@ -223,7 +229,7 @@ impl PackageSource for WallyPackageSource {
.join(pkg_ref.name.escaped())
.join(pkg_ref.version.to_string());

let tempdir = match fs::read_to_string(&index_file).await {
match fs::read_to_string(&index_file).await {
Ok(s) => {
tracing::debug!(
"using cached index file for package {}@{}",
@@ -231,14 +237,11 @@ impl PackageSource for WallyPackageSource {
pkg_ref.version
);

let tempdir = tempdir()?;
let fs = toml::from_str::<PackageFS>(&s)?;
reporter.report_done();

fs.write_to(&tempdir, project.cas_dir(), false).await?;

return Ok((fs, get_target(project, &tempdir).await?));
return toml::from_str::<PackageFs>(&s).map_err(Into::into);
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => tempdir()?,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
};

@@ -308,30 +311,19 @@ impl PackageSource for WallyPackageSource {
continue;
}

let path = relative_path.to_path(tempdir.path());

if is_dir {
fs::create_dir_all(&path).await?;
entries.insert(relative_path, FSEntry::Directory);
entries.insert(relative_path, FsEntry::Directory);
continue;
}

let entry_reader = archive.reader_without_entry(index).await?;
if let Some(parent) = path.parent() {
fs::create_dir_all(parent).await?;
}

let writer = Arc::new(Mutex::new(fs::File::create(&path).await?));
let hash = store_in_cas(project.cas_dir(), entry_reader.compat(), |bytes| {
let writer = writer.clone();
async move { writer.lock().await.write_all(&bytes).await }
})
.await?;
let hash = store_in_cas(project.cas_dir(), entry_reader.compat()).await?;

entries.insert(relative_path, FSEntry::File(hash));
entries.insert(relative_path, FsEntry::File(hash));
}

let fs = PackageFS::CAS(entries);
let fs = PackageFs::CAS(entries);

if let Some(parent) = index_file.parent() {
fs::create_dir_all(parent)
@@ -345,7 +337,16 @@ impl PackageSource for WallyPackageSource {

reporter.report_done();

Ok((fs, get_target(project, &tempdir).await?))
Ok(fs)
}

#[instrument(skip_all, level = "debug")]
async fn get_target(
&self,
_pkg_ref: &Self::Ref,
options: &GetTargetOptions,
) -> Result<Target, Self::GetTargetError> {
get_target(options).await.map_err(Into::into)
}
}

@@ -472,4 +473,13 @@ pub mod errors {
#[error("error writing index file")]
WriteIndex(#[source] std::io::Error),
}

/// Errors that can occur when getting a target from a Wally package source
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetTargetError {
/// Error getting target
#[error("error getting target")]
GetTarget(#[from] crate::source::wally::compat_util::errors::GetTargetError),
}
}

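A side note on the `resolve` rewrite above: the body moves into a free `inner` function so the fallback-registry loop can call itself, and a recursive async call produces an infinitely-sized future unless it is boxed, hence `Box::pin(inner(...))`. A minimal standalone illustration of the same constraint follows; the function is invented for the example and is not part of this commit.

```rust
// Without Box::pin the recursive call would not compile: the generated
// future would have to contain itself, giving it infinite size.
async fn countdown(n: u32) -> u32 {
    if n == 0 {
        return 0;
    }
    1 + Box::pin(countdown(n - 1)).await
}
```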
@@ -4,10 +4,10 @@ use crate::{
names::PackageNames,
reporters::DownloadProgressReporter,
source::{
fs::PackageFS,
fs::PackageFs,
ids::VersionId,
specifiers::DependencySpecifiers,
traits::{DownloadOptions, PackageSource, ResolveOptions},
traits::{DownloadOptions, GetTargetOptions, PackageSource, ResolveOptions},
workspace::pkg_ref::WorkspacePackageRef,
ResolveResult,
},
@@ -34,6 +34,7 @@ impl PackageSource for WorkspacePackageSource {
type RefreshError = errors::RefreshError;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
type GetTargetError = errors::GetTargetError;

#[instrument(skip_all, level = "debug")]
async fn resolve(
@@ -133,18 +134,35 @@ impl PackageSource for WorkspacePackageSource {
&self,
pkg_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<(PackageFS, Target), Self::DownloadError> {
let DownloadOptions { project, .. } = options;
) -> Result<PackageFs, Self::DownloadError> {
let DownloadOptions {
project, reporter, ..
} = options;

let path = pkg_ref
.path
.to_path(project.workspace_dir().unwrap_or(project.package_dir()));
let manifest = deser_manifest(&path).await?;

Ok((
PackageFS::Copy(path, manifest.target.kind()),
manifest.target,
))
reporter.report_done();

Ok(PackageFs::Copy(path, manifest.target.kind()))
}

#[instrument(skip_all, level = "debug")]
async fn get_target(
&self,
pkg_ref: &Self::Ref,
options: &GetTargetOptions,
) -> Result<Target, Self::GetTargetError> {
let GetTargetOptions { project, .. } = options;

let path = pkg_ref
.path
.to_path(project.workspace_dir().unwrap_or(project.package_dir()));
let manifest = deser_manifest(&path).await?;

Ok(manifest.target)
}
}

@@ -187,4 +205,13 @@ pub mod errors {
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
}

/// Errors that can occur when getting the target of a workspace package
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum GetTargetError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
}
}