mirror of
https://github.com/pesde-pkg/pesde.git
synced 2025-04-05 11:20:55 +01:00
perf: use arcs where possible, remove unnecessary cloning
This commit is contained in:
parent
a41d9950f8
commit
8e6d877241
29 changed files with 746 additions and 583 deletions
|
@ -5,6 +5,14 @@ All notable changes to this project will be documented in this file.
|
||||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||||
|
|
||||||
|
## [Unreleased]
|
||||||
|
### Added
|
||||||
|
- Improve installation experience by @lukadev-0
|
||||||
|
|
||||||
|
### Performance
|
||||||
|
- Use `Arc` for more efficient cloning of multiple structs by @daimond113
|
||||||
|
- Avoid cloning where possible by @daimond113
|
||||||
|
|
||||||
## [0.5.2] - 2024-12-19
|
## [0.5.2] - 2024-12-19
|
||||||
### Fixed
|
### Fixed
|
||||||
- Change dependency types for removed peer dependencies by @daimond113
|
- Change dependency types for removed peer dependencies by @daimond113
|
||||||
|
|
|
@ -18,6 +18,7 @@ use pesde::{
|
||||||
git_index::{read_file, root_tree, GitBasedSource},
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
|
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
|
traits::RefreshOptions,
|
||||||
version_id::VersionId,
|
version_id::VersionId,
|
||||||
IGNORED_DIRS, IGNORED_FILES,
|
IGNORED_DIRS, IGNORED_FILES,
|
||||||
},
|
},
|
||||||
|
@ -72,7 +73,12 @@ pub async fn publish_package(
|
||||||
user_id: web::ReqData<UserId>,
|
user_id: web::ReqData<UserId>,
|
||||||
) -> Result<impl Responder, Error> {
|
) -> Result<impl Responder, Error> {
|
||||||
let source = app_state.source.lock().await;
|
let source = app_state.source.lock().await;
|
||||||
source.refresh(&app_state.project).await.map_err(Box::new)?;
|
source
|
||||||
|
.refresh(&RefreshOptions {
|
||||||
|
project: app_state.project.clone(),
|
||||||
|
})
|
||||||
|
.await
|
||||||
|
.map_err(Box::new)?;
|
||||||
let config = source.config(&app_state.project).await?;
|
let config = source.config(&app_state.project).await?;
|
||||||
|
|
||||||
let package_dir = tempfile::tempdir()?;
|
let package_dir = tempfile::tempdir()?;
|
||||||
|
|
|
@ -14,7 +14,10 @@ use actix_web::{
|
||||||
};
|
};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
source::{pesde::PesdePackageSource, traits::PackageSource},
|
source::{
|
||||||
|
pesde::PesdePackageSource,
|
||||||
|
traits::{PackageSource, RefreshOptions},
|
||||||
|
},
|
||||||
AuthConfig, Project,
|
AuthConfig, Project,
|
||||||
};
|
};
|
||||||
use std::{env::current_dir, path::PathBuf};
|
use std::{env::current_dir, path::PathBuf};
|
||||||
|
@ -106,7 +109,9 @@ async fn run() -> std::io::Result<()> {
|
||||||
);
|
);
|
||||||
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
|
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
|
||||||
source
|
source
|
||||||
.refresh(&project)
|
.refresh(&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
})
|
||||||
.await
|
.await
|
||||||
.expect("failed to refresh source");
|
.expect("failed to refresh source");
|
||||||
let config = source
|
let config = source
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
use std::{collections::HashSet, str::FromStr};
|
use std::str::FromStr;
|
||||||
|
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
|
@ -13,11 +13,11 @@ use pesde::{
|
||||||
git::{specifier::GitDependencySpecifier, GitPackageSource},
|
git::{specifier::GitDependencySpecifier, GitPackageSource},
|
||||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::PackageSource,
|
traits::{PackageSource, RefreshOptions, ResolveOptions},
|
||||||
workspace::WorkspacePackageSource,
|
workspace::WorkspacePackageSource,
|
||||||
PackageSources,
|
PackageSources,
|
||||||
},
|
},
|
||||||
Project, DEFAULT_INDEX_NAME,
|
Project, RefreshedSources, DEFAULT_INDEX_NAME,
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
|
@ -128,17 +128,27 @@ impl AddCommand {
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
};
|
};
|
||||||
source
|
|
||||||
.refresh(&project)
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
|
refreshed_sources
|
||||||
|
.refresh(
|
||||||
|
&source,
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to refresh package source")?;
|
.context("failed to refresh package source")?;
|
||||||
|
|
||||||
let Some(version_id) = source
|
let Some(version_id) = source
|
||||||
.resolve(
|
.resolve(
|
||||||
&specifier,
|
&specifier,
|
||||||
&project,
|
&ResolveOptions {
|
||||||
manifest.target.kind(),
|
project: project.clone(),
|
||||||
&mut HashSet::new(),
|
target: manifest.target.kind(),
|
||||||
|
refreshed_sources,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
|
@ -167,7 +177,7 @@ impl AddCommand {
|
||||||
"dependencies"
|
"dependencies"
|
||||||
};
|
};
|
||||||
|
|
||||||
let alias = self.alias.unwrap_or_else(|| match self.name.clone() {
|
let alias = self.alias.unwrap_or_else(|| match &self.name {
|
||||||
AnyPackageIdentifier::PackageName(versioned) => versioned.0.as_str().1.to_string(),
|
AnyPackageIdentifier::PackageName(versioned) => versioned.0.as_str().1.to_string(),
|
||||||
AnyPackageIdentifier::Url((url, _)) => url
|
AnyPackageIdentifier::Url((url, _)) => url
|
||||||
.path
|
.path
|
||||||
|
@ -205,7 +215,8 @@ impl AddCommand {
|
||||||
}
|
}
|
||||||
#[cfg(feature = "wally-compat")]
|
#[cfg(feature = "wally-compat")]
|
||||||
DependencySpecifiers::Wally(spec) => {
|
DependencySpecifiers::Wally(spec) => {
|
||||||
field["wally"] = toml_edit::value(spec.name.clone().to_string());
|
field["wally"] =
|
||||||
|
toml_edit::value(spec.name.clone().to_string().trim_start_matches("wally#"));
|
||||||
field["version"] = toml_edit::value(format!("^{}", version_id.version()));
|
field["version"] = toml_edit::value(format!("^{}", version_id.version()));
|
||||||
|
|
||||||
if let Some(index) = spec.index.filter(|i| i != DEFAULT_INDEX_NAME) {
|
if let Some(index) = spec.index.filter(|i| i != DEFAULT_INDEX_NAME) {
|
||||||
|
|
|
@ -6,13 +6,15 @@ use std::thread::spawn;
|
||||||
use tokio::time::sleep;
|
use tokio::time::sleep;
|
||||||
use url::Url;
|
use url::Url;
|
||||||
|
|
||||||
|
use crate::cli::auth::{get_token_login, set_token};
|
||||||
use pesde::{
|
use pesde::{
|
||||||
source::{pesde::PesdePackageSource, traits::PackageSource},
|
source::{
|
||||||
|
pesde::PesdePackageSource,
|
||||||
|
traits::{PackageSource, RefreshOptions},
|
||||||
|
},
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
|
|
||||||
use crate::cli::auth::{get_token_login, set_token};
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct LoginCommand {
|
pub struct LoginCommand {
|
||||||
/// The token to use for authentication, skipping login
|
/// The token to use for authentication, skipping login
|
||||||
|
@ -57,7 +59,9 @@ impl LoginCommand {
|
||||||
|
|
||||||
let source = PesdePackageSource::new(index_url.clone());
|
let source = PesdePackageSource::new(index_url.clone());
|
||||||
source
|
source
|
||||||
.refresh(project)
|
.refresh(&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
})
|
||||||
.await
|
.await
|
||||||
.context("failed to refresh index")?;
|
.context("failed to refresh index")?;
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
use crate::cli::config::read_config;
|
use crate::cli::config::read_config;
|
||||||
|
use anyhow::Context;
|
||||||
use clap::{Args, Subcommand};
|
use clap::{Args, Subcommand};
|
||||||
use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME};
|
use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME};
|
||||||
|
|
||||||
|
@ -56,10 +57,11 @@ impl AuthSubcommand {
|
||||||
None => {
|
None => {
|
||||||
let index_name = self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
|
let index_name = self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
|
||||||
|
|
||||||
match manifest.unwrap().indices.get(index_name) {
|
manifest
|
||||||
Some(index) => index.clone(),
|
.unwrap()
|
||||||
None => anyhow::bail!("index {index_name} not found in manifest"),
|
.indices
|
||||||
}
|
.remove(index_name)
|
||||||
|
.with_context(|| format!("index {index_name} not found in manifest"))?
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -15,20 +15,19 @@ use pesde::{
|
||||||
names::PackageName,
|
names::PackageName,
|
||||||
source::{
|
source::{
|
||||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||||
traits::PackageSource,
|
traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
|
||||||
|
PackageSources,
|
||||||
},
|
},
|
||||||
Project,
|
Project, RefreshedSources,
|
||||||
};
|
};
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
|
||||||
env::current_dir,
|
env::current_dir,
|
||||||
ffi::OsString,
|
ffi::OsString,
|
||||||
io::{Stderr, Write},
|
io::{Stderr, Write},
|
||||||
process::Command,
|
process::Command,
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct ExecuteCommand {
|
pub struct ExecuteCommand {
|
||||||
|
@ -53,6 +52,8 @@ impl ExecuteCommand {
|
||||||
.unwrap()
|
.unwrap()
|
||||||
.replace(multi_progress.clone());
|
.replace(multi_progress.clone());
|
||||||
|
|
||||||
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
let (tempdir, bin_path) = reporters::run_with_reporter_and_writer(
|
let (tempdir, bin_path) = reporters::run_with_reporter_and_writer(
|
||||||
std::io::stderr(),
|
std::io::stderr(),
|
||||||
|multi_progress, root_progress, reporter| async {
|
|multi_progress, root_progress, reporter| async {
|
||||||
|
@ -67,8 +68,13 @@ impl ExecuteCommand {
|
||||||
}
|
}
|
||||||
.context("no index specified")?;
|
.context("no index specified")?;
|
||||||
let source = PesdePackageSource::new(index);
|
let source = PesdePackageSource::new(index);
|
||||||
source
|
refreshed_sources
|
||||||
.refresh(&project)
|
.refresh(
|
||||||
|
&PackageSources::Pesde(source.clone()),
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to refresh source")?;
|
.context("failed to refresh source")?;
|
||||||
|
|
||||||
|
@ -82,7 +88,14 @@ impl ExecuteCommand {
|
||||||
};
|
};
|
||||||
|
|
||||||
if let Some(res) = source
|
if let Some(res) = source
|
||||||
.resolve(&specifier, &project, TargetKind::Lune, &mut HashSet::new())
|
.resolve(
|
||||||
|
&specifier,
|
||||||
|
&ResolveOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
target: TargetKind::Lune,
|
||||||
|
refreshed_sources: refreshed_sources.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
.1
|
.1
|
||||||
|
@ -92,7 +105,14 @@ impl ExecuteCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
source
|
source
|
||||||
.resolve(&specifier, &project, TargetKind::Luau, &mut HashSet::new())
|
.resolve(
|
||||||
|
&specifier,
|
||||||
|
&ResolveOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
target: TargetKind::Luau,
|
||||||
|
refreshed_sources: refreshed_sources.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve package")?
|
.context("failed to resolve package")?
|
||||||
.1
|
.1
|
||||||
|
@ -120,7 +140,14 @@ impl ExecuteCommand {
|
||||||
);
|
);
|
||||||
|
|
||||||
let (fs, target) = source
|
let (fs, target) = source
|
||||||
.download(&pkg_ref, &project, &reqwest, Arc::new(()))
|
.download(
|
||||||
|
&pkg_ref,
|
||||||
|
&DownloadOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
reqwest: reqwest.clone(),
|
||||||
|
reporter: Arc::new(()),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to download package")?;
|
.context("failed to download package")?;
|
||||||
let bin_path = target.bin_path().context("package has no binary export")?;
|
let bin_path = target.bin_path().context("package has no binary export")?;
|
||||||
|
@ -129,10 +156,8 @@ impl ExecuteCommand {
|
||||||
.await
|
.await
|
||||||
.context("failed to write package contents")?;
|
.context("failed to write package contents")?;
|
||||||
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
|
||||||
|
|
||||||
let graph = project
|
let graph = project
|
||||||
.dependency_graph(None, &mut refreshed_sources, true)
|
.dependency_graph(None, refreshed_sources.clone(), true)
|
||||||
.await
|
.await
|
||||||
.context("failed to build dependency graph")?;
|
.context("failed to build dependency graph")?;
|
||||||
|
|
||||||
|
@ -152,7 +177,7 @@ impl ExecuteCommand {
|
||||||
&Arc::new(graph),
|
&Arc::new(graph),
|
||||||
DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
|
DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
|
||||||
.reporter(reporter)
|
.reporter(reporter)
|
||||||
.refreshed_sources(Mutex::new(refreshed_sources))
|
.refreshed_sources(refreshed_sources)
|
||||||
.prod(true)
|
.prod(true)
|
||||||
.write(true),
|
.write(true),
|
||||||
)
|
)
|
||||||
|
|
|
@ -11,12 +11,13 @@ use pesde::{
|
||||||
git_index::GitBasedSource,
|
git_index::GitBasedSource,
|
||||||
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::PackageSource,
|
traits::{PackageSource, RefreshOptions, ResolveOptions},
|
||||||
|
PackageSources,
|
||||||
},
|
},
|
||||||
Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
|
Project, RefreshedSources, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
|
||||||
};
|
};
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
use std::{collections::HashSet, fmt::Display, str::FromStr};
|
use std::{fmt::Display, str::FromStr};
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct InitCommand {}
|
pub struct InitCommand {}
|
||||||
|
@ -128,13 +129,21 @@ impl InitCommand {
|
||||||
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
|
||||||
[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
|
[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
|
||||||
|
|
||||||
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
if target_env.is_roblox()
|
if target_env.is_roblox()
|
||||||
|| inquire::prompt_confirmation(
|
|| inquire::prompt_confirmation(
|
||||||
"would you like to setup default Roblox compatibility scripts?",
|
"would you like to setup default Roblox compatibility scripts?",
|
||||||
)
|
)
|
||||||
.unwrap()
|
.unwrap()
|
||||||
{
|
{
|
||||||
PackageSource::refresh(&source, &project)
|
refreshed_sources
|
||||||
|
.refresh(
|
||||||
|
&PackageSources::Pesde(source.clone()),
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to refresh package source")?;
|
.context("failed to refresh package source")?;
|
||||||
let config = source
|
let config = source
|
||||||
|
@ -193,9 +202,11 @@ impl InitCommand {
|
||||||
index: None,
|
index: None,
|
||||||
target: None,
|
target: None,
|
||||||
},
|
},
|
||||||
&project,
|
&ResolveOptions {
|
||||||
TargetKind::Lune,
|
project: project.clone(),
|
||||||
&mut HashSet::new(),
|
target: TargetKind::Lune,
|
||||||
|
refreshed_sources,
|
||||||
|
},
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve scripts package")?
|
.context("failed to resolve scripts package")?
|
||||||
|
|
|
@ -3,17 +3,14 @@ use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use futures::future::try_join_all;
|
use futures::future::try_join_all;
|
||||||
use pesde::{
|
use pesde::{
|
||||||
refresh_sources,
|
|
||||||
source::{
|
source::{
|
||||||
refs::PackageRefs,
|
refs::PackageRefs,
|
||||||
specifiers::DependencySpecifiers,
|
specifiers::DependencySpecifiers,
|
||||||
traits::{PackageRef, PackageSource},
|
traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
|
||||||
},
|
},
|
||||||
Project,
|
Project, RefreshedSources,
|
||||||
};
|
};
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
use std::{collections::HashSet, sync::Arc};
|
|
||||||
use tokio::sync::Mutex;
|
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct OutdatedCommand {
|
pub struct OutdatedCommand {
|
||||||
|
@ -40,19 +37,7 @@ impl OutdatedCommand {
|
||||||
.context("failed to read manifest")?;
|
.context("failed to read manifest")?;
|
||||||
let manifest_target_kind = manifest.target.kind();
|
let manifest_target_kind = manifest.target.kind();
|
||||||
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
refresh_sources(
|
|
||||||
&project,
|
|
||||||
graph
|
|
||||||
.iter()
|
|
||||||
.flat_map(|(_, versions)| versions.iter())
|
|
||||||
.map(|(_, node)| node.node.pkg_ref.source()),
|
|
||||||
&mut refreshed_sources,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let refreshed_sources = Arc::new(Mutex::new(refreshed_sources));
|
|
||||||
|
|
||||||
if try_join_all(
|
if try_join_all(
|
||||||
graph
|
graph
|
||||||
|
@ -74,14 +59,22 @@ impl OutdatedCommand {
|
||||||
}
|
}
|
||||||
|
|
||||||
let source = node.node.pkg_ref.source();
|
let source = node.node.pkg_ref.source();
|
||||||
|
refreshed_sources
|
||||||
|
.refresh(
|
||||||
|
&source,
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
if !self.strict {
|
if !self.strict {
|
||||||
match specifier {
|
match &mut specifier {
|
||||||
DependencySpecifiers::Pesde(ref mut spec) => {
|
DependencySpecifiers::Pesde(spec) => {
|
||||||
spec.version = VersionReq::STAR;
|
spec.version = VersionReq::STAR;
|
||||||
}
|
}
|
||||||
#[cfg(feature = "wally-compat")]
|
#[cfg(feature = "wally-compat")]
|
||||||
DependencySpecifiers::Wally(ref mut spec) => {
|
DependencySpecifiers::Wally(spec) => {
|
||||||
spec.version = VersionReq::STAR;
|
spec.version = VersionReq::STAR;
|
||||||
}
|
}
|
||||||
DependencySpecifiers::Git(_) => {}
|
DependencySpecifiers::Git(_) => {}
|
||||||
|
@ -92,9 +85,11 @@ impl OutdatedCommand {
|
||||||
let version_id = source
|
let version_id = source
|
||||||
.resolve(
|
.resolve(
|
||||||
&specifier,
|
&specifier,
|
||||||
&project,
|
&ResolveOptions {
|
||||||
manifest_target_kind,
|
project: project.clone(),
|
||||||
&mut *refreshed_sources.lock().await,
|
target: manifest_target_kind,
|
||||||
|
refreshed_sources: refreshed_sources.clone(),
|
||||||
|
},
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve package versions")?
|
.context("failed to resolve package versions")?
|
||||||
|
|
|
@ -9,7 +9,7 @@ use pesde::{
|
||||||
patches::setup_patches_repo,
|
patches::setup_patches_repo,
|
||||||
source::{
|
source::{
|
||||||
refs::PackageRefs,
|
refs::PackageRefs,
|
||||||
traits::{PackageRef, PackageSource},
|
traits::{DownloadOptions, PackageRef, PackageSource},
|
||||||
},
|
},
|
||||||
Project, MANIFEST_FILE_NAME,
|
Project, MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
|
@ -51,7 +51,14 @@ impl PatchCommand {
|
||||||
fs::create_dir_all(&directory).await?;
|
fs::create_dir_all(&directory).await?;
|
||||||
|
|
||||||
source
|
source
|
||||||
.download(&node.node.pkg_ref, &project, &reqwest, Arc::new(()))
|
.download(
|
||||||
|
&node.node.pkg_ref,
|
||||||
|
&DownloadOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
reqwest,
|
||||||
|
reporter: Arc::new(()),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await?
|
.await?
|
||||||
.0
|
.0
|
||||||
.write_to(&directory, project.cas_dir(), false)
|
.write_to(&directory, project.cas_dir(), false)
|
||||||
|
|
|
@ -22,9 +22,16 @@ use pesde::{
|
||||||
},
|
},
|
||||||
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
|
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
|
use pesde::{
|
||||||
|
source::{
|
||||||
|
traits::{RefreshOptions, ResolveOptions},
|
||||||
|
PackageSources,
|
||||||
|
},
|
||||||
|
RefreshedSources,
|
||||||
|
};
|
||||||
use reqwest::{header::AUTHORIZATION, StatusCode};
|
use reqwest::{header::AUTHORIZATION, StatusCode};
|
||||||
use semver::VersionReq;
|
use semver::VersionReq;
|
||||||
use std::{collections::HashSet, path::PathBuf};
|
use std::path::PathBuf;
|
||||||
use tempfile::Builder;
|
use tempfile::Builder;
|
||||||
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
|
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
|
||||||
|
|
||||||
|
@ -365,6 +372,8 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
for specifier in manifest
|
for specifier in manifest
|
||||||
.dependencies
|
.dependencies
|
||||||
.values_mut()
|
.values_mut()
|
||||||
|
@ -406,7 +415,14 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
||||||
DependencySpecifiers::Git(_) => {}
|
DependencySpecifiers::Git(_) => {}
|
||||||
DependencySpecifiers::Workspace(spec) => {
|
DependencySpecifiers::Workspace(spec) => {
|
||||||
let pkg_ref = WorkspacePackageSource
|
let pkg_ref = WorkspacePackageSource
|
||||||
.resolve(spec, project, target_kind, &mut HashSet::new())
|
.resolve(
|
||||||
|
spec,
|
||||||
|
&ResolveOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
target: target_kind,
|
||||||
|
refreshed_sources: refreshed_sources.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to resolve workspace package")?
|
.context("failed to resolve workspace package")?
|
||||||
.1
|
.1
|
||||||
|
@ -575,7 +591,13 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
|
||||||
.get(&self.index)
|
.get(&self.index)
|
||||||
.context(format!("missing index {}", self.index))?;
|
.context(format!("missing index {}", self.index))?;
|
||||||
let source = PesdePackageSource::new(index_url.clone());
|
let source = PesdePackageSource::new(index_url.clone());
|
||||||
PackageSource::refresh(&source, project)
|
refreshed_sources
|
||||||
|
.refresh(
|
||||||
|
&PackageSources::Pesde(source.clone()),
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
.await
|
.await
|
||||||
.context("failed to refresh source")?;
|
.context("failed to refresh source")?;
|
||||||
let config = source
|
let config = source
|
||||||
|
|
|
@ -9,8 +9,7 @@ use pesde::{
|
||||||
};
|
};
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::PathBuf,
|
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::Path, process::Command,
|
||||||
process::Command,
|
|
||||||
};
|
};
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
|
@ -26,7 +25,7 @@ pub struct RunCommand {
|
||||||
|
|
||||||
impl RunCommand {
|
impl RunCommand {
|
||||||
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||||
let run = |root: PathBuf, file_path: PathBuf| {
|
let run = |root: &Path, file_path: &Path| {
|
||||||
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
||||||
caller
|
caller
|
||||||
.write_all(
|
.write_all(
|
||||||
|
@ -55,8 +54,8 @@ impl RunCommand {
|
||||||
let Some(package_or_script) = self.package_or_script else {
|
let Some(package_or_script) = self.package_or_script else {
|
||||||
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
|
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
|
||||||
run(
|
run(
|
||||||
project.package_dir().to_owned(),
|
project.package_dir(),
|
||||||
script_path.to_path(project.package_dir()),
|
&script_path.to_path(project.package_dir()),
|
||||||
);
|
);
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
@ -99,7 +98,7 @@ impl RunCommand {
|
||||||
|
|
||||||
let path = bin_path.to_path(&container_folder);
|
let path = bin_path.to_path(&container_folder);
|
||||||
|
|
||||||
run(path.clone(), path);
|
run(&path, &path);
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -107,8 +106,8 @@ impl RunCommand {
|
||||||
if let Ok(manifest) = project.deser_manifest().await {
|
if let Ok(manifest) = project.deser_manifest().await {
|
||||||
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
|
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
|
||||||
run(
|
run(
|
||||||
project.package_dir().to_path_buf(),
|
project.package_dir(),
|
||||||
script_path.to_path(project.package_dir()),
|
&script_path.to_path(project.package_dir()),
|
||||||
);
|
);
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
@ -170,7 +169,7 @@ impl RunCommand {
|
||||||
project.package_dir().to_path_buf()
|
project.package_dir().to_path_buf()
|
||||||
};
|
};
|
||||||
|
|
||||||
run(root, path);
|
run(&root, &path);
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -46,7 +46,7 @@ impl SelfUpgradeCommand {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true)
|
let path = get_or_download_version(&reqwest, TagInfo::Complete(latest_version), true)
|
||||||
.await?
|
.await?
|
||||||
.unwrap();
|
.unwrap();
|
||||||
update_bin_exe(&path).await?;
|
update_bin_exe(&path).await?;
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use std::{
|
use std::{
|
||||||
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
|
collections::{BTreeMap, BTreeSet, HashMap},
|
||||||
num::NonZeroUsize,
|
num::NonZeroUsize,
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
time::Instant,
|
time::Instant,
|
||||||
|
@ -13,9 +13,9 @@ use pesde::{
|
||||||
download_and_link::{filter_graph, DownloadAndLinkHooks, DownloadAndLinkOptions},
|
download_and_link::{filter_graph, DownloadAndLinkHooks, DownloadAndLinkOptions},
|
||||||
lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
|
lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
|
||||||
manifest::{target::TargetKind, DependencyType},
|
manifest::{target::TargetKind, DependencyType},
|
||||||
Project, MANIFEST_FILE_NAME,
|
Project, RefreshedSources, MANIFEST_FILE_NAME,
|
||||||
};
|
};
|
||||||
use tokio::{sync::Mutex, task::JoinSet};
|
use tokio::task::JoinSet;
|
||||||
|
|
||||||
use crate::cli::{
|
use crate::cli::{
|
||||||
bin_dir,
|
bin_dir,
|
||||||
|
@ -178,7 +178,7 @@ pub async fn install(
|
||||||
) -> anyhow::Result<()> {
|
) -> anyhow::Result<()> {
|
||||||
let start = Instant::now();
|
let start = Instant::now();
|
||||||
|
|
||||||
let mut refreshed_sources = HashSet::new();
|
let refreshed_sources = RefreshedSources::new();
|
||||||
|
|
||||||
let manifest = project
|
let manifest = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
|
@ -276,7 +276,7 @@ pub async fn install(
|
||||||
let graph = project
|
let graph = project
|
||||||
.dependency_graph(
|
.dependency_graph(
|
||||||
old_graph.as_ref().filter(|_| options.use_lockfile),
|
old_graph.as_ref().filter(|_| options.use_lockfile),
|
||||||
&mut refreshed_sources,
|
refreshed_sources.clone(),
|
||||||
false,
|
false,
|
||||||
)
|
)
|
||||||
.await
|
.await
|
||||||
|
@ -298,7 +298,7 @@ pub async fn install(
|
||||||
DownloadAndLinkOptions::<CliReporter, InstallHooks>::new(reqwest.clone())
|
DownloadAndLinkOptions::<CliReporter, InstallHooks>::new(reqwest.clone())
|
||||||
.reporter(reporter.clone())
|
.reporter(reporter.clone())
|
||||||
.hooks(hooks)
|
.hooks(hooks)
|
||||||
.refreshed_sources(Mutex::new(refreshed_sources))
|
.refreshed_sources(refreshed_sources)
|
||||||
.prod(options.prod)
|
.prod(options.prod)
|
||||||
.write(options.write)
|
.write(options.write)
|
||||||
.network_concurrency(options.network_concurrency),
|
.network_concurrency(options.network_concurrency),
|
||||||
|
|
|
@ -251,7 +251,7 @@ pub enum TagInfo {
|
||||||
#[instrument(skip(reqwest), level = "trace")]
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
pub async fn get_or_download_version(
|
pub async fn get_or_download_version(
|
||||||
reqwest: &reqwest::Client,
|
reqwest: &reqwest::Client,
|
||||||
tag: &TagInfo,
|
tag: TagInfo,
|
||||||
always_give_path: bool,
|
always_give_path: bool,
|
||||||
) -> anyhow::Result<Option<PathBuf>> {
|
) -> anyhow::Result<Option<PathBuf>> {
|
||||||
let path = home_dir()?.join("versions");
|
let path = home_dir()?.join("versions");
|
||||||
|
@ -259,7 +259,7 @@ pub async fn get_or_download_version(
|
||||||
.await
|
.await
|
||||||
.context("failed to create versions directory")?;
|
.context("failed to create versions directory")?;
|
||||||
|
|
||||||
let version = match tag {
|
let version = match &tag {
|
||||||
TagInfo::Complete(version) => version,
|
TagInfo::Complete(version) => version,
|
||||||
// don't fetch the version since it could be cached
|
// don't fetch the version since it could be cached
|
||||||
TagInfo::Incomplete(version) => version,
|
TagInfo::Incomplete(version) => version,
|
||||||
|
@ -290,9 +290,9 @@ pub async fn get_or_download_version(
|
||||||
.context("failed to copy current executable to version directory")?;
|
.context("failed to copy current executable to version directory")?;
|
||||||
} else {
|
} else {
|
||||||
let version = match tag {
|
let version = match tag {
|
||||||
TagInfo::Complete(version) => version.clone(),
|
TagInfo::Complete(version) => version,
|
||||||
TagInfo::Incomplete(version) => {
|
TagInfo::Incomplete(version) => {
|
||||||
get_remote_version(reqwest, VersionType::Specific(version.clone()))
|
get_remote_version(reqwest, VersionType::Specific(version))
|
||||||
.await
|
.await
|
||||||
.context("failed to get remote version")?
|
.context("failed to get remote version")?
|
||||||
}
|
}
|
||||||
|
|
198
src/download.rs
198
src/download.rs
|
@ -2,19 +2,17 @@ use crate::{
|
||||||
lockfile::{DependencyGraph, DownloadedDependencyGraphNode},
|
lockfile::{DependencyGraph, DownloadedDependencyGraphNode},
|
||||||
manifest::DependencyType,
|
manifest::DependencyType,
|
||||||
names::PackageNames,
|
names::PackageNames,
|
||||||
refresh_sources,
|
|
||||||
reporters::{DownloadProgressReporter, DownloadsReporter},
|
reporters::{DownloadProgressReporter, DownloadsReporter},
|
||||||
source::{
|
source::{
|
||||||
traits::{PackageRef, PackageSource},
|
traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
|
||||||
version_id::VersionId,
|
version_id::VersionId,
|
||||||
PackageSources,
|
|
||||||
},
|
},
|
||||||
Project, PACKAGES_CONTAINER_NAME,
|
Project, RefreshedSources, PACKAGES_CONTAINER_NAME,
|
||||||
};
|
};
|
||||||
use async_stream::try_stream;
|
use async_stream::try_stream;
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::Stream;
|
use futures::Stream;
|
||||||
use std::{collections::HashSet, num::NonZeroUsize, sync::Arc};
|
use std::{num::NonZeroUsize, sync::Arc};
|
||||||
use tokio::{sync::Semaphore, task::JoinSet};
|
use tokio::{sync::Semaphore, task::JoinSet};
|
||||||
use tracing::{instrument, Instrument};
|
use tracing::{instrument, Instrument};
|
||||||
|
|
||||||
|
@ -25,6 +23,8 @@ pub struct DownloadGraphOptions<Reporter> {
|
||||||
pub reqwest: reqwest::Client,
|
pub reqwest: reqwest::Client,
|
||||||
/// The downloads reporter.
|
/// The downloads reporter.
|
||||||
pub reporter: Option<Arc<Reporter>>,
|
pub reporter: Option<Arc<Reporter>>,
|
||||||
|
/// The refreshed sources.
|
||||||
|
pub refreshed_sources: RefreshedSources,
|
||||||
/// Whether to skip dev dependencies.
|
/// Whether to skip dev dependencies.
|
||||||
pub prod: bool,
|
pub prod: bool,
|
||||||
/// Whether to write the downloaded packages to disk.
|
/// Whether to write the downloaded packages to disk.
|
||||||
|
@ -44,6 +44,7 @@ where
|
||||||
Self {
|
Self {
|
||||||
reqwest,
|
reqwest,
|
||||||
reporter: None,
|
reporter: None,
|
||||||
|
refreshed_sources: Default::default(),
|
||||||
prod: false,
|
prod: false,
|
||||||
write: false,
|
write: false,
|
||||||
wally: false,
|
wally: false,
|
||||||
|
@ -57,6 +58,12 @@ where
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Sets the refreshed sources.
|
||||||
|
pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
|
||||||
|
self.refreshed_sources = refreshed_sources;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
/// Sets whether to skip dev dependencies.
|
/// Sets whether to skip dev dependencies.
|
||||||
pub fn prod(mut self, prod: bool) -> Self {
|
pub fn prod(mut self, prod: bool) -> Self {
|
||||||
self.prod = prod;
|
self.prod = prod;
|
||||||
|
@ -87,6 +94,7 @@ impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
|
||||||
Self {
|
Self {
|
||||||
reqwest: self.reqwest.clone(),
|
reqwest: self.reqwest.clone(),
|
||||||
reporter: self.reporter.clone(),
|
reporter: self.reporter.clone(),
|
||||||
|
refreshed_sources: self.refreshed_sources.clone(),
|
||||||
prod: self.prod,
|
prod: self.prod,
|
||||||
write: self.write,
|
write: self.write,
|
||||||
wally: self.wally,
|
wally: self.wally,
|
||||||
|
@ -101,7 +109,6 @@ impl Project {
|
||||||
pub async fn download_graph<Reporter>(
|
pub async fn download_graph<Reporter>(
|
||||||
&self,
|
&self,
|
||||||
graph: &DependencyGraph,
|
graph: &DependencyGraph,
|
||||||
refreshed_sources: &mut HashSet<PackageSources>,
|
|
||||||
options: DownloadGraphOptions<Reporter>,
|
options: DownloadGraphOptions<Reporter>,
|
||||||
) -> Result<
|
) -> Result<
|
||||||
impl Stream<
|
impl Stream<
|
||||||
|
@ -118,6 +125,7 @@ impl Project {
|
||||||
let DownloadGraphOptions {
|
let DownloadGraphOptions {
|
||||||
reqwest,
|
reqwest,
|
||||||
reporter,
|
reporter,
|
||||||
|
refreshed_sources,
|
||||||
prod,
|
prod,
|
||||||
write,
|
write,
|
||||||
wally,
|
wally,
|
||||||
|
@ -126,111 +134,111 @@ impl Project {
|
||||||
|
|
||||||
let manifest = self.deser_manifest().await?;
|
let manifest = self.deser_manifest().await?;
|
||||||
let manifest_target_kind = manifest.target.kind();
|
let manifest_target_kind = manifest.target.kind();
|
||||||
let project = Arc::new(self.clone());
|
|
||||||
|
|
||||||
refresh_sources(
|
|
||||||
self,
|
|
||||||
graph
|
|
||||||
.iter()
|
|
||||||
.flat_map(|(_, versions)| versions.iter())
|
|
||||||
.map(|(_, node)| node.pkg_ref.source()),
|
|
||||||
refreshed_sources,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
|
|
||||||
let mut tasks = JoinSet::<Result<_, errors::DownloadGraphError>>::new();
|
|
||||||
let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));
|
let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));
|
||||||
|
|
||||||
for (name, versions) in graph {
|
let mut tasks = graph
|
||||||
for (version_id, node) in versions {
|
.iter()
|
||||||
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
|
.flat_map(|(name, versions)| {
|
||||||
if node.pkg_ref.like_wally() != wally {
|
versions
|
||||||
continue;
|
.iter()
|
||||||
}
|
.map(|(version_id, node)| (name.clone(), version_id.clone(), node.clone()))
|
||||||
|
})
|
||||||
let name = name.clone();
|
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
|
||||||
let version_id = version_id.clone();
|
.filter(|(_, _, node)| node.pkg_ref.like_wally() == wally)
|
||||||
let node = node.clone();
|
.map(|(name, version_id, node)| {
|
||||||
|
|
||||||
let span = tracing::info_span!(
|
let span = tracing::info_span!(
|
||||||
"download",
|
"download",
|
||||||
name = name.to_string(),
|
name = name.to_string(),
|
||||||
version_id = version_id.to_string()
|
version_id = version_id.to_string()
|
||||||
);
|
);
|
||||||
|
|
||||||
let project = project.clone();
|
let project = self.clone();
|
||||||
let reqwest = reqwest.clone();
|
let reqwest = reqwest.clone();
|
||||||
let reporter = reporter.clone();
|
let reporter = reporter.clone();
|
||||||
|
let refreshed_sources = refreshed_sources.clone();
|
||||||
let package_dir = project.package_dir().to_path_buf();
|
let package_dir = project.package_dir().to_path_buf();
|
||||||
let semaphore = semaphore.clone();
|
let semaphore = semaphore.clone();
|
||||||
|
|
||||||
tasks.spawn(
|
async move {
|
||||||
async move {
|
let display_name = format!("{name}@{version_id}");
|
||||||
let display_name = format!("{name}@{version_id}");
|
let progress_reporter = reporter
|
||||||
let progress_reporter = reporter
|
.as_deref()
|
||||||
.as_deref()
|
.map(|reporter| reporter.report_download(&display_name));
|
||||||
.map(|reporter| reporter.report_download(&display_name));
|
|
||||||
|
|
||||||
let _permit = semaphore.acquire().await;
|
let _permit = semaphore.acquire().await;
|
||||||
|
|
||||||
if let Some(ref progress_reporter) = progress_reporter {
|
if let Some(ref progress_reporter) = progress_reporter {
|
||||||
progress_reporter.report_start();
|
progress_reporter.report_start();
|
||||||
}
|
|
||||||
|
|
||||||
let source = node.pkg_ref.source();
|
|
||||||
let container_folder = node.container_folder(
|
|
||||||
&package_dir
|
|
||||||
.join(manifest_target_kind.packages_folder(version_id.target()))
|
|
||||||
.join(PACKAGES_CONTAINER_NAME),
|
|
||||||
&name,
|
|
||||||
version_id.version(),
|
|
||||||
);
|
|
||||||
|
|
||||||
fs::create_dir_all(&container_folder).await?;
|
|
||||||
|
|
||||||
let project = project.clone();
|
|
||||||
|
|
||||||
tracing::debug!("downloading");
|
|
||||||
|
|
||||||
let (fs, target) = match progress_reporter {
|
|
||||||
Some(progress_reporter) => {
|
|
||||||
source
|
|
||||||
.download(
|
|
||||||
&node.pkg_ref,
|
|
||||||
&project,
|
|
||||||
&reqwest,
|
|
||||||
Arc::new(progress_reporter),
|
|
||||||
)
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
source
|
|
||||||
.download(&node.pkg_ref, &project, &reqwest, Arc::new(()))
|
|
||||||
.await
|
|
||||||
}
|
|
||||||
}
|
|
||||||
.map_err(Box::new)?;
|
|
||||||
|
|
||||||
tracing::debug!("downloaded");
|
|
||||||
|
|
||||||
if write {
|
|
||||||
if !prod || node.resolved_ty != DependencyType::Dev {
|
|
||||||
fs.write_to(container_folder, project.cas_dir(), true)
|
|
||||||
.await?;
|
|
||||||
} else {
|
|
||||||
tracing::debug!(
|
|
||||||
"skipping write to disk, dev dependency in prod mode"
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let downloaded_node = DownloadedDependencyGraphNode { node, target };
|
|
||||||
Ok((downloaded_node, name, version_id))
|
|
||||||
}
|
}
|
||||||
.instrument(span),
|
|
||||||
);
|
let source = node.pkg_ref.source();
|
||||||
}
|
refreshed_sources
|
||||||
}
|
.refresh(
|
||||||
|
&source,
|
||||||
|
&RefreshOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
|
let container_folder = node.container_folder(
|
||||||
|
&package_dir
|
||||||
|
.join(manifest_target_kind.packages_folder(version_id.target()))
|
||||||
|
.join(PACKAGES_CONTAINER_NAME),
|
||||||
|
&name,
|
||||||
|
version_id.version(),
|
||||||
|
);
|
||||||
|
|
||||||
|
fs::create_dir_all(&container_folder).await?;
|
||||||
|
|
||||||
|
tracing::debug!("downloading");
|
||||||
|
|
||||||
|
let (fs, target) = match progress_reporter {
|
||||||
|
Some(progress_reporter) => {
|
||||||
|
source
|
||||||
|
.download(
|
||||||
|
&node.pkg_ref,
|
||||||
|
&DownloadOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
reqwest,
|
||||||
|
reporter: Arc::new(progress_reporter),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
None => {
|
||||||
|
source
|
||||||
|
.download(
|
||||||
|
&node.pkg_ref,
|
||||||
|
&DownloadOptions {
|
||||||
|
project: project.clone(),
|
||||||
|
reqwest,
|
||||||
|
reporter: Arc::new(()),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
}
|
||||||
|
}
|
||||||
|
.map_err(Box::new)?;
|
||||||
|
|
||||||
|
tracing::debug!("downloaded");
|
||||||
|
|
||||||
|
if write {
|
||||||
|
if !prod || node.resolved_ty != DependencyType::Dev {
|
||||||
|
fs.write_to(container_folder, project.cas_dir(), true)
|
||||||
|
.await?;
|
||||||
|
} else {
|
||||||
|
tracing::debug!("skipping write to disk, dev dependency in prod mode");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let downloaded_node = DownloadedDependencyGraphNode { node, target };
|
||||||
|
Ok((downloaded_node, name, version_id))
|
||||||
|
}
|
||||||
|
.instrument(span)
|
||||||
|
})
|
||||||
|
.collect::<JoinSet<Result<_, errors::DownloadGraphError>>>();
|
||||||
|
|
||||||
let stream = try_stream! {
|
let stream = try_stream! {
|
||||||
while let Some(res) = tasks.join_next().await {
|
while let Some(res) = tasks.join_next().await {
|
||||||
|
@ -256,7 +264,7 @@ pub mod errors {
|
||||||
|
|
||||||
/// An error occurred refreshing a package source
|
/// An error occurred refreshing a package source
|
||||||
#[error("failed to refresh package source")]
|
#[error("failed to refresh package source")]
|
||||||
RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
|
RefreshFailed(#[from] crate::source::errors::RefreshError),
|
||||||
|
|
||||||
/// Error interacting with the filesystem
|
/// Error interacting with the filesystem
|
||||||
#[error("error interacting with the filesystem")]
|
#[error("error interacting with the filesystem")]
|
||||||
|
|
|
@ -3,18 +3,15 @@ use crate::{
|
||||||
lockfile::{DependencyGraph, DownloadedGraph},
|
lockfile::{DependencyGraph, DownloadedGraph},
|
||||||
manifest::DependencyType,
|
manifest::DependencyType,
|
||||||
reporters::DownloadsReporter,
|
reporters::DownloadsReporter,
|
||||||
source::PackageSources,
|
Project, RefreshedSources,
|
||||||
Project,
|
|
||||||
};
|
};
|
||||||
use futures::TryStreamExt;
|
use futures::TryStreamExt;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
|
||||||
convert::Infallible,
|
convert::Infallible,
|
||||||
future::{self, Future},
|
future::{self, Future},
|
||||||
num::NonZeroUsize,
|
num::NonZeroUsize,
|
||||||
sync::Arc,
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use tokio::sync::Mutex;
|
|
||||||
use tracing::{instrument, Instrument};
|
use tracing::{instrument, Instrument};
|
||||||
|
|
||||||
/// Filters a graph to only include production dependencies, if `prod` is `true`
|
/// Filters a graph to only include production dependencies, if `prod` is `true`
|
||||||
|
@ -90,7 +87,7 @@ pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
|
||||||
/// The download and link hooks.
|
/// The download and link hooks.
|
||||||
pub hooks: Option<Arc<Hooks>>,
|
pub hooks: Option<Arc<Hooks>>,
|
||||||
/// The refreshed sources.
|
/// The refreshed sources.
|
||||||
pub refreshed_sources: Arc<Mutex<HashSet<PackageSources>>>,
|
pub refreshed_sources: RefreshedSources,
|
||||||
/// Whether to skip dev dependencies.
|
/// Whether to skip dev dependencies.
|
||||||
pub prod: bool,
|
pub prod: bool,
|
||||||
/// Whether to write the downloaded packages to disk.
|
/// Whether to write the downloaded packages to disk.
|
||||||
|
@ -130,11 +127,8 @@ where
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Sets the refreshed sources.
|
/// Sets the refreshed sources.
|
||||||
pub fn refreshed_sources(
|
pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
|
||||||
mut self,
|
self.refreshed_sources = refreshed_sources;
|
||||||
refreshed_sources: impl Into<Arc<Mutex<HashSet<PackageSources>>>>,
|
|
||||||
) -> Self {
|
|
||||||
self.refreshed_sources = refreshed_sources.into();
|
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -196,10 +190,10 @@ impl Project {
|
||||||
let graph = graph.clone();
|
let graph = graph.clone();
|
||||||
let reqwest = reqwest.clone();
|
let reqwest = reqwest.clone();
|
||||||
|
|
||||||
let mut refreshed_sources = refreshed_sources.lock().await;
|
|
||||||
let mut downloaded_graph = DownloadedGraph::new();
|
let mut downloaded_graph = DownloadedGraph::new();
|
||||||
|
|
||||||
let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
|
let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
|
||||||
|
.refreshed_sources(refreshed_sources.clone())
|
||||||
.prod(prod)
|
.prod(prod)
|
||||||
.write(write)
|
.write(write)
|
||||||
.network_concurrency(network_concurrency);
|
.network_concurrency(network_concurrency);
|
||||||
|
@ -209,22 +203,18 @@ impl Project {
|
||||||
}
|
}
|
||||||
|
|
||||||
// step 1. download pesde dependencies
|
// step 1. download pesde dependencies
|
||||||
self.download_graph(
|
self.download_graph(&graph, download_graph_options.clone())
|
||||||
&graph,
|
.instrument(tracing::debug_span!("download (pesde)"))
|
||||||
&mut refreshed_sources,
|
.await?
|
||||||
download_graph_options.clone(),
|
.try_for_each(|(downloaded_node, name, version_id)| {
|
||||||
)
|
downloaded_graph
|
||||||
.instrument(tracing::debug_span!("download (pesde)"))
|
.entry(name)
|
||||||
.await?
|
.or_default()
|
||||||
.try_for_each(|(downloaded_node, name, version_id)| {
|
.insert(version_id, downloaded_node);
|
||||||
downloaded_graph
|
|
||||||
.entry(name)
|
|
||||||
.or_default()
|
|
||||||
.insert(version_id, downloaded_node);
|
|
||||||
|
|
||||||
future::ready(Ok(()))
|
future::ready(Ok(()))
|
||||||
})
|
})
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// step 2. link pesde dependencies. do so without types
|
// step 2. link pesde dependencies. do so without types
|
||||||
if write {
|
if write {
|
||||||
|
@ -246,22 +236,18 @@ impl Project {
|
||||||
}
|
}
|
||||||
|
|
||||||
// step 3. download wally dependencies
|
// step 3. download wally dependencies
|
||||||
self.download_graph(
|
self.download_graph(&graph, download_graph_options.clone().wally(true))
|
||||||
&graph,
|
.instrument(tracing::debug_span!("download (wally)"))
|
||||||
&mut refreshed_sources,
|
.await?
|
||||||
download_graph_options.clone().wally(true),
|
.try_for_each(|(downloaded_node, name, version_id)| {
|
||||||
)
|
downloaded_graph
|
||||||
.instrument(tracing::debug_span!("download (wally)"))
|
.entry(name)
|
||||||
.await?
|
.or_default()
|
||||||
.try_for_each(|(downloaded_node, name, version_id)| {
|
.insert(version_id, downloaded_node);
|
||||||
downloaded_graph
|
|
||||||
.entry(name)
|
|
||||||
.or_default()
|
|
||||||
.insert(version_id, downloaded_node);
|
|
||||||
|
|
||||||
future::ready(Ok(()))
|
future::ready(Ok(()))
|
||||||
})
|
})
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
// step 4. link ALL dependencies. do so with types
|
// step 4. link ALL dependencies. do so with types
|
||||||
if write {
|
if write {
|
||||||
|
|
123
src/lib.rs
123
src/lib.rs
|
@ -6,16 +6,21 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
lockfile::Lockfile,
|
lockfile::Lockfile,
|
||||||
manifest::Manifest,
|
manifest::Manifest,
|
||||||
source::{traits::PackageSource, PackageSources},
|
source::{
|
||||||
|
traits::{PackageSource, RefreshOptions},
|
||||||
|
PackageSources,
|
||||||
|
},
|
||||||
};
|
};
|
||||||
use async_stream::stream;
|
use async_stream::stream;
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::{future::try_join_all, Stream};
|
use futures::Stream;
|
||||||
use gix::sec::identity::Account;
|
use gix::sec::identity::Account;
|
||||||
use std::{
|
use std::{
|
||||||
collections::{HashMap, HashSet},
|
collections::{HashMap, HashSet},
|
||||||
fmt::Debug,
|
fmt::Debug,
|
||||||
|
hash::{Hash, Hasher},
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
use wax::Pattern;
|
use wax::Pattern;
|
||||||
|
@ -56,13 +61,18 @@ pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found"
|
||||||
/// The folder in which scripts are linked
|
/// The folder in which scripts are linked
|
||||||
pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";
|
pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";
|
||||||
|
|
||||||
/// Struct containing the authentication configuration
|
#[derive(Debug, Default)]
|
||||||
#[derive(Debug, Default, Clone)]
|
struct AuthConfigShared {
|
||||||
pub struct AuthConfig {
|
|
||||||
tokens: HashMap<gix::Url, String>,
|
tokens: HashMap<gix::Url, String>,
|
||||||
git_credentials: Option<Account>,
|
git_credentials: Option<Account>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Struct containing the authentication configuration
|
||||||
|
#[derive(Debug, Clone, Default)]
|
||||||
|
pub struct AuthConfig {
|
||||||
|
shared: Arc<AuthConfigShared>,
|
||||||
|
}
|
||||||
|
|
||||||
impl AuthConfig {
|
impl AuthConfig {
|
||||||
/// Create a new `AuthConfig`
|
/// Create a new `AuthConfig`
|
||||||
pub fn new() -> Self {
|
pub fn new() -> Self {
|
||||||
|
@ -70,11 +80,12 @@ impl AuthConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Set the tokens
|
/// Set the tokens
|
||||||
|
/// Panics if the `AuthConfig` is shared
|
||||||
pub fn with_tokens<I: IntoIterator<Item = (gix::Url, S)>, S: AsRef<str>>(
|
pub fn with_tokens<I: IntoIterator<Item = (gix::Url, S)>, S: AsRef<str>>(
|
||||||
mut self,
|
mut self,
|
||||||
tokens: I,
|
tokens: I,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
self.tokens = tokens
|
Arc::get_mut(&mut self.shared).unwrap().tokens = tokens
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|(url, s)| (url, s.as_ref().to_string()))
|
.map(|(url, s)| (url, s.as_ref().to_string()))
|
||||||
.collect();
|
.collect();
|
||||||
|
@ -82,25 +93,25 @@ impl AuthConfig {
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Set the git credentials
|
/// Set the git credentials
|
||||||
|
/// Panics if the `AuthConfig` is shared
|
||||||
pub fn with_git_credentials(mut self, git_credentials: Option<Account>) -> Self {
|
pub fn with_git_credentials(mut self, git_credentials: Option<Account>) -> Self {
|
||||||
self.git_credentials = git_credentials;
|
Arc::get_mut(&mut self.shared).unwrap().git_credentials = git_credentials;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the tokens
|
/// Get the tokens
|
||||||
pub fn tokens(&self) -> &HashMap<gix::Url, String> {
|
pub fn tokens(&self) -> &HashMap<gix::Url, String> {
|
||||||
&self.tokens
|
&self.shared.tokens
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Get the git credentials
|
/// Get the git credentials
|
||||||
pub fn git_credentials(&self) -> Option<&Account> {
|
pub fn git_credentials(&self) -> Option<&Account> {
|
||||||
self.git_credentials.as_ref()
|
self.shared.git_credentials.as_ref()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The main struct of the pesde library, representing a project
|
#[derive(Debug)]
|
||||||
#[derive(Debug, Clone)]
|
struct ProjectShared {
|
||||||
pub struct Project {
|
|
||||||
package_dir: PathBuf,
|
package_dir: PathBuf,
|
||||||
workspace_dir: Option<PathBuf>,
|
workspace_dir: Option<PathBuf>,
|
||||||
data_dir: PathBuf,
|
data_dir: PathBuf,
|
||||||
|
@ -108,6 +119,13 @@ pub struct Project {
|
||||||
cas_dir: PathBuf,
|
cas_dir: PathBuf,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The main struct of the pesde library, representing a project
|
||||||
|
/// Unlike `ProjectShared`, this struct is `Send` and `Sync` and is cheap to clone because it is `Arc`-backed
|
||||||
|
#[derive(Debug, Clone)]
|
||||||
|
pub struct Project {
|
||||||
|
shared: Arc<ProjectShared>,
|
||||||
|
}
|
||||||
|
|
||||||
impl Project {
|
impl Project {
|
||||||
/// Create a new `Project`
|
/// Create a new `Project`
|
||||||
pub fn new<P: AsRef<Path>, Q: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(
|
 pub fn new<P: AsRef<Path>, Q: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(

@@ -118,43 +136,45 @@ impl Project {
         auth_config: AuthConfig,
     ) -> Self {
         Project {
-            package_dir: package_dir.as_ref().to_path_buf(),
-            workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
-            data_dir: data_dir.as_ref().to_path_buf(),
-            auth_config,
-            cas_dir: cas_dir.as_ref().to_path_buf(),
+            shared: Arc::new(ProjectShared {
+                package_dir: package_dir.as_ref().to_path_buf(),
+                workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
+                data_dir: data_dir.as_ref().to_path_buf(),
+                auth_config,
+                cas_dir: cas_dir.as_ref().to_path_buf(),
+            }),
         }
     }

     /// The directory of the package
     pub fn package_dir(&self) -> &Path {
-        &self.package_dir
+        &self.shared.package_dir
     }

     /// The directory of the workspace this package belongs to, if any
     pub fn workspace_dir(&self) -> Option<&Path> {
-        self.workspace_dir.as_deref()
+        self.shared.workspace_dir.as_deref()
     }

     /// The directory to store general-purpose data
     pub fn data_dir(&self) -> &Path {
-        &self.data_dir
+        &self.shared.data_dir
     }

     /// The authentication configuration
     pub fn auth_config(&self) -> &AuthConfig {
-        &self.auth_config
+        &self.shared.auth_config
     }

     /// The CAS (content-addressable storage) directory
     pub fn cas_dir(&self) -> &Path {
-        &self.cas_dir
+        &self.shared.cas_dir
     }

     /// Read the manifest file
     #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
-        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
+        let string = fs::read_to_string(self.package_dir().join(MANIFEST_FILE_NAME)).await?;
         Ok(string)
     }

@@ -162,20 +182,24 @@ impl Project {
     /// Deserialize the manifest file
     #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
-        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
+        let string = fs::read_to_string(self.package_dir().join(MANIFEST_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }

     /// Write the manifest file
     #[instrument(skip(self, manifest), level = "debug")]
     pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
-        fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
+        fs::write(
+            self.package_dir().join(MANIFEST_FILE_NAME),
+            manifest.as_ref(),
+        )
+        .await
     }

     /// Deserialize the lockfile
     #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
-        let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
+        let string = fs::read_to_string(self.package_dir().join(LOCKFILE_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }

@@ -186,7 +210,7 @@ impl Project {
         lockfile: &Lockfile,
     ) -> Result<(), errors::LockfileWriteError> {
         let string = toml::to_string(lockfile)?;
-        fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string).await?;
+        fs::write(self.package_dir().join(LOCKFILE_FILE_NAME), string).await?;
         Ok(())
     }

@@ -370,24 +394,35 @@ pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &
     Ok(paths)
 }

-/// Refreshes the sources asynchronously
-pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
-    project: &Project,
-    sources: I,
-    refreshed_sources: &mut HashSet<PackageSources>,
-) -> Result<(), Box<source::errors::RefreshError>> {
-    try_join_all(sources.map(|source| {
-        let needs_refresh = refreshed_sources.insert(source.clone());
-        async move {
-            if needs_refresh {
-                source.refresh(project).await.map_err(Box::new)
-            } else {
-                Ok(())
-            }
-        }
-    }))
-    .await
-    .map(|_| ())
-}
+/// A struct containing sources already having been refreshed
+#[derive(Debug, Clone, Default)]
+pub struct RefreshedSources(Arc<tokio::sync::Mutex<HashSet<u64>>>);
+
+impl RefreshedSources {
+    /// Create a new empty `RefreshedSources`
+    pub fn new() -> Self {
+        RefreshedSources::default()
+    }
+
+    /// Refreshes the source asynchronously if it has not already been refreshed.
+    /// Will prevent more refreshes of the same source.
+    pub async fn refresh(
+        &self,
+        source: &PackageSources,
+        options: &RefreshOptions,
+    ) -> Result<(), source::errors::RefreshError> {
+        let mut hasher = std::hash::DefaultHasher::new();
+        source.hash(&mut hasher);
+        let hash = hasher.finish();
+
+        let mut refreshed_sources = self.0.lock().await;
+
+        if refreshed_sources.insert(hash) {
+            source.refresh(options).await
+        } else {
+            Ok(())
+        }
+    }
+}

 /// Errors that can occur when using the pesde library
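For context, the change above turns `Project` into a cheap handle over shared state. Below is a minimal, standalone sketch of the same pattern; it is not pesde's actual definitions, the field names merely mirror the diff above.

    use std::{path::{Path, PathBuf}, sync::Arc};

    #[derive(Debug, Clone)]
    struct Project {
        shared: Arc<ProjectShared>,
    }

    #[derive(Debug)]
    struct ProjectShared {
        package_dir: PathBuf,
    }

    impl Project {
        // Cloning a Project now only bumps the Arc's reference count;
        // the owned paths inside ProjectShared are never copied.
        fn package_dir(&self) -> &Path {
            &self.shared.package_dir
        }
    }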
@@ -332,7 +332,7 @@ async fn run() -> anyhow::Result<()> {
         .and_then(|manifest| manifest.pesde_version);

     let exe_path = if let Some(version) = target_version {
-        get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await?
+        get_or_download_version(&reqwest, TagInfo::Incomplete(version), false).await?
     } else {
         None
     };
 src/patches.rs

@@ -9,7 +9,7 @@ use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature}
 use relative_path::RelativePathBuf;
 use std::{path::Path, sync::Arc};
 use tokio::task::JoinSet;
-use tracing::instrument;
+use tracing::{instrument, Instrument};

 /// Set up a git repository for patches
 pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {

@@ -91,7 +91,6 @@ impl Project {

         for (name, versions) in manifest.patches {
             for (version_id, patch_path) in versions {
-                let name = name.clone();
                 let patch_path = patch_path.to_path(self.package_dir());

                 let Some(node) = graph

@@ -114,62 +113,68 @@ impl Project {
                 );

                 let reporter = reporter.clone();
+                let span = tracing::info_span!(
+                    "apply patch",
+                    name = name.to_string(),
+                    version_id = version_id.to_string()
+                );
+                let display_name = format!("{name}@{version_id}");

-                tasks.spawn(async move {
-                    tracing::debug!("applying patch to {name}@{version_id}");
-
-                    let display_name = format!("{name}@{version_id}");
-                    let progress_reporter = reporter.report_patch(&display_name);
+                tasks.spawn(
+                    async move {
+                        tracing::debug!("applying patch");
+
+                        let progress_reporter = reporter.report_patch(&display_name);

                     let patch = fs::read(&patch_path)
                         .await
                         .map_err(errors::ApplyPatchesError::PatchRead)?;
                     let patch = Diff::from_buffer(&patch)?;

                     {
                         let repo = setup_patches_repo(&container_folder)?;

                         let mut apply_delta_tasks = patch
                             .deltas()
                             .filter(|delta| matches!(delta.status(), git2::Delta::Modified))
                             .filter_map(|delta| delta.new_file().path())
                             .map(|path| {
                                 RelativePathBuf::from_path(path)
                                     .unwrap()
                                     .to_path(&container_folder)
                             })
                             .filter(|path| path.is_file())
                             .map(|path| {
                                 async {
                                     // so, we always unlink it
                                     let content = fs::read(&path).await?;
                                     fs::remove_file(&path).await?;
                                     fs::write(path, content).await?;
                                     Ok(())
                                 }
                                 .map_err(errors::ApplyPatchesError::File)
                             })
                             .collect::<JoinSet<_>>();

                         while let Some(res) = apply_delta_tasks.join_next().await {
                             res.unwrap()?;
                         }

                         repo.apply(&patch, ApplyLocation::Both, None)?;
                     }

-                    tracing::debug!(
-                        "patch applied to {name}@{version_id}, removing .git directory"
-                    );
+                        tracing::debug!("patch applied");

                     fs::remove_dir_all(container_folder.join(".git"))
                         .await
                         .map_err(errors::ApplyPatchesError::DotGitRemove)?;

                     progress_reporter.report_done();

                     Ok(())
-                });
+                    }
+                    .instrument(span),
+                );
             }
         }
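The patching task above now carries a tracing span instead of repeating the package name in every log line. A hedged, generic sketch of the same technique with tokio and tracing (not the project's exact task setup):

    use tracing::Instrument;

    fn spawn_patch_task(tasks: &mut tokio::task::JoinSet<()>, name: String) {
        let span = tracing::info_span!("apply patch", name = %name);
        tasks.spawn(
            async move {
                // Events emitted here inherit the span's fields automatically.
                tracing::debug!("applying patch");
            }
            .instrument(span),
        );
    }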
@@ -5,19 +5,19 @@ use crate::{
     source::{
         pesde::PesdePackageSource,
         specifiers::DependencySpecifiers,
-        traits::{PackageRef, PackageSource},
+        traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
         version_id::VersionId,
         PackageSources,
     },
-    Project, DEFAULT_INDEX_NAME,
+    Project, RefreshedSources, DEFAULT_INDEX_NAME,
 };
-use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque};
+use std::collections::{btree_map::Entry, HashMap, VecDeque};
 use tracing::{instrument, Instrument};

 fn insert_node(
     graph: &mut DependencyGraph,
-    name: PackageNames,
-    version: VersionId,
+    name: &PackageNames,
+    version: &VersionId,
     mut node: DependencyGraphNode,
     is_top_level: bool,
 ) {

@@ -63,7 +63,7 @@ impl Project {
     pub async fn dependency_graph(
         &self,
         previous_graph: Option<&DependencyGraph>,
-        refreshed_sources: &mut HashSet<PackageSources>,
+        refreshed_sources: RefreshedSources,
         // used by `x` command - if true, specifier indices are expected to be URLs. will not do peer dependency checks
         is_published_package: bool,
     ) -> Result<DependencyGraph, Box<errors::DependencyGraphError>> {

@@ -108,8 +108,8 @@ impl Project {
                 tracing::debug!("resolved {}@{} from old dependency graph", name, version);
                 insert_node(
                     &mut graph,
-                    name.clone(),
-                    version.clone(),
+                    name,
+                    version,
                     DependencyGraphNode {
                         direct: Some((alias.clone(), specifier.clone(), *source_ty)),
                         ..node.clone()

@@ -138,13 +138,7 @@ impl Project {
                         .and_then(|v| v.get(dep_version))
                     {
                         tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}");
-                        insert_node(
-                            &mut graph,
-                            dep_name.clone(),
-                            dep_version.clone(),
-                            dep_node.clone(),
-                            false,
-                        );
+                        insert_node(&mut graph, dep_name, dep_version, dep_node.clone(), false);

                         dep_node
                             .dependencies

@@ -184,9 +178,13 @@ impl Project {
             })
             .collect::<VecDeque<_>>();

+        let refresh_options = RefreshOptions {
+            project: self.clone(),
+        };
+
         while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() {
             async {
-                let alias = path.last().unwrap().clone();
+                let alias = path.last().unwrap();
                 let depth = path.len() - 1;

                 tracing::debug!("resolving {specifier} ({ty:?})");

@@ -203,10 +201,7 @@ impl Project {
                         ))?
                         .clone()
                 } else {
-                    let index_url = specifier.index.clone().unwrap();
-
-                    index_url
-                        .clone()
+                    specifier.index.as_deref().unwrap()
                         .try_into()
                         // specifiers in indices store the index url in this field
                         .unwrap()

@@ -227,10 +222,7 @@ impl Project {
                         ))?
                         .clone()
                 } else {
-                    let index_url = specifier.index.clone().unwrap();
-
-                    index_url
-                        .clone()
+                    specifier.index.as_deref().unwrap()
                         .try_into()
                         // specifiers in indices store the index url in this field
                         .unwrap()

@@ -246,12 +238,19 @@ impl Project {
                     }
                 };

-                if refreshed_sources.insert(source.clone()) {
-                    source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
-                }
+                refreshed_sources.refresh(
+                    &source,
+                    &refresh_options,
+                )
+                .await
+                .map_err(|e| Box::new(e.into()))?;

                 let (name, resolved) = source
-                    .resolve(&specifier, self, target, refreshed_sources)
+                    .resolve(&specifier, &ResolveOptions {
+                        project: self.clone(),
+                        target,
+                        refreshed_sources: refreshed_sources.clone(),
+                    })
                     .await
                     .map_err(|e| Box::new(e.into()))?;

@@ -341,9 +340,9 @@ impl Project {
                 };
                 insert_node(
                     &mut graph,
-                    name.clone(),
-                    target_version_id.clone(),
-                    node.clone(),
+                    &name,
+                    &target_version_id,
+                    node,
                     depth == 0,
                 );
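The resolver now shares one `RefreshedSources` value instead of threading a mutable `HashSet` through every call. The deduplication key is a plain `u64` hash of the source; the following is a small standalone sketch of that part (any `Hash` type works, the pesde-specific types are assumed):

    use std::hash::{DefaultHasher, Hash, Hasher};

    // Reduce a hashable value to the u64 key stored in the shared set,
    // mirroring what RefreshedSources::refresh does with a PackageSources value.
    fn dedup_key<T: Hash>(value: &T) -> u64 {
        let mut hasher = DefaultHasher::new();
        value.hash(&mut hasher);
        hasher.finish()
    }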
@@ -10,8 +10,8 @@ use crate::{
         git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
         git_index::{read_file, GitBasedSource},
         specifiers::DependencySpecifiers,
-        traits::PackageRef,
-        PackageSource, PackageSources, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
+        traits::{DownloadOptions, PackageRef, RefreshOptions, ResolveOptions},
+        PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
     },
     util::hash,
     Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,

@@ -20,13 +20,7 @@ use fs_err::tokio as fs;
 use futures::future::try_join_all;
 use gix::{bstr::BStr, traverse::tree::Recorder, ObjectId, Url};
 use relative_path::RelativePathBuf;
-use std::{
-    collections::{BTreeMap, HashSet},
-    fmt::Debug,
-    hash::Hash,
-    path::PathBuf,
-    sync::Arc,
-};
+use std::{collections::BTreeMap, fmt::Debug, hash::Hash, path::PathBuf, sync::Arc};
 use tokio::{sync::Mutex, task::spawn_blocking};
 use tracing::instrument;

@@ -44,7 +38,7 @@ pub struct GitPackageSource {
 impl GitBasedSource for GitPackageSource {
     fn path(&self, project: &Project) -> PathBuf {
         project
-            .data_dir
+            .data_dir()
             .join("git_repos")
             .join(hash(self.as_bytes()))
     }

@@ -73,18 +67,18 @@ impl PackageSource for GitPackageSource {
     type DownloadError = errors::DownloadError;

     #[instrument(skip_all, level = "debug")]
-    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
-        GitBasedSource::refresh(self, project).await
+    async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
+        GitBasedSource::refresh(self, options).await
     }

     #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        _project_target: TargetKind,
-        _refreshed_sources: &mut HashSet<PackageSources>,
+        options: &ResolveOptions,
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
+        let ResolveOptions { project, .. } = options;
+
         let repo = gix::open(self.path(project))
             .map_err(|e| errors::ResolveError::OpenRepo(Box::new(self.repo_url.clone()), e))?;
         let rev = repo

@@ -334,15 +328,15 @@ impl PackageSource for GitPackageSource {
     }

     #[instrument(skip_all, level = "debug")]
-    async fn download(
+    async fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        _reqwest: &reqwest::Client,
-        _reporter: Arc<impl DownloadProgressReporter>,
+        options: &DownloadOptions<R>,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
+        let DownloadOptions { project, .. } = options;
+
         let index_file = project
-            .cas_dir
+            .cas_dir()
             .join("git_index")
             .join(hash(self.as_bytes()))
             .join(&pkg_ref.tree_id);
@@ -1,6 +1,6 @@
 #![allow(async_fn_in_trait)]

-use crate::{util::authenticate_conn, Project};
+use crate::{source::traits::RefreshOptions, util::authenticate_conn, Project};
 use fs_err::tokio as fs;
 use gix::remote::Direction;
 use std::fmt::Debug;

@@ -16,10 +16,13 @@ pub trait GitBasedSource {
     fn repo_url(&self) -> &gix::Url;

     /// Refreshes the repository
-    async fn refresh(&self, project: &Project) -> Result<(), errors::RefreshError> {
+    async fn refresh(&self, options: &RefreshOptions) -> Result<(), errors::RefreshError> {
+        let RefreshOptions { project } = options;
+
         let path = self.path(project);
         let repo_url = self.repo_url().clone();
-        let auth_config = project.auth_config.clone();
+        let project = project.clone();

         if path.exists() {
             spawn_blocking(move || {

@@ -47,7 +50,7 @@ pub trait GitBasedSource {
                 }
             };

-            authenticate_conn(&mut connection, &auth_config);
+            authenticate_conn(&mut connection, project.auth_config());

             let fetch =
                 match connection.prepare_fetch(gix::progress::Discard, Default::default()) {

@@ -80,7 +83,7 @@ pub trait GitBasedSource {
         gix::prepare_clone_bare(repo_url.clone(), &path)
             .map_err(|e| errors::RefreshError::Clone(repo_url.to_string(), Box::new(e)))?
             .configure_connection(move |c| {
-                authenticate_conn(c, &auth_config);
+                authenticate_conn(c, project.auth_config());
                 Ok(())
             })
             .fetch_only(gix::progress::Discard, &false.into())

@@ -94,21 +97,16 @@ pub trait GitBasedSource {

 /// Reads a file from a tree
 #[instrument(skip(tree), ret, level = "trace")]
-pub fn read_file<
-    I: IntoIterator<Item = P> + Clone + Debug,
-    P: ToString + PartialEq<gix::bstr::BStr>,
->(
+pub fn read_file<I: IntoIterator<Item = P> + Debug, P: ToString + PartialEq<gix::bstr::BStr>>(
     tree: &gix::Tree,
     file_path: I,
 ) -> Result<Option<String>, errors::ReadFile> {
-    let file_path_str = file_path
-        .clone()
-        .into_iter()
-        .map(|s| s.to_string())
-        .collect::<Vec<_>>()
-        .join(std::path::MAIN_SEPARATOR_STR);
+    let mut file_path_str = String::new();

-    let entry = match tree.lookup_entry(file_path) {
+    let entry = match tree.lookup_entry(file_path.into_iter().inspect(|path| {
+        file_path_str.push_str(path.to_string().as_str());
+        file_path_str.push('/');
+    })) {
         Ok(Some(entry)) => entry,
         Ok(None) => return Ok(None),
         Err(e) => return Err(errors::ReadFile::Lookup(file_path_str, e)),
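Dropping the `Clone` bound on `read_file`'s iterator works because the display string is now built while the iterator is consumed. A generic illustration of the trick, independent of gix:

    // Collect the items once while also accumulating a printable path,
    // so the iterator never needs to be cloned.
    fn consume_once<I: IntoIterator<Item = String>>(parts: I) -> (Vec<String>, String) {
        let mut joined = String::new();
        let collected: Vec<String> = parts
            .into_iter()
            .inspect(|part| {
                joined.push_str(part);
                joined.push('/');
            })
            .collect();
        (collected, joined)
    }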
@@ -1,18 +1,13 @@
 use crate::{
-    manifest::target::{Target, TargetKind},
+    manifest::target::Target,
     names::PackageNames,
     reporters::DownloadProgressReporter,
     source::{
         fs::PackageFS, refs::PackageRefs, specifiers::DependencySpecifiers, traits::*,
         version_id::VersionId,
     },
-    Project,
 };
-use std::{
-    collections::{BTreeMap, HashSet},
-    fmt::Debug,
-    sync::Arc,
-};
+use std::{collections::BTreeMap, fmt::Debug};

 /// Packages' filesystems
 pub mod fs;

@@ -66,26 +61,33 @@ impl PackageSource for PackageSources {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
+    async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
         match self {
-            PackageSources::Pesde(source) => source.refresh(project).await.map_err(Into::into),
+            PackageSources::Pesde(source) => source
+                .refresh(options)
+                .await
+                .map_err(Self::RefreshError::Pesde),
             #[cfg(feature = "wally-compat")]
-            PackageSources::Wally(source) => source.refresh(project).await.map_err(Into::into),
-            PackageSources::Git(source) => source.refresh(project).await.map_err(Into::into),
-            PackageSources::Workspace(source) => source.refresh(project).await.map_err(Into::into),
+            PackageSources::Wally(source) => source
+                .refresh(options)
+                .await
+                .map_err(Self::RefreshError::Wally),
+            PackageSources::Git(source) => source
+                .refresh(options)
+                .await
+                .map_err(Self::RefreshError::Git),
+            PackageSources::Workspace(source) => source.refresh(options).await.map_err(Into::into),
         }
     }

     async fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        project_target: TargetKind,
-        refreshed_sources: &mut HashSet<PackageSources>,
+        options: &ResolveOptions,
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
         match (self, specifier) {
             (PackageSources::Pesde(source), DependencySpecifiers::Pesde(specifier)) => source
-                .resolve(specifier, project, project_target, refreshed_sources)
+                .resolve(specifier, options)
                 .await
                 .map(|(name, results)| {
                     (

@@ -100,7 +102,7 @@ impl PackageSource for PackageSources {

             #[cfg(feature = "wally-compat")]
             (PackageSources::Wally(source), DependencySpecifiers::Wally(specifier)) => source
-                .resolve(specifier, project, project_target, refreshed_sources)
+                .resolve(specifier, options)
                 .await
                 .map(|(name, results)| {
                     (

@@ -114,7 +116,7 @@ impl PackageSource for PackageSources {
                 .map_err(Into::into),

             (PackageSources::Git(source), DependencySpecifiers::Git(specifier)) => source
-                .resolve(specifier, project, project_target, refreshed_sources)
+                .resolve(specifier, options)
                 .await
                 .map(|(name, results)| {
                     (

@@ -129,7 +131,7 @@ impl PackageSource for PackageSources {

             (PackageSources::Workspace(source), DependencySpecifiers::Workspace(specifier)) => {
                 source
-                    .resolve(specifier, project, project_target, refreshed_sources)
+                    .resolve(specifier, options)
                     .await
                     .map(|(name, results)| {
                         (

@@ -149,34 +151,28 @@ impl PackageSource for PackageSources {
         }
     }

-    async fn download(
+    async fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        reqwest: &reqwest::Client,
-        reporter: Arc<impl DownloadProgressReporter>,
+        options: &DownloadOptions<R>,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
         match (self, pkg_ref) {
-            (PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => source
-                .download(pkg_ref, project, reqwest, reporter)
-                .await
-                .map_err(Into::into),
+            (PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => {
+                source.download(pkg_ref, options).await.map_err(Into::into)
+            }

             #[cfg(feature = "wally-compat")]
-            (PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => source
-                .download(pkg_ref, project, reqwest, reporter)
-                .await
-                .map_err(Into::into),
+            (PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => {
+                source.download(pkg_ref, options).await.map_err(Into::into)
+            }

-            (PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => source
-                .download(pkg_ref, project, reqwest, reporter)
-                .await
-                .map_err(Into::into),
+            (PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => {
+                source.download(pkg_ref, options).await.map_err(Into::into)
+            }

-            (PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => source
-                .download(pkg_ref, project, reqwest, reporter)
-                .await
-                .map_err(Into::into),
+            (PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => {
+                source.download(pkg_ref, options).await.map_err(Into::into)
+            }

             _ => Err(errors::DownloadError::Mismatch),
         }

@@ -191,9 +187,18 @@ pub mod errors {
     #[derive(Debug, Error)]
     #[non_exhaustive]
     pub enum RefreshError {
-        /// A git-based package source failed to refresh
+        /// A pesde package source failed to refresh
         #[error("error refreshing pesde package source")]
-        GitBased(#[from] crate::source::git_index::errors::RefreshError),
+        Pesde(#[source] crate::source::git_index::errors::RefreshError),
+
+        /// A Wally package source failed to refresh
+        #[cfg(feature = "wally-compat")]
+        #[error("error refreshing wally package source")]
+        Wally(#[source] crate::source::git_index::errors::RefreshError),
+
+        /// A Git package source failed to refresh
+        #[error("error refreshing git package source")]
+        Git(#[source] crate::source::git_index::errors::RefreshError),

         /// A workspace package source failed to refresh
         #[error("error refreshing workspace package source")]
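Splitting the single `GitBased` variant into per-source variants relies on `#[source]` rather than `#[from]`, so the same inner error type can back several variants; callers then construct the variant explicitly, as in `.map_err(Self::RefreshError::Pesde)` above. A hedged sketch of the pattern with a stand-in inner error type:

    use thiserror::Error;

    #[derive(Debug, Error)]
    enum RefreshError {
        #[error("error refreshing pesde package source")]
        Pesde(#[source] std::io::Error),
        #[error("error refreshing git package source")]
        Git(#[source] std::io::Error),
    }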
@@ -7,7 +7,6 @@ use std::{
     fmt::Debug,
     hash::Hash,
     path::PathBuf,
-    sync::Arc,
 };
 use tokio_util::io::StreamReader;

@@ -15,17 +14,14 @@ use pkg_ref::PesdePackageRef;
 use specifier::PesdeDependencySpecifier;

 use crate::{
-    manifest::{
-        target::{Target, TargetKind},
-        DependencyType,
-    },
+    manifest::{target::Target, DependencyType},
     names::{PackageName, PackageNames},
     reporters::DownloadProgressReporter,
     source::{
         fs::{store_in_cas, FSEntry, PackageFS},
         git_index::{read_file, root_tree, GitBasedSource},
-        DependencySpecifiers, PackageSource, PackageSources, ResolveResult, VersionId,
-        IGNORED_DIRS, IGNORED_FILES,
+        traits::{DownloadOptions, RefreshOptions, ResolveOptions},
+        DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
     },
     util::hash,
     Project,

@@ -58,7 +54,10 @@ pub struct ScopeInfo {

 impl GitBasedSource for PesdePackageSource {
     fn path(&self, project: &Project) -> PathBuf {
-        project.data_dir.join("indices").join(hash(self.as_bytes()))
+        project
+            .data_dir()
+            .join("indices")
+            .join(hash(self.as_bytes()))
     }

     fn repo_url(&self) -> &Url {

@@ -105,18 +104,22 @@ impl PackageSource for PesdePackageSource {
     type DownloadError = errors::DownloadError;

     #[instrument(skip_all, level = "debug")]
-    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
-        GitBasedSource::refresh(self, project).await
+    async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
+        GitBasedSource::refresh(self, options).await
     }

     #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        project_target: TargetKind,
-        _refreshed_sources: &mut HashSet<PackageSources>,
+        options: &ResolveOptions,
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
+        let ResolveOptions {
+            project,
+            target: project_target,
+            ..
+        } = options;
+
         let (scope, name) = specifier.name.as_str();
         let repo = gix::open(self.path(project)).map_err(Box::new)?;
         let tree = root_tree(&repo).map_err(Box::new)?;

@@ -142,7 +145,7 @@ impl PackageSource for PesdePackageSource {
             .into_iter()
             .filter(|(VersionId(version, target), _)| {
                 specifier.version.matches(version)
-                    && specifier.target.unwrap_or(project_target) == *target
+                    && specifier.target.unwrap_or(*project_target) == *target
             })
             .map(|(id, entry)| {
                 let version = id.version().clone();

@@ -163,16 +166,20 @@ impl PackageSource for PesdePackageSource {
     }

     #[instrument(skip_all, level = "debug")]
-    async fn download(
+    async fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        reqwest: &reqwest::Client,
-        reporter: Arc<impl DownloadProgressReporter>,
+        options: &DownloadOptions<R>,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
+        let DownloadOptions {
+            project,
+            reporter,
+            reqwest,
+        } = options;
+
         let config = self.config(project).await.map_err(Box::new)?;
         let index_file = project
-            .cas_dir
+            .cas_dir()
             .join("index")
             .join(pkg_ref.name.escaped())
             .join(pkg_ref.version.to_string())

@@ -200,7 +207,7 @@ impl PackageSource for PesdePackageSource {

         let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");

-        if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
+        if let Some(token) = project.auth_config().tokens().get(&self.repo_url) {
             tracing::debug!("using token for {}", self.repo_url);
             request = request.header(AUTHORIZATION, token);
         }
@@ -1,4 +1,3 @@
-#![allow(async_fn_in_trait)]
 use crate::{
     manifest::{
         target::{Target, TargetKind},

@@ -6,11 +5,12 @@ use crate::{
     },
     reporters::DownloadProgressReporter,
     source::{DependencySpecifiers, PackageFS, PackageSources, ResolveResult},
-    Project,
+    Project, RefreshedSources,
 };
 use std::{
-    collections::{BTreeMap, HashSet},
+    collections::BTreeMap,
     fmt::{Debug, Display},
+    future::Future,
     sync::Arc,
 };

@@ -27,6 +27,35 @@ pub trait PackageRef: Debug {
     fn source(&self) -> PackageSources;
 }

+/// Options for refreshing a source
+#[derive(Debug, Clone)]
+pub struct RefreshOptions {
+    /// The project to refresh for
+    pub project: Project,
+}
+
+/// Options for resolving a package
+#[derive(Debug, Clone)]
+pub struct ResolveOptions {
+    /// The project to resolve for
+    pub project: Project,
+    /// The target to resolve for
+    pub target: TargetKind,
+    /// The sources that have been refreshed
+    pub refreshed_sources: RefreshedSources,
+}
+
+/// Options for downloading a package
+#[derive(Debug, Clone)]
+pub struct DownloadOptions<R: DownloadProgressReporter> {
+    /// The project to download for
+    pub project: Project,
+    /// The reqwest client to use
+    pub reqwest: reqwest::Client,
+    /// The reporter to use
+    pub reporter: Arc<R>,
+}
+
 /// A source of packages
 pub trait PackageSource: Debug {
     /// The specifier type for this source

@@ -34,32 +63,31 @@ pub trait PackageSource: Debug {
     /// The reference type for this source
     type Ref: PackageRef;
     /// The error type for refreshing this source
-    type RefreshError: std::error::Error;
+    type RefreshError: std::error::Error + Send + Sync + 'static;
     /// The error type for resolving a package from this source
-    type ResolveError: std::error::Error;
+    type ResolveError: std::error::Error + Send + Sync + 'static;
     /// The error type for downloading a package from this source
-    type DownloadError: std::error::Error;
+    type DownloadError: std::error::Error + Send + Sync + 'static;

     /// Refreshes the source
-    async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
-        Ok(())
+    fn refresh(
+        &self,
+        _options: &RefreshOptions,
+    ) -> impl Future<Output = Result<(), Self::RefreshError>> + Send + Sync {
+        async { Ok(()) }
     }

     /// Resolves a specifier to a reference
-    async fn resolve(
+    fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        project_target: TargetKind,
-        refreshed_sources: &mut HashSet<PackageSources>,
-    ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError>;
+        options: &ResolveOptions,
+    ) -> impl Future<Output = Result<ResolveResult<Self::Ref>, Self::ResolveError>>;

     /// Downloads a package
-    async fn download(
+    fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        reqwest: &reqwest::Client,
-        reporter: Arc<impl DownloadProgressReporter>,
-    ) -> Result<(PackageFS, Target), Self::DownloadError>;
+        options: &DownloadOptions<R>,
+    ) -> impl Future<Output = Result<(PackageFS, Target), Self::DownloadError>>;
 }
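The trait now spells its methods as `fn ... -> impl Future` instead of `async fn`, which lets the default `refresh` state `Send + Sync` bounds on the returned future. A minimal sketch of that shape (a toy trait, not pesde's API):

    use std::future::Future;

    trait Source {
        type RefreshError: std::error::Error + Send + Sync + 'static;

        // Default no-op refresh; implementors can override it with their own future.
        fn refresh(&self) -> impl Future<Output = Result<(), Self::RefreshError>> + Send + Sync {
            async { Ok(()) }
        }
    }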
@@ -37,7 +37,7 @@ async fn find_lib_path(

     let result = execute_script(
         ScriptName::SourcemapGenerator,
-        &script_path.to_path(&project.package_dir),
+        &script_path.to_path(project.package_dir()),
         [package_dir],
         project,
         true,
@@ -5,7 +5,7 @@ use crate::{
     source::{
         fs::{store_in_cas, FSEntry, PackageFS},
         git_index::{read_file, root_tree, GitBasedSource},
-        traits::PackageSource,
+        traits::{DownloadOptions, PackageSource, RefreshOptions, ResolveOptions},
         version_id::VersionId,
         wally::{
             compat_util::get_target,

@@ -23,11 +23,7 @@ use gix::Url;
 use relative_path::RelativePathBuf;
 use reqwest::header::AUTHORIZATION;
 use serde::Deserialize;
-use std::{
-    collections::{BTreeMap, HashSet},
-    path::PathBuf,
-    sync::Arc,
-};
+use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
 use tempfile::tempdir;
 use tokio::{
     io::{AsyncReadExt, AsyncWriteExt},

@@ -53,7 +49,7 @@ pub struct WallyPackageSource {
 impl GitBasedSource for WallyPackageSource {
     fn path(&self, project: &Project) -> PathBuf {
         project
-            .data_dir
+            .data_dir()
             .join("wally_indices")
             .join(hash(self.as_bytes()))
     }

@@ -102,18 +98,22 @@ impl PackageSource for WallyPackageSource {
     type DownloadError = errors::DownloadError;

     #[instrument(skip_all, level = "debug")]
-    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
-        GitBasedSource::refresh(self, project).await
+    async fn refresh(&self, options: &RefreshOptions) -> Result<(), Self::RefreshError> {
+        GitBasedSource::refresh(self, options).await
     }

     #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        project_target: TargetKind,
-        refreshed_sources: &mut HashSet<PackageSources>,
+        options: &ResolveOptions,
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
+        let ResolveOptions {
+            project,
+            refreshed_sources,
+            ..
+        } = options;
+
         let repo = gix::open(self.path(project)).map_err(Box::new)?;
         let tree = root_tree(&repo).map_err(Box::new)?;
         let (scope, name) = specifier.name.as_str();

@@ -127,23 +127,26 @@ impl PackageSource for WallyPackageSource {

         let config = self.config(project).await.map_err(Box::new)?;
         for registry in config.fallback_registries {
-            let source = WallyPackageSource::new(registry.clone());
-            if refreshed_sources.insert(PackageSources::Wally(source.clone())) {
-                GitBasedSource::refresh(&source, project)
-                    .await
-                    .map_err(Box::new)?;
+            let source = WallyPackageSource::new(registry);
+            match refreshed_sources
+                .refresh(
+                    &PackageSources::Wally(source.clone()),
+                    &RefreshOptions {
+                        project: project.clone(),
+                    },
+                )
+                .await
+            {
+                Ok(()) => {}
+                Err(super::errors::RefreshError::Wally(e)) => {
+                    return Err(Self::ResolveError::Refresh(Box::new(e)));
+                }
+                Err(e) => unreachable!("unexpected error: {e:?}"),
             }

-            match Box::pin(source.resolve(
-                specifier,
-                project,
-                project_target,
-                refreshed_sources,
-            ))
-            .await
-            {
+            match Box::pin(source.resolve(specifier, options)).await {
                 Ok((name, results)) => {
-                    tracing::debug!("found {} in backup registry {registry}", name);
+                    tracing::debug!("found {name} in backup registry {}", source.repo_url);
                     return Ok((name, results));
                 }
                 Err(errors::ResolveError::NotFound(_)) => {

@@ -202,16 +205,20 @@ impl PackageSource for WallyPackageSource {
     }

     #[instrument(skip_all, level = "debug")]
-    async fn download(
+    async fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        reqwest: &reqwest::Client,
-        reporter: Arc<impl DownloadProgressReporter>,
+        options: &DownloadOptions<R>,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
+        let DownloadOptions {
+            project,
+            reqwest,
+            reporter,
+        } = options;
+
         let config = self.config(project).await.map_err(Box::new)?;
         let index_file = project
-            .cas_dir
+            .cas_dir()
             .join("wally_index")
             .join(pkg_ref.name.escaped())
             .join(pkg_ref.version.to_string());

@@ -250,7 +257,7 @@ impl PackageSource for WallyPackageSource {
                 .unwrap_or("0.3.2"),
         );

-        if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
+        if let Some(token) = project.auth_config().tokens().get(&self.repo_url) {
             tracing::debug!("using token for {}", self.repo_url);
             request = request.header(AUTHORIZATION, token);
         }

@@ -291,6 +298,7 @@ impl PackageSource for WallyPackageSource {

         let archive = Arc::new(Mutex::new(archive));

+        // todo: remove this asyncification, since the Mutex makes it sequential anyway
         let entries = try_join_all(
             entries
                 .into_iter()

@@ -363,7 +371,7 @@ impl PackageSource for WallyPackageSource {
 pub struct WallyIndexConfig {
     api: url::Url,
     #[serde(default, deserialize_with = "crate::util::deserialize_gix_url_vec")]
-    fallback_registries: Vec<gix::Url>,
+    fallback_registries: Vec<Url>,
 }

 /// Errors that can occur when interacting with a Wally package source
@@ -1,21 +1,20 @@
 use crate::{
-    manifest::target::{Target, TargetKind},
+    manifest::target::Target,
     names::PackageNames,
     reporters::DownloadProgressReporter,
     source::{
-        fs::PackageFS, specifiers::DependencySpecifiers, traits::PackageSource,
-        version_id::VersionId, workspace::pkg_ref::WorkspacePackageRef, PackageSources,
+        fs::PackageFS,
+        specifiers::DependencySpecifiers,
+        traits::{DownloadOptions, PackageSource, ResolveOptions},
+        version_id::VersionId,
+        workspace::pkg_ref::WorkspacePackageRef,
         ResolveResult,
     },
-    Project, DEFAULT_INDEX_NAME,
+    DEFAULT_INDEX_NAME,
 };
 use futures::StreamExt;
 use relative_path::RelativePathBuf;
-use reqwest::Client;
-use std::{
-    collections::{BTreeMap, HashSet},
-    sync::Arc,
-};
+use std::collections::BTreeMap;
 use tokio::pin;
 use tracing::instrument;

@@ -35,25 +34,21 @@ impl PackageSource for WorkspacePackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
-        // no-op
-        Ok(())
-    }
-
     #[instrument(skip_all, level = "debug")]
     async fn resolve(
         &self,
         specifier: &Self::Specifier,
-        project: &Project,
-        project_target: TargetKind,
-        _refreshed_sources: &mut HashSet<PackageSources>,
+        options: &ResolveOptions,
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError> {
+        let ResolveOptions {
+            project,
+            target: project_target,
+            ..
+        } = options;
+
         let (path, manifest) = 'finder: {
-            let workspace_dir = project
-                .workspace_dir
-                .as_ref()
-                .unwrap_or(&project.package_dir);
-            let target = specifier.target.unwrap_or(project_target);
+            let workspace_dir = project.workspace_dir().unwrap_or(project.package_dir());
+            let target = specifier.target.unwrap_or(*project_target);

             let members = project.workspace_members(workspace_dir, true).await?;
             pin!(members);

@@ -70,77 +65,77 @@ impl PackageSource for WorkspacePackageSource {
             ));
         };

+        let manifest_target_kind = manifest.target.kind();
+        let pkg_ref = WorkspacePackageRef {
+            // workspace_dir is guaranteed to be Some by the workspace_members method
+            // strip_prefix is guaranteed to be Some by same method
+            // from_path is guaranteed to be Ok because we just stripped the absolute path
+            path: RelativePathBuf::from_path(
+                path.strip_prefix(project.workspace_dir().unwrap()).unwrap(),
+            )
+            .unwrap(),
+            dependencies: manifest
+                .all_dependencies()?
+                .into_iter()
+                .map(|(alias, (mut spec, ty))| {
+                    match &mut spec {
+                        DependencySpecifiers::Pesde(spec) => {
+                            let index_name = spec.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
+
+                            spec.index = Some(
+                                manifest
+                                    .indices
+                                    .get(index_name)
+                                    .ok_or(errors::ResolveError::IndexNotFound(
+                                        index_name.to_string(),
+                                        manifest.name.to_string(),
+                                    ))?
+                                    .to_string(),
+                            )
+                        }
+                        #[cfg(feature = "wally-compat")]
+                        DependencySpecifiers::Wally(spec) => {
+                            let index_name = spec.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
+
+                            spec.index = Some(
+                                manifest
+                                    .wally_indices
+                                    .get(index_name)
+                                    .ok_or(errors::ResolveError::IndexNotFound(
+                                        index_name.to_string(),
+                                        manifest.name.to_string(),
+                                    ))?
+                                    .to_string(),
+                            )
+                        }
+                        DependencySpecifiers::Git(_) => {}
+                        DependencySpecifiers::Workspace(_) => {}
+                    }
+
+                    Ok((alias, (spec, ty)))
+                })
+                .collect::<Result<_, errors::ResolveError>>()?,
+            target: manifest.target,
+        };
+
         Ok((
-            PackageNames::Pesde(manifest.name.clone()),
+            PackageNames::Pesde(manifest.name),
             BTreeMap::from([(
-                VersionId::new(manifest.version.clone(), manifest.target.kind()),
-                WorkspacePackageRef {
-                    // workspace_dir is guaranteed to be Some by the workspace_members method
-                    // strip_prefix is guaranteed to be Some by same method
-                    // from_path is guaranteed to be Ok because we just stripped the absolute path
-                    path: RelativePathBuf::from_path(
-                        path.strip_prefix(project.workspace_dir.clone().unwrap())
-                            .unwrap(),
-                    )
-                    .unwrap(),
-                    dependencies: manifest
-                        .all_dependencies()?
-                        .into_iter()
-                        .map(|(alias, (mut spec, ty))| {
-                            match &mut spec {
-                                DependencySpecifiers::Pesde(spec) => {
-                                    let index_name =
-                                        spec.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
-
-                                    spec.index = Some(
-                                        manifest
-                                            .indices
-                                            .get(index_name)
-                                            .ok_or(errors::ResolveError::IndexNotFound(
-                                                index_name.to_string(),
-                                                manifest.name.to_string(),
-                                            ))?
-                                            .to_string(),
-                                    )
-                                }
-                                #[cfg(feature = "wally-compat")]
-                                DependencySpecifiers::Wally(spec) => {
-                                    let index_name =
-                                        spec.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
-
-                                    spec.index = Some(
-                                        manifest
-                                            .wally_indices
-                                            .get(index_name)
-                                            .ok_or(errors::ResolveError::IndexNotFound(
-                                                index_name.to_string(),
-                                                manifest.name.to_string(),
-                                            ))?
-                                            .to_string(),
-                                    )
-                                }
-                                DependencySpecifiers::Git(_) => {}
-                                DependencySpecifiers::Workspace(_) => {}
-                            }
-
-                            Ok((alias, (spec, ty)))
-                        })
-                        .collect::<Result<_, errors::ResolveError>>()?,
-                    target: manifest.target,
-                },
+                VersionId::new(manifest.version, manifest_target_kind),
+                pkg_ref,
             )]),
         ))
     }

     #[instrument(skip_all, level = "debug")]
-    async fn download(
+    async fn download<R: DownloadProgressReporter>(
         &self,
         pkg_ref: &Self::Ref,
-        project: &Project,
-        _reqwest: &Client,
-        _reporter: Arc<impl DownloadProgressReporter>,
+        options: &DownloadOptions<R>,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
-        let path = pkg_ref.path.to_path(project.workspace_dir.clone().unwrap());
+        let DownloadOptions { project, .. } = options;
+
+        let path = pkg_ref.path.to_path(project.workspace_dir().unwrap());

         Ok((
             PackageFS::Copy(path, pkg_ref.target.kind()),
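With every source now taking an options struct, a download call site looks roughly like the following. This is a hypothetical snippet: the `project`, `reqwest_client`, `reporter`, `source`, and `pkg_ref` bindings are assumed to already exist.

    let options = DownloadOptions {
        project: project.clone(),
        reqwest: reqwest_client.clone(),
        reporter: std::sync::Arc::new(reporter),
    };
    let (fs, target) = source.download(&pkg_ref, &options).await?;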