pesde/src/download.rs
daimond113 f4050abec8
feat: add engines
Squashed commit of the following:

commit 5767042964
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Thu Jan 16 18:28:52 2025 +0100

    fix(engines): correct engine detection on unix

    The `current_exe` function doesn't return the
    symlinked path on Unix, so the engine detection
    was failing there. This commit fixes that by
    using the 0th argument of the program to get
    the path of the executable on Unix.
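
    For illustration, a minimal sketch of the approach described above; the
    helper name `invoked_exe_path` is hypothetical and not the actual pesde
    code:

    use std::path::PathBuf;

    /// Prefer argv[0] on Unix so a symlinked linker name is preserved;
    /// `current_exe` would resolve the symlink. Fall back to `current_exe`
    /// elsewhere (and when argv[0] is unavailable).
    fn invoked_exe_path() -> std::io::Result<PathBuf> {
        if cfg!(unix) {
            if let Some(arg0) = std::env::args_os().next() {
                return Ok(PathBuf::from(arg0));
            }
        }

        std::env::current_exe()
    }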

commit b51c9d9571
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 22:43:50 2025 +0100

    refactor: print deprecated warning on CLI side

    Prints the deprecation warning on the CLI side,
    which gives it a more consistent look with the
    rest of the CLI output.

commit 5ace844035
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 22:21:36 2025 +0100

    feat: add alias validation

    Ensures aliases don't contain characters which could
    cause issues. They are now also forbidden from being
    the same as an engine name, to avoid conflicts.
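
    A rough sketch of what such a check could look like; the accepted
    character set and the engine name list below are illustrative
    assumptions, not pesde's exact rules:

    /// Illustrative only: accept ASCII alphanumerics, `-` and `_`, and
    /// reject aliases that collide with an engine name (list assumed here).
    fn validate_alias(alias: &str) -> Result<(), String> {
        const ENGINE_NAMES: &[&str] = &["pesde", "lune"];

        if alias.is_empty()
            || !alias
                .chars()
                .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
        {
            return Err(format!("alias `{alias}` contains forbidden characters"));
        }

        if ENGINE_NAMES.iter().any(|engine| *engine == alias) {
            return Err(format!("alias `{alias}` is the same as an engine name"));
        }

        Ok(())
    }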

commit a33302aff9
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 21:23:40 2025 +0100

    refactor: apply clippy lints

commit 2d534a534d
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 21:22:14 2025 +0100

    feat(engines): print incompatibility warning for dependencies

    Adds a warning message when a dependency depends
    on an incompatible engine.

commit 4946a19f8b
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 18:33:38 2025 +0100

    feat(engines): create linkers at install time

    Additionally fixes engines being executed as scripts,
    and fixes downloading pesde from GitHub.

commit e3177eeb75
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Tue Jan 14 14:33:26 2025 +0100

    fix(engines): store & link engines correctly

    Fixes issues with how engines were stored, which
    resulted in errors. Also ensures outdated linkers
    get updated.

commit 037ead66bb
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:26:19 2025 +0100

    docs: remove prerequisites

commit ddb496ff7d
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:25:53 2025 +0100

    ci: remove tar builds

commit e9f0c25554
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:25:11 2025 +0100

    chore(docs): update astro and starlight

commit fc349e6f21
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Sun Jan 12 23:12:27 2025 +0100

    feat: add engines

    Adds the initial implementation of the engines feature.
    Not tested yet. Requires documentation and
    more work for non-pesde engines to be usable.
2025-01-16 19:11:16 +01:00


use crate::{
    graph::{DependencyGraph, DependencyGraphNode},
    reporters::{DownloadProgressReporter, DownloadsReporter},
    source::{
        fs::PackageFs,
        ids::PackageId,
        traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
    },
    Project, RefreshedSources,
};
use async_stream::try_stream;
use futures::Stream;
use std::{num::NonZeroUsize, sync::Arc};
use tokio::{sync::Semaphore, task::JoinSet};
use tracing::{instrument, Instrument};

/// Options for downloading.
#[derive(Debug)]
pub(crate) struct DownloadGraphOptions<Reporter> {
    /// The reqwest client.
    pub reqwest: reqwest::Client,
    /// The downloads reporter.
    pub reporter: Option<Arc<Reporter>>,
    /// The refreshed sources.
    pub refreshed_sources: RefreshedSources,
    /// The max number of concurrent network requests.
    pub network_concurrency: NonZeroUsize,
}

impl<Reporter> DownloadGraphOptions<Reporter>
where
    Reporter: DownloadsReporter + Send + Sync + 'static,
{
    /// Creates new download options with the given reqwest client.
    pub(crate) fn new(reqwest: reqwest::Client) -> Self {
        Self {
            reqwest,
            reporter: None,
            refreshed_sources: Default::default(),
            network_concurrency: NonZeroUsize::new(16).unwrap(),
        }
    }

    /// Sets the downloads reporter.
    pub(crate) fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
        self.reporter.replace(reporter.into());
        self
    }

    /// Sets the refreshed sources.
    pub(crate) fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
        self.refreshed_sources = refreshed_sources;
        self
    }

    /// Sets the max number of concurrent network requests.
    pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
        self.network_concurrency = network_concurrency;
        self
    }
}

impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
    fn clone(&self) -> Self {
        Self {
            reqwest: self.reqwest.clone(),
            reporter: self.reporter.clone(),
            refreshed_sources: self.refreshed_sources.clone(),
            network_concurrency: self.network_concurrency,
        }
    }
}

impl Project {
    /// Downloads a graph of dependencies.
    #[instrument(skip_all, level = "debug")]
    pub(crate) async fn download_graph<Reporter>(
        &self,
        graph: &DependencyGraph,
        options: DownloadGraphOptions<Reporter>,
    ) -> Result<
        impl Stream<
            Item = Result<(PackageId, DependencyGraphNode, PackageFs), errors::DownloadGraphError>,
        >,
        errors::DownloadGraphError,
    >
    where
        Reporter: DownloadsReporter + Send + Sync + 'static,
    {
        let DownloadGraphOptions {
            reqwest,
            reporter,
            refreshed_sources,
            network_concurrency,
        } = options;

        let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));

        let mut tasks = graph
            .iter()
            .map(|(package_id, node)| {
                let span = tracing::info_span!("download", package_id = package_id.to_string());

                let project = self.clone();
                let reqwest = reqwest.clone();
                let reporter = reporter.clone();
                let refreshed_sources = refreshed_sources.clone();
                let semaphore = semaphore.clone();
                let package_id = Arc::new(package_id.clone());
                let node = node.clone();

                async move {
                    let progress_reporter = reporter
                        .clone()
                        .map(|reporter| reporter.report_download(package_id.to_string()));

                    let _permit = semaphore.acquire().await;

                    if let Some(ref progress_reporter) = progress_reporter {
                        progress_reporter.report_start();
                    }

                    let source = node.pkg_ref.source();
                    refreshed_sources
                        .refresh(
                            &source,
                            &RefreshOptions {
                                project: project.clone(),
                            },
                        )
                        .await?;

                    tracing::debug!("downloading");

                    let fs = match progress_reporter {
                        Some(progress_reporter) => {
                            source
                                .download(
                                    &node.pkg_ref,
                                    &DownloadOptions {
                                        project: project.clone(),
                                        reqwest,
                                        id: package_id.clone(),
                                        reporter: Arc::new(progress_reporter),
                                    },
                                )
                                .await
                        }
                        None => {
                            source
                                .download(
                                    &node.pkg_ref,
                                    &DownloadOptions {
                                        project: project.clone(),
                                        reqwest,
                                        id: package_id.clone(),
                                        reporter: Arc::new(()),
                                    },
                                )
                                .await
                        }
                    }
                    .map_err(Box::new)?;

                    tracing::debug!("downloaded");

                    Ok((Arc::into_inner(package_id).unwrap(), node, fs))
                }
                .instrument(span)
            })
            .collect::<JoinSet<Result<_, errors::DownloadGraphError>>>();

        let stream = try_stream! {
            while let Some(res) = tasks.join_next().await {
                yield res.unwrap()?;
            }
        };

        Ok(stream)
    }
}

/// Errors that can occur when downloading a graph
pub mod errors {
    use thiserror::Error;

    /// Errors that can occur when downloading a graph
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum DownloadGraphError {
        /// An error occurred refreshing a package source
        #[error("failed to refresh package source")]
        RefreshFailed(#[from] crate::source::errors::RefreshError),

        /// Error interacting with the filesystem
        #[error("error interacting with the filesystem")]
        Io(#[from] std::io::Error),

        /// Error downloading a package
        #[error("failed to download package")]
        DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
    }
}
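
A minimal usage sketch, for illustration only: since `download_graph` and
`DownloadGraphOptions` are `pub(crate)`, a caller elsewhere in the crate could
build the options fluently and drain the resulting stream. The function name
`download_all` and the exact import paths are assumptions, not part of the
file above.

use crate::{
    download::{errors, DownloadGraphOptions},
    graph::DependencyGraph,
    reporters::DownloadsReporter,
    Project,
};
use futures::StreamExt;
use std::{num::NonZeroUsize, sync::Arc};

async fn download_all<R>(
    project: &Project,
    graph: &DependencyGraph,
    client: reqwest::Client,
    reporter: Arc<R>,
) -> Result<(), errors::DownloadGraphError>
where
    R: DownloadsReporter + Send + Sync + 'static,
{
    // Build the options with the fluent setters; 8 concurrent requests is an
    // arbitrary example value (the default is 16).
    let options = DownloadGraphOptions::new(client)
        .reporter(reporter)
        .network_concurrency(NonZeroUsize::new(8).unwrap());

    // The stream yields (PackageId, DependencyGraphNode, PackageFs) triples
    // as downloads finish.
    let stream = project.download_graph(graph, options).await?;
    futures::pin_mut!(stream);

    while let Some(result) = stream.next().await {
        let (package_id, _node, _fs) = result?;
        tracing::debug!("downloaded {}", package_id);
    }

    Ok(())
}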