feat: add engines

Squashed commit of the following:

commit 5767042964
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Thu Jan 16 18:28:52 2025 +0100

    fix(engines): correct engine detection on unix

    The `current_exe` function doesn't return the
    symlinked path on Unix, so engine detection
    was failing there. This commit fixes that by
    using the program's 0th argument (argv[0]) to
    get the path the executable was invoked through
    on Unix.
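
    A minimal sketch of the change, mirroring the CLI entrypoint
    code further down in this diff:

        // On Unix, argv[0] preserves the (possibly symlinked) name the
        // program was invoked through, unlike current_exe()
        #[cfg(unix)]
        let current_exe = std::path::PathBuf::from(
            std::env::args_os().next().expect("argument 0 not set"),
        );
        #[cfg(not(unix))]
        let current_exe =
            std::env::current_exe().expect("failed to get current executable path");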

commit b51c9d9571
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 22:43:50 2025 +0100

    refactor: print deprecation warning on CLI side

    Prints the deprecation warning on the CLI side,
    which gives it a more consistent look with the
    rest of the CLI output.
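
    A condensed sketch of the CLI-side printing, based on the
    install code below, which suspends indicatif's progress bars
    so the warning prints cleanly:

        use colored::Colorize;
        use indicatif::MultiProgress;

        fn warn_deprecated(multi: &MultiProgress, name: &str, reason: &str) {
            // suspend the progress bars while writing to stdout
            multi.suspend(|| {
                println!(
                    "{}: package {name} is deprecated: {reason}",
                    "warn".yellow().bold()
                );
            });
        }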

commit 5ace844035
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 22:21:36 2025 +0100

    feat: add alias validation

    Ensures aliases don't contain characters which
    could cause issues. They are now also forbidden
    from matching an engine name, to avoid clashing
    with the engine linkers.
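
    The rules live in the new `Alias` type's `FromStr`
    implementation; a standalone sketch of what the checks amount
    to (the exact allowed character set is an assumption):

        fn validate_alias(alias: &str) -> Result<(), String> {
            // assumed rule: only characters safe in file names and bin linkers
            let chars_ok = !alias.is_empty()
                && alias
                    .chars()
                    .all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_');
            if !chars_ok {
                return Err(format!("alias `{alias}` contains forbidden characters"));
            }
            // aliases may not shadow an engine linker such as `pesde` or `lune`
            if matches!(alias.to_lowercase().as_str(), "pesde" | "lune") {
                return Err(format!("alias `{alias}` is an engine name"));
            }
            Ok(())
        }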

commit a33302aff9
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 21:23:40 2025 +0100

    refactor: apply clippy lints

commit 2d534a534d
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 21:22:14 2025 +0100

    feat(engines): print incompatibility warning for dependencies

    Adds a warning message when a dependency depends
    on an incompatible engine.
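
    The check reduces to comparing the resolved engine version
    against each dependency's requirement via the `version_matches`
    helper added in this commit; a condensed sketch:

        use semver::{Version, VersionReq};

        /// Differs from `VersionReq::matches` in that EVERY version matches `*`
        fn version_matches(version: &Version, req: &VersionReq) -> bool {
            *req == VersionReq::STAR || req.matches(version)
        }

        fn warn_if_incompatible(id: &str, engine: &str, installed: &Version, req: &VersionReq) {
            if !version_matches(installed, req) {
                eprintln!("warn: package {id} requires {engine} {req}, but {installed} is installed");
            }
        }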

commit 4946a19f8b
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Wed Jan 15 18:33:38 2025 +0100

    feat(engines): create linkers at install time

    Additionally fixes engines being executed as scripts,
    and fixes downloading pesde from GitHub.
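
    A linker is a symlink in the bin directory, named after the
    engine and pointing at the pesde executable, which dispatches
    on argv[0]; condensed from `make_linker_if_needed` below
    (`fs` is `fs_err::tokio`):

        async fn make_linker(linker: &std::path::Path) -> anyhow::Result<()> {
            let exe = std::env::current_exe()?;
            // the real code first checks whether the linker already exists
            #[cfg(windows)]
            let result = fs::symlink_file(exe, linker);
            #[cfg(not(windows))]
            let result = fs::symlink(exe, linker);
            result.await?;
            Ok(())
        }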

commit e3177eeb75
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Tue Jan 14 14:33:26 2025 +0100

    fix(engines): store & link engines correctly

    Fixes issues with how engines were stored,
    which resulted in errors. Also ensures outdated
    linkers get updated.

commit 037ead66bb
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:26:19 2025 +0100

    docs: remove prerequisites

commit ddb496ff7d
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:25:53 2025 +0100

    ci: remove tar builds

commit e9f0c25554
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Mon Jan 13 12:25:11 2025 +0100

    chore(docs): update astro and starlight

commit fc349e6f21
Author: daimond113 <72147841+daimond113@users.noreply.github.com>
Date:   Sun Jan 12 23:12:27 2025 +0100

    feat: add engines

    Adds the initial implementation of the engines feature.
    Not tested yet. Requires documentation and
    more work for non-pesde engines to be usable.
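
    With it, a project manifest can pin engine versions via the
    `engines` table the code reads; a hypothetical pesde.toml
    excerpt (the version requirements here are made up):

        [engines]
        pesde = "^0.6.0"
        lune = "^0.8.0"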
daimond113 2025-01-16 19:11:16 +01:00
parent d4979bbdb2
commit f4050abec8
45 changed files with 1497 additions and 518 deletions


@ -96,11 +96,9 @@ jobs:
if [ ${{ matrix.host }} = "windows" ]; then
mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }}.exe ${{ env.BIN_NAME }}.exe
7z a ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}.exe
tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}.exe
else
mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }} ${{ env.BIN_NAME }}
zip -r ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}
tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}
fi
- name: Upload zip artifact
@ -109,12 +107,6 @@ jobs:
name: ${{ env.ARCHIVE_NAME }}.zip
path: ${{ env.ARCHIVE_NAME }}.zip
- name: Upload tar.gz artifact
uses: actions/upload-artifact@v4
with:
name: ${{ env.ARCHIVE_NAME }}.tar.gz
path: ${{ env.ARCHIVE_NAME }}.tar.gz
publish:
name: Publish to crates.io
runs-on: ubuntu-latest


@ -14,7 +14,6 @@ bin = [
"dep:clap",
"dep:dirs",
"dep:tracing-subscriber",
"reqwest/json",
"dep:indicatif",
"dep:inquire",
"dep:toml_edit",
@ -30,7 +29,7 @@ bin = [
"tokio/rt-multi-thread",
"tokio/macros",
]
wally-compat = ["dep:async_zip", "dep:serde_json"]
wally-compat = ["dep:serde_json"]
patches = ["dep:git2"]
version-management = ["bin"]
schema = ["dep:schemars"]
@ -49,7 +48,7 @@ toml = "0.8.19"
serde_with = "3.11.0"
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.24", features = ["serde"] }
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls", "stream"] }
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls", "stream", "json"] }
tokio-tar = "0.3.1"
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
pathdiff = "0.2.3"
@ -68,11 +67,11 @@ tempfile = "3.14.0"
wax = { version = "0.6.0", default-features = false }
fs-err = { version = "3.0.0", features = ["tokio"] }
urlencoding = "2.1.3"
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true }
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
serde_json = { version = "1.0.133", optional = true }
schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"], optional = true }



@ -10,20 +10,20 @@
"astro": "astro"
},
"dependencies": {
"@astrojs/check": "^0.9.3",
"@astrojs/starlight": "^0.28.2",
"@astrojs/starlight-tailwind": "^2.0.3",
"@astrojs/tailwind": "^5.1.1",
"@fontsource-variable/nunito-sans": "^5.1.0",
"@shikijs/rehype": "^1.21.0",
"astro": "^4.15.9",
"@astrojs/check": "0.9.4",
"@astrojs/starlight": "0.30.6",
"@astrojs/starlight-tailwind": "3.0.0",
"@astrojs/tailwind": "5.1.4",
"@fontsource-variable/nunito-sans": "^5.1.1",
"@shikijs/rehype": "^1.26.2",
"astro": "5.1.5",
"sharp": "^0.33.5",
"shiki": "^1.21.0",
"tailwindcss": "^3.4.13",
"typescript": "^5.6.2"
"shiki": "^1.26.2",
"tailwindcss": "^3.4.17",
"typescript": "^5.7.3"
},
"devDependencies": {
"prettier-plugin-astro": "^0.14.1",
"prettier-plugin-tailwindcss": "^0.6.8"
"prettier-plugin-tailwindcss": "^0.6.9"
}
}


@ -1,6 +1,7 @@
import { defineCollection } from "astro:content"
import { docsLoader } from "@astrojs/starlight/loaders"
import { docsSchema } from "@astrojs/starlight/schema"
export const collections = {
docs: defineCollection({ schema: docsSchema() }),
docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }),
}


@ -5,22 +5,11 @@ description: Install pesde
import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
## Prerequisites
pesde requires [Lune](https://lune-org.github.io/docs) to be installed on your
system in order to function properly.
You can follow the installation instructions in the
[Lune documentation](https://lune-org.github.io/docs/getting-started/1-installation).
## Installing pesde
<Steps>
1. Go to the [GitHub releases page](https://github.com/pesde-pkg/pesde/releases/latest).
2. Download the corresponding archive for your operating system. You can choose
whether to use the `.zip` or `.tar.gz` files.
2. Download the corresponding archive for your operating system.
3. Extract the downloaded archive to a folder on your computer.
@ -76,6 +65,7 @@ You can follow the installation instructions in the
</TabItem>
</Tabs>
<br />
5. Verify that pesde is installed by running the following command:


@ -368,6 +368,7 @@ pub async fn publish_package(
let new_entry = IndexFileEntry {
target: manifest.target.clone(),
published_at: chrono::Utc::now(),
engines: manifest.engines.clone(),
description: manifest.description.clone(),
license: manifest.license.clone(),
authors: manifest.authors.clone(),


@ -50,8 +50,10 @@ pub async fn search_packages(
let source = Arc::new(app_state.source.clone().read_owned().await);
let mut results = Vec::with_capacity(top_docs.len());
results.extend((0..top_docs.len()).map(|_| None::<PackageResponse>));
let mut results = top_docs
.iter()
.map(|_| None::<PackageResponse>)
.collect::<Vec<_>>();
let mut tasks = top_docs
.into_iter()


@ -3,7 +3,7 @@ use chrono::{DateTime, Utc};
use pesde::{
manifest::{
target::{Target, TargetKind},
DependencyType,
Alias, DependencyType,
},
names::PackageName,
source::{
@ -125,7 +125,7 @@ pub struct PackageResponseInner {
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
docs: BTreeSet<RegistryDocEntry>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageResponseInner {


@ -7,7 +7,7 @@ use semver::VersionReq;
use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
use pesde::{
manifest::target::TargetKind,
manifest::{target::TargetKind, Alias},
names::PackageNames,
source::{
git::{specifier::GitDependencySpecifier, GitPackageSource},
@ -37,7 +37,7 @@ pub struct AddCommand {
/// The alias to use for the package
#[arg(short, long)]
alias: Option<String>,
alias: Option<Alias>,
/// Whether to add the package as a peer dependency
#[arg(short, long)]
@ -180,24 +180,29 @@ impl AddCommand {
"dependencies"
};
let alias = self.alias.unwrap_or_else(|| match &self.name {
AnyPackageIdentifier::PackageName(versioned) => versioned.0.name().to_string(),
AnyPackageIdentifier::Url((url, _)) => url
.path
.to_string()
.split('/')
.last()
.map(|s| s.to_string())
.unwrap_or(url.path.to_string()),
AnyPackageIdentifier::Workspace(versioned) => versioned.0.name().to_string(),
AnyPackageIdentifier::Path(path) => path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.expect("path has no file name"),
});
let alias = match self.alias {
Some(alias) => alias,
None => match &self.name {
AnyPackageIdentifier::PackageName(versioned) => versioned.0.name().to_string(),
AnyPackageIdentifier::Url((url, _)) => url
.path
.to_string()
.split('/')
.next_back()
.map(|s| s.to_string())
.unwrap_or(url.path.to_string()),
AnyPackageIdentifier::Workspace(versioned) => versioned.0.name().to_string(),
AnyPackageIdentifier::Path(path) => path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.expect("path has no file name"),
}
.parse()
.context("auto-generated alias is invalid. use --alias to specify one")?,
};
let field = &mut manifest[dependency_key]
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[&alias];
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[alias.as_str()];
match specifier {
DependencySpecifiers::Pesde(spec) => {


@ -259,7 +259,7 @@ impl InitCommand {
continue;
};
let field = &mut dev_deps[alias];
let field = &mut dev_deps[alias.as_str()];
field["name"] = toml_edit::value(spec.name.to_string());
field["version"] = toml_edit::value(spec.version.to_string());
field["target"] = toml_edit::value(


@ -1,8 +1,9 @@
use crate::cli::{version::update_bin_exe, HOME_DIR};
use crate::cli::{version::replace_pesde_bin_exe, HOME_DIR};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use std::env::current_exe;
#[derive(Debug, Args)]
pub struct SelfInstallCommand {
/// Skip adding the bin directory to the PATH
@ -70,7 +71,7 @@ and then restart your shell.
);
}
update_bin_exe(&current_exe().context("failed to get current exe path")?).await?;
replace_pesde_bin_exe(&current_exe().context("failed to get current exe path")?).await?;
Ok(())
}


@ -1,13 +1,17 @@
use crate::cli::{
config::read_config,
version::{
current_version, get_or_download_version, get_remote_version, no_build_metadata,
update_bin_exe, TagInfo, VersionType,
use crate::{
cli::{
config::read_config,
version::{
current_version, find_latest_version, get_or_download_engine, replace_pesde_bin_exe,
},
},
util::no_build_metadata,
};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use pesde::engine::EngineKind;
use semver::VersionReq;
#[derive(Debug, Args)]
pub struct SelfUpgradeCommand {
@ -25,7 +29,7 @@ impl SelfUpgradeCommand {
.context("no cached version found")?
.1
} else {
get_remote_version(&reqwest, VersionType::Latest).await?
find_latest_version(&reqwest).await?
};
let latest_version_no_metadata = no_build_metadata(&latest_version);
@ -46,10 +50,13 @@ impl SelfUpgradeCommand {
return Ok(());
}
let path = get_or_download_version(&reqwest, TagInfo::Complete(latest_version), true)
.await?
.unwrap();
update_bin_exe(&path).await?;
let path = get_or_download_engine(
&reqwest,
EngineKind::Pesde,
VersionReq::parse(&format!("={latest_version}")).unwrap(),
)
.await?;
replace_pesde_bin_exe(&path).await?;
println!("upgraded to version {display_latest_version}!");


@ -1,10 +1,3 @@
use std::{
collections::{BTreeMap, BTreeSet, HashMap},
num::NonZeroUsize,
sync::Arc,
time::Instant,
};
use super::files::make_executable;
use crate::cli::{
bin_dir,
@ -16,14 +9,23 @@ use colored::Colorize;
use fs_err::tokio as fs;
use pesde::{
download_and_link::{DownloadAndLinkHooks, DownloadAndLinkOptions},
engine::EngineKind,
graph::{DependencyGraph, DependencyGraphWithTarget},
lockfile::Lockfile,
manifest::{target::TargetKind, DependencyType},
Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
manifest::{target::TargetKind, Alias, DependencyType, Manifest},
names::PackageNames,
source::{pesde::PesdePackageSource, refs::PackageRefs, traits::PackageRef, PackageSources},
version_matches, Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
};
use std::{
collections::{BTreeMap, BTreeSet, HashMap},
num::NonZeroUsize,
sync::Arc,
time::Instant,
};
use tokio::task::JoinSet;
fn bin_link_file(alias: &str) -> String {
fn bin_link_file(alias: &Alias) -> String {
let mut all_combinations = BTreeSet::new();
for a in TargetKind::VARIANTS {
@ -68,23 +70,13 @@ impl DownloadAndLinkHooks for InstallHooks {
.values()
.filter(|node| node.target.bin_path().is_some())
.filter_map(|node| node.node.direct.as_ref())
.map(|(alias, _, _)| alias)
.filter(|alias| {
if *alias == env!("CARGO_BIN_NAME") {
tracing::warn!(
"package {alias} has the same name as the CLI, skipping bin link"
);
return false;
}
true
})
.map(|alias| {
.map(|(alias, _, _)| {
let bin_folder = self.bin_folder.clone();
let alias = alias.clone();
async move {
let bin_exec_file = bin_folder
.join(&alias)
.join(alias.as_str())
.with_extension(std::env::consts::EXE_EXTENSION);
let impl_folder = bin_folder.join(".impl");
@ -92,7 +84,7 @@ impl DownloadAndLinkHooks for InstallHooks {
.await
.context("failed to create bin link folder")?;
let bin_file = impl_folder.join(&alias).with_extension("luau");
let bin_file = impl_folder.join(alias.as_str()).with_extension("luau");
fs::write(&bin_file, bin_link_file(&alias))
.await
.context("failed to write bin link file")?;
@ -196,10 +188,26 @@ pub async fn install(
let overrides = resolve_overrides(&manifest)?;
let (new_lockfile, old_graph) =
reporters::run_with_reporter(|_, root_progress, reporter| async {
reporters::run_with_reporter(|multi, root_progress, reporter| async {
let multi = multi;
let root_progress = root_progress;
root_progress.set_prefix(format!("{} {}: ", manifest.name, manifest.target));
#[cfg(feature = "version-management")]
{
root_progress.set_message("update engine linkers");
let mut tasks = manifest
.engines
.keys()
.map(|engine| crate::cli::version::make_linker_if_needed(*engine))
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
}
root_progress.set_message("clean");
if options.write {
@ -246,6 +254,41 @@ pub async fn install(
)
.await
.context("failed to build dependency graph")?;
let mut tasks = graph
.iter()
.filter_map(|(id, node)| {
let PackageSources::Pesde(source) = node.pkg_ref.source() else {
return None;
};
#[allow(irrefutable_let_patterns)]
let PackageNames::Pesde(name) = id.name().clone() else {
panic!("unexpected package name");
};
let project = project.clone();
Some(async move {
let file = source.read_index_file(&name, &project).await.context("failed to read package index file")?.context("package not found in index")?;
Ok::<_, anyhow::Error>(if file.meta.deprecated.is_empty() {
None
} else {
Some((name, file.meta.deprecated))
})
})
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let Some((name, reason)) = task.unwrap()? else {
continue;
};
multi.suspend(|| {
println!("{}: package {name} is deprecated: {reason}", "warn".yellow().bold());
});
}
let graph = Arc::new(graph);
if options.write {
@ -285,9 +328,104 @@ pub async fn install(
root_progress.set_message("patch");
project
.apply_patches(&downloaded_graph.convert(), reporter)
.apply_patches(&downloaded_graph.clone().convert(), reporter)
.await?;
}
#[cfg(feature = "version-management")]
{
let mut tasks = manifest
.engines
.into_iter()
.map(|(engine, req)| async move {
Ok::<_, anyhow::Error>(
crate::cli::version::get_installed_versions(engine)
.await?
.into_iter()
.filter(|version| version_matches(version, &req))
.next_back()
.map(|version| (engine, version)),
)
})
.collect::<JoinSet<_>>();
let mut resolved_engine_versions = HashMap::new();
while let Some(task) = tasks.join_next().await {
let Some((engine, version)) = task.unwrap()? else {
continue;
};
resolved_engine_versions.insert(engine, version);
}
let manifest_target_kind = manifest.target.kind();
let mut tasks = downloaded_graph.iter()
.map(|(id, node)| {
let id = id.clone();
let node = node.clone();
let project = project.clone();
async move {
let engines = match &node.node.pkg_ref {
PackageRefs::Pesde(pkg_ref) => {
let source = PesdePackageSource::new(pkg_ref.index_url.clone());
#[allow(irrefutable_let_patterns)]
let PackageNames::Pesde(name) = id.name() else {
panic!("unexpected package name");
};
let mut file = source.read_index_file(name, &project).await.context("failed to read package index file")?.context("package not found in index")?;
file
.entries
.remove(id.version_id())
.context("package version not found in index")?
.engines
}
#[cfg(feature = "wally-compat")]
PackageRefs::Wally(_) => Default::default(),
_ => {
let path = node.node.container_folder_from_project(
&id,
&project,
manifest_target_kind,
);
match fs::read_to_string(path.join(MANIFEST_FILE_NAME)).await {
Ok(manifest) => match toml::from_str::<Manifest>(&manifest) {
Ok(manifest) => manifest.engines,
Err(e) => return Err(e).context("failed to read package manifest"),
},
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Default::default(),
Err(e) => return Err(e).context("failed to read package manifest"),
}
}
};
Ok((id, engines))
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let (id, required_engines) = task.unwrap()?;
for (engine, req) in required_engines {
if engine == EngineKind::Pesde {
continue;
}
let Some(version) = resolved_engine_versions.get(&engine) else {
tracing::debug!("package {id} requires {engine} {req}, but it is not installed");
continue;
};
if !version_matches(version, &req) {
multi.suspend(|| {
println!("{}: package {id} requires {engine} {req}, but {version} is installed", "warn".yellow().bold());
});
}
}
}
}
}
root_progress.set_message("finish");
@ -310,7 +448,7 @@ pub async fn install(
anyhow::Ok((new_lockfile, old_graph.unwrap_or_default()))
})
.await?;
.await?;
let elapsed = start.elapsed();


@ -99,31 +99,29 @@ impl<W> CliReporter<W> {
}
}
pub struct CliDownloadProgressReporter<'a, W> {
root_reporter: &'a CliReporter<W>,
pub struct CliDownloadProgressReporter<W> {
root_reporter: Arc<CliReporter<W>>,
name: String,
progress: OnceLock<ProgressBar>,
set_progress: Once,
}
impl<'a, W: Write + Send + Sync + 'static> DownloadsReporter<'a> for CliReporter<W> {
type DownloadProgressReporter = CliDownloadProgressReporter<'a, W>;
impl<W: Write + Send + Sync + 'static> DownloadsReporter for CliReporter<W> {
type DownloadProgressReporter = CliDownloadProgressReporter<W>;
fn report_download<'b>(&'a self, name: &'b str) -> Self::DownloadProgressReporter {
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {
self.root_progress.inc_length(1);
CliDownloadProgressReporter {
root_reporter: self,
name: name.to_string(),
name,
progress: OnceLock::new(),
set_progress: Once::new(),
}
}
}
impl<W: Write + Send + Sync + 'static> DownloadProgressReporter
for CliDownloadProgressReporter<'_, W>
{
impl<W: Write + Send + Sync + 'static> DownloadProgressReporter for CliDownloadProgressReporter<W> {
fn report_start(&self) {
let progress = self.root_reporter.multi_progress.add(ProgressBar::new(0));
progress.set_style(self.root_reporter.child_style.clone());
@ -171,16 +169,16 @@ impl<W: Write + Send + Sync + 'static> DownloadProgressReporter
}
}
pub struct CliPatchProgressReporter<'a, W> {
root_reporter: &'a CliReporter<W>,
pub struct CliPatchProgressReporter<W> {
root_reporter: Arc<CliReporter<W>>,
name: String,
progress: ProgressBar,
}
impl<'a, W: Write + Send + Sync + 'static> PatchesReporter<'a> for CliReporter<W> {
type PatchProgressReporter = CliPatchProgressReporter<'a, W>;
impl<W: Write + Send + Sync + 'static> PatchesReporter for CliReporter<W> {
type PatchProgressReporter = CliPatchProgressReporter<W>;
fn report_patch<'b>(&'a self, name: &'b str) -> Self::PatchProgressReporter {
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter {
let progress = self.multi_progress.add(ProgressBar::new(0));
progress.set_style(self.child_style.clone());
progress.set_message(format!("- {name}"));
@ -195,7 +193,7 @@ impl<'a, W: Write + Send + Sync + 'static> PatchesReporter<'a> for CliReporter<W
}
}
impl<W: Write + Send + Sync + 'static> PatchProgressReporter for CliPatchProgressReporter<'_, W> {
impl<W: Write + Send + Sync + 'static> PatchProgressReporter for CliPatchProgressReporter<W> {
fn report_done(&self) {
if self.progress.is_hidden() {
writeln!(


@ -1,97 +1,59 @@
use crate::cli::{
bin_dir,
config::{read_config, write_config, CliConfig},
files::make_executable,
home_dir,
use crate::{
cli::{
bin_dir,
config::{read_config, write_config, CliConfig},
files::make_executable,
home_dir,
reporters::run_with_reporter,
},
util::no_build_metadata,
};
use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use pesde::{
engine::{
source::{
traits::{DownloadOptions, EngineSource, ResolveOptions},
EngineSources,
},
EngineKind,
},
reporters::DownloadsReporter,
version_matches,
};
use semver::{Version, VersionReq};
use std::{
collections::BTreeSet,
env::current_exe,
path::{Path, PathBuf},
sync::Arc,
};
use tokio::io::AsyncWrite;
use tracing::instrument;
pub fn current_version() -> Version {
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
}
#[derive(Debug, Deserialize)]
struct Release {
tag_name: String,
assets: Vec<Asset>,
}
#[derive(Debug, Deserialize)]
struct Asset {
name: String,
url: url::Url,
}
#[instrument(level = "trace")]
fn get_repo() -> (String, String) {
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
let (owner, repo) = (
parts.next().unwrap().to_string(),
parts.next().unwrap().to_string(),
);
tracing::trace!("repository for updates: {owner}/{repo}");
(owner, repo)
}
#[derive(Debug)]
pub enum VersionType {
Latest,
Specific(Version),
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_remote_version(
reqwest: &reqwest::Client,
ty: VersionType,
) -> anyhow::Result<Version> {
let (owner, repo) = get_repo();
let mut releases = reqwest
.get(format!(
"https://api.github.com/repos/{owner}/{repo}/releases",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Vec<Release>>()
.await
.context("failed to parse GitHub API response")?
.into_iter()
.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
match ty {
VersionType::Latest => releases.max(),
VersionType::Specific(version) => {
releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
}
}
.context("failed to find latest version")
}
pub fn no_build_metadata(version: &Version) -> Version {
let mut version = version.clone();
version.build = semver::BuildMetadata::EMPTY;
version
}
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
pub async fn find_latest_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
let version = EngineSources::pesde()
.resolve(
&VersionReq::STAR,
&ResolveOptions {
reqwest: reqwest.clone(),
},
)
.await
.context("failed to resolve version")?
.pop_last()
.context("no versions found")?
.0;
Ok(version)
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
let config = read_config().await?;
@ -104,7 +66,7 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
version
} else {
tracing::debug!("checking for updates");
let version = get_remote_version(reqwest, VersionType::Latest).await?;
let version = find_latest_version(reqwest).await?;
write_config(&CliConfig {
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
@ -180,154 +142,132 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
Ok(())
}
#[instrument(skip(reqwest, writer), level = "trace")]
pub async fn download_github_release<W: AsyncWrite + Unpin>(
reqwest: &reqwest::Client,
version: &Version,
mut writer: W,
) -> anyhow::Result<()> {
let (owner, repo) = get_repo();
const ENGINES_DIR: &str = "engines";
let release = reqwest
.get(format!(
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Release>()
.await
.context("failed to parse GitHub API response")?;
#[instrument(level = "trace")]
pub async fn get_installed_versions(engine: EngineKind) -> anyhow::Result<BTreeSet<Version>> {
let source = engine.source();
let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
let mut installed_versions = BTreeSet::new();
let asset = release
.assets
.into_iter()
.find(|asset| {
asset.name.ends_with(&format!(
"-{}-{}.tar.gz",
std::env::consts::OS,
std::env::consts::ARCH
))
})
.context("failed to find asset for current platform")?;
let mut read_dir = match fs::read_dir(&path).await {
Ok(read_dir) => read_dir,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(installed_versions),
Err(e) => return Err(e).context("failed to read engines directory"),
};
let bytes = reqwest
.get(asset.url)
.header(ACCEPT, "application/octet-stream")
.send()
.await
.context("failed to send request to download asset")?
.error_for_status()
.context("failed to download asset")?
.bytes()
.await
.context("failed to download asset")?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
let mut archive = tokio_tar::Archive::new(&mut decoder);
let Some(version) = path.file_name().and_then(|s| s.to_str()) else {
continue;
};
let mut entry = archive
.entries()
.context("failed to read archive entries")?
.next()
.await
.context("archive has no entry")?
.context("failed to get first archive entry")?;
if let Ok(version) = Version::parse(version) {
installed_versions.insert(version);
}
}
tokio::io::copy(&mut entry, &mut writer)
.await
.context("failed to write archive entry to file")
.map(|_| ())
}
#[derive(Debug)]
pub enum TagInfo {
Complete(Version),
Incomplete(Version),
Ok(installed_versions)
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_or_download_version(
pub async fn get_or_download_engine(
reqwest: &reqwest::Client,
tag: TagInfo,
always_give_path: bool,
) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions");
engine: EngineKind,
req: VersionReq,
) -> anyhow::Result<PathBuf> {
let source = engine.source();
let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
let installed_versions = get_installed_versions(engine).await?;
let max_matching = installed_versions
.iter()
.filter(|v| version_matches(v, &req))
.next_back();
if let Some(version) = max_matching {
return Ok(path
.join(version.to_string())
.join(source.expected_file_name())
.with_extension(std::env::consts::EXE_EXTENSION));
}
let mut versions = source
.resolve(
&req,
&ResolveOptions {
reqwest: reqwest.clone(),
},
)
.await
.context("failed to resolve versions")?;
let (version, engine_ref) = versions.pop_last().context("no matching versions found")?;
let path = path.join(version.to_string());
fs::create_dir_all(&path)
.await
.context("failed to create versions directory")?;
.context("failed to create engine container folder")?;
let version = match &tag {
TagInfo::Complete(version) => version,
// don't fetch the version since it could be cached
TagInfo::Incomplete(version) => version,
};
let path = path
.join(source.expected_file_name())
.with_extension(std::env::consts::EXE_EXTENSION);
let path = path.join(format!(
"{}{}",
no_build_metadata(version),
std::env::consts::EXE_SUFFIX
));
let mut file = fs::File::create(&path)
.await
.context("failed to create new file")?;
let is_requested_version = !always_give_path && *version == current_version();
run_with_reporter(|_, root_progress, reporter| async {
let root_progress = root_progress;
if path.exists() {
tracing::debug!("version already exists");
root_progress.set_message("download");
return Ok(if is_requested_version {
None
} else {
Some(path)
});
}
let reporter = reporter.report_download(format!("{engine} v{version}"));
if is_requested_version {
tracing::debug!("copying current executable to version directory");
fs::copy(current_exe()?, &path)
let archive = source
.download(
&engine_ref,
&DownloadOptions {
reqwest: reqwest.clone(),
reporter: Arc::new(reporter),
version: version.clone(),
},
)
.await
.context("failed to copy current executable to version directory")?;
} else {
let version = match tag {
TagInfo::Complete(version) => version,
TagInfo::Incomplete(version) => {
get_remote_version(reqwest, VersionType::Specific(version))
.await
.context("failed to get remote version")?
}
};
.context("failed to download engine")?;
tracing::debug!("downloading version");
download_github_release(
reqwest,
&version,
fs::File::create(&path)
tokio::io::copy(
&mut archive
.find_executable(source.expected_file_name())
.await
.context("failed to create version file")?,
.context("failed to find executable")?,
&mut file,
)
.await?;
}
.await
.context("failed to write to file")?;
Ok::<_, anyhow::Error>(())
})
.await?;
make_executable(&path)
.await
.context("failed to make downloaded version executable")?;
Ok(if is_requested_version {
None
} else {
Some(path)
})
if engine != EngineKind::Pesde {
make_linker_if_needed(engine).await?;
}
Ok(path)
}
#[instrument(level = "trace")]
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
let bin_exe_path = bin_dir().await?.join(format!(
"{}{}",
env!("CARGO_BIN_NAME"),
std::env::consts::EXE_SUFFIX
));
let mut downloaded_file = downloaded_file.to_path_buf();
pub async fn replace_pesde_bin_exe(with: &Path) -> anyhow::Result<()> {
let bin_exe_path = bin_dir()
.await?
.join(EngineKind::Pesde.to_string())
.with_extension(std::env::consts::EXE_EXTENSION);
let exists = bin_exe_path.exists();
@ -339,23 +279,42 @@ pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
let tempfile = tempfile::Builder::new()
.make(|_| Ok(()))
.context("failed to create temporary file")?;
let path = tempfile.into_temp_path().to_path_buf();
let temp_path = tempfile.into_temp_path().to_path_buf();
#[cfg(windows)]
let path = path.with_extension("exe");
let temp_path = temp_path.with_extension("exe");
let current_exe = current_exe().context("failed to get current exe path")?;
if current_exe == downloaded_file {
downloaded_file = path.to_path_buf();
match fs::rename(&bin_exe_path, &temp_path).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e).context("failed to rename existing executable"),
}
fs::rename(&bin_exe_path, &path)
.await
.context("failed to rename current executable")?;
}
fs::copy(downloaded_file, &bin_exe_path)
fs::copy(with, &bin_exe_path)
.await
.context("failed to copy executable to bin folder")?;
make_executable(&bin_exe_path).await
}
#[instrument(level = "trace")]
pub async fn make_linker_if_needed(engine: EngineKind) -> anyhow::Result<()> {
let bin_dir = bin_dir().await?;
let linker = bin_dir
.join(engine.to_string())
.with_extension(std::env::consts::EXE_EXTENSION);
let exists = linker.exists();
if !exists {
let exe = current_exe().context("failed to get current exe path")?;
#[cfg(windows)]
let result = fs::symlink_file(exe, linker);
#[cfg(not(windows))]
let result = fs::symlink(exe, linker);
result.await.context("failed to create symlink")?;
}
Ok(())
}


@ -29,7 +29,7 @@ pub(crate) struct DownloadGraphOptions<Reporter> {
impl<Reporter> DownloadGraphOptions<Reporter>
where
Reporter: for<'a> DownloadsReporter<'a> + Send + Sync + 'static,
Reporter: DownloadsReporter + Send + Sync + 'static,
{
/// Creates a new download options with the given reqwest client and reporter.
pub(crate) fn new(reqwest: reqwest::Client) -> Self {
@ -85,7 +85,7 @@ impl Project {
errors::DownloadGraphError,
>
where
Reporter: for<'a> DownloadsReporter<'a> + Send + Sync + 'static,
Reporter: DownloadsReporter + Send + Sync + 'static,
{
let DownloadGraphOptions {
reqwest,
@ -111,8 +111,8 @@ impl Project {
async move {
let progress_reporter = reporter
.as_deref()
.map(|reporter| reporter.report_download(&package_id.to_string()));
.clone()
.map(|reporter| reporter.report_download(package_id.to_string()));
let _permit = semaphore.acquire().await;

View file

@ -81,7 +81,7 @@ pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
impl<Reporter, Hooks> DownloadAndLinkOptions<Reporter, Hooks>
where
Reporter: for<'a> DownloadsReporter<'a> + Send + Sync + 'static,
Reporter: DownloadsReporter + Send + Sync + 'static,
Hooks: DownloadAndLinkHooks + Send + Sync + 'static,
{
/// Creates a new download options with the given reqwest client and reporter.
@ -149,7 +149,7 @@ impl Project {
options: DownloadAndLinkOptions<Reporter, Hooks>,
) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
where
Reporter: for<'a> DownloadsReporter<'a> + 'static,
Reporter: DownloadsReporter + 'static,
Hooks: DownloadAndLinkHooks + 'static,
{
let DownloadAndLinkOptions {

src/engine/mod.rs (new file)

@ -0,0 +1,63 @@
/// Sources of engines
pub mod source;
use crate::engine::source::EngineSources;
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{fmt::Display, str::FromStr};
/// All supported engines
#[derive(
SerializeDisplay, DeserializeFromStr, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "schema", schemars(rename_all = "snake_case"))]
pub enum EngineKind {
/// The pesde package manager
Pesde,
/// The Lune runtime
Lune,
}
impl Display for EngineKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
EngineKind::Pesde => write!(f, "pesde"),
EngineKind::Lune => write!(f, "lune"),
}
}
}
impl FromStr for EngineKind {
type Err = errors::EngineKindFromStrError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"pesde" => Ok(EngineKind::Pesde),
"lune" => Ok(EngineKind::Lune),
_ => Err(errors::EngineKindFromStrError::Unknown(s.to_string())),
}
}
}
impl EngineKind {
/// Returns the source to get this engine from
pub fn source(&self) -> EngineSources {
match self {
EngineKind::Pesde => EngineSources::pesde(),
EngineKind::Lune => EngineSources::lune(),
}
}
}
/// Errors related to engine kinds
pub mod errors {
use thiserror::Error;
/// Errors which can occur while using the FromStr implementation of EngineKind
#[derive(Debug, Error)]
pub enum EngineKindFromStrError {
/// The string isn't a recognized EngineKind
#[error("unknown engine kind {0}")]
Unknown(String),
}
}


@ -0,0 +1,320 @@
use futures::StreamExt;
use std::{
collections::BTreeSet,
mem::ManuallyDrop,
path::{Path, PathBuf},
pin::Pin,
str::FromStr,
task::{Context, Poll},
};
use tokio::{
io::{AsyncBufRead, AsyncRead, AsyncReadExt, ReadBuf},
pin,
};
use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt};
/// The kind of encoding used for the archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EncodingKind {
/// Gzip
Gzip,
}
/// The kind of archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ArchiveKind {
/// Tar
Tar,
/// Zip
Zip,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct ArchiveInfo(ArchiveKind, Option<EncodingKind>);
impl FromStr for ArchiveInfo {
type Err = errors::ArchiveInfoFromStrError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parts = s.split('.').collect::<Vec<_>>();
Ok(match &*parts {
[.., "tar", "gz"] => ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)),
[.., "tar"] => ArchiveInfo(ArchiveKind::Tar, None),
[.., "zip", "gz"] => {
return Err(errors::ArchiveInfoFromStrError::Unsupported(
ArchiveKind::Zip,
Some(EncodingKind::Gzip),
))
}
[.., "zip"] => ArchiveInfo(ArchiveKind::Zip, None),
_ => return Err(errors::ArchiveInfoFromStrError::Invalid(s.to_string())),
})
}
}
pub(crate) type ArchiveReader = Pin<Box<dyn AsyncBufRead>>;
/// An archive
pub struct Archive {
pub(crate) info: ArchiveInfo,
pub(crate) reader: ArchiveReader,
}
enum TarReader {
Gzip(async_compression::tokio::bufread::GzipDecoder<ArchiveReader>),
Plain(ArchiveReader),
}
// TODO: try to see if we can avoid the unsafe blocks
impl AsyncRead for TarReader {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
unsafe {
match self.get_unchecked_mut() {
Self::Gzip(r) => Pin::new_unchecked(r).poll_read(cx, buf),
Self::Plain(r) => Pin::new_unchecked(r).poll_read(cx, buf),
}
}
}
}
enum ArchiveEntryInner {
Tar(tokio_tar::Entry<tokio_tar::Archive<TarReader>>),
Zip {
archive: *mut async_zip::tokio::read::seek::ZipFileReader<std::io::Cursor<Vec<u8>>>,
reader: ManuallyDrop<
Compat<
async_zip::tokio::read::ZipEntryReader<
'static,
std::io::Cursor<Vec<u8>>,
async_zip::base::read::WithoutEntry,
>,
>,
>,
},
}
impl Drop for ArchiveEntryInner {
fn drop(&mut self) {
match self {
Self::Tar(_) => {}
Self::Zip { archive, reader } => unsafe {
ManuallyDrop::drop(reader);
drop(Box::from_raw(*archive));
},
}
}
}
/// An entry in an archive. Usually the executable
pub struct ArchiveEntry(ArchiveEntryInner);
impl AsyncRead for ArchiveEntry {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
unsafe {
match &mut self.get_unchecked_mut().0 {
ArchiveEntryInner::Tar(r) => Pin::new_unchecked(r).poll_read(cx, buf),
ArchiveEntryInner::Zip { reader, .. } => {
Pin::new_unchecked(&mut **reader).poll_read(cx, buf)
}
}
}
}
}
impl Archive {
/// Finds the executable in the archive and returns it as an [`ArchiveEntry`]
pub async fn find_executable(
self,
expected_file_name: &str,
) -> Result<ArchiveEntry, errors::FindExecutableError> {
#[derive(Debug, PartialEq, Eq)]
struct Candidate {
path: PathBuf,
file_name_matches: bool,
extension_matches: bool,
has_permissions: bool,
}
impl Candidate {
fn new(path: PathBuf, perms: u32, expected_file_name: &str) -> Self {
Self {
file_name_matches: path
.file_name()
.is_some_and(|name| name == expected_file_name),
extension_matches: match path.extension() {
Some(ext) if ext == std::env::consts::EXE_EXTENSION => true,
None if std::env::consts::EXE_EXTENSION.is_empty() => true,
_ => false,
},
path,
has_permissions: perms & 0o111 != 0,
}
}
fn should_be_considered(&self) -> bool {
// if nothing matches, we should not consider this candidate as it is most likely not the executable
self.file_name_matches || self.extension_matches || self.has_permissions
}
}
impl Ord for Candidate {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.file_name_matches
.cmp(&other.file_name_matches)
.then(self.extension_matches.cmp(&other.extension_matches))
.then(self.has_permissions.cmp(&other.has_permissions))
}
}
impl PartialOrd for Candidate {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
let mut candidates = BTreeSet::new();
match self.info {
ArchiveInfo(ArchiveKind::Tar, encoding) => {
use async_compression::tokio::bufread as decoders;
let reader = match encoding {
Some(EncodingKind::Gzip) => {
TarReader::Gzip(decoders::GzipDecoder::new(self.reader))
}
None => TarReader::Plain(self.reader),
};
let mut archive = tokio_tar::Archive::new(reader);
let mut entries = archive.entries()?;
while let Some(entry) = entries.next().await.transpose()? {
if entry.header().entry_type().is_dir() {
continue;
}
let candidate = Candidate::new(
entry.path()?.to_path_buf(),
entry.header().mode()?,
expected_file_name,
);
if candidate.should_be_considered() {
candidates.insert(candidate);
}
}
let Some(candidate) = candidates.pop_last() else {
return Err(errors::FindExecutableError::ExecutableNotFound);
};
let mut entries = archive.entries()?;
while let Some(entry) = entries.next().await.transpose()? {
if entry.header().entry_type().is_dir() {
continue;
}
let path = entry.path()?;
if path == candidate.path {
return Ok(ArchiveEntry(ArchiveEntryInner::Tar(entry)));
}
}
}
ArchiveInfo(ArchiveKind::Zip, _) => {
let reader = self.reader;
pin!(reader);
// TODO: would be lovely to not have to read the whole archive into memory
let mut buf = vec![];
reader.read_to_end(&mut buf).await?;
let archive = async_zip::base::read::seek::ZipFileReader::with_tokio(
std::io::Cursor::new(buf),
)
.await?;
for entry in archive.file().entries() {
if entry.dir()? {
continue;
}
let path: &Path = entry.filename().as_str()?.as_ref();
let candidate = Candidate::new(
path.to_path_buf(),
entry.unix_permissions().unwrap_or(0) as u32,
expected_file_name,
);
if candidate.should_be_considered() {
candidates.insert(candidate);
}
}
let Some(candidate) = candidates.pop_last() else {
return Err(errors::FindExecutableError::ExecutableNotFound);
};
for (i, entry) in archive.file().entries().iter().enumerate() {
if entry.dir()? {
continue;
}
let path: &Path = entry.filename().as_str()?.as_ref();
if candidate.path == path {
let ptr = Box::into_raw(Box::new(archive));
let reader = (unsafe { &mut *ptr }).reader_without_entry(i).await?;
return Ok(ArchiveEntry(ArchiveEntryInner::Zip {
archive: ptr,
reader: ManuallyDrop::new(reader.compat()),
}));
}
}
}
}
Err(errors::FindExecutableError::ExecutableNotFound)
}
}
/// Errors that can occur when working with archives
pub mod errors {
use thiserror::Error;
/// Errors that can occur when parsing archive info
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ArchiveInfoFromStrError {
/// The string is not a valid archive descriptor. E.g. `{name}.tar.gz`
#[error("string `{0}` is not a valid archive descriptor")]
Invalid(String),
/// The archive type is not supported. E.g. `{name}.zip.gz`
#[error("archive type {0:?} with encoding {1:?} is not supported")]
Unsupported(super::ArchiveKind, Option<super::EncodingKind>),
}
/// Errors that can occur when finding an executable in an archive
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum FindExecutableError {
/// The executable was not found in the archive
#[error("failed to find executable in archive")]
ExecutableNotFound,
/// An IO error occurred
#[error("IO error")]
Io(#[from] std::io::Error),
/// An error occurred reading the zip archive
#[error("failed to read zip archive")]
Zip(#[from] async_zip::error::ZipError),
}
}


@ -0,0 +1,19 @@
use serde::Deserialize;
/// A GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Release {
/// The tag name of the release
pub tag_name: String,
/// The assets of the release
pub assets: Vec<Asset>,
}
/// An asset of a GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Asset {
/// The name of the asset
pub name: String,
/// The download URL of the asset
pub url: url::Url,
}


@ -0,0 +1,146 @@
/// The GitHub engine reference
pub mod engine_ref;
use crate::{
engine::source::{
archive::Archive,
github::engine_ref::Release,
traits::{DownloadOptions, EngineSource, ResolveOptions},
},
reporters::{response_to_async_read, DownloadProgressReporter},
util::no_build_metadata,
version_matches,
};
use reqwest::header::ACCEPT;
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};
/// The GitHub engine source
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct GitHubEngineSource {
/// The owner of the repository to download from
pub owner: String,
/// The repository of which to download releases from
pub repo: String,
/// The template for the asset name. `{VERSION}` will be replaced with the version
pub asset_template: String,
}
impl EngineSource for GitHubEngineSource {
type Ref = Release;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn directory(&self) -> PathBuf {
PathBuf::from("github").join(&self.owner).join(&self.repo)
}
fn expected_file_name(&self) -> &str {
&self.repo
}
async fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
let ResolveOptions { reqwest, .. } = options;
Ok(reqwest
.get(format!(
"https://api.github.com/repos/{}/{}/releases",
urlencoding::encode(&self.owner),
urlencoding::encode(&self.repo),
))
.send()
.await?
.error_for_status()?
.json::<Vec<Release>>()
.await?
.into_iter()
.filter_map(
|release| match release.tag_name.trim_start_matches('v').parse() {
Ok(version) if version_matches(&version, requirement) => {
Some((version, release))
}
_ => None,
},
)
.collect())
}
async fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<Archive, Self::DownloadError> {
let DownloadOptions {
reqwest,
reporter,
version,
..
} = options;
let desired_asset_names = [
self.asset_template
.replace("{VERSION}", &version.to_string()),
self.asset_template
.replace("{VERSION}", &no_build_metadata(version).to_string()),
];
let asset = engine_ref
.assets
.iter()
.find(|asset| {
desired_asset_names
.iter()
.any(|name| asset.name.eq_ignore_ascii_case(name))
})
.ok_or(errors::DownloadError::AssetNotFound)?;
reporter.report_start();
let response = reqwest
.get(asset.url.clone())
.header(ACCEPT, "application/octet-stream")
.send()
.await?
.error_for_status()?;
Ok(Archive {
info: asset.name.parse()?,
reader: Box::pin(response_to_async_read(response, reporter.clone())),
})
}
}
/// Errors that can occur when working with the GitHub engine source
pub mod errors {
use thiserror::Error;
/// Errors that can occur when resolving a GitHub engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ResolveError {
/// Handling the request failed
#[error("failed to handle GitHub API request")]
Request(#[from] reqwest::Error),
}
/// Errors that can occur when downloading a GitHub engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadError {
/// An asset for the current platform could not be found
#[error("failed to find asset for current platform")]
AssetNotFound,
/// Handling the request failed
#[error("failed to handle GitHub API request")]
Request(#[from] reqwest::Error),
/// The asset's name could not be parsed
#[error("failed to parse asset name")]
ParseAssetName(#[from] crate::engine::source::archive::errors::ArchiveInfoFromStrError),
}
}

src/engine/source/mod.rs (new file)

@ -0,0 +1,143 @@
use crate::{
engine::source::{
archive::Archive,
traits::{DownloadOptions, EngineSource, ResolveOptions},
},
reporters::DownloadProgressReporter,
};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};
/// Archives
pub mod archive;
/// The GitHub engine source
pub mod github;
/// Traits for engine sources
pub mod traits;
/// Engine references
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineRefs {
/// A GitHub engine reference
GitHub(github::engine_ref::Release),
}
/// Engine sources
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineSources {
/// A GitHub engine source
GitHub(github::GitHubEngineSource),
}
impl EngineSource for EngineSources {
type Ref = EngineRefs;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn directory(&self) -> PathBuf {
match self {
EngineSources::GitHub(source) => source.directory(),
}
}
fn expected_file_name(&self) -> &str {
match self {
EngineSources::GitHub(source) => source.expected_file_name(),
}
}
async fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
match self {
EngineSources::GitHub(source) => source
.resolve(requirement, options)
.await
.map(|map| {
map.into_iter()
.map(|(version, release)| (version, EngineRefs::GitHub(release)))
.collect()
})
.map_err(Into::into),
}
}
async fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<Archive, Self::DownloadError> {
match (self, engine_ref) {
(EngineSources::GitHub(source), EngineRefs::GitHub(release)) => {
source.download(release, options).await.map_err(Into::into)
}
// for the future
#[allow(unreachable_patterns)]
_ => Err(errors::DownloadError::Mismatch),
}
}
}
impl EngineSources {
/// Returns the source for the pesde engine
pub fn pesde() -> Self {
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
let (owner, repo) = (
parts.next().unwrap().to_string(),
parts.next().unwrap().to_string(),
);
EngineSources::GitHub(github::GitHubEngineSource {
owner,
repo,
asset_template: format!(
"pesde-{{VERSION}}-{}-{}.zip",
std::env::consts::OS,
std::env::consts::ARCH
),
})
}
/// Returns the source for the lune engine
pub fn lune() -> Self {
EngineSources::GitHub(github::GitHubEngineSource {
owner: "lune-org".into(),
repo: "lune".into(),
asset_template: format!(
"lune-{{VERSION}}-{}-{}.zip",
std::env::consts::OS,
std::env::consts::ARCH
),
})
}
}
/// Errors that can occur when working with engine sources
pub mod errors {
use thiserror::Error;
/// Errors that can occur when resolving an engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ResolveError {
/// Failed to resolve the GitHub engine
#[error("failed to resolve github engine")]
GitHub(#[from] super::github::errors::ResolveError),
}
/// Errors that can occur when downloading an engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadError {
/// Failed to download the GitHub engine
#[error("failed to download github engine")]
GitHub(#[from] super::github::errors::DownloadError),
/// Mismatched engine reference
#[error("mismatched engine reference")]
Mismatch,
}
}


@ -0,0 +1,51 @@
use crate::{engine::source::archive::Archive, reporters::DownloadProgressReporter};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, fmt::Debug, future::Future, path::PathBuf, sync::Arc};
/// Options for resolving an engine
#[derive(Debug, Clone)]
pub struct ResolveOptions {
/// The reqwest client to use
pub reqwest: reqwest::Client,
}
/// Options for downloading an engine
#[derive(Debug, Clone)]
pub struct DownloadOptions<R: DownloadProgressReporter> {
/// The reqwest client to use
pub reqwest: reqwest::Client,
/// The reporter to use
pub reporter: Arc<R>,
/// The version of the engine to be downloaded
pub version: Version,
}
/// A source of engines
pub trait EngineSource: Debug {
/// The reference type for this source
type Ref;
/// The error type for resolving an engine from this source
type ResolveError: std::error::Error + Send + Sync + 'static;
/// The error type for downloading an engine from this source
type DownloadError: std::error::Error + Send + Sync + 'static;
/// Returns the folder to store the engine's versions in
fn directory(&self) -> PathBuf;
/// Returns the expected file name of the engine in the archive
fn expected_file_name(&self) -> &str;
/// Resolves a requirement to a reference
fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> impl Future<Output = Result<BTreeMap<Version, Self::Ref>, Self::ResolveError>> + Send + Sync;
/// Downloads an engine
fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> impl Future<Output = Result<Archive, Self::DownloadError>> + Send + Sync;
}


@ -1,7 +1,7 @@
use crate::{
manifest::{
target::{Target, TargetKind},
DependencyType,
Alias, DependencyType,
},
source::{
ids::{PackageId, VersionId},
@ -22,10 +22,10 @@ pub type Graph<Node> = BTreeMap<PackageId, Node>;
pub struct DependencyGraphNode {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
pub direct: Option<(Alias, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageId, String>,
pub dependencies: BTreeMap<PackageId, Alias>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on


@ -15,6 +15,7 @@ use async_stream::try_stream;
use fs_err::tokio as fs;
use futures::Stream;
use gix::sec::identity::Account;
use semver::{Version, VersionReq};
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
@ -29,6 +30,8 @@ use wax::Pattern;
pub mod download;
/// Utility for downloading and linking in the correct order
pub mod download_and_link;
/// Handling of engines
pub mod engine;
/// Graphs
pub mod graph;
/// Linking packages
@ -117,8 +120,8 @@ struct ProjectShared {
package_dir: PathBuf,
workspace_dir: Option<PathBuf>,
data_dir: PathBuf,
auth_config: AuthConfig,
cas_dir: PathBuf,
auth_config: AuthConfig,
}
/// The main struct of the pesde library, representing a project
@ -130,11 +133,11 @@ pub struct Project {
impl Project {
/// Create a new `Project`
pub fn new<P: AsRef<Path>, Q: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(
package_dir: P,
workspace_dir: Option<Q>,
data_dir: R,
cas_dir: S,
pub fn new(
package_dir: impl AsRef<Path>,
workspace_dir: Option<impl AsRef<Path>>,
data_dir: impl AsRef<Path>,
cas_dir: impl AsRef<Path>,
auth_config: AuthConfig,
) -> Self {
Project {
@ -142,8 +145,8 @@ impl Project {
package_dir: package_dir.as_ref().to_path_buf(),
workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
data_dir: data_dir.as_ref().to_path_buf(),
auth_config,
cas_dir: cas_dir.as_ref().to_path_buf(),
auth_config,
}),
}
}
@ -163,16 +166,16 @@ impl Project {
&self.shared.data_dir
}
/// The authentication configuration
pub fn auth_config(&self) -> &AuthConfig {
&self.shared.auth_config
}
/// The CAS (content-addressable storage) directory
pub fn cas_dir(&self) -> &Path {
&self.shared.cas_dir
}
/// The authentication configuration
pub fn auth_config(&self) -> &AuthConfig {
&self.shared.auth_config
}
/// Read the manifest file
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
@ -425,6 +428,12 @@ pub async fn find_roots(
Ok((project_root.unwrap_or(cwd), workspace_dir))
}
/// Returns whether a version matches a version requirement
/// Differs from `VersionReq::matches` in that EVERY version matches `*`
pub fn version_matches(version: &Version, req: &VersionReq) -> bool {
*req == VersionReq::STAR || req.matches(version)
}
/// Errors that can occur when using the pesde library
pub mod errors {
use std::path::PathBuf;


@ -1,7 +1,7 @@
use crate::{
graph::{DependencyGraphNodeWithTarget, DependencyGraphWithTarget},
linking::generator::get_file_types,
manifest::Manifest,
manifest::{Alias, Manifest},
scripts::{execute_script, ExecuteScriptHooks, ScriptName},
source::{
fs::{cas_path, store_in_cas},
@ -169,7 +169,7 @@ impl Project {
relative_container_folder: &Path,
node: &DependencyGraphNodeWithTarget,
package_id: &PackageId,
alias: &str,
alias: &Alias,
package_types: &Arc<PackageTypes>,
manifest: &Arc<Manifest>,
remove: bool,
@ -243,7 +243,8 @@ impl Project {
.filter(|s| !s.is_empty() && node.node.direct.is_some() && is_root)
{
let scripts_container = self.package_dir().join(SCRIPTS_LINK_FOLDER);
let scripts_base = create_and_canonicalize(scripts_container.join(alias)).await?;
let scripts_base =
create_and_canonicalize(scripts_container.join(alias.as_str())).await?;
if remove {
tasks.spawn(async move {


@ -41,7 +41,7 @@ pub mod old {
manifest::{
overrides::OverrideKey,
target::{Target, TargetKind},
DependencyType,
Alias, DependencyType,
},
names::{PackageName, PackageNames},
source::{
@ -60,10 +60,10 @@ pub mod old {
pub struct DependencyGraphNodeOld {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
pub direct: Option<(Alias, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
pub dependencies: BTreeMap<PackageNames, (VersionId, Alias)>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on


@ -1,14 +1,16 @@
#[cfg(feature = "version-management")]
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
use crate::cli::version::{check_for_updates, current_version, get_or_download_engine};
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
use anyhow::Context;
use clap::{builder::styling::AnsiColor, Parser};
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{find_roots, AuthConfig, Project};
use pesde::{engine::EngineKind, find_roots, AuthConfig, Project};
use semver::VersionReq;
use std::{
io,
path::{Path, PathBuf},
str::FromStr,
sync::Mutex,
};
use tempfile::NamedTempFile;
@ -135,27 +137,39 @@ impl<'a> MakeWriter<'a> for IndicatifWriter {
async fn run() -> anyhow::Result<()> {
let cwd = std::env::current_dir().expect("failed to get current working directory");
// Unix doesn't return the symlinked path from current_exe, so we need to get it from argument 0
#[cfg(unix)]
let current_exe = PathBuf::from(std::env::args_os().next().expect("argument 0 not set"));
#[cfg(not(unix))]
let current_exe = std::env::current_exe().expect("failed to get current executable path");
let exe_name = current_exe
.file_stem()
.unwrap()
.to_str()
.expect("exe name is not valid utf-8");
let exe_name_engine = EngineKind::from_str(exe_name);
#[cfg(windows)]
'scripts: {
let exe = std::env::current_exe().expect("failed to get current executable path");
if exe.parent().is_some_and(|parent| {
parent.file_name().is_some_and(|parent| parent != "bin")
|| parent
.parent()
.and_then(|parent| parent.file_name())
.is_some_and(|parent| parent != HOME_DIR)
}) {
// if we're an engine, we don't want to run any scripts
if exe_name_engine.is_ok() {
break 'scripts;
}
let exe_name = exe.file_name().unwrap().to_string_lossy();
let exe_name = exe_name
.strip_suffix(std::env::consts::EXE_SUFFIX)
.unwrap_or(&exe_name);
if let Some(bin_folder) = current_exe.parent() {
// we're not in {path}/bin/{exe}
if bin_folder.file_name().is_some_and(|parent| parent != "bin") {
break 'scripts;
}
if exe_name == env!("CARGO_BIN_NAME") {
break 'scripts;
// we're not in {path}/.pesde/bin/{exe}
if bin_folder
.parent()
.and_then(|home_folder| home_folder.file_name())
.is_some_and(|home_folder| home_folder != HOME_DIR)
{
break 'scripts;
}
}
// the bin script will search for the project root itself, so we do that to ensure
@ -164,9 +178,11 @@ async fn run() -> anyhow::Result<()> {
let status = std::process::Command::new("lune")
.arg("run")
.arg(
exe.parent()
.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
.unwrap_or(exe)
current_exe
.parent()
.unwrap_or(&current_exe)
.join(".impl")
.join(current_exe.file_name().unwrap())
.with_extension("luau"),
)
.arg("--")
@ -265,34 +281,47 @@ async fn run() -> anyhow::Result<()> {
};
#[cfg(feature = "version-management")]
{
let target_version = project
'engines: {
let Ok(engine) = exe_name_engine else {
break 'engines;
};
let req = project
.deser_manifest()
.await
.ok()
.and_then(|manifest| manifest.pesde_version);
.and_then(|mut manifest| manifest.engines.remove(&engine));
let exe_path = if let Some(version) = target_version {
get_or_download_version(&reqwest, TagInfo::Incomplete(version), false).await?
} else {
None
};
if let Some(exe_path) = exe_path {
let status = std::process::Command::new(exe_path)
.args(std::env::args_os().skip(1))
.status()
.expect("failed to run new version");
std::process::exit(status.code().unwrap());
if engine == EngineKind::Pesde {
match &req {
// we're already running a compatible version
Some(req) if req.matches(&current_version()) => break 'engines,
// the user has not requested a specific version, so we'll just use the current one
None => break 'engines,
_ => (),
}
}
display_err(
check_for_updates(&reqwest).await,
" while checking for updates",
);
let exe_path =
get_or_download_engine(&reqwest, engine, req.unwrap_or(VersionReq::STAR)).await?;
if exe_path == current_exe {
anyhow::bail!("engine linker executed by itself")
}
let status = std::process::Command::new(exe_path)
.args(std::env::args_os().skip(1))
.status()
.expect("failed to run new version");
std::process::exit(status.code().unwrap());
}
#[cfg(feature = "version-management")]
display_err(
check_for_updates(&reqwest).await,
" while checking for updates",
);
let cli = Cli::parse();
cli.subcommand.run(project, reqwest).await


@ -1,4 +1,5 @@
use crate::{
engine::EngineKind,
manifest::{
overrides::{OverrideKey, OverrideSpecifier},
target::Target,
@ -7,9 +8,14 @@ use crate::{
source::specifiers::DependencySpecifiers,
};
use relative_path::RelativePathBuf;
use semver::Version;
use semver::{Version, VersionReq};
use serde::{Deserialize, Serialize};
use std::collections::{BTreeMap, HashMap};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
collections::{BTreeMap, HashMap},
fmt::Display,
str::FromStr,
};
use tracing::instrument;
/// Overrides
@ -85,31 +91,88 @@ pub struct Manifest {
crate::names::PackageNames,
BTreeMap<crate::source::ids::VersionId, RelativePathBuf>,
>,
#[serde(default, skip_serializing)]
/// Which version of the pesde CLI this package uses
pub pesde_version: Option<Version>,
/// A list of globs pointing to workspace members' directories
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub workspace_members: Vec<String>,
/// The Roblox place of this project
#[serde(default, skip_serializing)]
pub place: BTreeMap<target::RobloxPlaceKind, String>,
/// The engines this package supports
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
#[cfg_attr(feature = "schema", schemars(with = "BTreeMap<EngineKind, String>"))]
pub engines: BTreeMap<EngineKind, VersionReq>,
/// The standard dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, DependencySpecifiers>,
pub dependencies: BTreeMap<Alias, DependencySpecifiers>,
/// The peer dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub peer_dependencies: BTreeMap<String, DependencySpecifiers>,
pub peer_dependencies: BTreeMap<Alias, DependencySpecifiers>,
/// The dev dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
pub dev_dependencies: BTreeMap<Alias, DependencySpecifiers>,
/// The user-defined fields of the package
#[cfg_attr(feature = "schema", schemars(skip))]
#[serde(flatten)]
pub user_defined_fields: HashMap<String, toml::Value>,
}
/// An alias of a dependency
#[derive(
SerializeDisplay, DeserializeFromStr, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct Alias(String);
impl Display for Alias {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.pad(&self.0)
}
}
impl FromStr for Alias {
type Err = errors::AliasFromStr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.is_empty() {
return Err(errors::AliasFromStr::Empty);
}
if !s
.chars()
.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
{
return Err(errors::AliasFromStr::InvalidCharacters(s.to_string()));
}
if EngineKind::from_str(s).is_ok() {
return Err(errors::AliasFromStr::EngineName(s.to_string()));
}
Ok(Self(s.to_string()))
}
}
#[cfg(feature = "schema")]
impl schemars::JsonSchema for Alias {
fn schema_name() -> std::borrow::Cow<'static, str> {
"Alias".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^[a-zA-Z0-9_-]+$"#,
})
}
}
impl Alias {
/// Get the alias as a string
pub fn as_str(&self) -> &str {
&self.0
}
}
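
A few illustrative parse results for the validation above (assuming `lune` is a recognized engine name):

use std::str::FromStr;

assert!(Alias::from_str("my_alias-2").is_ok());
assert!(Alias::from_str("").is_err()); // AliasFromStr::Empty
assert!(Alias::from_str("bad name!").is_err()); // AliasFromStr::InvalidCharacters
assert!(Alias::from_str("lune").is_err()); // AliasFromStr::EngineName (assumed engine)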
/// A dependency type
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[serde(rename_all = "snake_case")]
@ -127,10 +190,8 @@ impl Manifest {
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies(
&self,
) -> Result<
BTreeMap<String, (DependencySpecifiers, DependencyType)>,
errors::AllDependenciesError,
> {
) -> Result<BTreeMap<Alias, (DependencySpecifiers, DependencyType)>, errors::AllDependenciesError>
{
let mut all_deps = BTreeMap::new();
for (deps, ty) in [
@ -151,14 +212,32 @@ impl Manifest {
/// Errors that can occur when interacting with manifests
pub mod errors {
use crate::manifest::Alias;
use thiserror::Error;
/// Errors that can occur when parsing an alias from a string
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AliasFromStr {
/// The alias is empty
#[error("the alias is empty")]
Empty,
/// The alias contains characters outside a-z, A-Z, 0-9, -, and _
#[error("alias `{0}` contains characters outside a-z, A-Z, 0-9, -, and _")]
InvalidCharacters(String),
/// The alias is an engine name
#[error("alias `{0}` is an engine name")]
EngineName(String),
}
/// Errors that can occur when trying to get all dependencies from a manifest
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AllDependenciesError {
/// Another specifier is already using the alias
#[error("another specifier is already using the alias {0}")]
AliasConflict(String),
AliasConflict(Alias),
}
}
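
The conflict arm fires when two dependency tables reuse one alias; the merge loop is elided by the hunk, but presumably reduces to a map insert, roughly:

use std::{collections::BTreeMap, str::FromStr};

let mut all_deps: BTreeMap<Alias, u32> = BTreeMap::new();
let alias = Alias::from_str("foo").unwrap();
assert!(all_deps.insert(alias.clone(), 1).is_none());
// a second insert under the same alias is the AliasConflict case
assert!(all_deps.insert(alias, 2).is_some());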


@ -1,4 +1,4 @@
use crate::source::specifiers::DependencySpecifiers;
use crate::{manifest::Alias, source::specifiers::DependencySpecifiers};
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
@ -10,7 +10,7 @@ use std::{
#[derive(
Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct OverrideKey(pub Vec<Vec<String>>);
pub struct OverrideKey(pub Vec<Vec<Alias>>);
impl FromStr for OverrideKey {
type Err = errors::OverrideKeyFromStr;
@ -18,8 +18,13 @@ impl FromStr for OverrideKey {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let overrides = s
.split(',')
.map(|overrides| overrides.split('>').map(ToString::to_string).collect())
.collect::<Vec<Vec<String>>>();
.map(|overrides| {
overrides
.split('>')
.map(Alias::from_str)
.collect::<Result<_, _>>()
})
.collect::<Result<Vec<Vec<Alias>>, _>>()?;
if overrides.is_empty() {
return Err(errors::OverrideKeyFromStr::Empty);
@ -38,7 +43,7 @@ impl schemars::JsonSchema for OverrideKey {
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^([a-zA-Z]+(>[a-zA-Z]+)+)(,([a-zA-Z]+(>[a-zA-Z]+)+))*$"#,
"pattern": r#"^(?:[a-zA-Z0-9_-]+>[a-zA-Z0-9_-]+(?:>[a-zA-Z0-9_-]+)*)(?:,(?:[a-zA-Z0-9_-]+>[a-zA-Z0-9_-]+(?:>[a-zA-Z0-9_-]+)*))*$"#,
})
}
}
@ -53,7 +58,7 @@ impl Display for OverrideKey {
.map(|overrides| {
overrides
.iter()
.map(String::as_str)
.map(Alias::as_str)
.collect::<Vec<_>>()
.join(">")
})
@ -71,7 +76,7 @@ pub enum OverrideSpecifier {
/// A specifier for a dependency
Specifier(DependencySpecifiers),
/// An alias for a dependency the current project depends on
Alias(String),
Alias(Alias),
}
/// Errors that can occur when interacting with override keys
@ -85,5 +90,9 @@ pub mod errors {
/// The override key is empty
#[error("empty override key")]
Empty,
/// An alias in the override key is invalid
#[error("invalid alias in override key")]
InvalidAlias(#[from] crate::manifest::errors::AliasFromStr),
}
}
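
Override keys still round-trip through the `,`/`>` syntax, but each segment is now validated as an `Alias` (assuming the elided tail of `Display` joins chains with commas):

use std::str::FromStr;

let key = OverrideKey::from_str("foo>bar,foo>baz>qux").unwrap();
assert_eq!(key.to_string(), "foo>bar,foo>baz>qux");
// invalid segments now fail up front instead of passing through as strings
assert!(OverrideKey::from_str("foo>bad name").is_err());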


@ -84,7 +84,7 @@ impl Project {
reporter: Arc<Reporter>,
) -> Result<(), errors::ApplyPatchesError>
where
Reporter: for<'a> PatchesReporter<'a> + Send + Sync + 'static,
Reporter: PatchesReporter + Send + Sync + 'static,
{
let manifest = self.deser_manifest().await?;
@ -112,7 +112,7 @@ impl Project {
async move {
tracing::debug!("applying patch");
let progress_reporter = reporter.report_patch(&package_id.to_string());
let progress_reporter = reporter.report_patch(package_id.to_string());
let patch = fs::read(&patch_path)
.await


@ -9,18 +9,23 @@
#![allow(unused_variables)]
use async_stream::stream;
use futures::StreamExt;
use std::sync::Arc;
use tokio::io::AsyncBufRead;
/// Reports downloads.
pub trait DownloadsReporter<'a>: Send + Sync {
pub trait DownloadsReporter: Send + Sync {
/// The [`DownloadProgressReporter`] type associated with this reporter.
type DownloadProgressReporter: DownloadProgressReporter + 'a;
type DownloadProgressReporter: DownloadProgressReporter + 'static;
/// Starts a new download.
fn report_download<'b>(&'a self, name: &'b str) -> Self::DownloadProgressReporter;
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter;
}
impl DownloadsReporter<'_> for () {
impl DownloadsReporter for () {
type DownloadProgressReporter = ();
fn report_download(&self, name: &str) -> Self::DownloadProgressReporter {}
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {}
}
/// Reports the progress of a single download.
@ -41,17 +46,17 @@ pub trait DownloadProgressReporter: Send + Sync {
impl DownloadProgressReporter for () {}
/// Reports the progress of applying patches.
pub trait PatchesReporter<'a>: Send + Sync {
pub trait PatchesReporter: Send + Sync {
/// The [`PatchProgressReporter`] type associated with this reporter.
type PatchProgressReporter: PatchProgressReporter + 'a;
type PatchProgressReporter: PatchProgressReporter + 'static;
/// Starts a new patch.
fn report_patch<'b>(&'a self, name: &'b str) -> Self::PatchProgressReporter;
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter;
}
impl PatchesReporter<'_> for () {
impl PatchesReporter for () {
type PatchProgressReporter = ();
fn report_patch(&self, name: &str) -> Self::PatchProgressReporter {}
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter {}
}
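
With the lifetime parameter gone, implementors receive `Arc<Self>` and an owned name; a minimal sketch of a reporter under the new signatures:

use std::sync::Arc;

struct LogReporter;

impl DownloadsReporter for LogReporter {
    type DownloadProgressReporter = ();

    fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {
        // `Arc<Self>` lets the same reporter be shared across spawned tasks
        println!("downloading {name}");
    }
}

// call site: Arc::new(LogReporter).report_download("scope/name".into());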
/// Reports the progress of a single patch.
@ -61,3 +66,32 @@ pub trait PatchProgressReporter: Send + Sync {
}
impl PatchProgressReporter for () {}
pub(crate) fn response_to_async_read<R: DownloadProgressReporter>(
response: reqwest::Response,
reporter: Arc<R>,
) -> impl AsyncBufRead {
let total_len = response.content_length().unwrap_or(0);
reporter.report_progress(total_len, 0);
let mut bytes_downloaded = 0;
let mut stream = response.bytes_stream();
let bytes = stream!({
while let Some(chunk) = stream.next().await {
let chunk = match chunk {
Ok(chunk) => chunk,
Err(err) => {
yield Err(std::io::Error::new(std::io::ErrorKind::Other, err));
continue;
}
};
bytes_downloaded += chunk.len() as u64;
reporter.report_progress(total_len, bytes_downloaded);
yield Ok(chunk);
}
reporter.report_done();
});
tokio_util::io::StreamReader::new(bytes)
}
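
Callers pin the returned reader and layer decoders or buffers on top, as the pesde and wally sources below do; a hedged sketch of a call site:

use std::sync::Arc;
use tokio::io::AsyncReadExt;

async fn download_all<R: DownloadProgressReporter>(
    response: reqwest::Response,
    reporter: Arc<R>,
) -> std::io::Result<Vec<u8>> {
    let bytes = response_to_async_read(response, reporter);
    tokio::pin!(bytes);
    let mut buf = Vec::new();
    // progress is reported by the wrapped stream as chunks arrive
    bytes.read_to_end(&mut buf).await?;
    Ok(buf)
}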


@ -1,6 +1,6 @@
use crate::{
graph::{DependencyGraph, DependencyGraphNode},
manifest::{overrides::OverrideSpecifier, DependencyType},
manifest::{overrides::OverrideSpecifier, Alias, DependencyType},
source::{
ids::PackageId,
pesde::PesdePackageSource,
@ -92,12 +92,12 @@ impl Project {
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty)) else {
tracing::debug!(
"dependency {package_id} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
);
"dependency {package_id} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
);
continue;
};
let span = tracing::info_span!("resolve from old graph", alias);
let span = tracing::info_span!("resolve from old graph", alias = alias.as_str());
let _guard = span.enter();
tracing::debug!("resolved {package_id} from old dependency graph");
@ -121,6 +121,7 @@ impl Project {
let inner_span =
tracing::info_span!("resolve dependency", path = path.join(">"));
let _inner_guard = inner_span.enter();
if let Some(dep_node) = previous_graph.get(dep_id) {
tracing::debug!("resolved sub-dependency {dep_id}");
insert_node(&mut graph, dep_id, dep_node.clone(), false);
@ -262,7 +263,7 @@ impl Project {
.get_mut(&dependant_id)
.expect("dependant package not found in graph")
.dependencies
.insert(package_id.clone(), alias.clone());
}
let pkg_ref = &resolved[package_id.version_id()];
@ -339,7 +340,7 @@ impl Project {
tracing::debug!(
"overridden specifier found for {} ({dependency_spec})",
path.iter()
.map(String::as_str)
.map(Alias::as_str)
.chain(std::iter::once(dependency_alias.as_str()))
.collect::<Vec<_>>()
.join(">"),
@ -368,7 +369,7 @@ impl Project {
Ok(())
}
.instrument(tracing::info_span!("resolve new/changed", path = path.join(">")))
.instrument(tracing::info_span!("resolve new/changed", path = path.iter().map(Alias::as_str).collect::<Vec<_>>().join(">")))
.await?;
}
@ -388,6 +389,7 @@ impl Project {
/// Errors that can occur when resolving dependencies
pub mod errors {
use crate::manifest::Alias;
use thiserror::Error;
/// Errors that can occur when creating a dependency graph
@ -425,6 +427,6 @@ pub mod errors {
/// An alias for an override was not found in the manifest
#[error("alias `{0}` not found in manifest")]
AliasNotFound(String),
AliasNotFound(Alias),
}
}


@ -2,7 +2,7 @@ use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{git::GitPackageSource, DependencySpecifiers, PackageRef, PackageSources},
};
@ -19,12 +19,12 @@ pub struct GitPackageRef {
pub tree_id: String,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
/// Whether this package uses the new structure
pub new_structure: bool,
}
impl PackageRef for GitPackageRef {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
&self.dependencies
}


@ -1,5 +1,5 @@
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{path::PathPackageSource, DependencySpecifiers, PackageRef, PackageSources},
};
use serde::{Deserialize, Serialize};
@ -12,10 +12,10 @@ pub struct PathPackageRef {
pub path: PathBuf,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageRef for PathPackageRef {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
&self.dependencies
}


@ -8,15 +8,15 @@ use std::{
hash::Hash,
path::PathBuf,
};
use tokio_util::io::StreamReader;
use pkg_ref::PesdePackageRef;
use specifier::PesdeDependencySpecifier;
use crate::{
manifest::{target::Target, DependencyType},
engine::EngineKind,
manifest::{target::Target, Alias, DependencyType},
names::{PackageName, PackageNames},
reporters::DownloadProgressReporter,
reporters::{response_to_async_read, DownloadProgressReporter},
source::{
fs::{store_in_cas, FsEntry, PackageFs},
git_index::{read_file, root_tree, GitBasedSource},
@ -28,7 +28,8 @@ use crate::{
};
use fs_err::tokio as fs;
use futures::StreamExt;
use tokio::task::spawn_blocking;
use semver::VersionReq;
use tokio::{pin, task::spawn_blocking};
use tracing::instrument;
/// The pesde package reference
@ -95,23 +96,31 @@ impl PesdePackageSource {
.unwrap()
}
fn read_index_file(
/// Reads the index file of a package
pub async fn read_index_file(
&self,
name: &PackageName,
project: &Project,
) -> Result<Option<IndexFile>, errors::ReadIndexFileError> {
let (scope, name) = name.as_str();
let repo = gix::open(self.path(project)).map_err(Box::new)?;
let tree = root_tree(&repo).map_err(Box::new)?;
let string = match read_file(&tree, [scope, name]) {
Ok(Some(s)) => s,
Ok(None) => return Ok(None),
Err(e) => {
return Err(errors::ReadIndexFileError::ReadFile(e));
}
};
let path = self.path(project);
let name = name.clone();
toml::from_str(&string).map_err(Into::into)
spawn_blocking(move || {
let (scope, name) = name.as_str();
let repo = gix::open(&path).map_err(Box::new)?;
let tree = root_tree(&repo).map_err(Box::new)?;
let string = match read_file(&tree, [scope, name]) {
Ok(Some(s)) => s,
Ok(None) => return Ok(None),
Err(e) => {
return Err(errors::ReadIndexFileError::ReadFile(e));
}
};
toml::from_str(&string).map_err(Into::into)
})
.await
.unwrap()
}
}
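
The read moves onto the blocking pool because `gix` does synchronous I/O; the general pattern, as a standalone sketch:

use tokio::task::spawn_blocking;

async fn read_blocking(path: std::path::PathBuf) -> std::io::Result<String> {
    // synchronous work runs on the blocking pool, keeping the async
    // executor free; a panic in the closure surfaces at the `expect`
    spawn_blocking(move || std::fs::read_to_string(path))
        .await
        .expect("blocking read task panicked")
}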
@ -140,16 +149,12 @@ impl PackageSource for PesdePackageSource {
..
} = options;
let Some(IndexFile { meta, entries, .. }) =
self.read_index_file(&specifier.name, project)?
let Some(IndexFile { entries, .. }) =
self.read_index_file(&specifier.name, project).await?
else {
return Err(errors::ResolveError::NotFound(specifier.name.to_string()));
};
if !meta.deprecated.is_empty() {
tracing::warn!("{} is deprecated: {}", specifier.name, meta.deprecated);
}
tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok((
@ -229,23 +234,8 @@ impl PackageSource for PesdePackageSource {
let response = request.send().await?.error_for_status()?;
let total_len = response.content_length().unwrap_or(0);
reporter.report_progress(total_len, 0);
let mut bytes_downloaded = 0;
let bytes = response
.bytes_stream()
.inspect(|chunk| {
chunk.as_ref().ok().inspect(|chunk| {
bytes_downloaded += chunk.len() as u64;
reporter.report_progress(total_len, bytes_downloaded);
});
})
.map(|result| {
result.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
});
let bytes = StreamReader::new(bytes);
let bytes = response_to_async_read(response, reporter.clone());
pin!(bytes);
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes);
let mut archive = tokio_tar::Archive::new(&mut decoder);
@ -297,8 +287,6 @@ impl PackageSource for PesdePackageSource {
.await
.map_err(errors::DownloadError::WriteIndex)?;
reporter.report_done();
Ok(fs)
}
@ -314,7 +302,8 @@ impl PackageSource for PesdePackageSource {
panic!("unexpected package name");
};
let Some(IndexFile { mut entries, .. }) = self.read_index_file(name, &options.project)?
let Some(IndexFile { mut entries, .. }) =
self.read_index_file(name, &options.project).await?
else {
return Err(errors::GetTargetError::NotFound(name.to_string()));
};
@ -478,6 +467,9 @@ pub struct IndexFileEntry {
/// When this package was published
#[serde(default = "chrono::Utc::now")]
pub published_at: chrono::DateTime<chrono::Utc>,
/// The engines this package supports
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub engines: BTreeMap<EngineKind, VersionReq>,
/// The description of this package
#[serde(default, skip_serializing_if = "Option::is_none")]
@ -502,7 +494,7 @@ pub struct IndexFileEntry {
/// The dependencies of this package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
/// The package metadata in the index file
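
Per-entry engine requirements are what dependency incompatibility warnings can key off; a hedged sketch using `version_matches` from the crate root (assuming `EngineKind` is `Debug`):

use semver::Version;

fn warn_if_incompatible(entry: &IndexFileEntry, engine: &EngineKind, installed: &Version) {
    if let Some(req) = entry.engines.get(engine) {
        if !crate::version_matches(installed, req) {
            // illustrative only; the real warning is printed on the CLI side
            tracing::warn!("package wants {engine:?} {req}, found {installed}");
        }
    }
}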


@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{pesde::PesdePackageSource, DependencySpecifiers, PackageRef, PackageSources},
};
@ -18,10 +18,10 @@ pub struct PesdePackageRef {
pub index_url: gix::Url,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageRef for PesdePackageRef {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
&self.dependencies
}


@ -1,5 +1,5 @@
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{pesde, specifiers::DependencySpecifiers, traits::PackageRef, PackageSources},
};
use serde::{Deserialize, Serialize};
@ -35,7 +35,7 @@ impl PackageRefs {
}
impl PackageRef for PackageRefs {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
match self {
PackageRefs::Pesde(pkg_ref) => pkg_ref.dependencies(),
#[cfg(feature = "wally-compat")]


@ -1,7 +1,7 @@
use crate::{
manifest::{
target::{Target, TargetKind},
DependencyType,
Alias, DependencyType,
},
reporters::DownloadProgressReporter,
source::{ids::PackageId, DependencySpecifiers, PackageFs, PackageSources, ResolveResult},
@ -21,7 +21,7 @@ pub trait DependencySpecifier: Debug + Display {}
/// A reference to a package
pub trait PackageRef: Debug {
/// The dependencies of this package
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)>;
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)>;
/// Whether to use the new structure (`packages` folders inside the package's content folder) or the old structure (Wally-style, with linker files in the parent of the folder containing the package's contents)
fn use_new_structure(&self) -> bool;
/// The source of this package


@ -1,7 +1,7 @@
use std::collections::BTreeMap;
use crate::{
manifest::{errors, DependencyType},
manifest::{errors, Alias, DependencyType},
names::wally::WallyPackageName,
source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier},
};
@ -28,9 +28,9 @@ pub struct WallyPackage {
pub fn deserialize_specifiers<'de, D: Deserializer<'de>>(
deserializer: D,
) -> Result<BTreeMap<String, WallyDependencySpecifier>, D::Error> {
) -> Result<BTreeMap<Alias, WallyDependencySpecifier>, D::Error> {
// specifier is in form of `name@version_req`
BTreeMap::<String, String>::deserialize(deserializer)?
BTreeMap::<Alias, String>::deserialize(deserializer)?
.into_iter()
.map(|(k, v)| {
let (name, version) = v.split_once('@').ok_or_else(|| {
@ -54,11 +54,11 @@ pub fn deserialize_specifiers<'de, D: Deserializer<'de>>(
pub struct WallyManifest {
pub package: WallyPackage,
#[serde(default, deserialize_with = "deserialize_specifiers")]
pub dependencies: BTreeMap<String, WallyDependencySpecifier>,
pub dependencies: BTreeMap<Alias, WallyDependencySpecifier>,
#[serde(default, deserialize_with = "deserialize_specifiers")]
pub server_dependencies: BTreeMap<String, WallyDependencySpecifier>,
pub server_dependencies: BTreeMap<Alias, WallyDependencySpecifier>,
#[serde(default, deserialize_with = "deserialize_specifiers")]
pub dev_dependencies: BTreeMap<String, WallyDependencySpecifier>,
pub dev_dependencies: BTreeMap<Alias, WallyDependencySpecifier>,
}
impl WallyManifest {
@ -66,10 +66,8 @@ impl WallyManifest {
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies(
&self,
) -> Result<
BTreeMap<String, (DependencySpecifiers, DependencyType)>,
errors::AllDependenciesError,
> {
) -> Result<BTreeMap<Alias, (DependencySpecifiers, DependencyType)>, errors::AllDependenciesError>
{
let mut all_deps = BTreeMap::new();
for (deps, ty) in [


@ -1,7 +1,7 @@
use crate::{
manifest::target::{Target, TargetKind},
names::PackageNames,
reporters::DownloadProgressReporter,
reporters::{response_to_async_read, DownloadProgressReporter},
source::{
fs::{store_in_cas, FsEntry, PackageFs},
git_index::{read_file, root_tree, GitBasedSource},
@ -20,14 +20,13 @@ use crate::{
Project,
};
use fs_err::tokio as fs;
use futures::StreamExt;
use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use std::{collections::BTreeMap, path::PathBuf};
use tokio::{io::AsyncReadExt, task::spawn_blocking};
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::StreamReader};
use tokio::{io::AsyncReadExt, pin, task::spawn_blocking};
use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::instrument;
pub(crate) mod compat_util;
@ -268,22 +267,9 @@ impl PackageSource for WallyPackageSource {
let response = request.send().await?.error_for_status()?;
let total_len = response.content_length().unwrap_or(0);
reporter.report_progress(total_len, 0);
let bytes = response_to_async_read(response, reporter.clone());
pin!(bytes);
let mut bytes_downloaded = 0;
let bytes = response
.bytes_stream()
.inspect(|chunk| {
chunk.as_ref().ok().inspect(|chunk| {
bytes_downloaded += chunk.len() as u64;
reporter.report_progress(total_len, bytes_downloaded);
});
})
.map(|result| {
result.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
});
let mut bytes = StreamReader::new(bytes);
let mut buf = Vec::with_capacity(total_len as usize);
bytes.read_to_end(&mut buf).await?;
@ -335,8 +321,6 @@ impl PackageSource for WallyPackageSource {
.await
.map_err(errors::DownloadError::WriteIndex)?;
reporter.report_done();
Ok(fs)
}


@ -3,7 +3,7 @@ use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{wally::WallyPackageSource, DependencySpecifiers, PackageRef, PackageSources},
};
@ -18,10 +18,10 @@ pub struct WallyPackageRef {
pub index_url: gix::Url,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageRef for WallyPackageRef {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
&self.dependencies
}


@ -3,7 +3,7 @@ use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use crate::{
manifest::DependencyType,
manifest::{Alias, DependencyType},
source::{workspace::WorkspacePackageSource, DependencySpecifiers, PackageRef, PackageSources},
};
@ -14,10 +14,10 @@ pub struct WorkspacePackageRef {
pub path: RelativePathBuf,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<String, (DependencySpecifiers, DependencyType)>,
pub dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageRef for WorkspacePackageRef {
fn dependencies(&self) -> &BTreeMap<String, (DependencySpecifiers, DependencyType)> {
fn dependencies(&self) -> &BTreeMap<Alias, (DependencySpecifiers, DependencyType)> {
&self.dependencies
}


@ -1,5 +1,6 @@
use crate::AuthConfig;
use gix::bstr::BStr;
use semver::Version;
use serde::{Deserialize, Deserializer, Serializer};
use sha2::{Digest, Sha256};
use std::collections::{BTreeMap, HashSet};
@ -88,3 +89,9 @@ pub fn hash<S: AsRef<[u8]>>(struc: S) -> String {
pub fn is_default<T: Default + Eq>(t: &T) -> bool {
t == &T::default()
}
/// Returns a copy of the version with its build metadata stripped
pub fn no_build_metadata(version: &Version) -> Version {
let mut version = version.clone();
version.build = semver::BuildMetadata::EMPTY;
version
}
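
For example, build metadata is dropped while the rest of the version is preserved:

use semver::Version;

let v = Version::parse("0.6.0+commit.abc123").unwrap();
assert_eq!(no_build_metadata(&v).to_string(), "0.6.0");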