mirror of
https://github.com/pesde-pkg/pesde.git
synced 2025-04-20 03:43:47 +01:00
feat: add engines
Adds the initial implementation of the engines feature. Not tested yet. Requires documentation and more work for non-pesde engines to be usable.
This commit is contained in:
parent
72c1c39401
commit
fc349e6f21
17 changed files with 1014 additions and 322 deletions
|
@ -14,7 +14,6 @@ bin = [
|
||||||
"dep:clap",
|
"dep:clap",
|
||||||
"dep:dirs",
|
"dep:dirs",
|
||||||
"dep:tracing-subscriber",
|
"dep:tracing-subscriber",
|
||||||
"reqwest/json",
|
|
||||||
"dep:indicatif",
|
"dep:indicatif",
|
||||||
"dep:inquire",
|
"dep:inquire",
|
||||||
"dep:toml_edit",
|
"dep:toml_edit",
|
||||||
|
@ -30,7 +29,7 @@ bin = [
|
||||||
"tokio/rt-multi-thread",
|
"tokio/rt-multi-thread",
|
||||||
"tokio/macros",
|
"tokio/macros",
|
||||||
]
|
]
|
||||||
wally-compat = ["dep:async_zip", "dep:serde_json"]
|
wally-compat = ["dep:serde_json"]
|
||||||
patches = ["dep:git2"]
|
patches = ["dep:git2"]
|
||||||
version-management = ["bin"]
|
version-management = ["bin"]
|
||||||
schema = ["dep:schemars"]
|
schema = ["dep:schemars"]
|
||||||
|
@ -49,7 +48,7 @@ toml = "0.8.19"
|
||||||
serde_with = "3.11.0"
|
serde_with = "3.11.0"
|
||||||
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
|
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
|
||||||
semver = { version = "1.0.24", features = ["serde"] }
|
semver = { version = "1.0.24", features = ["serde"] }
|
||||||
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls", "stream"] }
|
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls", "stream", "json"] }
|
||||||
tokio-tar = "0.3.1"
|
tokio-tar = "0.3.1"
|
||||||
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
|
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
|
||||||
pathdiff = "0.2.3"
|
pathdiff = "0.2.3"
|
||||||
|
@ -68,11 +67,11 @@ tempfile = "3.14.0"
|
||||||
wax = { version = "0.6.0", default-features = false }
|
wax = { version = "0.6.0", default-features = false }
|
||||||
fs-err = { version = "3.0.0", features = ["tokio"] }
|
fs-err = { version = "3.0.0", features = ["tokio"] }
|
||||||
urlencoding = "2.1.3"
|
urlencoding = "2.1.3"
|
||||||
|
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
|
||||||
|
|
||||||
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
|
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
|
||||||
git2 = { version = "0.19.0", optional = true }
|
git2 = { version = "0.19.0", optional = true }
|
||||||
|
|
||||||
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
|
|
||||||
serde_json = { version = "1.0.133", optional = true }
|
serde_json = { version = "1.0.133", optional = true }
|
||||||
|
|
||||||
schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"], optional = true }
|
schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"], optional = true }
|
||||||
|
|
|
@ -50,8 +50,10 @@ pub async fn search_packages(
|
||||||
|
|
||||||
let source = Arc::new(app_state.source.clone().read_owned().await);
|
let source = Arc::new(app_state.source.clone().read_owned().await);
|
||||||
|
|
||||||
let mut results = Vec::with_capacity(top_docs.len());
|
let mut results = top_docs
|
||||||
results.extend((0..top_docs.len()).map(|_| None::<PackageResponse>));
|
.iter()
|
||||||
|
.map(|_| None::<PackageResponse>)
|
||||||
|
.collect::<Vec<_>>();
|
||||||
|
|
||||||
let mut tasks = top_docs
|
let mut tasks = top_docs
|
||||||
.into_iter()
|
.into_iter()
|
||||||
|
|
|
@ -1,8 +1,10 @@
|
||||||
use crate::cli::{version::update_bin_exe, HOME_DIR};
|
use crate::cli::{version::replace_bin_exe, HOME_DIR};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
|
use pesde::engine::EngineKind;
|
||||||
use std::env::current_exe;
|
use std::env::current_exe;
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct SelfInstallCommand {
|
pub struct SelfInstallCommand {
|
||||||
/// Skip adding the bin directory to the PATH
|
/// Skip adding the bin directory to the PATH
|
||||||
|
@ -70,7 +72,11 @@ and then restart your shell.
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
update_bin_exe(¤t_exe().context("failed to get current exe path")?).await?;
|
replace_bin_exe(
|
||||||
|
EngineKind::Pesde,
|
||||||
|
¤t_exe().context("failed to get current exe path")?,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,13 +1,15 @@
|
||||||
use crate::cli::{
|
use crate::cli::{
|
||||||
config::read_config,
|
config::read_config,
|
||||||
version::{
|
version::{
|
||||||
current_version, get_or_download_version, get_remote_version, no_build_metadata,
|
current_version, find_latest_version, get_or_download_engine, no_build_metadata,
|
||||||
update_bin_exe, TagInfo, VersionType,
|
replace_bin_exe,
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::Args;
|
use clap::Args;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
|
use pesde::engine::EngineKind;
|
||||||
|
use semver::VersionReq;
|
||||||
|
|
||||||
#[derive(Debug, Args)]
|
#[derive(Debug, Args)]
|
||||||
pub struct SelfUpgradeCommand {
|
pub struct SelfUpgradeCommand {
|
||||||
|
@ -25,7 +27,7 @@ impl SelfUpgradeCommand {
|
||||||
.context("no cached version found")?
|
.context("no cached version found")?
|
||||||
.1
|
.1
|
||||||
} else {
|
} else {
|
||||||
get_remote_version(&reqwest, VersionType::Latest).await?
|
find_latest_version(&reqwest).await?
|
||||||
};
|
};
|
||||||
|
|
||||||
let latest_version_no_metadata = no_build_metadata(&latest_version);
|
let latest_version_no_metadata = no_build_metadata(&latest_version);
|
||||||
|
@ -46,10 +48,13 @@ impl SelfUpgradeCommand {
|
||||||
return Ok(());
|
return Ok(());
|
||||||
}
|
}
|
||||||
|
|
||||||
let path = get_or_download_version(&reqwest, TagInfo::Complete(latest_version), true)
|
let path = get_or_download_engine(
|
||||||
.await?
|
&reqwest,
|
||||||
.unwrap();
|
EngineKind::Pesde,
|
||||||
update_bin_exe(&path).await?;
|
VersionReq::parse(&format!("={latest_version}")).unwrap(),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
replace_bin_exe(EngineKind::Pesde, &path).await?;
|
||||||
|
|
||||||
println!("upgraded to version {display_latest_version}!");
|
println!("upgraded to version {display_latest_version}!");
|
||||||
|
|
||||||
|
|
|
@ -7,83 +7,28 @@ use crate::cli::{
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::StreamExt;
|
use pesde::{
|
||||||
use reqwest::header::ACCEPT;
|
engine::{
|
||||||
use semver::Version;
|
source::{
|
||||||
use serde::Deserialize;
|
traits::{DownloadOptions, EngineSource, ResolveOptions},
|
||||||
use std::{
|
EngineSources,
|
||||||
env::current_exe,
|
},
|
||||||
path::{Path, PathBuf},
|
EngineKind,
|
||||||
|
},
|
||||||
|
version_matches,
|
||||||
|
};
|
||||||
|
use semver::{Version, VersionReq};
|
||||||
|
use std::{
|
||||||
|
collections::BTreeSet,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
sync::Arc,
|
||||||
};
|
};
|
||||||
use tokio::io::AsyncWrite;
|
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
pub fn current_version() -> Version {
|
pub fn current_version() -> Version {
|
||||||
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
|
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
|
||||||
struct Release {
|
|
||||||
tag_name: String,
|
|
||||||
assets: Vec<Asset>,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug, Deserialize)]
|
|
||||||
struct Asset {
|
|
||||||
name: String,
|
|
||||||
url: url::Url,
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(level = "trace")]
|
|
||||||
fn get_repo() -> (String, String) {
|
|
||||||
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
|
|
||||||
let (owner, repo) = (
|
|
||||||
parts.next().unwrap().to_string(),
|
|
||||||
parts.next().unwrap().to_string(),
|
|
||||||
);
|
|
||||||
|
|
||||||
tracing::trace!("repository for updates: {owner}/{repo}");
|
|
||||||
|
|
||||||
(owner, repo)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum VersionType {
|
|
||||||
Latest,
|
|
||||||
Specific(Version),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(reqwest), level = "trace")]
|
|
||||||
pub async fn get_remote_version(
|
|
||||||
reqwest: &reqwest::Client,
|
|
||||||
ty: VersionType,
|
|
||||||
) -> anyhow::Result<Version> {
|
|
||||||
let (owner, repo) = get_repo();
|
|
||||||
|
|
||||||
let mut releases = reqwest
|
|
||||||
.get(format!(
|
|
||||||
"https://api.github.com/repos/{owner}/{repo}/releases",
|
|
||||||
))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.context("failed to send request to GitHub API")?
|
|
||||||
.error_for_status()
|
|
||||||
.context("failed to get GitHub API response")?
|
|
||||||
.json::<Vec<Release>>()
|
|
||||||
.await
|
|
||||||
.context("failed to parse GitHub API response")?
|
|
||||||
.into_iter()
|
|
||||||
.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
|
|
||||||
|
|
||||||
match ty {
|
|
||||||
VersionType::Latest => releases.max(),
|
|
||||||
VersionType::Specific(version) => {
|
|
||||||
releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
.context("failed to find latest version")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn no_build_metadata(version: &Version) -> Version {
|
pub fn no_build_metadata(version: &Version) -> Version {
|
||||||
let mut version = version.clone();
|
let mut version = version.clone();
|
||||||
version.build = semver::BuildMetadata::EMPTY;
|
version.build = semver::BuildMetadata::EMPTY;
|
||||||
|
@ -92,6 +37,23 @@ pub fn no_build_metadata(version: &Version) -> Version {
|
||||||
|
|
||||||
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
|
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
|
||||||
|
|
||||||
|
pub async fn find_latest_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
|
||||||
|
let version = EngineSources::pesde()
|
||||||
|
.resolve(
|
||||||
|
&VersionReq::STAR,
|
||||||
|
&ResolveOptions {
|
||||||
|
reqwest: reqwest.clone(),
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to resolve version")?
|
||||||
|
.pop_last()
|
||||||
|
.context("no versions found")?
|
||||||
|
.0;
|
||||||
|
|
||||||
|
Ok(version)
|
||||||
|
}
|
||||||
|
|
||||||
#[instrument(skip(reqwest), level = "trace")]
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
|
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
|
||||||
let config = read_config().await?;
|
let config = read_config().await?;
|
||||||
|
@ -104,7 +66,7 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
|
||||||
version
|
version
|
||||||
} else {
|
} else {
|
||||||
tracing::debug!("checking for updates");
|
tracing::debug!("checking for updates");
|
||||||
let version = get_remote_version(reqwest, VersionType::Latest).await?;
|
let version = find_latest_version(reqwest).await?;
|
||||||
|
|
||||||
write_config(&CliConfig {
|
write_config(&CliConfig {
|
||||||
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
|
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
|
||||||
|
@ -180,154 +142,105 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(skip(reqwest, writer), level = "trace")]
|
|
||||||
pub async fn download_github_release<W: AsyncWrite + Unpin>(
|
|
||||||
reqwest: &reqwest::Client,
|
|
||||||
version: &Version,
|
|
||||||
mut writer: W,
|
|
||||||
) -> anyhow::Result<()> {
|
|
||||||
let (owner, repo) = get_repo();
|
|
||||||
|
|
||||||
let release = reqwest
|
|
||||||
.get(format!(
|
|
||||||
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
|
|
||||||
))
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.context("failed to send request to GitHub API")?
|
|
||||||
.error_for_status()
|
|
||||||
.context("failed to get GitHub API response")?
|
|
||||||
.json::<Release>()
|
|
||||||
.await
|
|
||||||
.context("failed to parse GitHub API response")?;
|
|
||||||
|
|
||||||
let asset = release
|
|
||||||
.assets
|
|
||||||
.into_iter()
|
|
||||||
.find(|asset| {
|
|
||||||
asset.name.ends_with(&format!(
|
|
||||||
"-{}-{}.tar.gz",
|
|
||||||
std::env::consts::OS,
|
|
||||||
std::env::consts::ARCH
|
|
||||||
))
|
|
||||||
})
|
|
||||||
.context("failed to find asset for current platform")?;
|
|
||||||
|
|
||||||
let bytes = reqwest
|
|
||||||
.get(asset.url)
|
|
||||||
.header(ACCEPT, "application/octet-stream")
|
|
||||||
.send()
|
|
||||||
.await
|
|
||||||
.context("failed to send request to download asset")?
|
|
||||||
.error_for_status()
|
|
||||||
.context("failed to download asset")?
|
|
||||||
.bytes()
|
|
||||||
.await
|
|
||||||
.context("failed to download asset")?;
|
|
||||||
|
|
||||||
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
|
|
||||||
let mut archive = tokio_tar::Archive::new(&mut decoder);
|
|
||||||
|
|
||||||
let mut entry = archive
|
|
||||||
.entries()
|
|
||||||
.context("failed to read archive entries")?
|
|
||||||
.next()
|
|
||||||
.await
|
|
||||||
.context("archive has no entry")?
|
|
||||||
.context("failed to get first archive entry")?;
|
|
||||||
|
|
||||||
tokio::io::copy(&mut entry, &mut writer)
|
|
||||||
.await
|
|
||||||
.context("failed to write archive entry to file")
|
|
||||||
.map(|_| ())
|
|
||||||
}
|
|
||||||
|
|
||||||
#[derive(Debug)]
|
|
||||||
pub enum TagInfo {
|
|
||||||
Complete(Version),
|
|
||||||
Incomplete(Version),
|
|
||||||
}
|
|
||||||
|
|
||||||
#[instrument(skip(reqwest), level = "trace")]
|
#[instrument(skip(reqwest), level = "trace")]
|
||||||
pub async fn get_or_download_version(
|
pub async fn get_or_download_engine(
|
||||||
reqwest: &reqwest::Client,
|
reqwest: &reqwest::Client,
|
||||||
tag: TagInfo,
|
engine: EngineKind,
|
||||||
always_give_path: bool,
|
req: VersionReq,
|
||||||
) -> anyhow::Result<Option<PathBuf>> {
|
) -> anyhow::Result<PathBuf> {
|
||||||
let path = home_dir()?.join("versions");
|
let source = engine.source();
|
||||||
|
|
||||||
|
let path = home_dir()?.join("engines").join(source.directory());
|
||||||
fs::create_dir_all(&path)
|
fs::create_dir_all(&path)
|
||||||
.await
|
.await
|
||||||
.context("failed to create versions directory")?;
|
.context("failed to create engines directory")?;
|
||||||
|
|
||||||
let version = match &tag {
|
let mut read_dir = fs::read_dir(&path)
|
||||||
TagInfo::Complete(version) => version,
|
.await
|
||||||
// don't fetch the version since it could be cached
|
.context("failed to read engines directory")?;
|
||||||
TagInfo::Incomplete(version) => version,
|
|
||||||
|
let mut matching_versions = BTreeSet::new();
|
||||||
|
|
||||||
|
while let Some(entry) = read_dir.next_entry().await? {
|
||||||
|
let path = entry.path();
|
||||||
|
|
||||||
|
#[cfg(windows)]
|
||||||
|
let version = path.file_stem();
|
||||||
|
#[cfg(not(windows))]
|
||||||
|
let version = path.file_name();
|
||||||
|
|
||||||
|
let Some(version) = version.and_then(|s| s.to_str()) else {
|
||||||
|
continue;
|
||||||
};
|
};
|
||||||
|
|
||||||
let path = path.join(format!(
|
if let Ok(version) = Version::parse(version) {
|
||||||
"{}{}",
|
if version_matches(&version, &req) {
|
||||||
no_build_metadata(version),
|
matching_versions.insert(version);
|
||||||
std::env::consts::EXE_SUFFIX
|
}
|
||||||
));
|
}
|
||||||
|
|
||||||
let is_requested_version = !always_give_path && *version == current_version();
|
|
||||||
|
|
||||||
if path.exists() {
|
|
||||||
tracing::debug!("version already exists");
|
|
||||||
|
|
||||||
return Ok(if is_requested_version {
|
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(path)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if is_requested_version {
|
if let Some(version) = matching_versions.pop_last() {
|
||||||
tracing::debug!("copying current executable to version directory");
|
return Ok(path
|
||||||
fs::copy(current_exe()?, &path)
|
.join(version.to_string())
|
||||||
.await
|
.join(source.expected_file_name())
|
||||||
.context("failed to copy current executable to version directory")?;
|
.with_extension(std::env::consts::EXE_EXTENSION));
|
||||||
} else {
|
|
||||||
let version = match tag {
|
|
||||||
TagInfo::Complete(version) => version,
|
|
||||||
TagInfo::Incomplete(version) => {
|
|
||||||
get_remote_version(reqwest, VersionType::Specific(version))
|
|
||||||
.await
|
|
||||||
.context("failed to get remote version")?
|
|
||||||
}
|
}
|
||||||
};
|
|
||||||
|
|
||||||
tracing::debug!("downloading version");
|
let mut versions = source
|
||||||
download_github_release(
|
.resolve(
|
||||||
reqwest,
|
&req,
|
||||||
&version,
|
&ResolveOptions {
|
||||||
fs::File::create(&path)
|
reqwest: reqwest.clone(),
|
||||||
.await
|
},
|
||||||
.context("failed to create version file")?,
|
|
||||||
)
|
)
|
||||||
.await?;
|
.await
|
||||||
}
|
.context("failed to resolve versions")?;
|
||||||
|
let (version, engine_ref) = versions.pop_last().context("no matching versions found")?;
|
||||||
|
|
||||||
|
let path = path
|
||||||
|
.join(version.to_string())
|
||||||
|
.join(source.expected_file_name())
|
||||||
|
.with_extension(std::env::consts::EXE_EXTENSION);
|
||||||
|
|
||||||
|
let archive = source
|
||||||
|
.download(
|
||||||
|
&engine_ref,
|
||||||
|
&DownloadOptions {
|
||||||
|
reqwest: reqwest.clone(),
|
||||||
|
reporter: Arc::new(()),
|
||||||
|
version,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to download engine")?;
|
||||||
|
|
||||||
|
let mut file = fs::File::create(&path)
|
||||||
|
.await
|
||||||
|
.context("failed to create new file")?;
|
||||||
|
tokio::io::copy(
|
||||||
|
&mut archive
|
||||||
|
.find_executable(source.expected_file_name())
|
||||||
|
.await
|
||||||
|
.context("failed to find executable")?,
|
||||||
|
&mut file,
|
||||||
|
)
|
||||||
|
.await
|
||||||
|
.context("failed to write to file")?;
|
||||||
|
|
||||||
make_executable(&path)
|
make_executable(&path)
|
||||||
.await
|
.await
|
||||||
.context("failed to make downloaded version executable")?;
|
.context("failed to make downloaded version executable")?;
|
||||||
|
|
||||||
Ok(if is_requested_version {
|
Ok(path)
|
||||||
None
|
|
||||||
} else {
|
|
||||||
Some(path)
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
#[instrument(level = "trace")]
|
#[instrument(level = "trace")]
|
||||||
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
|
pub async fn replace_bin_exe(engine: EngineKind, with: &Path) -> anyhow::Result<()> {
|
||||||
let bin_exe_path = bin_dir().await?.join(format!(
|
let bin_exe_path = bin_dir()
|
||||||
"{}{}",
|
.await?
|
||||||
env!("CARGO_BIN_NAME"),
|
.join(engine.to_string())
|
||||||
std::env::consts::EXE_SUFFIX
|
.with_extension(std::env::consts::EXE_EXTENSION);
|
||||||
));
|
|
||||||
let mut downloaded_file = downloaded_file.to_path_buf();
|
|
||||||
|
|
||||||
let exists = bin_exe_path.exists();
|
let exists = bin_exe_path.exists();
|
||||||
|
|
||||||
|
@ -339,21 +252,18 @@ pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
|
||||||
let tempfile = tempfile::Builder::new()
|
let tempfile = tempfile::Builder::new()
|
||||||
.make(|_| Ok(()))
|
.make(|_| Ok(()))
|
||||||
.context("failed to create temporary file")?;
|
.context("failed to create temporary file")?;
|
||||||
let path = tempfile.into_temp_path().to_path_buf();
|
let temp_path = tempfile.into_temp_path().to_path_buf();
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
let path = path.with_extension("exe");
|
let temp_path = temp_path.with_extension("exe");
|
||||||
|
|
||||||
let current_exe = current_exe().context("failed to get current exe path")?;
|
match fs::rename(&bin_exe_path, &temp_path).await {
|
||||||
if current_exe == downloaded_file {
|
Ok(_) => {}
|
||||||
downloaded_file = path.to_path_buf();
|
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
|
||||||
|
Err(e) => return Err(e).context("failed to rename existing executable"),
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fs::rename(&bin_exe_path, &path)
|
fs::copy(with, &bin_exe_path)
|
||||||
.await
|
|
||||||
.context("failed to rename current executable")?;
|
|
||||||
}
|
|
||||||
|
|
||||||
fs::copy(downloaded_file, &bin_exe_path)
|
|
||||||
.await
|
.await
|
||||||
.context("failed to copy executable to bin folder")?;
|
.context("failed to copy executable to bin folder")?;
|
||||||
|
|
||||||
|
|
63
src/engine/mod.rs
Normal file
63
src/engine/mod.rs
Normal file
|
@ -0,0 +1,63 @@
|
||||||
|
/// Sources of engines
|
||||||
|
pub mod source;
|
||||||
|
|
||||||
|
use crate::engine::source::EngineSources;
|
||||||
|
use serde_with::{DeserializeFromStr, SerializeDisplay};
|
||||||
|
use std::{fmt::Display, str::FromStr};
|
||||||
|
|
||||||
|
/// All supported engines
|
||||||
|
#[derive(
|
||||||
|
SerializeDisplay, DeserializeFromStr, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord,
|
||||||
|
)]
|
||||||
|
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
|
||||||
|
#[cfg_attr(feature = "schema", schemars(rename_all = "snake_case"))]
|
||||||
|
pub enum EngineKind {
|
||||||
|
/// The pesde package manager
|
||||||
|
Pesde,
|
||||||
|
/// The Lune runtime
|
||||||
|
Lune,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Display for EngineKind {
|
||||||
|
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||||
|
match self {
|
||||||
|
EngineKind::Pesde => write!(f, "pesde"),
|
||||||
|
EngineKind::Lune => write!(f, "lune"),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl FromStr for EngineKind {
|
||||||
|
type Err = errors::EngineKindFromStrError;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
match s.to_lowercase().as_str() {
|
||||||
|
"pesde" => Ok(EngineKind::Pesde),
|
||||||
|
"lune" => Ok(EngineKind::Lune),
|
||||||
|
_ => Err(errors::EngineKindFromStrError::Unknown(s.to_string())),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl EngineKind {
|
||||||
|
/// Returns the source to get this engine from
|
||||||
|
pub fn source(&self) -> EngineSources {
|
||||||
|
match self {
|
||||||
|
EngineKind::Pesde => EngineSources::pesde(),
|
||||||
|
EngineKind::Lune => EngineSources::lune(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors related to engine kinds
|
||||||
|
pub mod errors {
|
||||||
|
use thiserror::Error;
|
||||||
|
|
||||||
|
/// Errors which can occur while using the FromStr implementation of EngineKind
|
||||||
|
#[derive(Debug, Error)]
|
||||||
|
pub enum EngineKindFromStrError {
|
||||||
|
/// The string isn't a recognized EngineKind
|
||||||
|
#[error("unknown engine kind {0}")]
|
||||||
|
Unknown(String),
|
||||||
|
}
|
||||||
|
}
|
319
src/engine/source/archive.rs
Normal file
319
src/engine/source/archive.rs
Normal file
|
@ -0,0 +1,319 @@
|
||||||
|
use futures::StreamExt;
|
||||||
|
use std::{
|
||||||
|
collections::BTreeSet,
|
||||||
|
mem::ManuallyDrop,
|
||||||
|
path::{Path, PathBuf},
|
||||||
|
pin::Pin,
|
||||||
|
str::FromStr,
|
||||||
|
task::{Context, Poll},
|
||||||
|
};
|
||||||
|
use tokio::{
|
||||||
|
io::{AsyncBufRead, AsyncRead, AsyncReadExt, ReadBuf},
|
||||||
|
pin,
|
||||||
|
};
|
||||||
|
use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt};
|
||||||
|
|
||||||
|
/// The kind of encoding used for the archive
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||||
|
pub enum EncodingKind {
|
||||||
|
/// Gzip
|
||||||
|
Gzip,
|
||||||
|
}
|
||||||
|
|
||||||
|
/// The kind of archive
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||||
|
pub enum ArchiveKind {
|
||||||
|
/// Tar
|
||||||
|
Tar,
|
||||||
|
/// Zip
|
||||||
|
Zip,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||||
|
pub(crate) struct ArchiveInfo(ArchiveKind, Option<EncodingKind>);
|
||||||
|
|
||||||
|
impl FromStr for ArchiveInfo {
|
||||||
|
type Err = errors::ArchiveInfoFromStrError;
|
||||||
|
|
||||||
|
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||||
|
let parts = s.split('.').collect::<Vec<_>>();
|
||||||
|
|
||||||
|
Ok(match &*parts {
|
||||||
|
[.., "tar", "gz"] => ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)),
|
||||||
|
[.., "tar"] => ArchiveInfo(ArchiveKind::Tar, None),
|
||||||
|
[.., "zip", "gz"] => {
|
||||||
|
return Err(errors::ArchiveInfoFromStrError::Unsupported(
|
||||||
|
ArchiveKind::Zip,
|
||||||
|
Some(EncodingKind::Gzip),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
[.., "zip"] => ArchiveInfo(ArchiveKind::Zip, None),
|
||||||
|
_ => return Err(errors::ArchiveInfoFromStrError::Invalid(s.to_string())),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An archive
|
||||||
|
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
|
||||||
|
pub struct Archive<R: AsyncBufRead + 'static> {
|
||||||
|
pub(crate) info: ArchiveInfo,
|
||||||
|
pub(crate) reader: R,
|
||||||
|
}
|
||||||
|
|
||||||
|
#[derive(Debug)]
|
||||||
|
enum TarReader<R: AsyncBufRead> {
|
||||||
|
Gzip(async_compression::tokio::bufread::GzipDecoder<R>),
|
||||||
|
Plain(R),
|
||||||
|
}
|
||||||
|
|
||||||
|
// TODO: try to see if we can avoid the unsafe blocks
|
||||||
|
|
||||||
|
impl<R: AsyncBufRead> AsyncRead for TarReader<R> {
|
||||||
|
fn poll_read(
|
||||||
|
self: Pin<&mut Self>,
|
||||||
|
cx: &mut Context<'_>,
|
||||||
|
buf: &mut ReadBuf<'_>,
|
||||||
|
) -> Poll<std::io::Result<()>> {
|
||||||
|
unsafe {
|
||||||
|
match self.get_unchecked_mut() {
|
||||||
|
Self::Gzip(r) => Pin::new_unchecked(r).poll_read(cx, buf),
|
||||||
|
Self::Plain(r) => Pin::new_unchecked(r).poll_read(cx, buf),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
enum ArchiveEntryInner<R: AsyncBufRead + 'static> {
|
||||||
|
Tar(tokio_tar::Entry<tokio_tar::Archive<TarReader<Pin<Box<R>>>>>),
|
||||||
|
Zip {
|
||||||
|
archive: *mut async_zip::tokio::read::seek::ZipFileReader<std::io::Cursor<Vec<u8>>>,
|
||||||
|
reader: ManuallyDrop<
|
||||||
|
Compat<
|
||||||
|
async_zip::tokio::read::ZipEntryReader<
|
||||||
|
'static,
|
||||||
|
std::io::Cursor<Vec<u8>>,
|
||||||
|
async_zip::base::read::WithoutEntry,
|
||||||
|
>,
|
||||||
|
>,
|
||||||
|
>,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: AsyncBufRead> Drop for ArchiveEntryInner<R> {
|
||||||
|
fn drop(&mut self) {
|
||||||
|
match self {
|
||||||
|
Self::Tar(_) => {}
|
||||||
|
Self::Zip { archive, reader } => unsafe {
|
||||||
|
ManuallyDrop::drop(reader);
|
||||||
|
drop(Box::from_raw(*archive));
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// An entry in an archive. Usually the executable
|
||||||
|
pub struct ArchiveEntry<R: AsyncBufRead + 'static>(ArchiveEntryInner<R>);
|
||||||
|
|
||||||
|
impl<R: AsyncBufRead> AsyncRead for ArchiveEntry<R> {
|
||||||
|
fn poll_read(
|
||||||
|
self: Pin<&mut Self>,
|
||||||
|
cx: &mut Context<'_>,
|
||||||
|
buf: &mut ReadBuf<'_>,
|
||||||
|
) -> Poll<std::io::Result<()>> {
|
||||||
|
unsafe {
|
||||||
|
match &mut self.get_unchecked_mut().0 {
|
||||||
|
ArchiveEntryInner::Tar(r) => Pin::new_unchecked(r).poll_read(cx, buf),
|
||||||
|
ArchiveEntryInner::Zip { reader, .. } => {
|
||||||
|
Pin::new_unchecked(&mut **reader).poll_read(cx, buf)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl<R: AsyncBufRead + 'static> Archive<R> {
|
||||||
|
/// Finds the executable in the archive and returns it as an [`ArchiveEntry`]
|
||||||
|
pub async fn find_executable(
|
||||||
|
self,
|
||||||
|
expected_file_name: &str,
|
||||||
|
) -> Result<ArchiveEntry<R>, errors::FindExecutableError> {
|
||||||
|
#[derive(Debug, PartialEq, Eq)]
|
||||||
|
struct Candidate {
|
||||||
|
path: PathBuf,
|
||||||
|
file_name_matches: bool,
|
||||||
|
extension_matches: bool,
|
||||||
|
has_permissions: bool,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Candidate {
|
||||||
|
fn new(path: PathBuf, perms: u32, expected_file_name: &str) -> Self {
|
||||||
|
Self {
|
||||||
|
file_name_matches: path
|
||||||
|
.file_name()
|
||||||
|
.is_some_and(|name| name == expected_file_name),
|
||||||
|
extension_matches: match path.extension() {
|
||||||
|
Some(ext) if ext == std::env::consts::EXE_EXTENSION => true,
|
||||||
|
None if std::env::consts::EXE_EXTENSION.is_empty() => true,
|
||||||
|
_ => false,
|
||||||
|
},
|
||||||
|
path,
|
||||||
|
has_permissions: perms & 0o111 != 0,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn should_be_considered(&self) -> bool {
|
||||||
|
// if nothing matches, we should not consider this candidate as it is most likely not
|
||||||
|
self.file_name_matches || self.extension_matches || self.has_permissions
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Ord for Candidate {
|
||||||
|
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
|
||||||
|
self.file_name_matches
|
||||||
|
.cmp(&other.file_name_matches)
|
||||||
|
.then(self.extension_matches.cmp(&other.extension_matches))
|
||||||
|
.then(self.has_permissions.cmp(&other.has_permissions))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PartialOrd for Candidate {
|
||||||
|
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
|
||||||
|
Some(self.cmp(other))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut candidates = BTreeSet::new();
|
||||||
|
|
||||||
|
match self.info {
|
||||||
|
ArchiveInfo(ArchiveKind::Tar, encoding) => {
|
||||||
|
use async_compression::tokio::bufread as decoders;
|
||||||
|
|
||||||
|
let reader = Box::pin(self.reader);
|
||||||
|
let reader = match encoding {
|
||||||
|
Some(EncodingKind::Gzip) => TarReader::Gzip(decoders::GzipDecoder::new(reader)),
|
||||||
|
None => TarReader::Plain(reader),
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut archive = tokio_tar::Archive::new(reader);
|
||||||
|
let mut entries = archive.entries()?;
|
||||||
|
|
||||||
|
while let Some(entry) = entries.next().await.transpose()? {
|
||||||
|
if entry.header().entry_type().is_dir() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let candidate = Candidate::new(
|
||||||
|
entry.path()?.to_path_buf(),
|
||||||
|
entry.header().mode()?,
|
||||||
|
expected_file_name,
|
||||||
|
);
|
||||||
|
if candidate.should_be_considered() {
|
||||||
|
candidates.insert(candidate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(candidate) = candidates.pop_last() else {
|
||||||
|
return Err(errors::FindExecutableError::ExecutableNotFound);
|
||||||
|
};
|
||||||
|
|
||||||
|
let mut entries = archive.entries()?;
|
||||||
|
|
||||||
|
while let Some(entry) = entries.next().await.transpose()? {
|
||||||
|
if entry.header().entry_type().is_dir() {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let path = entry.path()?;
|
||||||
|
if path == candidate.path {
|
||||||
|
return Ok(ArchiveEntry(ArchiveEntryInner::Tar(entry)));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
ArchiveInfo(ArchiveKind::Zip, _) => {
|
||||||
|
let reader = self.reader;
|
||||||
|
pin!(reader);
|
||||||
|
|
||||||
|
// TODO: would be lovely to not have to read the whole archive into memory
|
||||||
|
let mut buf = vec![];
|
||||||
|
reader.read_to_end(&mut buf).await?;
|
||||||
|
|
||||||
|
let archive = async_zip::base::read::seek::ZipFileReader::with_tokio(
|
||||||
|
std::io::Cursor::new(buf),
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
|
for entry in archive.file().entries() {
|
||||||
|
if entry.dir()? {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let path: &Path = entry.filename().as_str()?.as_ref();
|
||||||
|
let candidate = Candidate::new(
|
||||||
|
path.to_path_buf(),
|
||||||
|
entry.unix_permissions().unwrap_or(0) as u32,
|
||||||
|
expected_file_name,
|
||||||
|
);
|
||||||
|
if candidate.should_be_considered() {
|
||||||
|
candidates.insert(candidate);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let Some(candidate) = candidates.pop_last() else {
|
||||||
|
return Err(errors::FindExecutableError::ExecutableNotFound);
|
||||||
|
};
|
||||||
|
|
||||||
|
for (i, entry) in archive.file().entries().iter().enumerate() {
|
||||||
|
if entry.dir()? {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let path: &Path = entry.filename().as_str()?.as_ref();
|
||||||
|
if candidate.path == path {
|
||||||
|
let ptr = Box::into_raw(Box::new(archive));
|
||||||
|
let reader = (unsafe { &mut *ptr }).reader_without_entry(i).await?;
|
||||||
|
return Ok(ArchiveEntry(ArchiveEntryInner::Zip {
|
||||||
|
archive: ptr,
|
||||||
|
reader: ManuallyDrop::new(reader.compat()),
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Err(errors::FindExecutableError::ExecutableNotFound)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when working with archives
pub mod errors {
    use thiserror::Error;

    /// Errors that can occur when parsing archive info
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum ArchiveInfoFromStrError {
        /// The string is not a valid archive descriptor. E.g. `{name}.tar.gz`
        #[error("string `{0}` is not a valid archive descriptor")]
        Invalid(String),

        /// The archive type is not supported. E.g. `{name}.zip.gz`
        #[error("archive type {0:?} with encoding {1:?} is not supported")]
        Unsupported(super::ArchiveKind, Option<super::EncodingKind>),
    }

    /// Errors that can occur when finding an executable in an archive
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum FindExecutableError {
        /// The executable was not found in the archive
        #[error("failed to find executable in archive")]
        ExecutableNotFound,

        /// An IO error occurred
        // `#[from]` lets `?` convert `std::io::Error` implicitly
        #[error("IO error")]
        Io(#[from] std::io::Error),

        /// An error occurred reading the zip archive
        #[error("failed to read zip archive")]
        Zip(#[from] async_zip::error::ZipError),
    }
}
|
19
src/engine/source/github/engine_ref.rs
Normal file
19
src/engine/source/github/engine_ref.rs
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
use serde::Deserialize;
|
||||||
|
|
||||||
|
/// A GitHub release
///
/// Deserialized from the GitHub REST API's release objects; only the fields
/// pesde needs are kept, unknown fields are ignored by serde.
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Release {
    /// The tag name of the release (a leading `v` is stripped when parsing
    /// this as a semver version)
    pub tag_name: String,
    /// The assets of the release
    pub assets: Vec<Asset>,
}
|
||||||
|
|
||||||
|
/// An asset of a GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Asset {
    /// The name of the asset (the uploaded file name)
    pub name: String,
    /// The download URL of the asset
    // NOTE(review): this appears to be the asset's API URL, which serves the
    // binary payload when requested with `Accept: application/octet-stream`
    pub url: url::Url,
}
|
137
src/engine/source/github/mod.rs
Normal file
137
src/engine/source/github/mod.rs
Normal file
|
@ -0,0 +1,137 @@
|
||||||
|
/// The GitHub engine reference
|
||||||
|
pub mod engine_ref;
|
||||||
|
|
||||||
|
use crate::{
|
||||||
|
engine::source::{
|
||||||
|
archive::Archive,
|
||||||
|
github::engine_ref::Release,
|
||||||
|
traits::{DownloadOptions, EngineSource, ResolveOptions},
|
||||||
|
},
|
||||||
|
reporters::{response_to_async_read, DownloadProgressReporter},
|
||||||
|
version_matches,
|
||||||
|
};
|
||||||
|
use reqwest::header::ACCEPT;
|
||||||
|
use semver::{Version, VersionReq};
|
||||||
|
use std::{collections::BTreeMap, path::PathBuf};
|
||||||
|
use tokio::io::AsyncBufRead;
|
||||||
|
|
||||||
|
/// The GitHub engine source
///
/// Resolves engine versions from a repository's GitHub releases and downloads
/// the release asset matching [`GitHubEngineSource::asset_template`].
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct GitHubEngineSource {
    /// The owner of the repository to download from
    pub owner: String,
    /// The repository of which to download releases from
    pub repo: String,
    /// The template for the asset name. `{VERSION}` will be replaced with the version
    pub asset_template: String,
}
|
||||||
|
|
||||||
|
impl EngineSource for GitHubEngineSource {
    type Ref = Release;
    type ResolveError = errors::ResolveError;
    type DownloadError = errors::DownloadError;

    // Engines from this source are stored under `github/{owner}/{repo}`.
    fn directory(&self) -> PathBuf {
        PathBuf::from("github").join(&self.owner).join(&self.repo)
    }

    // The executable inside the downloaded archive is expected to be named
    // after the repository.
    fn expected_file_name(&self) -> &str {
        &self.repo
    }

    async fn resolve(
        &self,
        requirement: &VersionReq,
        options: &ResolveOptions,
    ) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
        let ResolveOptions { reqwest, .. } = options;

        // NOTE(review): this fetches only the first page of releases (GitHub's
        // default page size is 30), so older versions may be unresolvable —
        // confirm whether pagination / `?per_page` is needed here.
        // NOTE(review): api.github.com rejects requests without a `User-Agent`
        // header — presumably the shared client sets one; verify.
        Ok(reqwest
            .get(format!(
                "https://api.github.com/repos/{}/{}/releases",
                urlencoding::encode(&self.owner),
                urlencoding::encode(&self.repo),
            ))
            .send()
            .await?
            .error_for_status()?
            .json::<Vec<Release>>()
            .await?
            .into_iter()
            // keep only releases whose tag (sans leading `v`) parses as a
            // semver version satisfying the requirement
            .filter_map(
                |release| match release.tag_name.trim_start_matches('v').parse() {
                    Ok(version) if version_matches(&version, requirement) => {
                        Some((version, release))
                    }
                    _ => None,
                },
            )
            .collect())
    }

    async fn download<R: DownloadProgressReporter + 'static>(
        &self,
        engine_ref: &Self::Ref,
        options: &DownloadOptions<R>,
    ) -> Result<Archive<impl AsyncBufRead + 'static>, Self::DownloadError> {
        let DownloadOptions {
            reqwest,
            reporter,
            version,
            ..
        } = options;

        // substitute the resolved version into the asset name template
        let desired_asset_name = self
            .asset_template
            .replace("{VERSION}", &version.to_string());

        // locate the matching release asset (case-insensitive comparison)
        let asset = engine_ref
            .assets
            .iter()
            .find(|asset| asset.name.eq_ignore_ascii_case(&desired_asset_name))
            .ok_or(errors::DownloadError::AssetNotFound)?;

        // asking for octet-stream makes the asset URL serve the raw binary
        let response = reqwest
            .get(asset.url.clone())
            .header(ACCEPT, "application/octet-stream")
            .send()
            .await?
            .error_for_status()?;

        Ok(Archive {
            // archive kind/encoding is derived from the asset's file extension
            info: asset.name.parse()?,
            // wraps the body in a progress-reporting AsyncBufRead
            reader: response_to_async_read(response, reporter.clone()),
        })
    }
}
|
||||||
|
|
||||||
|
/// Errors that can occur when working with the GitHub engine source
pub mod errors {
    use thiserror::Error;

    /// Errors that can occur when resolving a GitHub engine
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum ResolveError {
        /// Handling the request failed
        #[error("failed to handle GitHub API request")]
        Request(#[from] reqwest::Error),
    }

    /// Errors that can occur when downloading a GitHub engine
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum DownloadError {
        /// An asset for the current platform could not be found
        #[error("failed to find asset for current platform")]
        AssetNotFound,

        /// Handling the request failed
        #[error("failed to handle GitHub API request")]
        Request(#[from] reqwest::Error),

        /// The asset's name could not be parsed
        // produced when the asset name isn't a recognized archive descriptor
        #[error("failed to parse asset name")]
        ParseAssetName(#[from] crate::engine::source::archive::errors::ArchiveInfoFromStrError),
    }
}
|
144
src/engine/source/mod.rs
Normal file
144
src/engine/source/mod.rs
Normal file
|
@ -0,0 +1,144 @@
|
||||||
|
use crate::{
|
||||||
|
engine::source::{
|
||||||
|
archive::Archive,
|
||||||
|
traits::{DownloadOptions, EngineSource, ResolveOptions},
|
||||||
|
},
|
||||||
|
reporters::DownloadProgressReporter,
|
||||||
|
};
|
||||||
|
use semver::{Version, VersionReq};
|
||||||
|
use std::{collections::BTreeMap, path::PathBuf};
|
||||||
|
use tokio::io::AsyncBufRead;
|
||||||
|
|
||||||
|
/// Archives
|
||||||
|
pub mod archive;
|
||||||
|
/// The GitHub engine source
|
||||||
|
pub mod github;
|
||||||
|
/// Traits for engine sources
|
||||||
|
pub mod traits;
|
||||||
|
|
||||||
|
/// Engine references
///
/// The source-agnostic counterpart of [`EngineSources`]: wraps the concrete
/// reference type produced by each source variant.
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineRefs {
    /// A GitHub engine reference
    GitHub(github::engine_ref::Release),
}
|
||||||
|
|
||||||
|
/// Engine sources
///
/// An enum over every supported engine source; implements [`EngineSource`]
/// by dispatching to the wrapped variant.
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineSources {
    /// A GitHub engine source
    GitHub(github::GitHubEngineSource),
}
|
||||||
|
|
||||||
|
impl EngineSource for EngineSources {
    type Ref = EngineRefs;
    type ResolveError = errors::ResolveError;
    type DownloadError = errors::DownloadError;

    // Every method below simply dispatches to the wrapped concrete source.

    fn directory(&self) -> PathBuf {
        match self {
            EngineSources::GitHub(source) => source.directory(),
        }
    }

    fn expected_file_name(&self) -> &str {
        match self {
            EngineSources::GitHub(source) => source.expected_file_name(),
        }
    }

    async fn resolve(
        &self,
        requirement: &VersionReq,
        options: &ResolveOptions,
    ) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
        match self {
            EngineSources::GitHub(source) => source
                .resolve(requirement, options)
                .await
                // wrap each concrete ref in the source-agnostic `EngineRefs`
                .map(|map| {
                    map.into_iter()
                        .map(|(version, release)| (version, EngineRefs::GitHub(release)))
                        .collect()
                })
                .map_err(Into::into),
        }
    }

    async fn download<R: DownloadProgressReporter + 'static>(
        &self,
        engine_ref: &Self::Ref,
        options: &DownloadOptions<R>,
    ) -> Result<Archive<impl AsyncBufRead + 'static>, Self::DownloadError> {
        match (self, engine_ref) {
            (EngineSources::GitHub(source), EngineRefs::GitHub(release)) => {
                source.download(release, options).await.map_err(Into::into)
            }

            // for the future: hit when a ref produced by one source variant is
            // passed to a different variant (unreachable with a single variant)
            #[allow(unreachable_patterns)]
            _ => Err(errors::DownloadError::Mismatch),
        }
    }
}
|
||||||
|
|
||||||
|
impl EngineSources {
|
||||||
|
/// Returns the source for the pesde engine
|
||||||
|
pub fn pesde() -> Self {
|
||||||
|
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
|
||||||
|
let (owner, repo) = (
|
||||||
|
parts.next().unwrap().to_string(),
|
||||||
|
parts.next().unwrap().to_string(),
|
||||||
|
);
|
||||||
|
|
||||||
|
EngineSources::GitHub(github::GitHubEngineSource {
|
||||||
|
owner,
|
||||||
|
repo,
|
||||||
|
asset_template: format!(
|
||||||
|
"pesde-{{VERSION}}-{}-{}.zip",
|
||||||
|
std::env::consts::OS,
|
||||||
|
std::env::consts::ARCH
|
||||||
|
),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Returns the source for the lune engine
|
||||||
|
pub fn lune() -> Self {
|
||||||
|
EngineSources::GitHub(github::GitHubEngineSource {
|
||||||
|
owner: "lune-org".into(),
|
||||||
|
repo: "lune".into(),
|
||||||
|
asset_template: format!(
|
||||||
|
"lune-{{VERSION}}-{}-{}.zip",
|
||||||
|
std::env::consts::OS,
|
||||||
|
std::env::consts::ARCH
|
||||||
|
),
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// Errors that can occur when working with engine sources
pub mod errors {
    use thiserror::Error;

    /// Errors that can occur when resolving an engine
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum ResolveError {
        /// Failed to resolve the GitHub engine
        #[error("failed to resolve github engine")]
        GitHub(#[from] super::github::errors::ResolveError),
    }

    /// Errors that can occur when downloading an engine
    #[derive(Debug, Error)]
    #[non_exhaustive]
    pub enum DownloadError {
        /// Failed to download the GitHub engine
        #[error("failed to download github engine")]
        GitHub(#[from] super::github::errors::DownloadError),

        /// Mismatched engine reference
        // returned when a ref from one source variant is handed to another
        #[error("mismatched engine reference")]
        Mismatch,
    }
}
|
54
src/engine/source/traits.rs
Normal file
54
src/engine/source/traits.rs
Normal file
|
@ -0,0 +1,54 @@
|
||||||
|
use crate::{engine::source::archive::Archive, reporters::DownloadProgressReporter};
|
||||||
|
use semver::{Version, VersionReq};
|
||||||
|
use std::{collections::BTreeMap, fmt::Debug, future::Future, path::PathBuf, sync::Arc};
|
||||||
|
use tokio::io::AsyncBufRead;
|
||||||
|
|
||||||
|
/// Options for resolving an engine
#[derive(Debug, Clone)]
pub struct ResolveOptions {
    /// The reqwest client to use
    // `reqwest::Client` is internally reference-counted, so cloning is cheap
    pub reqwest: reqwest::Client,
}
|
||||||
|
|
||||||
|
/// Options for downloading an engine
#[derive(Debug, Clone)]
pub struct DownloadOptions<R: DownloadProgressReporter> {
    /// The reqwest client to use
    pub reqwest: reqwest::Client,
    /// The reporter to use
    // shared via `Arc` so progress callbacks can outlive this struct
    pub reporter: Arc<R>,
    /// The version of the engine to be downloaded
    pub version: Version,
}
|
||||||
|
|
||||||
|
/// A source of engines
pub trait EngineSource: Debug {
    /// The reference type for this source
    type Ref;
    /// The error type for resolving an engine from this source
    type ResolveError: std::error::Error + Send + Sync + 'static;
    /// The error type for downloading an engine from this source
    type DownloadError: std::error::Error + Send + Sync + 'static;

    /// Returns the folder to store the engine's versions in
    // relative path; presumably joined onto a data directory by the caller —
    // TODO confirm
    fn directory(&self) -> PathBuf;

    /// Returns the expected file name of the engine in the archive
    fn expected_file_name(&self) -> &str;

    /// Resolves a requirement to a reference
    ///
    /// Returns every available version satisfying `requirement`, mapped to a
    /// reference that can later be passed to [`EngineSource::download`].
    fn resolve(
        &self,
        requirement: &VersionReq,
        options: &ResolveOptions,
    ) -> impl Future<Output = Result<BTreeMap<Version, Self::Ref>, Self::ResolveError>> + Send + Sync;

    /// Downloads an engine
    ///
    /// Yields the archive containing the engine binary as an async reader.
    fn download<R: DownloadProgressReporter + 'static>(
        &self,
        engine_ref: &Self::Ref,
        options: &DownloadOptions<R>,
    ) -> impl Future<Output = Result<Archive<impl AsyncBufRead + 'static>, Self::DownloadError>>
        + Send
        + Sync;
}
|
33
src/lib.rs
33
src/lib.rs
|
@ -15,6 +15,7 @@ use async_stream::try_stream;
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::Stream;
|
use futures::Stream;
|
||||||
use gix::sec::identity::Account;
|
use gix::sec::identity::Account;
|
||||||
|
use semver::{Version, VersionReq};
|
||||||
use std::{
|
use std::{
|
||||||
collections::{HashMap, HashSet},
|
collections::{HashMap, HashSet},
|
||||||
fmt::Debug,
|
fmt::Debug,
|
||||||
|
@ -29,6 +30,8 @@ use wax::Pattern;
|
||||||
pub mod download;
|
pub mod download;
|
||||||
/// Utility for downloading and linking in the correct order
|
/// Utility for downloading and linking in the correct order
|
||||||
pub mod download_and_link;
|
pub mod download_and_link;
|
||||||
|
/// Handling of engines
|
||||||
|
pub mod engine;
|
||||||
/// Graphs
|
/// Graphs
|
||||||
pub mod graph;
|
pub mod graph;
|
||||||
/// Linking packages
|
/// Linking packages
|
||||||
|
@ -117,8 +120,8 @@ struct ProjectShared {
|
||||||
package_dir: PathBuf,
|
package_dir: PathBuf,
|
||||||
workspace_dir: Option<PathBuf>,
|
workspace_dir: Option<PathBuf>,
|
||||||
data_dir: PathBuf,
|
data_dir: PathBuf,
|
||||||
auth_config: AuthConfig,
|
|
||||||
cas_dir: PathBuf,
|
cas_dir: PathBuf,
|
||||||
|
auth_config: AuthConfig,
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The main struct of the pesde library, representing a project
|
/// The main struct of the pesde library, representing a project
|
||||||
|
@ -130,11 +133,11 @@ pub struct Project {
|
||||||
|
|
||||||
impl Project {
|
impl Project {
|
||||||
/// Create a new `Project`
|
/// Create a new `Project`
|
||||||
pub fn new<P: AsRef<Path>, Q: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(
|
pub fn new(
|
||||||
package_dir: P,
|
package_dir: impl AsRef<Path>,
|
||||||
workspace_dir: Option<Q>,
|
workspace_dir: Option<impl AsRef<Path>>,
|
||||||
data_dir: R,
|
data_dir: impl AsRef<Path>,
|
||||||
cas_dir: S,
|
cas_dir: impl AsRef<Path>,
|
||||||
auth_config: AuthConfig,
|
auth_config: AuthConfig,
|
||||||
) -> Self {
|
) -> Self {
|
||||||
Project {
|
Project {
|
||||||
|
@ -142,8 +145,8 @@ impl Project {
|
||||||
package_dir: package_dir.as_ref().to_path_buf(),
|
package_dir: package_dir.as_ref().to_path_buf(),
|
||||||
workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
|
workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
|
||||||
data_dir: data_dir.as_ref().to_path_buf(),
|
data_dir: data_dir.as_ref().to_path_buf(),
|
||||||
auth_config,
|
|
||||||
cas_dir: cas_dir.as_ref().to_path_buf(),
|
cas_dir: cas_dir.as_ref().to_path_buf(),
|
||||||
|
auth_config,
|
||||||
}),
|
}),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -163,16 +166,16 @@ impl Project {
|
||||||
&self.shared.data_dir
|
&self.shared.data_dir
|
||||||
}
|
}
|
||||||
|
|
||||||
/// The authentication configuration
|
|
||||||
pub fn auth_config(&self) -> &AuthConfig {
|
|
||||||
&self.shared.auth_config
|
|
||||||
}
|
|
||||||
|
|
||||||
/// The CAS (content-addressable storage) directory
|
/// The CAS (content-addressable storage) directory
|
||||||
pub fn cas_dir(&self) -> &Path {
|
pub fn cas_dir(&self) -> &Path {
|
||||||
&self.shared.cas_dir
|
&self.shared.cas_dir
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// The authentication configuration
|
||||||
|
pub fn auth_config(&self) -> &AuthConfig {
|
||||||
|
&self.shared.auth_config
|
||||||
|
}
|
||||||
|
|
||||||
/// Read the manifest file
|
/// Read the manifest file
|
||||||
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
|
||||||
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
|
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
|
||||||
|
@ -425,6 +428,12 @@ pub async fn find_roots(
|
||||||
Ok((project_root.unwrap_or(cwd), workspace_dir))
|
Ok((project_root.unwrap_or(cwd), workspace_dir))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/// Returns whether a version matches a version requirement
|
||||||
|
/// Differs from `VersionReq::matches` in that EVERY version matches `*`
|
||||||
|
pub fn version_matches(version: &Version, req: &VersionReq) -> bool {
|
||||||
|
*req == VersionReq::STAR || req.matches(version)
|
||||||
|
}
|
||||||
|
|
||||||
/// Errors that can occur when using the pesde library
|
/// Errors that can occur when using the pesde library
|
||||||
pub mod errors {
|
pub mod errors {
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
|
81
src/main.rs
81
src/main.rs
|
@ -1,14 +1,16 @@
|
||||||
#[cfg(feature = "version-management")]
|
#[cfg(feature = "version-management")]
|
||||||
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
|
use crate::cli::version::{check_for_updates, current_version, get_or_download_engine};
|
||||||
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
|
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
|
||||||
use anyhow::Context;
|
use anyhow::Context;
|
||||||
use clap::{builder::styling::AnsiColor, Parser};
|
use clap::{builder::styling::AnsiColor, Parser};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use indicatif::MultiProgress;
|
use indicatif::MultiProgress;
|
||||||
use pesde::{find_roots, AuthConfig, Project};
|
use pesde::{engine::EngineKind, find_roots, AuthConfig, Project};
|
||||||
|
use semver::VersionReq;
|
||||||
use std::{
|
use std::{
|
||||||
io,
|
io,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
|
str::FromStr,
|
||||||
sync::Mutex,
|
sync::Mutex,
|
||||||
};
|
};
|
||||||
use tempfile::NamedTempFile;
|
use tempfile::NamedTempFile;
|
||||||
|
@ -135,38 +137,43 @@ impl<'a> MakeWriter<'a> for IndicatifWriter {
|
||||||
|
|
||||||
async fn run() -> anyhow::Result<()> {
|
async fn run() -> anyhow::Result<()> {
|
||||||
let cwd = std::env::current_dir().expect("failed to get current working directory");
|
let cwd = std::env::current_dir().expect("failed to get current working directory");
|
||||||
|
let current_exe = std::env::current_exe().expect("failed to get current executable path");
|
||||||
|
let exe_name = current_exe.file_stem().unwrap();
|
||||||
|
|
||||||
#[cfg(windows)]
|
#[cfg(windows)]
|
||||||
'scripts: {
|
'scripts: {
|
||||||
let exe = std::env::current_exe().expect("failed to get current executable path");
|
// we're called the same as the binary, so we're not a (legal) script
|
||||||
if exe.parent().is_some_and(|parent| {
|
if exe_name == env!("CARGO_PKG_NAME") {
|
||||||
parent.file_name().is_some_and(|parent| parent != "bin")
|
|
||||||
|| parent
|
|
||||||
.parent()
|
|
||||||
.and_then(|parent| parent.file_name())
|
|
||||||
.is_some_and(|parent| parent != HOME_DIR)
|
|
||||||
}) {
|
|
||||||
break 'scripts;
|
break 'scripts;
|
||||||
}
|
}
|
||||||
|
|
||||||
let exe_name = exe.file_name().unwrap().to_string_lossy();
|
if let Some(bin_folder) = current_exe.parent() {
|
||||||
let exe_name = exe_name
|
// we're not in {path}/bin/{exe}
|
||||||
.strip_suffix(std::env::consts::EXE_SUFFIX)
|
if bin_folder.file_name().is_some_and(|parent| parent != "bin") {
|
||||||
.unwrap_or(&exe_name);
|
|
||||||
|
|
||||||
if exe_name == env!("CARGO_BIN_NAME") {
|
|
||||||
break 'scripts;
|
break 'scripts;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// we're not in {path}/.pesde/bin/{exe}
|
||||||
|
if bin_folder
|
||||||
|
.parent()
|
||||||
|
.and_then(|home_folder| home_folder.file_name())
|
||||||
|
.is_some_and(|home_folder| home_folder != HOME_DIR)
|
||||||
|
{
|
||||||
|
break 'scripts;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// the bin script will search for the project root itself, so we do that to ensure
|
// the bin script will search for the project root itself, so we do that to ensure
|
||||||
// consistency across platforms, since the script is executed using a shebang
|
// consistency across platforms, since the script is executed using a shebang
|
||||||
// on unix systems
|
// on unix systems
|
||||||
let status = std::process::Command::new("lune")
|
let status = std::process::Command::new("lune")
|
||||||
.arg("run")
|
.arg("run")
|
||||||
.arg(
|
.arg(
|
||||||
exe.parent()
|
current_exe
|
||||||
.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
|
.parent()
|
||||||
.unwrap_or(exe)
|
.unwrap_or(¤t_exe)
|
||||||
|
.join(".impl")
|
||||||
|
.join(current_exe.file_name().unwrap())
|
||||||
.with_extension("luau"),
|
.with_extension("luau"),
|
||||||
)
|
)
|
||||||
.arg("--")
|
.arg("--")
|
||||||
|
@ -265,20 +272,36 @@ async fn run() -> anyhow::Result<()> {
|
||||||
};
|
};
|
||||||
|
|
||||||
#[cfg(feature = "version-management")]
|
#[cfg(feature = "version-management")]
|
||||||
{
|
'engines: {
|
||||||
let target_version = project
|
let Some(engine) = exe_name
|
||||||
|
.to_str()
|
||||||
|
.and_then(|str| EngineKind::from_str(str).ok())
|
||||||
|
else {
|
||||||
|
break 'engines;
|
||||||
|
};
|
||||||
|
|
||||||
|
let req = project
|
||||||
.deser_manifest()
|
.deser_manifest()
|
||||||
.await
|
.await
|
||||||
.ok()
|
.ok()
|
||||||
.and_then(|manifest| manifest.pesde_version);
|
.and_then(|mut manifest| manifest.engines.remove(&engine));
|
||||||
|
|
||||||
let exe_path = if let Some(version) = target_version {
|
if engine == EngineKind::Pesde {
|
||||||
get_or_download_version(&reqwest, TagInfo::Incomplete(version), false).await?
|
match &req {
|
||||||
} else {
|
// we're already running a compatible version
|
||||||
None
|
Some(req) if req.matches(¤t_version()) => break 'engines,
|
||||||
};
|
// the user has not requested a specific version, so we'll just use the current one
|
||||||
|
None => break 'engines,
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let exe_path =
|
||||||
|
get_or_download_engine(&reqwest, engine, req.unwrap_or(VersionReq::STAR)).await?;
|
||||||
|
if exe_path == current_exe {
|
||||||
|
break 'engines;
|
||||||
|
}
|
||||||
|
|
||||||
if let Some(exe_path) = exe_path {
|
|
||||||
let status = std::process::Command::new(exe_path)
|
let status = std::process::Command::new(exe_path)
|
||||||
.args(std::env::args_os().skip(1))
|
.args(std::env::args_os().skip(1))
|
||||||
.status()
|
.status()
|
||||||
|
@ -287,11 +310,11 @@ async fn run() -> anyhow::Result<()> {
|
||||||
std::process::exit(status.code().unwrap());
|
std::process::exit(status.code().unwrap());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[cfg(feature = "version-management")]
|
||||||
display_err(
|
display_err(
|
||||||
check_for_updates(&reqwest).await,
|
check_for_updates(&reqwest).await,
|
||||||
" while checking for updates",
|
" while checking for updates",
|
||||||
);
|
);
|
||||||
}
|
|
||||||
|
|
||||||
let cli = Cli::parse();
|
let cli = Cli::parse();
|
||||||
|
|
||||||
|
|
|
@ -1,4 +1,5 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
|
engine::EngineKind,
|
||||||
manifest::{
|
manifest::{
|
||||||
overrides::{OverrideKey, OverrideSpecifier},
|
overrides::{OverrideKey, OverrideSpecifier},
|
||||||
target::Target,
|
target::Target,
|
||||||
|
@ -7,7 +8,7 @@ use crate::{
|
||||||
source::specifiers::DependencySpecifiers,
|
source::specifiers::DependencySpecifiers,
|
||||||
};
|
};
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use semver::Version;
|
use semver::{Version, VersionReq};
|
||||||
use serde::{Deserialize, Serialize};
|
use serde::{Deserialize, Serialize};
|
||||||
use std::collections::{BTreeMap, HashMap};
|
use std::collections::{BTreeMap, HashMap};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
@ -85,15 +86,16 @@ pub struct Manifest {
|
||||||
crate::names::PackageNames,
|
crate::names::PackageNames,
|
||||||
BTreeMap<crate::source::ids::VersionId, RelativePathBuf>,
|
BTreeMap<crate::source::ids::VersionId, RelativePathBuf>,
|
||||||
>,
|
>,
|
||||||
#[serde(default, skip_serializing)]
|
|
||||||
/// Which version of the pesde CLI this package uses
|
|
||||||
pub pesde_version: Option<Version>,
|
|
||||||
/// A list of globs pointing to workspace members' directories
|
/// A list of globs pointing to workspace members' directories
|
||||||
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
#[serde(default, skip_serializing_if = "Vec::is_empty")]
|
||||||
pub workspace_members: Vec<String>,
|
pub workspace_members: Vec<String>,
|
||||||
/// The Roblox place of this project
|
/// The Roblox place of this project
|
||||||
#[serde(default, skip_serializing)]
|
#[serde(default, skip_serializing)]
|
||||||
pub place: BTreeMap<target::RobloxPlaceKind, String>,
|
pub place: BTreeMap<target::RobloxPlaceKind, String>,
|
||||||
|
/// The engines this package supports
|
||||||
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
|
#[cfg_attr(feature = "schema", schemars(with = "BTreeMap<EngineKind, String>"))]
|
||||||
|
pub engines: BTreeMap<EngineKind, VersionReq>,
|
||||||
|
|
||||||
/// The standard dependencies of the package
|
/// The standard dependencies of the package
|
||||||
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
|
||||||
|
|
|
@ -9,6 +9,11 @@
|
||||||
|
|
||||||
#![allow(unused_variables)]
|
#![allow(unused_variables)]
|
||||||
|
|
||||||
|
use async_stream::stream;
|
||||||
|
use futures::StreamExt;
|
||||||
|
use std::sync::Arc;
|
||||||
|
use tokio::io::AsyncBufRead;
|
||||||
|
|
||||||
/// Reports downloads.
|
/// Reports downloads.
|
||||||
pub trait DownloadsReporter<'a>: Send + Sync {
|
pub trait DownloadsReporter<'a>: Send + Sync {
|
||||||
/// The [`DownloadProgressReporter`] type associated with this reporter.
|
/// The [`DownloadProgressReporter`] type associated with this reporter.
|
||||||
|
@ -61,3 +66,32 @@ pub trait PatchProgressReporter: Send + Sync {
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PatchProgressReporter for () {}
|
impl PatchProgressReporter for () {}
|
||||||
|
|
||||||
|
/// Wraps a `reqwest` response body in an `AsyncBufRead`, forwarding download
/// progress (and final completion) to `reporter` as chunks arrive.
pub(crate) fn response_to_async_read<R: DownloadProgressReporter>(
    response: reqwest::Response,
    reporter: Arc<R>,
) -> impl AsyncBufRead {
    // 0 when the server did not send a Content-Length header
    let total_len = response.content_length().unwrap_or(0);
    reporter.report_progress(total_len, 0);

    let mut bytes_downloaded = 0;
    let mut stream = response.bytes_stream();
    let bytes = stream!({
        while let Some(chunk) = stream.next().await {
            let chunk = match chunk {
                Ok(chunk) => chunk,
                Err(err) => {
                    // surface the transport error to the reader as an IO error
                    // NOTE(review): `continue` keeps polling the body after a
                    // failure — confirm whether breaking out is preferable
                    yield Err(std::io::Error::new(std::io::ErrorKind::Other, err));
                    continue;
                }
            };
            bytes_downloaded += chunk.len() as u64;
            reporter.report_progress(total_len, bytes_downloaded);
            yield Ok(chunk);
        }

        // runs once the body stream is exhausted, even if errors were yielded
        reporter.report_done();
    });

    tokio_util::io::StreamReader::new(bytes)
}
|
||||||
|
|
|
@ -8,7 +8,6 @@ use std::{
|
||||||
hash::Hash,
|
hash::Hash,
|
||||||
path::PathBuf,
|
path::PathBuf,
|
||||||
};
|
};
|
||||||
use tokio_util::io::StreamReader;
|
|
||||||
|
|
||||||
use pkg_ref::PesdePackageRef;
|
use pkg_ref::PesdePackageRef;
|
||||||
use specifier::PesdeDependencySpecifier;
|
use specifier::PesdeDependencySpecifier;
|
||||||
|
@ -16,7 +15,7 @@ use specifier::PesdeDependencySpecifier;
|
||||||
use crate::{
|
use crate::{
|
||||||
manifest::{target::Target, DependencyType},
|
manifest::{target::Target, DependencyType},
|
||||||
names::{PackageName, PackageNames},
|
names::{PackageName, PackageNames},
|
||||||
reporters::DownloadProgressReporter,
|
reporters::{response_to_async_read, DownloadProgressReporter},
|
||||||
source::{
|
source::{
|
||||||
fs::{store_in_cas, FsEntry, PackageFs},
|
fs::{store_in_cas, FsEntry, PackageFs},
|
||||||
git_index::{read_file, root_tree, GitBasedSource},
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
|
@ -28,7 +27,7 @@ use crate::{
|
||||||
};
|
};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::StreamExt;
|
use futures::StreamExt;
|
||||||
use tokio::task::spawn_blocking;
|
use tokio::{pin, task::spawn_blocking};
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
/// The pesde package reference
|
/// The pesde package reference
|
||||||
|
@ -229,23 +228,8 @@ impl PackageSource for PesdePackageSource {
|
||||||
|
|
||||||
let response = request.send().await?.error_for_status()?;
|
let response = request.send().await?.error_for_status()?;
|
||||||
|
|
||||||
let total_len = response.content_length().unwrap_or(0);
|
let bytes = response_to_async_read(response, reporter.clone());
|
||||||
reporter.report_progress(total_len, 0);
|
pin!(bytes);
|
||||||
|
|
||||||
let mut bytes_downloaded = 0;
|
|
||||||
let bytes = response
|
|
||||||
.bytes_stream()
|
|
||||||
.inspect(|chunk| {
|
|
||||||
chunk.as_ref().ok().inspect(|chunk| {
|
|
||||||
bytes_downloaded += chunk.len() as u64;
|
|
||||||
reporter.report_progress(total_len, bytes_downloaded);
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.map(|result| {
|
|
||||||
result.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
|
|
||||||
});
|
|
||||||
|
|
||||||
let bytes = StreamReader::new(bytes);
|
|
||||||
|
|
||||||
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes);
|
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes);
|
||||||
let mut archive = tokio_tar::Archive::new(&mut decoder);
|
let mut archive = tokio_tar::Archive::new(&mut decoder);
|
||||||
|
@ -297,8 +281,6 @@ impl PackageSource for PesdePackageSource {
|
||||||
.await
|
.await
|
||||||
.map_err(errors::DownloadError::WriteIndex)?;
|
.map_err(errors::DownloadError::WriteIndex)?;
|
||||||
|
|
||||||
reporter.report_done();
|
|
||||||
|
|
||||||
Ok(fs)
|
Ok(fs)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,7 @@
|
||||||
use crate::{
|
use crate::{
|
||||||
manifest::target::{Target, TargetKind},
|
manifest::target::{Target, TargetKind},
|
||||||
names::PackageNames,
|
names::PackageNames,
|
||||||
reporters::DownloadProgressReporter,
|
reporters::{response_to_async_read, DownloadProgressReporter},
|
||||||
source::{
|
source::{
|
||||||
fs::{store_in_cas, FsEntry, PackageFs},
|
fs::{store_in_cas, FsEntry, PackageFs},
|
||||||
git_index::{read_file, root_tree, GitBasedSource},
|
git_index::{read_file, root_tree, GitBasedSource},
|
||||||
|
@ -20,14 +20,13 @@ use crate::{
|
||||||
Project,
|
Project,
|
||||||
};
|
};
|
||||||
use fs_err::tokio as fs;
|
use fs_err::tokio as fs;
|
||||||
use futures::StreamExt;
|
|
||||||
use gix::Url;
|
use gix::Url;
|
||||||
use relative_path::RelativePathBuf;
|
use relative_path::RelativePathBuf;
|
||||||
use reqwest::header::AUTHORIZATION;
|
use reqwest::header::AUTHORIZATION;
|
||||||
use serde::Deserialize;
|
use serde::Deserialize;
|
||||||
use std::{collections::BTreeMap, path::PathBuf};
|
use std::{collections::BTreeMap, path::PathBuf};
|
||||||
use tokio::{io::AsyncReadExt, task::spawn_blocking};
|
use tokio::{io::AsyncReadExt, pin, task::spawn_blocking};
|
||||||
use tokio_util::{compat::FuturesAsyncReadCompatExt, io::StreamReader};
|
use tokio_util::compat::FuturesAsyncReadCompatExt;
|
||||||
use tracing::instrument;
|
use tracing::instrument;
|
||||||
|
|
||||||
pub(crate) mod compat_util;
|
pub(crate) mod compat_util;
|
||||||
|
@ -268,22 +267,9 @@ impl PackageSource for WallyPackageSource {
|
||||||
let response = request.send().await?.error_for_status()?;
|
let response = request.send().await?.error_for_status()?;
|
||||||
|
|
||||||
let total_len = response.content_length().unwrap_or(0);
|
let total_len = response.content_length().unwrap_or(0);
|
||||||
reporter.report_progress(total_len, 0);
|
let bytes = response_to_async_read(response, reporter.clone());
|
||||||
|
pin!(bytes);
|
||||||
|
|
||||||
let mut bytes_downloaded = 0;
|
|
||||||
let bytes = response
|
|
||||||
.bytes_stream()
|
|
||||||
.inspect(|chunk| {
|
|
||||||
chunk.as_ref().ok().inspect(|chunk| {
|
|
||||||
bytes_downloaded += chunk.len() as u64;
|
|
||||||
reporter.report_progress(total_len, bytes_downloaded);
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.map(|result| {
|
|
||||||
result.map_err(|err| std::io::Error::new(std::io::ErrorKind::Other, err))
|
|
||||||
});
|
|
||||||
|
|
||||||
let mut bytes = StreamReader::new(bytes);
|
|
||||||
let mut buf = Vec::with_capacity(total_len as usize);
|
let mut buf = Vec::with_capacity(total_len as usize);
|
||||||
bytes.read_to_end(&mut buf).await?;
|
bytes.read_to_end(&mut buf).await?;
|
||||||
|
|
||||||
|
@ -335,8 +321,6 @@ impl PackageSource for WallyPackageSource {
|
||||||
.await
|
.await
|
||||||
.map_err(errors::DownloadError::WriteIndex)?;
|
.map_err(errors::DownloadError::WriteIndex)?;
|
||||||
|
|
||||||
reporter.report_done();
|
|
||||||
|
|
||||||
Ok(fs)
|
Ok(fs)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
Loading…
Add table
Reference in a new issue