feat: switch to tracing for logging

daimond113 2024-12-16 23:00:37 +01:00
parent 0dde647042
commit 52603ea43e
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
32 changed files with 446 additions and 221 deletions
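In short, the commit replaces the `log` + `pretty_env_logger` + `indicatif-log-bridge` stack with `tracing`, `tracing-subscriber`, and `tracing-indicatif`: call sites move from `log::*!` macros to `tracing::*!`, functions gain `#[instrument]` spans, and progress bars no longer need a shared `MultiProgress` handle. A minimal sketch of the pattern, condensed from the diff below (the helper name `init_tracing` is illustrative; the real wiring lives in the CLI's `run()` in src/main.rs):

    // Sketch only, assuming the same crates and features this commit adds.
    use tracing::instrument;
    use tracing_indicatif::IndicatifLayer;
    use tracing_subscriber::{filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};

    fn init_tracing() {
        // The indicatif layer lets progress bars and log lines share stderr.
        let indicatif_layer = IndicatifLayer::new();
        let filter = EnvFilter::builder()
            .with_default_directive(LevelFilter::INFO.into())
            .from_env_lossy();

        tracing_subscriber::registry()
            .with(filter)
            .with(tracing_subscriber::fmt::layer().with_writer(indicatif_layer.get_stderr_writer()))
            .with(indicatif_layer)
            .init();
    }

    // Call sites change from `log::debug!(..)` to `tracing::debug!(..)`,
    // and spans are added declaratively:
    #[instrument(level = "trace")]
    async fn get_tokens() { /* ... */ }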

Cargo.lock (generated, 147 changed lines)

@@ -357,6 +357,12 @@ version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457"
+[[package]]
+name = "arrayvec"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
 [[package]]
 name = "async-broadcast"
 version = "0.7.1"
@@ -680,7 +686,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22"
 dependencies = [
  "memchr",
- "regex-automata",
+ "regex-automata 0.4.9",
  "serde",
 ]
@@ -2893,19 +2899,10 @@ dependencies = [
  "number_prefix",
  "portable-atomic",
  "unicode-width 0.2.0",
+ "vt100",
  "web-time",
 ]
-[[package]]
-name = "indicatif-log-bridge"
-version = "0.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "63703cf9069b85dbe6fe26e1c5230d013dee99d3559cd3d02ba39e099ef7ab02"
-dependencies = [
- "indicatif",
- "log",
-]
 [[package]]
 name = "inout"
 version = "0.1.3"
@@ -3240,6 +3237,15 @@ version = "0.11.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
+[[package]]
+name = "matchers"
+version = "0.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
+dependencies = [
+ "regex-automata 0.1.10",
+]
 [[package]]
 name = "maybe-async"
 version = "0.2.10"
@@ -3407,6 +3413,16 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "38bf9645c8b145698bb0b18a4637dcacbc421ea49bef2317e4fd8065a387cf21"
+[[package]]
+name = "nu-ansi-term"
+version = "0.46.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
+dependencies = [
+ "overload",
+ "winapi",
+]
 [[package]]
 name = "num"
 version = "0.4.3"
@@ -3606,6 +3622,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
+[[package]]
+name = "overload"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
 [[package]]
 name = "ownedbytes"
 version = "0.7.0"
@@ -3680,13 +3702,10 @@ dependencies = [
  "git2",
  "gix",
  "indicatif",
- "indicatif-log-bridge",
  "inquire",
  "keyring",
- "log",
  "open",
  "pathdiff",
- "pretty_env_logger",
  "relative-path",
  "reqwest",
  "semver",
@@ -3701,6 +3720,9 @@ dependencies = [
  "tokio-util",
  "toml",
  "toml_edit",
+ "tracing",
+ "tracing-indicatif",
+ "tracing-subscriber",
  "url",
  "wax",
  "winreg",
@@ -4068,8 +4090,17 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-automata",
- "regex-syntax",
+ "regex-automata 0.4.9",
+ "regex-syntax 0.8.5",
+]
+[[package]]
+name = "regex-automata"
+version = "0.1.10"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
+dependencies = [
+ "regex-syntax 0.6.29",
 ]
 [[package]]
@@ -4080,7 +4111,7 @@ checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908"
 dependencies = [
  "aho-corasick",
  "memchr",
- "regex-syntax",
+ "regex-syntax 0.8.5",
 ]
 [[package]]
@@ -4089,6 +4120,12 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "53a49587ad06b26609c52e423de037e7f57f20d53535d66e08c695f347df952a"
+[[package]]
+name = "regex-syntax"
+version = "0.6.29"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
 [[package]]
 name = "regex-syntax"
 version = "0.8.5"
@@ -4641,6 +4678,15 @@ dependencies = [
  "digest",
 ]
+[[package]]
+name = "sharded-slab"
+version = "0.1.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
+dependencies = [
+ "lazy_static",
+]
 [[package]]
 name = "shell-words"
 version = "1.1.0"
@@ -4925,7 +4971,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d60769b80ad7953d8a7b2c70cdfe722bbcdcac6bccc8ac934c40c034d866fc18"
 dependencies = [
  "byteorder",
- "regex-syntax",
+ "regex-syntax 0.8.5",
  "utf8-ranges",
 ]
@@ -5260,13 +5306,45 @@ dependencies = [
  "valuable",
 ]
+[[package]]
+name = "tracing-indicatif"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "74ba258e9de86447f75edf6455fded8e5242704c6fccffe7bf8d7fb6daef1180"
+dependencies = [
+ "indicatif",
+ "tracing",
+ "tracing-core",
+ "tracing-subscriber",
+]
+[[package]]
+name = "tracing-log"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
+dependencies = [
+ "log",
+ "once_cell",
+ "tracing-core",
+]
 [[package]]
 name = "tracing-subscriber"
 version = "0.3.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008"
 dependencies = [
+ "matchers",
+ "nu-ansi-term",
+ "once_cell",
+ "regex",
+ "sharded-slab",
+ "smallvec",
+ "thread_local",
+ "tracing",
  "tracing-core",
+ "tracing-log",
 ]
 [[package]]
@@ -5437,6 +5515,39 @@ version = "0.9.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
+[[package]]
+name = "vt100"
+version = "0.15.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "84cd863bf0db7e392ba3bd04994be3473491b31e66340672af5d11943c6274de"
+dependencies = [
+ "itoa",
+ "log",
+ "unicode-width 0.1.14",
+ "vte",
+]
+[[package]]
+name = "vte"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f5022b5fbf9407086c180e9557be968742d839e68346af7792b8592489732197"
+dependencies = [
+ "arrayvec",
+ "utf8parse",
+ "vte_generate_state_changes",
+]
+[[package]]
+name = "vte_generate_state_changes"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2e369bee1b05d510a7b4ed645f5faa90619e05437111783ea5848f28d97d3c2e"
+dependencies = [
+ "proc-macro2",
+ "quote",
+]
 [[package]]
 name = "walkdir"
 version = "2.5.0"


@@ -13,10 +13,10 @@ include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHAN
 bin = [
     "dep:clap",
     "dep:dirs",
-    "dep:pretty_env_logger",
+    "dep:tracing-subscriber",
     "reqwest/json",
     "dep:indicatif",
-    "dep:indicatif-log-bridge",
+    "dep:tracing-indicatif",
     "dep:inquire",
     "dep:toml_edit",
     "dep:colored",
@@ -54,7 +54,7 @@ tokio-tar = "0.3.1"
 async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
 pathdiff = "0.2.3"
 relative-path = { version = "1.9.3", features = ["serde"] }
-log = "0.4.22"
+tracing = { version = "0.1.41", features = ["attributes"] }
 thiserror = "2.0.7"
 tokio = { version = "1.42.0", features = ["process"] }
 tokio-util = "0.7.13"
@@ -81,9 +81,9 @@ colored = { version = "2.1.0", optional = true }
 toml_edit = { version = "0.22.22", optional = true }
 clap = { version = "4.5.23", features = ["derive"], optional = true }
 dirs = { version = "5.0.1", optional = true }
-pretty_env_logger = { version = "0.5.0", optional = true }
+tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
 indicatif = { version = "0.17.9", optional = true }
-indicatif-log-bridge = { version = "0.2.3", optional = true }
+tracing-indicatif = { version = "0.3.8", optional = true }
 inquire = { version = "0.7.5", optional = true }
 
 [target.'cfg(target_os = "windows")'.dependencies]


@@ -5,6 +5,7 @@ use keyring::Entry;
 use reqwest::header::AUTHORIZATION;
 use serde::{ser::SerializeMap, Deserialize, Serialize};
 use std::collections::BTreeMap;
+use tracing::instrument;
 
 #[derive(Debug, Clone)]
 pub struct Tokens(pub BTreeMap<gix::Url, String>);
@@ -37,15 +38,20 @@ impl<'de> Deserialize<'de> for Tokens {
     }
 }
 
+#[instrument(level = "trace")]
 pub async fn get_tokens() -> anyhow::Result<Tokens> {
     let config = read_config().await?;
     if !config.tokens.0.is_empty() {
+        tracing::debug!("using tokens from config");
         return Ok(config.tokens);
     }
 
     match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
         Ok(entry) => match entry.get_password() {
-            Ok(token) => return serde_json::from_str(&token).context("failed to parse tokens"),
+            Ok(token) => {
+                tracing::debug!("using tokens from keyring");
+                return serde_json::from_str(&token).context("failed to parse tokens");
+            }
             Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
             Err(e) => return Err(e.into()),
         },
@@ -56,16 +62,22 @@ pub async fn get_tokens() -> anyhow::Result<Tokens> {
     Ok(Tokens(BTreeMap::new()))
 }
 
+#[instrument(level = "trace")]
 pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
     let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
     let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
 
     match entry.set_password(&json) {
-        Ok(()) => return Ok(()),
+        Ok(()) => {
+            tracing::debug!("tokens saved to keyring");
+            return Ok(());
+        }
         Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
         Err(e) => return Err(e.into()),
     }
 
+    tracing::debug!("tokens saved to config");
+
     let mut config = read_config().await?;
     config.tokens = tokens;
     write_config(&config).await.map_err(Into::into)
@@ -86,6 +98,7 @@ struct UserResponse {
     login: String,
 }
 
+#[instrument(level = "trace")]
 pub async fn get_token_login(
     reqwest: &reqwest::Client,
     access_token: &str,


@@ -2,6 +2,7 @@ use std::{collections::HashSet, str::FromStr};
 
 use anyhow::Context;
 use clap::Args;
+use colored::Colorize;
 use semver::VersionReq;
 
 use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
@@ -62,7 +63,7 @@ impl AddCommand {
             .cloned();
 
         if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-            log::error!("index {index} not found");
+            println!("{}: index {index} not found", "error".red().bold());
             return Ok(());
         }
@@ -89,7 +90,7 @@ impl AddCommand {
             .cloned();
 
         if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-            log::error!("wally index {index} not found");
+            println!("{}: wally index {index} not found", "error".red().bold());
             return Ok(());
         }
@@ -145,7 +146,7 @@ impl AddCommand {
             .pop_last()
             .map(|(v_id, _)| v_id)
         else {
-            log::error!("no versions found for package {specifier}");
+            println!("{}: no versions found for package", "error".red().bold());
            return Ok(());
        };


@@ -2,7 +2,6 @@ use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
 use anyhow::Context;
 use clap::Args;
 use fs_err::tokio as fs;
-use indicatif::MultiProgress;
 use pesde::{
     linking::generator::generate_bin_linking_module,
     manifest::target::TargetKind,
@@ -35,12 +34,7 @@ pub struct ExecuteCommand {
 }
 
 impl ExecuteCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         let index = match self.index {
             Some(index) => Some(index),
             None => read_config().await.ok().map(|c| c.default_index),
@@ -84,7 +78,7 @@ impl ExecuteCommand {
             );
         };
 
-        log::info!("found package {}@{version}", pkg_ref.name);
+        println!("using {}@{version}", pkg_ref.name);
 
         let tmp_dir = project.cas_dir().join(".tmp");
         fs::create_dir_all(&tmp_dir)
@@ -134,7 +128,6 @@ impl ExecuteCommand {
         progress_bar(
             graph.values().map(|versions| versions.len() as u64).sum(),
             rx,
-            &multi,
             "📥 ".to_string(),
             "downloading dependencies".to_string(),
             "downloaded dependencies".to_string(),


@@ -6,7 +6,6 @@ use clap::Args;
 use colored::{ColoredString, Colorize};
 use fs_err::tokio as fs;
 use futures::future::try_join_all;
-use indicatif::MultiProgress;
 use pesde::{
     download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
     MANIFEST_FILE_NAME,
@@ -89,12 +88,7 @@ fn job(n: u8) -> ColoredString {
 struct CallbackError(#[from] anyhow::Error);
 
 impl InstallCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         let mut refreshed_sources = HashSet::new();
 
         let manifest = project
@@ -116,10 +110,10 @@ impl InstallCommand {
         match project.deser_lockfile().await {
             Ok(lockfile) => {
                 if lockfile.overrides != manifest.overrides {
-                    log::debug!("overrides are different");
+                    tracing::debug!("overrides are different");
                     None
                 } else if lockfile.target != manifest.target.kind() {
-                    log::debug!("target kind is different");
+                    tracing::debug!("target kind is different");
                     None
                 } else {
                     Some(lockfile)
@@ -153,7 +147,7 @@ impl InstallCommand {
                 deleted_folders
                     .entry(folder.to_string())
                     .or_insert_with(|| async move {
-                        log::debug!("deleting the {folder} folder");
+                        tracing::debug!("deleting the {folder} folder");
 
                         if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
                             .await
@@ -219,7 +213,7 @@ impl InstallCommand {
             .map(|(alias, _, _)| alias)
             .filter(|alias| {
                 if *alias == env!("CARGO_BIN_NAME") {
-                    log::warn!(
+                    tracing::warn!(
                         "package {alias} has the same name as the CLI, skipping bin link"
                     );
                     return false;
@@ -281,7 +275,6 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
         progress_bar(
             graph.values().map(|versions| versions.len() as u64).sum(),
             rx,
-            &multi,
             format!("{} 📥 ", job(3)),
             "downloading dependencies".to_string(),
             "downloaded dependencies".to_string(),
@@ -303,7 +296,6 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
         progress_bar(
             manifest.patches.values().map(|v| v.len() as u64).sum(),
             rx,
-            &multi,
             format!("{} 🩹 ", job(JOBS - 1)),
             "applying patches".to_string(),
             "applied patches".to_string(),
@@ -323,9 +315,8 @@ exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
             graph: downloaded_graph,
             workspace: run_on_workspace_members(&project, |project| {
-                let multi = multi.clone();
                 let reqwest = reqwest.clone();
-                async move { Box::pin(self.run(project, multi, reqwest)).await }
+                async move { Box::pin(self.run(project, reqwest)).await }
             })
             .await?,
         })


@@ -1,4 +1,3 @@
-use indicatif::MultiProgress;
 use pesde::Project;
 
 mod add;
@@ -72,18 +71,13 @@ pub enum Subcommand {
 }
 
 impl Subcommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         match self {
             Subcommand::Auth(auth) => auth.run(project, reqwest).await,
             Subcommand::Config(config) => config.run().await,
             Subcommand::Init(init) => init.run(project).await,
             Subcommand::Run(run) => run.run(project).await,
-            Subcommand::Install(install) => install.run(project, multi, reqwest).await,
+            Subcommand::Install(install) => install.run(project, reqwest).await,
             Subcommand::Publish(publish) => publish.run(project, reqwest).await,
             #[cfg(feature = "version-management")]
             Subcommand::SelfInstall(self_install) => self_install.run().await,
@@ -94,9 +88,9 @@ impl Subcommand {
             #[cfg(feature = "version-management")]
             Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
             Subcommand::Add(add) => add.run(project).await,
-            Subcommand::Update(update) => update.run(project, multi, reqwest).await,
+            Subcommand::Update(update) => update.run(project, reqwest).await,
             Subcommand::Outdated(outdated) => outdated.run(project).await,
-            Subcommand::Execute(execute) => execute.run(project, multi, reqwest).await,
+            Subcommand::Execute(execute) => execute.run(project, reqwest).await,
         }
     }
 }


@@ -4,6 +4,7 @@ use async_compression::Level;
 use clap::Args;
 use colored::Colorize;
 use fs_err::tokio as fs;
+#[allow(deprecated)]
 use pesde::{
     manifest::{target::Target, DependencyType},
     matching_globs_old_behaviour,
@@ -129,6 +130,7 @@ impl PublishCommand {
             _ => None,
         };
 
+        #[allow(deprecated)]
         let mut paths = matching_globs_old_behaviour(
             project.package_dir(),
             manifest.includes.iter().map(|s| s.as_str()),
@@ -624,7 +626,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
             .body(archive);
 
         if let Some(token) = project.auth_config().tokens().get(index_url) {
-            log::debug!("using token for {index_url}");
+            tracing::debug!("using token for {index_url}");
             request = request.header(AUTHORIZATION, token);
         }


@@ -2,7 +2,6 @@ use crate::cli::{progress_bar, run_on_workspace_members};
 use anyhow::Context;
 use clap::Args;
 use colored::Colorize;
-use indicatif::MultiProgress;
 use pesde::{lockfile::Lockfile, Project};
 use std::{collections::HashSet, sync::Arc};
 use tokio::sync::Mutex;
@@ -11,12 +10,7 @@ use tokio::sync::Mutex;
 pub struct UpdateCommand {}
 
 impl UpdateCommand {
-    pub async fn run(
-        self,
-        project: Project,
-        multi: MultiProgress,
-        reqwest: reqwest::Client,
-    ) -> anyhow::Result<()> {
+    pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
         let mut refreshed_sources = HashSet::new();
 
         let manifest = project
@@ -60,7 +54,6 @@ impl UpdateCommand {
         progress_bar(
             graph.values().map(|versions| versions.len() as u64).sum(),
             rx,
-            &multi,
             "📥 ".to_string(),
             "downloading dependencies".to_string(),
             "downloaded dependencies".to_string(),
@@ -73,9 +66,8 @@ impl UpdateCommand {
             },
             workspace: run_on_workspace_members(&project, |project| {
-                let multi = multi.clone();
                 let reqwest = reqwest.clone();
-                async move { Box::pin(self.run(project, multi, reqwest)).await }
+                async move { Box::pin(self.run(project, reqwest)).await }
             })
             .await?,
         })


@@ -2,6 +2,7 @@ use crate::cli::{auth::Tokens, home_dir};
 use anyhow::Context;
 use fs_err::tokio as fs;
 use serde::{Deserialize, Serialize};
+use tracing::instrument;
 
 #[derive(Debug, Clone, Serialize, Deserialize)]
 #[serde(default)]
@@ -30,6 +31,7 @@ impl Default for CliConfig {
     }
 }
 
+#[instrument(level = "trace")]
 pub async fn read_config() -> anyhow::Result<CliConfig> {
     let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
         Ok(config_string) => config_string,
@@ -44,6 +46,7 @@ pub async fn read_config() -> anyhow::Result<CliConfig> {
     Ok(config)
 }
 
+#[instrument(level = "trace")]
 pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
     let config_string = toml::to_string(config).context("failed to serialize config")?;
     fs::write(home_dir()?.join("config.toml"), config_string)


@@ -2,7 +2,6 @@ use anyhow::Context;
 use colored::Colorize;
 use fs_err::tokio as fs;
 use futures::StreamExt;
-use indicatif::MultiProgress;
 use pesde::{
     lockfile::Lockfile,
     manifest::target::TargetKind,
@@ -19,6 +18,7 @@ use std::{
     time::Duration,
 };
 use tokio::pin;
+use tracing::instrument;
 
 pub mod auth;
 pub mod commands;
@@ -43,6 +43,7 @@ pub async fn bin_dir() -> anyhow::Result<PathBuf> {
     Ok(bin_dir)
 }
 
+#[instrument(skip(project), ret(level = "trace"), level = "debug")]
 pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
     let manifest = project.deser_manifest().await?;
     let lockfile = match project.deser_lockfile().await {
@@ -56,17 +57,17 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
     };
 
     if manifest.overrides != lockfile.overrides {
-        log::debug!("overrides are different");
+        tracing::debug!("overrides are different");
         return Ok(None);
     }
 
     if manifest.target.kind() != lockfile.target {
-        log::debug!("target kind is different");
+        tracing::debug!("target kind is different");
         return Ok(None);
     }
 
     if manifest.name != lockfile.name || manifest.version != lockfile.version {
-        log::debug!("name or version is different");
+        tracing::debug!("name or version is different");
         return Ok(None);
     }
@@ -88,7 +89,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
         .iter()
         .all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
 
-    log::debug!("dependencies are the same: {same_dependencies}");
+    tracing::debug!("dependencies are the same: {same_dependencies}");
 
     Ok(if same_dependencies {
         Some(lockfile)
@@ -133,7 +134,7 @@ impl VersionedPackageName {
             let versions = graph.get(&self.0).context("package not found in graph")?;
             if versions.len() == 1 {
                 let version = versions.keys().next().unwrap().clone();
-                log::debug!("only one version found, using {version}");
+                tracing::debug!("only one version found, using {version}");
                 version
             } else {
                 anyhow::bail!(
@@ -195,21 +196,18 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
 pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
     len: u64,
     mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
-    multi: &MultiProgress,
     prefix: String,
     progress_msg: String,
     finish_msg: String,
 ) -> anyhow::Result<()> {
-    let bar = multi.add(
-        indicatif::ProgressBar::new(len)
-            .with_style(
-                indicatif::ProgressStyle::default_bar()
-                    .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
-                    .progress_chars("█▓▒░ "),
-            )
-            .with_prefix(prefix)
-            .with_message(progress_msg),
-    );
+    let bar = indicatif::ProgressBar::new(len)
+        .with_style(
+            indicatif::ProgressStyle::default_bar()
+                .template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
+                .progress_chars("█▓▒░ "),
+        )
+        .with_prefix(prefix)
+        .with_message(progress_msg);
     bar.enable_steady_tick(Duration::from_millis(100));
 
     while let Some(result) = rx.recv().await {


@@ -15,7 +15,8 @@ use std::{
     env::current_exe,
     path::{Path, PathBuf},
 };
-use tokio::io::AsyncReadExt;
+use tokio::io::AsyncWrite;
+use tracing::instrument;
 
 pub fn current_version() -> Version {
     Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
@@ -33,14 +34,20 @@ struct Asset {
     url: url::Url,
 }
 
+#[instrument(level = "trace")]
 fn get_repo() -> (String, String) {
     let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
-    (
+    let (owner, repo) = (
         parts.next().unwrap().to_string(),
         parts.next().unwrap().to_string(),
-    )
+    );
+
+    tracing::trace!("repository for updates: {owner}/{repo}");
+
+    (owner, repo)
 }
 
+#[instrument(skip(reqwest), level = "trace")]
 pub async fn get_latest_remote_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
     let (owner, repo) = get_repo();
@@ -72,6 +79,7 @@ pub fn no_build_metadata(version: &Version) -> Version {
 
 const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
 
+#[instrument(skip(reqwest), level = "trace")]
 pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
     let config = read_config().await?;
@@ -79,8 +87,10 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
         .last_checked_updates
         .filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
     {
+        tracing::debug!("using cached version");
         version
     } else {
+        tracing::debug!("checking for updates");
         let version = get_latest_remote_version(reqwest).await?;
 
         write_config(&CliConfig {
@@ -157,10 +167,12 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
     Ok(())
 }
 
-pub async fn download_github_release(
+#[instrument(skip(reqwest, writer), level = "trace")]
+pub async fn download_github_release<W: AsyncWrite + Unpin>(
     reqwest: &reqwest::Client,
     version: &Version,
-) -> anyhow::Result<Vec<u8>> {
+    mut writer: W,
+) -> anyhow::Result<()> {
     let (owner, repo) = get_repo();
 
     let release = reqwest
@@ -211,16 +223,13 @@ pub async fn download_github_release(
         .context("archive has no entry")?
         .context("failed to get first archive entry")?;
 
-    let mut result = Vec::new();
-    entry
-        .read_to_end(&mut result)
+    tokio::io::copy(&mut entry, &mut writer)
         .await
-        .context("failed to read archive entry bytes")?;
-
-    Ok(result)
+        .context("failed to write archive entry to file")
+        .map(|_| ())
 }
 
+#[instrument(skip(reqwest), level = "trace")]
 pub async fn get_or_download_version(
     reqwest: &reqwest::Client,
     version: &Version,
@@ -236,6 +245,8 @@ pub async fn get_or_download_version(
     let is_requested_version = !always_give_path && *version == current_version();
 
     if path.exists() {
+        tracing::debug!("version already exists");
+
         return Ok(if is_requested_version {
             None
         } else {
@@ -244,14 +255,20 @@ pub async fn get_or_download_version(
     }
 
     if is_requested_version {
+        tracing::debug!("copying current executable to version directory");
+
         fs::copy(current_exe()?, &path)
             .await
             .context("failed to copy current executable to version directory")?;
     } else {
-        let bytes = download_github_release(reqwest, version).await?;
-        fs::write(&path, bytes)
-            .await
-            .context("failed to write downloaded version file")?;
+        tracing::debug!("downloading version");
+
+        download_github_release(
+            reqwest,
+            version,
+            fs::File::create(&path)
+                .await
+                .context("failed to create version file")?,
+        )
+        .await?;
     }
 
     make_executable(&path)
@@ -265,6 +282,7 @@ pub async fn get_or_download_version(
     })
 }
 
+#[instrument(level = "trace")]
 pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
     let bin_exe_path = bin_dir().await?.join(format!(
         "{}{}",


@@ -13,6 +13,7 @@ use std::{
     collections::HashSet,
     sync::{Arc, Mutex},
 };
+use tracing::instrument;
 
 type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
 
@@ -23,6 +24,7 @@ pub(crate) type MultithreadDownloadJob = (
 
 impl Project {
     /// Downloads a graph of dependencies
+    #[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
     pub async fn download_graph(
         &self,
         graph: &DependencyGraph,
@@ -98,7 +100,7 @@ impl Project {
                 let project = project.clone();
 
-                log::debug!("downloading {name}@{version_id}");
+                tracing::debug!("downloading {name}@{version_id}");
 
                 let (fs, target) =
                     match source.download(&node.pkg_ref, &project, &reqwest).await {
@@ -109,7 +111,7 @@ impl Project {
                         }
                     };
 
-                log::debug!("downloaded {name}@{version_id}");
+                tracing::debug!("downloaded {name}@{version_id}");
 
                 if write {
                     if !prod || node.resolved_ty != DependencyType::Dev {
@@ -123,7 +125,7 @@ impl Project {
                         }
                     };
                 } else {
-                    log::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
+                    tracing::debug!("skipping writing {name}@{version_id} to disk, dev dependency in prod mode");
                 }
             }


@@ -11,6 +11,7 @@ use std::{
     sync::{Arc, Mutex as StdMutex},
 };
 use tokio::sync::Mutex;
+use tracing::instrument;
 
 /// Filters a graph to only include production dependencies, if `prod` is `true`
 pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
@@ -33,8 +34,16 @@ pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
         .collect()
 }
 
+/// Receiver for dependencies downloaded and linked
+pub type DownloadAndLinkReceiver =
+    tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
+
 impl Project {
     /// Downloads a graph of dependencies and links them in the correct order
+    #[instrument(
+        skip(self, graph, refreshed_sources, reqwest, pesde_cb),
+        level = "debug"
+    )]
     pub async fn download_and_link<
         F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
         R: Future<Output = Result<(), E>> + Send,
@@ -49,9 +58,7 @@ impl Project {
         pesde_cb: F,
     ) -> Result<
         (
-            tokio::sync::mpsc::Receiver<
-                Result<String, crate::download::errors::DownloadGraphError>,
-            >,
+            DownloadAndLinkReceiver,
             impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
         ),
         errors::DownloadAndLinkError<E>,


@@ -14,8 +14,10 @@ use futures::{future::try_join_all, Stream};
 use gix::sec::identity::Account;
 use std::{
     collections::{HashMap, HashSet},
+    fmt::Debug,
     path::{Path, PathBuf},
 };
+use tracing::instrument;
 use wax::Pattern;
 
 /// Downloading packages
@@ -149,29 +151,35 @@ impl Project {
     }
 
     /// Read the manifest file
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
         let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
         Ok(string)
     }
 
+    // TODO: cache the manifest
     /// Deserialize the manifest file
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
         let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }
 
     /// Write the manifest file
+    #[instrument(skip(self, manifest), level = "debug")]
     pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
         fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
     }
 
     /// Deserialize the lockfile
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
         let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }
 
     /// Write the lockfile
+    #[instrument(skip(self, lockfile), level = "debug")]
     pub async fn write_lockfile(
         &self,
         lockfile: Lockfile,
@@ -182,7 +190,8 @@ impl Project {
     }
 
     /// Get the workspace members
-    pub async fn workspace_members<P: AsRef<Path>>(
+    #[instrument(skip(self), level = "debug")]
+    pub async fn workspace_members<P: AsRef<Path> + Debug>(
         &self,
         dir: P,
         can_ref_self: bool,
@@ -222,7 +231,16 @@ impl Project {
 }
 
 /// Gets all matching paths in a directory
-pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
+#[deprecated(
+    since = "0.5.0-rc.13",
+    note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
+)]
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs_old_behaviour<
+    'a,
+    P: AsRef<Path> + Debug,
+    I: IntoIterator<Item = &'a str> + Debug,
+>(
     dir: P,
     globs: I,
     relative: bool,
@@ -270,7 +288,7 @@ pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<It
             is_entire_dir_included || is_filename_match,
         ));
         if is_filename_match {
-            log::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
+            tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
         }
     }
 
@@ -293,7 +311,8 @@ pub async fn matching_globs_old_behaviour<'a, P: AsRef<Path>, I: IntoIterator<It
 }
 
 /// Gets all matching paths in a directory
-pub async fn matching_globs<'a, P: AsRef<Path>, I: IntoIterator<Item = &'a str>>(
+#[instrument(ret, level = "trace")]
+pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
     dir: P,
     globs: I,
     relative: bool,


@@ -117,10 +117,18 @@ pub fn get_lib_require_path(
 ) -> Result<String, errors::GetLibRequirePath> {
     let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
     let path = if use_new_structure {
-        log::debug!("using new structure for require path with {:?}", lib_file);
+        tracing::debug!(
+            "using new structure for require path with {:?} of {}",
+            lib_file,
+            container_dir.display()
+        );
         lib_file.to_path(path)
     } else {
-        log::debug!("using old structure for require path with {:?}", lib_file);
+        tracing::debug!(
+            "using old structure for require path with {:?} of {}",
+            lib_file,
+            container_dir.display()
+        );
         path
     };


@@ -20,6 +20,7 @@ use std::{
     sync::Arc,
 };
 use tokio::task::spawn_blocking;
+use tracing::instrument;
 
 /// Generates linking modules for a project
 pub mod generator;
@@ -44,6 +45,7 @@ async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std:
 
 impl Project {
     /// Links the dependencies of the project
+    #[instrument(skip(self, graph), level = "debug")]
     pub async fn link_dependencies(
         &self,
         graph: &DownloadedGraph,
@@ -110,7 +112,7 @@ impl Project {
                         }
                     };
 
-                    log::debug!("{name}@{version_id} has {} exported types", types.len());
+                    tracing::debug!("{name}@{version_id} has {} exported types", types.len());
 
                     types
                 } else {
@@ -122,7 +124,7 @@ impl Project {
                     .and_then(|t| t.build_files())
                 {
                     let Some(script_path) = roblox_sync_config_gen_script else {
-                        log::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
+                        tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
                         return Ok((version_id, types));
                     };


@@ -14,7 +14,7 @@ use relative_path::RelativePathBuf;
 use semver::Version;
 use serde::{Deserialize, Serialize};
 use std::{
-    collections::{btree_map::Entry, BTreeMap},
+    collections::BTreeMap,
     path::{Path, PathBuf},
 };
 
@@ -74,45 +74,6 @@ impl DependencyGraphNode {
 /// A graph of `DependencyGraphNode`s
 pub type DependencyGraph = Graph<DependencyGraphNode>;
 
-pub(crate) fn insert_node(
-    graph: &mut DependencyGraph,
-    name: PackageNames,
-    version: VersionId,
-    mut node: DependencyGraphNode,
-    is_top_level: bool,
-) {
-    if !is_top_level && node.direct.take().is_some() {
-        log::debug!(
-            "tried to insert {name}@{version} as direct dependency from a non top-level context",
-        );
-    }
-
-    match graph
-        .entry(name.clone())
-        .or_default()
-        .entry(version.clone())
-    {
-        Entry::Vacant(entry) => {
-            entry.insert(node);
-        }
-        Entry::Occupied(existing) => {
-            let current_node = existing.into_mut();
-            match (&current_node.direct, &node.direct) {
-                (Some(_), Some(_)) => {
-                    log::warn!("duplicate direct dependency for {name}@{version}");
-                }
-                (None, Some(_)) => {
-                    current_node.direct = node.direct;
-                }
-                (_, _) => {}
-            }
-        }
-    }
-}
-
 /// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct DownloadedDependencyGraphNode {


@@ -4,14 +4,18 @@ use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
 use anyhow::Context;
 use clap::{builder::styling::AnsiColor, Parser};
 use fs_err::tokio as fs;
-use indicatif::MultiProgress;
-use indicatif_log_bridge::LogWrapper;
 use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
     collections::HashSet,
     path::{Path, PathBuf},
 };
 use tempfile::NamedTempFile;
+use tracing::instrument;
+use tracing_indicatif::IndicatifLayer;
+use tracing_subscriber::{
+    filter::LevelFilter, fmt::time::uptime, layer::SubscriberExt, util::SubscriberInitExt,
+    EnvFilter,
+};
 
 mod cli;
 pub mod util;
@@ -38,6 +42,7 @@ struct Cli {
     subcommand: cli::commands::Subcommand,
 }
 
+#[instrument(level = "trace")]
 async fn get_linkable_dir(path: &Path) -> PathBuf {
     let mut curr_path = PathBuf::new();
     let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
@@ -68,7 +73,7 @@ async fn get_linkable_dir(path: &Path) -> PathBuf {
         if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
             if let Err(err) = fs::remove_file(&try_path).await {
-                log::warn!(
+                tracing::warn!(
                     "failed to remove temporary file at {}: {err}",
                     try_path.display()
                 );
@@ -129,6 +134,29 @@ async fn run() -> anyhow::Result<()> {
         std::process::exit(status.code().unwrap());
     }
 
+    let indicatif_layer = IndicatifLayer::new();
+
+    let tracing_env_filter = EnvFilter::builder()
+        .with_default_directive(LevelFilter::INFO.into())
+        .from_env_lossy()
+        .add_directive("reqwest=info".parse().unwrap())
+        .add_directive("rustls=info".parse().unwrap())
+        .add_directive("tokio_util=info".parse().unwrap())
+        .add_directive("goblin=info".parse().unwrap())
+        .add_directive("tower=info".parse().unwrap())
+        .add_directive("hyper=info".parse().unwrap())
+        .add_directive("h2=info".parse().unwrap());
+
+    tracing_subscriber::registry()
+        .with(tracing_env_filter)
+        .with(
+            tracing_subscriber::fmt::layer()
+                .with_writer(indicatif_layer.get_stderr_writer())
+                .with_timer(uptime()),
+        )
+        .with(indicatif_layer)
+        .init();
+
     let (project_root_dir, project_workspace_dir) = 'finder: {
         let mut current_path = Some(cwd.clone());
         let mut project_root = None::<PathBuf>;
@@ -191,16 +219,13 @@ async fn run() -> anyhow::Result<()> {
         (project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
     };
 
-    let multi = {
-        let logger = pretty_env_logger::formatted_builder()
-            .parse_env(pretty_env_logger::env_logger::Env::default().default_filter_or("info"))
-            .build();
-        let multi = MultiProgress::new();
-
-        LogWrapper::new(multi.clone(), logger).try_init().unwrap();
-        multi
-    };
+    tracing::trace!(
+        "project root: {}\nworkspace root: {}",
+        project_root_dir.display(),
+        project_workspace_dir
+            .as_ref()
+            .map_or("none".to_string(), |p| p.display().to_string())
+    );
 
     let home_dir = home_dir()?;
     let data_dir = home_dir.join("data");
@@ -217,7 +242,7 @@ async fn run() -> anyhow::Result<()> {
     }
     .join("cas");
 
-    log::debug!("using cas dir in {}", cas_dir.display());
+    tracing::debug!("using cas dir in {}", cas_dir.display());
 
     let project = Project::new(
         project_root_dir,
@@ -278,7 +303,7 @@ async fn run() -> anyhow::Result<()> {
     let cli = Cli::parse();
 
-    cli.subcommand.run(project, multi, reqwest).await
+    cli.subcommand.run(project, reqwest).await
 }
 
 #[tokio::main]


@@ -1,13 +1,13 @@
-use relative_path::RelativePathBuf;
-use semver::Version;
-use serde::{Deserialize, Serialize};
-use std::collections::{BTreeMap, HashMap};
 use crate::{
     manifest::{overrides::OverrideKey, target::Target},
     names::PackageName,
     source::specifiers::DependencySpecifiers,
 };
+use relative_path::RelativePathBuf;
+use semver::Version;
+use serde::{Deserialize, Serialize};
+use std::collections::{BTreeMap, HashMap};
+use tracing::instrument;
 
 /// Overrides
 pub mod overrides;
@@ -107,6 +107,7 @@ pub enum DependencyType {
 impl Manifest {
     /// Get all dependencies from the manifest
+    #[instrument(skip(self), ret(level = "trace"), level = "debug")]
     pub fn all_dependencies(
         &self,
     ) -> Result<


@@ -3,6 +3,7 @@ use fs_err::tokio as fs;
 use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
 use relative_path::RelativePathBuf;
 use std::path::Path;
+use tracing::instrument;
 
 /// Set up a git repository for patches
 pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Error> {
@@ -69,6 +70,7 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
 
 impl Project {
     /// Apply patches to the project's dependencies
+    #[instrument(skip(self, graph), level = "debug")]
     pub async fn apply_patches(
         &self,
         graph: &DownloadedGraph,
@@ -97,7 +99,7 @@ impl Project {
                 .get(&name)
                 .and_then(|versions| versions.get(&version_id))
             else {
-                log::warn!(
+                tracing::warn!(
                     "patch for {name}@{version_id} not applied because it is not in the graph"
                 );
                 tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();
@@ -114,7 +116,7 @@ impl Project {
             );
 
             tokio::spawn(async move {
-                log::debug!("applying patch to {name}@{version_id}");
+                tracing::debug!("applying patch to {name}@{version_id}");
 
                 let patch = match fs::read(&patch_path).await {
                     Ok(patch) => patch,
@@ -195,7 +197,9 @@ impl Project {
                     }
                 }
 
-                log::debug!("patch applied to {name}@{version_id}, removing .git directory");
+                tracing::debug!(
+                    "patch applied to {name}@{version_id}, removing .git directory"
+                );
 
                 if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await {
                     tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e)))


@@ -1,5 +1,5 @@
 use crate::{
-    lockfile::{insert_node, DependencyGraph, DependencyGraphNode},
+    lockfile::{DependencyGraph, DependencyGraphNode},
     manifest::DependencyType,
     names::PackageNames,
     source::{
@@ -11,10 +11,55 @@ use crate::{
     },
     Project, DEFAULT_INDEX_NAME,
 };
-use std::collections::{HashMap, HashSet, VecDeque};
+use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque};
+use tracing::instrument;
+
+fn insert_node(
+    graph: &mut DependencyGraph,
+    name: PackageNames,
+    version: VersionId,
+    mut node: DependencyGraphNode,
+    is_top_level: bool,
+) {
+    if !is_top_level && node.direct.take().is_some() {
+        tracing::debug!(
+            "tried to insert {name}@{version} as direct dependency from a non top-level context",
+        );
+    }
+
+    match graph
+        .entry(name.clone())
+        .or_default()
+        .entry(version.clone())
+    {
+        Entry::Vacant(entry) => {
+            entry.insert(node);
+        }
+        Entry::Occupied(existing) => {
+            let current_node = existing.into_mut();
+            match (&current_node.direct, &node.direct) {
+                (Some(_), Some(_)) => {
+                    tracing::warn!("duplicate direct dependency for {name}@{version}");
+                }
+                (None, Some(_)) => {
+                    current_node.direct = node.direct;
+                }
+                (_, _) => {}
+            }
+        }
+    }
+}
 
 impl Project {
     /// Create a dependency graph from the project's manifest
+    #[instrument(
+        skip(self, previous_graph, refreshed_sources),
+        ret(level = "trace"),
+        level = "debug"
+    )]
     pub async fn dependency_graph(
         &self,
         previous_graph: Option<&DependencyGraph>,
@@ -51,13 +96,13 @@ impl Project {
                 let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty))
                 else {
-                    log::debug!(
+                    tracing::debug!(
                         "dependency {name}@{version} from old dependency graph is no longer in the manifest",
                     );
                     continue;
                 };
 
-                log::debug!("resolved {}@{} from old dependency graph", name, version);
+                tracing::debug!("resolved {}@{} from old dependency graph", name, version);
                 insert_node(
                     &mut graph,
                     name.clone(),
@@ -80,7 +125,7 @@ impl Project {
                         .get(dep_name)
                         .and_then(|v| v.get(dep_version))
                     {
-                        log::debug!(
+                        tracing::debug!(
                             "{}resolved dependency {}@{} from {}@{}",
                             "\t".repeat(depth),
                             dep_name,
@@ -102,7 +147,7 @@ impl Project {
                             .map(|(name, (version, _))| (name, version, depth + 1))
                             .for_each(|dep| queue.push_back(dep));
                     } else {
-                        log::warn!(
+                        tracing::warn!(
                             "dependency {}@{} from {}@{} not found in previous graph",
                             dep_name,
                             dep_version,
@@ -133,7 +178,7 @@ impl Project {
             let alias = path.last().unwrap().clone();
             let depth = path.len() - 1;
 
-            log::debug!(
+            tracing::debug!(
                 "{}resolving {specifier} from {}",
                 "\t".repeat(depth),
                 path.join(">")
@@ -243,7 +288,7 @@ impl Project {
                 .get_mut(&name)
                 .and_then(|versions| versions.get_mut(&target_version_id))
             {
-                log::debug!(
+                tracing::debug!(
                     "{}{}@{} already resolved",
                     "\t".repeat(depth),
                     name,
@@ -253,7 +298,7 @@ impl Project {
                 if std::mem::discriminant(&already_resolved.pkg_ref)
                     != std::mem::discriminant(pkg_ref)
                 {
-                    log::warn!(
+                    tracing::warn!(
                         "resolved package {name}@{target_version_id} has a different source than the previously resolved one at {}, this may cause issues",
                         path.join(">")
                     );
@@ -290,7 +335,7 @@ impl Project {
                 depth == 0,
             );
 
-            log::debug!(
+            tracing::debug!(
                 "{}resolved {}@{} from new dependency graph",
                 "\t".repeat(depth),
                 name,
@@ -318,7 +363,7 @@ impl Project {
             });
 
             if overridden.is_some() {
-                log::debug!(
+                tracing::debug!(
                     "{}overridden specifier found for {} ({dependency_spec})",
                     "\t".repeat(depth),
                     path.iter()
@@ -346,7 +391,7 @@ impl Project {
         for (name, versions) in &graph {
             for (version_id, node) in versions {
                 if node.resolved_ty == DependencyType::Peer {
log::warn!("peer dependency {name}@{version_id} was not resolved"); tracing::warn!("peer dependency {name}@{version_id} was not resolved");
} }
} }
} }
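`dependency_graph` is instrumented with `ret(level = "trace")`, which is worth calling out: the span itself opens at debug, but the potentially large return value is only recorded as a trace-level event, so a `debug` filter shows the resolution steps without dumping the full graph. A sketch of the same combination on a toy function (names invented):

```rust
use tracing::instrument;

/// Span at debug; the return value is only emitted as a trace event.
#[instrument(skip(inputs), ret(level = "trace"), level = "debug")]
fn summarize(inputs: &[u32]) -> u32 {
    tracing::debug!("summing {} inputs", inputs.len());
    inputs.iter().sum()
}
```

`ret` formats the return value with its `Debug` implementation, so it is only applicable to functions whose return types are `Debug`.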

View file

@ -1,7 +1,7 @@
use crate::Project; use crate::Project;
use std::{ use std::{
ffi::OsStr, ffi::OsStr,
fmt::{Display, Formatter}, fmt::{Debug, Display, Formatter},
path::Path, path::Path,
process::Stdio, process::Stdio,
}; };
@ -9,6 +9,7 @@ use tokio::{
io::{AsyncBufReadExt, BufReader}, io::{AsyncBufReadExt, BufReader},
process::Command, process::Command,
}; };
use tracing::instrument;
/// Script names used by pesde /// Script names used by pesde
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
@ -30,7 +31,8 @@ impl Display for ScriptName {
} }
} }
pub(crate) async fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>( #[instrument(skip(project), level = "debug")]
pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
script_name: ScriptName, script_name: ScriptName,
script_path: &Path, script_path: &Path,
args: A, args: A,
@ -59,10 +61,10 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
while let Some(line) = stderr.next_line().await.transpose() { while let Some(line) = stderr.next_line().await.transpose() {
match line { match line {
Ok(line) => { Ok(line) => {
log::error!("[{script}]: {line}"); tracing::error!("[{script}]: {line}");
} }
Err(e) => { Err(e) => {
log::error!("ERROR IN READING STDERR OF {script}: {e}"); tracing::error!("ERROR IN READING STDERR OF {script}: {e}");
break; break;
} }
} }
@ -78,11 +80,11 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
stdout_str.push_str(&line); stdout_str.push_str(&line);
stdout_str.push('\n'); stdout_str.push('\n');
} else { } else {
log::info!("[{script_2}]: {line}"); tracing::info!("[{script_2}]: {line}");
} }
} }
Err(e) => { Err(e) => {
log::error!("ERROR IN READING STDOUT OF {script_2}: {e}"); tracing::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
break; break;
} }
} }
@ -95,7 +97,7 @@ pub(crate) async fn execute_script<A: IntoIterator<Item = S>, S: AsRef<OsStr>>(
} }
} }
Err(e) if e.kind() == std::io::ErrorKind::NotFound => { Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
log::warn!("Lune could not be found in PATH: {e}"); tracing::warn!("Lune could not be found in PATH: {e}");
Ok(None) Ok(None)
} }
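The new `+ Debug` bounds on `execute_script`'s generics are not incidental: `#[instrument]` records every argument that is not listed in `skip(...)` using its `Debug` implementation, so generic parameters that end up on the span need a `Debug` bound (or must be skipped). A small sketch of that constraint, with invented names:

```rust
use std::{ffi::OsStr, fmt::Debug};
use tracing::instrument;

// `args` is recorded on the span, so `A` (and `S`) must be Debug;
// `secret` is skipped, so no Debug requirement applies to it.
#[instrument(skip(secret), level = "debug")]
fn run_tool<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
    name: &str,
    args: A,
    secret: &[u8],
) {
    let argc = args.into_iter().count();
    tracing::info!("[{name}]: spawned with {argc} arguments");
}

fn main() {
    // Without an installed subscriber the events are simply dropped.
    run_tool("formatter", ["--check", "src/"], b"token");
}
```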

View file

@ -9,6 +9,7 @@ use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::{ use std::{
collections::BTreeMap, collections::BTreeMap,
fmt::Debug,
future::Future, future::Future,
path::{Path, PathBuf}, path::{Path, PathBuf},
}; };
@ -17,6 +18,7 @@ use tokio::{
io::{AsyncReadExt, AsyncWriteExt}, io::{AsyncReadExt, AsyncWriteExt},
pin, pin,
}; };
use tracing::instrument;
/// A file system entry /// A file system entry
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -125,7 +127,8 @@ pub(crate) async fn store_in_cas<
impl PackageFS { impl PackageFS {
/// Write the package to the given destination /// Write the package to the given destination
pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>( #[instrument(skip(self), level = "debug")]
pub async fn write_to<P: AsRef<Path> + Debug, Q: AsRef<Path> + Debug>(
&self, &self,
destination: P, destination: P,
cas_path: Q, cas_path: Q,
@ -211,7 +214,8 @@ impl PackageFS {
} }
/// Returns the contents of the file with the given hash /// Returns the contents of the file with the given hash
pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>( #[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn read_file<P: AsRef<Path> + Debug, H: AsRef<str> + Debug>(
&self, &self,
file_hash: H, file_hash: H,
cas_path: P, cas_path: P,

View file

@ -27,6 +27,7 @@ use std::{
sync::Arc, sync::Arc,
}; };
use tokio::{sync::Mutex, task::spawn_blocking}; use tokio::{sync::Mutex, task::spawn_blocking};
use tracing::instrument;
/// The Git package reference /// The Git package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -70,10 +71,12 @@ impl PackageSource for GitPackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -329,6 +332,7 @@ impl PackageSource for GitPackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -343,7 +347,7 @@ impl PackageSource for GitPackageSource {
match fs::read_to_string(&index_file).await { match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
log::debug!( tracing::debug!(
"using cached index file for package {}#{}", "using cached index file for package {}#{}",
pkg_ref.repo, pkg_ref.repo,
pkg_ref.tree_id pkg_ref.tree_id
@ -487,7 +491,7 @@ impl PackageSource for GitPackageSource {
} }
if pkg_ref.use_new_structure() && name == "default.project.json" { if pkg_ref.use_new_structure() && name == "default.project.json" {
log::debug!( tracing::debug!(
"removing default.project.json from {}#{} at {path} - using new structure", "removing default.project.json from {}#{} at {path} - using new structure",
pkg_ref.repo, pkg_ref.repo,
pkg_ref.tree_id pkg_ref.tree_id

View file

@ -1,8 +1,11 @@
#![allow(async_fn_in_trait)] #![allow(async_fn_in_trait)]
use crate::{util::authenticate_conn, Project}; use crate::{util::authenticate_conn, Project};
use fs_err::tokio as fs; use fs_err::tokio as fs;
use gix::remote::Direction; use gix::remote::Direction;
use std::fmt::Debug;
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use tracing::instrument;
/// A trait for sources that are based on Git repositories /// A trait for sources that are based on Git repositories
pub trait GitBasedSource { pub trait GitBasedSource {
@ -90,7 +93,11 @@ pub trait GitBasedSource {
} }
/// Reads a file from a tree /// Reads a file from a tree
pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix::bstr::BStr>>( #[instrument(skip(tree), ret, level = "trace")]
pub fn read_file<
I: IntoIterator<Item = P> + Clone + Debug,
P: ToString + PartialEq<gix::bstr::BStr>,
>(
tree: &gix::Tree, tree: &gix::Tree,
file_path: I, file_path: I,
) -> Result<Option<String>, errors::ReadFile> { ) -> Result<Option<String>, errors::ReadFile> {
@ -120,6 +127,7 @@ pub fn read_file<I: IntoIterator<Item = P> + Clone, P: ToString + PartialEq<gix:
} }
/// Gets the root tree of a repository /// Gets the root tree of a repository
#[instrument(skip(repo), level = "trace")]
pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> { pub fn root_tree(repo: &gix::Repository) -> Result<gix::Tree, errors::TreeError> {
// this is a bare repo, so this is the actual path // this is a bare repo, so this is the actual path
let path = repo.path().to_path_buf(); let path = repo.path().to_path_buf();

View file

@ -30,6 +30,7 @@ use crate::{
use fs_err::tokio as fs; use fs_err::tokio as fs;
use futures::StreamExt; use futures::StreamExt;
use tokio::task::spawn_blocking; use tokio::task::spawn_blocking;
use tracing::instrument;
/// The pesde package reference /// The pesde package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -73,6 +74,7 @@ impl PesdePackageSource {
} }
/// Reads the config file /// Reads the config file
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> { pub async fn config(&self, project: &Project) -> Result<IndexConfig, errors::ConfigError> {
let repo_url = self.repo_url.clone(); let repo_url = self.repo_url.clone();
let path = self.path(project); let path = self.path(project);
@ -99,10 +101,12 @@ impl PackageSource for PesdePackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -127,7 +131,7 @@ impl PackageSource for PesdePackageSource {
let entries: IndexFile = toml::from_str(&string) let entries: IndexFile = toml::from_str(&string)
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?; .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
log::debug!("{} has {} possible entries", specifier.name, entries.len()); tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok(( Ok((
PackageNames::Pesde(specifier.name.clone()), PackageNames::Pesde(specifier.name.clone()),
@ -155,6 +159,7 @@ impl PackageSource for PesdePackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -171,7 +176,7 @@ impl PackageSource for PesdePackageSource {
match fs::read_to_string(&index_file).await { match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
log::debug!( tracing::debug!(
"using cached index file for package {}@{} {}", "using cached index file for package {}@{} {}",
pkg_ref.name, pkg_ref.name,
pkg_ref.version, pkg_ref.version,
@ -192,7 +197,7 @@ impl PackageSource for PesdePackageSource {
let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream"); let mut request = reqwest.get(&url).header(ACCEPT, "application/octet-stream");
if let Some(token) = project.auth_config.tokens().get(&self.repo_url) { if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
log::debug!("using token for {}", self.repo_url); tracing::debug!("using token for {}", self.repo_url);
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }
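The converted call sites keep `log`-style format strings (for example `tracing::debug!("using token for {}", self.repo_url)`), which is the smallest possible migration. Tracing also accepts structured key-value fields that filters and JSON subscribers can match on; a purely illustrative comparison of the two spellings:

```rust
fn log_token_use(repo_url: &str) {
    // log-style: the URL is baked into the message text.
    tracing::debug!("using token for {}", repo_url);

    // field-style: the URL is a separate `repo_url` field on the event.
    tracing::debug!(repo_url = %repo_url, "using token for registry");
}
```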

View file

@ -11,6 +11,7 @@ use crate::{
Project, LINK_LIB_NO_FILE_FOUND, Project, LINK_LIB_NO_FILE_FOUND,
}; };
use fs_err::tokio as fs; use fs_err::tokio as fs;
use tracing::instrument;
#[derive(Deserialize)] #[derive(Deserialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
@ -19,7 +20,8 @@ struct SourcemapNode {
file_paths: Vec<RelativePathBuf>, file_paths: Vec<RelativePathBuf>,
} }
pub(crate) async fn find_lib_path( #[instrument(skip(project, package_dir), level = "debug")]
async fn find_lib_path(
project: &Project, project: &Project,
package_dir: &Path, package_dir: &Path,
) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> { ) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {
@ -29,7 +31,7 @@ pub(crate) async fn find_lib_path(
.scripts .scripts
.get(&ScriptName::SourcemapGenerator.to_string()) .get(&ScriptName::SourcemapGenerator.to_string())
else { else {
log::warn!("no sourcemap generator script found in manifest"); tracing::warn!("no sourcemap generator script found in manifest");
return Ok(None); return Ok(None);
}; };
@ -55,6 +57,7 @@ pub(crate) async fn find_lib_path(
pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml"; pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";
#[instrument(skip(project, tempdir), level = "debug")]
pub(crate) async fn get_target( pub(crate) async fn get_target(
project: &Project, project: &Project,
tempdir: &TempDir, tempdir: &TempDir,

View file

@ -1,13 +1,13 @@
use std::collections::BTreeMap; use std::collections::BTreeMap;
use semver::{Version, VersionReq};
use serde::{Deserialize, Deserializer};
use crate::{ use crate::{
manifest::{errors, DependencyType}, manifest::{errors, DependencyType},
names::wally::WallyPackageName, names::wally::WallyPackageName,
source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier}, source::{specifiers::DependencySpecifiers, wally::specifier::WallyDependencySpecifier},
}; };
use semver::{Version, VersionReq};
use serde::{Deserialize, Deserializer};
use tracing::instrument;
#[derive(Deserialize, Clone, Debug)] #[derive(Deserialize, Clone, Debug)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
@ -63,6 +63,7 @@ pub struct WallyManifest {
impl WallyManifest { impl WallyManifest {
/// Get all dependencies from the manifest /// Get all dependencies from the manifest
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies( pub fn all_dependencies(
&self, &self,
) -> Result< ) -> Result<

View file

@ -30,6 +30,7 @@ use std::{
use tempfile::tempdir; use tempfile::tempdir;
use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking}; use tokio::{io::AsyncWriteExt, sync::Mutex, task::spawn_blocking};
use tokio_util::compat::FuturesAsyncReadCompatExt; use tokio_util::compat::FuturesAsyncReadCompatExt;
use tracing::instrument;
pub(crate) mod compat_util; pub(crate) mod compat_util;
pub(crate) mod manifest; pub(crate) mod manifest;
@ -68,6 +69,7 @@ impl WallyPackageSource {
} }
/// Reads the config file /// Reads the config file
#[instrument(skip_all, ret(level = "trace"), level = "debug")]
pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> { pub async fn config(&self, project: &Project) -> Result<WallyIndexConfig, errors::ConfigError> {
let repo_url = self.repo_url.clone(); let repo_url = self.repo_url.clone();
let path = self.path(project); let path = self.path(project);
@ -94,10 +96,12 @@ impl PackageSource for WallyPackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
#[instrument(skip_all, level = "debug")]
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await GitBasedSource::refresh(self, project).await
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -111,7 +115,7 @@ impl PackageSource for WallyPackageSource {
let string = match read_file(&tree, [scope, name]) { let string = match read_file(&tree, [scope, name]) {
Ok(Some(s)) => s, Ok(Some(s)) => s,
Ok(None) => { Ok(None) => {
log::debug!( tracing::debug!(
"{} not found in wally registry. searching in backup registries", "{} not found in wally registry. searching in backup registries",
specifier.name specifier.name
); );
@ -134,7 +138,7 @@ impl PackageSource for WallyPackageSource {
.await .await
{ {
Ok((name, results)) => { Ok((name, results)) => {
log::debug!("found {} in backup registry {registry}", name); tracing::debug!("found {} in backup registry {registry}", name);
return Ok((name, results)); return Ok((name, results));
} }
Err(errors::ResolveError::NotFound(_)) => { Err(errors::ResolveError::NotFound(_)) => {
@ -162,7 +166,7 @@ impl PackageSource for WallyPackageSource {
.collect::<Result<_, _>>() .collect::<Result<_, _>>()
.map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?; .map_err(|e| Self::ResolveError::Parse(specifier.name.to_string(), e))?;
log::debug!("{} has {} possible entries", specifier.name, entries.len()); tracing::debug!("{} has {} possible entries", specifier.name, entries.len());
Ok(( Ok((
PackageNames::Wally(specifier.name.clone()), PackageNames::Wally(specifier.name.clone()),
@ -192,6 +196,7 @@ impl PackageSource for WallyPackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
@ -207,7 +212,7 @@ impl PackageSource for WallyPackageSource {
let tempdir = match fs::read_to_string(&index_file).await { let tempdir = match fs::read_to_string(&index_file).await {
Ok(s) => { Ok(s) => {
log::debug!( tracing::debug!(
"using cached index file for package {}@{}", "using cached index file for package {}@{}",
pkg_ref.name, pkg_ref.name,
pkg_ref.version pkg_ref.version
@ -240,7 +245,7 @@ impl PackageSource for WallyPackageSource {
); );
if let Some(token) = project.auth_config.tokens().get(&self.repo_url) { if let Some(token) = project.auth_config.tokens().get(&self.repo_url) {
log::debug!("using token for {}", self.repo_url); tracing::debug!("using token for {}", self.repo_url);
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }

View file

@ -13,6 +13,7 @@ use relative_path::RelativePathBuf;
use reqwest::Client; use reqwest::Client;
use std::collections::{BTreeMap, HashSet}; use std::collections::{BTreeMap, HashSet};
use tokio::pin; use tokio::pin;
use tracing::instrument;
/// The workspace package reference /// The workspace package reference
pub mod pkg_ref; pub mod pkg_ref;
@ -35,6 +36,7 @@ impl PackageSource for WorkspacePackageSource {
Ok(()) Ok(())
} }
#[instrument(skip_all, level = "debug")]
async fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
@ -126,6 +128,7 @@ impl PackageSource for WorkspacePackageSource {
)) ))
} }
#[instrument(skip_all, level = "debug")]
async fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
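None of these hunks install a subscriber; spans and events are silent until some binary does. Presumably the CLI handles that elsewhere in the commit. A minimal sketch of a typical setup, assuming `tracing-subscriber` with its `env-filter` feature (nothing here is taken from the commit itself):

```rust
use tracing_subscriber::EnvFilter;

fn main() {
    // Honor RUST_LOG-style directives, defaulting to `info` when unset.
    let filter = EnvFilter::try_from_default_env().unwrap_or_else(|_| EnvFilter::new("info"));

    tracing_subscriber::fmt()
        .with_env_filter(filter)
        .init();

    tracing::info!("subscriber installed; debug/trace spans appear per the filter");
}
```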

View file

@ -19,7 +19,7 @@ impl DependencySpecifier for WorkspaceDependencySpecifier {}
impl Display for WorkspaceDependencySpecifier { impl Display for WorkspaceDependencySpecifier {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "workspace:{}{}", self.version, self.name) write!(f, "{}@workspace:{}", self.name, self.version)
} }
} }
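This last hunk is unrelated to logging: the `Display` impl for workspace specifiers previously printed `workspace:` followed by the version and name run together, and now prints the conventional `name@workspace:version` form. A quick illustration of the before/after output with made-up values:

```rust
// Before: format!("workspace:{}{}", "^1.2.0", "acme/lib") == "workspace:^1.2.0acme/lib"
// After:  format!("{}@workspace:{}", "acme/lib", "^1.2.0") == "acme/lib@workspace:^1.2.0"
fn main() {
    let (name, version) = ("acme/lib", "^1.2.0");
    assert_eq!(format!("{}@workspace:{}", name, version), "acme/lib@workspace:^1.2.0");
}
```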