feat: begin switch to async

Author: daimond113
Date: 2024-11-05 20:44:24 +01:00
Commit: 2b0f29a2f9 (parent: 37072eda24)
GPG key ID: 3A8ECE51328B513C (no known key found for this signature in database)
50 changed files with 1259 additions and 1044 deletions
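
This commit begins migrating pesde from blocking I/O (std::fs, tar/flate2, reqwest::blocking, std::sync::Mutex, threadpool) to the tokio stack (fs-err/tokio, tokio-tar, async-compression, tokio::sync::Mutex, futures). A minimal sketch of the recurring pattern, not code from this commit:

```rust
// Illustrative only: blocking primitives become their tokio equivalents
// and every caller gains .await.
use tokio::sync::Mutex; // replaces std::sync::Mutex for state touched across awaits

struct Source;

impl Source {
    async fn refresh(&self) {} // previously a blocking method
}

#[tokio::main]
async fn main() {
    let state = Mutex::new(Source);
    // before: state.lock().unwrap().refresh();
    let source = state.lock().await; // suspends the task instead of blocking the thread
    source.refresh().await;
}
```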

Cargo.lock (generated): 356 lines changed

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
version = 3
version = 4
[[package]]
name = "actix-codec"
@ -389,15 +389,6 @@ version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
[[package]]
name = "arbitrary"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"
dependencies = [
"derive_arbitrary",
]
[[package]]
name = "arc-swap"
version = "1.7.1"
@ -428,6 +419,45 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "async-compression"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857"
dependencies = [
"deflate64",
"flate2",
"futures-core",
"futures-io",
"memchr",
"pin-project-lite",
"tokio",
]
[[package]]
name = "async-executor"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7"
dependencies = [
"async-task",
"concurrent-queue",
"fastrand",
"futures-lite",
"slab",
]
[[package]]
name = "async-fs"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a"
dependencies = [
"async-lock",
"blocking",
"futures-lite",
]
[[package]]
name = "async-io"
version = "2.3.4"
@ -506,6 +536,28 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "async-task"
version = "4.7.1"
@ -523,6 +575,21 @@ dependencies = [
"syn 2.0.85",
]
[[package]]
name = "async_zip"
version = "0.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52"
dependencies = [
"async-compression",
"crc32fast",
"futures-lite",
"pin-project",
"thiserror",
"tokio",
"tokio-util",
]
[[package]]
name = "atomic-waker"
version = "1.1.2"
@ -688,27 +755,6 @@ dependencies = [
"bytes",
]
[[package]]
name = "bzip2"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8"
dependencies = [
"bzip2-sys",
"libc",
]
[[package]]
name = "bzip2-sys"
version = "0.1.11+1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
dependencies = [
"cc",
"libc",
"pkg-config",
]
[[package]]
name = "cbc"
version = "0.1.2"
@ -923,21 +969,6 @@ dependencies = [
"libc",
]
[[package]]
name = "crc"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]]
name = "crc32fast"
version = "1.4.2"
@ -1125,17 +1156,6 @@ dependencies = [
"serde",
]
[[package]]
name = "derive_arbitrary"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "derive_more"
version = "0.99.18"
@ -1202,17 +1222,6 @@ dependencies = [
"windows-sys 0.48.0",
]
[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "dotenvy"
version = "0.15.7"
@ -1433,6 +1442,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bb60e7409f34ef959985bc9d9c5ee8f5db24ee46ed9775850548021710f807f"
dependencies = [
"autocfg",
"tokio",
]
[[package]]
@ -1867,12 +1877,14 @@ checksum = "8e0eb9efdf96c35c0bed7596d1bef2d4ce6360a1d09738001f9d3e402aa7ba3e"
dependencies = [
"bytes",
"crc32fast",
"crossbeam-channel",
"flate2",
"gix-hash",
"gix-trace",
"gix-utils",
"libc",
"once_cell",
"parking_lot",
"prodash",
"sha1_smol",
"thiserror",
@ -2936,6 +2948,7 @@ dependencies = [
"security-framework 2.11.1",
"security-framework 3.0.0",
"windows-sys 0.59.0",
"zbus",
]
[[package]]
@ -3008,7 +3021,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [
"bitflags 2.6.0",
"libc",
"redox_syscall",
"redox_syscall 0.5.7",
]
[[package]]
@ -3070,12 +3083,6 @@ dependencies = [
"scopeguard",
]
[[package]]
name = "lockfree-object-pool"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e"
[[package]]
name = "log"
version = "0.4.22"
@ -3097,16 +3104,6 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
[[package]]
name = "lzma-rs"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
dependencies = [
"byteorder",
"crc",
]
[[package]]
name = "maybe-async"
version = "0.2.10"
@ -3517,7 +3514,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [
"cfg-if",
"libc",
"redox_syscall",
"redox_syscall 0.5.7",
"smallvec",
"windows-targets 0.52.6",
]
@ -3540,16 +3537,6 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c5ce1153ab5b689d0c074c4e7fc613e942dfb7dd9eea5ab202d2ad91fe361"
[[package]]
name = "pbkdf2"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
dependencies = [
"digest",
"hmac",
]
[[package]]
name = "percent-encoding"
version = "2.3.1"
@ -3561,13 +3548,16 @@ name = "pesde"
version = "0.5.0-rc.7"
dependencies = [
"anyhow",
"async-compression",
"async-stream",
"async_zip",
"chrono",
"clap",
"colored",
"dirs",
"flate2",
"fs-err",
"full_moon",
"futures",
"git2",
"gix",
"glob",
@ -3586,15 +3576,15 @@ dependencies = [
"serde_json",
"serde_with",
"sha2",
"tar",
"tempfile",
"thiserror",
"threadpool",
"tokio",
"tokio-tar",
"tokio-util",
"toml",
"toml_edit",
"url",
"winreg",
"zip",
]
[[package]]
@ -3605,11 +3595,11 @@ dependencies = [
"actix-governor",
"actix-multipart",
"actix-web",
"async-compression",
"chrono",
"constant_time_eq",
"convert_case 0.6.0",
"dotenvy",
"flate2",
"fs-err",
"futures",
"git2",
@ -3628,13 +3618,34 @@ dependencies = [
"serde_yaml",
"sha2",
"tantivy",
"tar",
"tempfile",
"thiserror",
"tokio",
"tokio-tar",
"toml",
"url",
]
[[package]]
name = "pin-project"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "pin-project-lite"
version = "0.2.14"
@ -3889,6 +3900,15 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "redox_syscall"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
]
[[package]]
name = "redox_syscall"
version = "0.5.7"
@ -4536,12 +4556,6 @@ dependencies = [
"libc",
]
[[package]]
name = "simd-adler32"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
[[package]]
name = "sketches-ddsketch"
version = "0.2.2"
@ -4818,17 +4832,6 @@ dependencies = [
"serde",
]
[[package]]
name = "tar"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ff6c40d3aedb5e06b57c6f669ad17ab063dd1e63d977c6a88e7f4dfa4f04020"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]]
name = "tempfile"
version = "3.13.0"
@ -4881,15 +4884,6 @@ dependencies = [
"once_cell",
]
[[package]]
name = "threadpool"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
[[package]]
name = "time"
version = "0.3.36"
@ -4938,9 +4932,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.40.0"
version = "1.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998"
checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb"
dependencies = [
"backtrace",
"bytes",
@ -4950,9 +4944,21 @@ dependencies = [
"pin-project-lite",
"signal-hook-registry",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
]
[[package]]
name = "tokio-macros"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"
@ -4974,6 +4980,32 @@ dependencies = [
"tokio",
]
[[package]]
name = "tokio-stream"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-tar"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5714c010ca3e5c27114c1cdeb9d14641ace49874aa5626d7149e47aedace75"
dependencies = [
"filetime",
"futures-core",
"libc",
"redox_syscall 0.3.5",
"tokio",
"tokio-stream",
"xattr",
]
[[package]]
name = "tokio-util"
version = "0.7.12"
@ -4982,6 +5014,7 @@ checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a"
dependencies = [
"bytes",
"futures-core",
"futures-io",
"futures-sink",
"pin-project-lite",
"tokio",
@ -5618,9 +5651,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb97012beadd29e654708a0fdb4c84bc046f537aecfde2c3ee0a9e4b4d48c725"
dependencies = [
"async-broadcast",
"async-executor",
"async-fs",
"async-io",
"async-lock",
"async-process",
"async-recursion",
"async-task",
"async-trait",
"blocking",
"enumflags2",
"event-listener",
"futures-core",
@ -5693,63 +5732,6 @@ name = "zeroize"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
dependencies = [
"zeroize_derive",
]
[[package]]
name = "zeroize_derive"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "zip"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494"
dependencies = [
"aes",
"arbitrary",
"bzip2",
"constant_time_eq",
"crc32fast",
"crossbeam-utils",
"deflate64",
"displaydoc",
"flate2",
"hmac",
"indexmap 2.6.0",
"lzma-rs",
"memchr",
"pbkdf2",
"rand",
"sha1",
"thiserror",
"time",
"zeroize",
"zopfli",
"zstd",
]
[[package]]
name = "zopfli"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946"
dependencies = [
"bumpalo",
"crc32fast",
"lockfree-object-pool",
"log",
"once_cell",
"simd-adler32",
]
[[package]]
name = "zstd"


@ -27,9 +27,12 @@ bin = [
"gix/worktree-mutation",
"serde_json",
"winreg",
"fs-err/expose_original_error"
"fs-err/expose_original_error",
"tokio/rt",
"tokio/rt-multi-thread",
"tokio/macros",
]
wally-compat = ["zip", "serde_json"]
wally-compat = ["async_zip", "serde_json"]
patches = ["git2"]
version-management = ["bin"]
@ -45,33 +48,36 @@ uninlined_format_args = "warn"
serde = { version = "1.0.213", features = ["derive"] }
toml = "0.8.19"
serde_with = "3.11.0"
gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials"] }
gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.23", features = ["serde"] }
reqwest = { version = "0.12.8", default-features = false, features = ["rustls-tls", "blocking"] }
tar = "0.4.42"
flate2 = "1.0.34"
reqwest = { version = "0.12.8", default-features = false, features = ["rustls-tls"] }
tokio-tar = "0.3.1"
async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
pathdiff = "0.2.2"
relative-path = { version = "1.9.3", features = ["serde"] }
log = "0.4.22"
thiserror = "1.0.65"
threadpool = "1.8.1"
tokio = "1.41.0"
tokio-util = "0.7.12"
async-stream = "0.3.6"
futures = "0.3.31"
full_moon = { version = "1.1.0", features = ["luau"] }
url = { version = "2.5.2", features = ["serde"] }
chrono = { version = "0.4.38", features = ["serde"] }
sha2 = "0.10.8"
tempfile = "3.13.0"
glob = "0.3.1"
fs-err = "3.0.0"
fs-err = { version = "3.0.0", features = ["tokio"] }
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true }
zip = { version = "2.2.0", optional = true }
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
serde_json = { version = "1.0.132", optional = true }
anyhow = { version = "1.0.91", optional = true }
open = { version = "5.3.0", optional = true }
keyring = { version = "3.5.0", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
keyring = { version = "3.5.0", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
colored = { version = "2.1.0", optional = true }
toml_edit = { version = "0.22.22", optional = true }
clap = { version = "4.5.20", features = ["derive"], optional = true }


@ -17,8 +17,9 @@ semver = "1.0.23"
chrono = { version = "0.4.38", features = ["serde"] }
url = "2.5.2"
futures = "0.3.31"
tokio = "1.41.0"
tempfile = "3.13.0"
fs-err = "3.0.0"
fs-err = { version = "3.0.0", features = ["tokio"] }
git2 = "0.19.0"
gix = { version = "0.67.0", default-features = false, features = [
@ -37,8 +38,8 @@ rusty-s3 = "0.5.0"
reqwest = { version = "0.12.8", features = ["json", "rustls-tls"] }
constant_time_eq = "0.3.1"
tar = "0.4.42"
flate2 = "1.0.34"
tokio-tar = "0.3.1"
async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
log = "0.4.22"
pretty_env_logger = "0.5.0"


@ -72,7 +72,7 @@ pub async fn get_package_version(
let (scope, name_part) = name.as_str();
let entries: IndexFile = {
let source = app_state.source.lock().unwrap();
let source = app_state.source.lock().await;
match source.read_file([scope, name_part], &app_state.project, None)? {
Some(versions) => toml::de::from_str(&versions)?,


@ -17,7 +17,7 @@ pub async fn get_package_versions(
let (scope, name_part) = name.as_str();
let source = app_state.source.lock().unwrap();
let source = app_state.source.lock().await;
let versions: IndexFile =
match source.read_file([scope, name_part], &app_state.project, None)? {
Some(versions) => toml::de::from_str(&versions)?,
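
Both endpoints switch the shared package source from std::sync::Mutex to tokio::sync::Mutex: an async lock can be held across .await points inside a handler without blocking an executor thread. A self-contained sketch of the pattern, with stand-in types:

```rust
// Sketch only: `String` stands in for the real PesdePackageSource.
use actix_web::{web, App, HttpResponse, HttpServer, Responder};
use tokio::sync::Mutex;

struct AppState {
    source: Mutex<String>,
}

async fn get_package(state: web::Data<AppState>) -> impl Responder {
    // .lock().await yields to the executor; the guard may live across awaits.
    let source = state.source.lock().await;
    HttpResponse::Ok().body(source.clone())
}

#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let state = web::Data::new(AppState { source: Mutex::new("index".into()) });
    HttpServer::new(move || {
        App::new()
            .app_data(state.clone())
            .route("/package", web::get().to(get_package))
    })
    .bind(("127.0.0.1", 8080))?
    .run()
    .await
}
```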


@ -1,17 +1,3 @@
use actix_multipart::Multipart;
use actix_web::{web, HttpResponse, Responder};
use convert_case::{Case, Casing};
use flate2::read::GzDecoder;
use futures::{future::join_all, join, StreamExt};
use git2::{Remote, Repository, Signature};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
io::{Cursor, Read, Write},
};
use tar::Archive;
use crate::{
auth::UserId,
benv,
@ -20,6 +6,13 @@ use crate::{
storage::StorageImpl,
AppState,
};
use actix_multipart::Multipart;
use actix_web::{web, HttpResponse, Responder};
use async_compression::Level;
use convert_case::{Case, Casing};
use fs_err::tokio as fs;
use futures::{future::join_all, join, StreamExt};
use git2::{Remote, Repository, Signature};
use pesde::{
manifest::Manifest,
source::{
@ -31,6 +24,13 @@ use pesde::{
},
MANIFEST_FILE_NAME,
};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
io::{Cursor, Write},
};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
fn signature<'a>() -> Signature<'a> {
Signature::now(
@ -71,18 +71,16 @@ pub async fn publish_package(
mut body: Multipart,
user_id: web::ReqData<UserId>,
) -> Result<impl Responder, Error> {
let max_archive_size = {
let source = app_state.source.lock().unwrap();
source.refresh(&app_state.project).map_err(Box::new)?;
source.config(&app_state.project)?.max_archive_size
};
let source = app_state.source.lock().await;
source.refresh(&app_state.project).await.map_err(Box::new)?;
let config = source.config(&app_state.project)?;
let bytes = body
.next()
.await
.ok_or(Error::InvalidArchive)?
.map_err(|_| Error::InvalidArchive)?
.bytes(max_archive_size)
.bytes(config.max_archive_size)
.await
.map_err(|_| Error::InvalidArchive)?
.map_err(|_| Error::InvalidArchive)?;
@ -90,10 +88,10 @@ pub async fn publish_package(
let package_dir = tempfile::tempdir()?;
{
let mut decoder = GzDecoder::new(Cursor::new(&bytes));
let mut archive = Archive::new(&mut decoder);
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(Cursor::new(&bytes));
let mut archive = tokio_tar::Archive::new(&mut decoder);
archive.unpack(package_dir.path())?;
archive.unpack(package_dir.path()).await?;
}
let mut manifest = None::<Manifest>;
@ -101,15 +99,15 @@ pub async fn publish_package(
let mut docs = BTreeSet::new();
let mut docs_pages = HashMap::new();
for entry in fs_err::read_dir(package_dir.path())? {
let entry = entry?;
let mut read_dir = fs::read_dir(package_dir.path()).await?;
while let Some(entry) = read_dir.next_entry().await? {
let file_name = entry
.file_name()
.to_str()
.ok_or(Error::InvalidArchive)?
.to_string();
if entry.file_type()?.is_dir() {
if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_str()) {
return Err(Error::InvalidArchive);
}
@ -117,23 +115,22 @@ pub async fn publish_package(
if file_name == "docs" {
let mut stack = vec![(
BTreeSet::new(),
fs_err::read_dir(entry.path())?,
fs::read_dir(entry.path()).await?,
None::<DocEntryInfo>,
)];
'outer: while let Some((set, iter, category_info)) = stack.last_mut() {
for entry in iter {
let entry = entry?;
while let Some(entry) = iter.next_entry().await? {
let file_name = entry
.file_name()
.to_str()
.ok_or(Error::InvalidArchive)?
.to_string();
if entry.file_type()?.is_dir() {
if entry.file_type().await?.is_dir() {
stack.push((
BTreeSet::new(),
fs_err::read_dir(entry.path())?,
fs::read_dir(entry.path()).await?,
Some(DocEntryInfo {
label: Some(file_name.to_case(Case::Title)),
..Default::default()
@ -143,7 +140,7 @@ pub async fn publish_package(
}
if file_name == "_category_.json" {
let info = fs_err::read_to_string(entry.path())?;
let info = fs::read_to_string(entry.path()).await?;
let mut info: DocEntryInfo = serde_json::from_str(&info)?;
let old_info = category_info.take();
info.label = info.label.or(old_info.and_then(|i| i.label));
@ -155,16 +152,16 @@ pub async fn publish_package(
continue;
};
let content = fs_err::read_to_string(entry.path())?;
let content = fs::read_to_string(entry.path()).await?;
let content = content.trim();
let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
let mut gz = flate2::read::GzEncoder::new(
let mut gz = async_compression::tokio::bufread::GzipEncoder::with_quality(
Cursor::new(content.as_bytes().to_vec()),
flate2::Compression::best(),
Level::Best,
);
let mut bytes = vec![];
gz.read_to_end(&mut bytes)?;
gz.read_to_end(&mut bytes).await?;
docs_pages.insert(hash.to_string(), bytes);
let mut lines = content.lines().peekable();
@ -245,7 +242,7 @@ pub async fn publish_package(
}
if file_name == MANIFEST_FILE_NAME {
let content = fs_err::read_to_string(entry.path())?;
let content = fs::read_to_string(entry.path()).await?;
manifest = Some(toml::de::from_str(&content)?);
} else if file_name
@ -258,12 +255,12 @@ pub async fn publish_package(
return Err(Error::InvalidArchive);
}
let file = fs_err::File::open(entry.path())?;
let mut file = fs::File::open(entry.path()).await?;
let mut gz = flate2::read::GzEncoder::new(file, flate2::Compression::best());
let mut bytes = vec![];
gz.read_to_end(&mut bytes)?;
readme = Some(bytes);
let mut gz = async_compression::tokio::write::GzipEncoder::new(vec![]);
tokio::io::copy(&mut file, &mut gz).await?;
gz.shutdown().await?;
readme = Some(gz.into_inner());
}
}
@ -272,10 +269,6 @@ pub async fn publish_package(
};
{
let source = app_state.source.lock().unwrap();
source.refresh(&app_state.project).map_err(Box::new)?;
let config = source.config(&app_state.project)?;
let dependencies = manifest
.all_dependencies()
.map_err(|_| Error::InvalidArchive)?;
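
The upload is now unpacked without blocking the executor: the gzip stream is decoded with async-compression and fed into tokio-tar, and the docs/readme re-compression goes through the async GzipEncoder. A condensed, runnable version of the decode-and-unpack path (file names hypothetical):

```rust
// Sketch of the async unpack path used above; error handling reduced to anyhow.
use async_compression::tokio::bufread::GzipDecoder;
use std::io::Cursor;

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let bytes: Vec<u8> = std::fs::read("package.tar.gz")?; // hypothetical input
    let decoder = GzipDecoder::new(Cursor::new(&bytes));
    let mut archive = tokio_tar::Archive::new(decoder);
    archive.unpack("unpacked").await?; // fully async extraction
    Ok(())
}
```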


@ -59,7 +59,7 @@ pub async fn search_packages(
)
.unwrap();
let source = app_state.source.lock().unwrap();
let source = app_state.source.lock().await;
let top_docs = top_docs
.into_iter()


@ -5,13 +5,13 @@ use actix_web::{
rt::System,
web, App, HttpServer,
};
use fs_err::tokio as fs;
use log::info;
use std::{env::current_dir, path::PathBuf, sync::Mutex};
use pesde::{
source::{pesde::PesdePackageSource, traits::PackageSource},
AuthConfig, Project,
};
use std::{env::current_dir, path::PathBuf};
use crate::{
auth::{get_auth_from_env, Auth, UserIdExtractor},
@ -38,13 +38,13 @@ pub fn make_reqwest() -> reqwest::Client {
}
pub struct AppState {
pub source: Mutex<PesdePackageSource>,
pub source: tokio::sync::Mutex<PesdePackageSource>,
pub project: Project,
pub storage: Storage,
pub auth: Auth,
pub search_reader: tantivy::IndexReader,
pub search_writer: Mutex<tantivy::IndexWriter>,
pub search_writer: std::sync::Mutex<tantivy::IndexWriter>,
}
#[macro_export]
@ -87,7 +87,7 @@ async fn run() -> std::io::Result<()> {
let cwd = current_dir().unwrap();
let data_dir =
PathBuf::from(benv!("DATA_DIR" => "{CWD}/data").replace("{CWD}", cwd.to_str().unwrap()));
fs_err::create_dir_all(&data_dir).unwrap();
fs::create_dir_all(&data_dir).await.unwrap();
let project = Project::new(
&cwd,
@ -100,7 +100,10 @@ async fn run() -> std::io::Result<()> {
})),
);
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
source.refresh(&project).expect("failed to refresh source");
source
.refresh(&project)
.await
.expect("failed to refresh source");
let (search_reader, search_writer) = make_search(&project, &source);
@ -116,11 +119,11 @@ async fn run() -> std::io::Result<()> {
info!("auth: {auth}");
auth
},
source: Mutex::new(source),
source: tokio::sync::Mutex::new(source),
project,
search_reader,
search_writer: Mutex::new(search_writer),
search_writer: std::sync::Mutex::new(search_writer),
});
let publish_governor_config = GovernorConfigBuilder::default()
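
AppState deliberately ends up with two kinds of locks: the package source moves to tokio::sync::Mutex because it is locked across .await points, while the tantivy IndexWriter stays behind std::sync::Mutex since it is only ever used synchronously. A sketch of the split, with stand-in types:

```rust
// Stand-in types; the real fields hold PesdePackageSource and tantivy::IndexWriter.
struct AppState {
    source: tokio::sync::Mutex<String>,       // locked with .lock().await
    search_writer: std::sync::Mutex<Vec<u8>>, // locked with .lock().unwrap()
}

#[tokio::main]
async fn main() {
    let state = AppState {
        source: tokio::sync::Mutex::new("index".into()),
        search_writer: std::sync::Mutex::new(Vec::new()),
    };
    // async path: the guard may be held across an await
    let src = state.source.lock().await;
    drop(src);
    // sync path: the guard is never held across an await
    state.search_writer.lock().unwrap().push(1);
}
```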


@ -3,6 +3,7 @@ use actix_web::{
http::header::{CONTENT_ENCODING, CONTENT_TYPE},
HttpResponse,
};
use fs_err::tokio as fs;
use pesde::{names::PackageName, source::version_id::VersionId};
use std::{
fmt::Display,
@ -14,8 +15,8 @@ pub struct FSStorage {
pub root: PathBuf,
}
fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
Ok(match fs_err::read(path) {
async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
Ok(match fs::read(path).await {
Ok(contents) => HttpResponse::Ok()
.append_header((CONTENT_TYPE, content_type))
.append_header((CONTENT_ENCODING, "gzip"))
@ -40,9 +41,9 @@ impl StorageImpl for FSStorage {
.join(name)
.join(version.version().to_string())
.join(version.target().to_string());
fs_err::create_dir_all(&path)?;
fs::create_dir_all(&path).await?;
fs_err::write(path.join("pkg.tar.gz"), &contents)?;
fs::write(path.join("pkg.tar.gz"), &contents).await?;
Ok(())
}
@ -61,7 +62,7 @@ impl StorageImpl for FSStorage {
.join(version.version().to_string())
.join(version.target().to_string());
read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip")
read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip").await
}
async fn store_readme(
@ -78,9 +79,9 @@ impl StorageImpl for FSStorage {
.join(name)
.join(version.version().to_string())
.join(version.target().to_string());
fs_err::create_dir_all(&path)?;
fs::create_dir_all(&path).await?;
fs_err::write(path.join("readme.gz"), &contents)?;
fs::write(path.join("readme.gz"), &contents).await?;
Ok(())
}
@ -99,14 +100,14 @@ impl StorageImpl for FSStorage {
.join(version.version().to_string())
.join(version.target().to_string());
read_file_to_response(&path.join("readme.gz"), "text/plain")
read_file_to_response(&path.join("readme.gz"), "text/plain").await
}
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
let path = self.root.join("Doc");
fs_err::create_dir_all(&path)?;
fs::create_dir_all(&path).await?;
fs_err::write(path.join(format!("{doc_hash}.gz")), &contents)?;
fs::write(path.join(format!("{doc_hash}.gz")), &contents).await?;
Ok(())
}
@ -114,7 +115,7 @@ impl StorageImpl for FSStorage {
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
let path = self.root.join("Doc");
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain")
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await
}
}
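
All storage I/O now goes through `fs_err::tokio` (aliased to `fs`), which mirrors the tokio::fs API but attaches the offending path to errors. A small runnable sketch, with hypothetical paths:

```rust
// fs_err::tokio mirrors tokio::fs; errors name the path that failed.
use fs_err::tokio as fs;

#[tokio::main]
async fn main() -> std::io::Result<()> {
    fs::create_dir_all("data/docs").await?;          // hypothetical layout
    fs::write("data/docs/readme.gz", b"\x1f\x8b").await?;
    let bytes = fs::read("data/docs/readme.gz").await?;
    assert_eq!(bytes.len(), 2);
    Ok(())
}
```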


@ -37,8 +37,8 @@ impl<'de> Deserialize<'de> for Tokens {
}
}
pub fn get_tokens() -> anyhow::Result<Tokens> {
let config = read_config()?;
pub async fn get_tokens() -> anyhow::Result<Tokens> {
let config = read_config().await?;
if !config.tokens.0.is_empty() {
return Ok(config.tokens);
}
@ -56,7 +56,7 @@ pub fn get_tokens() -> anyhow::Result<Tokens> {
Ok(Tokens(BTreeMap::new()))
}
pub fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
@ -66,19 +66,19 @@ pub fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
Err(e) => return Err(e.into()),
}
let mut config = read_config()?;
let mut config = read_config().await?;
config.tokens = tokens;
write_config(&config).map_err(Into::into)
write_config(&config).await.map_err(Into::into)
}
pub fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
let mut tokens = get_tokens()?;
pub async fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
let mut tokens = get_tokens().await?;
if let Some(token) = token {
tokens.0.insert(repo.clone(), token.to_string());
} else {
tokens.0.remove(repo);
}
set_tokens(tokens)
set_tokens(tokens).await
}
#[derive(Debug, Deserialize)]
@ -86,18 +86,20 @@ struct UserResponse {
login: String,
}
pub fn get_token_login(
reqwest: &reqwest::blocking::Client,
pub async fn get_token_login(
reqwest: &reqwest::Client,
access_token: &str,
) -> anyhow::Result<String> {
let response = reqwest
.get("https://api.github.com/user")
.header(AUTHORIZATION, access_token)
.send()
.await
.context("failed to send user request")?
.error_for_status()
.context("failed to get user")?
.json::<UserResponse>()
.await
.context("failed to parse user response")?;
Ok(response.login)
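
get_token_login moves from reqwest::blocking::Client to the async reqwest::Client; send() and json()/text() each gain .await. A sketch of the same request shape (token value hypothetical; a User-Agent header is added here because the GitHub API requires one):

```rust
use reqwest::header::AUTHORIZATION;

#[tokio::main]
async fn main() -> reqwest::Result<()> {
    let client = reqwest::Client::new(); // was reqwest::blocking::Client
    let response = client
        .get("https://api.github.com/user")
        .header(AUTHORIZATION, "Bearer <token>") // hypothetical token
        .header("User-Agent", "pesde-example")
        .send()
        .await? // formerly a synchronous call
        .error_for_status()?;
    println!("status: {}", response.status());
    Ok(())
}
```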


@ -47,9 +47,10 @@ pub struct AddCommand {
}
impl AddCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let (source, specifier) = match &self.name {
@ -65,7 +66,10 @@ impl AddCommand {
return Ok(());
}
let index = index.unwrap_or(read_config()?.default_index);
let index = match index {
Some(index) => index,
None => read_config().await?.default_index,
};
let source = PackageSources::Pesde(PesdePackageSource::new(index));
let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
@ -89,7 +93,7 @@ impl AddCommand {
return Ok(());
}
let index = index.unwrap_or(read_config()?.default_index);
let index = index.context("no wally index found")?;
let source =
PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index));
@ -125,10 +129,12 @@ impl AddCommand {
};
source
.refresh(&project)
.await
.context("failed to refresh package source")?;
let Some(version_id) = source
.resolve(&specifier, &project, manifest.target.kind())
.await
.context("failed to resolve package")?
.1
.pop_last()
@ -141,7 +147,10 @@ impl AddCommand {
let project_target = manifest.target.kind();
let mut manifest = toml_edit::DocumentMut::from_str(
&project.read_manifest().context("failed to read manifest")?,
&project
.read_manifest()
.await
.context("failed to read manifest")?,
)
.context("failed to parse manifest")?;
let dependency_key = if self.peer {
@ -227,6 +236,7 @@ impl AddCommand {
project
.write_manifest(manifest.to_string())
.await
.context("failed to write manifest")?;
Ok(())


@ -3,6 +3,7 @@ use clap::Args;
use colored::Colorize;
use serde::Deserialize;
use std::thread::spawn;
use tokio::time::sleep;
use url::Url;
use pesde::{
@ -46,16 +47,19 @@ enum AccessTokenResponse {
}
impl LoginCommand {
pub fn authenticate_device_flow(
pub async fn authenticate_device_flow(
&self,
index_url: &gix::Url,
project: &Project,
reqwest: &reqwest::blocking::Client,
reqwest: &reqwest::Client,
) -> anyhow::Result<String> {
println!("logging in into {index_url}");
let source = PesdePackageSource::new(index_url.clone());
source.refresh(project).context("failed to refresh index")?;
source
.refresh(project)
.await
.context("failed to refresh index")?;
let config = source
.config(project)
@ -70,10 +74,12 @@ impl LoginCommand {
&[("client_id", &client_id)],
)?)
.send()
.await
.context("failed to send device code request")?
.error_for_status()
.context("failed to get device code response")?
.json::<DeviceCodeResponse>()
.await
.context("failed to parse device code response")?;
println!(
@ -102,7 +108,7 @@ impl LoginCommand {
let mut interval = std::time::Duration::from_secs(response.interval);
while time_left > 0 {
std::thread::sleep(interval);
sleep(interval).await;
time_left = time_left.saturating_sub(interval.as_secs());
let response = reqwest
@ -118,10 +124,12 @@ impl LoginCommand {
],
)?)
.send()
.await
.context("failed to send access token request")?
.error_for_status()
.context("failed to get access token response")?
.json::<AccessTokenResponse>()
.await
.context("failed to parse access token response")?;
match response {
@ -149,16 +157,19 @@ impl LoginCommand {
anyhow::bail!("code expired, please re-run the login command");
}
pub fn run(
pub async fn run(
self,
index_url: gix::Url,
project: Project,
reqwest: reqwest::blocking::Client,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let token_given = self.token.is_some();
let token = match self.token {
Some(token) => token,
None => self.authenticate_device_flow(&index_url, &project, &reqwest)?,
None => {
self.authenticate_device_flow(&index_url, &project, &reqwest)
.await?
}
};
let token = if token_given {
@ -168,13 +179,13 @@ impl LoginCommand {
let token = format!("Bearer {token}");
println!(
"logged in as {} for {index_url}",
get_token_login(&reqwest, &token)?.bold()
get_token_login(&reqwest, &token).await?.bold()
);
token
};
set_token(&index_url, Some(&token))?;
set_token(&index_url, Some(&token)).await?;
Ok(())
}
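
The device-flow polling loop swaps std::thread::sleep for tokio::time::sleep, so the wait suspends only this task rather than stalling an executor thread. The loop shape in isolation:

```rust
use std::time::Duration;
use tokio::time::sleep;

#[tokio::main]
async fn main() {
    let interval = Duration::from_secs(1);
    let mut time_left: u64 = 3;
    while time_left > 0 {
        sleep(interval).await; // non-blocking wait between poll attempts
        time_left = time_left.saturating_sub(interval.as_secs());
        println!("polling... {time_left}s left");
    }
}
```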


@ -5,8 +5,8 @@ use clap::Args;
pub struct LogoutCommand {}
impl LogoutCommand {
pub fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
set_token(&index_url, None)?;
pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
set_token(&index_url, None).await?;
println!("logged out of {index_url}");


@ -28,8 +28,8 @@ pub enum AuthCommands {
}
impl AuthSubcommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
let manifest = match project.deser_manifest() {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let manifest = match project.deser_manifest().await {
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
@ -44,7 +44,7 @@ impl AuthSubcommand {
},
None => match manifest {
Some(_) => None,
None => Some(read_config()?.default_index),
None => Some(read_config().await?.default_index),
},
};
@ -61,9 +61,9 @@ impl AuthSubcommand {
};
match self.command {
AuthCommands::Login(login) => login.run(index_url, project, reqwest),
AuthCommands::Logout(logout) => logout.run(index_url),
AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest),
AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,
AuthCommands::Logout(logout) => logout.run(index_url).await,
AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest).await,
}
}
}

View file

@ -6,12 +6,8 @@ use colored::Colorize;
pub struct WhoAmICommand {}
impl WhoAmICommand {
pub fn run(
self,
index_url: gix::Url,
reqwest: reqwest::blocking::Client,
) -> anyhow::Result<()> {
let tokens = get_tokens()?;
pub async fn run(self, index_url: gix::Url, reqwest: reqwest::Client) -> anyhow::Result<()> {
let tokens = get_tokens().await?;
let token = match tokens.0.get(&index_url) {
Some(token) => token,
None => {
@ -22,7 +18,7 @@ impl WhoAmICommand {
println!(
"logged in as {} into {index_url}",
get_token_login(&reqwest, token)?.bold()
get_token_login(&reqwest, token).await?.bold()
);
Ok(())


@ -13,8 +13,8 @@ pub struct DefaultIndexCommand {
}
impl DefaultIndexCommand {
pub fn run(self) -> anyhow::Result<()> {
let mut config = read_config()?;
pub async fn run(self) -> anyhow::Result<()> {
let mut config = read_config().await?;
let index = if self.reset {
Some(CliConfig::default().default_index)
@ -25,7 +25,7 @@ impl DefaultIndexCommand {
match index {
Some(index) => {
config.default_index = index.clone();
write_config(&config)?;
write_config(&config).await?;
println!("default index set to: {index}");
}
None => {


@ -13,10 +13,10 @@ pub enum ConfigCommands {
}
impl ConfigCommands {
pub fn run(self) -> anyhow::Result<()> {
pub async fn run(self) -> anyhow::Result<()> {
match self {
ConfigCommands::DefaultIndex(default_index) => default_index.run(),
ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run(),
ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run().await,
}
}
}


@ -4,6 +4,7 @@ use crate::cli::{
};
use anyhow::Context;
use clap::Args;
use fs_err::tokio as fs;
#[derive(Debug, Args)]
pub struct ScriptsRepoCommand {
@ -17,8 +18,8 @@ pub struct ScriptsRepoCommand {
}
impl ScriptsRepoCommand {
pub fn run(self) -> anyhow::Result<()> {
let mut config = read_config()?;
pub async fn run(self) -> anyhow::Result<()> {
let mut config = read_config().await?;
let repo = if self.reset {
Some(CliConfig::default().scripts_repo)
@ -29,9 +30,10 @@ impl ScriptsRepoCommand {
match repo {
Some(repo) => {
config.scripts_repo = repo.clone();
write_config(&config)?;
write_config(&config).await?;
fs_err::remove_dir_all(home_dir()?.join("scripts"))
fs::remove_dir_all(home_dir()?.join("scripts"))
.await
.context("failed to remove scripts directory")?;
println!("scripts repo set to: {repo}");


@ -1,6 +1,7 @@
use crate::cli::{config::read_config, VersionedPackageName};
use anyhow::Context;
use clap::Args;
use fs_err::tokio as fs;
use pesde::{
linking::generator::generate_bin_linking_module,
manifest::target::TargetKind,
@ -30,14 +31,16 @@ pub struct ExecuteCommand {
}
impl ExecuteCommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
let index = self
.index
.or_else(|| read_config().ok().map(|c| c.default_index))
.context("no index specified")?;
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let index = match self.index {
Some(index) => Some(index),
None => read_config().await.ok().map(|c| c.default_index),
}
.context("no index specified")?;
let source = PesdePackageSource::new(index);
source
.refresh(&project)
.await
.context("failed to refresh source")?;
let version_req = self.package.1.unwrap_or(VersionReq::STAR);
@ -51,6 +54,7 @@ impl ExecuteCommand {
if let Some(res) = source
.resolve(&specifier, &project, TargetKind::Lune)
.await
.context("failed to resolve package")?
.1
.pop_last()
@ -60,6 +64,7 @@ impl ExecuteCommand {
source
.resolve(&specifier, &project, TargetKind::Luau)
.await
.context("failed to resolve package")?
.1
.pop_last()
@ -74,16 +79,20 @@ impl ExecuteCommand {
let (fs, target) = source
.download(&pkg_ref, &project, &reqwest)
.await
.context("failed to download package")?;
let bin_path = target.bin_path().context("package has no binary export")?;
let tmp_dir = project.cas_dir().join(".tmp");
fs_err::create_dir_all(&tmp_dir).context("failed to create temporary directory")?;
fs::create_dir_all(&tmp_dir)
.await
.context("failed to create temporary directory")?;
let tempdir =
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
fs.write_to(tempdir.path(), project.cas_dir(), true)
.await
.context("failed to write package contents")?;
let mut caller =


@ -10,6 +10,7 @@ use pesde::{
};
use crate::cli::{config::read_config, HOME_DIR};
use fs_err::tokio as fs;
#[derive(Debug, Args)]
pub struct InitCommand {}
@ -25,8 +26,8 @@ require(home_dir .. {:?})"#,
}
impl InitCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest() {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest().await {
Ok(_) => {
println!("{}", "project already initialized".red());
return Ok(());
@ -125,25 +126,29 @@ impl InitCommand {
let folder = project
.package_dir()
.join(concat!(".", env!("CARGO_PKG_NAME")));
fs_err::create_dir_all(&folder).context("failed to create scripts folder")?;
fs::create_dir_all(&folder)
.await
.context("failed to create scripts folder")?;
fs_err::write(
fs::write(
folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",
ScriptName::RobloxSyncConfigGenerator
))),
)
.await
.context("failed to write sync config generator script file")?;
#[cfg(feature = "wally-compat")]
fs_err::write(
fs::write(
folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
script_contents(Path::new(&format!(
"lune/rojo/{}.luau",
ScriptName::SourcemapGenerator
))),
)
.await
.context("failed to write sourcemap generator script file")?;
let scripts =
@ -166,9 +171,9 @@ impl InitCommand {
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[DEFAULT_INDEX_NAME] =
toml_edit::value(read_config()?.default_index.to_bstring().to_string());
toml_edit::value(read_config().await?.default_index.to_bstring().to_string());
project.write_manifest(manifest.to_string())?;
project.write_manifest(manifest.to_string()).await?;
println!("{}", "initialized project".green());
Ok(())


@ -1,26 +1,21 @@
use crate::cli::{
bin_dir, download_graph, files::make_executable, run_on_workspace_members, up_to_date_lockfile,
bin_dir, download_graph, files::make_executable, repos::update_scripts,
run_on_workspace_members, up_to_date_lockfile,
};
use anyhow::Context;
use clap::Args;
use colored::{ColoredString, Colorize};
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{
lockfile::Lockfile,
manifest::{target::TargetKind, DependencyType},
Project, MANIFEST_FILE_NAME,
};
use std::{
collections::{BTreeSet, HashSet},
thread::JoinHandle,
};
use std::collections::{BTreeSet, HashSet};
#[derive(Debug, Args, Copy, Clone)]
pub struct InstallCommand {
/// The amount of threads to use for downloading
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
threads: u64,
/// Whether to error on changes in the lockfile
#[arg(long)]
locked: bool,
@ -91,21 +86,21 @@ fn job(n: u8) -> ColoredString {
}
impl InstallCommand {
pub fn run(
pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::blocking::Client,
update_task: &mut Option<JoinHandle<()>>,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new();
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let lockfile = if self.locked {
match up_to_date_lockfile(&project)? {
match up_to_date_lockfile(&project).await? {
None => {
anyhow::bail!(
"lockfile is out of sync, run `{} install` to update it",
@ -115,7 +110,7 @@ impl InstallCommand {
file => file,
}
} else {
match project.deser_lockfile() {
match project.deser_lockfile().await {
Ok(lockfile) => {
if lockfile.overrides != manifest.overrides {
log::debug!("overrides are different");
@ -154,7 +149,8 @@ impl InstallCommand {
if deleted_folders.insert(folder.to_string()) {
log::debug!("deleting the {folder} folder");
if let Some(e) = fs_err::remove_dir_all(project.package_dir().join(&folder))
if let Some(e) = fs::remove_dir_all(project.package_dir().join(&folder))
.await
.err()
.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
{
@ -184,12 +180,10 @@ impl InstallCommand {
let graph = project
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources)
.await
.context("failed to build dependency graph")?;
if let Some(task) = update_task.take() {
log::debug!("waiting for update task to finish");
task.join().expect("failed to join update task");
}
update_scripts(&project).await?;
let downloaded_graph = download_graph(
&project,
@ -197,12 +191,12 @@ impl InstallCommand {
&graph,
&multi,
&reqwest,
self.threads as usize,
self.prod,
true,
format!("{} 📥 downloading dependencies", job(3)),
format!("{} 📥 downloaded dependencies", job(3)),
)?;
)
.await?;
let filtered_graph = if self.prod {
downloaded_graph
@ -225,9 +219,10 @@ impl InstallCommand {
project
.link_dependencies(&filtered_graph)
.await
.context("failed to link dependencies")?;
let bin_folder = bin_dir()?;
let bin_folder = bin_dir().await?;
for versions in filtered_graph.values() {
for node in versions.values() {
@ -245,18 +240,22 @@ impl InstallCommand {
}
let bin_file = bin_folder.join(alias);
fs_err::write(&bin_file, bin_link_file(alias))
fs::write(&bin_file, bin_link_file(alias))
.await
.context("failed to write bin link file")?;
make_executable(&bin_file).context("failed to make bin link executable")?;
make_executable(&bin_file)
.await
.context("failed to make bin link executable")?;
#[cfg(windows)]
{
let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
fs_err::copy(
fs::copy(
std::env::current_exe().context("failed to get current executable path")?,
&bin_file,
)
.await
.context("failed to copy bin link file")?;
}
}
@ -268,6 +267,7 @@ impl InstallCommand {
project
.apply_patches(&filtered_graph)
.await
.context("failed to apply patches")?;
}
@ -283,9 +283,13 @@ impl InstallCommand {
graph: downloaded_graph,
workspace: run_on_workspace_members(&project, |project| {
self.run(project, multi.clone(), reqwest.clone(), &mut None)
})?,
let multi = multi.clone();
let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, multi, reqwest)).await }
})
.await?,
})
.await
.context("failed to write lockfile")?;
Ok(())
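
The recursive workspace install call is now wrapped in Box::pin: an async fn cannot await itself directly, because the compiler cannot size the resulting future. A standalone sketch of the boxing pattern:

```rust
// Recursion through an async fn requires boxing the future behind a pointer.
use std::future::Future;
use std::pin::Pin;

fn count_down(n: u32) -> Pin<Box<dyn Future<Output = ()> + Send>> {
    Box::pin(async move {
        if n == 0 {
            return;
        }
        count_down(n - 1).await; // recursion through a boxed future
    })
}

#[tokio::main]
async fn main() {
    count_down(3).await;
}
```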


@ -1,6 +1,5 @@
use indicatif::MultiProgress;
use pesde::Project;
use std::thread::JoinHandle;
mod add;
mod auth;
@ -73,40 +72,31 @@ pub enum Subcommand {
}
impl Subcommand {
pub fn run(
pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::blocking::Client,
update_task: JoinHandle<()>,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let mut update_task = Some(update_task);
let res = match self {
Subcommand::Auth(auth) => auth.run(project, reqwest),
Subcommand::Config(config) => config.run(),
Subcommand::Init(init) => init.run(project),
Subcommand::Run(run) => run.run(project, &mut update_task),
Subcommand::Install(install) => install.run(project, multi, reqwest, &mut update_task),
Subcommand::Publish(publish) => publish.run(project, reqwest),
match self {
Subcommand::Auth(auth) => auth.run(project, reqwest).await,
Subcommand::Config(config) => config.run().await,
Subcommand::Init(init) => init.run(project).await,
Subcommand::Run(run) => run.run(project).await,
Subcommand::Install(install) => install.run(project, multi, reqwest).await,
Subcommand::Publish(publish) => publish.run(project, reqwest).await,
#[cfg(feature = "version-management")]
Subcommand::SelfInstall(self_install) => self_install.run(),
Subcommand::SelfInstall(self_install) => self_install.run().await,
#[cfg(feature = "patches")]
Subcommand::Patch(patch) => patch.run(project, reqwest),
Subcommand::Patch(patch) => patch.run(project, reqwest).await,
#[cfg(feature = "patches")]
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project),
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
#[cfg(feature = "version-management")]
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest),
Subcommand::Add(add) => add.run(project),
Subcommand::Update(update) => update.run(project, multi, reqwest, &mut update_task),
Subcommand::Outdated(outdated) => outdated.run(project),
Subcommand::Execute(execute) => execute.run(project, reqwest),
};
if let Some(handle) = update_task.take() {
handle.join().expect("failed to join update task");
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
Subcommand::Add(add) => add.run(project).await,
Subcommand::Update(update) => update.run(project, multi, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest).await,
}
res
}
}


@ -20,11 +20,12 @@ pub struct OutdatedCommand {
}
impl OutdatedCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
let graph = project.deser_lockfile()?.graph;
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let graph = project.deser_lockfile().await?.graph;
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let mut refreshed_sources = HashSet::new();
@ -45,7 +46,7 @@ impl OutdatedCommand {
let source = node.node.pkg_ref.source();
if refreshed_sources.insert(source.clone()) {
source.refresh(&project)?;
source.refresh(&project).await?;
}
if !self.strict {
@ -64,6 +65,7 @@ impl OutdatedCommand {
let version_id = source
.resolve(&specifier, &project, manifest.target.kind())
.await
.context("failed to resolve package versions")?
.1
.pop_last()


@ -2,6 +2,7 @@ use crate::cli::{up_to_date_lockfile, VersionedPackageName};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use fs_err::tokio as fs;
use pesde::{
patches::setup_patches_repo,
source::{
@ -19,8 +20,8 @@ pub struct PatchCommand {
}
impl PatchCommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph
} else {
anyhow::bail!("outdated lockfile, please run the install command first")
@ -45,12 +46,14 @@ impl PatchCommand {
.join(name.escaped())
.join(version_id.escaped())
.join(chrono::Utc::now().timestamp().to_string());
fs_err::create_dir_all(&directory)?;
fs::create_dir_all(&directory).await?;
source
.download(&node.node.pkg_ref, &project, &reqwest)?
.download(&node.node.pkg_ref, &project, &reqwest)
.await?
.0
.write_to(&directory, project.cas_dir(), false)
.await
.context("failed to write package contents")?;
setup_patches_repo(&directory)?;


@ -1,6 +1,7 @@
use crate::cli::up_to_date_lockfile;
use anyhow::Context;
use clap::Args;
use fs_err::tokio as fs;
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
use std::{path::PathBuf, str::FromStr};
@ -12,8 +13,8 @@ pub struct PatchCommitCommand {
}
impl PatchCommitCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph
} else {
anyhow::bail!("outdated lockfile, please run the install command first")
@ -48,15 +49,22 @@ impl PatchCommitCommand {
.context("package not found in graph")?;
let mut manifest = toml_edit::DocumentMut::from_str(
&project.read_manifest().context("failed to read manifest")?,
&project
.read_manifest()
.await
.context("failed to read manifest")?,
)
.context("failed to parse manifest")?;
let patch = create_patch(&self.directory).context("failed to create patch")?;
fs_err::remove_dir_all(self.directory).context("failed to remove patch directory")?;
fs::remove_dir_all(self.directory)
.await
.context("failed to remove patch directory")?;
let patches_dir = project.package_dir().join("patches");
fs_err::create_dir_all(&patches_dir).context("failed to create patches directory")?;
fs::create_dir_all(&patches_dir)
.await
.context("failed to create patches directory")?;
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
@ -65,7 +73,9 @@ impl PatchCommitCommand {
anyhow::bail!("patch file already exists: {}", patch_file.display());
}
fs_err::write(&patch_file, patch).context("failed to write patch file")?;
fs::write(&patch_file, patch)
.await
.context("failed to write patch file")?;
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[&name.to_string()][&version_id.to_string()] =
@ -73,6 +83,7 @@ impl PatchCommitCommand {
project
.write_manifest(manifest.to_string())
.await
.context("failed to write manifest")?;
println!(concat!(


@ -1,15 +1,9 @@
use crate::cli::{display_err, run_on_workspace_members, up_to_date_lockfile};
use anyhow::Context;
use async_compression::Level;
use clap::Args;
use colored::Colorize;
use reqwest::{header::AUTHORIZATION, StatusCode};
use semver::VersionReq;
use std::{
io::{Seek, Write},
path::Component,
};
use tempfile::tempfile;
use crate::cli::{run_on_workspace_members, up_to_date_lockfile};
use fs_err::tokio as fs;
use pesde::{
manifest::{target::Target, DependencyType},
scripts::ScriptName,
@ -25,6 +19,11 @@ use pesde::{
},
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
};
use reqwest::{header::AUTHORIZATION, StatusCode};
use semver::VersionReq;
use std::path::Component;
use tempfile::Builder;
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
#[derive(Debug, Args, Clone)]
pub struct PublishCommand {
@ -42,9 +41,10 @@ pub struct PublishCommand {
}
impl PublishCommand {
fn run_impl(self, project: &Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
async fn run_impl(self, project: &Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let mut manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
println!(
@ -72,7 +72,7 @@ impl PublishCommand {
anyhow::bail!("no build files found in target");
}
match up_to_date_lockfile(project)? {
match up_to_date_lockfile(project).await? {
Some(lockfile) => {
if lockfile
.graph
@ -93,10 +93,9 @@ impl PublishCommand {
}
}
let mut archive = tar::Builder::new(flate2::write::GzEncoder::new(
vec![],
flate2::Compression::best(),
));
let mut archive = tokio_tar::Builder::new(
async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
);
let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
let mut display_build_files: Vec<String> = vec![];
@ -179,8 +178,9 @@ impl PublishCommand {
anyhow::bail!("{name} must point to a file");
}
let contents =
fs_err::read_to_string(&export_path).context(format!("failed to read {name}"))?;
let contents = fs::read_to_string(&export_path)
.await
.context(format!("failed to read {name}"))?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter()
@ -237,17 +237,21 @@ impl PublishCommand {
if included_path.is_file() {
display_includes.push(included_name.clone());
archive.append_file(
included_name,
fs_err::File::open(&included_path)
.context(format!("failed to read {included_name}"))?
.file_mut(),
)?;
archive
.append_file(
included_name,
fs::File::open(&included_path)
.await
.context(format!("failed to read {included_name}"))?
.file_mut(),
)
.await?;
} else {
display_includes.push(format!("{included_name}/*"));
archive
.append_dir_all(included_name, &included_path)
.await
.context(format!("failed to include directory {included_name}"))?;
}
}
@ -331,6 +335,7 @@ impl PublishCommand {
DependencySpecifiers::Workspace(spec) => {
let pkg_ref = WorkspacePackageSource
.resolve(spec, project, target_kind)
.await
.context("failed to resolve workspace package")?
.1
.pop_last()
@ -345,7 +350,8 @@ impl PublishCommand {
.context("failed to get workspace directory")?,
)
.join(MANIFEST_FILE_NAME);
let manifest = fs_err::read_to_string(&manifest)
let manifest = fs::read_to_string(&manifest)
.await
.context("failed to read workspace package manifest")?;
let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
.context("failed to parse workspace package manifest")?;
@ -442,25 +448,41 @@ impl PublishCommand {
println!();
}
let mut temp_manifest = tempfile().context("failed to create temp manifest file")?;
let temp_path = Builder::new().make(|_| Ok(()))?.into_temp_path();
let mut temp_manifest = fs::OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.read(true)
.open(temp_path.to_path_buf())
.await?;
temp_manifest
.write_all(
toml::to_string(&manifest)
.context("failed to serialize manifest")?
.as_bytes(),
)
.await
.context("failed to write temp manifest file")?;
temp_manifest
.rewind()
.await
.context("failed to rewind temp manifest file")?;
archive.append_file(MANIFEST_FILE_NAME, &mut temp_manifest)?;
archive
.append_file(MANIFEST_FILE_NAME, temp_manifest.file_mut())
.await?;
let archive = archive
let mut encoder = archive
.into_inner()
.context("failed to encode archive")?
.finish()
.context("failed to get archive bytes")?;
.await
.context("failed to finish archive")?;
encoder
.shutdown()
.await
.context("failed to finish archive")?;
let archive = encoder.into_inner();
let index_url = manifest
.indices
@ -469,6 +491,7 @@ impl PublishCommand {
let source = PesdePackageSource::new(index_url.clone());
source
.refresh(project)
.await
.context("failed to refresh source")?;
let config = source
.config(project)
@ -494,7 +517,7 @@ impl PublishCommand {
}
if self.dry_run {
fs_err::write("package.tar.gz", archive)?;
fs::write("package.tar.gz", archive).await?;
println!(
"{}",
@ -506,9 +529,9 @@ impl PublishCommand {
let mut request = reqwest
.post(format!("{}/v0/packages", config.api()))
.multipart(reqwest::blocking::multipart::Form::new().part(
.multipart(reqwest::multipart::Form::new().part(
"tarball",
reqwest::blocking::multipart::Part::bytes(archive).file_name("package.tar.gz"),
reqwest::multipart::Part::bytes(archive).file_name("package.tar.gz"),
));
if let Some(token) = project.auth_config().tokens().get(index_url) {
@ -516,10 +539,13 @@ impl PublishCommand {
request = request.header(AUTHORIZATION, token);
}
let response = request.send().context("failed to send request")?;
let response = request.send().await.context("failed to send request")?;
let status = response.status();
let text = response.text().context("failed to get response text")?;
let text = response
.text()
.await
.context("failed to get response text")?;
match status {
StatusCode::CONFLICT => {
println!("{}", "package version already exists".red().bold());
@ -544,17 +570,20 @@ impl PublishCommand {
Ok(())
}
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
let result = self.clone().run_impl(&project, reqwest.clone());
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let result = self.clone().run_impl(&project, reqwest.clone()).await;
if project.workspace_dir().is_some() {
return result;
} else if let Err(result) = result {
println!("an error occurred publishing workspace root: {result}");
} else {
display_err(result, " occurred publishing workspace root");
}
run_on_workspace_members(&project, |project| {
self.clone().run_impl(&project, reqwest.clone())
let reqwest = reqwest.clone();
let this = self.clone();
async move { this.run_impl(&project, reqwest).await }
})
.await
.map(|_| ())
}
}
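The archive finalization above has two distinct flush points, and both matter: `into_inner()` writes the tar trailer, and `shutdown()` flushes the gzip trailer. A minimal sketch of the pattern, assuming (as the surrounding code suggests) a `tokio_tar::Builder` wrapped around async-compression's write-side `GzipEncoder` over a `Vec<u8>`:

```rust
use async_compression::tokio::write::GzipEncoder;
use tokio::io::AsyncWriteExt;

// Finalize an in-memory .tar.gz: skipping either call truncates the output.
async fn finish_archive(
    archive: tokio_tar::Builder<GzipEncoder<Vec<u8>>>,
) -> anyhow::Result<Vec<u8>> {
    let mut encoder = archive.into_inner().await?; // writes the tar trailer
    encoder.shutdown().await?; // flushes the gzip stream
    Ok(encoder.into_inner()) // the finished archive bytes
}
```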


@ -1,4 +1,4 @@
use crate::cli::up_to_date_lockfile;
use crate::cli::{repos::update_scripts, up_to_date_lockfile};
use anyhow::Context;
use clap::Args;
use pesde::{
@ -8,9 +8,8 @@ use pesde::{
Project, PACKAGES_CONTAINER_NAME,
};
use relative_path::RelativePathBuf;
use std::{
env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command, thread::JoinHandle,
};
use std::{env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command};
use tokio::runtime::Handle;
#[derive(Debug, Args)]
pub struct RunCommand {
@ -24,15 +23,11 @@ pub struct RunCommand {
}
impl RunCommand {
pub fn run(
self,
project: Project,
update_task: &mut Option<JoinHandle<()>>,
) -> anyhow::Result<()> {
let mut run = |path: PathBuf| {
if let Some(handle) = update_task.take() {
handle.join().expect("failed to join update task");
}
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let run = |path: PathBuf| {
Handle::current()
.block_on(update_scripts(&project))
.expect("failed to update scripts");
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
caller
@ -62,7 +57,7 @@ impl RunCommand {
let package_or_script = match self.package_or_script {
Some(package_or_script) => package_or_script,
None => {
if let Some(script_path) = project.deser_manifest()?.target.bin_path() {
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
run(script_path.to_path(project.package_dir()));
}
@ -71,7 +66,7 @@ impl RunCommand {
};
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph
} else {
anyhow::bail!("outdated lockfile, please run the install command first")
@ -89,7 +84,8 @@ impl RunCommand {
};
let base_folder = project
.deser_manifest()?
.deser_manifest()
.await?
.target
.kind()
.packages_folder(&node.node.pkg_ref.target_kind());
@ -106,7 +102,7 @@ impl RunCommand {
}
}
if let Ok(manifest) = project.deser_manifest() {
if let Ok(manifest) = project.deser_manifest().await {
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
run(script_path.to_path(project.package_dir()))
}


@ -10,7 +10,7 @@ pub struct SelfInstallCommand {
}
impl SelfInstallCommand {
pub fn run(self) -> anyhow::Result<()> {
pub async fn run(self) -> anyhow::Result<()> {
#[cfg(windows)]
{
if !self.skip_add_to_path {
@ -24,7 +24,7 @@ impl SelfInstallCommand {
.0;
let path: String = env.get_value("Path").context("failed to get Path value")?;
let bin_dir = crate::cli::bin_dir()?;
let bin_dir = crate::cli::bin_dir().await?;
let bin_dir = bin_dir.to_string_lossy();
let exists = path.split(';').any(|part| *part == bin_dir);
@ -68,7 +68,7 @@ and then restart your shell.
);
}
update_bin_exe()?;
update_bin_exe().await?;
Ok(())
}


@ -5,10 +5,10 @@ use clap::Args;
pub struct SelfUpgradeCommand {}
impl SelfUpgradeCommand {
pub fn run(self, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> {
let config = read_config()?;
pub async fn run(self, reqwest: reqwest::Client) -> anyhow::Result<()> {
let config = read_config().await?;
get_or_download_version(&reqwest, &config.last_checked_updates.unwrap().1)?;
get_or_download_version(&reqwest, &config.last_checked_updates.unwrap().1).await?;
// a call to `update_bin_exe` or other similar function *should* be here, in case new versions
// have fixes to bugs in executing other versions, but that would cause
// the current file to be overwritten by itself, so this needs more thought


@ -1,30 +1,26 @@
use crate::cli::{download_graph, run_on_workspace_members};
use crate::cli::{download_graph, repos::update_scripts, run_on_workspace_members};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use indicatif::MultiProgress;
use pesde::{lockfile::Lockfile, Project};
use std::{collections::HashSet, thread::JoinHandle};
use std::collections::HashSet;
#[derive(Debug, Args, Copy, Clone)]
pub struct UpdateCommand {
/// The amount of threads to use for downloading
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
threads: u64,
}
pub struct UpdateCommand {}
impl UpdateCommand {
pub fn run(
pub async fn run(
self,
project: Project,
multi: MultiProgress,
reqwest: reqwest::blocking::Client,
update_task: &mut Option<JoinHandle<()>>,
reqwest: reqwest::Client,
) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new();
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
println!(
@ -36,11 +32,10 @@ impl UpdateCommand {
let graph = project
.dependency_graph(None, &mut refreshed_sources)
.await
.context("failed to build dependency graph")?;
if let Some(handle) = update_task.take() {
handle.join().expect("failed to join update task");
}
update_scripts(&project).await?;
project
.write_lockfile(Lockfile {
@ -55,17 +50,21 @@ impl UpdateCommand {
&graph,
&multi,
&reqwest,
self.threads as usize,
false,
false,
"📥 downloading dependencies".to_string(),
"📥 downloaded dependencies".to_string(),
)?,
)
.await?,
workspace: run_on_workspace_members(&project, |project| {
self.run(project, multi.clone(), reqwest.clone(), &mut None)
})?,
let multi = multi.clone();
let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, multi, reqwest)).await }
})
.await?,
})
.await
.context("failed to write lockfile")?;
Ok(())
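The `Box::pin` wrapper in the workspace hunk is what lets `run` call itself through `run_on_workspace_members`: a self-referential `async fn` would otherwise have an infinitely sized future. A stripped-down illustration of the same fix (hypothetical function; direct async recursion through a box needs Rust 1.77+):

```rust
// Recursion in an async fn needs indirection: boxing the recursive call
// gives the future a known size.
async fn visit(depth: u32) -> anyhow::Result<()> {
    if depth == 0 {
        return Ok(());
    }
    Box::pin(visit(depth - 1)).await
}
```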


@ -1,5 +1,6 @@
use crate::cli::{auth::Tokens, home_dir};
use anyhow::Context;
use fs_err::tokio as fs;
use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -38,8 +39,8 @@ impl Default for CliConfig {
}
}
pub fn read_config() -> anyhow::Result<CliConfig> {
let config_string = match fs_err::read_to_string(home_dir()?.join("config.toml")) {
pub async fn read_config() -> anyhow::Result<CliConfig> {
let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
Ok(config_string) => config_string,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Ok(CliConfig::default());
@ -52,9 +53,10 @@ pub fn read_config() -> anyhow::Result<CliConfig> {
Ok(config)
}
pub fn write_config(config: &CliConfig) -> anyhow::Result<()> {
pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
let config_string = toml::to_string(config).context("failed to serialize config")?;
fs_err::write(home_dir()?.join("config.toml"), config_string)
fs::write(home_dir()?.join("config.toml"), config_string)
.await
.context("failed to write config file")?;
Ok(())
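`use fs_err::tokio as fs` is the drop-in idiom this commit standardizes on: the module mirrors `tokio::fs`, but its errors carry the offending path. A sketch of the read-with-fallback shape used by `read_config`:

```rust
use fs_err::tokio as fs;
use std::path::Path;

// Treat a missing config file as the default; propagate any other I/O error.
async fn read_or_default(dir: &Path) -> anyhow::Result<String> {
    match fs::read_to_string(dir.join("config.toml")).await {
        Ok(s) => Ok(s),
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(String::new()),
        Err(e) => Err(e.into()),
    }
}
```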


@ -1,16 +1,19 @@
use std::path::Path;
pub fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
pub async fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
#[cfg(unix)]
{
use anyhow::Context;
use fs_err::tokio as fs;
use std::os::unix::fs::PermissionsExt;
let mut perms = fs_err::metadata(&_path)
let mut perms = fs::metadata(&_path)
.await
.context("failed to get bin link file metadata")?
.permissions();
perms.set_mode(perms.mode() | 0o111);
fs_err::set_permissions(&_path, perms)
fs::set_permissions(&_path, perms)
.await
.context("failed to set bin link file permissions")?;
}


@ -1,4 +1,7 @@
use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use indicatif::MultiProgress;
use pesde::{
lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
@ -10,11 +13,13 @@ use pesde::{
use relative_path::RelativePathBuf;
use std::{
collections::{BTreeMap, HashSet},
future::Future,
path::PathBuf,
str::FromStr,
sync::Arc,
time::Duration,
};
use tokio::pin;
pub mod auth;
pub mod commands;
@ -32,15 +37,17 @@ pub fn home_dir() -> anyhow::Result<PathBuf> {
.join(HOME_DIR))
}
pub fn bin_dir() -> anyhow::Result<PathBuf> {
pub async fn bin_dir() -> anyhow::Result<PathBuf> {
let bin_dir = home_dir()?.join("bin");
fs_err::create_dir_all(&bin_dir).context("failed to create bin folder")?;
fs::create_dir_all(&bin_dir)
.await
.context("failed to create bin folder")?;
Ok(bin_dir)
}
pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
let manifest = project.deser_manifest()?;
let lockfile = match project.deser_lockfile() {
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
let manifest = project.deser_manifest().await?;
let lockfile = match project.deser_lockfile().await {
Ok(lockfile) => lockfile,
Err(pesde::errors::LockfileReadError::Io(e))
if e.kind() == std::io::ErrorKind::NotFound =>
@ -185,13 +192,12 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
}
#[allow(clippy::too_many_arguments)]
pub fn download_graph(
pub async fn download_graph(
project: &Project,
refreshed_sources: &mut HashSet<PackageSources>,
graph: &DependencyGraph,
multi: &MultiProgress,
reqwest: &reqwest::blocking::Client,
threads: usize,
reqwest: &reqwest::Client,
prod: bool,
write: bool,
progress_msg: String,
@ -207,11 +213,12 @@ pub fn download_graph(
);
bar.enable_steady_tick(Duration::from_millis(100));
let (rx, downloaded_graph) = project
.download_graph(graph, refreshed_sources, reqwest, threads, prod, write)
let (mut rx, downloaded_graph) = project
.download_graph(graph, refreshed_sources, reqwest, prod, write)
.await
.context("failed to download dependencies")?;
while let Ok(result) = rx.recv() {
while let Some(result) = rx.recv().await {
bar.inc(1);
match result {
@ -238,41 +245,63 @@ pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
)
}
pub fn run_on_workspace_members(
pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
project: &Project,
f: impl Fn(Project) -> anyhow::Result<()>,
f: impl Fn(Project) -> F,
) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> {
Ok(match project.workspace_dir() {
Some(_) => {
// this might seem counterintuitive, but remember that
// the presence of a workspace dir means that this project is a member of one
Default::default()
}
None => project
.workspace_members(project.package_dir())
.context("failed to get workspace members")?
.into_iter()
.map(|(path, manifest)| {
(
manifest.name,
manifest.target.kind(),
RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap())
.unwrap(),
)
})
.map(|(name, target, path)| {
f(shift_project_dir(
project,
path.to_path(project.package_dir()),
))
.map(|_| (name, target, path))
})
.collect::<Result<Vec<_>, _>>()
.context("failed to install workspace member's dependencies")?
.into_iter()
.fold(BTreeMap::new(), |mut map, (name, target, path)| {
map.entry(name).or_default().insert(target, path);
map
}),
})
// this might seem counterintuitive, but remember that
// the presence of a workspace dir means that this project is a member of one
if project.workspace_dir().is_some() {
return Ok(Default::default());
}
let members_future = project.workspace_members(project.package_dir()).await?;
pin!(members_future);
let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();
while let Some((path, manifest)) = members_future.next().await.transpose()? {
let relative_path =
RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();
f(shift_project_dir(project, path)).await?;
results
.entry(manifest.name)
.or_default()
.insert(manifest.target.kind(), relative_path);
}
Ok(results)
}
pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
if let Err(err) = result {
eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
let cause = err.chain().skip(1).collect::<Vec<_>>();
if !cause.is_empty() {
eprintln!("{}:", "caused by".red().bold());
for err in cause {
eprintln!(" - {err}");
}
}
let backtrace = err.backtrace();
match backtrace.status() {
std::backtrace::BacktraceStatus::Disabled => {
eprintln!(
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
"help".yellow().bold()
);
}
std::backtrace::BacktraceStatus::Captured => {
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
}
_ => {
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
}
}
}
}
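One subtlety in the rewritten `run_on_workspace_members`: the binding is named `members_future`, but `workspace_members` now returns a stream, and the `stream!`-built value is `!Unpin`, so it must go through `pin!` before `.next()` can be called on it. The general consumption shape, as a sketch:

```rust
use futures::{Stream, StreamExt};
use tokio::pin;

// Drain a fallible stream into a Vec, stopping at the first error.
async fn drain<T, E>(stream: impl Stream<Item = Result<T, E>>) -> Result<Vec<T>, E> {
    pin!(stream); // !Unpin streams must be pinned before .next()
    let mut out = Vec::new();
    while let Some(item) = stream.next().await {
        out.push(item?);
    }
    Ok(out)
}
```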


@ -3,18 +3,22 @@ use crate::{
util::authenticate_conn,
};
use anyhow::Context;
use fs_err::tokio as fs;
use gix::remote::{fetch::Shallow, Direction};
use pesde::Project;
use std::path::Path;
use std::{path::Path, sync::atomic::AtomicBool};
use tokio::{runtime::Handle, task::spawn_blocking};
fn update_repo<P: AsRef<Path>>(
async fn update_repo<P: AsRef<Path>>(
name: &str,
path: P,
url: gix::Url,
project: &Project,
) -> anyhow::Result<()> {
let path = path.as_ref();
if path.exists() {
let should_update = path.exists();
let (repo, oid) = if should_update {
let repo = gix::open(path).context(format!("failed to open {name} repository"))?;
let remote = repo
@ -49,68 +53,91 @@ fn update_repo<P: AsRef<Path>>(
.or(unpacked.2)
.context("couldn't find oid in remote ref")?;
let tree = repo
.find_object(oid)
.context(format!("failed to find {name} repository tree"))?
.peel_to_tree()
.context(format!("failed to peel {name} repository object to tree"))?;
let mut index = gix::index::File::from_state(
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
format!("failed to create index state from {name} repository tree"),
)?,
repo.index_path(),
);
let opts = gix::worktree::state::checkout::Options {
overwrite_existing: true,
destination_is_initially_empty: false,
..Default::default()
};
gix::worktree::state::checkout(
&mut index,
repo.work_dir().context(format!("{name} repo is bare"))?,
repo.objects
.clone()
.into_arc()
.context("failed to clone objects")?,
&gix::progress::Discard,
&gix::progress::Discard,
&false.into(),
opts,
)
.context(format!("failed to checkout {name} repository"))?;
index
.write(gix::index::write::Options::default())
.context("failed to write index")?;
(repo, gix::ObjectId::from(oid))
} else {
fs_err::create_dir_all(path).context(format!("failed to create {name} directory"))?;
fs::create_dir_all(path)
.await
.context(format!("failed to create {name} directory"))?;
gix::prepare_clone(url, path)
let repo = gix::prepare_clone(url, path)
.context(format!("failed to prepare {name} repository clone"))?
.with_shallow(Shallow::Deepen(1))
.fetch_then_checkout(gix::progress::Discard, &false.into())
.fetch_only(gix::progress::Discard, &false.into())
.context(format!("failed to fetch and checkout {name} repository"))?
.0
.main_worktree(gix::progress::Discard, &false.into())
.context(format!("failed to set {name} repository as main worktree"))?;
.0;
let oid = {
let mut head = repo
.head()
.context(format!("failed to get {name} repository head"))?;
let obj = head
.peel_to_object_in_place()
.context(format!("failed to peel {name} repository head to object"))?;
obj.id
};
(repo, oid)
};
Ok(())
let tree = repo
.find_object(oid)
.context(format!("failed to find {name} repository tree"))?
.peel_to_tree()
.context(format!("failed to peel {name} repository object to tree"))?;
let mut index = gix::index::File::from_state(
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
format!("failed to create index state from {name} repository tree"),
)?,
repo.index_path(),
);
let opts = gix::worktree::state::checkout::Options {
overwrite_existing: true,
destination_is_initially_empty: !should_update,
..Default::default()
};
gix::worktree::state::checkout(
&mut index,
repo.work_dir().context(format!("{name} repo is bare"))?,
repo.objects
.clone()
.into_arc()
.context("failed to clone objects")?,
&gix::progress::Discard,
&gix::progress::Discard,
&false.into(),
opts,
)
.context(format!("failed to checkout {name} repository"))?;
index
.write(gix::index::write::Options::default())
.context("failed to write index")
}
pub fn update_repo_dependencies(project: &Project) -> anyhow::Result<()> {
static SCRIPTS_UPDATED: AtomicBool = AtomicBool::new(false);
pub async fn update_scripts(project: &Project) -> anyhow::Result<()> {
if SCRIPTS_UPDATED.swap(true, std::sync::atomic::Ordering::Relaxed) {
return Ok(());
}
let home_dir = home_dir()?;
let config = read_config()?;
let config = read_config().await?;
update_repo(
"scripts",
home_dir.join("scripts"),
config.scripts_repo,
project,
)?;
let project = project.clone();
spawn_blocking(move || {
Handle::current().block_on(update_repo(
"scripts",
home_dir.join("scripts"),
config.scripts_repo,
&project,
))
})
.await??;
Ok(())
}
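`update_scripts` bridges two worlds: gix's fetch and checkout APIs block, while the callers are async. The diff's answer is `spawn_blocking` plus `Handle::block_on`, which is legal only because the closure runs on the blocking pool rather than on a runtime worker thread. A sketch under those assumptions (both helpers are hypothetical stand-ins):

```rust
use tokio::{runtime::Handle, task::spawn_blocking};

fn blocking_git_work() -> anyhow::Result<()> {
    Ok(()) // stand-in for the blocking gix fetch/checkout
}

async fn async_followup() -> anyhow::Result<()> {
    Ok(()) // stand-in for async work the blocking section needs
}

async fn update_like() -> anyhow::Result<()> {
    let handle = Handle::current();
    spawn_blocking(move || -> anyhow::Result<()> {
        blocking_git_work()?;
        // block_on panics on a runtime worker thread; on the blocking
        // pool, re-entering the runtime like this is allowed
        handle.block_on(async_followup())
    })
    .await??;
    Ok(())
}
```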


@ -1,16 +1,18 @@
use anyhow::Context;
use colored::Colorize;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::{io::Read, path::PathBuf};
use crate::cli::{
bin_dir,
config::{read_config, write_config, CliConfig},
files::make_executable,
home_dir,
};
use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::path::PathBuf;
use tokio::io::AsyncReadExt;
pub fn current_version() -> Version {
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
@ -38,10 +40,10 @@ fn get_repo() -> (String, String) {
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<()> {
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
let (owner, repo) = get_repo();
let config = read_config()?;
let config = read_config().await?;
let version = if let Some((_, version)) = config
.last_checked_updates
@ -54,10 +56,12 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
"https://api.github.com/repos/{owner}/{repo}/releases",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Vec<Release>>()
.await
.context("failed to parse GitHub API response")?;
let version = releases
@ -69,7 +73,8 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
write_config(&CliConfig {
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
..config
})?;
})
.await?;
version
};
@ -97,8 +102,8 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
Ok(())
}
pub fn download_github_release(
reqwest: &reqwest::blocking::Client,
pub async fn download_github_release(
reqwest: &reqwest::Client,
version: &Version,
) -> anyhow::Result<Vec<u8>> {
let (owner, repo) = get_repo();
@ -108,10 +113,12 @@ pub fn download_github_release(
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Release>()
.await
.context("failed to parse GitHub API response")?;
let asset = release
@ -130,34 +137,43 @@ pub fn download_github_release(
.get(asset.url)
.header(ACCEPT, "application/octet-stream")
.send()
.await
.context("failed to send request to download asset")?
.error_for_status()
.context("failed to download asset")?
.bytes()
.await
.context("failed to download asset")?;
let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
let mut archive = tar::Archive::new(&mut decoder);
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
let mut archive = tokio_tar::Archive::new(&mut decoder);
let entry = archive
let mut entry = archive
.entries()
.context("failed to read archive entries")?
.next()
.await
.context("archive has no entry")?
.context("failed to get first archive entry")?;
let mut result = Vec::new();
entry
.bytes()
.collect::<Result<Vec<u8>, std::io::Error>>()
.context("failed to read archive entry bytes")
.read_to_end(&mut result)
.await
.context("failed to read archive entry bytes")?;
Ok(result)
}
pub fn get_or_download_version(
reqwest: &reqwest::blocking::Client,
pub async fn get_or_download_version(
reqwest: &reqwest::Client,
version: &Version,
) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions");
fs_err::create_dir_all(&path).context("failed to create versions directory")?;
fs::create_dir_all(&path)
.await
.context("failed to create versions directory")?;
let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
@ -172,14 +188,19 @@ pub fn get_or_download_version(
}
if is_requested_version {
fs_err::copy(std::env::current_exe()?, &path)
fs::copy(std::env::current_exe()?, &path)
.await
.context("failed to copy current executable to version directory")?;
} else {
let bytes = download_github_release(reqwest, version)?;
fs_err::write(&path, bytes).context("failed to write downloaded version file")?;
let bytes = download_github_release(reqwest, version).await?;
fs::write(&path, bytes)
.await
.context("failed to write downloaded version file")?;
}
make_executable(&path).context("failed to make downloaded version executable")?;
make_executable(&path)
.await
.context("failed to make downloaded version executable")?;
Ok(if is_requested_version {
None
@ -188,48 +209,52 @@ pub fn get_or_download_version(
})
}
pub fn max_installed_version() -> anyhow::Result<Version> {
pub async fn max_installed_version() -> anyhow::Result<Version> {
let versions_dir = home_dir()?.join("versions");
fs_err::create_dir_all(&versions_dir).context("failed to create versions directory")?;
fs::create_dir_all(&versions_dir)
.await
.context("failed to create versions directory")?;
let max_version = fs_err::read_dir(versions_dir)
.context("failed to read versions directory")?
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.map(|entry| {
#[cfg(not(windows))]
let name = entry
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
#[cfg(windows)]
let name = entry
.path()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let mut read_dir = fs::read_dir(versions_dir)
.await
.context("failed to read versions directory")?;
let mut max_version = current_version();
Version::parse(&name).unwrap()
})
.max()
.filter(|v| v >= &current_version())
.unwrap_or_else(current_version);
while let Some(entry) = read_dir.next_entry().await? {
#[cfg(not(windows))]
let name = entry
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
#[cfg(windows)]
let name = entry
.path()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let version = Version::parse(&name).unwrap();
if version > max_version {
max_version = version;
}
}
Ok(max_version)
}
pub fn update_bin_exe() -> anyhow::Result<()> {
let copy_to = bin_dir()?.join(format!(
pub async fn update_bin_exe() -> anyhow::Result<()> {
let copy_to = bin_dir().await?.join(format!(
"{}{}",
env!("CARGO_BIN_NAME"),
std::env::consts::EXE_SUFFIX
));
fs_err::copy(std::env::current_exe()?, &copy_to)
fs::copy(std::env::current_exe()?, &copy_to)
.await
.context("failed to copy executable to bin folder")?;
make_executable(&copy_to)
make_executable(&copy_to).await
}
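The release download now unpacks entirely in memory: `&[u8]` implements `AsyncBufRead`, so the downloaded bytes can feed async-compression's `GzipDecoder` directly, and `tokio_tar` exposes entries as a stream. Roughly the same flow, condensed into a sketch:

```rust
use async_compression::tokio::bufread::GzipDecoder;
use futures::StreamExt;
use tokio::io::AsyncReadExt;

// Read the first entry of an in-memory .tar.gz into a byte vector.
async fn first_entry_bytes(gz: &[u8]) -> anyhow::Result<Vec<u8>> {
    let mut decoder = GzipDecoder::new(gz);
    let mut archive = tokio_tar::Archive::new(&mut decoder);
    let mut entry = archive
        .entries()? // a stream of io::Result<Entry>
        .next()
        .await
        .ok_or_else(|| anyhow::anyhow!("archive has no entry"))??;
    let mut out = Vec::new();
    entry.read_to_end(&mut out).await?;
    Ok(out)
}
```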


@ -7,42 +7,41 @@ use crate::{
},
Project, PACKAGES_CONTAINER_NAME,
};
use fs_err::create_dir_all;
use fs_err::tokio as fs;
use std::{
collections::HashSet,
sync::{mpsc::Receiver, Arc, Mutex},
sync::{Arc, Mutex},
};
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
type MultithreadDownloadJob = (
Receiver<Result<(), errors::DownloadGraphError>>,
tokio::sync::mpsc::Receiver<Result<(), errors::DownloadGraphError>>,
MultithreadedGraph,
);
impl Project {
/// Downloads a graph of dependencies
pub fn download_graph(
pub async fn download_graph(
&self,
graph: &DependencyGraph,
refreshed_sources: &mut HashSet<PackageSources>,
reqwest: &reqwest::blocking::Client,
threads: usize,
reqwest: &reqwest::Client,
prod: bool,
write: bool,
) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
let manifest = self.deser_manifest()?;
let manifest = self.deser_manifest().await?;
let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
let threadpool = threadpool::ThreadPool::new(threads);
let (tx, rx) = std::sync::mpsc::channel();
let (tx, rx) =
tokio::sync::mpsc::channel(graph.iter().map(|(_, versions)| versions.len()).sum());
for (name, versions) in graph {
for (version_id, node) in versions {
let source = node.pkg_ref.source();
if refreshed_sources.insert(source.clone()) {
source.refresh(self).map_err(Box::new)?;
source.refresh(self).await.map_err(Box::new)?;
}
let container_folder = node.container_folder(
@ -59,7 +58,7 @@ impl Project {
version_id.version(),
);
create_dir_all(&container_folder)?;
fs::create_dir_all(&container_folder).await?;
let tx = tx.clone();
@ -71,27 +70,29 @@ impl Project {
let reqwest = reqwest.clone();
let downloaded_graph = downloaded_graph.clone();
threadpool.execute(move || {
tokio::spawn(async move {
let project = project.clone();
log::debug!("downloading {name}@{version_id}");
let (fs, target) = match source.download(&node.pkg_ref, &project, &reqwest) {
Ok(target) => target,
Err(e) => {
tx.send(Err(Box::new(e).into())).unwrap();
return;
}
};
let (fs, target) =
match source.download(&node.pkg_ref, &project, &reqwest).await {
Ok(target) => target,
Err(e) => {
tx.send(Err(Box::new(e).into())).await.unwrap();
return;
}
};
log::debug!("downloaded {name}@{version_id}");
if write {
if !prod || node.ty != DependencyType::Dev {
match fs.write_to(container_folder, project.cas_dir(), true) {
match fs.write_to(container_folder, project.cas_dir(), true).await {
Ok(_) => {}
Err(e) => {
tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
.await
.unwrap();
return;
}
@ -101,13 +102,15 @@ impl Project {
}
}
let mut downloaded_graph = downloaded_graph.lock().unwrap();
downloaded_graph
.entry(name)
.or_default()
.insert(version_id, DownloadedDependencyGraphNode { node, target });
{
let mut downloaded_graph = downloaded_graph.lock().unwrap();
downloaded_graph
.entry(name)
.or_default()
.insert(version_id, DownloadedDependencyGraphNode { node, target });
}
tx.send(Ok(())).unwrap();
tx.send(Ok(())).await.unwrap();
});
}
}
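The threadpool plus std channel pair becomes `tokio::spawn` plus a bounded `tokio::sync::mpsc` channel sized to the total number of packages, so no `send` ever has to wait and the `threads` knob disappears; the consumer drains with `recv().await` until every sender is dropped. A self-contained sketch of the fan-out shape:

```rust
use tokio::sync::mpsc;

// One task per job; recv() yields None once all senders are dropped.
async fn fan_out(jobs: Vec<u32>) -> Vec<u32> {
    let (tx, mut rx) = mpsc::channel(jobs.len().max(1)); // capacity 0 panics
    for job in jobs {
        let tx = tx.clone();
        tokio::spawn(async move {
            tx.send(job * 2).await.unwrap(); // never blocks at this capacity
        });
    }
    drop(tx); // drop the original sender so the channel can close
    let mut results = Vec::new();
    while let Some(n) = rx.recv().await {
        results.push(n);
    }
    results
}
```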


@ -4,6 +4,9 @@
//! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.
use crate::{lockfile::Lockfile, manifest::Manifest};
use async_stream::stream;
use fs_err::tokio as fs;
use futures::Stream;
use gix::sec::identity::Account;
use std::{
collections::HashMap,
@ -137,42 +140,49 @@ impl Project {
}
/// Read the manifest file
pub fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(string)
}
/// Deserialize the manifest file
pub fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(toml::from_str(&string)?)
}
/// Write the manifest file
pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
fs_err::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
}
/// Deserialize the lockfile
pub fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = fs_err::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
Ok(toml::from_str(&string)?)
}
/// Write the lockfile
pub fn write_lockfile(&self, lockfile: Lockfile) -> Result<(), errors::LockfileWriteError> {
pub async fn write_lockfile(
&self,
lockfile: Lockfile,
) -> Result<(), errors::LockfileWriteError> {
let string = toml::to_string(&lockfile)?;
fs_err::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string).await?;
Ok(())
}
/// Get the workspace members
pub fn workspace_members<P: AsRef<Path>>(
pub async fn workspace_members<P: AsRef<Path>>(
&self,
dir: P,
) -> Result<HashMap<PathBuf, Manifest>, errors::WorkspaceMembersError> {
) -> Result<
impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
errors::WorkspaceMembersError,
> {
let dir = dir.as_ref().to_path_buf();
let manifest = fs_err::read_to_string(dir.join(MANIFEST_FILE_NAME))
let manifest = fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
.await
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
@ -188,17 +198,18 @@ impl Project {
.flat_map(|paths| paths.into_iter())
.collect::<Result<Vec<_>, _>>()?;
members
.into_iter()
.map(|path| {
let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
Ok(stream! {
for path in members {
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
.await
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
})?;
Ok((path, manifest))
})
.collect::<Result<_, _>>()
yield Ok((path, manifest));
}
})
}
}
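`workspace_members` now returns an `impl Stream` built with async-stream's `stream!` macro, yielding each `(path, manifest)` pair as it is read instead of collecting the whole map up front. A minimal sketch of the same shape, with fallible items and early termination:

```rust
use async_stream::stream;
use fs_err::tokio as fs;
use futures::Stream;
use std::path::PathBuf;

// Lazily read files, yielding (path, contents); each await suspends the
// stream until the consumer polls again.
fn read_files(paths: Vec<PathBuf>) -> impl Stream<Item = std::io::Result<(PathBuf, String)>> {
    stream! {
        for path in paths {
            match fs::read_to_string(&path).await {
                Ok(text) => yield Ok((path, text)),
                Err(e) => {
                    yield Err(e);
                    break; // end the stream after the first error
                }
            }
        }
    }
}
```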


@ -3,9 +3,14 @@ use crate::{
lockfile::DownloadedGraph,
names::PackageNames,
scripts::{execute_script, ScriptName},
source::{fs::store_in_cas, traits::PackageRef, version_id::VersionId},
source::{
fs::{cas_path, store_in_cas},
traits::PackageRef,
version_id::VersionId,
},
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME,
};
use fs_err::tokio as fs;
use std::{
collections::BTreeMap,
ffi::OsStr,
@ -15,22 +20,25 @@ use std::{
/// Generates linking modules for a project
pub mod generator;
fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
let p = path.as_ref();
fs_err::create_dir_all(p)?;
fs::create_dir_all(p).await?;
p.canonicalize()
}
fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let cas_path = store_in_cas(cas_dir, contents.as_bytes())?.1;
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;
fs_err::hard_link(cas_path, destination)
fs::hard_link(cas_path(&hash, cas_dir), destination).await
}
impl Project {
/// Links the dependencies of the project
pub fn link_dependencies(&self, graph: &DownloadedGraph) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest()?;
pub async fn link_dependencies(
&self,
graph: &DownloadedGraph,
) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?;
let mut package_types = BTreeMap::<&PackageNames, BTreeMap<&VersionId, Vec<String>>>::new();
@ -57,7 +65,7 @@ impl Project {
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
let lib_file = lib_file.to_path(&container_folder);
let contents = match fs_err::read_to_string(&lib_file) {
let contents = match fs::read_to_string(&lib_file).await {
Ok(contents) => contents,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Err(errors::LinkingError::LibFileNotFound(
@ -128,7 +136,8 @@ impl Project {
.kind()
.packages_folder(&node.node.pkg_ref.target_kind()),
),
)?;
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = node.node.container_folder(
@ -162,7 +171,8 @@ impl Project {
)?,
types,
),
)?;
)
.await?;
};
if let Some(bin_file) = node.target.bin_path() {
@ -177,7 +187,8 @@ impl Project {
&container_folder,
),
),
)?;
)
.await?;
}
}
@ -208,7 +219,8 @@ impl Project {
.target_kind()
.packages_folder(&dependency_node.node.pkg_ref.target_kind()),
),
)?;
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = dependency_node.node.container_folder(
@ -220,7 +232,8 @@ impl Project {
let linker_folder = create_and_canonicalize(
node_container_folder
.join(node.node.base_folder(dependency_node.target.kind())),
)?;
)
.await?;
write_cas(
linker_folder.join(format!("{dependency_alias}.luau")),
@ -241,7 +254,8 @@ impl Project {
.and_then(|v| v.get(dependency_version_id))
.unwrap(),
),
)?;
)
.await?;
}
}
}
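`write_cas` now stores each generated module once under its content hash and hard-links it into place, so identical linker files share one on-disk copy. The addressing scheme matches the `cas_path` helper added further down in the filesystem source module:

```rust
use fs_err::tokio as fs;
use std::path::{Path, PathBuf};

// Shard the store by the first two hex characters of the hash.
fn cas_path(hash: &str, cas_dir: &Path) -> PathBuf {
    let (prefix, rest) = hash.split_at(2);
    cas_dir.join(prefix).join(rest)
}

// Link the stored file to its destination instead of copying it.
async fn link_from_cas(hash: &str, cas_dir: &Path, dest: &Path) -> std::io::Result<()> {
    fs::hard_link(cas_path(hash, cas_dir), dest).await
}
```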


@ -2,17 +2,16 @@
use crate::cli::version::{
check_for_updates, current_version, get_or_download_version, max_installed_version,
};
use crate::cli::{auth::get_tokens, home_dir, repos::update_repo_dependencies, HOME_DIR};
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
use anyhow::Context;
use clap::Parser;
use colored::Colorize;
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use indicatif_log_bridge::LogWrapper;
use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
use std::{
collections::HashSet,
path::{Path, PathBuf},
thread::spawn,
};
use tempfile::NamedTempFile;
@ -34,7 +33,7 @@ struct Cli {
subcommand: cli::commands::Subcommand,
}
fn get_linkable_dir(path: &Path) -> PathBuf {
async fn get_linkable_dir(path: &Path) -> PathBuf {
let mut curr_path = PathBuf::new();
let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
let temp_file_name = file_to_try.path().file_name().unwrap();
@ -44,8 +43,8 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
let try_path = curr_path.join(temp_file_name);
if fs_err::hard_link(file_to_try.path(), &try_path).is_ok() {
if let Err(err) = fs_err::remove_file(&try_path) {
if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
if let Err(err) = fs::remove_file(&try_path).await {
log::warn!(
"failed to remove temporary file at {}: {err}",
try_path.display()
@ -62,7 +61,7 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
);
}
fn run() -> anyhow::Result<()> {
async fn run() -> anyhow::Result<()> {
let cwd = std::env::current_dir().expect("failed to get current working directory");
#[cfg(windows)]
@ -107,8 +106,9 @@ fn run() -> anyhow::Result<()> {
let mut project_root = None::<PathBuf>;
let mut workspace_dir = None::<PathBuf>;
fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
async fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
.await
.context("failed to read manifest")?;
let manifest: pesde::manifest::Manifest =
toml::from_str(&manifest).context("failed to parse manifest")?;
@ -143,13 +143,13 @@ fn run() -> anyhow::Result<()> {
}
(Some(project_root), None) => {
if get_workspace_members(&path)?.contains(project_root) {
if get_workspace_members(&path).await?.contains(project_root) {
workspace_dir = Some(path);
}
}
(None, None) => {
if get_workspace_members(&path)?.contains(&cwd) {
if get_workspace_members(&path).await?.contains(&cwd) {
// initializing a new member of a workspace
break 'finder (cwd, Some(path));
} else {
@ -179,9 +179,11 @@ fn run() -> anyhow::Result<()> {
let home_dir = home_dir()?;
let data_dir = home_dir.join("data");
fs_err::create_dir_all(&data_dir).expect("failed to create data directory");
fs::create_dir_all(&data_dir)
.await
.expect("failed to create data directory");
let cas_dir = get_linkable_dir(&project_root_dir).join(HOME_DIR);
let cas_dir = get_linkable_dir(&project_root_dir).await.join(HOME_DIR);
let cas_dir = if cas_dir == home_dir {
&data_dir
@ -197,7 +199,7 @@ fn run() -> anyhow::Result<()> {
project_workspace_dir,
data_dir,
cas_dir,
AuthConfig::new().with_tokens(get_tokens()?.0),
AuthConfig::new().with_tokens(get_tokens().await?.0),
);
let reqwest = {
@ -210,7 +212,7 @@ fn run() -> anyhow::Result<()> {
.context("failed to create accept header")?,
);
reqwest::blocking::Client::builder()
reqwest::Client::builder()
.user_agent(concat!(
env!("CARGO_PKG_NAME"),
"/",
@ -224,21 +226,22 @@ fn run() -> anyhow::Result<()> {
{
let target_version = project
.deser_manifest()
.await
.ok()
.and_then(|manifest| manifest.pesde_version);
// store the current version in case it needs to be used later
get_or_download_version(&reqwest, &current_version())?;
get_or_download_version(&reqwest, &current_version()).await?;
let exe_path = if let Some(version) = target_version {
Some(get_or_download_version(&reqwest, &version)?)
Some(get_or_download_version(&reqwest, &version).await?)
} else {
None
};
let exe_path = if let Some(exe_path) = exe_path {
exe_path
} else {
get_or_download_version(&reqwest, &max_installed_version()?)?
get_or_download_version(&reqwest, &max_installed_version().await?).await?
};
if let Some(exe_path) = exe_path {
@ -250,62 +253,26 @@ fn run() -> anyhow::Result<()> {
std::process::exit(status.code().unwrap());
}
display_err(check_for_updates(&reqwest), " while checking for updates");
}
let project_2 = project.clone();
let update_task = spawn(move || {
display_err(
update_repo_dependencies(&project_2),
" while updating repository dependencies",
check_for_updates(&reqwest).await,
" while checking for updates",
);
});
}
let cli = match Cli::try_parse() {
Ok(cli) => cli,
Err(err) => {
let _ = err.print();
update_task.join().expect("failed to join update task");
std::process::exit(err.exit_code());
}
};
cli.subcommand.run(project, multi, reqwest, update_task)
cli.subcommand.run(project, multi, reqwest).await
}
fn display_err(result: anyhow::Result<()>, prefix: &str) {
if let Err(err) = result {
eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
let cause = err.chain().skip(1).collect::<Vec<_>>();
if !cause.is_empty() {
eprintln!("{}:", "caused by".red().bold());
for err in cause {
eprintln!(" - {err}");
}
}
let backtrace = err.backtrace();
match backtrace.status() {
std::backtrace::BacktraceStatus::Disabled => {
eprintln!(
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
"help".yellow().bold()
);
}
std::backtrace::BacktraceStatus::Captured => {
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
}
_ => {
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
}
}
}
}
fn main() {
let result = run();
#[tokio::main]
async fn main() {
let result = run().await;
let is_err = result.is_err();
display_err(result, "");
if is_err {
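With the blocking client gone, every `send()`, `text()`, and `json()` call in the CLI gains an `.await`; the builder API itself is unchanged. The request shape, as a sketch:

```rust
// Fetch a URL and return its body, mirroring the async reqwest calls above.
async fn fetch(client: &reqwest::Client, url: &str) -> anyhow::Result<String> {
    let text = client
        .get(url)
        .send()
        .await?
        .error_for_status()?
        .text()
        .await?;
    Ok(text)
}
```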


@ -2,8 +2,8 @@ use crate::{
lockfile::DownloadedGraph, source::traits::PackageRef, Project, MANIFEST_FILE_NAME,
PACKAGES_CONTAINER_NAME,
};
use fs_err::read;
use git2::{ApplyLocation, ApplyOptions, Diff, DiffFormat, DiffLineType, Repository, Signature};
use fs_err::tokio as fs;
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use relative_path::RelativePathBuf;
use std::path::Path;
@ -72,14 +72,19 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
impl Project {
/// Apply patches to the project's dependencies
pub fn apply_patches(&self, graph: &DownloadedGraph) -> Result<(), errors::ApplyPatchesError> {
let manifest = self.deser_manifest()?;
pub async fn apply_patches(
&self,
graph: &DownloadedGraph,
) -> Result<(), errors::ApplyPatchesError> {
let manifest = self.deser_manifest().await?;
for (name, versions) in manifest.patches {
for (version_id, patch_path) in versions {
let patch_path = patch_path.to_path(self.package_dir());
let patch = Diff::from_buffer(
&read(&patch_path).map_err(errors::ApplyPatchesError::PatchRead)?,
&fs::read(&patch_path)
.await
.map_err(errors::ApplyPatchesError::PatchRead)?,
)?;
let Some(node) = graph
@ -110,42 +115,37 @@ impl Project {
{
let repo = setup_patches_repo(&container_folder)?;
let mut apply_opts = ApplyOptions::new();
apply_opts.delta_callback(|delta| {
let Some(delta) = delta else {
return true;
};
for delta in patch.deltas() {
if !matches!(delta.status(), git2::Delta::Modified) {
return true;
continue;
}
let file = delta.new_file();
let Some(relative_path) = file.path() else {
return true;
continue;
};
let relative_path = RelativePathBuf::from_path(relative_path).unwrap();
let path = relative_path.to_path(&container_folder);
if !path.is_file() {
return true;
continue;
}
// there is no way (as far as I know) to check if it's hardlinked
// so, we always unlink it
let content = read(&path).unwrap();
fs_err::remove_file(&path).unwrap();
fs_err::write(path, content).unwrap();
let content = fs::read(&path).await.unwrap();
fs::remove_file(&path).await.unwrap();
fs::write(path, content).await.unwrap();
}
true
});
repo.apply(&patch, ApplyLocation::Both, Some(&mut apply_opts))?;
repo.apply(&patch, ApplyLocation::Both, None)?;
}
log::debug!("patch applied to {name}@{version_id}, removing .git directory");
fs_err::remove_dir_all(container_folder.join(".git"))
fs::remove_dir_all(container_folder.join(".git"))
.await
.map_err(errors::ApplyPatchesError::DotGitRemove)?;
}
}
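The patching hunk drops git2's delta callback in favor of a plain pre-pass over `patch.deltas()`: each modified file is read, removed, and rewritten so that a CAS hard link becomes a private copy before the patch is applied in place. The rewrite trick in isolation:

```rust
use fs_err::tokio as fs;
use std::path::Path;

// Recreating the file breaks the hard link: the new inode is private to
// this path, so patching it no longer mutates the shared CAS copy.
async fn unlink_in_place(path: &Path) -> std::io::Result<()> {
    let content = fs::read(path).await?;
    fs::remove_file(path).await?;
    fs::write(path, content).await
}
```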


@ -15,12 +15,15 @@ use std::collections::{HashMap, HashSet, VecDeque};
impl Project {
/// Create a dependency graph from the project's manifest
pub fn dependency_graph(
pub async fn dependency_graph(
&self,
previous_graph: Option<&DependencyGraph>,
refreshed_sources: &mut HashSet<PackageSources>,
) -> Result<DependencyGraph, Box<errors::DependencyGraphError>> {
let manifest = self.deser_manifest().map_err(|e| Box::new(e.into()))?;
let manifest = self
.deser_manifest()
.await
.map_err(|e| Box::new(e.into()))?;
let mut all_specifiers = manifest
.all_dependencies()
@ -190,11 +193,12 @@ impl Project {
};
if refreshed_sources.insert(source.clone()) {
source.refresh(self).map_err(|e| Box::new(e.into()))?;
source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
}
let (name, resolved) = source
.resolve(&specifier, self, target)
.await
.map_err(|e| Box::new(e.into()))?;
let Some(target_version_id) = graph


@ -1,17 +1,21 @@
use std::{
collections::BTreeMap,
io::{BufWriter, Read, Write},
path::{Path, PathBuf},
};
use crate::{
manifest::target::TargetKind,
source::{IGNORED_DIRS, IGNORED_FILES},
util::hash,
};
use fs_err::tokio as fs;
use relative_path::RelativePathBuf;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeMap, VecDeque},
future::Future,
path::{Path, PathBuf},
};
use tempfile::Builder;
use tokio::{
io::{AsyncReadExt, AsyncWriteExt, BufWriter},
pin,
};
/// A file system entry
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -35,7 +39,7 @@ pub enum PackageFS {
Copy(PathBuf, TargetKind),
}
fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
fn make_readonly(_file: &fs::File) -> std::io::Result<()> {
// on Windows, file deletion is disallowed if the file is read-only which breaks patching
#[cfg(not(windows))]
{
@ -48,58 +52,54 @@ fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
Ok(())
}
pub(crate) fn store_in_cas<P: AsRef<Path>>(
cas_dir: P,
contents: &[u8],
) -> std::io::Result<(String, PathBuf)> {
let hash = hash(contents);
pub(crate) fn cas_path(hash: &str, cas_dir: &Path) -> PathBuf {
let (prefix, rest) = hash.split_at(2);
let folder = cas_dir.as_ref().join(prefix);
fs_err::create_dir_all(&folder)?;
let cas_path = folder.join(rest);
if !cas_path.exists() {
let mut file = fs_err::File::create(&cas_path)?;
file.write_all(contents)?;
make_readonly(&file)?;
}
Ok((hash, cas_path))
cas_dir.join(prefix).join(rest)
}
pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
pub(crate) async fn store_in_cas<
R: tokio::io::AsyncRead + Unpin,
P: AsRef<Path>,
C: FnMut(Vec<u8>) -> F,
F: Future<Output = std::io::Result<()>>,
>(
cas_dir: P,
contents: &mut dyn Read,
mut contents: R,
mut bytes_cb: C,
) -> std::io::Result<String> {
let tmp_dir = cas_dir.as_ref().join(".tmp");
fs_err::create_dir_all(&tmp_dir)?;
fs::create_dir_all(&tmp_dir).await?;
let mut hasher = Sha256::new();
let mut buf = [0; 8 * 1024];
let mut file_writer = BufWriter::new(tempfile::NamedTempFile::new_in(&tmp_dir)?);
let temp_path = Builder::new()
.make_in(&tmp_dir, |_| Ok(()))?
.into_temp_path();
let mut file_writer = BufWriter::new(fs::File::create(temp_path.to_path_buf()).await?);
loop {
let bytes_read = contents.read(&mut buf)?;
let bytes_future = contents.read(&mut buf);
pin!(bytes_future);
let bytes_read = bytes_future.await?;
if bytes_read == 0 {
break;
}
let bytes = &buf[..bytes_read];
hasher.update(bytes);
file_writer.write_all(bytes)?;
bytes_cb(bytes.to_vec()).await?;
file_writer.write_all(bytes).await?;
}
let hash = format!("{:x}", hasher.finalize());
let (prefix, rest) = hash.split_at(2);
let folder = cas_dir.as_ref().join(prefix);
fs_err::create_dir_all(&folder)?;
let cas_path = cas_path(&hash, cas_dir.as_ref());
fs::create_dir_all(cas_path.parent().unwrap()).await?;
let cas_path = folder.join(rest);
match file_writer.into_inner()?.persist_noclobber(&cas_path) {
match temp_path.persist_noclobber(&cas_path) {
Ok(_) => {
make_readonly(&fs_err::File::open(cas_path)?)?;
make_readonly(&file_writer.into_inner())?;
}
Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => {}
Err(e) => return Err(e.error),
@ -108,43 +108,9 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
Ok(hash)
}
fn copy_dir_all(
src: impl AsRef<Path>,
dst: impl AsRef<Path>,
target: TargetKind,
) -> std::io::Result<()> {
fs_err::create_dir_all(&dst)?;
'outer: for entry in fs_err::read_dir(src.as_ref().to_path_buf())? {
let entry = entry?;
let ty = entry.file_type()?;
let file_name = entry.file_name().to_string_lossy().to_string();
if ty.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_ref()) {
continue;
}
for other_target in TargetKind::VARIANTS {
if target.packages_folder(other_target) == file_name {
continue 'outer;
}
}
copy_dir_all(entry.path(), dst.as_ref().join(&file_name), target)?;
} else {
if IGNORED_FILES.contains(&file_name.as_ref()) {
continue;
}
fs_err::copy(entry.path(), dst.as_ref().join(file_name))?;
}
}
Ok(())
}
impl PackageFS {
/// Write the package to the given destination
pub fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
&self,
destination: P,
cas_path: Q,
@ -158,17 +124,17 @@ impl PackageFS {
match entry {
FSEntry::File(hash) => {
if let Some(parent) = path.parent() {
fs_err::create_dir_all(parent)?;
fs::create_dir_all(parent).await?;
}
let (prefix, rest) = hash.split_at(2);
let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
if link {
fs_err::hard_link(cas_file_path, path)?;
fs::hard_link(cas_file_path, path).await?;
} else {
let mut f = fs_err::File::create(&path)?;
f.write_all(&fs_err::read(cas_file_path)?)?;
let mut f = fs::File::create(&path).await?;
f.write_all(&fs::read(cas_file_path).await?).await?;
#[cfg(unix)]
{
@ -180,13 +146,45 @@ impl PackageFS {
}
}
FSEntry::Directory => {
fs_err::create_dir_all(path)?;
fs::create_dir_all(path).await?;
}
}
}
}
PackageFS::Copy(src, target) => {
copy_dir_all(src, destination, *target)?;
fs::create_dir_all(destination.as_ref()).await?;
let mut read_dirs = VecDeque::from([fs::read_dir(src.to_path_buf())]);
while let Some(read_dir) = read_dirs.pop_front() {
let mut read_dir = read_dir.await?;
while let Some(entry) = read_dir.next_entry().await? {
let relative_path =
RelativePathBuf::from_path(entry.path().strip_prefix(src).unwrap())
.unwrap();
let file_name = relative_path.file_name().unwrap();
if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name) {
continue;
}
for other_target in TargetKind::VARIANTS {
if target.packages_folder(other_target) == file_name {
continue;
}
}
read_dirs.push_back(fs::read_dir(entry.path()));
continue;
}
if IGNORED_FILES.contains(&file_name) {
continue;
}
fs::copy(entry.path(), relative_path.to_path(destination.as_ref())).await?;
}
}
}
}
@ -194,7 +192,7 @@ impl PackageFS {
}
/// Returns the contents of the file with the given hash
pub fn read_file<P: AsRef<Path>, H: AsRef<str>>(
pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>(
&self,
file_hash: H,
cas_path: P,
@ -205,6 +203,6 @@ impl PackageFS {
let (prefix, rest) = file_hash.as_ref().split_at(2);
let cas_file_path = cas_path.as_ref().join(prefix).join(rest);
fs_err::read_to_string(cas_file_path).ok()
fs::read_to_string(cas_file_path).await.ok()
}
}
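`store_in_cas` above streams the reader through a SHA-256 hasher and a temp file, then `persist_noclobber` moves the result to its hash-derived path; a writer that loses the race hits `AlreadyExists`, which is safely ignored because the contents are identical by construction. The hashing half, as a sketch:

```rust
use sha2::{Digest, Sha256};
use tokio::io::{AsyncRead, AsyncReadExt};

// Stream a reader through SHA-256 in fixed-size chunks; the full version
// also writes each chunk to a temp file and persists it under the hash.
async fn hash_reader<R: AsyncRead + Unpin>(mut contents: R) -> std::io::Result<String> {
    let mut hasher = Sha256::new();
    let mut buf = [0u8; 8 * 1024];
    loop {
        let n = contents.read(&mut buf).await?;
        if n == 0 {
            break;
        }
        hasher.update(&buf[..n]);
    }
    Ok(format!("{:x}", hasher.finalize()))
}
```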


@ -10,7 +10,7 @@ use crate::{
},
names::PackageNames,
source::{
fs::{store_in_cas, FSEntry, PackageFS},
fs::{FSEntry, PackageFS},
git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
git_index::GitBasedSource,
specifiers::DependencySpecifiers,
@ -20,6 +20,7 @@ use crate::{
util::hash,
Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
};
use fs_err::tokio as fs;
/// The Git package reference
pub mod pkg_ref;
@ -63,11 +64,11 @@ impl PackageSource for GitPackageSource {
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project)
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await
}
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@ -324,11 +325,11 @@ impl PackageSource for GitPackageSource {
))
}
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,
_reqwest: &reqwest::blocking::Client,
_reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError> {
let index_file = project
.cas_dir
@ -336,7 +337,7 @@ impl PackageSource for GitPackageSource {
.join(hash(self.as_bytes()))
.join(&pkg_ref.tree_id);
match fs_err::read_to_string(&index_file) {
match fs::read_to_string(&index_file).await {
Ok(s) => {
log::debug!(
"using cached index file for package {}#{} {}",
@ -354,6 +355,7 @@ impl PackageSource for GitPackageSource {
match entries.get(&RelativePathBuf::from(MANIFEST_FILE_NAME)) {
Some(FSEntry::File(hash)) => match fs
.read_file(hash, project.cas_dir())
.await
.map(|m| toml::de::from_str::<Manifest>(&m))
{
Some(Ok(m)) => Some(m),
@ -376,9 +378,10 @@ impl PackageSource for GitPackageSource {
#[cfg(feature = "wally-compat")]
None if !pkg_ref.new_structure => {
let tempdir = tempfile::tempdir()?;
fs.write_to(tempdir.path(), project.cas_dir(), false)?;
fs.write_to(tempdir.path(), project.cas_dir(), false)
.await?;
crate::source::wally::compat_util::get_target(project, &tempdir)?
crate::source::wally::compat_util::get_target(project, &tempdir).await?
}
None => {
return Err(errors::DownloadError::NoManifest(Box::new(
@ -393,47 +396,50 @@ impl PackageSource for GitPackageSource {
Err(e) => return Err(errors::DownloadError::Io(e)),
}
let repo = gix::open(self.path(project))
.map_err(|e| errors::DownloadError::OpenRepo(Box::new(self.repo_url.clone()), e))?;
let rev = repo
.rev_parse_single(BStr::new(&pkg_ref.tree_id))
.map_err(|e| {
errors::DownloadError::ParseRev(
pkg_ref.tree_id.clone(),
Box::new(self.repo_url.clone()),
e,
)
})?;
let tree = rev
.object()
.map_err(|e| {
errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
})?
.peel_to_tree()
.map_err(|e| {
errors::DownloadError::ParseObjectToTree(Box::new(self.repo_url.clone()), e)
})?;
let mut recorder = Recorder::default();
tree.traverse()
.breadthfirst(&mut recorder)
.map_err(|e| errors::DownloadError::TraverseTree(Box::new(self.repo_url.clone()), e))?;
let mut entries = BTreeMap::new();
let mut manifest = None;
let repo = gix::open(self.path(project))
.map_err(|e| errors::DownloadError::OpenRepo(Box::new(self.repo_url.clone()), e))?;
let recorder = {
let rev = repo
.rev_parse_single(BStr::new(&pkg_ref.tree_id))
.map_err(|e| {
errors::DownloadError::ParseRev(
pkg_ref.tree_id.clone(),
Box::new(self.repo_url.clone()),
e,
)
})?;
let tree = rev
.object()
.map_err(|e| {
errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
})?
.peel_to_tree()
.map_err(|e| {
errors::DownloadError::ParseObjectToTree(Box::new(self.repo_url.clone()), e)
})?;
let mut recorder = Recorder::default();
tree.traverse().breadthfirst(&mut recorder).map_err(|e| {
errors::DownloadError::TraverseTree(Box::new(self.repo_url.clone()), e)
})?;
recorder
};
for entry in recorder.records {
let path = RelativePathBuf::from(entry.filepath.to_string());
let name = path.file_name().unwrap_or("");
let object = repo.find_object(entry.oid).map_err(|e| {
errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
})?;
if matches!(object.kind, gix::object::Kind::Tree) {
if path
.components()
.next()
.is_some_and(|ct| IGNORED_DIRS.contains(&ct.as_str()))
{
if IGNORED_DIRS.contains(&name) {
continue;
}
@ -442,13 +448,13 @@ impl PackageSource for GitPackageSource {
continue;
}
if IGNORED_FILES.contains(&path.as_str()) {
if IGNORED_FILES.contains(&name) {
continue;
}
if pkg_ref.use_new_structure() && path == "default.project.json" {
if pkg_ref.use_new_structure() && name == "default.project.json" {
log::debug!(
"removing default.project.json from {}#{} - using new structure",
"removing default.project.json from {}#{} at {path} - using new structure",
pkg_ref.repo,
pkg_ref.tree_id
);
@ -456,13 +462,15 @@ impl PackageSource for GitPackageSource {
}
let data = object.into_blob().data.clone();
let hash = store_in_cas(project.cas_dir(), &data)?.0;
// let hash =
// store_reader_in_cas(project.cas_dir(), data.as_slice(), |_| async { Ok(()) })
// .await?;
if path == MANIFEST_FILE_NAME {
manifest = Some(data);
}
entries.insert(path, FSEntry::File(hash));
// entries.insert(path, FSEntry::File(hash));
}
let manifest = match manifest {
@ -488,9 +496,10 @@ impl PackageSource for GitPackageSource {
#[cfg(feature = "wally-compat")]
None if !pkg_ref.new_structure => {
let tempdir = tempfile::tempdir()?;
fs.write_to(tempdir.path(), project.cas_dir(), false)?;
fs.write_to(tempdir.path(), project.cas_dir(), false)
.await?;
crate::source::wally::compat_util::get_target(project, &tempdir)?
crate::source::wally::compat_util::get_target(project, &tempdir).await?
}
None => {
return Err(errors::DownloadError::NoManifest(Box::new(
@ -500,15 +509,16 @@ impl PackageSource for GitPackageSource {
};
if let Some(parent) = index_file.parent() {
fs_err::create_dir_all(parent)?;
fs::create_dir_all(parent).await?;
}
fs_err::write(
fs::write(
&index_file,
toml::to_string(&fs).map_err(|e| {
errors::DownloadError::SerializeIndex(Box::new(self.repo_url.clone()), e)
})?,
)
.await
.map_err(errors::DownloadError::Io)?;
Ok((fs, target))


@ -1,6 +1,8 @@
use gix::remote::Direction;
#![allow(async_fn_in_trait)]
use crate::{util::authenticate_conn, Project};
use fs_err::tokio as fs;
use gix::remote::Direction;
use tokio::task::spawn_blocking;
/// A trait for sources that are based on Git repositories
pub trait GitBasedSource {
@ -104,56 +106,79 @@ pub trait GitBasedSource {
}
/// Refreshes the repository
fn refresh(&self, project: &Project) -> Result<(), errors::RefreshError> {
async fn refresh(&self, project: &Project) -> Result<(), errors::RefreshError> {
let path = self.path(project);
let repo_url = self.repo_url().clone();
let auth_config = project.auth_config.clone();
if path.exists() {
let repo = match gix::open(&path) {
Ok(repo) => repo,
Err(e) => return Err(errors::RefreshError::Open(path, Box::new(e))),
};
let remote = match repo.find_default_remote(Direction::Fetch) {
Some(Ok(remote)) => remote,
Some(Err(e)) => {
return Err(errors::RefreshError::GetDefaultRemote(path, Box::new(e)))
spawn_blocking(move || {
let repo = match gix::open(&path) {
Ok(repo) => repo,
Err(e) => return Err(errors::RefreshError::Open(path, Box::new(e))),
};
let remote = match repo.find_default_remote(Direction::Fetch) {
Some(Ok(remote)) => remote,
Some(Err(e)) => {
return Err(errors::RefreshError::GetDefaultRemote(path, Box::new(e)))
}
None => {
return Err(errors::RefreshError::NoDefaultRemote(path));
}
};
let mut connection = match remote.connect(Direction::Fetch) {
Ok(connection) => connection,
Err(e) => {
return Err(errors::RefreshError::Connect(
repo_url.to_string(),
Box::new(e),
))
}
};
authenticate_conn(&mut connection, &auth_config);
let fetch =
match connection.prepare_fetch(gix::progress::Discard, Default::default()) {
Ok(fetch) => fetch,
Err(e) => {
return Err(errors::RefreshError::PrepareFetch(
repo_url.to_string(),
Box::new(e),
))
}
};
match fetch.receive(gix::progress::Discard, &false.into()) {
Ok(_) => Ok(()),
Err(e) => Err(errors::RefreshError::Read(
repo_url.to_string(),
Box::new(e),
)),
}
None => {
return Err(errors::RefreshError::NoDefaultRemote(path));
}
};
let mut connection = remote.connect(Direction::Fetch).map_err(|e| {
errors::RefreshError::Connect(self.repo_url().to_string(), Box::new(e))
})?;
authenticate_conn(&mut connection, &project.auth_config);
connection
.prepare_fetch(gix::progress::Discard, Default::default())
.map_err(|e| {
errors::RefreshError::PrepareFetch(self.repo_url().to_string(), Box::new(e))
})?
.receive(gix::progress::Discard, &false.into())
.map_err(|e| {
errors::RefreshError::Read(self.repo_url().to_string(), Box::new(e))
})?;
})
.await
.unwrap()?;
return Ok(());
}
fs_err::create_dir_all(&path)?;
fs::create_dir_all(&path).await?;
let auth_config = project.auth_config.clone();
gix::prepare_clone_bare(self.repo_url().clone(), &path)
.map_err(|e| errors::RefreshError::Clone(self.repo_url().to_string(), Box::new(e)))?
.configure_connection(move |c| {
authenticate_conn(c, &auth_config);
Ok(())
})
.fetch_only(gix::progress::Discard, &false.into())
.map_err(|e| errors::RefreshError::Fetch(self.repo_url().to_string(), Box::new(e)))?;
Ok(())
spawn_blocking(move || {
gix::prepare_clone_bare(repo_url.clone(), &path)
.map_err(|e| errors::RefreshError::Clone(repo_url.to_string(), Box::new(e)))?
.configure_connection(move |c| {
authenticate_conn(c, &auth_config);
Ok(())
})
.fetch_only(gix::progress::Discard, &false.into())
.map_err(|e| errors::RefreshError::Fetch(repo_url.to_string(), Box::new(e)))
})
.await
.unwrap()
.map(|_| ())
}
}
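
Since gix's open/connect/fetch APIs are synchronous, `refresh` now clones everything the closure needs (`path`, `repo_url`, `auth_config`) up front and runs the blocking work on tokio's blocking pool. The shape of that pattern in isolation (a sketch; `blocking_work` stands in for the gix calls):

use tokio::task::spawn_blocking;

async fn refresh_like(path: std::path::PathBuf) -> std::io::Result<()> {
    // move owned data in; the closure must be 'static
    spawn_blocking(move || blocking_work(&path))
        .await
        .expect("blocking task panicked")?; // JoinError becomes a panic, like the .unwrap() above
    Ok(())
}

fn blocking_work(path: &std::path::Path) -> std::io::Result<()> {
    // stand-in for the gix open + fetch sequence
    std::fs::metadata(path).map(|_| ())
}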

View file

@@ -62,17 +62,17 @@ impl PackageSource for PackageSources {
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
match self {
PackageSources::Pesde(source) => source.refresh(project).map_err(Into::into),
PackageSources::Pesde(source) => source.refresh(project).await.map_err(Into::into),
#[cfg(feature = "wally-compat")]
PackageSources::Wally(source) => source.refresh(project).map_err(Into::into),
PackageSources::Git(source) => source.refresh(project).map_err(Into::into),
PackageSources::Workspace(source) => source.refresh(project).map_err(Into::into),
PackageSources::Wally(source) => source.refresh(project).await.map_err(Into::into),
PackageSources::Git(source) => source.refresh(project).await.map_err(Into::into),
PackageSources::Workspace(source) => source.refresh(project).await.map_err(Into::into),
}
}
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@@ -81,6 +81,7 @@ impl PackageSource for PackageSources {
match (self, specifier) {
(PackageSources::Pesde(source), DependencySpecifiers::Pesde(specifier)) => source
.resolve(specifier, project, package_target)
.await
.map(|(name, results)| {
(
name,
@@ -95,6 +96,7 @@ impl PackageSource for PackageSources {
#[cfg(feature = "wally-compat")]
(PackageSources::Wally(source), DependencySpecifiers::Wally(specifier)) => source
.resolve(specifier, project, package_target)
.await
.map(|(name, results)| {
(
name,
@@ -108,6 +110,7 @@ impl PackageSource for PackageSources {
(PackageSources::Git(source), DependencySpecifiers::Git(specifier)) => source
.resolve(specifier, project, package_target)
.await
.map(|(name, results)| {
(
name,
@@ -122,6 +125,7 @@ impl PackageSource for PackageSources {
(PackageSources::Workspace(source), DependencySpecifiers::Workspace(specifier)) => {
source
.resolve(specifier, project, package_target)
.await
.map(|(name, results)| {
(
name,
@@ -140,28 +144,32 @@ impl PackageSource for PackageSources {
}
}
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,
reqwest: &reqwest::blocking::Client,
reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError> {
match (self, pkg_ref) {
(PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => source
.download(pkg_ref, project, reqwest)
.await
.map_err(Into::into),
#[cfg(feature = "wally-compat")]
(PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => source
.download(pkg_ref, project, reqwest)
.await
.map_err(Into::into),
(PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => source
.download(pkg_ref, project, reqwest)
.await
.map_err(Into::into),
(PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => source
.download(pkg_ref, project, reqwest)
.await
.map_err(Into::into),
_ => Err(errors::DownloadError::Mismatch),
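
The enum dispatch above is mechanical: every arm awaits the matching variant's implementation and widens the error with `Into`. Reduced to its essentials (all names here are illustrative stand-ins, not the crate's types):

struct Pesde;
struct Git;

impl Pesde {
    async fn download(&self) -> Result<Vec<u8>, std::io::Error> {
        Ok(b"pesde".to_vec())
    }
}

impl Git {
    async fn download(&self) -> Result<Vec<u8>, std::io::Error> {
        Ok(b"git".to_vec())
    }
}

enum AnySource {
    Pesde(Pesde),
    Git(Git),
}

impl AnySource {
    async fn download(&self) -> Result<Vec<u8>, Box<dyn std::error::Error>> {
        match self {
            // each variant keeps its own error type; the wrapper converts
            AnySource::Pesde(s) => s.download().await.map_err(Into::into),
            AnySource::Git(s) => s.download().await.map_err(Into::into),
        }
    }
}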

View file

@@ -20,13 +20,17 @@ use crate::{
},
names::{PackageName, PackageNames},
source::{
fs::{store_reader_in_cas, FSEntry, PackageFS},
fs::{store_in_cas, FSEntry, PackageFS},
git_index::GitBasedSource,
DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
},
util::hash,
Project,
};
use fs_err::tokio as fs;
use futures::StreamExt;
// TODO: make more of these functions async
/// The pesde package reference
pub mod pkg_ref;
@@ -186,11 +190,11 @@ impl PackageSource for PesdePackageSource {
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project)
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await
}
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@@ -239,11 +243,11 @@ impl PackageSource for PesdePackageSource {
))
}
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,
reqwest: &reqwest::blocking::Client,
reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError> {
let config = self.config(project).map_err(Box::new)?;
let index_file = project
@@ -253,7 +257,7 @@ impl PackageSource for PesdePackageSource {
.join(pkg_ref.version.to_string())
.join(pkg_ref.target.to_string());
match fs_err::read_to_string(&index_file) {
match fs::read_to_string(&index_file).await {
Ok(s) => {
log::debug!(
"using cached index file for package {}@{} {}",
@@ -280,26 +284,29 @@ impl PackageSource for PesdePackageSource {
request = request.header(AUTHORIZATION, token);
}
let response = request.send()?.error_for_status()?;
let bytes = response.bytes()?;
let response = request.send().await?.error_for_status()?;
let bytes = response.bytes().await?;
let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
let mut archive = tar::Archive::new(&mut decoder);
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
let mut archive = tokio_tar::Archive::new(&mut decoder);
let mut entries = BTreeMap::new();
for entry in archive.entries().map_err(errors::DownloadError::Unpack)? {
let mut entry = entry.map_err(errors::DownloadError::Unpack)?;
while let Some(entry) = archive
.entries()
.map_err(errors::DownloadError::Unpack)?
.next()
.await
.transpose()
.map_err(errors::DownloadError::Unpack)?
{
let path =
RelativePathBuf::from_path(entry.path().map_err(errors::DownloadError::Unpack)?)
.unwrap();
let name = path.file_name().unwrap_or("");
if entry.header().entry_type().is_dir() {
if path
.components()
.next()
.is_some_and(|ct| IGNORED_DIRS.contains(&ct.as_str()))
{
if IGNORED_DIRS.contains(&name) {
continue;
}
@@ -308,11 +315,12 @@ impl PackageSource for PesdePackageSource {
continue;
}
if IGNORED_FILES.contains(&path.as_str()) {
if IGNORED_FILES.contains(&name) {
continue;
}
let hash = store_reader_in_cas(project.cas_dir(), &mut entry)
let hash = store_in_cas(project.cas_dir(), entry, |_| async { Ok(()) })
.await
.map_err(errors::DownloadError::Store)?;
entries.insert(path, FSEntry::File(hash));
}
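
The loop above swaps a blocking `tar::Archive` iterator for a stream: the response bytes feed `async_compression`'s `GzipDecoder` (tokio flavor), `tokio_tar` wraps the decoder, and entries arrive via `StreamExt::next`. A self-contained sketch of the same pipeline (assuming the tokio-tar and async-compression crates this commit pulls in):

use async_compression::tokio::bufread::GzipDecoder;
use futures::StreamExt;
use tokio_tar::Archive;

async fn list_tarball(gzipped: &[u8]) -> std::io::Result<Vec<String>> {
    // &[u8] implements tokio's AsyncBufRead, so it can feed the decoder directly
    let decoder = GzipDecoder::new(gzipped);
    let mut archive = Archive::new(decoder);
    let mut names = Vec::new();
    let mut entries = archive.entries()?;
    // entries is a Stream of io::Result<Entry>; transpose turns
    // Option<Result<_>> into Result<Option<_>> so ? applies per item
    while let Some(entry) = entries.next().await.transpose()? {
        names.push(entry.path()?.display().to_string());
    }
    Ok(names)
}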
@@ -320,10 +328,13 @@ impl PackageSource for PesdePackageSource {
let fs = PackageFS::CAS(entries);
if let Some(parent) = index_file.parent() {
fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
fs::create_dir_all(parent)
.await
.map_err(errors::DownloadError::WriteIndex)?;
}
fs_err::write(&index_file, toml::to_string(&fs)?)
fs::write(&index_file, toml::to_string(&fs)?)
.await
.map_err(errors::DownloadError::WriteIndex)?;
Ok((fs, pkg_ref.target.clone()))

View file

@@ -1,3 +1,4 @@
#![allow(async_fn_in_trait)]
use std::{
collections::BTreeMap,
fmt::{Debug, Display},
@@ -41,12 +42,12 @@ pub trait PackageSource: Debug {
type DownloadError: std::error::Error;
/// Refreshes the source
fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
Ok(())
}
/// Resolves a specifier to a reference
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@@ -54,10 +55,10 @@ pub trait PackageSource: Debug {
) -> Result<ResolveResult<Self::Ref>, Self::ResolveError>;
/// Downloads a package
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,
reqwest: &reqwest::blocking::Client,
reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError>;
}
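
This works because the trait now uses native async-fn-in-trait (stable since Rust 1.75); the `#![allow(async_fn_in_trait)]` added at the top silences the lint about the returned futures' auto traits leaking into the public API. A minimal shape of the same idea (simplified stand-in types):

#![allow(async_fn_in_trait)]

pub trait Refreshable {
    // default async bodies are allowed, mirroring the no-op refresh above
    async fn refresh(&self) -> Result<(), std::io::Error> {
        Ok(())
    }
}

pub struct Noop;
impl Refreshable for Noop {}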

View file

@@ -10,6 +10,7 @@ use crate::{
source::wally::manifest::{Realm, WallyManifest},
Project, LINK_LIB_NO_FILE_FOUND,
};
use fs_err::tokio as fs;
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
@@ -18,11 +19,11 @@ struct SourcemapNode {
file_paths: Vec<RelativePathBuf>,
}
pub(crate) fn find_lib_path(
pub(crate) async fn find_lib_path(
project: &Project,
package_dir: &Path,
) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {
let manifest = project.deser_manifest()?;
let manifest = project.deser_manifest().await?;
let Some(script_path) = manifest
.scripts
@@ -53,16 +54,17 @@ pub(crate) fn find_lib_path(
pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";
pub(crate) fn get_target(
pub(crate) async fn get_target(
project: &Project,
tempdir: &TempDir,
) -> Result<Target, errors::FindLibPathError> {
let lib = find_lib_path(project, tempdir.path())?
let lib = find_lib_path(project, tempdir.path())
.await?
.or_else(|| Some(RelativePathBuf::from(LINK_LIB_NO_FILE_FOUND)));
let build_files = Default::default();
let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
let manifest = fs_err::read_to_string(&manifest)?;
let manifest = fs::read_to_string(&manifest).await?;
let manifest: WallyManifest = toml::from_str(&manifest)?;
Ok(if matches!(manifest.package.realm, Realm::Shared) {

View file

@@ -1,19 +1,8 @@
use std::{
collections::{BTreeMap, VecDeque},
path::PathBuf,
};
use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use tempfile::tempdir;
use crate::{
manifest::target::{Target, TargetKind},
names::PackageNames,
source::{
fs::{store_reader_in_cas, FSEntry, PackageFS},
fs::{store_in_cas, FSEntry, PackageFS},
git_index::GitBasedSource,
traits::PackageSource,
version_id::VersionId,
@@ -23,6 +12,15 @@ use crate::{
util::hash,
Project,
};
use fs_err::tokio as fs;
use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
use tempfile::tempdir;
use tokio::{io::AsyncWriteExt, sync::Mutex};
use tokio_util::compat::FuturesAsyncReadCompatExt;
pub(crate) mod compat_util;
pub(crate) mod manifest;
@@ -86,11 +84,11 @@ impl PackageSource for WallyPackageSource {
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project)
async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project).await
}
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@@ -138,11 +136,11 @@ impl PackageSource for WallyPackageSource {
))
}
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,
reqwest: &reqwest::blocking::Client,
reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError> {
let config = self.config(project).map_err(Box::new)?;
let index_file = project
@@ -151,7 +149,7 @@ impl PackageSource for WallyPackageSource {
.join(pkg_ref.name.escaped())
.join(pkg_ref.version.to_string());
let tempdir = match fs_err::read_to_string(&index_file) {
let tempdir = match fs::read_to_string(&index_file).await {
Ok(s) => {
log::debug!(
"using cached index file for package {}@{}",
@@ -162,9 +160,9 @@ impl PackageSource for WallyPackageSource {
let tempdir = tempdir()?;
let fs = toml::from_str::<PackageFS>(&s)?;
fs.write_to(&tempdir, project.cas_dir(), false)?;
fs.write_to(&tempdir, project.cas_dir(), false).await?;
return Ok((fs, get_target(project, &tempdir)?));
return Ok((fs, get_target(project, &tempdir).await?));
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => tempdir()?,
Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
@@ -190,50 +188,67 @@ impl PackageSource for WallyPackageSource {
request = request.header(AUTHORIZATION, token);
}
let response = request.send()?.error_for_status()?;
let bytes = response.bytes()?;
let mut archive = zip::ZipArchive::new(std::io::Cursor::new(bytes))?;
archive.extract(tempdir.path())?;
let response = request.send().await?.error_for_status()?;
let mut bytes = response.bytes().await?;
let mut entries = BTreeMap::new();
let mut archive = async_zip::tokio::read::seek::ZipFileReader::with_tokio(
std::io::Cursor::new(&mut bytes),
)
.await?;
let mut dir_entries = fs_err::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
while let Some(entry) = dir_entries.pop_front() {
let entry = entry?;
let path =
RelativePathBuf::from_path(entry.path().strip_prefix(tempdir.path())?).unwrap();
for index in 0..archive.file().entries().len() {
let entry = archive.file().entries().get(index).unwrap();
if entry.file_type()?.is_dir() {
if IGNORED_DIRS.contains(&path.as_str()) {
let relative_path = RelativePathBuf::from_path(entry.filename().as_str()?).unwrap();
let path = relative_path.to_path(tempdir.path());
let name = relative_path.file_name().unwrap_or("");
let entry_is_dir = entry.dir()?;
let entry_reader = archive.reader_without_entry(index).await?;
if entry_is_dir {
if IGNORED_DIRS.contains(&name) {
continue;
}
entries.insert(path, FSEntry::Directory);
dir_entries.extend(fs_err::read_dir(entry.path())?);
entries.insert(relative_path, FSEntry::Directory);
fs::create_dir_all(&path).await?;
continue;
}
if IGNORED_FILES.contains(&path.as_str()) {
if IGNORED_FILES.contains(&name) {
continue;
}
let mut file = fs_err::File::open(entry.path())?;
let hash = store_reader_in_cas(project.cas_dir(), &mut file)?;
entries.insert(path, FSEntry::File(hash));
if let Some(parent) = path.parent() {
fs::create_dir_all(parent).await?;
}
let writer = Arc::new(Mutex::new(fs::File::create(&path).await?));
let hash = store_in_cas(project.cas_dir(), entry_reader.compat(), |bytes| {
let writer = writer.clone();
async move { writer.lock().await.write_all(&bytes).await }
})
.await?;
entries.insert(relative_path, FSEntry::File(hash));
}
let fs = PackageFS::CAS(entries);
if let Some(parent) = index_file.parent() {
fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
fs::create_dir_all(parent)
.await
.map_err(errors::DownloadError::WriteIndex)?;
}
fs_err::write(&index_file, toml::to_string(&fs)?)
fs::write(&index_file, toml::to_string(&fs)?)
.await
.map_err(errors::DownloadError::WriteIndex)?;
Ok((fs, get_target(project, &tempdir)?))
Ok((fs, get_target(project, &tempdir).await?))
}
}
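
The zip path above replaces `zip::ZipArchive::extract` with `async_zip`'s seekable reader: entries are enumerated by index, directories created eagerly, and each file streamed once through the CAS while being teed to disk through a shared `tokio::sync::Mutex<File>`. A reduced sketch of the read side (same crates as the diff; error handling simplified):

use async_zip::tokio::read::seek::ZipFileReader;
use tokio::io::AsyncReadExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;

async fn dump_zip(bytes: Vec<u8>) -> Result<(), Box<dyn std::error::Error>> {
    let mut zip = ZipFileReader::with_tokio(std::io::Cursor::new(bytes)).await?;
    for index in 0..zip.file().entries().len() {
        let entry = zip.file().entries().get(index).unwrap();
        let name = entry.filename().as_str()?.to_string();
        let is_dir = entry.dir()?;
        if is_dir {
            continue;
        }
        let mut contents = Vec::new();
        // the entry reader speaks futures-io; .compat() bridges it to tokio
        zip.reader_without_entry(index)
            .await?
            .compat()
            .read_to_end(&mut contents)
            .await?;
        println!("{name}: {} bytes", contents.len());
    }
    Ok(())
}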
@@ -320,7 +335,7 @@ pub mod errors {
/// Error decompressing archive
#[error("error decompressing archive")]
Decompress(#[from] zip::result::ZipError),
Decompress(#[from] async_zip::error::ZipError),
/// Error interacting with the filesystem
#[error("error interacting with the filesystem")]

View file

@@ -7,9 +7,11 @@ use crate::{
},
Project, DEFAULT_INDEX_NAME,
};
use futures::StreamExt;
use relative_path::RelativePathBuf;
use reqwest::blocking::Client;
use reqwest::Client;
use std::collections::BTreeMap;
use tokio::pin;
/// The workspace package reference
pub mod pkg_ref;
@@ -27,12 +29,12 @@ impl PackageSource for WorkspacePackageSource {
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
// no-op
Ok(())
}
fn resolve(
async fn resolve(
&self,
specifier: &Self::Specifier,
project: &Project,
@@ -45,7 +47,10 @@ impl PackageSource for WorkspacePackageSource {
.unwrap_or(&project.package_dir);
let target = specifier.target.unwrap_or(package_target);
for (path, manifest) in project.workspace_members(workspace_dir)? {
let members = project.workspace_members(workspace_dir).await?;
pin!(members);
while let Some((path, manifest)) = members.next().await.transpose()? {
if manifest.name == specifier.name && manifest.target.kind() == target {
break 'finder (path, manifest);
}
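
`workspace_members` now yields a stream rather than a collected Vec, so the caller pins it on the stack with `tokio::pin!` and drains it with `StreamExt::next`, as the loop above shows. The pattern in miniature (the `async_stream` crate is assumed here purely to fabricate a demo stream):

use futures::StreamExt;
use tokio::pin;

async fn find_first_match(limit: u32) -> Option<u32> {
    let members = async_stream::stream! {
        for n in 0..limit {
            yield n;
        }
    };
    // the stream is !Unpin; pin! lets .next() borrow it on the stack
    pin!(members);
    while let Some(n) = members.next().await {
        if n % 7 == 3 {
            return Some(n); // mirrors the `break 'finder` above
        }
    }
    None
}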
@@ -119,7 +124,7 @@ impl PackageSource for WorkspacePackageSource {
))
}
fn download(
async fn download(
&self,
pkg_ref: &Self::Ref,
project: &Project,