feat: begin switch to async

daimond113 2024-11-05 20:44:24 +01:00
parent 37072eda24
commit 2b0f29a2f9
No known key found for this signature in database
GPG key ID: 3A8ECE51328B513C
50 changed files with 1259 additions and 1044 deletions

Cargo.lock (generated)

@ -1,6 +1,6 @@
# This file is automatically @generated by Cargo. # This file is automatically @generated by Cargo.
# It is not intended for manual editing. # It is not intended for manual editing.
version = 3 version = 4
[[package]] [[package]]
name = "actix-codec" name = "actix-codec"
@ -389,15 +389,6 @@ version = "1.0.91"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8" checksum = "c042108f3ed77fd83760a5fd79b53be043192bb3b9dba91d8c574c0ada7850c8"
[[package]]
name = "arbitrary"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110"
dependencies = [
"derive_arbitrary",
]
[[package]] [[package]]
name = "arc-swap" name = "arc-swap"
version = "1.7.1" version = "1.7.1"
@ -428,6 +419,45 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
] ]
[[package]]
name = "async-compression"
version = "0.4.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0cb8f1d480b0ea3783ab015936d2a55c87e219676f0c0b7dec61494043f21857"
dependencies = [
"deflate64",
"flate2",
"futures-core",
"futures-io",
"memchr",
"pin-project-lite",
"tokio",
]
[[package]]
name = "async-executor"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7"
dependencies = [
"async-task",
"concurrent-queue",
"fastrand",
"futures-lite",
"slab",
]
[[package]]
name = "async-fs"
version = "2.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebcd09b382f40fcd159c2d695175b2ae620ffa5f3bd6f664131efff4e8b9e04a"
dependencies = [
"async-lock",
"blocking",
"futures-lite",
]
[[package]] [[package]]
name = "async-io" name = "async-io"
version = "2.3.4" version = "2.3.4"
@ -506,6 +536,28 @@ dependencies = [
"windows-sys 0.59.0", "windows-sys 0.59.0",
] ]
[[package]]
name = "async-stream"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
dependencies = [
"async-stream-impl",
"futures-core",
"pin-project-lite",
]
[[package]]
name = "async-stream-impl"
version = "0.3.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]] [[package]]
name = "async-task" name = "async-task"
version = "4.7.1" version = "4.7.1"
@ -523,6 +575,21 @@ dependencies = [
"syn 2.0.85", "syn 2.0.85",
] ]
[[package]]
name = "async_zip"
version = "0.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "00b9f7252833d5ed4b00aa9604b563529dd5e11de9c23615de2dcdf91eb87b52"
dependencies = [
"async-compression",
"crc32fast",
"futures-lite",
"pin-project",
"thiserror",
"tokio",
"tokio-util",
]
[[package]] [[package]]
name = "atomic-waker" name = "atomic-waker"
version = "1.1.2" version = "1.1.2"
@ -688,27 +755,6 @@ dependencies = [
"bytes", "bytes",
] ]
[[package]]
name = "bzip2"
version = "0.4.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bdb116a6ef3f6c3698828873ad02c3014b3c85cadb88496095628e3ef1e347f8"
dependencies = [
"bzip2-sys",
"libc",
]
[[package]]
name = "bzip2-sys"
version = "0.1.11+1.0.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
dependencies = [
"cc",
"libc",
"pkg-config",
]
[[package]] [[package]]
name = "cbc" name = "cbc"
version = "0.1.2" version = "0.1.2"
@ -923,21 +969,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "crc"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69e6e4d7b33a94f0991c26729976b10ebde1d34c3ee82408fb536164fa10d636"
dependencies = [
"crc-catalog",
]
[[package]]
name = "crc-catalog"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5"
[[package]] [[package]]
name = "crc32fast" name = "crc32fast"
version = "1.4.2" version = "1.4.2"
@ -1125,17 +1156,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "derive_arbitrary"
version = "1.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]] [[package]]
name = "derive_more" name = "derive_more"
version = "0.99.18" version = "0.99.18"
@ -1202,17 +1222,6 @@ dependencies = [
"windows-sys 0.48.0", "windows-sys 0.48.0",
] ]
[[package]]
name = "displaydoc"
version = "0.2.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]] [[package]]
name = "dotenvy" name = "dotenvy"
version = "0.15.7" version = "0.15.7"
@ -1433,6 +1442,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8bb60e7409f34ef959985bc9d9c5ee8f5db24ee46ed9775850548021710f807f" checksum = "8bb60e7409f34ef959985bc9d9c5ee8f5db24ee46ed9775850548021710f807f"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"tokio",
] ]
[[package]] [[package]]
@ -1867,12 +1877,14 @@ checksum = "8e0eb9efdf96c35c0bed7596d1bef2d4ce6360a1d09738001f9d3e402aa7ba3e"
dependencies = [ dependencies = [
"bytes", "bytes",
"crc32fast", "crc32fast",
"crossbeam-channel",
"flate2", "flate2",
"gix-hash", "gix-hash",
"gix-trace", "gix-trace",
"gix-utils", "gix-utils",
"libc", "libc",
"once_cell", "once_cell",
"parking_lot",
"prodash", "prodash",
"sha1_smol", "sha1_smol",
"thiserror", "thiserror",
@ -2936,6 +2948,7 @@ dependencies = [
"security-framework 2.11.1", "security-framework 2.11.1",
"security-framework 3.0.0", "security-framework 3.0.0",
"windows-sys 0.59.0", "windows-sys 0.59.0",
"zbus",
] ]
[[package]] [[package]]
@ -3008,7 +3021,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d"
dependencies = [ dependencies = [
"bitflags 2.6.0", "bitflags 2.6.0",
"libc", "libc",
"redox_syscall", "redox_syscall 0.5.7",
] ]
[[package]] [[package]]
@ -3070,12 +3083,6 @@ dependencies = [
"scopeguard", "scopeguard",
] ]
[[package]]
name = "lockfree-object-pool"
version = "0.1.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9374ef4228402d4b7e403e5838cb880d9ee663314b0a900d5a6aabf0c213552e"
[[package]] [[package]]
name = "log" name = "log"
version = "0.4.22" version = "0.4.22"
@ -3097,16 +3104,6 @@ version = "0.11.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5" checksum = "75761162ae2b0e580d7e7c390558127e5f01b4194debd6221fd8c207fc80e3f5"
[[package]]
name = "lzma-rs"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "297e814c836ae64db86b36cf2a557ba54368d03f6afcd7d947c266692f71115e"
dependencies = [
"byteorder",
"crc",
]
[[package]] [[package]]
name = "maybe-async" name = "maybe-async"
version = "0.2.10" version = "0.2.10"
@ -3517,7 +3514,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8"
dependencies = [ dependencies = [
"cfg-if", "cfg-if",
"libc", "libc",
"redox_syscall", "redox_syscall 0.5.7",
"smallvec", "smallvec",
"windows-targets 0.52.6", "windows-targets 0.52.6",
] ]
@ -3540,16 +3537,6 @@ version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d61c5ce1153ab5b689d0c074c4e7fc613e942dfb7dd9eea5ab202d2ad91fe361" checksum = "d61c5ce1153ab5b689d0c074c4e7fc613e942dfb7dd9eea5ab202d2ad91fe361"
[[package]]
name = "pbkdf2"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
dependencies = [
"digest",
"hmac",
]
[[package]] [[package]]
name = "percent-encoding" name = "percent-encoding"
version = "2.3.1" version = "2.3.1"
@ -3561,13 +3548,16 @@ name = "pesde"
version = "0.5.0-rc.7" version = "0.5.0-rc.7"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-compression",
"async-stream",
"async_zip",
"chrono", "chrono",
"clap", "clap",
"colored", "colored",
"dirs", "dirs",
"flate2",
"fs-err", "fs-err",
"full_moon", "full_moon",
"futures",
"git2", "git2",
"gix", "gix",
"glob", "glob",
@ -3586,15 +3576,15 @@ dependencies = [
"serde_json", "serde_json",
"serde_with", "serde_with",
"sha2", "sha2",
"tar",
"tempfile", "tempfile",
"thiserror", "thiserror",
"threadpool", "tokio",
"tokio-tar",
"tokio-util",
"toml", "toml",
"toml_edit", "toml_edit",
"url", "url",
"winreg", "winreg",
"zip",
] ]
[[package]] [[package]]
@ -3605,11 +3595,11 @@ dependencies = [
"actix-governor", "actix-governor",
"actix-multipart", "actix-multipart",
"actix-web", "actix-web",
"async-compression",
"chrono", "chrono",
"constant_time_eq", "constant_time_eq",
"convert_case 0.6.0", "convert_case 0.6.0",
"dotenvy", "dotenvy",
"flate2",
"fs-err", "fs-err",
"futures", "futures",
"git2", "git2",
@ -3628,13 +3618,34 @@ dependencies = [
"serde_yaml", "serde_yaml",
"sha2", "sha2",
"tantivy", "tantivy",
"tar",
"tempfile", "tempfile",
"thiserror", "thiserror",
"tokio",
"tokio-tar",
"toml", "toml",
"url", "url",
] ]
[[package]]
name = "pin-project"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]] [[package]]
name = "pin-project-lite" name = "pin-project-lite"
version = "0.2.14" version = "0.2.14"
@ -3889,6 +3900,15 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "redox_syscall"
version = "0.3.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29"
dependencies = [
"bitflags 1.3.2",
]
[[package]] [[package]]
name = "redox_syscall" name = "redox_syscall"
version = "0.5.7" version = "0.5.7"
@ -4536,12 +4556,6 @@ dependencies = [
"libc", "libc",
] ]
[[package]]
name = "simd-adler32"
version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe"
[[package]] [[package]]
name = "sketches-ddsketch" name = "sketches-ddsketch"
version = "0.2.2" version = "0.2.2"
@ -4818,17 +4832,6 @@ dependencies = [
"serde", "serde",
] ]
[[package]]
name = "tar"
version = "0.4.42"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ff6c40d3aedb5e06b57c6f669ad17ab063dd1e63d977c6a88e7f4dfa4f04020"
dependencies = [
"filetime",
"libc",
"xattr",
]
[[package]] [[package]]
name = "tempfile" name = "tempfile"
version = "3.13.0" version = "3.13.0"
@ -4881,15 +4884,6 @@ dependencies = [
"once_cell", "once_cell",
] ]
[[package]]
name = "threadpool"
version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa"
dependencies = [
"num_cpus",
]
[[package]] [[package]]
name = "time" name = "time"
version = "0.3.36" version = "0.3.36"
@ -4938,9 +4932,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]] [[package]]
name = "tokio" name = "tokio"
version = "1.40.0" version = "1.41.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998" checksum = "145f3413504347a2be84393cc8a7d2fb4d863b375909ea59f2158261aa258bbb"
dependencies = [ dependencies = [
"backtrace", "backtrace",
"bytes", "bytes",
@ -4950,9 +4944,21 @@ dependencies = [
"pin-project-lite", "pin-project-lite",
"signal-hook-registry", "signal-hook-registry",
"socket2", "socket2",
"tokio-macros",
"windows-sys 0.52.0", "windows-sys 0.52.0",
] ]
[[package]]
name = "tokio-macros"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]] [[package]]
name = "tokio-native-tls" name = "tokio-native-tls"
version = "0.3.1" version = "0.3.1"
@ -4974,6 +4980,32 @@ dependencies = [
"tokio", "tokio",
] ]
[[package]]
name = "tokio-stream"
version = "0.1.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1"
dependencies = [
"futures-core",
"pin-project-lite",
"tokio",
]
[[package]]
name = "tokio-tar"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9d5714c010ca3e5c27114c1cdeb9d14641ace49874aa5626d7149e47aedace75"
dependencies = [
"filetime",
"futures-core",
"libc",
"redox_syscall 0.3.5",
"tokio",
"tokio-stream",
"xattr",
]
[[package]] [[package]]
name = "tokio-util" name = "tokio-util"
version = "0.7.12" version = "0.7.12"
@ -4982,6 +5014,7 @@ checksum = "61e7c3654c13bcd040d4a03abee2c75b1d14a37b423cf5a813ceae1cc903ec6a"
dependencies = [ dependencies = [
"bytes", "bytes",
"futures-core", "futures-core",
"futures-io",
"futures-sink", "futures-sink",
"pin-project-lite", "pin-project-lite",
"tokio", "tokio",
@ -5618,9 +5651,15 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb97012beadd29e654708a0fdb4c84bc046f537aecfde2c3ee0a9e4b4d48c725" checksum = "bb97012beadd29e654708a0fdb4c84bc046f537aecfde2c3ee0a9e4b4d48c725"
dependencies = [ dependencies = [
"async-broadcast", "async-broadcast",
"async-executor",
"async-fs",
"async-io",
"async-lock",
"async-process", "async-process",
"async-recursion", "async-recursion",
"async-task",
"async-trait", "async-trait",
"blocking",
"enumflags2", "enumflags2",
"event-listener", "event-listener",
"futures-core", "futures-core",
@ -5693,63 +5732,6 @@ name = "zeroize"
version = "1.8.1" version = "1.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde" checksum = "ced3678a2879b30306d323f4542626697a464a97c0a07c9aebf7ebca65cd4dde"
dependencies = [
"zeroize_derive",
]
[[package]]
name = "zeroize_derive"
version = "1.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.85",
]
[[package]]
name = "zip"
version = "2.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc5e4288ea4057ae23afc69a4472434a87a2495cafce6632fd1c4ec9f5cf3494"
dependencies = [
"aes",
"arbitrary",
"bzip2",
"constant_time_eq",
"crc32fast",
"crossbeam-utils",
"deflate64",
"displaydoc",
"flate2",
"hmac",
"indexmap 2.6.0",
"lzma-rs",
"memchr",
"pbkdf2",
"rand",
"sha1",
"thiserror",
"time",
"zeroize",
"zopfli",
"zstd",
]
[[package]]
name = "zopfli"
version = "0.8.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5019f391bac5cf252e93bbcc53d039ffd62c7bfb7c150414d61369afe57e946"
dependencies = [
"bumpalo",
"crc32fast",
"lockfree-object-pool",
"log",
"once_cell",
"simd-adler32",
]
[[package]] [[package]]
name = "zstd" name = "zstd"


@ -27,9 +27,12 @@ bin = [
"gix/worktree-mutation", "gix/worktree-mutation",
"serde_json", "serde_json",
"winreg", "winreg",
"fs-err/expose_original_error" "fs-err/expose_original_error",
"tokio/rt",
"tokio/rt-multi-thread",
"tokio/macros",
] ]
wally-compat = ["zip", "serde_json"] wally-compat = ["async_zip", "serde_json"]
patches = ["git2"] patches = ["git2"]
version-management = ["bin"] version-management = ["bin"]
@ -45,33 +48,36 @@ uninlined_format_args = "warn"
serde = { version = "1.0.213", features = ["derive"] } serde = { version = "1.0.213", features = ["derive"] }
toml = "0.8.19" toml = "0.8.19"
serde_with = "3.11.0" serde_with = "3.11.0"
gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials"] } gix = { version = "0.67.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.23", features = ["serde"] } semver = { version = "1.0.23", features = ["serde"] }
reqwest = { version = "0.12.8", default-features = false, features = ["rustls-tls", "blocking"] } reqwest = { version = "0.12.8", default-features = false, features = ["rustls-tls"] }
tar = "0.4.42" tokio-tar = "0.3.1"
flate2 = "1.0.34" async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
pathdiff = "0.2.2" pathdiff = "0.2.2"
relative-path = { version = "1.9.3", features = ["serde"] } relative-path = { version = "1.9.3", features = ["serde"] }
log = "0.4.22" log = "0.4.22"
thiserror = "1.0.65" thiserror = "1.0.65"
threadpool = "1.8.1" tokio = "1.41.0"
tokio-util = "0.7.12"
async-stream = "0.3.6"
futures = "0.3.31"
full_moon = { version = "1.1.0", features = ["luau"] } full_moon = { version = "1.1.0", features = ["luau"] }
url = { version = "2.5.2", features = ["serde"] } url = { version = "2.5.2", features = ["serde"] }
chrono = { version = "0.4.38", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
sha2 = "0.10.8" sha2 = "0.10.8"
tempfile = "3.13.0" tempfile = "3.13.0"
glob = "0.3.1" glob = "0.3.1"
fs-err = "3.0.0" fs-err = { version = "3.0.0", features = ["tokio"] }
# TODO: remove this when gitoxide adds support for: committing, pushing, adding # TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.19.0", optional = true } git2 = { version = "0.19.0", optional = true }
zip = { version = "2.2.0", optional = true } async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
serde_json = { version = "1.0.132", optional = true } serde_json = { version = "1.0.132", optional = true }
anyhow = { version = "1.0.91", optional = true } anyhow = { version = "1.0.91", optional = true }
open = { version = "5.3.0", optional = true } open = { version = "5.3.0", optional = true }
keyring = { version = "3.5.0", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true } keyring = { version = "3.5.0", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
colored = { version = "2.1.0", optional = true } colored = { version = "2.1.0", optional = true }
toml_edit = { version = "0.22.22", optional = true } toml_edit = { version = "0.22.22", optional = true }
clap = { version = "4.5.20", features = ["derive"], optional = true } clap = { version = "4.5.20", features = ["derive"], optional = true }
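With `threadpool` dropped in favor of `tokio` (features `rt`, `rt-multi-thread`, `macros`), the binary can boot a multi-threaded runtime through the `#[tokio::main]` macro. A minimal sketch of the entrypoint this enables; the actual CLI wiring is not part of this hunk:

```rust
// Hypothetical entrypoint: #[tokio::main] expands to a main() that
// builds a multi-threaded runtime and block_on()s the async body.
#[tokio::main]
async fn main() {
    // async download/link work replaces the old threadpool flow here
}
```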


@ -17,8 +17,9 @@ semver = "1.0.23"
chrono = { version = "0.4.38", features = ["serde"] } chrono = { version = "0.4.38", features = ["serde"] }
url = "2.5.2" url = "2.5.2"
futures = "0.3.31" futures = "0.3.31"
tokio = "1.41.0"
tempfile = "3.13.0" tempfile = "3.13.0"
fs-err = "3.0.0" fs-err = { version = "3.0.0", features = ["tokio"] }
git2 = "0.19.0" git2 = "0.19.0"
gix = { version = "0.67.0", default-features = false, features = [ gix = { version = "0.67.0", default-features = false, features = [
@ -37,8 +38,8 @@ rusty-s3 = "0.5.0"
reqwest = { version = "0.12.8", features = ["json", "rustls-tls"] } reqwest = { version = "0.12.8", features = ["json", "rustls-tls"] }
constant_time_eq = "0.3.1" constant_time_eq = "0.3.1"
tar = "0.4.42" tokio-tar = "0.3.1"
flate2 = "1.0.34" async-compression = { version = "0.4.17", features = ["tokio", "gzip"] }
log = "0.4.22" log = "0.4.22"
pretty_env_logger = "0.5.0" pretty_env_logger = "0.5.0"


@ -72,7 +72,7 @@ pub async fn get_package_version(
let (scope, name_part) = name.as_str(); let (scope, name_part) = name.as_str();
let entries: IndexFile = { let entries: IndexFile = {
let source = app_state.source.lock().unwrap(); let source = app_state.source.lock().await;
match source.read_file([scope, name_part], &app_state.project, None)? { match source.read_file([scope, name_part], &app_state.project, None)? {
Some(versions) => toml::de::from_str(&versions)?, Some(versions) => toml::de::from_str(&versions)?,
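The `app_state.source` guard here moves from `std::sync::Mutex` to `tokio::sync::Mutex` (see the `AppState` change further down). A minimal sketch of the difference, with a stand-in payload type:

```rust
use tokio::sync::Mutex;

struct AppState {
    source: Mutex<u32>, // stand-in for the real PesdePackageSource
}

async fn read_source(state: &AppState) -> u32 {
    // lock() is awaited instead of unwrapped: contention suspends the
    // task rather than blocking an executor thread, and the tokio
    // mutex has no poisoning, so there is no Result to unwrap
    *state.source.lock().await
}
```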


@ -17,7 +17,7 @@ pub async fn get_package_versions(
let (scope, name_part) = name.as_str(); let (scope, name_part) = name.as_str();
let source = app_state.source.lock().unwrap(); let source = app_state.source.lock().await;
let versions: IndexFile = let versions: IndexFile =
match source.read_file([scope, name_part], &app_state.project, None)? { match source.read_file([scope, name_part], &app_state.project, None)? {
Some(versions) => toml::de::from_str(&versions)?, Some(versions) => toml::de::from_str(&versions)?,


@ -1,17 +1,3 @@
use actix_multipart::Multipart;
use actix_web::{web, HttpResponse, Responder};
use convert_case::{Case, Casing};
use flate2::read::GzDecoder;
use futures::{future::join_all, join, StreamExt};
use git2::{Remote, Repository, Signature};
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
io::{Cursor, Read, Write},
};
use tar::Archive;
use crate::{ use crate::{
auth::UserId, auth::UserId,
benv, benv,
@ -20,6 +6,13 @@ use crate::{
storage::StorageImpl, storage::StorageImpl,
AppState, AppState,
}; };
use actix_multipart::Multipart;
use actix_web::{web, HttpResponse, Responder};
use async_compression::Level;
use convert_case::{Case, Casing};
use fs_err::tokio as fs;
use futures::{future::join_all, join, StreamExt};
use git2::{Remote, Repository, Signature};
use pesde::{ use pesde::{
manifest::Manifest, manifest::Manifest,
source::{ source::{
@ -31,6 +24,13 @@ use pesde::{
}, },
MANIFEST_FILE_NAME, MANIFEST_FILE_NAME,
}; };
use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
io::{Cursor, Write},
};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
fn signature<'a>() -> Signature<'a> { fn signature<'a>() -> Signature<'a> {
Signature::now( Signature::now(
@ -71,18 +71,16 @@ pub async fn publish_package(
mut body: Multipart, mut body: Multipart,
user_id: web::ReqData<UserId>, user_id: web::ReqData<UserId>,
) -> Result<impl Responder, Error> { ) -> Result<impl Responder, Error> {
let max_archive_size = { let source = app_state.source.lock().await;
let source = app_state.source.lock().unwrap(); source.refresh(&app_state.project).await.map_err(Box::new)?;
source.refresh(&app_state.project).map_err(Box::new)?; let config = source.config(&app_state.project)?;
source.config(&app_state.project)?.max_archive_size
};
let bytes = body let bytes = body
.next() .next()
.await .await
.ok_or(Error::InvalidArchive)? .ok_or(Error::InvalidArchive)?
.map_err(|_| Error::InvalidArchive)? .map_err(|_| Error::InvalidArchive)?
.bytes(max_archive_size) .bytes(config.max_archive_size)
.await .await
.map_err(|_| Error::InvalidArchive)? .map_err(|_| Error::InvalidArchive)?
.map_err(|_| Error::InvalidArchive)?; .map_err(|_| Error::InvalidArchive)?;
@ -90,10 +88,10 @@ pub async fn publish_package(
let package_dir = tempfile::tempdir()?; let package_dir = tempfile::tempdir()?;
{ {
let mut decoder = GzDecoder::new(Cursor::new(&bytes)); let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(Cursor::new(&bytes));
let mut archive = Archive::new(&mut decoder); let mut archive = tokio_tar::Archive::new(&mut decoder);
archive.unpack(package_dir.path())?; archive.unpack(package_dir.path()).await?;
} }
let mut manifest = None::<Manifest>; let mut manifest = None::<Manifest>;
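The decode-and-unpack step keeps the same shape, but `flate2::read::GzDecoder` + `tar::Archive` become their async counterparts. A minimal, self-contained sketch of the new pattern (function name and signature are illustrative):

```rust
use async_compression::tokio::bufread::GzipDecoder;
use std::io::Cursor;

// Unpack a gzipped tarball held in memory; tokio implements
// AsyncBufRead for Cursor, which GzipDecoder consumes, and
// tokio_tar::Archive drives the decoder as an AsyncRead.
async fn unpack(bytes: &[u8], dest: &std::path::Path) -> std::io::Result<()> {
    let mut decoder = GzipDecoder::new(Cursor::new(bytes));
    let mut archive = tokio_tar::Archive::new(&mut decoder);
    archive.unpack(dest).await
}
```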
@ -101,15 +99,15 @@ pub async fn publish_package(
let mut docs = BTreeSet::new(); let mut docs = BTreeSet::new();
let mut docs_pages = HashMap::new(); let mut docs_pages = HashMap::new();
for entry in fs_err::read_dir(package_dir.path())? { let mut read_dir = fs::read_dir(package_dir.path()).await?;
let entry = entry?; while let Some(entry) = read_dir.next_entry().await? {
let file_name = entry let file_name = entry
.file_name() .file_name()
.to_str() .to_str()
.ok_or(Error::InvalidArchive)? .ok_or(Error::InvalidArchive)?
.to_string(); .to_string();
if entry.file_type()?.is_dir() { if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_str()) { if IGNORED_DIRS.contains(&file_name.as_str()) {
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive);
} }
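`tokio::fs::ReadDir` (and the `fs_err::tokio` wrapper around it) is not an `Iterator`, which is why the `for` loop above becomes an explicit `next_entry()` loop. A minimal sketch of the iteration pattern, assuming fs-err's `tokio` feature:

```rust
use fs_err::tokio as fs;

// Collect entry names from a directory with the async read_dir API.
async fn entry_names(dir: &std::path::Path) -> std::io::Result<Vec<String>> {
    let mut names = Vec::new();
    let mut read_dir = fs::read_dir(dir).await?;
    while let Some(entry) = read_dir.next_entry().await? {
        names.push(entry.file_name().to_string_lossy().into_owned());
    }
    Ok(names)
}
```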
@ -117,23 +115,22 @@ pub async fn publish_package(
if file_name == "docs" { if file_name == "docs" {
let mut stack = vec![( let mut stack = vec![(
BTreeSet::new(), BTreeSet::new(),
fs_err::read_dir(entry.path())?, fs::read_dir(entry.path()).await?,
None::<DocEntryInfo>, None::<DocEntryInfo>,
)]; )];
'outer: while let Some((set, iter, category_info)) = stack.last_mut() { 'outer: while let Some((set, iter, category_info)) = stack.last_mut() {
for entry in iter { while let Some(entry) = iter.next_entry().await? {
let entry = entry?;
let file_name = entry let file_name = entry
.file_name() .file_name()
.to_str() .to_str()
.ok_or(Error::InvalidArchive)? .ok_or(Error::InvalidArchive)?
.to_string(); .to_string();
if entry.file_type()?.is_dir() { if entry.file_type().await?.is_dir() {
stack.push(( stack.push((
BTreeSet::new(), BTreeSet::new(),
fs_err::read_dir(entry.path())?, fs::read_dir(entry.path()).await?,
Some(DocEntryInfo { Some(DocEntryInfo {
label: Some(file_name.to_case(Case::Title)), label: Some(file_name.to_case(Case::Title)),
..Default::default() ..Default::default()
@ -143,7 +140,7 @@ pub async fn publish_package(
} }
if file_name == "_category_.json" { if file_name == "_category_.json" {
let info = fs_err::read_to_string(entry.path())?; let info = fs::read_to_string(entry.path()).await?;
let mut info: DocEntryInfo = serde_json::from_str(&info)?; let mut info: DocEntryInfo = serde_json::from_str(&info)?;
let old_info = category_info.take(); let old_info = category_info.take();
info.label = info.label.or(old_info.and_then(|i| i.label)); info.label = info.label.or(old_info.and_then(|i| i.label));
@ -155,16 +152,16 @@ pub async fn publish_package(
continue; continue;
}; };
let content = fs_err::read_to_string(entry.path())?; let content = fs::read_to_string(entry.path()).await?;
let content = content.trim(); let content = content.trim();
let hash = format!("{:x}", Sha256::digest(content.as_bytes())); let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
let mut gz = flate2::read::GzEncoder::new( let mut gz = async_compression::tokio::bufread::GzipEncoder::with_quality(
Cursor::new(content.as_bytes().to_vec()), Cursor::new(content.as_bytes().to_vec()),
flate2::Compression::best(), Level::Best,
); );
let mut bytes = vec![]; let mut bytes = vec![];
gz.read_to_end(&mut bytes)?; gz.read_to_end(&mut bytes).await?;
docs_pages.insert(hash.to_string(), bytes); docs_pages.insert(hash.to_string(), bytes);
let mut lines = content.lines().peekable(); let mut lines = content.lines().peekable();
@ -245,7 +242,7 @@ pub async fn publish_package(
} }
if file_name == MANIFEST_FILE_NAME { if file_name == MANIFEST_FILE_NAME {
let content = fs_err::read_to_string(entry.path())?; let content = fs::read_to_string(entry.path()).await?;
manifest = Some(toml::de::from_str(&content)?); manifest = Some(toml::de::from_str(&content)?);
} else if file_name } else if file_name
@ -258,12 +255,12 @@ pub async fn publish_package(
return Err(Error::InvalidArchive); return Err(Error::InvalidArchive);
} }
let file = fs_err::File::open(entry.path())?; let mut file = fs::File::open(entry.path()).await?;
let mut gz = flate2::read::GzEncoder::new(file, flate2::Compression::best()); let mut gz = async_compression::tokio::write::GzipEncoder::new(vec![]);
let mut bytes = vec![]; tokio::io::copy(&mut file, &mut gz).await?;
gz.read_to_end(&mut bytes)?; gz.shutdown().await?;
readme = Some(bytes); readme = Some(gz.into_inner());
} }
} }
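Note the write-side encoder pattern adopted here: data is copied into a `GzipEncoder` wrapping a `Vec<u8>`, and `shutdown()` must run before `into_inner()` so the trailing gzip frame is flushed. A minimal sketch:

```rust
use async_compression::tokio::write::GzipEncoder;
use tokio::io::AsyncWriteExt;

// Compress anything readable into an in-memory gzip buffer.
async fn gzip(mut reader: impl tokio::io::AsyncRead + Unpin) -> std::io::Result<Vec<u8>> {
    let mut gz = GzipEncoder::new(Vec::new());
    tokio::io::copy(&mut reader, &mut gz).await?;
    gz.shutdown().await?; // flushes the gzip trailer
    Ok(gz.into_inner())
}
```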
@ -272,10 +269,6 @@ pub async fn publish_package(
}; };
{ {
let source = app_state.source.lock().unwrap();
source.refresh(&app_state.project).map_err(Box::new)?;
let config = source.config(&app_state.project)?;
let dependencies = manifest let dependencies = manifest
.all_dependencies() .all_dependencies()
.map_err(|_| Error::InvalidArchive)?; .map_err(|_| Error::InvalidArchive)?;


@ -59,7 +59,7 @@ pub async fn search_packages(
) )
.unwrap(); .unwrap();
let source = app_state.source.lock().unwrap(); let source = app_state.source.lock().await;
let top_docs = top_docs let top_docs = top_docs
.into_iter() .into_iter()


@ -5,13 +5,13 @@ use actix_web::{
rt::System, rt::System,
web, App, HttpServer, web, App, HttpServer,
}; };
use fs_err::tokio as fs;
use log::info; use log::info;
use std::{env::current_dir, path::PathBuf, sync::Mutex};
use pesde::{ use pesde::{
source::{pesde::PesdePackageSource, traits::PackageSource}, source::{pesde::PesdePackageSource, traits::PackageSource},
AuthConfig, Project, AuthConfig, Project,
}; };
use std::{env::current_dir, path::PathBuf};
use crate::{ use crate::{
auth::{get_auth_from_env, Auth, UserIdExtractor}, auth::{get_auth_from_env, Auth, UserIdExtractor},
@ -38,13 +38,13 @@ pub fn make_reqwest() -> reqwest::Client {
} }
pub struct AppState { pub struct AppState {
pub source: Mutex<PesdePackageSource>, pub source: tokio::sync::Mutex<PesdePackageSource>,
pub project: Project, pub project: Project,
pub storage: Storage, pub storage: Storage,
pub auth: Auth, pub auth: Auth,
pub search_reader: tantivy::IndexReader, pub search_reader: tantivy::IndexReader,
pub search_writer: Mutex<tantivy::IndexWriter>, pub search_writer: std::sync::Mutex<tantivy::IndexWriter>,
} }
#[macro_export] #[macro_export]
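`AppState` now mixes the two mutex kinds deliberately: the package source guard is held across `.await` points, while tantivy's `IndexWriter` is a synchronous API locked only for short critical sections. A sketch of the split, with stand-in payload types:

```rust
struct AppState {
    // async mutex: the guard may live across .await points
    source: tokio::sync::Mutex<()>,
    // std mutex: fine for a blocking writer, as long as the
    // guard is never held across an .await
    search_writer: std::sync::Mutex<()>,
}
```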
@ -87,7 +87,7 @@ async fn run() -> std::io::Result<()> {
let cwd = current_dir().unwrap(); let cwd = current_dir().unwrap();
let data_dir = let data_dir =
PathBuf::from(benv!("DATA_DIR" => "{CWD}/data").replace("{CWD}", cwd.to_str().unwrap())); PathBuf::from(benv!("DATA_DIR" => "{CWD}/data").replace("{CWD}", cwd.to_str().unwrap()));
fs_err::create_dir_all(&data_dir).unwrap(); fs::create_dir_all(&data_dir).await.unwrap();
let project = Project::new( let project = Project::new(
&cwd, &cwd,
@ -100,7 +100,10 @@ async fn run() -> std::io::Result<()> {
})), })),
); );
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap()); let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
source.refresh(&project).expect("failed to refresh source"); source
.refresh(&project)
.await
.expect("failed to refresh source");
let (search_reader, search_writer) = make_search(&project, &source); let (search_reader, search_writer) = make_search(&project, &source);
@ -116,11 +119,11 @@ async fn run() -> std::io::Result<()> {
info!("auth: {auth}"); info!("auth: {auth}");
auth auth
}, },
source: Mutex::new(source), source: tokio::sync::Mutex::new(source),
project, project,
search_reader, search_reader,
search_writer: Mutex::new(search_writer), search_writer: std::sync::Mutex::new(search_writer),
}); });
let publish_governor_config = GovernorConfigBuilder::default() let publish_governor_config = GovernorConfigBuilder::default()


@ -3,6 +3,7 @@ use actix_web::{
http::header::{CONTENT_ENCODING, CONTENT_TYPE}, http::header::{CONTENT_ENCODING, CONTENT_TYPE},
HttpResponse, HttpResponse,
}; };
use fs_err::tokio as fs;
use pesde::{names::PackageName, source::version_id::VersionId}; use pesde::{names::PackageName, source::version_id::VersionId};
use std::{ use std::{
fmt::Display, fmt::Display,
@ -14,8 +15,8 @@ pub struct FSStorage {
pub root: PathBuf, pub root: PathBuf,
} }
fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> { async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
Ok(match fs_err::read(path) { Ok(match fs::read(path).await {
Ok(contents) => HttpResponse::Ok() Ok(contents) => HttpResponse::Ok()
.append_header((CONTENT_TYPE, content_type)) .append_header((CONTENT_TYPE, content_type))
.append_header((CONTENT_ENCODING, "gzip")) .append_header((CONTENT_ENCODING, "gzip"))
@ -40,9 +41,9 @@ impl StorageImpl for FSStorage {
.join(name) .join(name)
.join(version.version().to_string()) .join(version.version().to_string())
.join(version.target().to_string()); .join(version.target().to_string());
fs_err::create_dir_all(&path)?; fs::create_dir_all(&path).await?;
fs_err::write(path.join("pkg.tar.gz"), &contents)?; fs::write(path.join("pkg.tar.gz"), &contents).await?;
Ok(()) Ok(())
} }
@ -61,7 +62,7 @@ impl StorageImpl for FSStorage {
.join(version.version().to_string()) .join(version.version().to_string())
.join(version.target().to_string()); .join(version.target().to_string());
read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip") read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip").await
} }
async fn store_readme( async fn store_readme(
@ -78,9 +79,9 @@ impl StorageImpl for FSStorage {
.join(name) .join(name)
.join(version.version().to_string()) .join(version.version().to_string())
.join(version.target().to_string()); .join(version.target().to_string());
fs_err::create_dir_all(&path)?; fs::create_dir_all(&path).await?;
fs_err::write(path.join("readme.gz"), &contents)?; fs::write(path.join("readme.gz"), &contents).await?;
Ok(()) Ok(())
} }
@ -99,14 +100,14 @@ impl StorageImpl for FSStorage {
.join(version.version().to_string()) .join(version.version().to_string())
.join(version.target().to_string()); .join(version.target().to_string());
read_file_to_response(&path.join("readme.gz"), "text/plain") read_file_to_response(&path.join("readme.gz"), "text/plain").await
} }
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> { async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
let path = self.root.join("Doc"); let path = self.root.join("Doc");
fs_err::create_dir_all(&path)?; fs::create_dir_all(&path).await?;
fs_err::write(path.join(format!("{doc_hash}.gz")), &contents)?; fs::write(path.join(format!("{doc_hash}.gz")), &contents).await?;
Ok(()) Ok(())
} }
@ -114,7 +115,7 @@ impl StorageImpl for FSStorage {
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> { async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
let path = self.root.join("Doc"); let path = self.root.join("Doc");
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain") read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await
} }
} }
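All the storage I/O above goes through `fs_err::tokio` (enabled by the new `fs-err = { features = ["tokio"] }`), which mirrors `tokio::fs` but attaches the offending path to errors. A minimal sketch of the facade:

```rust
use fs_err::tokio as fs;

// Same call shapes as tokio::fs; errors name the path that failed.
async fn store(dir: &std::path::Path, contents: &[u8]) -> std::io::Result<()> {
    fs::create_dir_all(dir).await?;
    fs::write(dir.join("pkg.tar.gz"), contents).await
}
```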


@ -37,8 +37,8 @@ impl<'de> Deserialize<'de> for Tokens {
} }
} }
pub fn get_tokens() -> anyhow::Result<Tokens> { pub async fn get_tokens() -> anyhow::Result<Tokens> {
let config = read_config()?; let config = read_config().await?;
if !config.tokens.0.is_empty() { if !config.tokens.0.is_empty() {
return Ok(config.tokens); return Ok(config.tokens);
} }
@ -56,7 +56,7 @@ pub fn get_tokens() -> anyhow::Result<Tokens> {
Ok(Tokens(BTreeMap::new())) Ok(Tokens(BTreeMap::new()))
} }
pub fn set_tokens(tokens: Tokens) -> anyhow::Result<()> { pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?; let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?; let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
@ -66,19 +66,19 @@ pub fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
Err(e) => return Err(e.into()), Err(e) => return Err(e.into()),
} }
let mut config = read_config()?; let mut config = read_config().await?;
config.tokens = tokens; config.tokens = tokens;
write_config(&config).map_err(Into::into) write_config(&config).await.map_err(Into::into)
} }
pub fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> { pub async fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
let mut tokens = get_tokens()?; let mut tokens = get_tokens().await?;
if let Some(token) = token { if let Some(token) = token {
tokens.0.insert(repo.clone(), token.to_string()); tokens.0.insert(repo.clone(), token.to_string());
} else { } else {
tokens.0.remove(repo); tokens.0.remove(repo);
} }
set_tokens(tokens) set_tokens(tokens).await
} }
#[derive(Debug, Deserialize)] #[derive(Debug, Deserialize)]
@ -86,18 +86,20 @@ struct UserResponse {
login: String, login: String,
} }
pub fn get_token_login( pub async fn get_token_login(
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
access_token: &str, access_token: &str,
) -> anyhow::Result<String> { ) -> anyhow::Result<String> {
let response = reqwest let response = reqwest
.get("https://api.github.com/user") .get("https://api.github.com/user")
.header(AUTHORIZATION, access_token) .header(AUTHORIZATION, access_token)
.send() .send()
.await
.context("failed to send user request")? .context("failed to send user request")?
.error_for_status() .error_for_status()
.context("failed to get user")? .context("failed to get user")?
.json::<UserResponse>() .json::<UserResponse>()
.await
.context("failed to parse user response")?; .context("failed to parse user response")?;
Ok(response.login) Ok(response.login)
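With the blocking client gone, both `send()` and body deserialization become awaited futures. A self-contained sketch of the call shape, assuming reqwest's `json` feature:

```rust
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;

#[derive(Deserialize)]
struct UserResponse {
    login: String,
}

// Fetch the GitHub login for a token; send() and json() each await.
async fn token_login(client: &reqwest::Client, token: &str) -> reqwest::Result<String> {
    let user: UserResponse = client
        .get("https://api.github.com/user")
        .header(AUTHORIZATION, token)
        .send()
        .await?
        .error_for_status()?
        .json()
        .await?;
    Ok(user.login)
}
```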


@ -47,9 +47,10 @@ pub struct AddCommand {
} }
impl AddCommand { impl AddCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
let manifest = project let manifest = project
.deser_manifest() .deser_manifest()
.await
.context("failed to read manifest")?; .context("failed to read manifest")?;
let (source, specifier) = match &self.name { let (source, specifier) = match &self.name {
@ -65,7 +66,10 @@ impl AddCommand {
return Ok(()); return Ok(());
} }
let index = index.unwrap_or(read_config()?.default_index); let index = match index {
Some(index) => index,
None => read_config().await?.default_index,
};
let source = PackageSources::Pesde(PesdePackageSource::new(index)); let source = PackageSources::Pesde(PesdePackageSource::new(index));
let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier { let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
@ -89,7 +93,7 @@ impl AddCommand {
return Ok(()); return Ok(());
} }
let index = index.unwrap_or(read_config()?.default_index); let index = index.context("no wally index found")?;
let source = let source =
PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index)); PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index));
@ -125,10 +129,12 @@ impl AddCommand {
}; };
source source
.refresh(&project) .refresh(&project)
.await
.context("failed to refresh package source")?; .context("failed to refresh package source")?;
let Some(version_id) = source let Some(version_id) = source
.resolve(&specifier, &project, manifest.target.kind()) .resolve(&specifier, &project, manifest.target.kind())
.await
.context("failed to resolve package")? .context("failed to resolve package")?
.1 .1
.pop_last() .pop_last()
@ -141,7 +147,10 @@ impl AddCommand {
let project_target = manifest.target.kind(); let project_target = manifest.target.kind();
let mut manifest = toml_edit::DocumentMut::from_str( let mut manifest = toml_edit::DocumentMut::from_str(
&project.read_manifest().context("failed to read manifest")?, &project
.read_manifest()
.await
.context("failed to read manifest")?,
) )
.context("failed to parse manifest")?; .context("failed to parse manifest")?;
let dependency_key = if self.peer { let dependency_key = if self.peer {
@ -227,6 +236,7 @@ impl AddCommand {
project project
.write_manifest(manifest.to_string()) .write_manifest(manifest.to_string())
.await
.context("failed to write manifest")?; .context("failed to write manifest")?;
Ok(()) Ok(())


@ -3,6 +3,7 @@ use clap::Args;
use colored::Colorize; use colored::Colorize;
use serde::Deserialize; use serde::Deserialize;
use std::thread::spawn; use std::thread::spawn;
use tokio::time::sleep;
use url::Url; use url::Url;
use pesde::{ use pesde::{
@ -46,16 +47,19 @@ enum AccessTokenResponse {
} }
impl LoginCommand { impl LoginCommand {
pub fn authenticate_device_flow( pub async fn authenticate_device_flow(
&self, &self,
index_url: &gix::Url, index_url: &gix::Url,
project: &Project, project: &Project,
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
) -> anyhow::Result<String> { ) -> anyhow::Result<String> {
println!("logging in into {index_url}"); println!("logging in into {index_url}");
let source = PesdePackageSource::new(index_url.clone()); let source = PesdePackageSource::new(index_url.clone());
source.refresh(project).context("failed to refresh index")?; source
.refresh(project)
.await
.context("failed to refresh index")?;
let config = source let config = source
.config(project) .config(project)
@ -70,10 +74,12 @@ impl LoginCommand {
&[("client_id", &client_id)], &[("client_id", &client_id)],
)?) )?)
.send() .send()
.await
.context("failed to send device code request")? .context("failed to send device code request")?
.error_for_status() .error_for_status()
.context("failed to get device code response")? .context("failed to get device code response")?
.json::<DeviceCodeResponse>() .json::<DeviceCodeResponse>()
.await
.context("failed to parse device code response")?; .context("failed to parse device code response")?;
println!( println!(
@ -102,7 +108,7 @@ impl LoginCommand {
let mut interval = std::time::Duration::from_secs(response.interval); let mut interval = std::time::Duration::from_secs(response.interval);
while time_left > 0 { while time_left > 0 {
std::thread::sleep(interval); sleep(interval).await;
time_left = time_left.saturating_sub(interval.as_secs()); time_left = time_left.saturating_sub(interval.as_secs());
let response = reqwest let response = reqwest
@ -118,10 +124,12 @@ impl LoginCommand {
], ],
)?) )?)
.send() .send()
.await
.context("failed to send access token request")? .context("failed to send access token request")?
.error_for_status() .error_for_status()
.context("failed to get access token response")? .context("failed to get access token response")?
.json::<AccessTokenResponse>() .json::<AccessTokenResponse>()
.await
.context("failed to parse access token response")?; .context("failed to parse access token response")?;
match response { match response {
@ -149,16 +157,19 @@ impl LoginCommand {
anyhow::bail!("code expired, please re-run the login command"); anyhow::bail!("code expired, please re-run the login command");
} }
pub fn run( pub async fn run(
self, self,
index_url: gix::Url, index_url: gix::Url,
project: Project, project: Project,
reqwest: reqwest::blocking::Client, reqwest: reqwest::Client,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let token_given = self.token.is_some(); let token_given = self.token.is_some();
let token = match self.token { let token = match self.token {
Some(token) => token, Some(token) => token,
None => self.authenticate_device_flow(&index_url, &project, &reqwest)?, None => {
self.authenticate_device_flow(&index_url, &project, &reqwest)
.await?
}
}; };
let token = if token_given { let token = if token_given {
@ -168,13 +179,13 @@ impl LoginCommand {
let token = format!("Bearer {token}"); let token = format!("Bearer {token}");
println!( println!(
"logged in as {} for {index_url}", "logged in as {} for {index_url}",
get_token_login(&reqwest, &token)?.bold() get_token_login(&reqwest, &token).await?.bold()
); );
token token
}; };
set_token(&index_url, Some(&token))?; set_token(&index_url, Some(&token)).await?;
Ok(()) Ok(())
} }


@ -5,8 +5,8 @@ use clap::Args;
pub struct LogoutCommand {} pub struct LogoutCommand {}
impl LogoutCommand { impl LogoutCommand {
pub fn run(self, index_url: gix::Url) -> anyhow::Result<()> { pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
set_token(&index_url, None)?; set_token(&index_url, None).await?;
println!("logged out of {index_url}"); println!("logged out of {index_url}");


@ -28,8 +28,8 @@ pub enum AuthCommands {
} }
impl AuthSubcommand { impl AuthSubcommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let manifest = match project.deser_manifest() { let manifest = match project.deser_manifest().await {
Ok(manifest) => Some(manifest), Ok(manifest) => Some(manifest),
Err(e) => match e { Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None, ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
@ -44,7 +44,7 @@ impl AuthSubcommand {
}, },
None => match manifest { None => match manifest {
Some(_) => None, Some(_) => None,
None => Some(read_config()?.default_index), None => Some(read_config().await?.default_index),
}, },
}; };
@ -61,9 +61,9 @@ impl AuthSubcommand {
}; };
match self.command { match self.command {
AuthCommands::Login(login) => login.run(index_url, project, reqwest), AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,
AuthCommands::Logout(logout) => logout.run(index_url), AuthCommands::Logout(logout) => logout.run(index_url).await,
AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest), AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest).await,
} }
} }
} }


@ -6,12 +6,8 @@ use colored::Colorize;
pub struct WhoAmICommand {} pub struct WhoAmICommand {}
impl WhoAmICommand { impl WhoAmICommand {
pub fn run( pub async fn run(self, index_url: gix::Url, reqwest: reqwest::Client) -> anyhow::Result<()> {
self, let tokens = get_tokens().await?;
index_url: gix::Url,
reqwest: reqwest::blocking::Client,
) -> anyhow::Result<()> {
let tokens = get_tokens()?;
let token = match tokens.0.get(&index_url) { let token = match tokens.0.get(&index_url) {
Some(token) => token, Some(token) => token,
None => { None => {
@ -22,7 +18,7 @@ impl WhoAmICommand {
println!( println!(
"logged in as {} into {index_url}", "logged in as {} into {index_url}",
get_token_login(&reqwest, token)?.bold() get_token_login(&reqwest, token).await?.bold()
); );
Ok(()) Ok(())


@ -13,8 +13,8 @@ pub struct DefaultIndexCommand {
} }
impl DefaultIndexCommand { impl DefaultIndexCommand {
pub fn run(self) -> anyhow::Result<()> { pub async fn run(self) -> anyhow::Result<()> {
let mut config = read_config()?; let mut config = read_config().await?;
let index = if self.reset { let index = if self.reset {
Some(CliConfig::default().default_index) Some(CliConfig::default().default_index)
@ -25,7 +25,7 @@ impl DefaultIndexCommand {
match index { match index {
Some(index) => { Some(index) => {
config.default_index = index.clone(); config.default_index = index.clone();
write_config(&config)?; write_config(&config).await?;
println!("default index set to: {index}"); println!("default index set to: {index}");
} }
None => { None => {


@ -13,10 +13,10 @@ pub enum ConfigCommands {
} }
impl ConfigCommands { impl ConfigCommands {
pub fn run(self) -> anyhow::Result<()> { pub async fn run(self) -> anyhow::Result<()> {
match self { match self {
ConfigCommands::DefaultIndex(default_index) => default_index.run(), ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run(), ConfigCommands::ScriptsRepo(scripts_repo) => scripts_repo.run().await,
} }
} }
} }


@ -4,6 +4,7 @@ use crate::cli::{
}; };
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use fs_err::tokio as fs;
#[derive(Debug, Args)] #[derive(Debug, Args)]
pub struct ScriptsRepoCommand { pub struct ScriptsRepoCommand {
@ -17,8 +18,8 @@ pub struct ScriptsRepoCommand {
} }
impl ScriptsRepoCommand { impl ScriptsRepoCommand {
pub fn run(self) -> anyhow::Result<()> { pub async fn run(self) -> anyhow::Result<()> {
let mut config = read_config()?; let mut config = read_config().await?;
let repo = if self.reset { let repo = if self.reset {
Some(CliConfig::default().scripts_repo) Some(CliConfig::default().scripts_repo)
@ -29,9 +30,10 @@ impl ScriptsRepoCommand {
match repo { match repo {
Some(repo) => { Some(repo) => {
config.scripts_repo = repo.clone(); config.scripts_repo = repo.clone();
write_config(&config)?; write_config(&config).await?;
fs_err::remove_dir_all(home_dir()?.join("scripts")) fs::remove_dir_all(home_dir()?.join("scripts"))
.await
.context("failed to remove scripts directory")?; .context("failed to remove scripts directory")?;
println!("scripts repo set to: {repo}"); println!("scripts repo set to: {repo}");


@ -1,6 +1,7 @@
use crate::cli::{config::read_config, VersionedPackageName}; use crate::cli::{config::read_config, VersionedPackageName};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use fs_err::tokio as fs;
use pesde::{ use pesde::{
linking::generator::generate_bin_linking_module, linking::generator::generate_bin_linking_module,
manifest::target::TargetKind, manifest::target::TargetKind,
@ -30,14 +31,16 @@ pub struct ExecuteCommand {
} }
impl ExecuteCommand { impl ExecuteCommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let index = self let index = match self.index {
.index Some(index) => Some(index),
.or_else(|| read_config().ok().map(|c| c.default_index)) None => read_config().await.ok().map(|c| c.default_index),
.context("no index specified")?; }
.context("no index specified")?;
let source = PesdePackageSource::new(index); let source = PesdePackageSource::new(index);
source source
.refresh(&project) .refresh(&project)
.await
.context("failed to refresh source")?; .context("failed to refresh source")?;
let version_req = self.package.1.unwrap_or(VersionReq::STAR); let version_req = self.package.1.unwrap_or(VersionReq::STAR);
@ -51,6 +54,7 @@ impl ExecuteCommand {
if let Some(res) = source if let Some(res) = source
.resolve(&specifier, &project, TargetKind::Lune) .resolve(&specifier, &project, TargetKind::Lune)
.await
.context("failed to resolve package")? .context("failed to resolve package")?
.1 .1
.pop_last() .pop_last()
@ -60,6 +64,7 @@ impl ExecuteCommand {
source source
.resolve(&specifier, &project, TargetKind::Luau) .resolve(&specifier, &project, TargetKind::Luau)
.await
.context("failed to resolve package")? .context("failed to resolve package")?
.1 .1
.pop_last() .pop_last()
@ -74,16 +79,20 @@ impl ExecuteCommand {
let (fs, target) = source let (fs, target) = source
.download(&pkg_ref, &project, &reqwest) .download(&pkg_ref, &project, &reqwest)
.await
.context("failed to download package")?; .context("failed to download package")?;
let bin_path = target.bin_path().context("package has no binary export")?; let bin_path = target.bin_path().context("package has no binary export")?;
let tmp_dir = project.cas_dir().join(".tmp"); let tmp_dir = project.cas_dir().join(".tmp");
fs_err::create_dir_all(&tmp_dir).context("failed to create temporary directory")?; fs::create_dir_all(&tmp_dir)
.await
.context("failed to create temporary directory")?;
let tempdir = let tempdir =
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?; tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
fs.write_to(tempdir.path(), project.cas_dir(), true) fs.write_to(tempdir.path(), project.cas_dir(), true)
.await
.context("failed to write package contents")?; .context("failed to write package contents")?;
let mut caller = let mut caller =


@ -10,6 +10,7 @@ use pesde::{
}; };
use crate::cli::{config::read_config, HOME_DIR}; use crate::cli::{config::read_config, HOME_DIR};
use fs_err::tokio as fs;
#[derive(Debug, Args)] #[derive(Debug, Args)]
pub struct InitCommand {} pub struct InitCommand {}
@ -25,8 +26,8 @@ require(home_dir .. {:?})"#,
} }
impl InitCommand { impl InitCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest() { match project.read_manifest().await {
Ok(_) => { Ok(_) => {
println!("{}", "project already initialized".red()); println!("{}", "project already initialized".red());
return Ok(()); return Ok(());
@ -125,25 +126,29 @@ impl InitCommand {
let folder = project let folder = project
.package_dir() .package_dir()
.join(concat!(".", env!("CARGO_PKG_NAME"))); .join(concat!(".", env!("CARGO_PKG_NAME")));
fs_err::create_dir_all(&folder).context("failed to create scripts folder")?; fs::create_dir_all(&folder)
.await
.context("failed to create scripts folder")?;
fs_err::write( fs::write(
folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)), folder.join(format!("{}.luau", ScriptName::RobloxSyncConfigGenerator)),
script_contents(Path::new(&format!( script_contents(Path::new(&format!(
"lune/rojo/{}.luau", "lune/rojo/{}.luau",
ScriptName::RobloxSyncConfigGenerator ScriptName::RobloxSyncConfigGenerator
))), ))),
) )
.await
.context("failed to write sync config generator script file")?; .context("failed to write sync config generator script file")?;
#[cfg(feature = "wally-compat")] #[cfg(feature = "wally-compat")]
fs_err::write( fs::write(
folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)), folder.join(format!("{}.luau", ScriptName::SourcemapGenerator)),
script_contents(Path::new(&format!( script_contents(Path::new(&format!(
"lune/rojo/{}.luau", "lune/rojo/{}.luau",
ScriptName::SourcemapGenerator ScriptName::SourcemapGenerator
))), ))),
) )
.await
.context("failed to write sourcemap generator script file")?; .context("failed to write sourcemap generator script file")?;
let scripts = let scripts =
@ -166,9 +171,9 @@ impl InitCommand {
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())) manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[DEFAULT_INDEX_NAME] = [DEFAULT_INDEX_NAME] =
toml_edit::value(read_config()?.default_index.to_bstring().to_string()); toml_edit::value(read_config().await?.default_index.to_bstring().to_string());
project.write_manifest(manifest.to_string())?; project.write_manifest(manifest.to_string()).await?;
println!("{}", "initialized project".green()); println!("{}", "initialized project".green());
Ok(()) Ok(())


@ -1,26 +1,21 @@
use crate::cli::{ use crate::cli::{
bin_dir, download_graph, files::make_executable, run_on_workspace_members, up_to_date_lockfile, bin_dir, download_graph, files::make_executable, repos::update_scripts,
run_on_workspace_members, up_to_date_lockfile,
}; };
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::{ColoredString, Colorize}; use colored::{ColoredString, Colorize};
use fs_err::tokio as fs;
use indicatif::MultiProgress; use indicatif::MultiProgress;
use pesde::{ use pesde::{
lockfile::Lockfile, lockfile::Lockfile,
manifest::{target::TargetKind, DependencyType}, manifest::{target::TargetKind, DependencyType},
Project, MANIFEST_FILE_NAME, Project, MANIFEST_FILE_NAME,
}; };
use std::{ use std::collections::{BTreeSet, HashSet};
collections::{BTreeSet, HashSet},
thread::JoinHandle,
};
#[derive(Debug, Args, Copy, Clone)] #[derive(Debug, Args, Copy, Clone)]
pub struct InstallCommand { pub struct InstallCommand {
/// The amount of threads to use for downloading
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
threads: u64,
/// Whether to error on changes in the lockfile /// Whether to error on changes in the lockfile
#[arg(long)] #[arg(long)]
locked: bool, locked: bool,
@ -91,21 +86,21 @@ fn job(n: u8) -> ColoredString {
} }
impl InstallCommand { impl InstallCommand {
pub fn run( pub async fn run(
self, self,
project: Project, project: Project,
multi: MultiProgress, multi: MultiProgress,
reqwest: reqwest::blocking::Client, reqwest: reqwest::Client,
update_task: &mut Option<JoinHandle<()>>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new(); let mut refreshed_sources = HashSet::new();
let manifest = project let manifest = project
.deser_manifest() .deser_manifest()
.await
.context("failed to read manifest")?; .context("failed to read manifest")?;
let lockfile = if self.locked { let lockfile = if self.locked {
match up_to_date_lockfile(&project)? { match up_to_date_lockfile(&project).await? {
None => { None => {
anyhow::bail!( anyhow::bail!(
"lockfile is out of sync, run `{} install` to update it", "lockfile is out of sync, run `{} install` to update it",
@ -115,7 +110,7 @@ impl InstallCommand {
file => file, file => file,
} }
} else { } else {
match project.deser_lockfile() { match project.deser_lockfile().await {
Ok(lockfile) => { Ok(lockfile) => {
if lockfile.overrides != manifest.overrides { if lockfile.overrides != manifest.overrides {
log::debug!("overrides are different"); log::debug!("overrides are different");
@ -154,7 +149,8 @@ impl InstallCommand {
if deleted_folders.insert(folder.to_string()) { if deleted_folders.insert(folder.to_string()) {
log::debug!("deleting the {folder} folder"); log::debug!("deleting the {folder} folder");
if let Some(e) = fs_err::remove_dir_all(project.package_dir().join(&folder)) if let Some(e) = fs::remove_dir_all(project.package_dir().join(&folder))
.await
.err() .err()
.filter(|e| e.kind() != std::io::ErrorKind::NotFound) .filter(|e| e.kind() != std::io::ErrorKind::NotFound)
{ {
@ -184,12 +180,10 @@ impl InstallCommand {
let graph = project let graph = project
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources) .dependency_graph(old_graph.as_ref(), &mut refreshed_sources)
.await
.context("failed to build dependency graph")?; .context("failed to build dependency graph")?;
if let Some(task) = update_task.take() { update_scripts(&project).await?;
log::debug!("waiting for update task to finish");
task.join().expect("failed to join update task");
}
let downloaded_graph = download_graph( let downloaded_graph = download_graph(
&project, &project,
@ -197,12 +191,12 @@ impl InstallCommand {
&graph, &graph,
&multi, &multi,
&reqwest, &reqwest,
self.threads as usize,
self.prod, self.prod,
true, true,
format!("{} 📥 downloading dependencies", job(3)), format!("{} 📥 downloading dependencies", job(3)),
format!("{} 📥 downloaded dependencies", job(3)), format!("{} 📥 downloaded dependencies", job(3)),
)?; )
.await?;
let filtered_graph = if self.prod { let filtered_graph = if self.prod {
downloaded_graph downloaded_graph
@ -225,9 +219,10 @@ impl InstallCommand {
project project
.link_dependencies(&filtered_graph) .link_dependencies(&filtered_graph)
.await
.context("failed to link dependencies")?; .context("failed to link dependencies")?;
let bin_folder = bin_dir()?; let bin_folder = bin_dir().await?;
for versions in filtered_graph.values() { for versions in filtered_graph.values() {
for node in versions.values() { for node in versions.values() {
@ -245,18 +240,22 @@ impl InstallCommand {
} }
let bin_file = bin_folder.join(alias); let bin_file = bin_folder.join(alias);
fs_err::write(&bin_file, bin_link_file(alias)) fs::write(&bin_file, bin_link_file(alias))
.await
.context("failed to write bin link file")?; .context("failed to write bin link file")?;
make_executable(&bin_file).context("failed to make bin link executable")?; make_executable(&bin_file)
.await
.context("failed to make bin link executable")?;
#[cfg(windows)] #[cfg(windows)]
{ {
let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION); let bin_file = bin_file.with_extension(std::env::consts::EXE_EXTENSION);
fs_err::copy( fs::copy(
std::env::current_exe().context("failed to get current executable path")?, std::env::current_exe().context("failed to get current executable path")?,
&bin_file, &bin_file,
) )
.await
.context("failed to copy bin link file")?; .context("failed to copy bin link file")?;
} }
} }
@ -268,6 +267,7 @@ impl InstallCommand {
project project
.apply_patches(&filtered_graph) .apply_patches(&filtered_graph)
.await
.context("failed to apply patches")?; .context("failed to apply patches")?;
} }
@ -283,9 +283,13 @@ impl InstallCommand {
graph: downloaded_graph, graph: downloaded_graph,
workspace: run_on_workspace_members(&project, |project| { workspace: run_on_workspace_members(&project, |project| {
self.run(project, multi.clone(), reqwest.clone(), &mut None) let multi = multi.clone();
})?, let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, multi, reqwest)).await }
})
.await?,
}) })
.await
.context("failed to write lockfile")?; .context("failed to write lockfile")?;
Ok(()) Ok(())
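`run` now invokes itself for every workspace member, and a recursive `async fn` produces an infinitely sized future unless the recursive call is boxed, which is what the `Box::pin(self.run(...)).await` inside the closure is for (it also leans on `InstallCommand` being `Copy`). The same trick in isolation, compiling on Rust 1.77+; the countdown is purely illustrative:

async fn countdown(n: u32) {
    if n == 0 {
        return;
    }
    println!("{n}");
    // Boxing the recursive call puts the inner future behind a pointer,
    // so the outer future's size no longer depends on itself.
    Box::pin(countdown(n - 1)).await;
}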
@ -1,6 +1,5 @@
use indicatif::MultiProgress; use indicatif::MultiProgress;
use pesde::Project; use pesde::Project;
use std::thread::JoinHandle;
mod add; mod add;
mod auth; mod auth;
@ -73,40 +72,31 @@ pub enum Subcommand {
} }
impl Subcommand { impl Subcommand {
pub fn run( pub async fn run(
self, self,
project: Project, project: Project,
multi: MultiProgress, multi: MultiProgress,
reqwest: reqwest::blocking::Client, reqwest: reqwest::Client,
update_task: JoinHandle<()>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let mut update_task = Some(update_task); match self {
Subcommand::Auth(auth) => auth.run(project, reqwest).await,
let res = match self { Subcommand::Config(config) => config.run().await,
Subcommand::Auth(auth) => auth.run(project, reqwest), Subcommand::Init(init) => init.run(project).await,
Subcommand::Config(config) => config.run(), Subcommand::Run(run) => run.run(project).await,
Subcommand::Init(init) => init.run(project), Subcommand::Install(install) => install.run(project, multi, reqwest).await,
Subcommand::Run(run) => run.run(project, &mut update_task), Subcommand::Publish(publish) => publish.run(project, reqwest).await,
Subcommand::Install(install) => install.run(project, multi, reqwest, &mut update_task),
Subcommand::Publish(publish) => publish.run(project, reqwest),
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
Subcommand::SelfInstall(self_install) => self_install.run(), Subcommand::SelfInstall(self_install) => self_install.run().await,
#[cfg(feature = "patches")] #[cfg(feature = "patches")]
Subcommand::Patch(patch) => patch.run(project, reqwest), Subcommand::Patch(patch) => patch.run(project, reqwest).await,
#[cfg(feature = "patches")] #[cfg(feature = "patches")]
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project), Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
#[cfg(feature = "version-management")] #[cfg(feature = "version-management")]
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest), Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
Subcommand::Add(add) => add.run(project), Subcommand::Add(add) => add.run(project).await,
Subcommand::Update(update) => update.run(project, multi, reqwest, &mut update_task), Subcommand::Update(update) => update.run(project, multi, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project), Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest), Subcommand::Execute(execute) => execute.run(project, reqwest).await,
};
if let Some(handle) = update_task.take() {
handle.join().expect("failed to join update task");
} }
res
} }
} }
@ -20,11 +20,12 @@ pub struct OutdatedCommand {
} }
impl OutdatedCommand { impl OutdatedCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
let graph = project.deser_lockfile()?.graph; let graph = project.deser_lockfile().await?.graph;
let manifest = project let manifest = project
.deser_manifest() .deser_manifest()
.await
.context("failed to read manifest")?; .context("failed to read manifest")?;
let mut refreshed_sources = HashSet::new(); let mut refreshed_sources = HashSet::new();
@ -45,7 +46,7 @@ impl OutdatedCommand {
let source = node.node.pkg_ref.source(); let source = node.node.pkg_ref.source();
if refreshed_sources.insert(source.clone()) { if refreshed_sources.insert(source.clone()) {
source.refresh(&project)?; source.refresh(&project).await?;
} }
if !self.strict { if !self.strict {
@ -64,6 +65,7 @@ impl OutdatedCommand {
let version_id = source let version_id = source
.resolve(&specifier, &project, manifest.target.kind()) .resolve(&specifier, &project, manifest.target.kind())
.await
.context("failed to resolve package versions")? .context("failed to resolve package versions")?
.1 .1
.pop_last() .pop_last()
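`resolve` hands back the matching versions as an ordered map, so selecting the newest is a single `pop_last` (stable since Rust 1.66). A toy illustration of that selection step; the keys here are plain strings, whereas the real map is keyed by a semantically ordered version type:

use std::collections::BTreeMap;

fn main() {
    let mut candidates = BTreeMap::from([("1.0.0", "ref-a"), ("1.2.0", "ref-b")]);
    // BTreeMap keeps keys sorted, so pop_last yields the greatest key.
    let (latest, _pkg_ref) = candidates.pop_last().expect("no versions resolved");
    assert_eq!(latest, "1.2.0");
}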
@ -2,6 +2,7 @@ use crate::cli::{up_to_date_lockfile, VersionedPackageName};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use fs_err::tokio as fs;
use pesde::{ use pesde::{
patches::setup_patches_repo, patches::setup_patches_repo,
source::{ source::{
@ -19,8 +20,8 @@ pub struct PatchCommand {
} }
impl PatchCommand { impl PatchCommand {
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? { let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph lockfile.graph
} else { } else {
anyhow::bail!("outdated lockfile, please run the install command first") anyhow::bail!("outdated lockfile, please run the install command first")
@ -45,12 +46,14 @@ impl PatchCommand {
.join(name.escaped()) .join(name.escaped())
.join(version_id.escaped()) .join(version_id.escaped())
.join(chrono::Utc::now().timestamp().to_string()); .join(chrono::Utc::now().timestamp().to_string());
fs_err::create_dir_all(&directory)?; fs::create_dir_all(&directory).await?;
source source
.download(&node.node.pkg_ref, &project, &reqwest)? .download(&node.node.pkg_ref, &project, &reqwest)
.await?
.0 .0
.write_to(&directory, project.cas_dir(), false) .write_to(&directory, project.cas_dir(), false)
.await
.context("failed to write package contents")?; .context("failed to write package contents")?;
setup_patches_repo(&directory)?; setup_patches_repo(&directory)?;
@ -1,6 +1,7 @@
use crate::cli::up_to_date_lockfile; use crate::cli::up_to_date_lockfile;
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use fs_err::tokio as fs;
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project}; use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
use std::{path::PathBuf, str::FromStr}; use std::{path::PathBuf, str::FromStr};
@ -12,8 +13,8 @@ pub struct PatchCommitCommand {
} }
impl PatchCommitCommand { impl PatchCommitCommand {
pub fn run(self, project: Project) -> anyhow::Result<()> { pub async fn run(self, project: Project) -> anyhow::Result<()> {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? { let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph lockfile.graph
} else { } else {
anyhow::bail!("outdated lockfile, please run the install command first") anyhow::bail!("outdated lockfile, please run the install command first")
@ -48,15 +49,22 @@ impl PatchCommitCommand {
.context("package not found in graph")?; .context("package not found in graph")?;
let mut manifest = toml_edit::DocumentMut::from_str( let mut manifest = toml_edit::DocumentMut::from_str(
&project.read_manifest().context("failed to read manifest")?, &project
.read_manifest()
.await
.context("failed to read manifest")?,
) )
.context("failed to parse manifest")?; .context("failed to parse manifest")?;
let patch = create_patch(&self.directory).context("failed to create patch")?; let patch = create_patch(&self.directory).context("failed to create patch")?;
fs_err::remove_dir_all(self.directory).context("failed to remove patch directory")?; fs::remove_dir_all(self.directory)
.await
.context("failed to remove patch directory")?;
let patches_dir = project.package_dir().join("patches"); let patches_dir = project.package_dir().join("patches");
fs_err::create_dir_all(&patches_dir).context("failed to create patches directory")?; fs::create_dir_all(&patches_dir)
.await
.context("failed to create patches directory")?;
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped()); let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
@ -65,7 +73,9 @@ impl PatchCommitCommand {
anyhow::bail!("patch file already exists: {}", patch_file.display()); anyhow::bail!("patch file already exists: {}", patch_file.display());
} }
fs_err::write(&patch_file, patch).context("failed to write patch file")?; fs::write(&patch_file, patch)
.await
.context("failed to write patch file")?;
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new())) manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[&name.to_string()][&version_id.to_string()] = [&name.to_string()][&version_id.to_string()] =
@ -73,6 +83,7 @@ impl PatchCommitCommand {
project project
.write_manifest(manifest.to_string()) .write_manifest(manifest.to_string())
.await
.context("failed to write manifest")?; .context("failed to write manifest")?;
println!(concat!( println!(concat!(
@ -1,15 +1,9 @@
use crate::cli::{display_err, run_on_workspace_members, up_to_date_lockfile};
use anyhow::Context; use anyhow::Context;
use async_compression::Level;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use reqwest::{header::AUTHORIZATION, StatusCode}; use fs_err::tokio as fs;
use semver::VersionReq;
use std::{
io::{Seek, Write},
path::Component,
};
use tempfile::tempfile;
use crate::cli::{run_on_workspace_members, up_to_date_lockfile};
use pesde::{ use pesde::{
manifest::{target::Target, DependencyType}, manifest::{target::Target, DependencyType},
scripts::ScriptName, scripts::ScriptName,
@ -25,6 +19,11 @@ use pesde::{
}, },
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME, Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
}; };
use reqwest::{header::AUTHORIZATION, StatusCode};
use semver::VersionReq;
use std::path::Component;
use tempfile::Builder;
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
#[derive(Debug, Args, Clone)] #[derive(Debug, Args, Clone)]
pub struct PublishCommand { pub struct PublishCommand {
@ -42,9 +41,10 @@ pub struct PublishCommand {
} }
impl PublishCommand { impl PublishCommand {
fn run_impl(self, project: &Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { async fn run_impl(self, project: &Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let mut manifest = project let mut manifest = project
.deser_manifest() .deser_manifest()
.await
.context("failed to read manifest")?; .context("failed to read manifest")?;
println!( println!(
@ -72,7 +72,7 @@ impl PublishCommand {
anyhow::bail!("no build files found in target"); anyhow::bail!("no build files found in target");
} }
match up_to_date_lockfile(project)? { match up_to_date_lockfile(project).await? {
Some(lockfile) => { Some(lockfile) => {
if lockfile if lockfile
.graph .graph
@ -93,10 +93,9 @@ impl PublishCommand {
} }
} }
let mut archive = tar::Builder::new(flate2::write::GzEncoder::new( let mut archive = tokio_tar::Builder::new(
vec![], async_compression::tokio::write::GzipEncoder::with_quality(vec![], Level::Best),
flate2::Compression::best(), );
));
let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()]; let mut display_includes: Vec<String> = vec![MANIFEST_FILE_NAME.to_string()];
let mut display_build_files: Vec<String> = vec![]; let mut display_build_files: Vec<String> = vec![];
@ -179,8 +178,9 @@ impl PublishCommand {
anyhow::bail!("{name} must point to a file"); anyhow::bail!("{name} must point to a file");
} }
let contents = let contents = fs::read_to_string(&export_path)
fs_err::read_to_string(&export_path).context(format!("failed to read {name}"))?; .await
.context(format!("failed to read {name}"))?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| { if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter() errs.into_iter()
@ -237,17 +237,21 @@ impl PublishCommand {
if included_path.is_file() { if included_path.is_file() {
display_includes.push(included_name.clone()); display_includes.push(included_name.clone());
archive.append_file( archive
included_name, .append_file(
fs_err::File::open(&included_path) included_name,
.context(format!("failed to read {included_name}"))? fs::File::open(&included_path)
.file_mut(), .await
)?; .context(format!("failed to read {included_name}"))?
.file_mut(),
)
.await?;
} else { } else {
display_includes.push(format!("{included_name}/*")); display_includes.push(format!("{included_name}/*"));
archive archive
.append_dir_all(included_name, &included_path) .append_dir_all(included_name, &included_path)
.await
.context(format!("failed to include directory {included_name}"))?; .context(format!("failed to include directory {included_name}"))?;
} }
} }
@ -331,6 +335,7 @@ impl PublishCommand {
DependencySpecifiers::Workspace(spec) => { DependencySpecifiers::Workspace(spec) => {
let pkg_ref = WorkspacePackageSource let pkg_ref = WorkspacePackageSource
.resolve(spec, project, target_kind) .resolve(spec, project, target_kind)
.await
.context("failed to resolve workspace package")? .context("failed to resolve workspace package")?
.1 .1
.pop_last() .pop_last()
@ -345,7 +350,8 @@ impl PublishCommand {
.context("failed to get workspace directory")?, .context("failed to get workspace directory")?,
) )
.join(MANIFEST_FILE_NAME); .join(MANIFEST_FILE_NAME);
let manifest = fs_err::read_to_string(&manifest) let manifest = fs::read_to_string(&manifest)
.await
.context("failed to read workspace package manifest")?; .context("failed to read workspace package manifest")?;
let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest) let manifest = toml::from_str::<pesde::manifest::Manifest>(&manifest)
.context("failed to parse workspace package manifest")?; .context("failed to parse workspace package manifest")?;
@ -442,25 +448,41 @@ impl PublishCommand {
println!(); println!();
} }
let mut temp_manifest = tempfile().context("failed to create temp manifest file")?; let temp_path = Builder::new().make(|_| Ok(()))?.into_temp_path();
let mut temp_manifest = fs::OpenOptions::new()
.create(true)
.write(true)
.truncate(true)
.read(true)
.open(temp_path.to_path_buf())
.await?;
temp_manifest temp_manifest
.write_all( .write_all(
toml::to_string(&manifest) toml::to_string(&manifest)
.context("failed to serialize manifest")? .context("failed to serialize manifest")?
.as_bytes(), .as_bytes(),
) )
.await
.context("failed to write temp manifest file")?; .context("failed to write temp manifest file")?;
temp_manifest temp_manifest
.rewind() .rewind()
.await
.context("failed to rewind temp manifest file")?; .context("failed to rewind temp manifest file")?;
archive.append_file(MANIFEST_FILE_NAME, &mut temp_manifest)?; archive
.append_file(MANIFEST_FILE_NAME, temp_manifest.file_mut())
.await?;
let archive = archive let mut encoder = archive
.into_inner() .into_inner()
.context("failed to encode archive")? .await
.finish() .context("failed to finish archive")?;
.context("failed to get archive bytes")?; encoder
.shutdown()
.await
.context("failed to finish archive")?;
let archive = encoder.into_inner();
let index_url = manifest let index_url = manifest
.indices .indices
@ -469,6 +491,7 @@ impl PublishCommand {
let source = PesdePackageSource::new(index_url.clone()); let source = PesdePackageSource::new(index_url.clone());
source source
.refresh(project) .refresh(project)
.await
.context("failed to refresh source")?; .context("failed to refresh source")?;
let config = source let config = source
.config(project) .config(project)
@ -494,7 +517,7 @@ impl PublishCommand {
} }
if self.dry_run { if self.dry_run {
fs_err::write("package.tar.gz", archive)?; fs::write("package.tar.gz", archive).await?;
println!( println!(
"{}", "{}",
@ -506,9 +529,9 @@ impl PublishCommand {
let mut request = reqwest let mut request = reqwest
.post(format!("{}/v0/packages", config.api())) .post(format!("{}/v0/packages", config.api()))
.multipart(reqwest::blocking::multipart::Form::new().part( .multipart(reqwest::multipart::Form::new().part(
"tarball", "tarball",
reqwest::blocking::multipart::Part::bytes(archive).file_name("package.tar.gz"), reqwest::multipart::Part::bytes(archive).file_name("package.tar.gz"),
)); ));
if let Some(token) = project.auth_config().tokens().get(index_url) { if let Some(token) = project.auth_config().tokens().get(index_url) {
@ -516,10 +539,13 @@ impl PublishCommand {
request = request.header(AUTHORIZATION, token); request = request.header(AUTHORIZATION, token);
} }
let response = request.send().context("failed to send request")?; let response = request.send().await.context("failed to send request")?;
let status = response.status(); let status = response.status();
let text = response.text().context("failed to get response text")?; let text = response
.text()
.await
.context("failed to get response text")?;
match status { match status {
StatusCode::CONFLICT => { StatusCode::CONFLICT => {
println!("{}", "package version already exists".red().bold()); println!("{}", "package version already exists".red().bold());
@ -544,17 +570,20 @@ impl PublishCommand {
Ok(()) Ok(())
} }
pub fn run(self, project: Project, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let result = self.clone().run_impl(&project, reqwest.clone()); let result = self.clone().run_impl(&project, reqwest.clone()).await;
if project.workspace_dir().is_some() { if project.workspace_dir().is_some() {
return result; return result;
} else if let Err(result) = result { } else {
println!("an error occurred publishing workspace root: {result}"); display_err(result, " occurred publishing workspace root");
} }
run_on_workspace_members(&project, |project| { run_on_workspace_members(&project, |project| {
self.clone().run_impl(&project, reqwest.clone()) let reqwest = reqwest.clone();
let this = self.clone();
async move { this.run_impl(&project, reqwest).await }
}) })
.await
.map(|_| ()) .map(|_| ())
} }
} }
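The tarball is now assembled with `tokio_tar::Builder` writing into `async_compression`'s `GzipEncoder`. The finish sequence matters: `Builder::into_inner` writes the tar trailer, `shutdown` (from `AsyncWriteExt`) flushes the gzip footer, and only then does the encoder's own `into_inner` give back the finished bytes. A condensed sketch under those crates (`tokio-tar`, `async-compression` with its `tokio` feature); the appended path is hypothetical:

use async_compression::{tokio::write::GzipEncoder, Level};
use tokio::io::AsyncWriteExt;

async fn build_archive() -> anyhow::Result<Vec<u8>> {
    let mut archive =
        tokio_tar::Builder::new(GzipEncoder::with_quality(Vec::new(), Level::Best));

    // The real command appends the manifest and every included file/directory.
    archive.append_path("pesde.toml").await?;

    let mut encoder = archive.into_inner().await?; // writes the tar trailer
    encoder.shutdown().await?; // flushes the gzip stream
    Ok(encoder.into_inner()) // the completed .tar.gz bytes
}

Skipping the `shutdown` call would truncate the gzip stream, an easy mistake to make when porting from `flate2`'s `finish()`.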
@ -1,4 +1,4 @@
use crate::cli::up_to_date_lockfile; use crate::cli::{repos::update_scripts, up_to_date_lockfile};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use pesde::{ use pesde::{
@ -8,9 +8,8 @@ use pesde::{
Project, PACKAGES_CONTAINER_NAME, Project, PACKAGES_CONTAINER_NAME,
}; };
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use std::{ use std::{env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command};
env::current_dir, ffi::OsString, io::Write, path::PathBuf, process::Command, thread::JoinHandle, use tokio::runtime::Handle;
};
#[derive(Debug, Args)] #[derive(Debug, Args)]
pub struct RunCommand { pub struct RunCommand {
@ -24,15 +23,11 @@ pub struct RunCommand {
} }
impl RunCommand { impl RunCommand {
pub fn run( pub async fn run(self, project: Project) -> anyhow::Result<()> {
self, let run = |path: PathBuf| {
project: Project, Handle::current()
update_task: &mut Option<JoinHandle<()>>, .block_on(update_scripts(&project))
) -> anyhow::Result<()> { .expect("failed to update scripts");
let mut run = |path: PathBuf| {
if let Some(handle) = update_task.take() {
handle.join().expect("failed to join update task");
}
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile"); let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
caller caller
@ -62,7 +57,7 @@ impl RunCommand {
let package_or_script = match self.package_or_script { let package_or_script = match self.package_or_script {
Some(package_or_script) => package_or_script, Some(package_or_script) => package_or_script,
None => { None => {
if let Some(script_path) = project.deser_manifest()?.target.bin_path() { if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
run(script_path.to_path(project.package_dir())); run(script_path.to_path(project.package_dir()));
} }
@ -71,7 +66,7 @@ impl RunCommand {
}; };
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() { if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
let graph = if let Some(lockfile) = up_to_date_lockfile(&project)? { let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
lockfile.graph lockfile.graph
} else { } else {
anyhow::bail!("outdated lockfile, please run the install command first") anyhow::bail!("outdated lockfile, please run the install command first")
@ -89,7 +84,8 @@ impl RunCommand {
}; };
let base_folder = project let base_folder = project
.deser_manifest()? .deser_manifest()
.await?
.target .target
.kind() .kind()
.packages_folder(&node.node.pkg_ref.target_kind()); .packages_folder(&node.node.pkg_ref.target_kind());
@ -106,7 +102,7 @@ impl RunCommand {
} }
} }
if let Ok(manifest) = project.deser_manifest() { if let Ok(manifest) = project.deser_manifest().await {
if let Some(script_path) = manifest.scripts.get(&package_or_script) { if let Some(script_path) = manifest.scripts.get(&package_or_script) {
run(script_path.to_path(project.package_dir())) run(script_path.to_path(project.package_dir()))
} }
@ -10,7 +10,7 @@ pub struct SelfInstallCommand {
} }
impl SelfInstallCommand { impl SelfInstallCommand {
pub fn run(self) -> anyhow::Result<()> { pub async fn run(self) -> anyhow::Result<()> {
#[cfg(windows)] #[cfg(windows)]
{ {
if !self.skip_add_to_path { if !self.skip_add_to_path {
@ -24,7 +24,7 @@ impl SelfInstallCommand {
.0; .0;
let path: String = env.get_value("Path").context("failed to get Path value")?; let path: String = env.get_value("Path").context("failed to get Path value")?;
let bin_dir = crate::cli::bin_dir()?; let bin_dir = crate::cli::bin_dir().await?;
let bin_dir = bin_dir.to_string_lossy(); let bin_dir = bin_dir.to_string_lossy();
let exists = path.split(';').any(|part| *part == bin_dir); let exists = path.split(';').any(|part| *part == bin_dir);
@ -68,7 +68,7 @@ and then restart your shell.
); );
} }
update_bin_exe()?; update_bin_exe().await?;
Ok(()) Ok(())
} }
@ -5,10 +5,10 @@ use clap::Args;
pub struct SelfUpgradeCommand {} pub struct SelfUpgradeCommand {}
impl SelfUpgradeCommand { impl SelfUpgradeCommand {
pub fn run(self, reqwest: reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn run(self, reqwest: reqwest::Client) -> anyhow::Result<()> {
let config = read_config()?; let config = read_config().await?;
get_or_download_version(&reqwest, &config.last_checked_updates.unwrap().1)?; get_or_download_version(&reqwest, &config.last_checked_updates.unwrap().1).await?;
// a call to `update_bin_exe` or other similar function *should* be here, in case new versions // a call to `update_bin_exe` or other similar function *should* be here, in case new versions
// have fixes to bugs in executing other versions, but that would cause // have fixes to bugs in executing other versions, but that would cause
// the current file to be overwritten by itself, so this needs more thought // the current file to be overwritten by itself, so this needs more thought
@ -1,30 +1,26 @@
use crate::cli::{download_graph, run_on_workspace_members}; use crate::cli::{download_graph, repos::update_scripts, run_on_workspace_members};
use anyhow::Context; use anyhow::Context;
use clap::Args; use clap::Args;
use colored::Colorize; use colored::Colorize;
use indicatif::MultiProgress; use indicatif::MultiProgress;
use pesde::{lockfile::Lockfile, Project}; use pesde::{lockfile::Lockfile, Project};
use std::{collections::HashSet, thread::JoinHandle}; use std::collections::HashSet;
#[derive(Debug, Args, Copy, Clone)] #[derive(Debug, Args, Copy, Clone)]
pub struct UpdateCommand { pub struct UpdateCommand {}
/// The amount of threads to use for downloading
#[arg(short, long, default_value_t = 6, value_parser = clap::value_parser!(u64).range(1..=128))]
threads: u64,
}
impl UpdateCommand { impl UpdateCommand {
pub fn run( pub async fn run(
self, self,
project: Project, project: Project,
multi: MultiProgress, multi: MultiProgress,
reqwest: reqwest::blocking::Client, reqwest: reqwest::Client,
update_task: &mut Option<JoinHandle<()>>,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let mut refreshed_sources = HashSet::new(); let mut refreshed_sources = HashSet::new();
let manifest = project let manifest = project
.deser_manifest() .deser_manifest()
.await
.context("failed to read manifest")?; .context("failed to read manifest")?;
println!( println!(
@ -36,11 +32,10 @@ impl UpdateCommand {
let graph = project let graph = project
.dependency_graph(None, &mut refreshed_sources) .dependency_graph(None, &mut refreshed_sources)
.await
.context("failed to build dependency graph")?; .context("failed to build dependency graph")?;
if let Some(handle) = update_task.take() { update_scripts(&project).await?;
handle.join().expect("failed to join update task");
}
project project
.write_lockfile(Lockfile { .write_lockfile(Lockfile {
@ -55,17 +50,21 @@ impl UpdateCommand {
&graph, &graph,
&multi, &multi,
&reqwest, &reqwest,
self.threads as usize,
false, false,
false, false,
"📥 downloading dependencies".to_string(), "📥 downloading dependencies".to_string(),
"📥 downloaded dependencies".to_string(), "📥 downloaded dependencies".to_string(),
)?, )
.await?,
workspace: run_on_workspace_members(&project, |project| { workspace: run_on_workspace_members(&project, |project| {
self.run(project, multi.clone(), reqwest.clone(), &mut None) let multi = multi.clone();
})?, let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, multi, reqwest)).await }
})
.await?,
}) })
.await
.context("failed to write lockfile")?; .context("failed to write lockfile")?;
Ok(()) Ok(())
@ -1,5 +1,6 @@
use crate::cli::{auth::Tokens, home_dir}; use crate::cli::{auth::Tokens, home_dir};
use anyhow::Context; use anyhow::Context;
use fs_err::tokio as fs;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
#[derive(Debug, Clone, Serialize, Deserialize)] #[derive(Debug, Clone, Serialize, Deserialize)]
@ -38,8 +39,8 @@ impl Default for CliConfig {
} }
} }
pub fn read_config() -> anyhow::Result<CliConfig> { pub async fn read_config() -> anyhow::Result<CliConfig> {
let config_string = match fs_err::read_to_string(home_dir()?.join("config.toml")) { let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
Ok(config_string) => config_string, Ok(config_string) => config_string,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => { Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
return Ok(CliConfig::default()); return Ok(CliConfig::default());
@ -52,9 +53,10 @@ pub fn read_config() -> anyhow::Result<CliConfig> {
Ok(config) Ok(config)
} }
pub fn write_config(config: &CliConfig) -> anyhow::Result<()> { pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
let config_string = toml::to_string(config).context("failed to serialize config")?; let config_string = toml::to_string(config).context("failed to serialize config")?;
fs_err::write(home_dir()?.join("config.toml"), config_string) fs::write(home_dir()?.join("config.toml"), config_string)
.await
.context("failed to write config file")?; .context("failed to write config file")?;
Ok(()) Ok(())
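`read_config` keeps its graceful fallback: a missing `config.toml` yields the default configuration rather than an error, and only other I/O failures propagate. The same shape in miniature, assuming `tokio`, `anyhow`, and `fs-err`:

use fs_err::tokio as fs;

async fn read_or_default(path: &std::path::Path) -> anyhow::Result<String> {
    match fs::read_to_string(path).await {
        Ok(contents) => Ok(contents),
        // A missing file is expected on first run; fall back to a default.
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(String::new()),
        Err(e) => Err(e.into()),
    }
}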
@ -1,16 +1,19 @@
use std::path::Path; use std::path::Path;
pub fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> { pub async fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
#[cfg(unix)] #[cfg(unix)]
{ {
use anyhow::Context; use anyhow::Context;
use fs_err::tokio as fs;
use std::os::unix::fs::PermissionsExt; use std::os::unix::fs::PermissionsExt;
let mut perms = fs_err::metadata(&_path) let mut perms = fs::metadata(&_path)
.await
.context("failed to get bin link file metadata")? .context("failed to get bin link file metadata")?
.permissions(); .permissions();
perms.set_mode(perms.mode() | 0o111); perms.set_mode(perms.mode() | 0o111);
fs_err::set_permissions(&_path, perms) fs::set_permissions(&_path, perms)
.await
.context("failed to set bin link file permissions")?; .context("failed to set bin link file permissions")?;
} }
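The permission logic itself is untouched by the async port: OR-ing the mode with `0o111` sets the execute bit for owner, group, and other while leaving the read/write bits alone. For instance:

fn add_exec_bits(mode: u32) -> u32 {
    // 0o644 (rw-r--r--) becomes 0o755 (rwxr-xr-x)
    mode | 0o111
}

fn main() {
    assert_eq!(add_exec_bits(0o644), 0o755);
}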
@ -1,4 +1,7 @@
use anyhow::Context; use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use indicatif::MultiProgress; use indicatif::MultiProgress;
use pesde::{ use pesde::{
lockfile::{DependencyGraph, DownloadedGraph, Lockfile}, lockfile::{DependencyGraph, DownloadedGraph, Lockfile},
@ -10,11 +13,13 @@ use pesde::{
use relative_path::RelativePathBuf; use relative_path::RelativePathBuf;
use std::{ use std::{
collections::{BTreeMap, HashSet}, collections::{BTreeMap, HashSet},
future::Future,
path::PathBuf, path::PathBuf,
str::FromStr, str::FromStr,
sync::Arc, sync::Arc,
time::Duration, time::Duration,
}; };
use tokio::pin;
pub mod auth; pub mod auth;
pub mod commands; pub mod commands;
@ -32,15 +37,17 @@ pub fn home_dir() -> anyhow::Result<PathBuf> {
.join(HOME_DIR)) .join(HOME_DIR))
} }
pub fn bin_dir() -> anyhow::Result<PathBuf> { pub async fn bin_dir() -> anyhow::Result<PathBuf> {
let bin_dir = home_dir()?.join("bin"); let bin_dir = home_dir()?.join("bin");
fs_err::create_dir_all(&bin_dir).context("failed to create bin folder")?; fs::create_dir_all(&bin_dir)
.await
.context("failed to create bin folder")?;
Ok(bin_dir) Ok(bin_dir)
} }
pub fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> { pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
let manifest = project.deser_manifest()?; let manifest = project.deser_manifest().await?;
let lockfile = match project.deser_lockfile() { let lockfile = match project.deser_lockfile().await {
Ok(lockfile) => lockfile, Ok(lockfile) => lockfile,
Err(pesde::errors::LockfileReadError::Io(e)) Err(pesde::errors::LockfileReadError::Io(e))
if e.kind() == std::io::ErrorKind::NotFound => if e.kind() == std::io::ErrorKind::NotFound =>
@ -185,13 +192,12 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
} }
#[allow(clippy::too_many_arguments)] #[allow(clippy::too_many_arguments)]
pub fn download_graph( pub async fn download_graph(
project: &Project, project: &Project,
refreshed_sources: &mut HashSet<PackageSources>, refreshed_sources: &mut HashSet<PackageSources>,
graph: &DependencyGraph, graph: &DependencyGraph,
multi: &MultiProgress, multi: &MultiProgress,
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
threads: usize,
prod: bool, prod: bool,
write: bool, write: bool,
progress_msg: String, progress_msg: String,
@ -207,11 +213,12 @@ pub fn download_graph(
); );
bar.enable_steady_tick(Duration::from_millis(100)); bar.enable_steady_tick(Duration::from_millis(100));
let (rx, downloaded_graph) = project let (mut rx, downloaded_graph) = project
.download_graph(graph, refreshed_sources, reqwest, threads, prod, write) .download_graph(graph, refreshed_sources, reqwest, prod, write)
.await
.context("failed to download dependencies")?; .context("failed to download dependencies")?;
while let Ok(result) = rx.recv() { while let Some(result) = rx.recv().await {
bar.inc(1); bar.inc(1);
match result { match result {
@ -238,41 +245,63 @@ pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
) )
} }
pub fn run_on_workspace_members( pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
project: &Project, project: &Project,
f: impl Fn(Project) -> anyhow::Result<()>, f: impl Fn(Project) -> F,
) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> { ) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> {
Ok(match project.workspace_dir() { // this might seem counterintuitive, but remember that
Some(_) => { // the presence of a workspace dir means that this project is a member of one
// this might seem counterintuitive, but remember that if project.workspace_dir().is_some() {
// the presence of a workspace dir means that this project is a member of one return Ok(Default::default());
Default::default() }
}
None => project let members_future = project.workspace_members(project.package_dir()).await?;
.workspace_members(project.package_dir()) pin!(members_future);
.context("failed to get workspace members")?
.into_iter() let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();
.map(|(path, manifest)| {
( while let Some((path, manifest)) = members_future.next().await.transpose()? {
manifest.name, let relative_path =
manifest.target.kind(), RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();
RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap())
.unwrap(), f(shift_project_dir(project, path)).await?;
)
}) results
.map(|(name, target, path)| { .entry(manifest.name)
f(shift_project_dir( .or_default()
project, .insert(manifest.target.kind(), relative_path);
path.to_path(project.package_dir()), }
))
.map(|_| (name, target, path)) Ok(results)
}) }
.collect::<Result<Vec<_>, _>>()
.context("failed to install workspace member's dependencies")? pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
.into_iter() if let Err(err) = result {
.fold(BTreeMap::new(), |mut map, (name, target, path)| { eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
map.entry(name).or_default().insert(target, path);
map let cause = err.chain().skip(1).collect::<Vec<_>>();
}),
}) if !cause.is_empty() {
eprintln!("{}:", "caused by".red().bold());
for err in cause {
eprintln!(" - {err}");
}
}
let backtrace = err.backtrace();
match backtrace.status() {
std::backtrace::BacktraceStatus::Disabled => {
eprintln!(
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
"help".yellow().bold()
);
}
std::backtrace::BacktraceStatus::Captured => {
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
}
_ => {
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
}
}
}
} }
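`workspace_members` now returns a `Stream` instead of a collected map, so the caller pins it on the stack with `tokio::pin!` (required because `StreamExt::next` needs `Unpin`) and drains it item by item; `transpose` flips each `Option<Result<..>>` into the `Result<Option<..>>` that `?` can unwrap. A self-contained sketch of that consumption pattern, assuming the `async-stream`, `futures`, `anyhow`, and `tokio` crates:

use async_stream::stream;
use futures::{Stream, StreamExt};
use tokio::pin;

fn members() -> impl Stream<Item = anyhow::Result<u32>> {
    stream! {
        for n in 0..3 {
            yield Ok::<_, anyhow::Error>(n);
        }
    }
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let members = members();
    pin!(members); // pin on the stack so `next()` is available

    while let Some(n) = members.next().await.transpose()? {
        println!("member {n}");
    }
    Ok(())
}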
@ -3,18 +3,22 @@ use crate::{
util::authenticate_conn, util::authenticate_conn,
}; };
use anyhow::Context; use anyhow::Context;
use fs_err::tokio as fs;
use gix::remote::{fetch::Shallow, Direction}; use gix::remote::{fetch::Shallow, Direction};
use pesde::Project; use pesde::Project;
use std::path::Path; use std::{path::Path, sync::atomic::AtomicBool};
use tokio::{runtime::Handle, task::spawn_blocking};
fn update_repo<P: AsRef<Path>>( async fn update_repo<P: AsRef<Path>>(
name: &str, name: &str,
path: P, path: P,
url: gix::Url, url: gix::Url,
project: &Project, project: &Project,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let path = path.as_ref(); let path = path.as_ref();
if path.exists() { let should_update = path.exists();
let (repo, oid) = if should_update {
let repo = gix::open(path).context(format!("failed to open {name} repository"))?; let repo = gix::open(path).context(format!("failed to open {name} repository"))?;
let remote = repo let remote = repo
@ -49,68 +53,91 @@ fn update_repo<P: AsRef<Path>>(
.or(unpacked.2) .or(unpacked.2)
.context("couldn't find oid in remote ref")?; .context("couldn't find oid in remote ref")?;
let tree = repo (repo, gix::ObjectId::from(oid))
.find_object(oid)
.context(format!("failed to find {name} repository tree"))?
.peel_to_tree()
.context(format!("failed to peel {name} repository object to tree"))?;
let mut index = gix::index::File::from_state(
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
format!("failed to create index state from {name} repository tree"),
)?,
repo.index_path(),
);
let opts = gix::worktree::state::checkout::Options {
overwrite_existing: true,
destination_is_initially_empty: false,
..Default::default()
};
gix::worktree::state::checkout(
&mut index,
repo.work_dir().context(format!("{name} repo is bare"))?,
repo.objects
.clone()
.into_arc()
.context("failed to clone objects")?,
&gix::progress::Discard,
&gix::progress::Discard,
&false.into(),
opts,
)
.context(format!("failed to checkout {name} repository"))?;
index
.write(gix::index::write::Options::default())
.context("failed to write index")?;
} else { } else {
fs_err::create_dir_all(path).context(format!("failed to create {name} directory"))?; fs::create_dir_all(path)
.await
.context(format!("failed to create {name} directory"))?;
gix::prepare_clone(url, path) let repo = gix::prepare_clone(url, path)
.context(format!("failed to prepare {name} repository clone"))? .context(format!("failed to prepare {name} repository clone"))?
.with_shallow(Shallow::Deepen(1)) .with_shallow(Shallow::Deepen(1))
.fetch_then_checkout(gix::progress::Discard, &false.into()) .fetch_only(gix::progress::Discard, &false.into())
.context(format!("failed to fetch and checkout {name} repository"))? .context(format!("failed to fetch and checkout {name} repository"))?
.0 .0;
.main_worktree(gix::progress::Discard, &false.into())
.context(format!("failed to set {name} repository as main worktree"))?; let oid = {
let mut head = repo
.head()
.context(format!("failed to get {name} repository head"))?;
let obj = head
.peel_to_object_in_place()
.context(format!("failed to peel {name} repository head to object"))?;
obj.id
};
(repo, oid)
}; };
Ok(()) let tree = repo
.find_object(oid)
.context(format!("failed to find {name} repository tree"))?
.peel_to_tree()
.context(format!("failed to peel {name} repository object to tree"))?;
let mut index = gix::index::File::from_state(
gix::index::State::from_tree(&tree.id, &repo.objects, Default::default()).context(
format!("failed to create index state from {name} repository tree"),
)?,
repo.index_path(),
);
let opts = gix::worktree::state::checkout::Options {
overwrite_existing: true,
destination_is_initially_empty: !should_update,
..Default::default()
};
gix::worktree::state::checkout(
&mut index,
repo.work_dir().context(format!("{name} repo is bare"))?,
repo.objects
.clone()
.into_arc()
.context("failed to clone objects")?,
&gix::progress::Discard,
&gix::progress::Discard,
&false.into(),
opts,
)
.context(format!("failed to checkout {name} repository"))?;
index
.write(gix::index::write::Options::default())
.context("failed to write index")
} }
pub fn update_repo_dependencies(project: &Project) -> anyhow::Result<()> { static SCRIPTS_UPDATED: AtomicBool = AtomicBool::new(false);
pub async fn update_scripts(project: &Project) -> anyhow::Result<()> {
if SCRIPTS_UPDATED.swap(true, std::sync::atomic::Ordering::Relaxed) {
return Ok(());
}
let home_dir = home_dir()?; let home_dir = home_dir()?;
let config = read_config()?; let config = read_config().await?;
update_repo( let project = project.clone();
"scripts", spawn_blocking(move || {
home_dir.join("scripts"), Handle::current().block_on(update_repo(
config.scripts_repo, "scripts",
project, home_dir.join("scripts"),
)?; config.scripts_repo,
&project,
))
})
.await??;
Ok(()) Ok(())
} }
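Two things happen in the new `update_scripts`. First, `AtomicBool::swap` returns the previous value, so only the very first caller sees `false` and performs the update; later callers return immediately, even if the first update is still in flight, which matches the lazy once-per-process behavior the removed `JoinHandle` plumbing used to provide. Second, the blocking `gix` work moves onto tokio's blocking pool, where `Handle::current().block_on` is legal because `spawn_blocking` threads inherit the runtime context. A reduced sketch of both pieces; the async block's body is a stand-in for the git fetch:

use std::sync::atomic::{AtomicBool, Ordering};
use tokio::{runtime::Handle, task::spawn_blocking};

static DONE: AtomicBool = AtomicBool::new(false);

async fn run_once() -> anyhow::Result<()> {
    // swap returns the old value: only the first caller gets `false`.
    if DONE.swap(true, Ordering::Relaxed) {
        return Ok(());
    }

    spawn_blocking(|| {
        // On a blocking-pool thread it is fine to block on async work.
        Handle::current().block_on(async {
            // ... mixed blocking/async work, e.g. a git fetch ...
        });
    })
    .await?;

    Ok(())
}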
@ -1,16 +1,18 @@
use anyhow::Context;
use colored::Colorize;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::{io::Read, path::PathBuf};
use crate::cli::{ use crate::cli::{
bin_dir, bin_dir,
config::{read_config, write_config, CliConfig}, config::{read_config, write_config, CliConfig},
files::make_executable, files::make_executable,
home_dir, home_dir,
}; };
use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::path::PathBuf;
use tokio::io::AsyncReadExt;
pub fn current_version() -> Version { pub fn current_version() -> Version {
Version::parse(env!("CARGO_PKG_VERSION")).unwrap() Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
@ -38,10 +40,10 @@ fn get_repo() -> (String, String) {
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6); const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<()> { pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
let (owner, repo) = get_repo(); let (owner, repo) = get_repo();
let config = read_config()?; let config = read_config().await?;
let version = if let Some((_, version)) = config let version = if let Some((_, version)) = config
.last_checked_updates .last_checked_updates
@ -54,10 +56,12 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
"https://api.github.com/repos/{owner}/{repo}/releases", "https://api.github.com/repos/{owner}/{repo}/releases",
)) ))
.send() .send()
.await
.context("failed to send request to GitHub API")? .context("failed to send request to GitHub API")?
.error_for_status() .error_for_status()
.context("failed to get GitHub API response")? .context("failed to get GitHub API response")?
.json::<Vec<Release>>() .json::<Vec<Release>>()
.await
.context("failed to parse GitHub API response")?; .context("failed to parse GitHub API response")?;
let version = releases let version = releases
@ -69,7 +73,8 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
write_config(&CliConfig { write_config(&CliConfig {
last_checked_updates: Some((chrono::Utc::now(), version.clone())), last_checked_updates: Some((chrono::Utc::now(), version.clone())),
..config ..config
})?; })
.await?;
version version
}; };
@ -97,8 +102,8 @@ pub fn check_for_updates(reqwest: &reqwest::blocking::Client) -> anyhow::Result<
Ok(()) Ok(())
} }
pub fn download_github_release( pub async fn download_github_release(
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
version: &Version, version: &Version,
) -> anyhow::Result<Vec<u8>> { ) -> anyhow::Result<Vec<u8>> {
let (owner, repo) = get_repo(); let (owner, repo) = get_repo();
@ -108,10 +113,12 @@ pub fn download_github_release(
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}", "https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
)) ))
.send() .send()
.await
.context("failed to send request to GitHub API")? .context("failed to send request to GitHub API")?
.error_for_status() .error_for_status()
.context("failed to get GitHub API response")? .context("failed to get GitHub API response")?
.json::<Release>() .json::<Release>()
.await
.context("failed to parse GitHub API response")?; .context("failed to parse GitHub API response")?;
let asset = release let asset = release
@ -130,34 +137,43 @@ pub fn download_github_release(
.get(asset.url) .get(asset.url)
.header(ACCEPT, "application/octet-stream") .header(ACCEPT, "application/octet-stream")
.send() .send()
.await
.context("failed to send request to download asset")? .context("failed to send request to download asset")?
.error_for_status() .error_for_status()
.context("failed to download asset")? .context("failed to download asset")?
.bytes() .bytes()
.await
.context("failed to download asset")?; .context("failed to download asset")?;
let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref()); let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
let mut archive = tar::Archive::new(&mut decoder); let mut archive = tokio_tar::Archive::new(&mut decoder);
let entry = archive let mut entry = archive
.entries() .entries()
.context("failed to read archive entries")? .context("failed to read archive entries")?
.next() .next()
.await
.context("archive has no entry")? .context("archive has no entry")?
.context("failed to get first archive entry")?; .context("failed to get first archive entry")?;
let mut result = Vec::new();
entry entry
.bytes() .read_to_end(&mut result)
.collect::<Result<Vec<u8>, std::io::Error>>() .await
.context("failed to read archive entry bytes") .context("failed to read archive entry bytes")?;
Ok(result)
} }
pub fn get_or_download_version( pub async fn get_or_download_version(
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
version: &Version, version: &Version,
) -> anyhow::Result<Option<PathBuf>> { ) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions"); let path = home_dir()?.join("versions");
fs_err::create_dir_all(&path).context("failed to create versions directory")?; fs::create_dir_all(&path)
.await
.context("failed to create versions directory")?;
let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX)); let path = path.join(format!("{version}{}", std::env::consts::EXE_SUFFIX));
@ -172,14 +188,19 @@ pub fn get_or_download_version(
} }
if is_requested_version { if is_requested_version {
fs_err::copy(std::env::current_exe()?, &path) fs::copy(std::env::current_exe()?, &path)
.await
.context("failed to copy current executable to version directory")?; .context("failed to copy current executable to version directory")?;
} else { } else {
let bytes = download_github_release(reqwest, version)?; let bytes = download_github_release(reqwest, version).await?;
fs_err::write(&path, bytes).context("failed to write downloaded version file")?; fs::write(&path, bytes)
.await
.context("failed to write downloaded version file")?;
} }
make_executable(&path).context("failed to make downloaded version executable")?; make_executable(&path)
.await
.context("failed to make downloaded version executable")?;
Ok(if is_requested_version { Ok(if is_requested_version {
None None
@ -188,48 +209,52 @@ pub fn get_or_download_version(
}) })
} }
pub fn max_installed_version() -> anyhow::Result<Version> { pub async fn max_installed_version() -> anyhow::Result<Version> {
let versions_dir = home_dir()?.join("versions"); let versions_dir = home_dir()?.join("versions");
fs_err::create_dir_all(&versions_dir).context("failed to create versions directory")?; fs::create_dir_all(&versions_dir)
.await
.context("failed to create versions directory")?;
let max_version = fs_err::read_dir(versions_dir) let mut read_dir = fs::read_dir(versions_dir)
.context("failed to read versions directory")? .await
.collect::<Result<Vec<_>, _>>()? .context("failed to read versions directory")?;
.into_iter() let mut max_version = current_version();
.map(|entry| {
#[cfg(not(windows))]
let name = entry
.path()
.file_name()
.unwrap()
.to_string_lossy()
.to_string();
#[cfg(windows)]
let name = entry
.path()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
Version::parse(&name).unwrap() while let Some(entry) = read_dir.next_entry().await? {
}) #[cfg(not(windows))]
.max() let name = entry
.filter(|v| v >= &current_version()) .path()
.unwrap_or_else(current_version); .file_name()
.unwrap()
.to_string_lossy()
.to_string();
#[cfg(windows)]
let name = entry
.path()
.file_stem()
.unwrap()
.to_string_lossy()
.to_string();
let version = Version::parse(&name).unwrap();
if version > max_version {
max_version = version;
}
}
Ok(max_version) Ok(max_version)
} }
pub fn update_bin_exe() -> anyhow::Result<()> { pub async fn update_bin_exe() -> anyhow::Result<()> {
let copy_to = bin_dir()?.join(format!( let copy_to = bin_dir().await?.join(format!(
"{}{}", "{}{}",
env!("CARGO_BIN_NAME"), env!("CARGO_BIN_NAME"),
std::env::consts::EXE_SUFFIX std::env::consts::EXE_SUFFIX
)); ));
fs_err::copy(std::env::current_exe()?, &copy_to) fs::copy(std::env::current_exe()?, &copy_to)
.await
.context("failed to copy executable to bin folder")?; .context("failed to copy executable to bin folder")?;
make_executable(&copy_to) make_executable(&copy_to).await
} }
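Unpacking the downloaded release swaps `flate2` + `tar` for their async counterparts: a `GzipDecoder` reads straight from the response bytes (`&[u8]` implements `AsyncBufRead`), `tokio_tar::Archive::entries` yields a `Stream` of entries, and the single binary inside is read with `AsyncReadExt::read_to_end`. A sketch of reading the first entry under those crates (`async-compression` with its `tokio` feature, `tokio-tar`, `futures`):

use anyhow::Context;
use async_compression::tokio::bufread::GzipDecoder;
use futures::StreamExt;
use tokio::io::AsyncReadExt;

async fn first_entry(bytes: &[u8]) -> anyhow::Result<Vec<u8>> {
    let mut decoder = GzipDecoder::new(bytes);
    let mut archive = tokio_tar::Archive::new(&mut decoder);

    let mut entry = archive
        .entries()
        .context("failed to read archive entries")?
        .next()
        .await
        .context("archive has no entry")? // Option -> Result via anyhow's Context
        .context("failed to get first archive entry")?;

    let mut out = Vec::new();
    entry.read_to_end(&mut out).await?;
    Ok(out)
}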
@ -7,42 +7,41 @@ use crate::{
}, },
Project, PACKAGES_CONTAINER_NAME, Project, PACKAGES_CONTAINER_NAME,
}; };
use fs_err::create_dir_all; use fs_err::tokio as fs;
use std::{ use std::{
collections::HashSet, collections::HashSet,
sync::{mpsc::Receiver, Arc, Mutex}, sync::{Arc, Mutex},
}; };
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>; type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
type MultithreadDownloadJob = ( type MultithreadDownloadJob = (
Receiver<Result<(), errors::DownloadGraphError>>, tokio::sync::mpsc::Receiver<Result<(), errors::DownloadGraphError>>,
MultithreadedGraph, MultithreadedGraph,
); );
impl Project { impl Project {
/// Downloads a graph of dependencies /// Downloads a graph of dependencies
pub fn download_graph( pub async fn download_graph(
&self, &self,
graph: &DependencyGraph, graph: &DependencyGraph,
refreshed_sources: &mut HashSet<PackageSources>, refreshed_sources: &mut HashSet<PackageSources>,
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
threads: usize,
prod: bool, prod: bool,
write: bool, write: bool,
) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> { ) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
let manifest = self.deser_manifest()?; let manifest = self.deser_manifest().await?;
let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default())); let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
let threadpool = threadpool::ThreadPool::new(threads); let (tx, rx) =
let (tx, rx) = std::sync::mpsc::channel(); tokio::sync::mpsc::channel(graph.iter().map(|(_, versions)| versions.len()).sum());
for (name, versions) in graph { for (name, versions) in graph {
for (version_id, node) in versions { for (version_id, node) in versions {
let source = node.pkg_ref.source(); let source = node.pkg_ref.source();
if refreshed_sources.insert(source.clone()) { if refreshed_sources.insert(source.clone()) {
source.refresh(self).map_err(Box::new)?; source.refresh(self).await.map_err(Box::new)?;
} }
let container_folder = node.container_folder( let container_folder = node.container_folder(
@ -59,7 +58,7 @@ impl Project {
version_id.version(), version_id.version(),
); );
create_dir_all(&container_folder)?; fs::create_dir_all(&container_folder).await?;
let tx = tx.clone(); let tx = tx.clone();
@ -71,27 +70,29 @@ impl Project {
let reqwest = reqwest.clone(); let reqwest = reqwest.clone();
let downloaded_graph = downloaded_graph.clone(); let downloaded_graph = downloaded_graph.clone();
threadpool.execute(move || { tokio::spawn(async move {
let project = project.clone(); let project = project.clone();
log::debug!("downloading {name}@{version_id}"); log::debug!("downloading {name}@{version_id}");
let (fs, target) = match source.download(&node.pkg_ref, &project, &reqwest) { let (fs, target) =
Ok(target) => target, match source.download(&node.pkg_ref, &project, &reqwest).await {
Err(e) => { Ok(target) => target,
tx.send(Err(Box::new(e).into())).unwrap(); Err(e) => {
return; tx.send(Err(Box::new(e).into())).await.unwrap();
} return;
}; }
};
log::debug!("downloaded {name}@{version_id}"); log::debug!("downloaded {name}@{version_id}");
if write { if write {
if !prod || node.ty != DependencyType::Dev { if !prod || node.ty != DependencyType::Dev {
match fs.write_to(container_folder, project.cas_dir(), true) { match fs.write_to(container_folder, project.cas_dir(), true).await {
Ok(_) => {} Ok(_) => {}
Err(e) => { Err(e) => {
tx.send(Err(errors::DownloadGraphError::WriteFailed(e))) tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
.await
.unwrap(); .unwrap();
return; return;
} }
@ -101,13 +102,15 @@ impl Project {
} }
} }
let mut downloaded_graph = downloaded_graph.lock().unwrap(); {
downloaded_graph let mut downloaded_graph = downloaded_graph.lock().unwrap();
.entry(name) downloaded_graph
.or_default() .entry(name)
.insert(version_id, DownloadedDependencyGraphNode { node, target }); .or_default()
.insert(version_id, DownloadedDependencyGraphNode { node, target });
}
tx.send(Ok(())).unwrap(); tx.send(Ok(())).await.unwrap();
}); });
} }
} }
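The thread pool plus `std::sync::mpsc` pair becomes `tokio::spawn` plus a bounded `tokio::sync::mpsc` channel whose capacity equals the total number of packages, so a send can never block a download task. A minimal sketch of that fan-out/fan-in shape; the job list stands in for the dependency graph:

use tokio::sync::mpsc;

#[tokio::main]
async fn main() {
    let jobs = vec!["a", "b", "c"];
    // Capacity equal to the job count means every send completes immediately.
    let (tx, mut rx) = mpsc::channel(jobs.len());

    for job in jobs {
        let tx = tx.clone();
        tokio::spawn(async move {
            // ... download one package here ...
            tx.send(format!("done {job}")).await.unwrap();
        });
    }
    drop(tx); // close the channel so recv() returns None once all tasks finish

    while let Some(msg) = rx.recv().await {
        println!("{msg}");
    }
}

The receiver loop doubles as the progress-bar driver in the `download_graph` wrapper above, ticking once per received result.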
@@ -4,6 +4,9 @@
 //! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.

 use crate::{lockfile::Lockfile, manifest::Manifest};
+use async_stream::stream;
+use fs_err::tokio as fs;
+use futures::Stream;
 use gix::sec::identity::Account;
 use std::{
     collections::HashMap,
@@ -137,42 +140,49 @@ impl Project {
     }

     /// Read the manifest file
-    pub fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
-        let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
+    pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
+        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
         Ok(string)
     }

     /// Deserialize the manifest file
-    pub fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
-        let string = fs_err::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME))?;
+    pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
+        let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }

     /// Write the manifest file
-    pub fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
-        fs_err::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref())
+    pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
+        fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
     }

     /// Deserialize the lockfile
-    pub fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
-        let string = fs_err::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME))?;
+    pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
+        let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
         Ok(toml::from_str(&string)?)
     }

     /// Write the lockfile
-    pub fn write_lockfile(&self, lockfile: Lockfile) -> Result<(), errors::LockfileWriteError> {
+    pub async fn write_lockfile(
+        &self,
+        lockfile: Lockfile,
+    ) -> Result<(), errors::LockfileWriteError> {
         let string = toml::to_string(&lockfile)?;
-        fs_err::write(self.package_dir.join(LOCKFILE_FILE_NAME), string)?;
+        fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string).await?;
         Ok(())
     }

     /// Get the workspace members
-    pub fn workspace_members<P: AsRef<Path>>(
+    pub async fn workspace_members<P: AsRef<Path>>(
         &self,
         dir: P,
-    ) -> Result<HashMap<PathBuf, Manifest>, errors::WorkspaceMembersError> {
+    ) -> Result<
+        impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
+        errors::WorkspaceMembersError,
+    > {
         let dir = dir.as_ref().to_path_buf();

-        let manifest = fs_err::read_to_string(dir.join(MANIFEST_FILE_NAME))
+        let manifest = fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
+            .await
             .map_err(errors::WorkspaceMembersError::ManifestMissing)?;
         let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
             errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
@@ -188,17 +198,18 @@ impl Project {
         .flat_map(|paths| paths.into_iter())
         .collect::<Result<Vec<_>, _>>()?;

-        members
-            .into_iter()
-            .map(|path| {
-                let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
-                    .map_err(errors::WorkspaceMembersError::ManifestMissing)?;
-                let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
-                    errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
-                })?;
-                Ok((path, manifest))
-            })
-            .collect::<Result<_, _>>()
+        Ok(stream! {
+            for path in members {
+                let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
+                    .await
+                    .map_err(errors::WorkspaceMembersError::ManifestMissing)?;
+                let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
+                    errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
+                })?;
+
+                yield Ok((path, manifest));
+            }
+        })
     }
 }


@@ -3,9 +3,14 @@ use crate::{
     lockfile::DownloadedGraph,
     names::PackageNames,
     scripts::{execute_script, ScriptName},
-    source::{fs::store_in_cas, traits::PackageRef, version_id::VersionId},
+    source::{
+        fs::{cas_path, store_in_cas},
+        traits::PackageRef,
+        version_id::VersionId,
+    },
     Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME,
 };
+use fs_err::tokio as fs;
 use std::{
     collections::BTreeMap,
     ffi::OsStr,
@@ -15,22 +20,25 @@ use std::{
 /// Generates linking modules for a project
 pub mod generator;

-fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
+async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
     let p = path.as_ref();
-    fs_err::create_dir_all(p)?;
+    fs::create_dir_all(p).await?;
     p.canonicalize()
 }

-fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
-    let cas_path = store_in_cas(cas_dir, contents.as_bytes())?.1;
+async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
+    let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;

-    fs_err::hard_link(cas_path, destination)
+    fs::hard_link(cas_path(&hash, cas_dir), destination).await
 }

 impl Project {
     /// Links the dependencies of the project
-    pub fn link_dependencies(&self, graph: &DownloadedGraph) -> Result<(), errors::LinkingError> {
-        let manifest = self.deser_manifest()?;
+    pub async fn link_dependencies(
+        &self,
+        graph: &DownloadedGraph,
+    ) -> Result<(), errors::LinkingError> {
+        let manifest = self.deser_manifest().await?;

         let mut package_types = BTreeMap::<&PackageNames, BTreeMap<&VersionId, Vec<String>>>::new();
@@ -57,7 +65,7 @@ impl Project {
                 let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
                     let lib_file = lib_file.to_path(&container_folder);

-                    let contents = match fs_err::read_to_string(&lib_file) {
+                    let contents = match fs::read_to_string(&lib_file).await {
                         Ok(contents) => contents,
                         Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
                             return Err(errors::LinkingError::LibFileNotFound(
@@ -128,7 +136,8 @@ impl Project {
                             .kind()
                             .packages_folder(&node.node.pkg_ref.target_kind()),
                     ),
-                )?;
+                )
+                .await?;
                 let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);

                 let container_folder = node.node.container_folder(
@@ -162,7 +171,8 @@ impl Project {
                         )?,
                         types,
                     ),
-                )?;
+                )
+                .await?;
                 };

                 if let Some(bin_file) = node.target.bin_path() {
@@ -177,7 +187,8 @@ impl Project {
                             &container_folder,
                         ),
                     ),
-                )?;
+                )
+                .await?;
                 }
             }
@@ -208,7 +219,8 @@ impl Project {
                         .target_kind()
                         .packages_folder(&dependency_node.node.pkg_ref.target_kind()),
                 ),
-            )?;
+            )
+            .await?;
             let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);

             let container_folder = dependency_node.node.container_folder(
@@ -220,7 +232,8 @@ impl Project {
             let linker_folder = create_and_canonicalize(
                 node_container_folder
                     .join(node.node.base_folder(dependency_node.target.kind())),
-            )?;
+            )
+            .await?;

             write_cas(
                 linker_folder.join(format!("{dependency_alias}.luau")),
@@ -241,7 +254,8 @@ impl Project {
                     .and_then(|v| v.get(dependency_version_id))
                     .unwrap(),
                 ),
-            )?;
+            )
+            .await?;
         }
     }
 }
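write_cas now stores the generated linker module in the content-addressable store, then hard-links the CAS entry into place. A rough sketch of that two-step shape; cas_path_for reimplements the cas_path helper's prefix/rest layout only so the example is self-contained:

    use fs_err::tokio as fs;
    use std::path::{Path, PathBuf};

    // stand-in: the real helper splits the hash into a two-char prefix directory
    fn cas_path_for(hash: &str, cas_dir: &Path) -> PathBuf {
        let (prefix, rest) = hash.split_at(2);
        cas_dir.join(prefix).join(rest)
    }

    async fn link_from_cas(hash: &str, cas_dir: &Path, dest: &Path) -> std::io::Result<()> {
        // hard links let every consumer share the single read-only CAS copy
        fs::hard_link(cas_path_for(hash, cas_dir), dest).await
    }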


@@ -2,17 +2,16 @@
 use crate::cli::version::{
     check_for_updates, current_version, get_or_download_version, max_installed_version,
 };
-use crate::cli::{auth::get_tokens, home_dir, repos::update_repo_dependencies, HOME_DIR};
+use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
 use anyhow::Context;
 use clap::Parser;
-use colored::Colorize;
+use fs_err::tokio as fs;
 use indicatif::MultiProgress;
 use indicatif_log_bridge::LogWrapper;
 use pesde::{AuthConfig, Project, MANIFEST_FILE_NAME};
 use std::{
     collections::HashSet,
     path::{Path, PathBuf},
-    thread::spawn,
 };
 use tempfile::NamedTempFile;
@@ -34,7 +33,7 @@ struct Cli {
     subcommand: cli::commands::Subcommand,
 }

-fn get_linkable_dir(path: &Path) -> PathBuf {
+async fn get_linkable_dir(path: &Path) -> PathBuf {
     let mut curr_path = PathBuf::new();
     let file_to_try = NamedTempFile::new_in(path).expect("failed to create temporary file");
     let temp_file_name = file_to_try.path().file_name().unwrap();
@@ -44,8 +43,8 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
         let try_path = curr_path.join(temp_file_name);

-        if fs_err::hard_link(file_to_try.path(), &try_path).is_ok() {
-            if let Err(err) = fs_err::remove_file(&try_path) {
+        if fs::hard_link(file_to_try.path(), &try_path).await.is_ok() {
+            if let Err(err) = fs::remove_file(&try_path).await {
                 log::warn!(
                     "failed to remove temporary file at {}: {err}",
                     try_path.display()
@@ -62,7 +61,7 @@ fn get_linkable_dir(path: &Path) -> PathBuf {
     );
 }

-fn run() -> anyhow::Result<()> {
+async fn run() -> anyhow::Result<()> {
     let cwd = std::env::current_dir().expect("failed to get current working directory");

     #[cfg(windows)]
@@ -107,8 +106,9 @@ fn run() -> anyhow::Result<()> {
         let mut project_root = None::<PathBuf>;
         let mut workspace_dir = None::<PathBuf>;

-        fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
-            let manifest = fs_err::read_to_string(path.join(MANIFEST_FILE_NAME))
+        async fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
+            let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
+                .await
                 .context("failed to read manifest")?;
             let manifest: pesde::manifest::Manifest =
                 toml::from_str(&manifest).context("failed to parse manifest")?;
@@ -143,13 +143,13 @@ fn run() -> anyhow::Result<()> {
                 }
                 (Some(project_root), None) => {
-                    if get_workspace_members(&path)?.contains(project_root) {
+                    if get_workspace_members(&path).await?.contains(project_root) {
                         workspace_dir = Some(path);
                     }
                 }

                 (None, None) => {
-                    if get_workspace_members(&path)?.contains(&cwd) {
+                    if get_workspace_members(&path).await?.contains(&cwd) {
                         // initializing a new member of a workspace
                         break 'finder (cwd, Some(path));
                     } else {
@@ -179,9 +179,11 @@ fn run() -> anyhow::Result<()> {
     let home_dir = home_dir()?;
     let data_dir = home_dir.join("data");
-    fs_err::create_dir_all(&data_dir).expect("failed to create data directory");
+    fs::create_dir_all(&data_dir)
+        .await
+        .expect("failed to create data directory");

-    let cas_dir = get_linkable_dir(&project_root_dir).join(HOME_DIR);
+    let cas_dir = get_linkable_dir(&project_root_dir).await.join(HOME_DIR);

     let cas_dir = if cas_dir == home_dir {
         &data_dir
@@ -197,7 +199,7 @@ fn run() -> anyhow::Result<()> {
         project_workspace_dir,
         data_dir,
         cas_dir,
-        AuthConfig::new().with_tokens(get_tokens()?.0),
+        AuthConfig::new().with_tokens(get_tokens().await?.0),
     );

     let reqwest = {
@@ -210,7 +212,7 @@ fn run() -> anyhow::Result<()> {
             .context("failed to create accept header")?,
         );

-        reqwest::blocking::Client::builder()
+        reqwest::Client::builder()
             .user_agent(concat!(
                 env!("CARGO_PKG_NAME"),
                 "/",
@@ -224,21 +226,22 @@ fn run() -> anyhow::Result<()> {
     {
         let target_version = project
             .deser_manifest()
+            .await
             .ok()
             .and_then(|manifest| manifest.pesde_version);

         // store the current version in case it needs to be used later
-        get_or_download_version(&reqwest, &current_version())?;
+        get_or_download_version(&reqwest, &current_version()).await?;

         let exe_path = if let Some(version) = target_version {
-            Some(get_or_download_version(&reqwest, &version)?)
+            Some(get_or_download_version(&reqwest, &version).await?)
         } else {
             None
         };

         let exe_path = if let Some(exe_path) = exe_path {
             exe_path
         } else {
-            get_or_download_version(&reqwest, &max_installed_version()?)?
+            get_or_download_version(&reqwest, &max_installed_version().await?).await?
         };

         if let Some(exe_path) = exe_path {
@@ -250,62 +253,26 @@ fn run() -> anyhow::Result<()> {
             std::process::exit(status.code().unwrap());
         }

-        display_err(check_for_updates(&reqwest), " while checking for updates");
-    }
-
-    let project_2 = project.clone();
-    let update_task = spawn(move || {
-        display_err(
-            update_repo_dependencies(&project_2),
-            " while updating repository dependencies",
-        );
-    });
+        display_err(
+            check_for_updates(&reqwest).await,
+            " while checking for updates",
+        );
+    }

     let cli = match Cli::try_parse() {
         Ok(cli) => cli,
         Err(err) => {
             let _ = err.print();
-            update_task.join().expect("failed to join update task");
             std::process::exit(err.exit_code());
         }
     };

-    cli.subcommand.run(project, multi, reqwest, update_task)
+    cli.subcommand.run(project, multi, reqwest).await
 }

-fn display_err(result: anyhow::Result<()>, prefix: &str) {
-    if let Err(err) = result {
-        eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
-
-        let cause = err.chain().skip(1).collect::<Vec<_>>();
-
-        if !cause.is_empty() {
-            eprintln!("{}:", "caused by".red().bold());
-            for err in cause {
-                eprintln!("  - {err}");
-            }
-        }
-
-        let backtrace = err.backtrace();
-        match backtrace.status() {
-            std::backtrace::BacktraceStatus::Disabled => {
-                eprintln!(
-                    "\n{}: set RUST_BACKTRACE=1 for a backtrace",
-                    "help".yellow().bold()
-                );
-            }
-            std::backtrace::BacktraceStatus::Captured => {
-                eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
-            }
-            _ => {
-                eprintln!("\n{}: not captured", "backtrace".yellow().bold());
-            }
-        }
-    }
-}
-
-fn main() {
-    let result = run();
+#[tokio::main]
+async fn main() {
+    let result = run().await;

     let is_err = result.is_err();
     display_err(result, "");
     if is_err {
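get_linkable_dir walks upward from the project root and uses a hard-link attempt as a same-filesystem probe, since hard links cannot cross filesystems. A reduced sketch of that probe, with a hypothetical probe file name:

    use fs_err::tokio as fs;
    use std::path::Path;

    // a successful hard link means `dir` sits on the same filesystem as
    // `file`, so it can host the shared content-addressable store
    async fn same_filesystem(file: &Path, dir: &Path) -> bool {
        let probe = dir.join(".pesde-link-probe"); // hypothetical name
        match fs::hard_link(file, &probe).await {
            Ok(()) => {
                let _ = fs::remove_file(&probe).await;
                true
            }
            Err(_) => false,
        }
    }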


@@ -2,8 +2,8 @@ use crate::{
     lockfile::DownloadedGraph, source::traits::PackageRef, Project, MANIFEST_FILE_NAME,
     PACKAGES_CONTAINER_NAME,
 };
-use fs_err::read;
-use git2::{ApplyLocation, ApplyOptions, Diff, DiffFormat, DiffLineType, Repository, Signature};
+use fs_err::tokio as fs;
+use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
 use relative_path::RelativePathBuf;
 use std::path::Path;
@@ -72,14 +72,19 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {

 impl Project {
     /// Apply patches to the project's dependencies
-    pub fn apply_patches(&self, graph: &DownloadedGraph) -> Result<(), errors::ApplyPatchesError> {
-        let manifest = self.deser_manifest()?;
+    pub async fn apply_patches(
+        &self,
+        graph: &DownloadedGraph,
+    ) -> Result<(), errors::ApplyPatchesError> {
+        let manifest = self.deser_manifest().await?;

         for (name, versions) in manifest.patches {
             for (version_id, patch_path) in versions {
                 let patch_path = patch_path.to_path(self.package_dir());
                 let patch = Diff::from_buffer(
-                    &read(&patch_path).map_err(errors::ApplyPatchesError::PatchRead)?,
+                    &fs::read(&patch_path)
+                        .await
+                        .map_err(errors::ApplyPatchesError::PatchRead)?,
                 )?;

                 let Some(node) = graph
@@ -110,42 +115,37 @@ impl Project {
                 {
                     let repo = setup_patches_repo(&container_folder)?;

-                    let mut apply_opts = ApplyOptions::new();
-                    apply_opts.delta_callback(|delta| {
-                        let Some(delta) = delta else {
-                            return true;
-                        };
-
+                    for delta in patch.deltas() {
                         if !matches!(delta.status(), git2::Delta::Modified) {
-                            return true;
+                            continue;
                         }

                         let file = delta.new_file();
                         let Some(relative_path) = file.path() else {
-                            return true;
+                            continue;
                         };

                         let relative_path = RelativePathBuf::from_path(relative_path).unwrap();
                         let path = relative_path.to_path(&container_folder);

                         if !path.is_file() {
-                            return true;
+                            continue;
                         }

                         // there is no way (as far as I know) to check if it's hardlinked
                         // so, we always unlink it
-                        let content = read(&path).unwrap();
-                        fs_err::remove_file(&path).unwrap();
-                        fs_err::write(path, content).unwrap();
-
-                        true
-                    });
-
-                    repo.apply(&patch, ApplyLocation::Both, Some(&mut apply_opts))?;
+                        let content = fs::read(&path).await.unwrap();
+                        fs::remove_file(&path).await.unwrap();
+                        fs::write(path, content).await.unwrap();
+                    }
+
+                    repo.apply(&patch, ApplyLocation::Both, None)?;
                 }

                 log::debug!("patch applied to {name}@{version_id}, removing .git directory");

-                fs_err::remove_dir_all(container_folder.join(".git"))
+                fs::remove_dir_all(container_folder.join(".git"))
+                    .await
                     .map_err(errors::ApplyPatchesError::DotGitRemove)?;
             }
         }


@@ -15,12 +15,15 @@ use std::collections::{HashMap, HashSet, VecDeque};

 impl Project {
     /// Create a dependency graph from the project's manifest
-    pub fn dependency_graph(
+    pub async fn dependency_graph(
         &self,
         previous_graph: Option<&DependencyGraph>,
         refreshed_sources: &mut HashSet<PackageSources>,
     ) -> Result<DependencyGraph, Box<errors::DependencyGraphError>> {
-        let manifest = self.deser_manifest().map_err(|e| Box::new(e.into()))?;
+        let manifest = self
+            .deser_manifest()
+            .await
+            .map_err(|e| Box::new(e.into()))?;

         let mut all_specifiers = manifest
             .all_dependencies()
@@ -190,11 +193,12 @@ impl Project {
             };

             if refreshed_sources.insert(source.clone()) {
-                source.refresh(self).map_err(|e| Box::new(e.into()))?;
+                source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
             }

             let (name, resolved) = source
                 .resolve(&specifier, self, target)
+                .await
                 .map_err(|e| Box::new(e.into()))?;

             let Some(target_version_id) = graph


@@ -1,17 +1,21 @@
-use std::{
-    collections::BTreeMap,
-    io::{BufWriter, Read, Write},
-    path::{Path, PathBuf},
-};
-
 use crate::{
     manifest::target::TargetKind,
     source::{IGNORED_DIRS, IGNORED_FILES},
-    util::hash,
 };
+use fs_err::tokio as fs;
 use relative_path::RelativePathBuf;
 use serde::{Deserialize, Serialize};
 use sha2::{Digest, Sha256};
+use std::{
+    collections::{BTreeMap, VecDeque},
+    future::Future,
+    path::{Path, PathBuf},
+};
+use tempfile::Builder;
+use tokio::{
+    io::{AsyncReadExt, AsyncWriteExt, BufWriter},
+    pin,
+};

 /// A file system entry
 #[derive(Debug, Clone, Serialize, Deserialize)]
@@ -35,7 +39,7 @@ pub enum PackageFS {
     Copy(PathBuf, TargetKind),
 }

-fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
+fn make_readonly(_file: &fs::File) -> std::io::Result<()> {
     // on Windows, file deletion is disallowed if the file is read-only which breaks patching
     #[cfg(not(windows))]
     {
@@ -48,58 +52,54 @@ fn make_readonly(_file: &fs_err::File) -> std::io::Result<()> {
     Ok(())
 }

-pub(crate) fn store_in_cas<P: AsRef<Path>>(
-    cas_dir: P,
-    contents: &[u8],
-) -> std::io::Result<(String, PathBuf)> {
-    let hash = hash(contents);
+pub(crate) fn cas_path(hash: &str, cas_dir: &Path) -> PathBuf {
     let (prefix, rest) = hash.split_at(2);
-
-    let folder = cas_dir.as_ref().join(prefix);
-    fs_err::create_dir_all(&folder)?;
-
-    let cas_path = folder.join(rest);
-    if !cas_path.exists() {
-        let mut file = fs_err::File::create(&cas_path)?;
-        file.write_all(contents)?;
-
-        make_readonly(&file)?;
-    }
-
-    Ok((hash, cas_path))
+    cas_dir.join(prefix).join(rest)
 }

-pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
+pub(crate) async fn store_in_cas<
+    R: tokio::io::AsyncRead + Unpin,
+    P: AsRef<Path>,
+    C: FnMut(Vec<u8>) -> F,
+    F: Future<Output = std::io::Result<()>>,
+>(
     cas_dir: P,
-    contents: &mut dyn Read,
+    mut contents: R,
+    mut bytes_cb: C,
 ) -> std::io::Result<String> {
     let tmp_dir = cas_dir.as_ref().join(".tmp");
-    fs_err::create_dir_all(&tmp_dir)?;
+    fs::create_dir_all(&tmp_dir).await?;
     let mut hasher = Sha256::new();
     let mut buf = [0; 8 * 1024];
-    let mut file_writer = BufWriter::new(tempfile::NamedTempFile::new_in(&tmp_dir)?);
+
+    let temp_path = Builder::new()
+        .make_in(&tmp_dir, |_| Ok(()))?
+        .into_temp_path();
+    let mut file_writer = BufWriter::new(fs::File::create(temp_path.to_path_buf()).await?);

     loop {
-        let bytes_read = contents.read(&mut buf)?;
+        let bytes_future = contents.read(&mut buf);
+        pin!(bytes_future);
+        let bytes_read = bytes_future.await?;

         if bytes_read == 0 {
             break;
         }

         let bytes = &buf[..bytes_read];
         hasher.update(bytes);
-        file_writer.write_all(bytes)?;
+        bytes_cb(bytes.to_vec()).await?;
+        file_writer.write_all(bytes).await?;
     }

     let hash = format!("{:x}", hasher.finalize());
-    let (prefix, rest) = hash.split_at(2);

-    let folder = cas_dir.as_ref().join(prefix);
-    fs_err::create_dir_all(&folder)?;
-    let cas_path = folder.join(rest);
+    let cas_path = cas_path(&hash, cas_dir.as_ref());
+    fs::create_dir_all(cas_path.parent().unwrap()).await?;

-    match file_writer.into_inner()?.persist_noclobber(&cas_path) {
+    match temp_path.persist_noclobber(&cas_path) {
         Ok(_) => {
-            make_readonly(&fs_err::File::open(cas_path)?)?;
+            make_readonly(&file_writer.into_inner())?;
         }
         Err(e) if e.error.kind() == std::io::ErrorKind::AlreadyExists => {}
         Err(e) => return Err(e.error),
@@ -108,43 +108,9 @@ pub(crate) fn store_reader_in_cas<P: AsRef<Path>>(
     Ok(hash)
 }

-fn copy_dir_all(
-    src: impl AsRef<Path>,
-    dst: impl AsRef<Path>,
-    target: TargetKind,
-) -> std::io::Result<()> {
-    fs_err::create_dir_all(&dst)?;
-
-    'outer: for entry in fs_err::read_dir(src.as_ref().to_path_buf())? {
-        let entry = entry?;
-        let ty = entry.file_type()?;
-        let file_name = entry.file_name().to_string_lossy().to_string();
-
-        if ty.is_dir() {
-            if IGNORED_DIRS.contains(&file_name.as_ref()) {
-                continue;
-            }
-
-            for other_target in TargetKind::VARIANTS {
-                if target.packages_folder(other_target) == file_name {
-                    continue 'outer;
-                }
-            }
-
-            copy_dir_all(entry.path(), dst.as_ref().join(&file_name), target)?;
-        } else {
-            if IGNORED_FILES.contains(&file_name.as_ref()) {
-                continue;
-            }
-
-            fs_err::copy(entry.path(), dst.as_ref().join(file_name))?;
-        }
-    }
-
-    Ok(())
-}
-
 impl PackageFS {
     /// Write the package to the given destination
-    pub fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
+    pub async fn write_to<P: AsRef<Path>, Q: AsRef<Path>>(
         &self,
         destination: P,
         cas_path: Q,
@@ -158,17 +124,17 @@ impl PackageFS {
                     match entry {
                         FSEntry::File(hash) => {
                             if let Some(parent) = path.parent() {
-                                fs_err::create_dir_all(parent)?;
+                                fs::create_dir_all(parent).await?;
                             }

                             let (prefix, rest) = hash.split_at(2);
                             let cas_file_path = cas_path.as_ref().join(prefix).join(rest);

                             if link {
-                                fs_err::hard_link(cas_file_path, path)?;
+                                fs::hard_link(cas_file_path, path).await?;
                             } else {
-                                let mut f = fs_err::File::create(&path)?;
-                                f.write_all(&fs_err::read(cas_file_path)?)?;
+                                let mut f = fs::File::create(&path).await?;
+                                f.write_all(&fs::read(cas_file_path).await?).await?;

                                 #[cfg(unix)]
                                 {
@@ -180,13 +146,45 @@ impl PackageFS {
                                 }
                             }
                         }
                         FSEntry::Directory => {
-                            fs_err::create_dir_all(path)?;
+                            fs::create_dir_all(path).await?;
                         }
                     }
                 }
             }
             PackageFS::Copy(src, target) => {
-                copy_dir_all(src, destination, *target)?;
+                fs::create_dir_all(destination.as_ref()).await?;
+
+                let mut read_dirs = VecDeque::from([fs::read_dir(src.to_path_buf())]);
+                while let Some(read_dir) = read_dirs.pop_front() {
+                    let mut read_dir = read_dir.await?;
+
+                    while let Some(entry) = read_dir.next_entry().await? {
+                        let relative_path =
+                            RelativePathBuf::from_path(entry.path().strip_prefix(src).unwrap())
+                                .unwrap();
+                        let file_name = relative_path.file_name().unwrap();
+
+                        if entry.file_type().await?.is_dir() {
+                            if IGNORED_DIRS.contains(&file_name) {
+                                continue;
+                            }
+
+                            for other_target in TargetKind::VARIANTS {
+                                if target.packages_folder(other_target) == file_name {
+                                    continue;
+                                }
+                            }
+
+                            read_dirs.push_back(fs::read_dir(entry.path()));
+                            continue;
+                        }
+
+                        if IGNORED_FILES.contains(&file_name) {
+                            continue;
+                        }
+
+                        fs::copy(entry.path(), relative_path.to_path(destination.as_ref())).await?;
+                    }
+                }
             }
         }
@@ -194,7 +192,7 @@ impl PackageFS {
     }

     /// Returns the contents of the file with the given hash
-    pub fn read_file<P: AsRef<Path>, H: AsRef<str>>(
+    pub async fn read_file<P: AsRef<Path>, H: AsRef<str>>(
         &self,
         file_hash: H,
         cas_path: P,
@@ -205,6 +203,6 @@ impl PackageFS {
         let (prefix, rest) = file_hash.as_ref().split_at(2);
         let cas_file_path = cas_path.as_ref().join(prefix).join(rest);

-        fs_err::read_to_string(cas_file_path).ok()
+        fs::read_to_string(cas_file_path).await.ok()
     }
 }
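store_in_cas now hashes while it streams: each 8 KiB chunk read from the AsyncRead is fed to the SHA-256 hasher, the caller's byte callback, and the buffered temp-file writer before the next read. A minimal sketch of the hash-while-reading loop on its own:

    use sha2::{Digest, Sha256};
    use tokio::io::{AsyncRead, AsyncReadExt};

    async fn hash_reader<R: AsyncRead + Unpin>(mut reader: R) -> std::io::Result<String> {
        let mut hasher = Sha256::new();
        let mut buf = [0u8; 8 * 1024];
        loop {
            let n = reader.read(&mut buf).await?;
            if n == 0 {
                break; // EOF
            }
            hasher.update(&buf[..n]);
        }
        Ok(format!("{:x}", hasher.finalize()))
    }

    #[tokio::main]
    async fn main() -> std::io::Result<()> {
        // a byte slice implements AsyncRead, which keeps the demo self-contained
        let digest = hash_reader([1u8, 2, 3].as_slice()).await?;
        println!("{digest}");
        Ok(())
    }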


@@ -10,7 +10,7 @@ use crate::{
     },
     names::PackageNames,
     source::{
-        fs::{store_in_cas, FSEntry, PackageFS},
+        fs::{FSEntry, PackageFS},
         git::{pkg_ref::GitPackageRef, specifier::GitDependencySpecifier},
         git_index::GitBasedSource,
         specifiers::DependencySpecifiers,
@@ -20,6 +20,7 @@ use crate::{
     util::hash,
     Project, DEFAULT_INDEX_NAME, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
 };
+use fs_err::tokio as fs;

 /// The Git package reference
 pub mod pkg_ref;
@@ -63,11 +64,11 @@ impl PackageSource for GitPackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
-        GitBasedSource::refresh(self, project)
+    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
+        GitBasedSource::refresh(self, project).await
     }

-    fn resolve(
+    async fn resolve(
         &self,
         specifier: &Self::Specifier,
         project: &Project,
@@ -324,11 +325,11 @@ impl PackageSource for GitPackageSource {
         ))
     }

-    fn download(
+    async fn download(
         &self,
         pkg_ref: &Self::Ref,
         project: &Project,
-        _reqwest: &reqwest::blocking::Client,
+        _reqwest: &reqwest::Client,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
         let index_file = project
             .cas_dir
@@ -336,7 +337,7 @@ impl PackageSource for GitPackageSource {
             .join(hash(self.as_bytes()))
             .join(&pkg_ref.tree_id);

-        match fs_err::read_to_string(&index_file) {
+        match fs::read_to_string(&index_file).await {
             Ok(s) => {
                 log::debug!(
                     "using cached index file for package {}#{} {}",
@@ -354,6 +355,7 @@ impl PackageSource for GitPackageSource {
                 match entries.get(&RelativePathBuf::from(MANIFEST_FILE_NAME)) {
                     Some(FSEntry::File(hash)) => match fs
                         .read_file(hash, project.cas_dir())
+                        .await
                         .map(|m| toml::de::from_str::<Manifest>(&m))
                     {
                         Some(Ok(m)) => Some(m),
@@ -376,9 +378,10 @@ impl PackageSource for GitPackageSource {
                     #[cfg(feature = "wally-compat")]
                     None if !pkg_ref.new_structure => {
                         let tempdir = tempfile::tempdir()?;
-                        fs.write_to(tempdir.path(), project.cas_dir(), false)?;
+                        fs.write_to(tempdir.path(), project.cas_dir(), false)
+                            .await?;

-                        crate::source::wally::compat_util::get_target(project, &tempdir)?
+                        crate::source::wally::compat_util::get_target(project, &tempdir).await?
                     }
                     None => {
                         return Err(errors::DownloadError::NoManifest(Box::new(
@@ -393,47 +396,50 @@ impl PackageSource for GitPackageSource {
             Err(e) => return Err(errors::DownloadError::Io(e)),
         }

-        let repo = gix::open(self.path(project))
-            .map_err(|e| errors::DownloadError::OpenRepo(Box::new(self.repo_url.clone()), e))?;
-        let rev = repo
-            .rev_parse_single(BStr::new(&pkg_ref.tree_id))
-            .map_err(|e| {
-                errors::DownloadError::ParseRev(
-                    pkg_ref.tree_id.clone(),
-                    Box::new(self.repo_url.clone()),
-                    e,
-                )
-            })?;
-        let tree = rev
-            .object()
-            .map_err(|e| {
-                errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
-            })?
-            .peel_to_tree()
-            .map_err(|e| {
-                errors::DownloadError::ParseObjectToTree(Box::new(self.repo_url.clone()), e)
-            })?;
-
-        let mut recorder = Recorder::default();
-        tree.traverse()
-            .breadthfirst(&mut recorder)
-            .map_err(|e| errors::DownloadError::TraverseTree(Box::new(self.repo_url.clone()), e))?;
-
         let mut entries = BTreeMap::new();
         let mut manifest = None;

+        let repo = gix::open(self.path(project))
+            .map_err(|e| errors::DownloadError::OpenRepo(Box::new(self.repo_url.clone()), e))?;
+
+        let recorder = {
+            let rev = repo
+                .rev_parse_single(BStr::new(&pkg_ref.tree_id))
+                .map_err(|e| {
+                    errors::DownloadError::ParseRev(
+                        pkg_ref.tree_id.clone(),
+                        Box::new(self.repo_url.clone()),
+                        e,
+                    )
+                })?;
+            let tree = rev
+                .object()
+                .map_err(|e| {
+                    errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
+                })?
+                .peel_to_tree()
+                .map_err(|e| {
+                    errors::DownloadError::ParseObjectToTree(Box::new(self.repo_url.clone()), e)
+                })?;
+
+            let mut recorder = Recorder::default();
+            tree.traverse().breadthfirst(&mut recorder).map_err(|e| {
+                errors::DownloadError::TraverseTree(Box::new(self.repo_url.clone()), e)
+            })?;
+
+            recorder
+        };
+
         for entry in recorder.records {
             let path = RelativePathBuf::from(entry.filepath.to_string());
+            let name = path.file_name().unwrap_or("");
             let object = repo.find_object(entry.oid).map_err(|e| {
                 errors::DownloadError::ParseEntryToObject(Box::new(self.repo_url.clone()), e)
             })?;

             if matches!(object.kind, gix::object::Kind::Tree) {
-                if path
-                    .components()
-                    .next()
-                    .is_some_and(|ct| IGNORED_DIRS.contains(&ct.as_str()))
-                {
+                if IGNORED_DIRS.contains(&name) {
                     continue;
                 }
@@ -442,13 +448,13 @@ impl PackageSource for GitPackageSource {
                 continue;
             }

-            if IGNORED_FILES.contains(&path.as_str()) {
+            if IGNORED_FILES.contains(&name) {
                 continue;
             }

-            if pkg_ref.use_new_structure() && path == "default.project.json" {
+            if pkg_ref.use_new_structure() && name == "default.project.json" {
                 log::debug!(
-                    "removing default.project.json from {}#{} - using new structure",
+                    "removing default.project.json from {}#{} at {path} - using new structure",
                     pkg_ref.repo,
                     pkg_ref.tree_id
                 );
@@ -456,13 +462,15 @@ impl PackageSource for GitPackageSource {
             }

             let data = object.into_blob().data.clone();
-            let hash = store_in_cas(project.cas_dir(), &data)?.0;
+            // let hash =
+            //     store_reader_in_cas(project.cas_dir(), data.as_slice(), |_| async { Ok(()) })
+            //         .await?;

             if path == MANIFEST_FILE_NAME {
                 manifest = Some(data);
             }

-            entries.insert(path, FSEntry::File(hash));
+            // entries.insert(path, FSEntry::File(hash));
         }

         let manifest = match manifest {
@@ -488,9 +496,10 @@ impl PackageSource for GitPackageSource {
             #[cfg(feature = "wally-compat")]
             None if !pkg_ref.new_structure => {
                 let tempdir = tempfile::tempdir()?;
-                fs.write_to(tempdir.path(), project.cas_dir(), false)?;
+                fs.write_to(tempdir.path(), project.cas_dir(), false)
+                    .await?;

-                crate::source::wally::compat_util::get_target(project, &tempdir)?
+                crate::source::wally::compat_util::get_target(project, &tempdir).await?
             }
             None => {
                 return Err(errors::DownloadError::NoManifest(Box::new(
@@ -500,15 +509,16 @@ impl PackageSource for GitPackageSource {
         };

         if let Some(parent) = index_file.parent() {
-            fs_err::create_dir_all(parent)?;
+            fs::create_dir_all(parent).await?;
         }

-        fs_err::write(
+        fs::write(
             &index_file,
             toml::to_string(&fs).map_err(|e| {
                 errors::DownloadError::SerializeIndex(Box::new(self.repo_url.clone()), e)
             })?,
         )
+        .await
         .map_err(errors::DownloadError::Io)?;

         Ok((fs, target))


@@ -1,6 +1,8 @@
-use gix::remote::Direction;
+#![allow(async_fn_in_trait)]

 use crate::{util::authenticate_conn, Project};
+use fs_err::tokio as fs;
+use gix::remote::Direction;
+use tokio::task::spawn_blocking;

 /// A trait for sources that are based on Git repositories
 pub trait GitBasedSource {
@@ -104,56 +106,79 @@ pub trait GitBasedSource {
     }

     /// Refreshes the repository
-    fn refresh(&self, project: &Project) -> Result<(), errors::RefreshError> {
+    async fn refresh(&self, project: &Project) -> Result<(), errors::RefreshError> {
         let path = self.path(project);
+        let repo_url = self.repo_url().clone();
+        let auth_config = project.auth_config.clone();

         if path.exists() {
-            let repo = match gix::open(&path) {
-                Ok(repo) => repo,
-                Err(e) => return Err(errors::RefreshError::Open(path, Box::new(e))),
-            };
-            let remote = match repo.find_default_remote(Direction::Fetch) {
-                Some(Ok(remote)) => remote,
-                Some(Err(e)) => {
-                    return Err(errors::RefreshError::GetDefaultRemote(path, Box::new(e)))
-                }
-                None => {
-                    return Err(errors::RefreshError::NoDefaultRemote(path));
-                }
-            };
-
-            let mut connection = remote.connect(Direction::Fetch).map_err(|e| {
-                errors::RefreshError::Connect(self.repo_url().to_string(), Box::new(e))
-            })?;
-
-            authenticate_conn(&mut connection, &project.auth_config);
-
-            connection
-                .prepare_fetch(gix::progress::Discard, Default::default())
-                .map_err(|e| {
-                    errors::RefreshError::PrepareFetch(self.repo_url().to_string(), Box::new(e))
-                })?
-                .receive(gix::progress::Discard, &false.into())
-                .map_err(|e| {
-                    errors::RefreshError::Read(self.repo_url().to_string(), Box::new(e))
-                })?;
+            spawn_blocking(move || {
+                let repo = match gix::open(&path) {
+                    Ok(repo) => repo,
+                    Err(e) => return Err(errors::RefreshError::Open(path, Box::new(e))),
+                };
+                let remote = match repo.find_default_remote(Direction::Fetch) {
+                    Some(Ok(remote)) => remote,
+                    Some(Err(e)) => {
+                        return Err(errors::RefreshError::GetDefaultRemote(path, Box::new(e)))
+                    }
+                    None => {
+                        return Err(errors::RefreshError::NoDefaultRemote(path));
+                    }
+                };
+
+                let mut connection = match remote.connect(Direction::Fetch) {
+                    Ok(connection) => connection,
+                    Err(e) => {
+                        return Err(errors::RefreshError::Connect(
+                            repo_url.to_string(),
+                            Box::new(e),
+                        ))
+                    }
+                };
+
+                authenticate_conn(&mut connection, &auth_config);
+
+                let fetch =
+                    match connection.prepare_fetch(gix::progress::Discard, Default::default()) {
+                        Ok(fetch) => fetch,
+                        Err(e) => {
+                            return Err(errors::RefreshError::PrepareFetch(
+                                repo_url.to_string(),
+                                Box::new(e),
+                            ))
+                        }
+                    };
+
+                match fetch.receive(gix::progress::Discard, &false.into()) {
+                    Ok(_) => Ok(()),
+                    Err(e) => Err(errors::RefreshError::Read(
+                        repo_url.to_string(),
+                        Box::new(e),
+                    )),
+                }
+            })
+            .await
+            .unwrap()?;

             return Ok(());
         }

-        fs_err::create_dir_all(&path)?;
+        fs::create_dir_all(&path).await?;

-        let auth_config = project.auth_config.clone();
-
-        gix::prepare_clone_bare(self.repo_url().clone(), &path)
-            .map_err(|e| errors::RefreshError::Clone(self.repo_url().to_string(), Box::new(e)))?
-            .configure_connection(move |c| {
-                authenticate_conn(c, &auth_config);
-                Ok(())
-            })
-            .fetch_only(gix::progress::Discard, &false.into())
-            .map_err(|e| errors::RefreshError::Fetch(self.repo_url().to_string(), Box::new(e)))?;
-
-        Ok(())
+        spawn_blocking(move || {
+            gix::prepare_clone_bare(repo_url.clone(), &path)
+                .map_err(|e| errors::RefreshError::Clone(repo_url.to_string(), Box::new(e)))?
+                .configure_connection(move |c| {
+                    authenticate_conn(c, &auth_config);
+                    Ok(())
+                })
+                .fetch_only(gix::progress::Discard, &false.into())
+                .map_err(|e| errors::RefreshError::Fetch(repo_url.to_string(), Box::new(e)))
+        })
+        .await
+        .unwrap()
+        .map(|_| ())
     }
 }
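gix's open/fetch APIs stay synchronous, so refresh moves them onto the blocking thread pool with tokio::task::spawn_blocking; the closure must own everything it captures (hence the cloned repo_url and auth_config above), and awaiting the JoinHandle yields the closure's own Result. A reduced sketch of that shape:

    use tokio::task::spawn_blocking;

    async fn refresh() -> Result<(), String> {
        let url = String::from("https://example.com/repo.git"); // owned by the closure
        spawn_blocking(move || {
            // synchronous, potentially long-running work goes here
            if url.is_empty() {
                return Err("no url".to_string());
            }
            Ok(())
        })
        .await
        .expect("blocking task panicked") // JoinError only on panic or cancellation
    }

    #[tokio::main]
    async fn main() {
        refresh().await.unwrap();
    }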


@@ -62,17 +62,17 @@ impl PackageSource for PackageSources {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
+    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
         match self {
-            PackageSources::Pesde(source) => source.refresh(project).map_err(Into::into),
+            PackageSources::Pesde(source) => source.refresh(project).await.map_err(Into::into),
             #[cfg(feature = "wally-compat")]
-            PackageSources::Wally(source) => source.refresh(project).map_err(Into::into),
-            PackageSources::Git(source) => source.refresh(project).map_err(Into::into),
-            PackageSources::Workspace(source) => source.refresh(project).map_err(Into::into),
+            PackageSources::Wally(source) => source.refresh(project).await.map_err(Into::into),
+            PackageSources::Git(source) => source.refresh(project).await.map_err(Into::into),
+            PackageSources::Workspace(source) => source.refresh(project).await.map_err(Into::into),
         }
     }

-    fn resolve(
+    async fn resolve(
         &self,
         specifier: &Self::Specifier,
         project: &Project,
@@ -81,6 +81,7 @@ impl PackageSource for PackageSources {
         match (self, specifier) {
             (PackageSources::Pesde(source), DependencySpecifiers::Pesde(specifier)) => source
                 .resolve(specifier, project, package_target)
+                .await
                 .map(|(name, results)| {
                     (
                         name,
@@ -95,6 +96,7 @@ impl PackageSource for PackageSources {
             #[cfg(feature = "wally-compat")]
             (PackageSources::Wally(source), DependencySpecifiers::Wally(specifier)) => source
                 .resolve(specifier, project, package_target)
+                .await
                 .map(|(name, results)| {
                     (
                         name,
@@ -108,6 +110,7 @@ impl PackageSource for PackageSources {
             (PackageSources::Git(source), DependencySpecifiers::Git(specifier)) => source
                 .resolve(specifier, project, package_target)
+                .await
                 .map(|(name, results)| {
                     (
                         name,
@@ -122,6 +125,7 @@ impl PackageSource for PackageSources {
             (PackageSources::Workspace(source), DependencySpecifiers::Workspace(specifier)) => {
                 source
                     .resolve(specifier, project, package_target)
+                    .await
                     .map(|(name, results)| {
                         (
                             name,
@@ -140,28 +144,32 @@ impl PackageSource for PackageSources {
         }
     }

-    fn download(
+    async fn download(
         &self,
         pkg_ref: &Self::Ref,
         project: &Project,
-        reqwest: &reqwest::blocking::Client,
+        reqwest: &reqwest::Client,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
         match (self, pkg_ref) {
             (PackageSources::Pesde(source), PackageRefs::Pesde(pkg_ref)) => source
                 .download(pkg_ref, project, reqwest)
+                .await
                 .map_err(Into::into),
             #[cfg(feature = "wally-compat")]
             (PackageSources::Wally(source), PackageRefs::Wally(pkg_ref)) => source
                 .download(pkg_ref, project, reqwest)
+                .await
                 .map_err(Into::into),
             (PackageSources::Git(source), PackageRefs::Git(pkg_ref)) => source
                 .download(pkg_ref, project, reqwest)
+                .await
                 .map_err(Into::into),
             (PackageSources::Workspace(source), PackageRefs::Workspace(pkg_ref)) => source
                 .download(pkg_ref, project, reqwest)
+                .await
                 .map_err(Into::into),

             _ => Err(errors::DownloadError::Mismatch),


@@ -20,13 +20,17 @@ use crate::{
     },
     names::{PackageName, PackageNames},
     source::{
-        fs::{store_reader_in_cas, FSEntry, PackageFS},
+        fs::{store_in_cas, FSEntry, PackageFS},
         git_index::GitBasedSource,
         DependencySpecifiers, PackageSource, ResolveResult, VersionId, IGNORED_DIRS, IGNORED_FILES,
     },
     util::hash,
     Project,
 };
+use fs_err::tokio as fs;
+use futures::StreamExt;
+
+// TODO: make more of these functions async

 /// The pesde package reference
 pub mod pkg_ref;
@@ -186,11 +190,11 @@ impl PackageSource for PesdePackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
-        GitBasedSource::refresh(self, project)
+    async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
+        GitBasedSource::refresh(self, project).await
     }

-    fn resolve(
+    async fn resolve(
         &self,
         specifier: &Self::Specifier,
         project: &Project,
@@ -239,11 +243,11 @@ impl PackageSource for PesdePackageSource {
         ))
     }

-    fn download(
+    async fn download(
         &self,
         pkg_ref: &Self::Ref,
         project: &Project,
-        reqwest: &reqwest::blocking::Client,
+        reqwest: &reqwest::Client,
     ) -> Result<(PackageFS, Target), Self::DownloadError> {
         let config = self.config(project).map_err(Box::new)?;
         let index_file = project
@@ -253,7 +257,7 @@ impl PackageSource for PesdePackageSource {
             .join(pkg_ref.version.to_string())
             .join(pkg_ref.target.to_string());

-        match fs_err::read_to_string(&index_file) {
+        match fs::read_to_string(&index_file).await {
             Ok(s) => {
                 log::debug!(
                     "using cached index file for package {}@{} {}",
@@ -280,26 +284,29 @@ impl PackageSource for PesdePackageSource {
             request = request.header(AUTHORIZATION, token);
         }

-        let response = request.send()?.error_for_status()?;
-        let bytes = response.bytes()?;
+        let response = request.send().await?.error_for_status()?;
+        let bytes = response.bytes().await?;

-        let mut decoder = flate2::read::GzDecoder::new(bytes.as_ref());
-        let mut archive = tar::Archive::new(&mut decoder);
+        let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
+        let mut archive = tokio_tar::Archive::new(&mut decoder);

         let mut entries = BTreeMap::new();

-        for entry in archive.entries().map_err(errors::DownloadError::Unpack)? {
-            let mut entry = entry.map_err(errors::DownloadError::Unpack)?;
+        while let Some(entry) = archive
+            .entries()
+            .map_err(errors::DownloadError::Unpack)?
+            .next()
+            .await
+            .transpose()
+            .map_err(errors::DownloadError::Unpack)?
+        {
             let path =
                 RelativePathBuf::from_path(entry.path().map_err(errors::DownloadError::Unpack)?)
                     .unwrap();
+            let name = path.file_name().unwrap_or("");

             if entry.header().entry_type().is_dir() {
-                if path
-                    .components()
-                    .next()
-                    .is_some_and(|ct| IGNORED_DIRS.contains(&ct.as_str()))
-                {
+                if IGNORED_DIRS.contains(&name) {
                     continue;
                 }
@@ -308,11 +315,12 @@ impl PackageSource for PesdePackageSource {
                 continue;
             }

-            if IGNORED_FILES.contains(&path.as_str()) {
+            if IGNORED_FILES.contains(&name) {
                 continue;
             }

-            let hash = store_reader_in_cas(project.cas_dir(), &mut entry)
+            let hash = store_in_cas(project.cas_dir(), entry, |_| async { Ok(()) })
+                .await
                 .map_err(errors::DownloadError::Store)?;
             entries.insert(path, FSEntry::File(hash));
         }
@@ -320,10 +328,13 @@ impl PackageSource for PesdePackageSource {
         let fs = PackageFS::CAS(entries);

         if let Some(parent) = index_file.parent() {
-            fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
+            fs::create_dir_all(parent)
+                .await
+                .map_err(errors::DownloadError::WriteIndex)?;
         }

-        fs_err::write(&index_file, toml::to_string(&fs)?)
+        fs::write(&index_file, toml::to_string(&fs)?)
+            .await
             .map_err(errors::DownloadError::WriteIndex)?;

         Ok((fs, pkg_ref.target.clone()))
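The registry download swaps flate2 + tar for async_compression's GzipDecoder and tokio_tar, walking the archive as a stream of entries (futures::StreamExt supplies next). A hedged sketch of the decode-and-iterate shape, assuming a gzipped tarball already held in memory:

    use async_compression::tokio::bufread::GzipDecoder;
    use futures::StreamExt;
    use tokio_tar::Archive;

    async fn list_entries(gz_bytes: &[u8]) -> std::io::Result<()> {
        // GzipDecoder wraps any AsyncBufRead; a byte slice qualifies
        let decoder = GzipDecoder::new(gz_bytes);
        let mut archive = Archive::new(decoder);

        let mut entries = archive.entries()?;
        while let Some(entry) = entries.next().await.transpose()? {
            println!("{}", entry.path()?.display());
        }
        Ok(())
    }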


@@ -1,3 +1,4 @@
+#![allow(async_fn_in_trait)]
 use std::{
     collections::BTreeMap,
     fmt::{Debug, Display},
@@ -41,12 +42,12 @@ pub trait PackageSource: Debug {
     type DownloadError: std::error::Error;

     /// Refreshes the source
-    fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
+    async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
         Ok(())
     }

     /// Resolves a specifier to a reference
-    fn resolve(
+    async fn resolve(
         &self,
         specifier: &Self::Specifier,
         project: &Project,
@@ -54,10 +55,10 @@ pub trait PackageSource: Debug {
     ) -> Result<ResolveResult<Self::Ref>, Self::ResolveError>;

     /// Downloads a package
-    fn download(
+    async fn download(
         &self,
         pkg_ref: &Self::Ref,
         project: &Project,
-        reqwest: &reqwest::blocking::Client,
+        reqwest: &reqwest::Client,
     ) -> Result<(PackageFS, Target), Self::DownloadError>;
 }
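The trait methods become native async fns, which Rust supports since 1.75 but flags with the async_fn_in_trait lint (the returned futures expose no Send bound to callers), hence the crate-level allow. A minimal sketch:

    #![allow(async_fn_in_trait)]

    trait Source {
        // a provided async method with a default body
        async fn refresh(&self) -> Result<(), String> {
            Ok(())
        }
        async fn resolve(&self, name: &str) -> Result<String, String>;
    }

    struct Dummy;

    impl Source for Dummy {
        async fn resolve(&self, name: &str) -> Result<String, String> {
            Ok(format!("resolved {name}"))
        }
    }

    #[tokio::main]
    async fn main() {
        println!("{}", Dummy.resolve("pkg").await.unwrap());
    }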


@@ -10,6 +10,7 @@ use crate::{
     source::wally::manifest::{Realm, WallyManifest},
     Project, LINK_LIB_NO_FILE_FOUND,
 };
+use fs_err::tokio as fs;

 #[derive(Deserialize)]
 #[serde(rename_all = "camelCase")]
@@ -18,11 +19,11 @@ struct SourcemapNode {
     file_paths: Vec<RelativePathBuf>,
 }

-pub(crate) fn find_lib_path(
+pub(crate) async fn find_lib_path(
     project: &Project,
     package_dir: &Path,
 ) -> Result<Option<RelativePathBuf>, errors::FindLibPathError> {
-    let manifest = project.deser_manifest()?;
+    let manifest = project.deser_manifest().await?;

     let Some(script_path) = manifest
         .scripts
@@ -53,16 +54,17 @@ pub(crate) fn find_lib_path(

 pub(crate) const WALLY_MANIFEST_FILE_NAME: &str = "wally.toml";

-pub(crate) fn get_target(
+pub(crate) async fn get_target(
     project: &Project,
     tempdir: &TempDir,
 ) -> Result<Target, errors::FindLibPathError> {
-    let lib = find_lib_path(project, tempdir.path())?
+    let lib = find_lib_path(project, tempdir.path())
+        .await?
         .or_else(|| Some(RelativePathBuf::from(LINK_LIB_NO_FILE_FOUND)));
     let build_files = Default::default();

     let manifest = tempdir.path().join(WALLY_MANIFEST_FILE_NAME);
-    let manifest = fs_err::read_to_string(&manifest)?;
+    let manifest = fs::read_to_string(&manifest).await?;
     let manifest: WallyManifest = toml::from_str(&manifest)?;

     Ok(if matches!(manifest.package.realm, Realm::Shared) {


@ -1,19 +1,8 @@
use std::{
collections::{BTreeMap, VecDeque},
path::PathBuf,
};
use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use tempfile::tempdir;
use crate::{ use crate::{
manifest::target::{Target, TargetKind}, manifest::target::{Target, TargetKind},
names::PackageNames, names::PackageNames,
source::{ source::{
fs::{store_reader_in_cas, FSEntry, PackageFS}, fs::{store_in_cas, FSEntry, PackageFS},
git_index::GitBasedSource, git_index::GitBasedSource,
traits::PackageSource, traits::PackageSource,
version_id::VersionId, version_id::VersionId,
@ -23,6 +12,15 @@ use crate::{
util::hash, util::hash,
Project, Project,
}; };
use fs_err::tokio as fs;
use gix::Url;
use relative_path::RelativePathBuf;
use reqwest::header::AUTHORIZATION;
use serde::Deserialize;
use std::{collections::BTreeMap, path::PathBuf, sync::Arc};
use tempfile::tempdir;
use tokio::{io::AsyncWriteExt, sync::Mutex};
use tokio_util::compat::FuturesAsyncReadCompatExt;
pub(crate) mod compat_util; pub(crate) mod compat_util;
pub(crate) mod manifest; pub(crate) mod manifest;
@ -86,11 +84,11 @@ impl PackageSource for WallyPackageSource {
type ResolveError = errors::ResolveError; type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError; type DownloadError = errors::DownloadError;
fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> { async fn refresh(&self, project: &Project) -> Result<(), Self::RefreshError> {
GitBasedSource::refresh(self, project) GitBasedSource::refresh(self, project).await
} }
fn resolve( async fn resolve(
&self, &self,
specifier: &Self::Specifier, specifier: &Self::Specifier,
project: &Project, project: &Project,
@ -138,11 +136,11 @@ impl PackageSource for WallyPackageSource {
)) ))
} }
fn download( async fn download(
&self, &self,
pkg_ref: &Self::Ref, pkg_ref: &Self::Ref,
project: &Project, project: &Project,
reqwest: &reqwest::blocking::Client, reqwest: &reqwest::Client,
) -> Result<(PackageFS, Target), Self::DownloadError> { ) -> Result<(PackageFS, Target), Self::DownloadError> {
let config = self.config(project).map_err(Box::new)?; let config = self.config(project).map_err(Box::new)?;
let index_file = project let index_file = project
@@ -151,7 +149,7 @@ impl PackageSource for WallyPackageSource {
             .join(pkg_ref.name.escaped())
             .join(pkg_ref.version.to_string());

-        let tempdir = match fs_err::read_to_string(&index_file) {
+        let tempdir = match fs::read_to_string(&index_file).await {
             Ok(s) => {
                 log::debug!(
                     "using cached index file for package {}@{}",
@@ -162,9 +160,9 @@ impl PackageSource for WallyPackageSource {
                 let tempdir = tempdir()?;
                 let fs = toml::from_str::<PackageFS>(&s)?;
-                fs.write_to(&tempdir, project.cas_dir(), false)?;
+                fs.write_to(&tempdir, project.cas_dir(), false).await?;

-                return Ok((fs, get_target(project, &tempdir)?));
+                return Ok((fs, get_target(project, &tempdir).await?));
             }
             Err(e) if e.kind() == std::io::ErrorKind::NotFound => tempdir()?,
             Err(e) => return Err(errors::DownloadError::ReadIndex(e)),
@@ -190,50 +188,67 @@ impl PackageSource for WallyPackageSource {
             request = request.header(AUTHORIZATION, token);
         }

-        let response = request.send()?.error_for_status()?;
-        let bytes = response.bytes()?;
+        let response = request.send().await?.error_for_status()?;
+        let mut bytes = response.bytes().await?;

-        let mut archive = zip::ZipArchive::new(std::io::Cursor::new(bytes))?;
-        archive.extract(tempdir.path())?;

         let mut entries = BTreeMap::new();

+        let mut archive = async_zip::tokio::read::seek::ZipFileReader::with_tokio(
+            std::io::Cursor::new(&mut bytes),
+        )
+        .await?;

-        let mut dir_entries = fs_err::read_dir(tempdir.path())?.collect::<VecDeque<_>>();
-        while let Some(entry) = dir_entries.pop_front() {
-            let entry = entry?;
-            let path =
-                RelativePathBuf::from_path(entry.path().strip_prefix(tempdir.path())?).unwrap();
+        for index in 0..archive.file().entries().len() {
+            let entry = archive.file().entries().get(index).unwrap();
+            let relative_path = RelativePathBuf::from_path(entry.filename().as_str()?).unwrap();
+            let path = relative_path.to_path(tempdir.path());
+            let name = relative_path.file_name().unwrap_or("");
+            let entry_is_dir = entry.dir()?;
+            let entry_reader = archive.reader_without_entry(index).await?;

-            if entry.file_type()?.is_dir() {
-                if IGNORED_DIRS.contains(&path.as_str()) {
+            if entry_is_dir {
+                if IGNORED_DIRS.contains(&name) {
                     continue;
                 }

-                entries.insert(path, FSEntry::Directory);
-                dir_entries.extend(fs_err::read_dir(entry.path())?);
+                entries.insert(relative_path, FSEntry::Directory);
+                fs::create_dir_all(&path).await?;

                 continue;
             }

-            if IGNORED_FILES.contains(&path.as_str()) {
+            if IGNORED_FILES.contains(&name) {
                 continue;
             }

-            let mut file = fs_err::File::open(entry.path())?;
-            let hash = store_reader_in_cas(project.cas_dir(), &mut file)?;
-            entries.insert(path, FSEntry::File(hash));
+            if let Some(parent) = path.parent() {
+                fs::create_dir_all(parent).await?;
+            }
+            let writer = Arc::new(Mutex::new(fs::File::create(&path).await?));
+            let hash = store_in_cas(project.cas_dir(), entry_reader.compat(), |bytes| {
+                let writer = writer.clone();
+                async move { writer.lock().await.write_all(&bytes).await }
+            })
+            .await?;
+            entries.insert(relative_path, FSEntry::File(hash));
         }

         let fs = PackageFS::CAS(entries);

         if let Some(parent) = index_file.parent() {
-            fs_err::create_dir_all(parent).map_err(errors::DownloadError::WriteIndex)?;
+            fs::create_dir_all(parent)
+                .await
+                .map_err(errors::DownloadError::WriteIndex)?;
         }

-        fs_err::write(&index_file, toml::to_string(&fs)?)
-            .map_err(errors::DownloadError::WriteIndex)?;
+        fs::write(&index_file, toml::to_string(&fs)?)
+            .await
+            .map_err(errors::DownloadError::WriteIndex)?;

-        Ok((fs, get_target(project, &tempdir)?))
+        Ok((fs, get_target(project, &tempdir).await?))
     }
 }
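The new loop no longer extracts the whole archive to disk and re-walks it; it iterates the zip's central directory and streams each entry, storing files in the CAS while mirroring them into the tempdir. A condensed, self-contained sketch of the same async_zip 0.0.17 + tokio-util compat pattern, printing entries instead of calling pesde's CAS helpers:

    use tokio::io::AsyncReadExt;
    use tokio_util::compat::FuturesAsyncReadCompatExt;

    async fn list_zip(bytes: Vec<u8>) -> Result<(), Box<dyn std::error::Error>> {
        let mut archive = async_zip::tokio::read::seek::ZipFileReader::with_tokio(
            std::io::Cursor::new(bytes),
        )
        .await?;

        for index in 0..archive.file().entries().len() {
            let entry = archive.file().entries().get(index).unwrap();
            let name = entry.filename().as_str()?.to_string();
            let is_dir = entry.dir()?;

            if is_dir {
                println!("dir:  {name}");
                continue;
            }

            // Entry readers speak futures-io; `.compat()` bridges them to tokio.
            let mut reader = archive.reader_without_entry(index).await?.compat();
            let mut contents = Vec::new();
            reader.read_to_end(&mut contents).await?;
            println!("file: {name} ({} bytes)", contents.len());
        }

        Ok(())
    }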
@@ -320,7 +335,7 @@ pub mod errors {
         /// Error decompressing archive
         #[error("error decompressing archive")]
-        Decompress(#[from] zip::result::ZipError),
+        Decompress(#[from] async_zip::error::ZipError),

         /// Error interacting with the filesystem
         #[error("error interacting with the filesystem")]


@@ -7,9 +7,11 @@ use crate::{
     },
     Project, DEFAULT_INDEX_NAME,
 };
+use futures::StreamExt;
 use relative_path::RelativePathBuf;
-use reqwest::blocking::Client;
+use reqwest::Client;
 use std::collections::BTreeMap;
+use tokio::pin;

 /// The workspace package reference
 pub mod pkg_ref;
@@ -27,12 +29,12 @@ impl PackageSource for WorkspacePackageSource {
     type ResolveError = errors::ResolveError;
     type DownloadError = errors::DownloadError;

-    fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
+    async fn refresh(&self, _project: &Project) -> Result<(), Self::RefreshError> {
         // no-op
         Ok(())
     }

-    fn resolve(
+    async fn resolve(
         &self,
         specifier: &Self::Specifier,
         project: &Project,
@@ -45,7 +47,10 @@ impl PackageSource for WorkspacePackageSource {
             .unwrap_or(&project.package_dir);
         let target = specifier.target.unwrap_or(package_target);

-        for (path, manifest) in project.workspace_members(workspace_dir)? {
+        let members = project.workspace_members(workspace_dir).await?;
+        pin!(members);
+
+        while let Some((path, manifest)) = members.next().await.transpose()? {
             if manifest.name == specifier.name && manifest.target.kind() == target {
                 break 'finder (path, manifest);
             }
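`workspace_members` now yields a fallible `Stream` instead of an iterator: the stream is pinned before polling, and `transpose()` lets `?` surface per-item errors inside the `while let`. A minimal, self-contained sketch of that consumption pattern (the tuple items and `find_member` are stand-ins, not the real manifest type):

    use futures::{stream, StreamExt};
    use tokio::pin;

    async fn find_member(wanted: &str) -> Result<Option<String>, std::io::Error> {
        // Stand-in for project.workspace_members(..): a stream of Result items.
        let members = stream::iter([
            Ok::<_, std::io::Error>(("crates/a".to_string(), "pkg_a".to_string())),
            Ok(("crates/b".to_string(), "pkg_b".to_string())),
        ]);
        pin!(members);

        // Option<Result<T, E>> -> Result<Option<T>, E>, so `?` propagates errors
        // while `while let` stops cleanly at end-of-stream.
        while let Some((path, name)) = members.next().await.transpose()? {
            if name == wanted {
                return Ok(Some(path));
            }
        }
        Ok(None)
    }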
@@ -119,7 +124,7 @@ impl PackageSource for WorkspacePackageSource {
         ))
     }

-    fn download(
+    async fn download(
         &self,
         pkg_ref: &Self::Ref,
         project: &Project,