Mirror of https://github.com/pesde-pkg/pesde.git (synced 2025-04-10 22:00:55 +01:00)

Compare commits: 148 commits, 0.5...v0.6.1+reg
Commits in this compare, by SHA1 (author, date, and message columns were not preserved in the mirror):

4f48963da3 00bc83b4e5 412ce90e7f b8c4f7486b af93b7d584 292565b647
e6ee935c11 0e73db2831 12c62d315d 19d6a07851 9a75ebf637 41337ac96a
9ad691ee94 31f74cff65 c7a8f919e2 ff5c2e5d61 7520e8e8fc 86a111cf83
308320602f cb3c47477a 1b984f2d82 c9faef256a e2f4e35a02 5d58b9dffb
467a000c5c 8510fcf030 ef471069d8 1bc5defd9c f6f0cb48f8 aa4f283e4c
9b70929e02 ff6449a340 509838bb08 d6e2f611d8 5ea86e632e a4f2829db2
5c43e3cb95 82e0d38483 7150f6a7da 2e02fecd46 ae3530126f 4786adf187
04aaa40c69 b384dbe6a0 8a8bbcbd02 c6e7e74a53 041b14f404 ea9c7e6b39
b6799e7400 faaa76cf0d 1d131e98c6 8bb8888de8 9c75bff65e 72c020efd3
e2d10ac72b 73146e6f64 1a79326ebf 399c63cc8c a4927bf4be 4d39ddae04
7f21131415 c71e879bfd daf0861eb3 5939050ee3 51fc6c3abd c698969f76
70a4dc3226 c6242b8569 ff6d37bf27 43e2d1f325 ba6a02e13b 7ad4c6f5c6
0b5c233734 692ae1521d 6ae16a7dac f0e69a08e2 6856746ae2 24049d60a2
ca550eee3a 2154fc0e84 b30f9ecdeb 5cc64f38ec 4009313281 3e4ef00f4a
801acb0264 8835156b76 446aa748a6 fe979f26c5 95896091cd b9a105cec4
a53ae657e1 5ad3339535 941bb79ea6 0dfc3ef5bd a2ce747879 53bdf0ced6
9e6fa4294f 3d659161e6 805a257a76 6ae7e5078c 684f711d93 57afa4c593
380a716200 f4050abec8 d4979bbdb2 1eef6078bf 72c1c39401 076f5564ee
a39b1bb60a dcc869c025 6f4c7137c0 e8c3a66524 6ab334c904 be6410443f
685700f572 217ca238ff e61aeb5da0 9bab997992 325453450b 243dd39e14
ca5a8f108d de43d2ce42 0ceb2f6653 a627a7253f 6f5e2a2473 e5b629e0c5
9bf2af6454 5d62549817 83fa22f7de 78e58d63fa d0169976cd 6a8dfe0ba3
80b8b151d7 fd5a038d8b 7f15264f48 2700fe9e07 c3d2c768db ccb2924362
6cf9f14649 634ef013da 30b4459de0 a4d7b4d6e0 2aeee9de34 2936f88a99
aabb353d25 a091d06f36 8e6d877241 a41d9950f8
161 changed files with 16936 additions and 11302 deletions
```diff
@@ -1,2 +1,2 @@
 PUBLIC_REGISTRY_URL= # url of the registry API, this must have a trailing slash and include the version
-# example: https://registry.pesde.daimond113.com/v0/
+# example: https://registry.pesde.daimond113.com/v1/
```
.git-blame-ignore-revs (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+# .git-blame-ignore-revs
+# Enabled the `hard_tabs` option in rustfmt.toml
+0ceb2f6653b12e8261533ef528d78e3dde7ed757
```
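The new file only takes effect once Git is told to read it; a minimal one-time setup sketch (standard Git configuration, nothing pesde-specific):

```sh
# make `git blame` skip the hard_tabs reformatting commit listed above
git config blame.ignoreRevsFile .git-blame-ignore-revs
```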
.github/FUNDING.yml (1 line changed)

```diff
@@ -1,2 +1 @@
-buy_me_a_coffee: daimond113
 ko_fi: daimond113
```
.github/ISSUE_TEMPLATE/01-bug-report.md (new file, 30 lines)

```diff
@@ -0,0 +1,30 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ""
+labels: bug
+assignees: daimond113
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+If the issue is with:
+
+- pesde's library (e.g. linking):
+  1. include your project's manifest (pesde.toml)
+  2. if possible, a repository with a minimal reproduction of the issue.
+- pesde's CLI (e.g. authentication):
+  1. include a screenshot or copy-paste the output of the CLI.
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Please complete the following information:**
+
+- pesde: (e.g. 0.6.0)
+- Operating system: (e.g. Windows 11, macOS 15, Ubuntu 24.04)
+
+**Additional context**
+Add any other context about the problem here.
```
.github/ISSUE_TEMPLATE/02-feature-request.md (new file, 19 lines)

```diff
@@ -0,0 +1,19 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ""
+labels: enhancement
+assignees: daimond113
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
```
.github/ISSUE_TEMPLATE/03-engine-support-request.md (new file, 28 lines)

```diff
@@ -0,0 +1,28 @@
+---
+name: Engine support request
+about: Suggest a new engine
+title: "[ENGINE]"
+labels: enhancement
+assignees: daimond113
+---
+
+**General information about the engine**
+Name: Lune
+Repository: https://github.com/lune-org/lune
+
+**Checklist**
+
+- [ ] Is this engine versioned according to [SemVer](https://semver.org/spec/v2.0.0.html)?
+- [ ] Does this engine provide pre-compiled builds?
+- [ ] Will the engine get support for at least a year after it gets added to pesde?
+- [ ] Does the engine have any notable differences to plain Luau (e.g. IO APIs)?
+- [ ] Does the engine correctly implement `require` (according to Luau's RFCs)?
+- [ ] Is the engine open-source under a reasonably permissive license?
+
+If the answer is "no" to any of these questions, the engine will not be added.
+
+**Additional information**
+
+- [ ] Are you willing to submit a pull request adding the engine (and a target for it)?
+
+These questions can help fast-track the engine being added.
```
.github/ISSUE_TEMPLATE/config.yml (new file, 1 line)

```diff
@@ -0,0 +1 @@
+blank_issues_enabled: false
```
.github/workflows/debug.yml (7 lines changed)

```diff
@@ -40,6 +40,11 @@ jobs:
           runs-on: ubuntu-latest
           artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64
 
+        - job-name: linux-aarch64
+          target: aarch64-unknown-linux-gnu
+          runs-on: ubuntu-24.04-arm
+          artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-aarch64
+
         - job-name: macos-x86_64
           target: x86_64-apple-darwin
           runs-on: macos-13

@@ -58,7 +63,7 @@ jobs:
         uses: actions/checkout@v4
 
       - name: Install Linux build dependencies
-        if: ${{ matrix.runs-on == 'ubuntu-latest' }}
+        if: ${{ startsWith(matrix.runs-on, 'ubuntu') }}
        run: |
           sudo apt-get update
           sudo apt-get install libdbus-1-dev pkg-config
```
.github/workflows/release.yaml (13 lines changed)

```diff
@@ -51,6 +51,11 @@ jobs:
             arch: x86_64
             target: x86_64-unknown-linux-gnu
 
+          - os: ubuntu-24.04-arm
+            host: linux
+            arch: aarch64
+            target: aarch64-unknown-linux-gnu
+
           - os: windows-latest
             host: windows
             arch: x86_64

@@ -96,11 +101,9 @@ jobs:
           if [ ${{ matrix.host }} = "windows" ]; then
            mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }}.exe ${{ env.BIN_NAME }}.exe
-            7z a ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}.exe
             tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}.exe
           else
             mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }} ${{ env.BIN_NAME }}
-            zip -r ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}
             tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}
           fi
 
       - name: Upload zip artifact

@@ -109,12 +112,6 @@ jobs:
           name: ${{ env.ARCHIVE_NAME }}.zip
           path: ${{ env.ARCHIVE_NAME }}.zip
 
-      - name: Upload tar.gz artifact
-        uses: actions/upload-artifact@v4
-        with:
-          name: ${{ env.ARCHIVE_NAME }}.tar.gz
-          path: ${{ env.ARCHIVE_NAME }}.tar.gz
-
   publish:
     name: Publish to crates.io
     runs-on: ubuntu-latest
```
.gitignore (4 lines changed)

```diff
@@ -4,4 +4,6 @@
 cobertura.xml
 tarpaulin-report.html
 build_rs_cov.profraw
-registry/data
+registry/data
+data
+manifest.schema.json
```
CHANGELOG.md (71 lines changed)

```diff
@@ -5,6 +5,75 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
+## [0.6.1] - 2025-03-09
+### Fixed
+- Fix path dependencies using project's workspace dependencies by @daimond113
+- Fix binary linkers not being created for non-direct dependencies by @daimond113
+- Add missing `run <alias>` behaviour by @daimond113
+
+### Changed
+- Binary linkers are now done in Rust to simplify their implementation and cross-runtime portability by @daimond113
+- Show available targets in add and install commands if the specifier hasn't matched any by @daimond113
+- Add `@generated` marker to lockfiles by @daimond113
+
+## [0.6.0] - 2025-02-22
+### Added
+- Improve installation experience by @lukadev-0
+- Support using aliases of own dependencies for overrides by @daimond113
+- Support ignoring parse errors in Luau files by @daimond113
+- Add path dependencies by @daimond113
+- Inherit pesde-managed scripts from workspace root by @daimond113
+- Allow using binaries from workspace root in member packages by @daimond113
+- Add yanking & deprecating by @daimond113
+- Add engines as a form of managing runtimes by @daimond113
+- Modify existing installed packages instead of always reinstalling by @daimond113
+- Add `cas prune` command to remove unused CAS files & packages by @daimond113
+- Add `list` and `remove` commands to manage packages in the manifest by @daimond113
+
+### Fixed
+- Install dev packages in prod mode and remove them after use to allow them to be used in scripts by @daimond113
+- Fix infinite loop in the resolver in packages depending on themselves by @daimond113
+- Do Git operations inside spawn_blocking to avoid performance issues by @daimond113
+- Scope CAS package indices to the source by @daimond113
+- Do not copy `default.project.json` in workspace dependencies by @daimond113
+- Colour deprecate output to match yank output by @daimond113
+- Fix zbus panic on Linux by @daimond113
+- Fix `self-upgrade` using the wrong path when doing a fresh download by @daimond113
+- Fix types not being re-exported by @daimond113
+- Refresh sources before reading package data to ensure the index is even cloned (remote changes to lockfile) by @daimond113
+- Correct script linker require paths on Windows by @daimond113
+- Improve patches in incremental installs by @daimond113
+- Patches now include newly created files by @daimond113
+- Fix double path long prefix issues on Windows by @daimond113
+- Fix panic when using SIGINT by @daimond113
+
+### Changed
+- Change handling of graphs to a flat structure by @daimond113
+- Store dependency over downloaded graphs in the lockfile by @daimond113
+- Improve linking process by @daimond113
+- Use a proper url encoding library to ensure compatibility with all characters by @daimond113
+- The `*` specifier now matches all versions, even prereleases by @daimond113
+- Switch CLI dependencies to ones used by other dependencies to optimize the binary size by @daimond113
+- Reorder the `help` command by @daimond113
+- Ignore submodules instead of failing when using Git dependencies with submodules by @daimond113
+- Exit with code 1 from invalid directory binary linkers by @daimond113
+- Patches are now applied before type extraction to allow patches to modify types by @daimond113
+- Make aliases case-insensitive by @daimond113
+- Print "update available" message to stderr by @daimond113
+- Improve output of the `outdated` command by @daimond113
+- Allow publishing other packages even if an error occurred by @daimond113
+
+### Removed
+- Remove old includes format compatibility by @daimond113
+- Remove data redundancy for workspace package references by @daimond113
+- Remove dependency checks from CLI in publish command in favor of registry checks by @daimond113
+
+### Performance
+- Use `Arc` for more efficient cloning of multiple structs by @daimond113
+- Avoid cloning where possible by @daimond113
+- Remove unnecessary mutex in Wally package download by @daimond113
+- Lazily format error messages by @daimond113
+
 ## [0.5.3] - 2024-12-30
 ### Added
 - Add meta field in index files to preserve compatibility with potential future changes by @daimond113

@@ -112,6 +181,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 - Asyncify dependency linking by @daimond113
 - Use `exec` in Unix bin linking to reduce the number of processes by @daimond113
 
+[0.6.1]: https://github.com/daimond113/pesde/compare/v0.6.0%2Bregistry.0.2.1..v0.6.1%2Bregistry.0.2.2
+[0.6.0]: https://github.com/daimond113/pesde/compare/v0.5.3%2Bregistry.0.1.2..v0.6.0%2Bregistry.0.2.0
 [0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
 [0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
 [0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0
```
Cargo.lock (generated, 1690 lines changed) — file diff suppressed because it is too large.
Cargo.toml (226 lines changed)

```diff
@@ -1,6 +1,6 @@
 [package]
 name = "pesde"
-version = "0.5.3"
+version = "0.6.1"
 edition = "2021"
 license = "MIT"
 authors = ["daimond113 <contact@daimond113.com>"]
```
```diff
@@ -10,28 +10,29 @@ repository = "https://github.com/pesde-pkg/pesde"
 include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]
 
 [features]
 default = ["wally-compat", "patches"]
 bin = [
 	"dep:clap",
 	"dep:dirs",
 	"dep:tracing-subscriber",
-	"reqwest/json",
 	"dep:indicatif",
-	"dep:tracing-indicatif",
 	"dep:inquire",
 	"dep:toml_edit",
-	"dep:colored",
+	"dep:console",
 	"dep:anyhow",
 	"dep:keyring",
 	"dep:open",
-	"gix/worktree-mutation",
 	"dep:paste",
 	"dep:serde_json",
-	"dep:winreg",
+	"dep:windows-registry",
+	"dep:windows",
+	"gix/worktree-mutation",
+	"fs-err/expose_original_error",
+	"tokio/rt",
+	"tokio/rt-multi-thread",
+	"tokio/macros",
 ]
-wally-compat = ["dep:async_zip", "dep:serde_json"]
+wally-compat = ["dep:serde_json"]
 patches = ["dep:git2"]
 version-management = ["bin"]
```
```diff
@@ -40,54 +41,205 @@ name = "pesde"
 path = "src/main.rs"
 required-features = ["bin"]
 
-[lints.clippy]
+[workspace.lints.clippy]
 zero_sized_map_values = "warn"
 while_float = "deny"
 useless_let_if_seq = "warn"
 unused_trait_names = "warn"
 unused_result_ok = "warn"
 unused_peekable = "warn"
 unused_async = "warn"
 unreadable_literal = "warn"
 unnested_or_patterns = "warn"
 unneeded_field_pattern = "warn"
 unnecessary_wraps = "warn"
 unnecessary_semicolon = "warn"
 unnecessary_self_imports = "warn"
 unnecessary_literal_bound = "warn"
 unnecessary_join = "warn"
 unnecessary_box_returns = "warn"
 uninlined_format_args = "warn"
 type_repetition_in_bounds = "warn"
 try_err = "warn"
 trivially_copy_pass_by_ref = "warn"
 trait_duplication_in_bounds = "warn"
 todo = "deny"
 suspicious_operation_groupings = "warn"
 suboptimal_flops = "deny"
 struct_field_names = "warn"
 string_to_string = "warn"
 string_lit_chars_any = "warn"
 string_lit_as_bytes = "warn"
 str_split_at_newline = "warn"
 stable_sort_primitive = "warn"
 single_option_map = "warn"
 single_match_else = "warn"
 single_char_pattern = "warn"
 significant_drop_tightening = "warn"
 significant_drop_in_scrutinee = "warn"
 set_contains_or_insert = "deny"
 separated_literal_suffix = "warn"
 semicolon_inside_block = "warn"
 semicolon_if_nothing_returned = "warn"
 self_named_module_files = "warn"
 same_functions_in_if_condition = "warn"
 return_and_then = "warn"
 renamed_function_params = "warn"
 ref_patterns = "deny"
 ref_option = "deny"
 ref_binding_to_reference = "deny"
 redundant_type_annotations = "deny"
 redundant_else = "warn"
 redundant_closure_for_method_calls = "warn"
 redundant_clone = "deny"
 read_zero_byte_vec = "warn"
 rc_buffer = "deny"
 range_plus_one = "deny"
 range_minus_one = "deny"
 pub_without_shorthand = "deny"
 pub_underscore_fields = "deny"
 precedence_bits = "deny"
 pathbuf_init_then_push = "warn"
 path_buf_push_overwrite = "warn"
 option_option = "deny"
 option_as_ref_cloned = "deny"
 nonstandard_macro_braces = "deny"
 non_zero_suggestions = "deny"
 no_effect_underscore_binding = "warn"
 needless_raw_string_hashes = "warn"
 needless_pass_by_value = "deny"
 needless_pass_by_ref_mut = "warn"
 needless_for_each = "deny"
 needless_continue = "deny"
 needless_collect = "deny"
 needless_bitwise_bool = "deny"
 mut_mut = "deny"
 must_use_candidate = "warn"
 mem_forget = "deny"
 maybe_infinite_iter = "deny"
 match_wildcard_for_single_variants = "deny"
 match_bool = "warn"
 map_unwrap_or = "warn"
 map_err_ignore = "warn"
 manual_midpoint = "warn"
 manual_let_else = "warn"
 manual_is_variant_and = "warn"
 manual_is_power_of_two = "warn"
 lossy_float_literal = "deny"
 literal_string_with_formatting_args = "warn"
 large_types_passed_by_value = "warn"
 large_stack_frames = "warn"
 large_stack_arrays = "warn"
 large_digit_groups = "deny"
 iter_with_drain = "deny"
 iter_on_single_items = "deny"
 iter_on_empty_collections = "deny"
 iter_filter_is_some = "deny"
 iter_filter_is_ok = "deny"
 invalid_upcast_comparisons = "deny"
 integer_division = "deny"
 infinite_loop = "deny"
 inefficient_to_string = "warn"
 index_refutable_slice = "deny"
 inconsistent_struct_constructor = "warn"
 imprecise_flops = "deny"
 implicit_clone = "warn"
 if_then_some_else_none = "warn"
 if_not_else = "warn"
 get_unwrap = "warn"
 from_iter_instead_of_collect = "warn"
 format_push_string = "warn"
 format_collect = "warn"
 fn_to_numeric_cast_any = "deny"
 float_cmp_const = "deny"
 float_cmp = "deny"
 float_arithmetic = "warn"
 flat_map_option = "warn"
 filter_map_next = "warn"
 filetype_is_file = "deny"
 explicit_iter_loop = "warn"
 explicit_into_iter_loop = "warn"
 explicit_deref_methods = "warn"
 equatable_if_let = "warn"
 enum_glob_use = "warn"
 empty_structs_with_brackets = "warn"
 empty_enum_variants_with_brackets = "warn"
 empty_drop = "warn"
 elidable_lifetime_names = "warn"
 doc_link_with_quotes = "warn"
 doc_link_code = "warn"
 doc_include_without_cfg = "warn"
 disallowed_script_idents = "warn"
 derive_partial_eq_without_eq = "warn"
 deref_by_slicing = "warn"
 default_numeric_fallback = "warn"
 dbg_macro = "deny"
 comparison_chain = "warn"
 collection_is_never_read = "warn"
 cloned_instead_of_copied = "warn"
 clear_with_drain = "warn"
 cfg_not_test = "warn"
 cast_sign_loss = "deny"
 cast_precision_loss = "deny"
 cast_possible_wrap = "deny"
 case_sensitive_file_extension_comparisons = "warn"
 branches_sharing_code = "warn"
 bool_to_int_with_if = "warn"
 assigning_clones = "warn"
 as_underscore = "warn"
 
+[lints]
+workspace = true
+
 [dependencies]
-serde = { version = "1.0.216", features = ["derive"] }
-toml = "0.8.19"
-serde_with = "3.11.0"
-gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
-semver = { version = "1.0.24", features = ["serde"] }
-reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
+serde = { version = "1.0.217", features = ["derive"] }
+toml = "0.8.20"
+gix = { version = "0.70.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
+semver = { version = "1.0.25", features = ["serde"] }
+reqwest = { version = "0.12.12", default-features = false, features = ["rustls-tls", "stream", "json"] }
+tokio-tar = "0.3.1"
+async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
 pathdiff = "0.2.3"
 relative-path = { version = "1.9.3", features = ["serde"] }
 tracing = { version = "0.1.41", features = ["attributes"] }
-thiserror = "2.0.7"
-tokio = { version = "1.42.0", features = ["process"] }
+thiserror = "2.0.11"
+tokio = { version = "1.43.0", features = ["process", "macros"] }
+tokio-util = "0.7.13"
+async-stream = "0.3.6"
 futures = "0.3.31"
-full_moon = { version = "1.1.2", features = ["luau"] }
+full_moon = { version = "1.2.0", features = ["luau"] }
 url = { version = "2.5.4", features = ["serde"] }
-chrono = { version = "0.4.39", features = ["serde"] }
+jiff = { version = "0.1.29", default-features = false, features = ["serde", "std"] }
 sha2 = "0.10.8"
-tempfile = "3.14.0"
+tempfile = "3.16.0"
 wax = { version = "0.6.0", default-features = false }
-fs-err = { version = "3.0.0", features = ["tokio"] }
+fs-err = { version = "3.1.0", features = ["tokio"] }
 urlencoding = "2.1.3"
-async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
 
 # TODO: remove this when gitoxide adds support for: committing, pushing, adding
-git2 = { version = "0.19.0", optional = true }
+git2 = { version = "0.20.0", optional = true }
 
+async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
-serde_json = { version = "1.0.133", optional = true }
+serde_json = { version = "1.0.138", optional = true }
 
-anyhow = { version = "1.0.94", optional = true }
-open = { version = "5.3.1", optional = true }
-keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
-colored = { version = "2.1.0", optional = true }
-toml_edit = { version = "0.22.22", optional = true }
-clap = { version = "4.5.23", features = ["derive"], optional = true }
-dirs = { version = "5.0.1", optional = true }
+anyhow = { version = "1.0.95", optional = true }
+open = { version = "5.3.2", optional = true }
+keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
+console = { version = "0.15.10", optional = true }
+toml_edit = { version = "0.22.23", optional = true }
+clap = { version = "4.5.28", features = ["derive"], optional = true }
+dirs = { version = "6.0.0", optional = true }
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
-indicatif = { version = "0.17.9", optional = true }
-tracing-indicatif = { version = "0.3.8", optional = true }
-inquire = { version = "0.7.5", optional = true }
+indicatif = { version = "0.17.11", optional = true }
+inquire = { version = "0.7.5", default-features = false, features = ["console", "one-liners"], optional = true }
 paste = { version = "1.0.15", optional = true }
 
 [target.'cfg(target_os = "windows")'.dependencies]
-winreg = { version = "0.52.0", optional = true }
+windows-registry = { version = "0.4.0", optional = true }
+windows = { version = "0.59.0", features = ["Win32_Storage", "Win32_Storage_FileSystem", "Win32_Security"], optional = true }
 
 [dev-dependencies]
 schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"] }
 
 [workspace]
 resolver = "2"
```
```diff
@@ -96,12 +248,16 @@ members = ["registry"]
 [profile.dev.package.full_moon]
 opt-level = 3
 
+[profile.dev.package.miniz_oxide]
+opt-level = 3
+
 [profile.release]
 opt-level = "s"
 lto = true
 incremental = true
 codegen-units = 1
+panic = "abort"
 
 [profile.release.package.pesde-registry]
 # add debug symbols for Sentry stack traces
-debug = "full"
+debug = "full"
```
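One practical consequence of the binary-target configuration above (`required-features = ["bin"]`): building the CLI from a source checkout needs that feature enabled explicitly, since it gates clap, keyring, indicatif, and the other CLI-only optional dependencies. A minimal sketch using standard Cargo flags:

```sh
# build the pesde CLI from a checkout of this repository
cargo build --release --features bin
```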
```diff
@@ -1,7 +1,7 @@
 <br>
 
 <div align="center">
-    <img src="https://raw.githubusercontent.com/pesde-pkg/pesde/0.5/assets/logotype.svg" alt="pesde logo" width="200" />
+    <img src="https://raw.githubusercontent.com/pesde-pkg/pesde/0.6/assets/logotype.svg" alt="pesde logo" width="200" />
 </div>
 
 <br>
```
SECURITY.md (11 lines changed)

```diff
@@ -3,23 +3,26 @@
 ## Supported Versions
 
 As pesde is currently in version 0.x, we can only guarantee security for:
-- **The latest minor** (currently 0.5).
+
+- **The latest minor** (currently 0.6).
 - **The latest release candidate for the next version**, if available.
 
 When a new minor version is released, the previous version will immediately lose security support.
 
 > **Note:** This policy will change with the release of version 1.0, which will include an extended support period for versions >=1.0.
 
 | Version | Supported          |
 | ------- | ------------------ |
-| 0.5.x   | :white_check_mark: |
-| < 0.5   | :x:                |
+| 0.6.x   | :white_check_mark: |
+| < 0.6   | :x:                |
 
 ## Reporting a Vulnerability
 
 We encourage all security concerns to be reported at [pesde@daimond113.com](mailto:pesde@daimond113.com), along the following format:
 
 - **Subject**: The subject must be prefixed with `[SECURITY]` to ensure it is prioritized as a security concern.
 - **Content**:
   - **Affected Versions**: Clearly specify which are affected by the issue.
-  - **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.
+  - **Issue Details**: Provide a detailed description of the issue, including reproduction steps and/or a simple example, if applicable.
 
 We will try to respond as soon as possible.
```
clippy.toml (new file, 4 lines)

```diff
@@ -0,0 +1,4 @@
+avoid-breaking-exported-api = false
+disallowed-methods = [
+    "std::path::Path::exists"
+]
```
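`disallowed-methods` is standard Clippy configuration, so this file makes every direct `std::path::Path::exists` call in the workspace report the `clippy::disallowed_methods` lint. A sketch of surfacing that as a hard error locally, using ordinary Cargo/Clippy flags:

```sh
# fail on any disallowed `Path::exists` call anywhere in the workspace
cargo clippy --all-targets -- -D clippy::disallowed_methods
```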
docs/bun.lockb — binary file not shown.
```diff
@@ -10,20 +10,20 @@
     "astro": "astro"
   },
   "dependencies": {
-    "@astrojs/check": "^0.9.3",
-    "@astrojs/starlight": "^0.28.2",
-    "@astrojs/starlight-tailwind": "^2.0.3",
-    "@astrojs/tailwind": "^5.1.1",
-    "@fontsource-variable/nunito-sans": "^5.1.0",
-    "@shikijs/rehype": "^1.21.0",
-    "astro": "^4.15.9",
+    "@astrojs/check": "0.9.4",
+    "@astrojs/starlight": "0.30.6",
+    "@astrojs/starlight-tailwind": "3.0.0",
+    "@astrojs/tailwind": "5.1.4",
+    "@fontsource-variable/nunito-sans": "^5.1.1",
+    "@shikijs/rehype": "^1.26.2",
+    "astro": "5.1.5",
     "sharp": "^0.33.5",
-    "shiki": "^1.21.0",
-    "tailwindcss": "^3.4.13",
-    "typescript": "^5.6.2"
+    "shiki": "^1.26.2",
+    "tailwindcss": "^3.4.17",
+    "typescript": "^5.7.3"
   },
   "devDependencies": {
     "prettier-plugin-astro": "^0.14.1",
-    "prettier-plugin-tailwindcss": "^0.6.8"
+    "prettier-plugin-tailwindcss": "^0.6.9"
   }
 }
```
```diff
@@ -3,12 +3,7 @@
 	href="https://pesde.daimond113.com/"
 	class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
 >
-	<svg
-		viewBox="0 0 56 28"
-		class="h-7"
-		fill="none"
-		xmlns="http://www.w3.org/2000/svg"
-	>
+	<svg viewBox="0 0 56 28" class="h-7" fill="none" xmlns="http://www.w3.org/2000/svg">
 		<title>pesde</title>
 		<path
 			d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C6.03857 10.8059 6.92846 10.4245 7.82895 10.2338C8.74003 10.2338 9.863 10.4775 10.88 10.9648C11.7911 11.4522 12.7234 12.1726 13.4544 13.126C13.9841 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"

@@ -27,8 +22,7 @@
 			fill="currentColor"></path>
 	</svg>
 </a>
-<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
->
+<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span>
 <a
 	class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
 	href="/">docs</a
```
```diff
@@ -1,6 +1,7 @@
 import { defineCollection } from "astro:content"
+import { docsLoader } from "@astrojs/starlight/loaders"
 import { docsSchema } from "@astrojs/starlight/schema"
 
 export const collections = {
-	docs: defineCollection({ schema: docsSchema() }),
+	docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }),
 }
```
```diff
@@ -42,6 +42,9 @@ hello
 # Hello, pesde! (pesde/hello@1.0.0, lune)
 ```
 
+Note that they are scoped to the nearest `pesde.toml` file. However, you can use
+binaries of the workspace root from member packages.
+
 ## Making a binary package
 
 To make a binary package you must use a target compatible with binary exports.
```
```diff
@@ -137,6 +137,24 @@ pesde add workspace:acme/bar
 	href="/guides/workspaces/"
 />
 
+## Path Dependencies
+
+Path dependencies are dependencies found anywhere available to the operating system.
+They are useful for local development, but are forbidden in published packages.
+
+The path must be absolute and point to a directory containing a `pesde.toml` file.
+
+```toml title="pesde.toml"
+[dependencies]
+foo = { path = "/home/user/foo" }
+```
+
+You can also add a path dependency by running the following command:
+
+```sh
+pesde add path:/home/user/foo
+```
+
 ## Peer Dependencies
 
 Peer dependencies are dependencies that are not installed automatically when
```
```diff
@@ -32,6 +32,29 @@ foo = { name = "acme/foo", version = "^1.0.0" }
 
 Now, when you run `pesde install`, `bar` 2.0.0 will be used instead of 1.0.0.
 
+Overrides are also able to use aliases to share the specifier you use for your
+own dependencies:
+
+```toml title="pesde.toml"
+[dependencies]
+foo = { name = "acme/foo", version = "^1.0.0" }
+bar = { name = "acme/bar", version = "^2.0.0" }
+
+[overrides]
+"foo>bar" = "bar"
+```
+
+This is the same as if you had written:
+
+```toml title="pesde.toml"
+[dependencies]
+foo = { name = "acme/foo", version = "^1.0.0" }
+bar = { name = "acme/bar", version = "^2.0.0" }
+
+[overrides]
+"foo>bar" = { name = "acme/bar", version = "^2.0.0" }
+```
+
 You can learn more about the syntax for dependency overrides in the
 [reference](/reference/manifest#overrides).
```
```diff
@@ -91,6 +91,13 @@ For example, you may publish a package that can be used in both Roblox and
 Luau environments by publishing two versions of the package, one for each
 environment.
 
+<Aside type="caution">
+
+Packages for different targets but on the same version must have
+the same description.
+
+</Aside>
+
 ## Documentation
 
 The `README.md` file in the root of the package will be displayed on the
```
docs/src/content/docs/guides/removing-packages.mdx (new file, 56 lines)

```diff
@@ -0,0 +1,56 @@
+---
+title: Removing Packages
+description: Learn how to remove packages from the registry.
+---
+
+pesde doesn't support removing packages from the registry. This is to ensure
+that the registry remains a reliable source of packages for everyone. However,
+pesde provides other mechanisms to handle packages that are no longer needed.
+
+## Yanking
+
+Yanking is limited to a specific version (and target) of a package. It is used
+to mark a version as broken or deprecated. Yanked versions are unavailable
+to download fresh, but they can still be installed if they are present in the
+lockfile of a project.
+
+To yank a package, you can use the `pesde yank` command:
+
+```sh
+pesde yank <PACKAGE>@<VERSION> <TARGET>
+```
+
+You can leave out the target if you want to yank all targets of the version:
+
+```sh
+pesde yank <PACKAGE>@<VERSION>
+```
+
+## Deprecating
+
+On the other hand, deprecating a package is used to mark a package as deprecated
+in the registry. This is useful when you want to discourage users from using
+a package, but don't want to break existing projects that depend on it. Unlike
+yanking, your package will still be able to be installed fresh. However, when it
+is installed, a warning will be shown to the user.
+
+To deprecate a package, you can use the `pesde deprecate` command:
+
+```sh
+pesde deprecate <PACKAGE> [REASON]
+```
+
+You must provide a non-empty reason when deprecating a package. This is to
+inform users why the package is deprecated. For example, if your package
+has been replaced by another package, you can provide a reason like:
+
+```sh
+pesde deprecate acme/old-package "This package has been replaced by acme/new-package."
+```
+
+## Other Options
+
+There are other situations in which you might want to remove a package from
+the registry. Please refer to the policies of the registry you are using for
+more information on how to handle these situations. The process for the official
+registry is described [here](/registry/policies/#package-removal).
```
```diff
@@ -188,10 +188,13 @@ This will cause the `src` directory to be directly synced into Roblox.
 
 In pesde, you should not have a `default.project.json` file in your package.
 Instead, you are required to use the `build_files` field to specify a 1:1 match
-between Roblox and the file system. pesde forbids `default.project.json` to be
-part of a published package, and regenerates it when installing a pesde git
-dependency. This allows the consumer of your package to choose the sync tool
-they want to use, instead of being constrained to only using Rojo.
+between Roblox and the file system. These are given to the
+`roblox_sync_config_generator` script to generate the configuration for the sync
+tool the user is using. pesde forbids `default.project.json` to be part of a
+published package, as well as ignoring them from Git dependencies. This allows
+the consumer of your package to choose the sync tool they want to use, instead
+of being constrained to only using Rojo, as well as preventing broken packages
+from being published (for example, if the project is configured as a DataModel).
 
 This has the effect that the structure of the files in the file system ends up
 being reflected inside Roblox.

@@ -227,3 +230,19 @@ Whereas with pesde, it looks like this:
 - dependency (roblox_packages/dependency.luau)
 
 </FileTree>
 
+### The `roblox_server` target
+
+Although optimizing your server-only dependency using the `roblox_server` target
+might sound like a good idea, it is not recommended, since it complicates
+linking and makes your package unnecessarily harder to use. On a public registry
+it is also redundant, since the package can be downloaded by anyone. Syncing
+the scripts to the client may also come up as a reason, but it is a
+micro-optimization which is very hard to observe, so it is unnecessary.
+
+The target exists for a reason, that is
+[private registries](/guides/self-hosting-registries). You might want to have
+internal packages, such as configs or otherwise sensitive code which you do not
+want clients to see. This is where the `roblox_server` target comes in handy.
+If you're not using a private registry you should use the standard `roblox`
+target instead.
```
```diff
@@ -23,7 +23,7 @@ the following content:
 api = "https://registry.acme.local/"
 
 # package download URL (optional)
-download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
+download = "{API_URL}/v1/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}/archive"
 
 # the client ID of the GitHub OAuth app (optional)
 github_oauth_client_id = "a1d648966fdfbdcd9295"
```
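For illustration only (not part of the docs themselves): substituting the hypothetical `registry.acme.local` values into the new v1 template gives a download URL shaped roughly like the one below, for version 1.2.3 of a made-up `acme/foo` package targeting `luau`. How `{PACKAGE}` escapes the scope/name separator is an assumption here:

```sh
# hypothetical expansion of the default v1 download template
curl -LO "https://registry.acme.local/v1/packages/acme%2Ffoo/1.2.3/luau/archive"
```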
```diff
@@ -58,7 +58,7 @@ scripts_packages = ["pesde/scripts_rojo"]
 - `{PACKAGE_VERSION}`: The package version.
 - `{PACKAGE_TARGET}`: The package target.
 
-  Defaults to `{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}`.
+  Defaults to `{API_URL}/v1/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}/archive`.
 
 - **github_oauth_client_id**: This is required if you use GitHub OAuth for
   authentication. See below for more information.

@@ -115,11 +115,11 @@ for this purpose.
   `GITHUB_USERNAME`. This is required.
 
 - **COMMITTER_GIT_NAME**: The name to use for the committer when updating the
-  index repository.\
+  index repository. This is required.\
   Example: `pesde index updater`
 
 - **COMMITTER_GIT_EMAIL**: The email to use for the committer when updating the
-  index repository.\
+  index repository. This is required.\
   Example: `pesde@localhost`
 
 - **DATA_DIR**: The directory where the registry stores miscellaneous data.
```
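Taken together, a minimal environment file for a self-hosted registry might look like the sketch below. Every value is a placeholder, and grouping `PUBLIC_REGISTRY_URL` (from the `.env` hunk at the top of this compare) with the committer variables is an assumption:

```sh
# hypothetical .env for a self-hosted registry -- all values are examples
PUBLIC_REGISTRY_URL=https://registry.acme.local/v1/  # trailing slash + version
COMMITTER_GIT_NAME="pesde index updater"             # required
COMMITTER_GIT_EMAIL=pesde@localhost                  # required
DATA_DIR=/var/lib/pesde-registry
```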
```diff
@@ -5,22 +5,11 @@ description: Install pesde
 import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
 
-## Prerequisites
-
-pesde requires [Lune](https://lune-org.github.io/docs) to be installed on your
-system in order to function properly.
-
-You can follow the installation instructions in the
-[Lune documentation](https://lune-org.github.io/docs/getting-started/1-installation).
-
 ## Installing pesde
 
 <Steps>
 
 1. Go to the [GitHub releases page](https://github.com/pesde-pkg/pesde/releases/latest).
 
-2. Download the corresponding archive for your operating system. You can choose
-   whether to use the `.zip` or `.tar.gz` files.
+2. Download the corresponding archive for your operating system.
 
 3. Extract the downloaded archive to a folder on your computer.

@@ -76,6 +65,7 @@
 
    </TabItem>
    </Tabs>
+   <br />
 
 5. Verify that pesde is installed by running the following command:

@@ -92,8 +82,8 @@
 It is not recommended to use toolchain managers (such as Rokit or Aftman) to
 install pesde. You can use `pesde self-upgrade` if you need to update pesde.
 
-If you need everyone to use the same version of pesde, you can use the
-`pesde_version` field in `pesde.toml` to specify the version of pesde to use
+If you need everyone to use a compatible version of pesde, you can use the
+`[engines.pesde]` field in `pesde.toml` to specify the version of pesde to use
 for the current project.
 
 </Aside>
```
```diff
@@ -33,7 +33,7 @@ pesde init
 # what is the repository URL of this project?
 # what is the license of this project? MIT
 # what environment are you targeting for your package? luau
-# would you like to setup default Roblox compatibility scripts? No
+# would you like to setup Roblox compatibility scripts? No
 ```
 
 The command will create a `pesde.toml` file in the current folder. Go ahead
```
```diff
@@ -55,10 +55,85 @@ is printed.
 
 The default index is [`pesde-index`](https://github.com/pesde-pkg/index).
 
+## `pesde cas`
+
+Content-addressable storage (CAS) related commands.
+
+### `pesde cas prune`
+
+Removes unused CAS files and packages.
+
+## `pesde init`
+
+Initializes a new pesde project in the current directory.
+
+## `pesde add`
+
+```sh
+pesde add <PACKAGE>
+```
+
+Adds a package to the dependencies of the current project.
+
+- `-i, --index <INDEX>`: The index in which to search for the package.
+- `-t, --target <TARGET>`: The target environment for the package.
+- `-a, --alias <ALIAS>`: The alias to use for the package, defaults to the
+  package name.
+- `-p, --peer`: Adds the package as a peer dependency.
+- `-d, --dev`: Adds the package as a dev dependency.
+
+The following formats are supported:
+
+```sh
+pesde add pesde/hello
+pesde add pesde/hello@1.2.3
+pesde add wally#pesde/hello
+pesde add wally#pesde/hello@1.2.3
+pesde add gh#acme/package#main
+pesde add https://git.acme.local/package.git#aeff6
+pesde add workspace:pesde/hello
+pesde add workspace:pesde/hello@1.2.3
+pesde add path:/home/user/package
+```
+
+## `pesde remove`
+
+```sh
+pesde remove <ALIAS>
+```
+
+Removes a package from the dependencies of the current project.
+
+## `pesde install`
+
+Installs dependencies for the current project.
+
+- `--locked`: Whether to error if the lockfile is out of date.
+- `--prod`: Whether to skip linking dev dependencies.
+- `--network-concurrency <CONCURRENCY>`: The number of concurrent network
+  requests to make at most. Defaults to 16.
+- `--force`: Whether to force reinstall all packages even if they are already
+  installed (useful if there is any issue with the current installation).
```
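As an illustration of the flags documented above (the combination is made up, not prescribed anywhere in the docs):

```sh
# fail if the lockfile is stale, skip dev dependencies, cap parallel requests
pesde install --locked --prod --network-concurrency 8
```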
```diff
+## `pesde update`
+
+Updates the dependencies of the current project.
+
+- `--no-install`: Whether to only update the lockfile without installing the
+  dependencies.
+- `--network-concurrency <CONCURRENCY>`: The number of concurrent network
+  requests to make at most. Defaults to 16.
+- `--force`: Whether to force reinstall all packages even if they are already
+  installed (useful if there is any issue with the current installation).
+
+## `pesde outdated`
+
+Lists outdated dependencies of the current project.
+
+## `pesde list`
+
+Lists the dependencies of the current project.
+
 ## `pesde run`
 
 Runs a script from the current project using Lune.

@@ -83,13 +158,6 @@ Arguments can be passed to the script by using `--` followed by the arguments.
 pesde run foo -- --arg1 --arg2
 ```
 
-## `pesde install`
-
-Installs dependencies for the current project.
-
-- `--locked`: Whether to error if the lockfile is out of date.
-- `--prod`: Whether to skip installing dev dependencies.
-
 ## `pesde publish`
 
 Publishes the current project to the pesde registry.

@@ -99,18 +167,26 @@ Publishes the current project to the pesde registry.
   publish it.
 - `-y, --yes`: Whether to skip the confirmation prompt.
 - `-i, --index`: Name of the index to publish to. Defaults to `default`.
+- `--no-verify`: Whether to skip syntax validation of the exports of the
+  package.
 
-## `pesde self-install`
+## `pesde yank`
 
-Performs the pesde installation process. This should be the first command run
-after downloading the pesde binary.
+Yanks a version of a package from the registry.
 
-## `pesde self-upgrade`
+- `--undo`: Whether to unyank the package.
+- `-i, --index`: Name of the index to yank from. Defaults to `default`.
```
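A hypothetical invocation of the new command, matching the syntax shown in the yanking guide added earlier in this compare (`acme/foo` is an invented package name):

```sh
# pull the luau artifact of 0.6.0 out of circulation; reverse with --undo
pesde yank acme/foo@0.6.0 luau
```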
```diff
-Upgrades the pesde binary to the latest version.
+## `pesde deprecate`
 
-- `--use-cached`: Whether to use the version displayed in the "upgrade available"
-  message instead of checking for the latest version.
+```sh
+pesde deprecate <PACKAGE> [REASON]
+```
+
+Deprecates a package in the registry. A non-empty reason must be provided.
+
+- `--undo`: Whether to undeprecate the package.
+- `-i, --index`: Name of the index to deprecate from. Defaults to `default`.
 
 ## `pesde patch`

@@ -137,33 +213,6 @@ pesde patch-commit <PATH>
 
 Applies the changes made in the patching environment created by `pesde patch`.
 
-## `pesde add`
-
-```sh
-pesde add <PACKAGE>
-```
-
-Adds a package to the dependencies of the current project.
-
-- `-i, --index <INDEX>`: The index in which to search for the package.
-- `-t, --target <TARGET>`: The target environment for the package.
-- `-a, --alias <ALIAS>`: The alias to use for the package, defaults to the
-  package name.
-- `-p, --peer`: Adds the package as a peer dependency.
-- `-d, --dev`: Adds the package as a dev dependency.
-
-The following formats are supported:
-
-```sh
-pesde add pesde/hello
-pesde add gh#acme/package#main
-pesde add https://git.acme.local/package.git#aeff6
-```
-
-## `pesde update`
-
-Updates the dependencies of the current project.
-
 ## `pesde x`
 
 Runs a one-off binary package.

@@ -178,3 +227,15 @@ a pesde project.
 ```sh
 pesde x pesde/hello
 ```
 
+## `pesde self-install`
+
+Performs the pesde installation process. This should be the first command run
+after downloading the pesde binary.
+
+## `pesde self-upgrade`
+
+Upgrades the pesde binary to the latest version.
+
+- `--use-cached`: Whether to use the version displayed in the "upgrade available"
+  message instead of checking for the latest version.
```
```diff
@@ -84,11 +84,6 @@ includes = [
 ]
 ```
 
-### `pesde_version`
-
-The version of pesde to use within this project. The `pesde` CLI will look at
-this field and run the correct version of pesde for this project.
-
 ### `workspace_members`
 
 A list of globs containing the members of this workspace.

@@ -273,10 +268,27 @@ version `1.0.0`, and the `bar` and `baz` dependencies of the `foo` package with
 version `2.0.0`.
 
 Each key in the overrides table is a comma-separated list of package paths. The
-path is a list of package names separated by `>`. For example, `foo>bar>baz`
+path is a list of aliases separated by `>`. For example, `foo>bar>baz`
 refers to the `baz` dependency of the `bar` package, which is a dependency of
 the `foo` package.
 
+The value of an override entry can be either a specifier or an alias. If it is an
+alias (a string), it will be equivalent to putting the specifier of the dependency
+under that alias. For example, the following two overrides are equivalent:
+
+```toml
+[dependencies]
+bar = { name = "acme/bar", version = "2.0.0" }
+
+[overrides]
+"foo>bar" = "bar"
+```
+
+```toml
+[overrides]
+"foo>bar" = { name = "acme/bar", version = "2.0.0" }
+```
+
 <LinkCard
 	title="Overrides"
 	description="Learn more about overriding and patching packages."

@@ -399,18 +411,19 @@ foo = { workspace = "acme/foo", version = "^" }
 	href="/guides/workspaces/#workspace-dependencies"
 />
 
-## `[peer_dependencies]`
-
-The `[peer_dependencies]` section contains a list of peer dependencies for the
-package. These are dependencies that are required by the package, but are not
-installed automatically. Instead, they must be installed by the user of the
-package.
+### Path
 
 ```toml
-[peer_dependencies]
-foo = { name = "acme/foo", version = "1.2.3" }
+[dependencies]
+foo = { path = "/home/user/foo" }
 ```
 
+**Path dependencies** contain the following fields:
+
+- `path`: The path to the package on the local filesystem.
+
+Path dependencies are forbidden in published packages.
+
 ## `[dev_dependencies]`
 
 The `[dev_dependencies]` section contains a list of development dependencies for

@@ -430,3 +443,31 @@ foo = { name = "acme/foo", version = "1.2.3" }
 	description="Learn more about specifying dependencies in pesde."
 	href="/guides/dependencies/"
 />
 
+## `[peer_dependencies]`
+
+The `[peer_dependencies]` section contains a list of peer dependencies for the
+package. These are dependencies that are required by the package, but are not
+installed automatically. Instead, they must be installed by the user of the
+package.
+
+```toml
+[peer_dependencies]
+foo = { name = "acme/foo", version = "1.2.3" }
+```
+
+## `[engines]`
+
+The `[engines]` section contains a list of engines that the package is compatible
+with.
+
+```toml
+[engines]
+pesde = "^0.6.0"
+lune = "^0.8.9"
+```
+
+Currently, the only engines that can be specified are `pesde` and `lune`.
+Additionally, the engines you declared in your project will be installed when
+you run `pesde install`. Then, a version of the engine that satisfies the
+specified version range will be used when you run the engine.
```
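A hypothetical session illustrating that last paragraph, assuming the `[engines]` table shown above and that the engine is invoked from inside the project:

```sh
# installs dependencies plus an engine build satisfying the declared ranges
pesde install
# subsequent engine invocations in this project use the managed version
lune --version
```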
registry CHANGELOG.md (changed)

```diff
@@ -5,18 +5,46 @@ All notable changes to this project will be documented in this file.
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
 and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
 
-## [0.1.2]
+## [0.2.2]
+### Changed
+- Make search building not async (as it didn't benefit from it) by @daimond113
+
+## [0.2.1] - 2025-03-02
+### Changed
+- Print more error info by @daimond113
+
+## [0.2.0] - 2025-02-22
+### Added
+- Support deprecating and yanking packages by @daimond113
+- Log more information about configured auth & storage by @daimond113
+- Add individual endpoints for package data over using `Accept` header conditional returns by @daimond113
+- Set `Content-Length` header for FS storage backend by @daimond113
+
+### Changed
+- Remove native-tls dependency by @daimond113
+- Make aliases case-insensitive by @daimond113
+
+### Performance
+- Switch to using a `RwLock` over a `Mutex` to store repository data by @daimond113
+- Asyncify blocking operations by @daimond113
+- Asyncify reading of package data of top search results by @daimond113
+
+## [0.1.2] - 2024-12-30
 ### Changed
 - Update to pesde lib API changes by @daimond113
 
 ## [0.1.1] - 2024-12-19
 ### Changed
-- Switch to traccing for logging by @daimond113
+- Switch to tracing for logging by @daimond113
 
 ## [0.1.0] - 2024-12-14
 ### Added
 - Rewrite registry for pesde v0.5.0 by @daimond113
 
+[0.2.2]: https://github.com/daimond113/pesde/compare/v0.6.0%2Bregistry.0.2.1..v0.6.1%2Bregistry.0.2.2
+[0.2.1]: https://github.com/daimond113/pesde/compare/v0.6.0%2Bregistry.0.2.0..v0.6.0%2Bregistry.0.2.1
+[0.2.0]: https://github.com/daimond113/pesde/compare/v0.5.3%2Bregistry.0.1.2..v0.6.0%2Bregistry.0.2.0
 [0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
 [0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
 [0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0
```
registry Cargo.toml (changed)

```diff
@@ -1,40 +1,44 @@
 [package]
 name = "pesde-registry"
-version = "0.1.2"
+version = "0.2.2"
 edition = "2021"
 repository = "https://github.com/pesde-pkg/index"
 publish = false
 
+[lints]
+workspace = true
+
 [dependencies]
 actix-web = "4.9.0"
 actix-cors = "0.7.0"
 actix-governor = "0.8.0"
 dotenvy = "0.15.7"
-thiserror = "2.0.7"
+thiserror = "2.0.11"
 tantivy = "0.22.0"
-semver = "1.0.24"
-chrono = { version = "0.4.39", features = ["serde"] }
+semver = "1.0.25"
+jiff = { version = "0.1.29", features = ["serde"] }
 futures = "0.3.31"
-tokio = "1.42.0"
-tempfile = "3.14.0"
-fs-err = { version = "3.0.0", features = ["tokio"] }
+tokio = "1.43.0"
+tokio-util = "0.7.13"
+tempfile = "3.16.0"
+fs-err = { version = "3.1.0", features = ["tokio"] }
+async-stream = "0.3.6"
 
-git2 = "0.19.0"
-gix = { version = "0.68.0", default-features = false, features = [
+git2 = "0.20.0"
+gix = { version = "0.70.0", default-features = false, features = [
 	"blocking-http-transport-reqwest-rust-tls",
 	"credentials",
 ] }
 
-serde = "1.0.216"
-serde_json = "1.0.133"
+serde = "1.0.217"
+serde_json = "1.0.138"
 serde_yaml = "0.9.34"
-toml = "0.8.19"
-convert_case = "0.6.0"
+toml = "0.8.20"
+convert_case = "0.7.1"
 sha2 = "0.10.8"
 
-rusty-s3 = "0.5.0"
-reqwest = { version = "0.12.9", features = ["json", "rustls-tls"] }
+rusty-s3 = "0.7.0"
+reqwest = { version = "0.12.12", default-features = false, features = ["json", "rustls-tls"] }
 constant_time_eq = "0.3.1"
 
 tokio-tar = "0.3.1"

@@ -44,7 +48,7 @@ tracing = { version = "0.1.41", features = ["attributes"] }
 tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
 tracing-actix-web = "0.7.15"
 
-sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
-sentry-actix = "0.35.0"
+sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
+sentry-actix = "0.36.0"
 
-pesde = { path = "..", features = ["wally-compat"] }
+pesde = { path = "..", default-features = false, features = ["wally-compat"] }
```
registry/src/auth/github.rs:

```diff
@@ -1,6 +1,6 @@
 use crate::{
     auth::{get_token_from_req, AuthImpl, UserId},
-    error::ReqwestErrorExt,
+    error::{display_error, ReqwestErrorExt as _},
 };
 use actix_web::{dev::ServiceRequest, Error as ActixError};
 use reqwest::StatusCode;
@@ -9,79 +9,78 @@ use std::fmt::Display;
 #[derive(Debug)]
 pub struct GitHubAuth {
     pub reqwest_client: reqwest::Client,
     pub client_id: String,
     pub client_secret: String,
 }
 
 #[derive(Debug, Serialize)]
 struct TokenRequestBody {
     access_token: String,
 }
 
 impl AuthImpl for GitHubAuth {
     async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
-        let token = match get_token_from_req(req) {
-            Some(token) => token,
-            None => return Ok(None),
-        };
+        let Some(token) = get_token_from_req(req) else {
+            return Ok(None);
+        };
 
         let response = match self
             .reqwest_client
             .post(format!(
                 "https://api.github.com/applications/{}/token",
                 self.client_id
             ))
             .basic_auth(&self.client_id, Some(&self.client_secret))
             .json(&TokenRequestBody {
                 access_token: token,
             })
             .send()
             .await
         {
             Ok(response) => match response.error_for_status_ref() {
                 Ok(_) => response,
                 Err(e) if e.status().is_some_and(|s| s == StatusCode::NOT_FOUND) => {
                     return Ok(None);
                 }
                 Err(_) => {
                     tracing::error!(
-                        "failed to get user: {}",
-                        response.into_error().await.unwrap_err()
+                        "failed to get user info: {}",
+                        display_error(response.into_error().await.unwrap_err())
                     );
                     return Ok(None);
                 }
             },
             Err(e) => {
-                tracing::error!("failed to get user: {e}");
+                tracing::error!("failed to send user info request: {}", display_error(e));
                 return Ok(None);
             }
         };
 
         let user_id = match response.json::<UserResponse>().await {
             Ok(resp) => resp.user.id,
             Err(e) => {
-                tracing::error!("failed to get user: {e}");
+                tracing::error!("failed to parse user info response: {}", display_error(e));
                 return Ok(None);
             }
         };
 
         Ok(Some(UserId(user_id)))
     }
 }
 
 impl Display for GitHubAuth {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        write!(f, "GitHub")
+        write!(f, "GitHub (client id: {})", self.client_id)
     }
 }
 
 #[derive(Debug, Deserialize)]
 struct User {
     id: u64,
 }
 
 #[derive(Debug, Deserialize)]
 struct UserResponse {
     user: User,
 }
```
registry/src/auth/mod.rs:

```diff
@@ -6,194 +6,192 @@ mod token;
 use crate::{benv, make_reqwest, AppState};
 use actix_governor::{KeyExtractor, SimpleKeyExtractionError};
 use actix_web::{
     body::MessageBody,
     dev::{ServiceRequest, ServiceResponse},
     error::Error as ActixError,
     http::header::AUTHORIZATION,
     middleware::Next,
-    web, HttpMessage, HttpResponse,
+    web, HttpMessage as _, HttpResponse,
 };
 use pesde::source::pesde::IndexConfig;
 use sentry::add_breadcrumb;
-use sha2::{Digest, Sha256};
+use sha2::{Digest as _, Sha256};
 use std::fmt::Display;
 
 #[derive(Debug, Copy, Clone, Hash, PartialOrd, PartialEq, Eq, Ord)]
 pub struct UserId(pub u64);
 
 impl UserId {
     // there isn't any account on GitHub that has the ID 0, so it should be safe to use it
     pub const DEFAULT: UserId = UserId(0);
 }
 
 #[derive(Debug, Clone)]
 pub struct UserIdExtractor;
 
 impl KeyExtractor for UserIdExtractor {
     type Key = UserId;
     type KeyExtractionError = SimpleKeyExtractionError<&'static str>;
 
     fn extract(&self, req: &ServiceRequest) -> Result<Self::Key, Self::KeyExtractionError> {
         match req.extensions().get::<UserId>() {
             Some(user_id) => Ok(*user_id),
             None => Err(SimpleKeyExtractionError::new("UserId not found")),
         }
     }
 }
 
 #[derive(Debug)]
 pub enum Auth {
     GitHub(github::GitHubAuth),
     None(none::NoneAuth),
     Token(token::TokenAuth),
     RwToken(rw_token::RwTokenAuth),
 }
 
 pub trait AuthImpl: Display {
     async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError>;
 
     async fn for_read_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
         self.for_write_request(req).await
     }
 
     fn read_needs_auth(&self) -> bool {
         benv!("READ_NEEDS_AUTH").is_ok()
     }
 }
 
 impl AuthImpl for Auth {
     async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
         match self {
             Auth::GitHub(github) => github.for_write_request(req).await,
             Auth::None(none) => none.for_write_request(req).await,
             Auth::Token(token) => token.for_write_request(req).await,
             Auth::RwToken(rw_token) => rw_token.for_write_request(req).await,
         }
     }
 
     async fn for_read_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
         match self {
             Auth::GitHub(github) => github.for_read_request(req).await,
             Auth::None(none) => none.for_write_request(req).await,
             Auth::Token(token) => token.for_write_request(req).await,
             Auth::RwToken(rw_token) => rw_token.for_read_request(req).await,
         }
     }
 
     fn read_needs_auth(&self) -> bool {
         match self {
             Auth::GitHub(github) => github.read_needs_auth(),
             Auth::None(none) => none.read_needs_auth(),
             Auth::Token(token) => token.read_needs_auth(),
             Auth::RwToken(rw_token) => rw_token.read_needs_auth(),
         }
     }
 }
 
 impl Display for Auth {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         match self {
-            Auth::GitHub(github) => write!(f, "{}", github),
-            Auth::None(none) => write!(f, "{}", none),
-            Auth::Token(token) => write!(f, "{}", token),
-            Auth::RwToken(rw_token) => write!(f, "{}", rw_token),
+            Auth::GitHub(github) => write!(f, "{github}"),
+            Auth::None(none) => write!(f, "{none}"),
+            Auth::Token(token) => write!(f, "{token}"),
+            Auth::RwToken(rw_token) => write!(f, "{rw_token}"),
         }
     }
 }
 
 pub async fn write_mw(
     app_state: web::Data<AppState>,
     req: ServiceRequest,
     next: Next<impl MessageBody + 'static>,
 ) -> Result<ServiceResponse<impl MessageBody>, ActixError> {
-    let user_id = match app_state.auth.for_write_request(&req).await? {
-        Some(user_id) => user_id,
-        None => {
-            return Ok(req
-                .into_response(HttpResponse::Unauthorized().finish())
-                .map_into_right_body())
-        }
-    };
+    let Some(user_id) = app_state.auth.for_write_request(&req).await? else {
+        return Ok(req
+            .into_response(HttpResponse::Unauthorized().finish())
+            .map_into_right_body());
+    };
 
     add_breadcrumb(sentry::Breadcrumb {
         category: Some("auth".into()),
         message: Some(format!("write request authorized as {}", user_id.0)),
         level: sentry::Level::Info,
         ..Default::default()
     });
 
     req.extensions_mut().insert(user_id);
 
-    next.call(req).await.map(|res| res.map_into_left_body())
+    next.call(req)
+        .await
+        .map(ServiceResponse::map_into_left_body)
 }
 
 pub async fn read_mw(
     app_state: web::Data<AppState>,
     req: ServiceRequest,
     next: Next<impl MessageBody + 'static>,
 ) -> Result<ServiceResponse<impl MessageBody>, ActixError> {
     if app_state.auth.read_needs_auth() {
-        let user_id = match app_state.auth.for_read_request(&req).await? {
-            Some(user_id) => user_id,
-            None => {
-                return Ok(req
-                    .into_response(HttpResponse::Unauthorized().finish())
-                    .map_into_right_body())
-            }
-        };
+        let Some(user_id) = app_state.auth.for_read_request(&req).await? else {
+            return Ok(req
+                .into_response(HttpResponse::Unauthorized().finish())
+                .map_into_right_body());
+        };
 
         add_breadcrumb(sentry::Breadcrumb {
             category: Some("auth".into()),
             message: Some(format!("read request authorized as {}", user_id.0)),
             level: sentry::Level::Info,
             ..Default::default()
         });
 
         req.extensions_mut().insert(Some(user_id));
     } else {
         req.extensions_mut().insert(None::<UserId>);
     }
 
-    next.call(req).await.map(|res| res.map_into_left_body())
+    next.call(req)
+        .await
+        .map(ServiceResponse::map_into_left_body)
 }
 
 pub fn get_auth_from_env(config: &IndexConfig) -> Auth {
     if let Ok(token) = benv!("ACCESS_TOKEN") {
         Auth::Token(token::TokenAuth {
             token: *Sha256::digest(token.as_bytes()).as_ref(),
         })
     } else if let Ok(client_secret) = benv!("GITHUB_CLIENT_SECRET") {
         Auth::GitHub(github::GitHubAuth {
             reqwest_client: make_reqwest(),
             client_id: config
                 .github_oauth_client_id
                 .clone()
                 .expect("index isn't configured for GitHub"),
             client_secret,
         })
     } else if let Ok((r, w)) =
         benv!("READ_ACCESS_TOKEN").and_then(|r| benv!("WRITE_ACCESS_TOKEN").map(|w| (r, w)))
     {
         Auth::RwToken(rw_token::RwTokenAuth {
             read_token: *Sha256::digest(r.as_bytes()).as_ref(),
             write_token: *Sha256::digest(w.as_bytes()).as_ref(),
         })
     } else {
         Auth::None(none::NoneAuth)
     }
 }
 
 pub fn get_token_from_req(req: &ServiceRequest) -> Option<String> {
     let token = req
         .headers()
         .get(AUTHORIZATION)
         .and_then(|token| token.to_str().ok())?;
 
     let token = if token.to_lowercase().starts_with("bearer ") {
         token[7..].to_string()
     } else {
         token.to_string()
     };
 
     Some(token)
 }
```
registry/src/auth/none.rs:

```diff
@@ -6,13 +6,13 @@ use std::fmt::Display;
 pub struct NoneAuth;
 
 impl AuthImpl for NoneAuth {
     async fn for_write_request(&self, _req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
         Ok(Some(UserId::DEFAULT))
     }
 }
 
 impl Display for NoneAuth {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "None")
     }
 }
```
registry/src/auth/rw_token.rs:

```diff
@@ -1,53 +1,43 @@
 use crate::auth::{get_token_from_req, AuthImpl, UserId};
 use actix_web::{dev::ServiceRequest, Error as ActixError};
 use constant_time_eq::constant_time_eq_32;
-use sha2::{Digest, Sha256};
+use sha2::{Digest as _, Sha256};
 use std::fmt::Display;
 
 #[derive(Debug)]
 pub struct RwTokenAuth {
     pub read_token: [u8; 32],
     pub write_token: [u8; 32],
 }
 
 impl AuthImpl for RwTokenAuth {
     async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
-        let token = match get_token_from_req(req) {
-            Some(token) => token,
-            None => return Ok(None),
-        };
+        let Some(token) = get_token_from_req(req) else {
+            return Ok(None);
+        };
 
         let token: [u8; 32] = Sha256::digest(token.as_bytes()).into();
 
-        Ok(if constant_time_eq_32(&self.write_token, &token) {
-            Some(UserId::DEFAULT)
-        } else {
-            None
-        })
+        Ok(constant_time_eq_32(&self.write_token, &token).then_some(UserId::DEFAULT))
     }
 
     async fn for_read_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
-        let token = match get_token_from_req(req) {
-            Some(token) => token,
-            None => return Ok(None),
-        };
+        let Some(token) = get_token_from_req(req) else {
+            return Ok(None);
+        };
 
         let token: [u8; 32] = Sha256::digest(token.as_bytes()).into();
 
-        Ok(if constant_time_eq_32(&self.read_token, &token) {
-            Some(UserId::DEFAULT)
-        } else {
-            None
-        })
+        Ok(constant_time_eq_32(&self.read_token, &token).then_some(UserId::DEFAULT))
     }
 
     fn read_needs_auth(&self) -> bool {
         true
     }
 }
 
 impl Display for RwTokenAuth {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "RwToken")
     }
 }
```
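The refactor above replaces the `if`/`else` over `constant_time_eq_32` with
`bool::then_some`, which is a pure equivalence. A standalone sketch of the
identity it relies on (not registry code):

```rust
fn main() {
    // `cond.then_some(v)` is Some(v) when cond is true and None otherwise --
    // exactly what the removed if/else expressed.
    let cond = true;
    assert_eq!(cond.then_some("id"), if cond { Some("id") } else { None });
    assert_eq!(false.then_some("id"), None::<&str>);
}
```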
registry/src/auth/token.rs:

```diff
@@ -1,34 +1,29 @@
 use crate::auth::{get_token_from_req, AuthImpl, UserId};
 use actix_web::{dev::ServiceRequest, Error as ActixError};
 use constant_time_eq::constant_time_eq_32;
-use sha2::{Digest, Sha256};
+use sha2::{Digest as _, Sha256};
 use std::fmt::Display;
 
 #[derive(Debug)]
 pub struct TokenAuth {
     // needs to be an SHA-256 hash
     pub token: [u8; 32],
 }
 
 impl AuthImpl for TokenAuth {
     async fn for_write_request(&self, req: &ServiceRequest) -> Result<Option<UserId>, ActixError> {
-        let token = match get_token_from_req(req) {
-            Some(token) => token,
-            None => return Ok(None),
-        };
+        let Some(token) = get_token_from_req(req) else {
+            return Ok(None);
+        };
 
         let token: [u8; 32] = Sha256::digest(token.as_bytes()).into();
 
-        Ok(if constant_time_eq_32(&self.token, &token) {
-            Some(UserId::DEFAULT)
-        } else {
-            None
-        })
+        Ok(constant_time_eq_32(&self.token, &token).then_some(UserId::DEFAULT))
    }
 }
 
 impl Display for TokenAuth {
     fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
         write!(f, "Token")
     }
 }
```
registry/src/endpoints/deprecate_version.rs (new file, 76 lines):

```rust
use crate::{
    auth::UserId,
    error::{ErrorResponse, RegistryError},
    git::push_changes,
    package::{read_package, read_scope_info},
    search::search_version_changed,
    AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use std::collections::HashMap;

pub async fn deprecate_package_version(
    request: HttpRequest,
    app_state: web::Data<AppState>,
    path: web::Path<PackageName>,
    bytes: web::Bytes,
    user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
    let deprecated = request.method() != Method::DELETE;
    let reason = if deprecated {
        match String::from_utf8(bytes.to_vec()).map(|s| s.trim().to_string()) {
            Ok(reason) if !reason.is_empty() => reason,
            Err(e) => {
                return Ok(HttpResponse::BadRequest().json(ErrorResponse {
                    error: format!("invalid utf-8: {e}"),
                }))
            }
            _ => {
                return Ok(HttpResponse::BadRequest().json(ErrorResponse {
                    error: "deprecating must have a non-empty reason".to_string(),
                }))
            }
        }
    } else {
        String::new()
    };
    let name = path.into_inner();
    let source = app_state.source.write().await;

    let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    if !scope_info.owners.contains(&user_id.0) {
        return Ok(HttpResponse::Forbidden().finish());
    }

    let Some(mut file) = read_package(&app_state, &name, &source).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    if file.meta.deprecated == reason {
        return Ok(HttpResponse::Conflict().finish());
    }

    file.meta.deprecated = reason;

    let file_string = toml::to_string(&file)?;

    push_changes(
        &app_state,
        &source,
        name.scope().to_string(),
        HashMap::from([(name.name().to_string(), file_string.into_bytes())]),
        format!("{}deprecate {name}", if deprecated { "" } else { "un" }),
    )
    .await?;

    search_version_changed(&app_state, &name, &file);

    Ok(HttpResponse::Ok().body(format!(
        "{}deprecated {name}",
        if deprecated { "" } else { "un" },
    )))
}
```
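For context on the handler above: any method other than `DELETE` deprecates the
package, with the request body as the required reason, while `DELETE`
undeprecates it. A hedged client-side sketch using `reqwest` (the route, base
URL, and package name here are assumptions for illustration, not taken from the
source):

```rust
// Hypothetical client call; the exact route layout is an assumption.
async fn deprecate(client: &reqwest::Client, base: &str, token: &str) -> reqwest::Result<()> {
    client
        .patch(format!("{base}/packages/acme%2Ffoo/deprecate"))
        .bearer_auth(token) // token auth, as enforced by the write middleware
        .body("superseded by acme/bar") // a non-empty reason is required
        .send()
        .await?
        .error_for_status()?;
    Ok(())
}
```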
registry/src/endpoints/mod.rs:

```diff
@@ -1,4 +1,9 @@
+pub mod deprecate_version;
+pub mod package_archive;
+pub mod package_doc;
+pub mod package_readme;
 pub mod package_version;
 pub mod package_versions;
 pub mod publish_version;
 pub mod search;
+pub mod yank_version;
```
registry/src/endpoints/package_archive.rs (new file, 27 lines):

```rust
use actix_web::{web, HttpResponse};

use crate::{
    error::RegistryError,
    package::read_package,
    request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
    storage::StorageImpl as _,
    AppState,
};
use pesde::names::PackageName;

pub async fn get_package_archive(
    app_state: web::Data<AppState>,
    path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
) -> Result<HttpResponse, RegistryError> {
    let (name, version, target) = path.into_inner();

    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let Some(v_id) = resolve_version_and_target(&file, version, &target) else {
        return Ok(HttpResponse::NotFound().finish());
    };

    app_state.storage.get_package(&name, v_id).await
}
```
registry/src/endpoints/package_doc.rs (new file, 66 lines):

```rust
use crate::{
    error::RegistryError,
    package::read_package,
    request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
    storage::StorageImpl as _,
    AppState,
};
use actix_web::{web, HttpResponse};
use pesde::{
    names::PackageName,
    source::{
        ids::VersionId,
        pesde::{DocEntryKind, IndexFile},
    },
};
use serde::Deserialize;

pub fn find_package_doc<'a>(
    file: &'a IndexFile,
    v_id: &VersionId,
    doc_name: &str,
) -> Option<&'a str> {
    let mut queue = file.entries[v_id]
        .docs
        .iter()
        .map(|doc| &doc.kind)
        .collect::<Vec<_>>();
    while let Some(doc) = queue.pop() {
        match doc {
            DocEntryKind::Page { name, hash } if name == doc_name => return Some(hash.as_str()),
            DocEntryKind::Category { items, .. } => {
                queue.extend(items.iter().map(|item| &item.kind));
            }
            DocEntryKind::Page { .. } => {}
        }
    }

    None
}

#[derive(Debug, Deserialize)]
pub struct Query {
    doc: String,
}

pub async fn get_package_doc(
    app_state: web::Data<AppState>,
    path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
    request_query: web::Query<Query>,
) -> Result<HttpResponse, RegistryError> {
    let (name, version, target) = path.into_inner();

    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let Some(v_id) = resolve_version_and_target(&file, version, &target) else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let Some(hash) = find_package_doc(&file, v_id, &request_query.doc) else {
        return Ok(HttpResponse::NotFound().finish());
    };

    app_state.storage.get_doc(hash).await
}
```
registry/src/endpoints/package_readme.rs (new file, 27 lines):

```rust
use actix_web::{web, HttpResponse};

use crate::{
    error::RegistryError,
    package::read_package,
    request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
    storage::StorageImpl as _,
    AppState,
};
use pesde::names::PackageName;

pub async fn get_package_readme(
    app_state: web::Data<AppState>,
    path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
) -> Result<HttpResponse, RegistryError> {
    let (name, version, target) = path.into_inner();

    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let Some(v_id) = resolve_version_and_target(&file, version, &target) else {
        return Ok(HttpResponse::NotFound().finish());
    };

    app_state.storage.get_readme(&name, v_id).await
}
```
registry/src/endpoints/package_version.rs:

```diff
@@ -1,171 +1,79 @@
-use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse, Responder};
-use semver::Version;
-use serde::{Deserialize, Deserializer};
+use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse};
+use serde::Deserialize;
 
-use crate::{error::Error, package::PackageResponse, storage::StorageImpl, AppState};
-use pesde::{
-    manifest::target::TargetKind,
-    names::PackageName,
-    source::{
-        git_index::{read_file, root_tree, GitBasedSource},
-        pesde::{DocEntryKind, IndexFile},
-    },
+use crate::{
+    endpoints::package_doc::find_package_doc,
+    error::RegistryError,
+    package::{read_package, PackageResponse},
+    request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
+    storage::StorageImpl as _,
+    AppState,
 };
-
-#[derive(Debug)]
-pub enum VersionRequest {
-    Latest,
-    Specific(Version),
-}
-
-impl<'de> Deserialize<'de> for VersionRequest {
-    fn deserialize<D>(deserializer: D) -> Result<VersionRequest, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        if s.eq_ignore_ascii_case("latest") {
-            return Ok(VersionRequest::Latest);
-        }
-
-        s.parse()
-            .map(VersionRequest::Specific)
-            .map_err(serde::de::Error::custom)
-    }
-}
-
-#[derive(Debug)]
-pub enum TargetRequest {
-    Any,
-    Specific(TargetKind),
-}
-
-impl<'de> Deserialize<'de> for TargetRequest {
-    fn deserialize<D>(deserializer: D) -> Result<TargetRequest, D::Error>
-    where
-        D: Deserializer<'de>,
-    {
-        let s = String::deserialize(deserializer)?;
-        if s.eq_ignore_ascii_case("any") {
-            return Ok(TargetRequest::Any);
-        }
-
-        s.parse()
-            .map(TargetRequest::Specific)
-            .map_err(serde::de::Error::custom)
-    }
-}
+use pesde::names::PackageName;
 
 #[derive(Debug, Deserialize)]
 pub struct Query {
     doc: Option<String>,
 }
 
+pub async fn get_package_version_v0(
+    request: HttpRequest,
+    app_state: web::Data<AppState>,
+    path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
+    request_query: web::Query<Query>,
+) -> Result<HttpResponse, RegistryError> {
+    let (name, version, target) = path.into_inner();
+
+    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
+
+    let Some(v_id) = resolve_version_and_target(&file, version, &target) else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
+
+    if let Some(doc_name) = request_query.doc.as_deref() {
+        let Some(hash) = find_package_doc(&file, v_id, doc_name) else {
+            return Ok(HttpResponse::NotFound().finish());
+        };
+
+        return app_state.storage.get_doc(hash).await;
+    }
+
+    let accept = request
+        .headers()
+        .get(ACCEPT)
+        .and_then(|accept| accept.to_str().ok())
+        .and_then(|accept| match accept.to_lowercase().as_str() {
+            "text/plain" => Some(true),
+            "application/octet-stream" => Some(false),
+            _ => None,
+        });
+
+    if let Some(readme) = accept {
+        return if readme {
+            app_state.storage.get_readme(&name, v_id).await
+        } else {
+            app_state.storage.get_package(&name, v_id).await
+        };
+    }
+
+    Ok(HttpResponse::Ok().json(PackageResponse::new(&name, v_id, &file)))
+}
+
 pub async fn get_package_version(
-    request: HttpRequest,
     app_state: web::Data<AppState>,
-    path: web::Path<(PackageName, VersionRequest, TargetRequest)>,
-    query: web::Query<Query>,
-) -> Result<impl Responder, Error> {
+    path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
+) -> Result<HttpResponse, RegistryError> {
     let (name, version, target) = path.into_inner();
 
-    let (scope, name_part) = name.as_str();
+    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
 
-    let file: IndexFile = {
-        let source = app_state.source.lock().await;
-        let repo = gix::open(source.path(&app_state.project))?;
-        let tree = root_tree(&repo)?;
+    let Some(v_id) = resolve_version_and_target(&file, version, &target) else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
 
-        match read_file(&tree, [scope, name_part])? {
-            Some(versions) => toml::de::from_str(&versions)?,
-            None => return Ok(HttpResponse::NotFound().finish()),
-        }
-    };
-
-    let Some((v_id, entry, targets)) = ({
-        let version = match version {
-            VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() {
-                Some(latest) => latest.clone(),
-                None => return Ok(HttpResponse::NotFound().finish()),
-            },
-            VersionRequest::Specific(version) => version,
-        };
-
-        let versions = file
-            .entries
-            .iter()
-            .filter(|(v_id, _)| *v_id.version() == version);
-
-        match target {
-            TargetRequest::Any => versions.clone().min_by_key(|(v_id, _)| *v_id.target()),
-            TargetRequest::Specific(kind) => versions
-                .clone()
-                .find(|(_, entry)| entry.target.kind() == kind),
-        }
-        .map(|(v_id, entry)| {
-            (
-                v_id,
-                entry,
-                versions.map(|(_, entry)| (&entry.target).into()).collect(),
-            )
-        })
-    }) else {
-        return Ok(HttpResponse::NotFound().finish());
-    };
-
-    if let Some(doc_name) = query.doc.as_deref() {
-        let hash = 'finder: {
-            let mut hash = entry.docs.iter().map(|doc| &doc.kind).collect::<Vec<_>>();
-            while let Some(doc) = hash.pop() {
-                match doc {
-                    DocEntryKind::Page { name, hash } if name == doc_name => {
-                        break 'finder hash.clone()
-                    }
-                    DocEntryKind::Category { items, .. } => {
-                        hash.extend(items.iter().map(|item| &item.kind))
-                    }
-                    _ => continue,
-                };
-            }
-
-            return Ok(HttpResponse::NotFound().finish());
-        };
-
-        return app_state.storage.get_doc(&hash).await;
-    }
-
-    let accept = request
-        .headers()
-        .get(ACCEPT)
-        .and_then(|accept| accept.to_str().ok())
-        .and_then(|accept| match accept.to_lowercase().as_str() {
-            "text/plain" => Some(true),
-            "application/octet-stream" => Some(false),
-            _ => None,
-        });
-
-    if let Some(readme) = accept {
-        return if readme {
-            app_state.storage.get_readme(&name, v_id).await
-        } else {
-            app_state.storage.get_package(&name, v_id).await
-        };
-    }
-
-    let response = PackageResponse {
-        name: name.to_string(),
-        version: v_id.version().to_string(),
-        targets,
-        description: entry.description.clone().unwrap_or_default(),
-        published_at: entry.published_at,
-        license: entry.license.clone().unwrap_or_default(),
-        authors: entry.authors.clone(),
-        repository: entry.repository.clone().map(|url| url.to_string()),
-    };
-
-    let mut value = serde_json::to_value(response)?;
-    value["docs"] = serde_json::to_value(entry.docs.clone())?;
-    value["dependencies"] = serde_json::to_value(entry.dependencies.clone())?;
-
-    Ok(HttpResponse::Ok().json(value))
+    Ok(HttpResponse::Ok().json(PackageResponse::new(&name, v_id, &file)))
 }
```
registry/src/endpoints/package_versions.rs:

```diff
@@ -1,54 +1,55 @@
-use std::collections::{BTreeMap, BTreeSet};
-
-use actix_web::{web, HttpResponse, Responder};
-
-use crate::{error::Error, package::PackageResponse, AppState};
-use pesde::{
-    names::PackageName,
-    source::{
-        git_index::{read_file, root_tree, GitBasedSource},
-        pesde::IndexFile,
-    },
+use crate::{
+    error::RegistryError,
+    package::{read_package, PackageResponse, PackageVersionsResponse},
+    AppState,
 };
+use actix_web::{web, HttpResponse, Responder};
+use pesde::{names::PackageName, source::ids::VersionId};
+use semver::Version;
+use std::collections::{btree_map::Entry, BTreeMap};
+
+pub async fn get_package_versions_v0(
+    app_state: web::Data<AppState>,
+    path: web::Path<PackageName>,
+) -> Result<impl Responder, RegistryError> {
+    let name = path.into_inner();
+
+    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
+
+    let mut versions = BTreeMap::<&Version, &VersionId>::new();
+
+    for v_id in file.entries.keys() {
+        match versions.entry(v_id.version()) {
+            Entry::Vacant(entry) => {
+                entry.insert(v_id);
+            }
+            Entry::Occupied(mut entry) => {
+                if entry.get() < &v_id {
+                    entry.insert(v_id);
+                }
+            }
+        }
+    }
+
+    let responses = versions
+        .into_values()
+        .map(|v_id| PackageResponse::new(&name, v_id, &file))
+        .collect::<Vec<_>>();
+
+    Ok(HttpResponse::Ok().json(responses))
+}
 
 pub async fn get_package_versions(
     app_state: web::Data<AppState>,
     path: web::Path<PackageName>,
-) -> Result<impl Responder, Error> {
+) -> Result<impl Responder, RegistryError> {
     let name = path.into_inner();
 
-    let (scope, name_part) = name.as_str();
-
-    let file: IndexFile = {
-        let source = app_state.source.lock().await;
-        let repo = gix::open(source.path(&app_state.project))?;
-        let tree = root_tree(&repo)?;
-
-        match read_file(&tree, [scope, name_part])? {
-            Some(versions) => toml::de::from_str(&versions)?,
-            None => return Ok(HttpResponse::NotFound().finish()),
-        }
-    };
-
-    let mut responses = BTreeMap::new();
-
-    for (v_id, entry) in file.entries {
-        let info = responses
-            .entry(v_id.version().clone())
-            .or_insert_with(|| PackageResponse {
-                name: name.to_string(),
-                version: v_id.version().to_string(),
-                targets: BTreeSet::new(),
-                description: entry.description.unwrap_or_default(),
-                published_at: entry.published_at,
-                license: entry.license.unwrap_or_default(),
-                authors: entry.authors.clone(),
-                repository: entry.repository.clone().map(|url| url.to_string()),
-            });
-
-        info.targets.insert(entry.target.into());
-        info.published_at = info.published_at.max(entry.published_at);
-    }
-
-    Ok(HttpResponse::Ok().json(responses.into_values().collect::<Vec<_>>()))
+    let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
+        return Ok(HttpResponse::NotFound().finish());
+    };
+
+    Ok(HttpResponse::Ok().json(PackageVersionsResponse::new(&name, &file)))
 }
```
registry/src/endpoints/publish_version.rs (the document is cut off partway through this diff):

```diff
@@ -1,507 +1,460 @@
 use crate::{
     auth::UserId,
-    benv,
-    error::{Error, ErrorResponse},
-    search::update_version,
-    storage::StorageImpl,
+    error::{ErrorResponse, RegistryError},
+    git::push_changes,
+    package::{read_package, read_scope_info},
+    search::update_search_version,
+    storage::StorageImpl as _,
     AppState,
 };
-use actix_web::{web, web::Bytes, HttpResponse, Responder};
+use actix_web::{web, web::Bytes, HttpResponse};
 use async_compression::Level;
-use convert_case::{Case, Casing};
+use convert_case::{Case, Casing as _};
 use fs_err::tokio as fs;
-use futures::{future::join_all, join};
-use git2::{Remote, Repository, Signature};
 use pesde::{
-    manifest::Manifest,
-    source::{
-        git_index::{read_file, root_tree, GitBasedSource},
-        pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
-        specifiers::DependencySpecifiers,
-        version_id::VersionId,
-        IGNORED_DIRS, IGNORED_FILES,
-    },
+    manifest::{DependencyType, Manifest},
+    source::{
+        git_index::GitBasedSource as _,
+        ids::VersionId,
+        pesde::{DocEntry, DocEntryKind, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
+        specifiers::DependencySpecifiers,
+        traits::RefreshOptions,
+        ADDITIONAL_FORBIDDEN_FILES, IGNORED_DIRS, IGNORED_FILES,
+    },
     MANIFEST_FILE_NAME,
 };
 use sentry::add_breadcrumb;
 use serde::Deserialize;
-use sha2::{Digest, Sha256};
+use sha2::{Digest as _, Sha256};
 use std::{
     collections::{BTreeSet, HashMap},
-    io::{Cursor, Write},
+    io::Cursor,
 };
-use tokio::io::{AsyncReadExt, AsyncWriteExt};
+use tokio::{
+    io::{AsyncReadExt as _, AsyncWriteExt as _},
+    task::JoinSet,
+};
 
-fn signature<'a>() -> Signature<'a> {
-    Signature::now(
-        &benv!(required "COMMITTER_GIT_NAME"),
-        &benv!(required "COMMITTER_GIT_EMAIL"),
-    )
-    .unwrap()
-}
-
-fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
-    let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
-    let upstream_branch = upstream_branch_buf.as_str().unwrap();
-
-    let refspec_buf = remote
-        .refspecs()
-        .find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
-        .unwrap()
-        .rtransform(upstream_branch)?;
-    let refspec = refspec_buf.as_str().unwrap();
-
-    Ok(refspec.to_string())
-}
-
-const ADDITIONAL_FORBIDDEN_FILES: &[&str] = &["default.project.json"];
-
 #[derive(Debug, Deserialize, Default)]
 struct DocEntryInfo {
     #[serde(default)]
     label: Option<String>,
     #[serde(default, alias = "position")]
     sidebar_position: Option<usize>,
     #[serde(default)]
     collapsed: bool,
 }
 
 pub async fn publish_package(
     app_state: web::Data<AppState>,
     bytes: Bytes,
     user_id: web::ReqData<UserId>,
-) -> Result<impl Responder, Error> {
-    let source = app_state.source.lock().await;
-    source.refresh(&app_state.project).await.map_err(Box::new)?;
+) -> Result<HttpResponse, RegistryError> {
+    let source = app_state.source.write().await;
+    source
+        .refresh(&RefreshOptions {
+            project: app_state.project.clone(),
+        })
+        .await
+        .map_err(Box::new)?;
     let config = source.config(&app_state.project).await?;
 
     let package_dir = tempfile::tempdir()?;
 
     {
         let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(Cursor::new(&bytes));
         let mut archive = tokio_tar::Archive::new(&mut decoder);
 
         archive.unpack(package_dir.path()).await?;
     }
 
     let mut manifest = None::<Manifest>;
     let mut readme = None::<Vec<u8>>;
     let mut docs = BTreeSet::new();
     let mut docs_pages = HashMap::new();
 
     let mut read_dir = fs::read_dir(package_dir.path()).await?;
     while let Some(entry) = read_dir.next_entry().await? {
         let file_name = entry
             .file_name()
             .to_str()
-            .ok_or_else(|| Error::InvalidArchive("file name contains non UTF-8 characters".into()))?
+            .ok_or_else(|| {
+                RegistryError::InvalidArchive("file name contains non UTF-8 characters".into())
+            })?
             .to_string();
 
         if entry.file_type().await?.is_dir() {
             if IGNORED_DIRS.contains(&file_name.as_str()) {
-                return Err(Error::InvalidArchive(format!(
+                return Err(RegistryError::InvalidArchive(format!(
                     "archive contains forbidden directory: {file_name}"
                 )));
             }
 
             if file_name == "docs" {
                 let mut stack = vec![(
                     BTreeSet::new(),
                     fs::read_dir(entry.path()).await?,
                     None::<DocEntryInfo>,
                 )];
 
                 'outer: while let Some((set, iter, category_info)) = stack.last_mut() {
                     while let Some(entry) = iter.next_entry().await? {
                         let file_name = entry
                             .file_name()
                             .to_str()
                             .ok_or_else(|| {
-                                Error::InvalidArchive(
+                                RegistryError::InvalidArchive(
                                     "file name contains non UTF-8 characters".into(),
                                 )
                             })?
                             .to_string();
 
                         if entry.file_type().await?.is_dir() {
                             stack.push((
                                 BTreeSet::new(),
                                 fs::read_dir(entry.path()).await?,
                                 Some(DocEntryInfo {
                                     label: Some(file_name.to_case(Case::Title)),
                                     ..Default::default()
                                 }),
                             ));
                             continue 'outer;
                         }
 
                         if file_name == "_category_.json" {
                             let info = fs::read_to_string(entry.path()).await?;
                             let mut info: DocEntryInfo = serde_json::from_str(&info)?;
                             let old_info = category_info.take();
                             info.label = info.label.or(old_info.and_then(|i| i.label));
                             *category_info = Some(info);
                             continue;
                         }
 
                         let Some(file_name) = file_name.strip_suffix(".md") else {
                             continue;
                         };
 
                         let content = fs::read_to_string(entry.path()).await?;
                         let content = content.trim();
                         let hash = format!("{:x}", Sha256::digest(content.as_bytes()));
 
                         let mut gz = async_compression::tokio::bufread::GzipEncoder::with_quality(
                             Cursor::new(content.as_bytes().to_vec()),
                             Level::Best,
                         );
                         let mut bytes = vec![];
                         gz.read_to_end(&mut bytes).await?;
-                        docs_pages.insert(hash.to_string(), bytes);
+                        docs_pages.insert(hash.clone(), bytes);
 
                         let mut lines = content.lines().peekable();
                         let front_matter = if lines.peek().filter(|l| **l == "---").is_some() {
                             lines.next(); // skip the first `---`
 
                             let front_matter = lines
                                 .by_ref()
                                 .take_while(|l| *l != "---")
                                 .collect::<Vec<_>>()
                                 .join("\n");
 
                             lines.next(); // skip the last `---`
 
                             front_matter
                         } else {
                             "".to_string()
                         };
 
                         let h1 = lines
                             .find(|l| !l.trim().is_empty())
                             .and_then(|l| l.strip_prefix("# "))
-                            .map(|s| s.to_string());
+                            .map(ToString::to_string);
 
                         let info: DocEntryInfo =
-                            serde_yaml::from_str(&front_matter).map_err(|_| {
-                                Error::InvalidArchive(format!(
+                            serde_yaml::from_str(&front_matter).map_err(|_e| {
+                                RegistryError::InvalidArchive(format!(
                                     "doc {file_name}'s frontmatter isn't valid YAML"
                                 ))
                             })?;
 
                         set.insert(DocEntry {
-                            label: info.label.or(h1).unwrap_or(file_name.to_case(Case::Title)),
+                            label: info
+                                .label
+                                .or(h1)
+                                .unwrap_or_else(|| file_name.to_case(Case::Title)),
                             position: info.sidebar_position,
                             kind: DocEntryKind::Page {
                                 name: entry
                                     .path()
                                     .strip_prefix(package_dir.path().join("docs"))
                                     .unwrap()
                                     .with_extension("")
                                     .to_str()
                                     .ok_or_else(|| {
-                                        Error::InvalidArchive(
+                                        RegistryError::InvalidArchive(
                                             "file name contains non UTF-8 characters".into(),
                                         )
                                     })?
                                     // ensure that the path is always using forward slashes
-                                    .replace("\\", "/"),
+                                    .replace('\\', "/"),
                                 hash,
                             },
                         });
                     }
 
                     // should never be None
                     let (popped, _, category_info) = stack.pop().unwrap();
                     docs = popped;
 
                     if let Some((set, _, _)) = stack.last_mut() {
                         let category_info = category_info.unwrap_or_default();
 
                         set.insert(DocEntry {
                             label: category_info.label.unwrap(),
                             position: category_info.sidebar_position,
                             kind: DocEntryKind::Category {
                                 items: {
                                     let curr_docs = docs;
                                     docs = BTreeSet::new();
                                     curr_docs
                                 },
                                 collapsed: category_info.collapsed,
                             },
                         });
                     }
                 }
             }
 
             continue;
         }
 
         if IGNORED_FILES.contains(&file_name.as_str())
             || ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str())
         {
-            return Err(Error::InvalidArchive(format!(
+            return Err(RegistryError::InvalidArchive(format!(
                 "archive contains forbidden file: {file_name}"
             )));
         }
 
         if file_name == MANIFEST_FILE_NAME {
             let content = fs::read_to_string(entry.path()).await?;
 
             manifest = Some(toml::de::from_str(&content)?);
         } else if file_name
             .to_lowercase()
             .split_once('.')
             .filter(|(file, ext)| *file == "readme" && (*ext == "md" || *ext == "txt"))
             .is_some()
         {
             if readme.is_some() {
-                return Err(Error::InvalidArchive(
+                return Err(RegistryError::InvalidArchive(
                     "archive contains multiple readme files".into(),
                 ));
             }
 
             let mut file = fs::File::open(entry.path()).await?;
 
             let mut gz = async_compression::tokio::write::GzipEncoder::new(vec![]);
             tokio::io::copy(&mut file, &mut gz).await?;
             gz.shutdown().await?;
             readme = Some(gz.into_inner());
         }
     }
 
     let Some(manifest) = manifest else {
-        return Err(Error::InvalidArchive(
+        return Err(RegistryError::InvalidArchive(
             "archive doesn't contain a manifest".into(),
         ));
     };
 
     add_breadcrumb(sentry::Breadcrumb {
         category: Some("publish".into()),
         message: Some(format!(
             "publish request for {}@{} {}. has readme: {}. docs: {}",
             manifest.name,
             manifest.version,
             manifest.target,
             readme.is_some(),
             docs_pages.len()
         )),
         level: sentry::Level::Info,
         ..Default::default()
     });
 
     {
         let dependencies = manifest.all_dependencies().map_err(|e| {
-            Error::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
+            RegistryError::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
         })?;
 
         for (specifier, _) in dependencies.values() {
             match specifier {
                 DependencySpecifiers::Pesde(specifier) => {
                     if specifier
                         .index
                         .as_deref()
                         .filter(|index| match gix::Url::try_from(*index) {
                             Ok(url) => config
                                 .other_registries_allowed
                                 .is_allowed_or_same(source.repo_url().clone(), url),
                             Err(_) => false,
                         })
                         .is_none()
                     {
-                        return Err(Error::InvalidArchive(format!(
+                        return Err(RegistryError::InvalidArchive(format!(
                             "invalid index in pesde dependency {specifier}"
                         )));
                     }
                 }
                 DependencySpecifiers::Wally(specifier) => {
                     if specifier
                         .index
                         .as_deref()
                         .filter(|index| match gix::Url::try_from(*index) {
                             Ok(url) => config.wally_allowed.is_allowed(url),
                             Err(_) => false,
                         })
                         .is_none()
                     {
-                        return Err(Error::InvalidArchive(format!(
+                        return Err(RegistryError::InvalidArchive(format!(
                             "invalid index in wally dependency {specifier}"
                         )));
                     }
                 }
                 DependencySpecifiers::Git(specifier) => {
                     if !config.git_allowed.is_allowed(specifier.repo.clone()) {
-                        return Err(Error::InvalidArchive(
+                        return Err(RegistryError::InvalidArchive(
                             "git dependencies are not allowed".into(),
                         ));
                     }
                 }
                 DependencySpecifiers::Workspace(_) => {
```
|
||||
// workspace specifiers are to be transformed into pesde specifiers by the sender
|
||||
return Err(Error::InvalidArchive(
|
||||
"non-transformed workspace dependency".into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
for (specifier, ty) in dependencies.values() {
|
||||
// we need not verify dev dependencies, as they won't be installed
|
||||
if *ty == DependencyType::Dev {
|
||||
continue;
|
||||
}
|
||||
|
||||
let repo = Repository::open_bare(source.path(&app_state.project))?;
|
||||
let gix_repo = gix::open(repo.path())?;
|
||||
match specifier {
|
||||
DependencySpecifiers::Pesde(specifier) => {
|
||||
let allowed = match gix::Url::try_from(&*specifier.index) {
|
||||
Ok(url) => config
|
||||
.other_registries_allowed
|
||||
.is_allowed_or_same(source.repo_url().clone(), url),
|
||||
Err(_) => false,
|
||||
};
|
||||
|
||||
let gix_tree = root_tree(&gix_repo)?;
|
||||
if !allowed {
|
||||
return Err(RegistryError::InvalidArchive(format!(
|
||||
"invalid index in pesde dependency {specifier}"
|
||||
)));
|
||||
}
|
||||
}
|
||||
DependencySpecifiers::Wally(specifier) => {
|
||||
let allowed = match gix::Url::try_from(&*specifier.index) {
|
||||
Ok(url) => config.wally_allowed.is_allowed(url),
|
||||
Err(_) => false,
|
||||
};
|
||||
|
||||
let (scope, name) = manifest.name.as_str();
|
||||
let mut oids = vec![];
|
||||
if !allowed {
|
||||
return Err(RegistryError::InvalidArchive(format!(
|
||||
"invalid index in wally dependency {specifier}"
|
||||
)));
|
||||
}
|
||||
}
|
||||
DependencySpecifiers::Git(specifier) => {
|
||||
if !config.git_allowed.is_allowed(specifier.repo.clone()) {
|
||||
return Err(RegistryError::InvalidArchive(
|
||||
"git dependencies are not allowed".into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
DependencySpecifiers::Workspace(_) => {
|
||||
// workspace specifiers are to be transformed into pesde specifiers by the sender
|
||||
return Err(RegistryError::InvalidArchive(
|
||||
"non-transformed workspace dependency".into(),
|
||||
));
|
||||
}
|
||||
DependencySpecifiers::Path(_) => {
|
||||
return Err(RegistryError::InvalidArchive(
|
||||
"path dependencies are not allowed".into(),
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
match read_file(&gix_tree, [scope, SCOPE_INFO_FILE])? {
|
||||
Some(info) => {
|
||||
let info: ScopeInfo = toml::de::from_str(&info)?;
|
||||
if !info.owners.contains(&user_id.0) {
|
||||
return Ok(HttpResponse::Forbidden().finish());
|
||||
}
|
||||
}
|
||||
None => {
|
||||
let scope_info = toml::to_string(&ScopeInfo {
|
||||
owners: BTreeSet::from([user_id.0]),
|
||||
})?;
|
||||
let mut files = HashMap::new();
|
||||
|
||||
let mut blob_writer = repo.blob_writer(None)?;
|
||||
blob_writer.write_all(scope_info.as_bytes())?;
|
||||
oids.push((SCOPE_INFO_FILE, blob_writer.commit()?));
|
||||
}
|
||||
};
|
||||
let scope = read_scope_info(&app_state, manifest.name.scope(), &source).await?;
|
||||
if let Some(info) = scope {
|
||||
if !info.owners.contains(&user_id.0) {
|
||||
return Ok(HttpResponse::Forbidden().finish());
|
||||
}
|
||||
} else {
|
||||
let scope_info = toml::to_string(&ScopeInfo {
|
||||
owners: BTreeSet::from([user_id.0]),
|
||||
})?;
|
||||
|
||||
let mut file: IndexFile =
|
||||
toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;
|
||||
files.insert(SCOPE_INFO_FILE.to_string(), scope_info.into_bytes());
|
||||
}
|
||||
|
||||
let new_entry = IndexFileEntry {
|
||||
target: manifest.target.clone(),
|
||||
published_at: chrono::Utc::now(),
|
||||
description: manifest.description.clone(),
|
||||
license: manifest.license.clone(),
|
||||
authors: manifest.authors.clone(),
|
||||
repository: manifest.repository.clone(),
|
||||
docs,
|
||||
let mut file = read_package(&app_state, &manifest.name, &source)
|
||||
.await?
|
||||
.unwrap_or_default();
|
||||
|
||||
dependencies,
|
||||
};
|
||||
let new_entry = IndexFileEntry {
|
||||
target: manifest.target.clone(),
|
||||
published_at: jiff::Timestamp::now(),
|
||||
engines: manifest.engines.clone(),
|
||||
description: manifest.description.clone(),
|
||||
license: manifest.license.clone(),
|
||||
authors: manifest.authors.clone(),
|
||||
repository: manifest.repository.clone(),
|
||||
yanked: false,
|
||||
docs,
|
||||
|
||||
let this_version = file
|
||||
.entries
|
||||
.keys()
|
||||
.find(|v_id| *v_id.version() == manifest.version);
|
||||
if let Some(this_version) = this_version {
|
||||
let other_entry = file.entries.get(this_version).unwrap();
|
||||
dependencies,
|
||||
};
|
||||
|
||||
// description cannot be different - which one to render in the "Recently published" list?
|
||||
// the others cannot be different because what to return from the versions endpoint?
|
||||
if other_entry.description != new_entry.description
|
||||
|| other_entry.license != new_entry.license
|
||||
|| other_entry.authors != new_entry.authors
|
||||
|| other_entry.repository != new_entry.repository
|
||||
{
|
||||
return Ok(HttpResponse::BadRequest().json(ErrorResponse {
|
||||
error: "same version with different description or license already exists"
|
||||
.to_string(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
let same_version = file
|
||||
.entries
|
||||
.iter()
|
||||
.find(|(v_id, _)| *v_id.version() == manifest.version);
|
||||
if let Some((_, other_entry)) = same_version {
|
||||
// description cannot be different - which one to render in the "Recently published" list?
|
||||
if other_entry.description != new_entry.description {
|
||||
return Ok(HttpResponse::BadRequest().json(ErrorResponse {
|
||||
error: "same versions with different descriptions are forbidden".to_string(),
|
||||
}));
|
||||
}
|
||||
}
|
||||
|
||||
if file
|
||||
.entries
|
||||
.insert(
|
||||
VersionId::new(manifest.version.clone(), manifest.target.kind()),
|
||||
new_entry.clone(),
|
||||
)
|
||||
.is_some()
|
||||
{
|
||||
return Ok(HttpResponse::Conflict().finish());
|
||||
}
|
||||
if file
|
||||
.entries
|
||||
.insert(
|
||||
VersionId::new(manifest.version.clone(), manifest.target.kind()),
|
||||
new_entry.clone(),
|
||||
)
|
||||
.is_some()
|
||||
{
|
||||
return Ok(HttpResponse::Conflict().finish());
|
||||
}
|
||||
|
||||
let mut remote = repo.find_remote("origin")?;
|
||||
let refspec = get_refspec(&repo, &mut remote)?;
|
||||
files.insert(
|
||||
manifest.name.name().to_string(),
|
||||
toml::to_string(&file)?.into_bytes(),
|
||||
);
|
||||
|
||||
let reference = repo.find_reference(&refspec)?;
|
||||
push_changes(
|
||||
&app_state,
|
||||
&source,
|
||||
manifest.name.scope().to_string(),
|
||||
files,
|
||||
format!(
|
||||
"add {}@{} {}",
|
||||
manifest.name, manifest.version, manifest.target
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
drop(source);
|
||||
|
||||
{
|
||||
let index_content = toml::to_string(&file)?;
|
||||
let mut blob_writer = repo.blob_writer(None)?;
|
||||
blob_writer.write_all(index_content.as_bytes())?;
|
||||
oids.push((name, blob_writer.commit()?));
|
||||
}
|
||||
update_search_version(&app_state, &manifest.name, &new_entry);
|
||||
}
|
||||
|
||||
let old_root_tree = reference.peel_to_tree()?;
|
||||
let old_scope_tree = match old_root_tree.get_name(scope) {
|
||||
Some(entry) => Some(repo.find_tree(entry.id())?),
|
||||
None => None,
|
||||
};
|
||||
let version_id = VersionId::new(manifest.version.clone(), manifest.target.kind());
|
||||
|
||||
let mut scope_tree = repo.treebuilder(old_scope_tree.as_ref())?;
|
||||
for (file, oid) in oids {
|
||||
scope_tree.insert(file, oid, 0o100644)?;
|
||||
}
|
||||
let mut tasks = docs_pages
|
||||
.into_iter()
|
||||
.map(|(hash, content)| {
|
||||
let app_state = app_state.clone();
|
||||
async move { app_state.storage.store_doc(hash, content).await }
|
||||
})
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
let scope_tree_id = scope_tree.write()?;
|
||||
let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
|
||||
root_tree.insert(scope, scope_tree_id, 0o040000)?;
|
||||
{
|
||||
let app_state = app_state.clone();
|
||||
let name = manifest.name.clone();
|
||||
let version_id = version_id.clone();
|
||||
|
||||
let tree_oid = root_tree.write()?;
|
||||
tasks.spawn(async move {
|
||||
app_state
|
||||
.storage
|
||||
.store_package(&name, &version_id, bytes.to_vec())
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
repo.commit(
|
||||
Some("HEAD"),
|
||||
&signature(),
|
||||
&signature(),
|
||||
&format!(
|
||||
"add {}@{} {}",
|
||||
manifest.name, manifest.version, manifest.target
|
||||
),
|
||||
&repo.find_tree(tree_oid)?,
|
||||
&[&reference.peel_to_commit()?],
|
||||
)?;
|
||||
if let Some(readme) = readme {
|
||||
let app_state = app_state.clone();
|
||||
let name = manifest.name.clone();
|
||||
let version_id = version_id.clone();
|
||||
|
||||
let mut push_options = git2::PushOptions::new();
|
||||
let mut remote_callbacks = git2::RemoteCallbacks::new();
|
||||
tasks.spawn(async move {
|
||||
app_state
|
||||
.storage
|
||||
.store_readme(&name, &version_id, readme)
|
||||
.await
|
||||
});
|
||||
}
|
||||
|
||||
let git_creds = app_state.project.auth_config().git_credentials().unwrap();
|
||||
remote_callbacks.credentials(|_, _, _| {
|
||||
git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
|
||||
});
|
||||
while let Some(res) = tasks.join_next().await {
|
||||
res.unwrap()?;
|
||||
}
|
||||
|
||||
push_options.remote_callbacks(remote_callbacks);
|
||||
|
||||
remote.push(&[refspec], Some(&mut push_options))?;
|
||||
|
||||
update_version(&app_state, &manifest.name, new_entry);
|
||||
}
|
||||
|
||||
let version_id = VersionId::new(manifest.version.clone(), manifest.target.kind());
|
||||
|
||||
let (a, b, c) = join!(
|
||||
app_state
|
||||
.storage
|
||||
.store_package(&manifest.name, &version_id, bytes.to_vec()),
|
||||
join_all(
|
||||
docs_pages
|
||||
.into_iter()
|
||||
.map(|(hash, content)| app_state.storage.store_doc(hash, content)),
|
||||
),
|
||||
async {
|
||||
if let Some(readme) = readme {
|
||||
app_state
|
||||
.storage
|
||||
.store_readme(&manifest.name, &version_id, readme)
|
||||
.await
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
);
|
||||
a?;
|
||||
b.into_iter().collect::<Result<(), _>>()?;
|
||||
c?;
|
||||
|
||||
Ok(HttpResponse::Ok().body(format!(
|
||||
"published {}@{} {}",
|
||||
manifest.name, manifest.version, manifest.target
|
||||
)))
|
||||
Ok(HttpResponse::Ok().body(format!("published {}@{version_id}", manifest.name)))
|
||||
}
|
||||
|
|
|
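For orientation, here is a minimal client-side sketch of exercising the publish endpoint this handler serves. The host, token, and archive path are placeholders, not part of the diff; the route shape and success body come from the code above.

use std::fs;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Hypothetical archive produced ahead of time (tar + gzip of the package).
    let archive = fs::read("package.tar.gz")?;

    let response = reqwest::Client::new()
        .post("https://registry.example.com/v1/packages") // placeholder host
        .bearer_auth("API_TOKEN") // placeholder credential; real requests pass auth::write_mw
        .body(archive)
        .send()
        .await?
        .error_for_status()?;

    // On success the handler responds with e.g. "published scope/name@1.0.0 luau".
    println!("{}", response.text().await?);
    Ok(())
}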
@@ -1,107 +1,91 @@
use crate::{
    error::RegistryError,
    package::{read_package, PackageResponse},
    search::find_max_searchable,
    AppState,
};
use actix_web::{web, HttpResponse};
use pesde::names::PackageName;
use serde::Deserialize;
use std::{collections::HashMap, sync::Arc};
use tantivy::{collector::Count, query::AllQuery, schema::Value as _, DateTime, Order};
use tokio::task::JoinSet;

#[derive(Deserialize)]
pub struct Request {
    #[serde(default)]
    query: String,
    #[serde(default)]
    offset: usize,
}

pub async fn search_packages(
    app_state: web::Data<AppState>,
    request_query: web::Query<Request>,
) -> Result<HttpResponse, RegistryError> {
    let searcher = app_state.search_reader.searcher();
    let schema = searcher.schema();

    let id = schema.get_field("id").unwrap();

    let query = request_query.query.trim();

    let query = if query.is_empty() {
        Box::new(AllQuery)
    } else {
        app_state.query_parser.parse_query(query)?
    };

    let (count, top_docs) = searcher
        .search(
            &query,
            &(
                Count,
                tantivy::collector::TopDocs::with_limit(50)
                    .and_offset(request_query.offset)
                    .order_by_fast_field::<DateTime>("published_at", Order::Desc),
            ),
        )
        .unwrap();

    let source = Arc::new(app_state.source.clone().read_owned().await);

    let mut results = top_docs
        .iter()
        .map(|_| None::<PackageResponse>)
        .collect::<Vec<_>>();

    let mut tasks = top_docs
        .into_iter()
        .enumerate()
        .map(|(i, (_, doc_address))| {
            let app_state = app_state.clone();
            let doc = searcher.doc::<HashMap<_, _>>(doc_address).unwrap();
            let source = source.clone();

            async move {
                let id = (&doc[&id])
                    .as_str()
                    .unwrap()
                    .parse::<PackageName>()
                    .unwrap();

                let file = read_package(&app_state, &id, &source).await?.unwrap();

                let (version_id, _) = find_max_searchable(&file).unwrap();

                Ok::<_, RegistryError>((i, PackageResponse::new(&id, version_id, &file)))
            }
        })
        .collect::<JoinSet<_>>();

    while let Some(res) = tasks.join_next().await {
        let (i, res) = res.unwrap()?;
        results[i] = Some(res);
    }

    Ok(HttpResponse::Ok().json(serde_json::json!({
        "data": results,
        "count": count,
    })))
}
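Worth noting in the handler above: `results` is pre-filled with `None` and each spawned task carries its `enumerate` index, so the tantivy ranking survives out-of-order task completion. A self-contained sketch of that same pattern, with illustrative inputs:

use tokio::task::JoinSet;

#[tokio::main]
async fn main() {
    let inputs = vec!["a", "b", "c"];
    let mut results = vec![None::<String>; inputs.len()];

    // Spawn one task per input, tagging each with its original index so the
    // output order is independent of completion order.
    let mut tasks: JoinSet<(usize, String)> = inputs
        .into_iter()
        .enumerate()
        .map(|(i, s)| async move { (i, s.to_uppercase()) })
        .collect();

    while let Some(res) = tasks.join_next().await {
        let (i, value) = res.unwrap();
        results[i] = Some(value);
    }

    assert_eq!(results[0].as_deref(), Some("A")); // slot 0 still holds input 0's result
}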
registry/src/endpoints/yank_version.rs (new file, 83 lines)

@@ -0,0 +1,83 @@
use crate::{
    auth::UserId,
    error::RegistryError,
    git::push_changes,
    package::{read_package, read_scope_info},
    request_path::AllOrSpecificTarget,
    search::search_version_changed,
    AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use semver::Version;
use std::collections::HashMap;

pub async fn yank_package_version(
    request: HttpRequest,
    app_state: web::Data<AppState>,
    path: web::Path<(PackageName, Version, AllOrSpecificTarget)>,
    user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
    let yanked = request.method() != Method::DELETE;
    let (name, version, target) = path.into_inner();
    let source = app_state.source.write().await;

    let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    if !scope_info.owners.contains(&user_id.0) {
        return Ok(HttpResponse::Forbidden().finish());
    }

    let Some(mut file) = read_package(&app_state, &name, &source).await? else {
        return Ok(HttpResponse::NotFound().finish());
    };

    let mut targets = vec![];

    for (v_id, entry) in &mut file.entries {
        if *v_id.version() != version {
            continue;
        }

        match target {
            AllOrSpecificTarget::Specific(kind) if entry.target.kind() != kind => continue,
            _ => {}
        }

        if entry.yanked == yanked {
            continue;
        }

        targets.push(entry.target.kind().to_string());
        entry.yanked = yanked;
    }

    if targets.is_empty() {
        return Ok(HttpResponse::Conflict().finish());
    }

    let file_string = toml::to_string(&file)?;

    push_changes(
        &app_state,
        &source,
        name.scope().to_string(),
        HashMap::from([(name.name().to_string(), file_string.into_bytes())]),
        format!(
            "{}yank {name}@{version} {}",
            if yanked { "" } else { "un" },
            targets.join(", "),
        ),
    )
    .await?;

    search_version_changed(&app_state, &name, &file);

    Ok(HttpResponse::Ok().body(format!(
        "{}yanked {name}@{version} {}",
        if yanked { "" } else { "un" },
        targets.join(", "),
    )))
}
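The handler derives the flag from the HTTP method: anything other than DELETE yanks, DELETE un-yanks, matching the `.put(...)/.delete(...)` resource registration further down. A hedged client sketch (host, token, and the URL encoding of the `scope/name` path segment are assumptions):

#[tokio::main]
async fn main() -> Result<(), reqwest::Error> {
    let client = reqwest::Client::new();
    let base = "https://registry.example.com/v1"; // placeholder host

    // PUT = yank (any non-DELETE method sets yanked = true)
    client
        .put(format!("{base}/packages/scope%2Fname/1.0.0/luau/yank"))
        .bearer_auth("API_TOKEN") // placeholder credential
        .send()
        .await?
        .error_for_status()?;

    // DELETE = un-yank
    client
        .delete(format!("{base}/packages/scope%2Fname/1.0.0/luau/yank"))
        .bearer_auth("API_TOKEN")
        .send()
        .await?
        .error_for_status()?;

    Ok(())
}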
@@ -1,89 +1,107 @@
use actix_web::{body::BoxBody, HttpResponse, ResponseError};
use pesde::source::git_index::errors::{ReadFile, RefreshError, TreeError};
use serde::Serialize;
use std::error::Error;
use thiserror::Error;

#[derive(Debug, Error)]
pub enum RegistryError {
    #[error("failed to parse query")]
    Query(#[from] tantivy::query::QueryParserError),

    #[error("error reading repo file")]
    ReadFile(#[from] ReadFile),

    #[error("error deserializing file")]
    Deserialize(#[from] toml::de::Error),

    #[error("failed to send request: {1}\nserver response: {0}")]
    ReqwestResponse(String, #[source] reqwest::Error),

    #[error("error sending request")]
    Reqwest(#[from] reqwest::Error),

    #[error("failed to parse archive entries")]
    Tar(#[from] std::io::Error),

    #[error("invalid archive")]
    InvalidArchive(String),

    #[error("failed to read index config")]
    Config(#[from] pesde::source::pesde::errors::ConfigError),

    #[error("git error")]
    Git(#[from] git2::Error),

    #[error("failed to refresh source")]
    Refresh(#[from] Box<RefreshError>),

    #[error("failed to serialize struct")]
    Serialize(#[from] toml::ser::Error),

    #[error("failed to serialize struct")]
    SerializeJson(#[from] serde_json::Error),

    #[error("failed to open git repo")]
    OpenRepo(#[from] gix::open::Error),

    #[error("failed to get root tree")]
    RootTree(#[from] TreeError),
}

#[derive(Debug, Serialize)]
pub struct ErrorResponse {
    pub error: String,
}

impl ResponseError for RegistryError {
    fn error_response(&self) -> HttpResponse<BoxBody> {
        match self {
            RegistryError::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("failed to parse query: {e}"),
            }),
            RegistryError::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
                error: "corrupt archive".to_string(),
            }),
            RegistryError::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("archive is invalid: {e}"),
            }),
            e => {
                tracing::error!("unhandled error: {}", display_error(e));
                HttpResponse::InternalServerError().finish()
            }
        }
    }
}

pub trait ReqwestErrorExt {
    async fn into_error(self) -> Result<Self, RegistryError>
    where
        Self: Sized;
}

impl ReqwestErrorExt for reqwest::Response {
    async fn into_error(self) -> Result<Self, RegistryError> {
        match self.error_for_status_ref() {
            Ok(_) => Ok(self),
            Err(e) => Err(RegistryError::ReqwestResponse(self.text().await?, e)),
        }
    }
}

pub fn display_error<E: Error>(err: E) -> String {
    let mut causes = vec![];
    let mut source = err.source();
    while let Some(src) = source {
        causes.push(format!("\t- {src}"));
        source = src.source();
    }
    format!(
        "{err}{}",
        if causes.is_empty() {
            "".into()
        } else {
            format!("\n{}", causes.join("\n"))
        }
    )
}
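display_error walks the full source() chain and renders one indented cause per line, which is what the unhandled-error branch logs instead of the Debug output it replaced. A small demo of the output format; the Outer wrapper type is invented for illustration, and display_error is copied verbatim from the file above to keep the sketch self-contained:

use std::{error::Error, fmt, io};

fn display_error<E: Error>(err: E) -> String {
    let mut causes = vec![];
    let mut source = err.source();
    while let Some(src) = source {
        causes.push(format!("\t- {src}"));
        source = src.source();
    }
    format!(
        "{err}{}",
        if causes.is_empty() {
            "".into()
        } else {
            format!("\n{}", causes.join("\n"))
        }
    )
}

#[derive(Debug)]
struct Outer(io::Error); // invented wrapper, purely for the demo

impl fmt::Display for Outer {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "failed to parse archive entries")
    }
}

impl Error for Outer {
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        Some(&self.0)
    }
}

fn main() {
    let err = Outer(io::Error::new(io::ErrorKind::UnexpectedEof, "unexpected EOF"));
    // Prints:
    // failed to parse archive entries
    //     - unexpected EOF
    println!("{}", display_error(err));
}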
registry/src/git.rs (new file, 98 lines)

@@ -0,0 +1,98 @@
use crate::{benv, error::RegistryError, AppState};
use git2::{Remote, Repository, Signature};
use pesde::source::{git_index::GitBasedSource as _, pesde::PesdePackageSource};
use std::collections::HashMap;
use tokio::task::spawn_blocking;

fn signature<'a>() -> Signature<'a> {
    Signature::now(
        &benv!(required "COMMITTER_GIT_NAME"),
        &benv!(required "COMMITTER_GIT_EMAIL"),
    )
    .unwrap()
}

fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
    let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
    let upstream_branch = upstream_branch_buf.as_str().unwrap();

    let refspec_buf = remote
        .refspecs()
        .find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
        .unwrap()
        .rtransform(upstream_branch)?;
    let refspec = refspec_buf.as_str().unwrap();

    Ok(refspec.to_string())
}

const FILE_FILEMODE: i32 = 0o100_644;
const DIR_FILEMODE: i32 = 0o040_000;

pub async fn push_changes(
    app_state: &AppState,
    source: &PesdePackageSource,
    directory: String,
    files: HashMap<String, Vec<u8>>,
    message: String,
) -> Result<(), RegistryError> {
    let path = source.path(&app_state.project);
    let auth_config = app_state.project.auth_config().clone();

    spawn_blocking(move || {
        let repo = Repository::open_bare(path)?;
        let mut oids = HashMap::new();

        let mut remote = repo.find_remote("origin")?;
        let refspec = get_refspec(&repo, &mut remote)?;

        let reference = repo.find_reference(&refspec)?;

        for (name, contents) in files {
            let oid = repo.blob(&contents)?;
            oids.insert(name, oid);
        }

        let old_root_tree = reference.peel_to_tree()?;
        let old_dir_tree = match old_root_tree.get_name(&directory) {
            Some(entry) => Some(repo.find_tree(entry.id())?),
            None => None,
        };

        let mut dir_tree = repo.treebuilder(old_dir_tree.as_ref())?;
        for (file, oid) in oids {
            dir_tree.insert(file, oid, FILE_FILEMODE)?;
        }

        let dir_tree_id = dir_tree.write()?;
        let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
        root_tree.insert(directory, dir_tree_id, DIR_FILEMODE)?;

        let tree_oid = root_tree.write()?;

        repo.commit(
            Some("HEAD"),
            &signature(),
            &signature(),
            &message,
            &repo.find_tree(tree_oid)?,
            &[&reference.peel_to_commit()?],
        )?;

        let mut push_options = git2::PushOptions::new();
        let mut remote_callbacks = git2::RemoteCallbacks::new();

        let git_creds = auth_config.git_credentials().unwrap();
        remote_callbacks.credentials(|_, _, _| {
            git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
        });

        push_options.remote_callbacks(remote_callbacks);

        remote.push(&[refspec], Some(&mut push_options))?;

        Ok(())
    })
    .await
    .unwrap()
}
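push_changes is now the single choke point for index writes: callers hand it a map of file name to bytes under one scope directory, and it blobs, commits, and pushes inside a blocking task. A hypothetical call site, with illustrative scope, file name, and content:

use std::collections::HashMap;

// `app_state` and `source` are the values the endpoints above already hold.
async fn publish_index_file(
    app_state: &AppState,
    source: &PesdePackageSource,
) -> Result<(), RegistryError> {
    let files = HashMap::from([(
        "my-package".to_string(),      // file inside the scope directory
        b"...index toml...".to_vec(),  // stand-in for toml::to_string(&file)
    )]);

    push_changes(
        app_state,
        source,
        "acme".to_string(), // scope directory in the index repo
        files,
        "add acme/my-package@1.0.0 luau".to_string(),
    )
    .await
}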
@@ -1,55 +1,61 @@
use crate::{
    auth::{get_auth_from_env, Auth, UserIdExtractor},
    search::make_search,
    storage::{get_storage_from_env, Storage},
};
use actix_cors::Cors;
use actix_governor::{Governor, GovernorConfigBuilder};
use actix_web::{
    middleware::{from_fn, Compress, NormalizePath, TrailingSlash},
    rt::System,
    web,
    web::PayloadConfig,
    App, HttpServer,
};
use fs_err::tokio as fs;
use pesde::{
    source::{
        pesde::PesdePackageSource,
        traits::{PackageSource as _, RefreshOptions},
    },
    AuthConfig, Project,
};
use std::{env::current_dir, path::PathBuf, sync::Arc};
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{
    fmt::format::FmtSpan, layer::SubscriberExt as _, util::SubscriberInitExt as _, EnvFilter,
};

mod auth;
mod endpoints;
mod error;
mod git;
mod package;
mod request_path;
mod search;
mod storage;

#[must_use]
pub fn make_reqwest() -> reqwest::Client {
    reqwest::ClientBuilder::new()
        .user_agent(concat!(
            env!("CARGO_PKG_NAME"),
            "/",
            env!("CARGO_PKG_VERSION")
        ))
        .build()
        .unwrap()
}

pub struct AppState {
    pub source: Arc<tokio::sync::RwLock<PesdePackageSource>>,
    pub project: Project,
    pub storage: Storage,
    pub auth: Auth,

    pub search_reader: tantivy::IndexReader,
    pub search_writer: std::sync::Mutex<tantivy::IndexWriter>,
    pub query_parser: tantivy::query::QueryParser,
}

#[macro_export]

@@ -58,7 +64,7 @@ macro_rules! benv {
        std::env::var($name)
    };
    ($name:expr => $default:expr) => {
        benv!($name).unwrap_or_else(|_| $default.to_string())
    };
    (required $name:expr) => {
        benv!($name).expect(concat!("Environment variable `", $name, "` must be set"))

@@ -86,157 +92,223 @@ macro_rules! benv {
}

async fn run() -> std::io::Result<()> {
    let address = benv!("ADDRESS" => "127.0.0.1");
    let port: u16 = benv!(parse "PORT" => "8080");

    let cwd = current_dir().unwrap();
    let data_dir =
        PathBuf::from(benv!("DATA_DIR" => "{CWD}/data").replace("{CWD}", cwd.to_str().unwrap()));
    fs::create_dir_all(&data_dir).await.unwrap();

    let project = Project::new(
        &cwd,
        None::<PathBuf>,
        data_dir.join("project"),
        &cwd,
        AuthConfig::new().with_git_credentials(Some(gix::sec::identity::Account {
            username: benv!(required "GIT_USERNAME"),
            password: benv!(required "GIT_PASSWORD"),
        })),
    );
    let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
    source
        .refresh(&RefreshOptions {
            project: project.clone(),
        })
        .await
        .expect("failed to refresh source");
    let config = source
        .config(&project)
        .await
        .expect("failed to get index config");

    let (search_reader, search_writer, query_parser) = make_search(&project, &source);

    let app_data = web::Data::new(AppState {
        storage: {
            let storage = get_storage_from_env();
            tracing::info!("storage: {storage}");
            storage
        },
        auth: {
            let auth = get_auth_from_env(&config);
            tracing::info!("auth: {auth}");
            auth
        },
        source: Arc::new(tokio::sync::RwLock::new(source)),
        project,

        search_reader,
        search_writer: std::sync::Mutex::new(search_writer),
        query_parser,
    });

    let publish_governor_config = GovernorConfigBuilder::default()
        .key_extractor(UserIdExtractor)
        .burst_size(12)
        .seconds_per_request(60)
        .use_headers()
        .finish()
        .unwrap();

    let publish_payload_config = PayloadConfig::new(config.max_archive_size);

    HttpServer::new(move || {
        App::new()
            .wrap(sentry_actix::Sentry::with_transaction())
            .wrap(NormalizePath::new(TrailingSlash::Trim))
            .wrap(Cors::permissive())
            .wrap(tracing_actix_web::TracingLogger::default())
            .wrap(Compress::default())
            .app_data(app_data.clone())
            .route(
                "/",
                web::get().to(|| async {
                    concat!(env!("CARGO_PKG_NAME"), "/", env!("CARGO_PKG_VERSION"))
                }),
            )
            .service(
                web::scope("/v0")
                    .route(
                        "/search",
                        web::get()
                            .to(endpoints::search::search_packages)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}",
                        web::get()
                            .to(endpoints::package_versions::get_package_versions_v0)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}",
                        web::get()
                            .to(endpoints::package_version::get_package_version_v0)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .service(
                        web::scope("/packages")
                            .app_data(publish_payload_config.clone())
                            .route(
                                "",
                                web::post()
                                    .to(endpoints::publish_version::publish_package)
                                    .wrap(Governor::new(&publish_governor_config))
                                    .wrap(from_fn(auth::write_mw)),
                            ),
                    ),
            )
            .service(
                web::scope("/v1")
                    .route(
                        "/search",
                        web::get()
                            .to(endpoints::search::search_packages)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}",
                        web::get()
                            .to(endpoints::package_versions::get_package_versions)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .service(
                        web::resource("/packages/{name}/deprecate")
                            .put(endpoints::deprecate_version::deprecate_package_version)
                            .delete(endpoints::deprecate_version::deprecate_package_version)
                            .wrap(from_fn(auth::write_mw)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}",
                        web::get()
                            .to(endpoints::package_version::get_package_version)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}/archive",
                        web::get()
                            .to(endpoints::package_archive::get_package_archive)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}/doc",
                        web::get()
                            .to(endpoints::package_doc::get_package_doc)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .route(
                        "/packages/{name}/{version}/{target}/readme",
                        web::get()
                            .to(endpoints::package_readme::get_package_readme)
                            .wrap(from_fn(auth::read_mw)),
                    )
                    .service(
                        web::resource("/packages/{name}/{version}/{target}/yank")
                            .put(endpoints::yank_version::yank_package_version)
                            .delete(endpoints::yank_version::yank_package_version)
                            .wrap(from_fn(auth::write_mw)),
                    )
                    .service(
                        web::scope("/packages")
                            .app_data(publish_payload_config.clone())
                            .route(
                                "",
                                web::post()
                                    .to(endpoints::publish_version::publish_package)
                                    .wrap(Governor::new(&publish_governor_config))
                                    .wrap(from_fn(auth::write_mw)),
                            ),
                    ),
            )
    })
    .bind((address, port))?
    .run()
    .await
}

// can't use #[actix_web::main] because of Sentry:
// "Note: Macros like #[tokio::main] and #[actix_web::main] are not supported. The Sentry client must be initialized before the async runtime is started so that all threads are correctly connected to the Hub."
// https://docs.sentry.io/platforms/rust/guides/actix-web/
fn main() -> std::io::Result<()> {
    let _ = dotenvy::dotenv();

    let tracing_env_filter = EnvFilter::builder()
        .with_default_directive(LevelFilter::INFO.into())
        .from_env_lossy()
        .add_directive("reqwest=info".parse().unwrap())
        .add_directive("rustls=info".parse().unwrap())
        .add_directive("tokio_util=info".parse().unwrap())
        .add_directive("goblin=info".parse().unwrap())
        .add_directive("tower=info".parse().unwrap())
        .add_directive("hyper=info".parse().unwrap())
        .add_directive("h2=info".parse().unwrap());

    tracing_subscriber::registry()
        .with(tracing_env_filter)
        .with(
            tracing_subscriber::fmt::layer()
                .compact()
                .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE),
        )
        .with(sentry::integrations::tracing::layer())
        .init();

    let guard = sentry::init(sentry::ClientOptions {
        release: sentry::release_name!(),
        dsn: benv!(parse "SENTRY_DSN").ok(),
        session_mode: sentry::SessionMode::Request,
        traces_sample_rate: 1.0,
        debug: true,
        ..Default::default()
    });

    if guard.is_enabled() {
        std::env::set_var("RUST_BACKTRACE", "full");
        tracing::info!("sentry initialized");
    } else {
        tracing::info!("sentry **NOT** initialized");
    }

    System::new().block_on(run())
}
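The benv! macro used throughout run() and main() comes in plain, defaulted, parsed, and required forms, all of which appear above. A usage recap, assuming the snippet sits inside this crate where the macro is defined:

// Plain: forwards to std::env::var, yielding Result<String, VarError>
let raw = benv!("ADDRESS");
// Defaulted: falls back lazily when the variable is unset
let address = benv!("ADDRESS" => "127.0.0.1");
// Defaulted + parsed: the string (or default) is then parsed into the target type
let port: u16 = benv!(parse "PORT" => "8080");
// Required: panics with "Environment variable `...` must be set" when missing
let user = benv!(required "GIT_USERNAME");
// Parse without default: yields a Result, so .ok() turns failure into None
let dsn = benv!(parse "SENTRY_DSN").ok();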
@@ -1,61 +1,267 @@
use crate::AppState;
use pesde::{
    manifest::{
        target::{Target, TargetKind},
        Alias, DependencyType,
    },
    names::PackageName,
    source::{
        git_index::{read_file, root_tree, GitBasedSource as _},
        ids::VersionId,
        pesde::{IndexFile, IndexFileEntry, PesdePackageSource, ScopeInfo, SCOPE_INFO_FILE},
        specifiers::DependencySpecifiers,
    },
};
use semver::Version;
use serde::Serialize;
use std::collections::{BTreeMap, BTreeSet};
use tokio::task::spawn_blocking;

#[derive(Debug, Serialize, Eq, PartialEq)]
struct TargetInfoInner {
    lib: bool,
    bin: bool,
    #[serde(skip_serializing_if = "BTreeSet::is_empty")]
    scripts: BTreeSet<String>,
}

impl TargetInfoInner {
    fn new(target: &Target) -> Self {
        TargetInfoInner {
            lib: target.lib_path().is_some(),
            bin: target.bin_path().is_some(),
            scripts: target
                .scripts()
                .map(|scripts| scripts.keys().cloned().collect())
                .unwrap_or_default(),
        }
    }
}

#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct TargetInfo {
    kind: TargetKind,
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    yanked: bool,
    #[serde(flatten)]
    inner: TargetInfoInner,
}

impl TargetInfo {
    fn new(target: &Target, yanked: bool) -> Self {
        TargetInfo {
            kind: target.kind(),
            yanked,
            inner: TargetInfoInner::new(target),
        }
    }
}

impl Ord for TargetInfo {
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        self.kind.cmp(&other.kind)
    }
}

impl PartialOrd for TargetInfo {
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}

#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
#[serde(untagged)]
pub enum RegistryDocEntryKind {
    Page {
        name: String,
    },
    Category {
        #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
        items: BTreeSet<RegistryDocEntry>,
        #[serde(default, skip_serializing_if = "std::ops::Not::not")]
        collapsed: bool,
    },
}

#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
pub struct RegistryDocEntry {
    label: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    position: Option<usize>,
    #[serde(flatten)]
    kind: RegistryDocEntryKind,
}

impl From<pesde::source::pesde::DocEntry> for RegistryDocEntry {
    fn from(entry: pesde::source::pesde::DocEntry) -> Self {
        Self {
            label: entry.label,
            position: entry.position,
            kind: match entry.kind {
                pesde::source::pesde::DocEntryKind::Page { name, .. } => {
                    RegistryDocEntryKind::Page { name }
                }
                pesde::source::pesde::DocEntryKind::Category { items, collapsed } => {
                    RegistryDocEntryKind::Category {
                        items: items.into_iter().map(Into::into).collect(),
                        collapsed,
                    }
                }
            },
        }
    }
}

#[derive(Debug, Serialize)]
pub struct PackageResponseInner {
    published_at: jiff::Timestamp,
    #[serde(skip_serializing_if = "String::is_empty")]
    license: String,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    authors: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    repository: Option<String>,
    #[serde(skip_serializing_if = "BTreeSet::is_empty")]
    docs: BTreeSet<RegistryDocEntry>,
    #[serde(skip_serializing_if = "BTreeMap::is_empty")]
    dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}

impl PackageResponseInner {
    pub fn new(entry: &IndexFileEntry) -> Self {
        PackageResponseInner {
            published_at: entry.published_at,
            license: entry.license.clone().unwrap_or_default(),
            authors: entry.authors.clone(),
            repository: entry.repository.clone().map(|url| url.to_string()),
            docs: entry.docs.iter().cloned().map(Into::into).collect(),
            dependencies: entry.dependencies.clone(),
        }
    }
}

#[derive(Debug, Serialize)]
pub struct PackageResponse {
    name: String,
    version: String,
    targets: BTreeSet<TargetInfo>,
    #[serde(skip_serializing_if = "String::is_empty")]
    description: String,
    #[serde(skip_serializing_if = "String::is_empty")]
    deprecated: String,
    #[serde(flatten)]
    inner: PackageResponseInner,
}

impl PackageResponse {
    pub fn new(name: &PackageName, version_id: &VersionId, file: &IndexFile) -> Self {
        let entry = &file.entries[version_id];

        PackageResponse {
            name: name.to_string(),
            version: version_id.version().to_string(),
            targets: file
                .entries
                .iter()
                .filter(|(ver, _)| ver.version() == version_id.version())
                .map(|(_, entry)| TargetInfo::new(&entry.target, entry.yanked))
                .collect(),
            description: entry.description.clone().unwrap_or_default(),
            deprecated: file.meta.deprecated.clone(),
            inner: PackageResponseInner::new(entry),
        }
    }
}

#[derive(Debug, Serialize)]
struct PackageVersionsResponseVersionInner {
    target: TargetInfoInner,
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    yanked: bool,
    #[serde(flatten)]
    inner: PackageResponseInner,
}

#[derive(Debug, Serialize, Default)]
struct PackageVersionsResponseVersion {
    #[serde(skip_serializing_if = "String::is_empty")]
    description: String,
    targets: BTreeMap<TargetKind, PackageVersionsResponseVersionInner>,
}

#[derive(Debug, Serialize)]
pub struct PackageVersionsResponse {
    name: String,
    #[serde(skip_serializing_if = "String::is_empty")]
    deprecated: String,
    versions: BTreeMap<Version, PackageVersionsResponseVersion>,
}

impl PackageVersionsResponse {
    pub fn new(name: &PackageName, file: &IndexFile) -> Self {
        let mut versions = BTreeMap::<Version, PackageVersionsResponseVersion>::new();

        for (v_id, entry) in &file.entries {
            let versions_resp = versions.entry(v_id.version().clone()).or_default();

            versions_resp.description = entry.description.clone().unwrap_or_default();
            versions_resp.targets.insert(
                entry.target.kind(),
                PackageVersionsResponseVersionInner {
                    target: TargetInfoInner::new(&entry.target),
                    yanked: entry.yanked,
                    inner: PackageResponseInner::new(entry),
                },
            );
        }

        PackageVersionsResponse {
            name: name.to_string(),
            deprecated: file.meta.deprecated.clone(),
            versions,
        }
    }
}

pub async fn read_package(
    app_state: &AppState,
    package: &PackageName,
    source: &PesdePackageSource,
) -> Result<Option<IndexFile>, crate::error::RegistryError> {
    let path = source.path(&app_state.project);
    let package = package.clone();
    spawn_blocking(move || {
        let (scope, name) = package.as_str();
        let repo = gix::open(path)?;
        let tree = root_tree(&repo)?;

        let Some(versions) = read_file(&tree, [scope, name])? else {
            return Ok(None);
        };

        toml::de::from_str(&versions).map_err(Into::into)
    })
    .await
    .unwrap()
}

pub async fn read_scope_info(
    app_state: &AppState,
    scope: &str,
    source: &PesdePackageSource,
) -> Result<Option<ScopeInfo>, crate::error::RegistryError> {
    let path = source.path(&app_state.project);
    let scope = scope.to_string();
    spawn_blocking(move || {
        let repo = gix::open(path)?;
        let tree = root_tree(&repo)?;

        let Some(versions) = read_file(&tree, [&*scope, SCOPE_INFO_FILE])? else {
            return Ok(None);
        };

        toml::de::from_str(&versions).map_err(Into::into)
    })
    .await
    .unwrap()
}
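These response types lean on #[serde(flatten)] plus skip_serializing_if to keep the JSON flat and free of empty fields. A minimal standalone sketch of the resulting shape; the field names here are illustrative, not the registry's actual schema:

use serde::Serialize;

#[derive(Serialize)]
struct Inner {
    published_at: String,
    #[serde(skip_serializing_if = "String::is_empty")]
    license: String,
}

#[derive(Serialize)]
struct Response {
    name: String,
    #[serde(flatten)]
    inner: Inner,
}

fn main() {
    let r = Response {
        name: "acme/my-package".into(),
        inner: Inner {
            published_at: "2024-01-01T00:00:00Z".into(),
            license: String::new(),
        },
    };
    // Empty `license` is skipped and `inner` is flattened into the top level:
    // {"name":"acme/my-package","published_at":"2024-01-01T00:00:00Z"}
    println!("{}", serde_json::to_string(&r).unwrap());
}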
101
registry/src/request_path.rs
Normal file
101
registry/src/request_path.rs
Normal file
|
@ -0,0 +1,101 @@
|
|||
use pesde::{
	manifest::target::TargetKind,
	source::{ids::VersionId, pesde::IndexFile},
};
use semver::Version;
use serde::{Deserialize, Deserializer};

#[derive(Debug)]
pub enum LatestOrSpecificVersion {
	Latest,
	Specific(Version),
}

impl<'de> Deserialize<'de> for LatestOrSpecificVersion {
	fn deserialize<D>(deserializer: D) -> Result<LatestOrSpecificVersion, D::Error>
	where
		D: Deserializer<'de>,
	{
		let s = String::deserialize(deserializer)?;
		if s.eq_ignore_ascii_case("latest") {
			return Ok(LatestOrSpecificVersion::Latest);
		}

		s.parse()
			.map(LatestOrSpecificVersion::Specific)
			.map_err(serde::de::Error::custom)
	}
}

#[derive(Debug)]
pub enum AnyOrSpecificTarget {
	Any,
	Specific(TargetKind),
}

impl<'de> Deserialize<'de> for AnyOrSpecificTarget {
	fn deserialize<D>(deserializer: D) -> Result<AnyOrSpecificTarget, D::Error>
	where
		D: Deserializer<'de>,
	{
		let s = String::deserialize(deserializer)?;
		if s.eq_ignore_ascii_case("any") {
			return Ok(AnyOrSpecificTarget::Any);
		}

		s.parse()
			.map(AnyOrSpecificTarget::Specific)
			.map_err(serde::de::Error::custom)
	}
}

pub fn resolve_version_and_target<'a>(
	file: &'a IndexFile,
	version: LatestOrSpecificVersion,
	target: &AnyOrSpecificTarget,
) -> Option<&'a VersionId> {
	let version = match version {
		LatestOrSpecificVersion::Latest => {
			match file.entries.keys().map(VersionId::version).max() {
				Some(latest) => latest.clone(),
				None => return None,
			}
		}
		LatestOrSpecificVersion::Specific(version) => version,
	};

	let mut versions = file
		.entries
		.iter()
		.filter(|(v_id, _)| *v_id.version() == version);

	match target {
		AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| v_id.target()),
		AnyOrSpecificTarget::Specific(kind) => {
			versions.find(|(_, entry)| entry.target.kind() == *kind)
		}
	}
	.map(|(v_id, _)| v_id)
}
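A quick sketch of how these query types parse; serde_json stands in here for the actual query-string deserializer, since both impls work with any deserializer that yields a string (values are made up):

	let v: LatestOrSpecificVersion = serde_json::from_str("\"LATEST\"").unwrap();
	assert!(matches!(v, LatestOrSpecificVersion::Latest)); // matching is case-insensitive

	let v: LatestOrSpecificVersion = serde_json::from_str("\"1.2.3\"").unwrap();
	assert!(matches!(v, LatestOrSpecificVersion::Specific(s) if s == semver::Version::new(1, 2, 3)));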
#[derive(Debug)]
pub enum AllOrSpecificTarget {
	All,
	Specific(TargetKind),
}

impl<'de> Deserialize<'de> for AllOrSpecificTarget {
	fn deserialize<D>(deserializer: D) -> Result<AllOrSpecificTarget, D::Error>
	where
		D: Deserializer<'de>,
	{
		let s = String::deserialize(deserializer)?;
		if s.eq_ignore_ascii_case("all") {
			return Ok(AllOrSpecificTarget::All);
		}

		s.parse()
			.map(AllOrSpecificTarget::Specific)
			.map_err(serde::de::Error::custom)
	}
}
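A worked sketch of resolve_version_and_target on hypothetical index contents:

	// entries: 1.0.0 (luau), 1.1.0 (luau), 1.1.0 (lune)
	//   Latest + Any             -> a 1.1.0 entry; the smallest TargetKind in its ordering wins the tie
	//   Latest + Specific(lune)  -> the 1.1.0 (lune) entry
	//   Specific(1.0.0) + Any    -> the 1.0.0 (luau) entry
	let v_id = resolve_version_and_target(&file, LatestOrSpecificVersion::Latest, &AnyOrSpecificTarget::Any);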
@@ -1,148 +1,221 @@
 use crate::AppState;
-use async_stream::stream;
-use futures::{Stream, StreamExt};
 use pesde::{
 	names::PackageName,
 	source::{
-		git_index::{root_tree, GitBasedSource},
+		git_index::{root_tree, GitBasedSource as _},
+		ids::VersionId,
 		pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE},
 	},
 	Project,
 };
 use std::collections::BTreeMap;
 use tantivy::{
 	doc,
 	query::QueryParser,
 	schema::{IndexRecordOption, TextFieldIndexing, TextOptions, FAST, STORED, STRING},
 	tokenizer::TextAnalyzer,
 	DateTime, IndexReader, IndexWriter, Term,
 };
-use tokio::pin;

-pub async fn all_packages(
-	source: &PesdePackageSource,
-	project: &Project,
-) -> impl Stream<Item = (PackageName, IndexFile)> {
-	let path = source.path(project);
-
-	stream! {
-		let repo = gix::open(&path).expect("failed to open index");
-		let tree = root_tree(&repo).expect("failed to get root tree");
-
-		for entry in tree.iter() {
-			let entry = entry.expect("failed to read entry");
-			let object = entry.object().expect("failed to get object");
-
-			// directories will be trees, and files will be blobs
-			if !matches!(object.kind, gix::object::Kind::Tree) {
-				continue;
-			}
-
-			let package_scope = entry.filename().to_string();
-
-			for inner_entry in object.into_tree().iter() {
-				let inner_entry = inner_entry.expect("failed to read inner entry");
-				let object = inner_entry.object().expect("failed to get object");
-
-				if !matches!(object.kind, gix::object::Kind::Blob) {
-					continue;
-				}
-
-				let package_name = inner_entry.filename().to_string();
-
-				if package_name == SCOPE_INFO_FILE {
-					continue;
-				}
-
-				let blob = object.into_blob();
-				let string = String::from_utf8(blob.data.clone()).expect("failed to parse utf8");
-
-				let file: IndexFile = toml::from_str(&string).expect("failed to parse index file");
-
-				// if this panics, it's an issue with the index.
-				let name = format!("{package_scope}/{package_name}").parse().unwrap();
-
-				yield (name, file);
-			}
-		}
-	}
-}
+type Entries = BTreeMap<String, gix::ObjectId>;
+
+struct TreeIterator<'repo> {
+	repo: &'repo gix::Repository,
+	entries: Entries,
+	current: Option<(String, Entries)>,
+}
+
+fn collect_entries(tree: &gix::Tree) -> Result<Entries, gix::objs::decode::Error> {
+	tree.iter()
+		.map(|res| res.map(|r| (r.filename().to_string(), r.object_id())))
+		.collect()
+}
+
+impl Iterator for TreeIterator<'_> {
+	type Item = (PackageName, IndexFile);
+
+	fn next(&mut self) -> Option<Self::Item> {
+		if self
+			.current
+			.as_ref()
+			.is_none_or(|(_, entries)| entries.is_empty())
+		{
+			loop {
+				let (scope_name, scope_oid) = self.entries.pop_last()?;
+
+				let object = self
+					.repo
+					.find_object(scope_oid)
+					.expect("failed to get scope object");
+
+				if object.kind != gix::objs::Kind::Tree {
+					continue;
+				}
+
+				let tree = object.into_tree();
+				let mut entries = collect_entries(&tree).expect("failed to read scope entries");
+
+				entries.remove(SCOPE_INFO_FILE);
+
+				if entries.is_empty() {
+					continue;
+				}
+
+				self.current = Some((scope_name, entries));
+				break;
+			}
+		}
+
+		let (scope_name, entries) = self.current.as_mut()?;
+		let (file_name, file_oid) = entries.pop_last()?;
+
+		let object = self
+			.repo
+			.find_object(file_oid)
+			.expect("failed to get scope entry object");
+
+		if object.kind != gix::objs::Kind::Blob {
+			return None;
+		}
+
+		let mut blob = object.into_blob();
+		let string = String::from_utf8(blob.take_data()).expect("failed to parse utf8");
+
+		let file = toml::from_str(&string).expect("failed to parse index file");
+
+		Some((
+			// if this panics, it's an issue with the index.
+			format!("{scope_name}/{file_name}").parse().unwrap(),
+			file,
+		))
+	}
+}
+
+pub fn find_max_searchable(file: &IndexFile) -> Option<(&VersionId, &IndexFileEntry)> {
+	file.entries
+		.iter()
+		.filter(|(_, entry)| !entry.yanked)
+		.max_by(|(v_id_a, entry_a), (v_id_b, entry_b)| {
+			v_id_a
+				.version()
+				.cmp(v_id_b.version())
+				.then(entry_a.published_at.cmp(&entry_b.published_at))
+		})
+}
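The search index therefore only ever holds the newest non-yanked entry per package; a sketch on hypothetical entries:

	// entries: 1.2.0 (yanked), 1.1.0 luau, 1.1.0 lune (published later)
	// 1.2.0 is filtered out; the version tie between the 1.1.0 entries is
	// broken by `published_at`, so the lune entry is what gets indexed.
	let Some((v_id, entry)) = find_max_searchable(&file) else {
		return; // every entry is yanked: nothing searchable
	};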
-pub async fn make_search(
+pub fn make_search(
 	project: &Project,
 	source: &PesdePackageSource,
 ) -> (IndexReader, IndexWriter, QueryParser) {
 	let mut schema_builder = tantivy::schema::SchemaBuilder::new();

 	let field_options = TextOptions::default().set_indexing_options(
 		TextFieldIndexing::default()
 			.set_tokenizer("ngram")
 			.set_index_option(IndexRecordOption::WithFreqsAndPositions),
 	);

 	let id_field = schema_builder.add_text_field("id", STRING | STORED);
 	let scope = schema_builder.add_text_field("scope", field_options.clone());
 	let name = schema_builder.add_text_field("name", field_options.clone());
 	let description = schema_builder.add_text_field("description", field_options);
 	let published_at = schema_builder.add_date_field("published_at", FAST);

 	let search_index = tantivy::Index::create_in_ram(schema_builder.build());
 	search_index.tokenizers().register(
 		"ngram",
 		TextAnalyzer::builder(tantivy::tokenizer::NgramTokenizer::all_ngrams(1, 12).unwrap())
 			.filter(tantivy::tokenizer::LowerCaser)
 			.build(),
 	);

 	let search_reader = search_index
 		.reader_builder()
 		.reload_policy(tantivy::ReloadPolicy::Manual)
 		.try_into()
 		.unwrap();
 	let mut search_writer = search_index.writer(50_000_000).unwrap();

-	let stream = all_packages(source, project).await;
-	pin!(stream);
-
-	while let Some((pkg_name, mut file)) = stream.next().await {
-		let Some((_, latest_entry)) = file.entries.pop_last() else {
-			tracing::error!("no versions found for {pkg_name}");
-			continue;
-		};
-
-		search_writer.add_document(doc!(
-			id_field => pkg_name.to_string(),
-			scope => pkg_name.as_str().0,
-			name => pkg_name.as_str().1,
-			description => latest_entry.description.unwrap_or_default(),
-			published_at => DateTime::from_timestamp_secs(latest_entry.published_at.timestamp()),
-		)).unwrap();
-	}
+	let path = source.path(project);
+	let repo = gix::open(path).expect("failed to open index");
+	let tree = root_tree(&repo).expect("failed to get root tree");
+
+	let iter = TreeIterator {
+		entries: collect_entries(&tree).expect("failed to read entries"),
+		repo: &repo,
+		current: None,
+	};
+
+	for (pkg_name, file) in iter {
+		if !file.meta.deprecated.is_empty() {
+			continue;
+		}
+
+		let Some((_, latest_entry)) = find_max_searchable(&file) else {
+			continue;
+		};
+
+		search_writer
+			.add_document(doc!(
+				id_field => pkg_name.to_string(),
+				scope => pkg_name.scope(),
+				name => pkg_name.name(),
+				description => latest_entry.description.clone().unwrap_or_default(),
+				published_at => DateTime::from_timestamp_nanos(latest_entry.published_at.as_nanosecond() as i64),
+			))
+			.unwrap();
+	}

 	search_writer.commit().unwrap();
 	search_reader.reload().unwrap();

 	let mut query_parser = QueryParser::for_index(&search_index, vec![scope, name, description]);
 	query_parser.set_field_boost(scope, 2.0);
 	query_parser.set_field_boost(name, 3.5);

 	(search_reader, search_writer, query_parser)
 }

-pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) {
+pub fn update_search_version(app_state: &AppState, name: &PackageName, entry: &IndexFileEntry) {
 	let mut search_writer = app_state.search_writer.lock().unwrap();
 	let schema = search_writer.index().schema();
 	let id_field = schema.get_field("id").unwrap();

 	search_writer.delete_term(Term::from_field_text(id_field, &name.to_string()));

 	search_writer.add_document(doc!(
 		id_field => name.to_string(),
-		schema.get_field("scope").unwrap() => name.as_str().0,
-		schema.get_field("name").unwrap() => name.as_str().1,
-		schema.get_field("description").unwrap() => entry.description.unwrap_or_default(),
-		schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp())
+		schema.get_field("scope").unwrap() => name.scope(),
+		schema.get_field("name").unwrap() => name.name(),
+		schema.get_field("description").unwrap() => entry.description.clone().unwrap_or_default(),
+		schema.get_field("published_at").unwrap() => DateTime::from_timestamp_nanos(entry.published_at.as_nanosecond() as i64)
 	)).unwrap();

 	search_writer.commit().unwrap();
+	drop(search_writer);
 	app_state.search_reader.reload().unwrap();
 }
+
+pub fn search_version_changed(app_state: &AppState, name: &PackageName, file: &IndexFile) {
+	let entry = if file.meta.deprecated.is_empty() {
+		find_max_searchable(file)
+	} else {
+		None
+	};
+
+	let Some((_, entry)) = entry else {
+		let mut search_writer = app_state.search_writer.lock().unwrap();
+		let schema = search_writer.index().schema();
+		let id_field = schema.get_field("id").unwrap();
+
+		search_writer.delete_term(Term::from_field_text(id_field, &name.to_string()));
+		search_writer.commit().unwrap();
+		drop(search_writer);
+		app_state.search_reader.reload().unwrap();
+
+		return;
+	};
+
+	update_search_version(app_state, name, entry);
+}
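A sketch of how the reworked API is driven (the wiring is hypothetical; the names are the ones defined above):

	// at server startup: build the in-RAM tantivy index from the git index
	let (search_reader, search_writer, query_parser) = make_search(&project, &source);

	// whenever a publish/yank/deprecate rewrites a package's index file:
	search_version_changed(&app_state, &name, &file);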
@@ -1,126 +1,131 @@
-use crate::{error::Error, storage::StorageImpl};
+use crate::{error::RegistryError, storage::StorageImpl};
 use actix_web::{
-	http::header::{CONTENT_ENCODING, CONTENT_TYPE},
+	http::header::{CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE},
 	HttpResponse,
 };
 use fs_err::tokio as fs;
-use pesde::{names::PackageName, source::version_id::VersionId};
+use pesde::{names::PackageName, source::ids::VersionId};
 use std::{
 	fmt::Display,
 	path::{Path, PathBuf},
 };
+use tokio_util::io::ReaderStream;

 #[derive(Debug)]
 pub struct FSStorage {
 	pub root: PathBuf,
 }

-async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
-	Ok(match fs::read(path).await {
-		Ok(contents) => HttpResponse::Ok()
-			.append_header((CONTENT_TYPE, content_type))
-			.append_header((CONTENT_ENCODING, "gzip"))
-			.body(contents),
+async fn read_file_to_response(
+	path: &Path,
+	content_type: &str,
+) -> Result<HttpResponse, RegistryError> {
+	Ok(match fs::File::open(path).await {
+		Ok(file) => HttpResponse::Ok()
+			.append_header((CONTENT_TYPE, content_type))
+			.append_header((CONTENT_ENCODING, "gzip"))
+			.append_header((CONTENT_LENGTH, file.metadata().await?.len()))
+			.streaming(ReaderStream::new(file)),
 		Err(e) if e.kind() == std::io::ErrorKind::NotFound => HttpResponse::NotFound().finish(),
 		Err(e) => return Err(e.into()),
 	})
 }

 impl StorageImpl for FSStorage {
 	async fn store_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		let (scope, name) = package_name.as_str();

 		let path = self
 			.root
 			.join(scope)
 			.join(name)
 			.join(version.version().to_string())
 			.join(version.target().to_string());
 		fs::create_dir_all(&path).await?;

 		fs::write(path.join("pkg.tar.gz"), &contents).await?;

 		Ok(())
 	}

 	async fn get_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		let (scope, name) = package_name.as_str();

 		let path = self
 			.root
 			.join(scope)
 			.join(name)
 			.join(version.version().to_string())
 			.join(version.target().to_string());

 		read_file_to_response(&path.join("pkg.tar.gz"), "application/gzip").await
 	}

 	async fn store_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		let (scope, name) = package_name.as_str();

 		let path = self
 			.root
 			.join(scope)
 			.join(name)
 			.join(version.version().to_string())
 			.join(version.target().to_string());
 		fs::create_dir_all(&path).await?;

 		fs::write(path.join("readme.gz"), &contents).await?;

 		Ok(())
 	}

 	async fn get_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		let (scope, name) = package_name.as_str();

 		let path = self
 			.root
 			.join(scope)
 			.join(name)
 			.join(version.version().to_string())
 			.join(version.target().to_string());

 		read_file_to_response(&path.join("readme.gz"), "text/plain").await
 	}

-	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
+	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
 		let path = self.root.join("Doc");
 		fs::create_dir_all(&path).await?;

 		fs::write(path.join(format!("{doc_hash}.gz")), &contents).await?;

 		Ok(())
 	}

-	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
+	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
 		let path = self.root.join("Doc");

 		read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await
 	}
 }

 impl Display for FSStorage {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		write!(f, "FS")
+		write!(f, "FS ({})", self.root.display())
 	}
 }
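The change from fs::read to fs::File::open plus ReaderStream means stored archives are streamed to the client in chunks instead of being buffered whole in memory, with CONTENT_LENGTH taken from file metadata; the same pattern in isolation (assumes the actix-web, fs-err and tokio-util imports above):

	let file = fs::File::open(&path).await?;
	let len = file.metadata().await?.len();
	let response = HttpResponse::Ok()
		.append_header((CONTENT_LENGTH, len))
		.streaming(ReaderStream::new(file)); // yields the file as a stream of byte chunks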
@@ -1,6 +1,6 @@
-use crate::{benv, error::Error, make_reqwest};
+use crate::{benv, error::RegistryError, make_reqwest};
 use actix_web::HttpResponse;
-use pesde::{names::PackageName, source::version_id::VersionId};
+use pesde::{names::PackageName, source::ids::VersionId};
 use rusty_s3::{Bucket, Credentials, UrlStyle};
 use std::fmt::Display;
@@ -9,133 +9,129 @@ mod s3;
 #[derive(Debug)]
 pub enum Storage {
 	S3(s3::S3Storage),
 	FS(fs::FSStorage),
 }

 pub trait StorageImpl: Display {
 	async fn store_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), crate::error::Error>;
+	) -> Result<(), RegistryError>;
 	async fn get_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, crate::error::Error>;
+	) -> Result<HttpResponse, RegistryError>;

 	async fn store_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), crate::error::Error>;
+	) -> Result<(), RegistryError>;
 	async fn get_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, crate::error::Error>;
+	) -> Result<HttpResponse, RegistryError>;

-	async fn store_doc(
-		&self,
-		doc_hash: String,
-		contents: Vec<u8>,
-	) -> Result<(), crate::error::Error>;
-	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, crate::error::Error>;
+	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError>;
+	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError>;
 }

 impl StorageImpl for Storage {
 	async fn store_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.store_package(package_name, version, contents).await,
 			Storage::FS(fs) => fs.store_package(package_name, version, contents).await,
 		}
 	}

 	async fn get_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.get_package(package_name, version).await,
 			Storage::FS(fs) => fs.get_package(package_name, version).await,
 		}
 	}

 	async fn store_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.store_readme(package_name, version, contents).await,
 			Storage::FS(fs) => fs.store_readme(package_name, version, contents).await,
 		}
 	}

 	async fn get_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.get_readme(package_name, version).await,
 			Storage::FS(fs) => fs.get_readme(package_name, version).await,
 		}
 	}

-	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
+	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.store_doc(doc_hash, contents).await,
 			Storage::FS(fs) => fs.store_doc(doc_hash, contents).await,
 		}
 	}

-	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
+	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
 		match self {
 			Storage::S3(s3) => s3.get_doc(doc_hash).await,
 			Storage::FS(fs) => fs.get_doc(doc_hash).await,
 		}
 	}
 }

 impl Display for Storage {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
 		match self {
-			Storage::S3(s3) => write!(f, "{}", s3),
-			Storage::FS(fs) => write!(f, "{}", fs),
+			Storage::S3(s3) => write!(f, "{s3}"),
+			Storage::FS(fs) => write!(f, "{fs}"),
 		}
 	}
 }

 pub fn get_storage_from_env() -> Storage {
 	if let Ok(endpoint) = benv!(parse "S3_ENDPOINT") {
 		Storage::S3(s3::S3Storage {
-			s3_bucket: Bucket::new(
+			bucket: Bucket::new(
 				endpoint,
 				UrlStyle::Path,
 				benv!(required "S3_BUCKET_NAME"),
 				benv!(required "S3_REGION"),
 			)
 			.unwrap(),
-			s3_credentials: Credentials::new(
+			credentials: Credentials::new(
 				benv!(required "S3_ACCESS_KEY"),
 				benv!(required "S3_SECRET_KEY"),
 			),
 			reqwest_client: make_reqwest(),
 		})
 	} else if let Ok(root) = benv!(parse "FS_STORAGE_ROOT") {
 		Storage::FS(fs::FSStorage { root })
 	} else {
 		panic!("no storage backend configured")
 	}
 }
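Every call site goes through the StorageImpl trait above, so the two backends stay interchangeable, and which one runs is decided purely by environment variables. A sketch of both halves (the surrounding values are examples):

	// uniform dispatch, regardless of backend:
	let storage = get_storage_from_env();
	storage.store_package(&name, &version, bytes).await?;
	let response = storage.get_package(&name, &version).await?;

The two accepted configurations, in the registry's .env style:

	# S3-compatible backend (all five variables required)
	S3_ENDPOINT=https://s3.example.com
	S3_BUCKET_NAME=pesde
	S3_REGION=auto
	S3_ACCESS_KEY=...
	S3_SECRET_KEY=...

	# or the local filesystem backend
	FS_STORAGE_ROOT=/var/lib/pesde-registry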
@@ -1,166 +1,166 @@
 use crate::{
-	error::{Error, ReqwestErrorExt},
+	error::{RegistryError, ReqwestErrorExt as _},
 	storage::StorageImpl,
 };
 use actix_web::{http::header::LOCATION, HttpResponse};
-use pesde::{names::PackageName, source::version_id::VersionId};
+use pesde::{names::PackageName, source::ids::VersionId};
 use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE};
 use rusty_s3::{
 	actions::{GetObject, PutObject},
-	Bucket, Credentials, S3Action,
+	Bucket, Credentials, S3Action as _,
 };
 use std::{fmt::Display, time::Duration};

 #[derive(Debug)]
 pub struct S3Storage {
-	pub s3_bucket: Bucket,
-	pub s3_credentials: Credentials,
+	pub bucket: Bucket,
+	pub credentials: Credentials,
 	pub reqwest_client: reqwest::Client,
 }

 pub const S3_SIGN_DURATION: Duration = Duration::from_secs(60 * 15);

 impl StorageImpl for S3Storage {
 	async fn store_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		let object_url = PutObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
+			&self.bucket,
+			Some(&self.credentials),
 			&format!(
 				"{package_name}/{}/{}/pkg.tar.gz",
 				version.version(),
 				version.target()
 			),
 		)
 		.sign(S3_SIGN_DURATION);

 		self.reqwest_client
 			.put(object_url)
 			.header(CONTENT_TYPE, "application/gzip")
 			.header(CONTENT_ENCODING, "gzip")
 			.body(contents)
 			.send()
 			.await?
 			.into_error()
 			.await?;

 		Ok(())
 	}

 	async fn get_package(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		let object_url = GetObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
+			&self.bucket,
+			Some(&self.credentials),
 			&format!(
 				"{package_name}/{}/{}/pkg.tar.gz",
 				version.version(),
 				version.target()
 			),
 		)
 		.sign(S3_SIGN_DURATION);

 		Ok(HttpResponse::TemporaryRedirect()
 			.append_header((LOCATION, object_url.as_str()))
 			.finish())
 	}

 	async fn store_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
 		contents: Vec<u8>,
-	) -> Result<(), Error> {
+	) -> Result<(), RegistryError> {
 		let object_url = PutObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
+			&self.bucket,
+			Some(&self.credentials),
 			&format!(
 				"{package_name}/{}/{}/readme.gz",
 				version.version(),
 				version.target()
 			),
 		)
 		.sign(S3_SIGN_DURATION);

 		self.reqwest_client
 			.put(object_url)
 			.header(CONTENT_TYPE, "text/plain")
 			.header(CONTENT_ENCODING, "gzip")
 			.body(contents)
 			.send()
 			.await?
 			.into_error()
 			.await?;

 		Ok(())
 	}

 	async fn get_readme(
 		&self,
 		package_name: &PackageName,
 		version: &VersionId,
-	) -> Result<HttpResponse, Error> {
+	) -> Result<HttpResponse, RegistryError> {
 		let object_url = GetObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
+			&self.bucket,
+			Some(&self.credentials),
 			&format!(
 				"{package_name}/{}/{}/readme.gz",
 				version.version(),
 				version.target()
 			),
 		)
 		.sign(S3_SIGN_DURATION);

 		Ok(HttpResponse::TemporaryRedirect()
 			.append_header((LOCATION, object_url.as_str()))
 			.finish())
 	}

-	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
+	async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
 		let object_url = PutObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
+			&self.bucket,
+			Some(&self.credentials),
 			// capitalize Doc to prevent conflicts with scope names
-			&format!("Doc/{}.gz", doc_hash),
+			&format!("Doc/{doc_hash}.gz"),
 		)
 		.sign(S3_SIGN_DURATION);

 		self.reqwest_client
 			.put(object_url)
 			.header(CONTENT_TYPE, "text/plain")
 			.header(CONTENT_ENCODING, "gzip")
 			.body(contents)
 			.send()
 			.await?
 			.into_error()
 			.await?;

 		Ok(())
 	}

-	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
+	async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
 		let object_url = GetObject::new(
-			&self.s3_bucket,
-			Some(&self.s3_credentials),
-			&format!("Doc/{}.gz", doc_hash),
+			&self.bucket,
+			Some(&self.credentials),
+			&format!("Doc/{doc_hash}.gz"),
 		)
 		.sign(S3_SIGN_DURATION);

 		Ok(HttpResponse::TemporaryRedirect()
 			.append_header((LOCATION, object_url.as_str()))
 			.finish())
 	}
 }

 impl Display for S3Storage {
 	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-		write!(f, "S3")
+		write!(f, "S3 (bucket name: {})", self.bucket.name())
 	}
 }
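Reads never proxy object bytes through the registry: each GET is signed for S3_SIGN_DURATION (15 minutes) and the client is redirected straight to the object store. The same pattern in isolation (assumes `bucket` and `credentials` as in S3Storage; the object key is an example):

	use rusty_s3::{actions::GetObject, S3Action as _};

	let url = GetObject::new(&bucket, Some(&credentials), "scope/name/0.1.0/luau/pkg.tar.gz")
		.sign(S3_SIGN_DURATION);
	// respond with a 307 redirect to `url`; the link expires after 15 minutes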
@@ -1 +1,2 @@
 imports_granularity = "Crate"
+hard_tabs = true
src/cli/auth.rs (172 lines)
@@ -1,119 +1,133 @@
 use crate::cli::config::{read_config, write_config};
-use anyhow::Context;
+use anyhow::Context as _;
 use gix::bstr::BStr;
 use keyring::Entry;
 use reqwest::header::AUTHORIZATION;
-use serde::{ser::SerializeMap, Deserialize, Serialize};
+use serde::{ser::SerializeMap as _, Deserialize, Serialize};
 use std::collections::BTreeMap;
+use tokio::task::spawn_blocking;
 use tracing::instrument;

-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Default)]
 pub struct Tokens(pub BTreeMap<gix::Url, String>);

 impl Serialize for Tokens {
 	fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
 	where
 		S: serde::ser::Serializer,
 	{
 		let mut map = serializer.serialize_map(Some(self.0.len()))?;
 		for (k, v) in &self.0 {
 			map.serialize_entry(&k.to_bstring().to_string(), v)?;
 		}
 		map.end()
 	}
 }

 impl<'de> Deserialize<'de> for Tokens {
 	fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
 	where
 		D: serde::de::Deserializer<'de>,
 	{
 		Ok(Tokens(
 			BTreeMap::<String, String>::deserialize(deserializer)?
 				.into_iter()
 				.map(|(k, v)| gix::Url::from_bytes(BStr::new(&k)).map(|k| (k, v)))
 				.collect::<Result<_, _>>()
 				.map_err(serde::de::Error::custom)?,
 		))
 	}
 }

 #[instrument(level = "trace")]
 pub async fn get_tokens() -> anyhow::Result<Tokens> {
 	let config = read_config().await?;
 	if !config.tokens.0.is_empty() {
 		tracing::debug!("using tokens from config");
 		return Ok(config.tokens);
 	}

-	match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
-		Ok(entry) => match entry.get_password() {
-			Ok(token) => {
-				tracing::debug!("using tokens from keyring");
-				return serde_json::from_str(&token).context("failed to parse tokens");
-			}
-			Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
-			Err(e) => return Err(e.into()),
-		},
-		Err(keyring::Error::PlatformFailure(_)) => {}
-		Err(e) => return Err(e.into()),
-	}
-
-	Ok(Tokens(BTreeMap::new()))
+	let keyring_tokens = spawn_blocking(|| match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
+		Ok(entry) => match entry.get_password() {
+			Ok(token) => serde_json::from_str(&token)
+				.map(Some)
+				.context("failed to parse tokens"),
+			Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(None),
+			Err(e) => Err(e.into()),
+		},
+		Err(keyring::Error::PlatformFailure(_)) => Ok(None),
+		Err(e) => Err(e.into()),
+	})
+	.await
+	.unwrap()?;
+
+	if let Some(tokens) = keyring_tokens {
+		tracing::debug!("using tokens from keyring");
+		return Ok(tokens);
+	}
+
+	Ok(Tokens::default())
 }

 #[instrument(level = "trace")]
 pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
-	let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
 	let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;

-	match entry.set_password(&json) {
-		Ok(()) => {
-			tracing::debug!("tokens saved to keyring");
-			return Ok(());
-		}
-		Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
-		Err(e) => return Err(e.into()),
-	}
-
-	tracing::debug!("tokens saved to config");
+	let to_keyring = spawn_blocking(move || {
+		let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
+
+		match entry.set_password(&json) {
+			Ok(()) => Ok::<_, anyhow::Error>(true),
+			Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(false),
+			Err(e) => Err(e.into()),
+		}
+	})
+	.await
+	.unwrap()?;
+
+	if to_keyring {
+		tracing::debug!("tokens saved to keyring");
+		return Ok(());
+	}
+
+	tracing::debug!("saving tokens to config");

 	let mut config = read_config().await?;
 	config.tokens = tokens;
-	write_config(&config).await.map_err(Into::into)
+	write_config(&config).await
 }

 pub async fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
 	let mut tokens = get_tokens().await?;
 	if let Some(token) = token {
 		tokens.0.insert(repo.clone(), token.to_string());
 	} else {
 		tokens.0.remove(repo);
 	}
 	set_tokens(tokens).await
 }

 #[derive(Debug, Deserialize)]
 struct UserResponse {
 	login: String,
 }

 #[instrument(level = "trace")]
 pub async fn get_token_login(
 	reqwest: &reqwest::Client,
 	access_token: &str,
 ) -> anyhow::Result<String> {
 	let response = reqwest
 		.get("https://api.github.com/user")
 		.header(AUTHORIZATION, access_token)
 		.send()
 		.await
 		.context("failed to send user request")?
 		.error_for_status()
 		.context("failed to get user")?
 		.json::<UserResponse>()
 		.await
 		.context("failed to parse user response")?;

 	Ok(response.login)
 }
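Keyring access is synchronous OS IPC, so both paths now run it on the blocking thread pool instead of stalling the async runtime; the same pattern in isolation (service and user names as in the code above):

	use tokio::task::spawn_blocking;

	async fn keyring_tokens_sketch() -> anyhow::Result<Option<String>> {
		spawn_blocking(|| {
			let entry = keyring::Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
			match entry.get_password() {
				Ok(json) => Ok(Some(json)),
				Err(keyring::Error::NoEntry) => Ok(None),
				Err(e) => Err(e.into()),
			}
		})
		.await
		.unwrap() // a panic inside the blocking task propagates here
	}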
@@ -1,250 +1,285 @@
-use std::{collections::HashSet, str::FromStr};
+use std::str::FromStr as _;

-use anyhow::Context;
+use anyhow::Context as _;
 use clap::Args;
 use colored::Colorize;
 use semver::VersionReq;

-use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
+use crate::cli::{
+	config::read_config, dep_type_to_key, AnyPackageIdentifier, VersionedPackageName,
+};
 use pesde::{
-	manifest::target::TargetKind,
+	manifest::{target::TargetKind, Alias, DependencyType},
 	names::PackageNames,
 	source::{
 		git::{specifier::GitDependencySpecifier, GitPackageSource},
+		path::{specifier::PathDependencySpecifier, PathPackageSource},
 		pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
 		specifiers::DependencySpecifiers,
-		traits::PackageSource,
-		workspace::WorkspacePackageSource,
+		traits::{PackageSource as _, RefreshOptions, ResolveOptions},
+		workspace::{specifier::WorkspaceDependencySpecifier, WorkspacePackageSource},
 		PackageSources,
 	},
-	Project, DEFAULT_INDEX_NAME,
+	Project, RefreshedSources, DEFAULT_INDEX_NAME,
 };

 #[derive(Debug, Args)]
 pub struct AddCommand {
 	/// The package name to add
 	#[arg(index = 1)]
 	name: AnyPackageIdentifier<VersionReq>,

 	/// The index in which to search for the package
 	#[arg(short, long)]
 	index: Option<String>,

 	/// The target environment of the package
 	#[arg(short, long)]
 	target: Option<TargetKind>,

 	/// The alias to use for the package
 	#[arg(short, long)]
-	alias: Option<String>,
+	alias: Option<Alias>,

 	/// Whether to add the package as a peer dependency
 	#[arg(short, long)]
 	peer: bool,

 	/// Whether to add the package as a dev dependency
 	#[arg(short, long, conflicts_with = "peer")]
 	dev: bool,
 }
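A usage sketch of these flags (package names and alias are made up):

	pesde add acme/foo --target luau --alias foo
	pesde add acme/bar --dev
	pesde add acme/baz --peer --index acme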
 impl AddCommand {
 	pub async fn run(self, project: Project) -> anyhow::Result<()> {
 		let manifest = project
 			.deser_manifest()
 			.await
 			.context("failed to read manifest")?;

 		let (source, specifier) = match &self.name {
 			AnyPackageIdentifier::PackageName(versioned) => match &versioned {
 				VersionedPackageName(PackageNames::Pesde(name), version) => {
 					let index = manifest
 						.indices
 						.get(self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME))
 						.cloned();

 					if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-						println!("{}: index {index} not found", "error".red().bold());
-						return Ok(());
+						anyhow::bail!("index {index} not found");
 					}

 					let index = match index {
 						Some(index) => index,
 						None => read_config().await?.default_index,
 					};

 					let source = PackageSources::Pesde(PesdePackageSource::new(index));
 					let specifier = DependencySpecifiers::Pesde(PesdeDependencySpecifier {
 						name: name.clone(),
 						version: version.clone().unwrap_or(VersionReq::STAR),
-						index: self.index,
+						index: self.index.unwrap_or_else(|| DEFAULT_INDEX_NAME.to_string()),
 						target: self.target,
 					});

 					(source, specifier)
 				}
 				#[cfg(feature = "wally-compat")]
 				VersionedPackageName(PackageNames::Wally(name), version) => {
 					let index = manifest
 						.wally_indices
 						.get(self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME))
 						.cloned();

 					if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
-						println!("{}: wally index {index} not found", "error".red().bold());
-						return Ok(());
+						anyhow::bail!("wally index {index} not found");
 					}

 					let index = index.context("no wally index found")?;

 					let source =
 						PackageSources::Wally(pesde::source::wally::WallyPackageSource::new(index));
 					let specifier = DependencySpecifiers::Wally(
 						pesde::source::wally::specifier::WallyDependencySpecifier {
 							name: name.clone(),
 							version: version.clone().unwrap_or(VersionReq::STAR),
-							index: self.index,
+							index: self.index.unwrap_or_else(|| DEFAULT_INDEX_NAME.to_string()),
 						},
 					);

 					(source, specifier)
 				}
 			},
 			AnyPackageIdentifier::Url((url, rev)) => (
 				PackageSources::Git(GitPackageSource::new(url.clone())),
 				DependencySpecifiers::Git(GitDependencySpecifier {
 					repo: url.clone(),
 					rev: rev.to_string(),
 					path: None,
 				}),
 			),
 			AnyPackageIdentifier::Workspace(VersionedPackageName(name, version)) => (
 				PackageSources::Workspace(WorkspacePackageSource),
-				DependencySpecifiers::Workspace(
-					pesde::source::workspace::specifier::WorkspaceDependencySpecifier {
-						name: name.clone(),
-						version: version.clone().unwrap_or_default(),
-						target: self.target,
-					},
-				),
+				DependencySpecifiers::Workspace(WorkspaceDependencySpecifier {
+					name: name.clone(),
+					version: version.clone().unwrap_or_default(),
+					target: self.target,
+				}),
 			),
+			AnyPackageIdentifier::Path(path) => (
+				PackageSources::Path(PathPackageSource),
+				DependencySpecifiers::Path(PathDependencySpecifier { path: path.clone() }),
+			),
 		};

-		source
-			.refresh(&project)
-			.await
-			.context("failed to refresh package source")?;
+		let refreshed_sources = RefreshedSources::new();
+
+		refreshed_sources
+			.refresh(
+				&source,
+				&RefreshOptions {
+					project: project.clone(),
+				},
+			)
+			.await
+			.context("failed to refresh package source")?;

-		let Some(version_id) = source
-			.resolve(
-				&specifier,
-				&project,
-				manifest.target.kind(),
-				&mut HashSet::new(),
-			)
-			.await
-			.context("failed to resolve package")?
-			.1
-			.pop_last()
-			.map(|(v_id, _)| v_id)
-		else {
-			println!("{}: no versions found for package", "error".red().bold());
-
-			return Ok(());
-		};
+		let (_, mut versions, suggestions) = source
+			.resolve(
+				&specifier,
+				&ResolveOptions {
+					project: project.clone(),
+					target: manifest.target.kind(),
+					refreshed_sources,
+					loose_target: false,
+				},
+			)
+			.await
+			.context("failed to resolve package")?;
+
+		let Some((version_id, _)) = versions.pop_last() else {
+			anyhow::bail!(
+				"no matching versions found for package{}",
+				if suggestions.is_empty() {
+					"".into()
+				} else {
+					format!(
+						". available targets: {}",
+						suggestions
+							.into_iter()
+							.map(|t| t.to_string())
+							.collect::<Vec<_>>()
+							.join(", ")
+					)
+				}
+			);
+		};

 		let project_target = manifest.target.kind();
 		let mut manifest = toml_edit::DocumentMut::from_str(
 			&project
 				.read_manifest()
 				.await
 				.context("failed to read manifest")?,
 		)
 		.context("failed to parse manifest")?;
-		let dependency_key = if self.peer {
-			"peer_dependencies"
-		} else if self.dev {
-			"dev_dependencies"
-		} else {
-			"dependencies"
-		};
+		let dependency_key = dep_type_to_key(if self.peer {
+			DependencyType::Peer
+		} else if self.dev {
+			DependencyType::Dev
+		} else {
+			DependencyType::Standard
+		});

-		let alias = self.alias.unwrap_or_else(|| match self.name.clone() {
-			AnyPackageIdentifier::PackageName(versioned) => versioned.0.as_str().1.to_string(),
-			AnyPackageIdentifier::Url((url, _)) => url
-				.path
-				.to_string()
-				.split('/')
-				.last()
-				.map(|s| s.to_string())
-				.unwrap_or(url.path.to_string()),
-			AnyPackageIdentifier::Workspace(versioned) => versioned.0.as_str().1.to_string(),
-		});
+		let alias = match self.alias {
+			Some(alias) => alias,
+			None => match &self.name {
+				AnyPackageIdentifier::PackageName(versioned) => versioned.0.name().to_string(),
+				AnyPackageIdentifier::Url((url, _)) => url
+					.path
+					.to_string()
+					.split('/')
+					.next_back()
+					.map_or_else(|| url.path.to_string(), ToString::to_string),
+				AnyPackageIdentifier::Workspace(versioned) => versioned.0.name().to_string(),
+				AnyPackageIdentifier::Path(path) => path
+					.file_name()
+					.map(|s| s.to_string_lossy().to_string())
+					.expect("path has no file name"),
+			}
+			.parse()
+			.context("auto-generated alias is invalid. use --alias to specify one")?,
+		};

-		let field = &mut manifest[dependency_key]
-			.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[&alias];
+		let field = &mut manifest[dependency_key]
+			.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[alias.as_str()];

 		match specifier {
 			DependencySpecifiers::Pesde(spec) => {
-				field["name"] = toml_edit::value(spec.name.clone().to_string());
+				field["name"] = toml_edit::value(spec.name.to_string());
 				field["version"] = toml_edit::value(format!("^{}", version_id.version()));

-				if *version_id.target() != project_target {
+				if version_id.target() != project_target {
 					field["target"] = toml_edit::value(version_id.target().to_string());
 				}

-				if let Some(index) = spec.index.filter(|i| i != DEFAULT_INDEX_NAME) {
-					field["index"] = toml_edit::value(index);
-				}
+				if spec.index != DEFAULT_INDEX_NAME {
+					field["index"] = toml_edit::value(spec.index);
+				}

-				println!(
-					"added {}@{} {} to {}",
-					spec.name,
-					version_id.version(),
-					version_id.target(),
-					dependency_key
-				);
+				println!(
+					"added {}@{} {} to {dependency_key}",
+					spec.name,
+					version_id.version(),
+					version_id.target()
+				);
 			}
 			#[cfg(feature = "wally-compat")]
 			DependencySpecifiers::Wally(spec) => {
-				field["wally"] = toml_edit::value(spec.name.clone().to_string());
-				field["version"] = toml_edit::value(format!("^{}", version_id.version()));
-
-				if let Some(index) = spec.index.filter(|i| i != DEFAULT_INDEX_NAME) {
-					field["index"] = toml_edit::value(index);
-				}
-
-				println!(
-					"added wally {}@{} to {}",
-					spec.name,
-					version_id.version(),
-					dependency_key
-				);
-			}
-			DependencySpecifiers::Git(spec) => {
-				field["repo"] = toml_edit::value(spec.repo.to_bstring().to_string());
-				field["rev"] = toml_edit::value(spec.rev.clone());
+				let name_str = spec.name.to_string();
+				let name_str = name_str.trim_start_matches("wally#");
+				field["wally"] = toml_edit::value(name_str);
+				field["version"] = toml_edit::value(format!("^{}", version_id.version()));
|
||||
|
||||
println!("added git {}#{} to {}", spec.repo, spec.rev, dependency_key);
|
||||
}
|
||||
DependencySpecifiers::Workspace(spec) => {
|
||||
field["workspace"] = toml_edit::value(spec.name.clone().to_string());
|
||||
if let AnyPackageIdentifier::Workspace(versioned) = self.name {
|
||||
if let Some(version) = versioned.1 {
|
||||
field["version"] = toml_edit::value(version.to_string());
|
||||
}
|
||||
}
|
||||
if spec.index != DEFAULT_INDEX_NAME {
|
||||
field["index"] = toml_edit::value(spec.index);
|
||||
}
|
||||
|
||||
println!(
|
||||
"added workspace {}@{} to {}",
|
||||
spec.name, spec.version, dependency_key
|
||||
);
|
||||
}
|
||||
}
|
||||
println!(
|
||||
"added wally {name_str}@{} to {dependency_key}",
|
||||
version_id.version()
|
||||
);
|
||||
}
|
||||
DependencySpecifiers::Git(spec) => {
|
||||
field["repo"] = toml_edit::value(spec.repo.to_bstring().to_string());
|
||||
field["rev"] = toml_edit::value(spec.rev.clone());
|
||||
|
||||
project
|
||||
.write_manifest(manifest.to_string())
|
||||
.await
|
||||
.context("failed to write manifest")?;
|
||||
println!("added git {}#{} to {dependency_key}", spec.repo, spec.rev);
|
||||
}
|
||||
DependencySpecifiers::Workspace(spec) => {
|
||||
field["workspace"] = toml_edit::value(spec.name.to_string());
|
||||
if let AnyPackageIdentifier::Workspace(versioned) = self.name {
|
||||
if let Some(version) = versioned.1 {
|
||||
field["version"] = toml_edit::value(version.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
println!(
|
||||
"added workspace {}@{} to {dependency_key}",
|
||||
spec.name, spec.version
|
||||
);
|
||||
}
|
||||
DependencySpecifiers::Path(spec) => {
|
||||
field["path"] = toml_edit::value(spec.path.to_string_lossy().to_string());
|
||||
|
||||
println!("added path {} to {dependency_key}", spec.path.display());
|
||||
}
|
||||
}
|
||||
|
||||
project
|
||||
.write_manifest(manifest.to_string())
|
||||
.await
|
||||
.context("failed to write manifest")?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
}
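
For reference, a minimal standalone sketch of the `toml_edit` pattern the command relies on: edits land in the right table while the user's comments and layout survive the round-trip. The manifest snippet and alias below are illustrative, not from the diff.

use std::str::FromStr as _;

fn main() -> Result<(), toml_edit::TomlError> {
	// parse an existing manifest without discarding comments or layout
	let mut doc = toml_edit::DocumentMut::from_str("# my manifest\nname = \"acme/pkg\"\n")?;

	// create the table on first use, exactly like the command above
	let field = &mut doc["dependencies"]
		.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))["alias"];
	field["name"] = toml_edit::value("acme/dep");
	field["version"] = toml_edit::value("^1.0.0");

	// the original comment and key order survive the round-trip
	println!("{doc}");
	Ok(())
}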

@@ -1,193 +1,199 @@
use crate::cli::{
	auth::{get_token_login, set_token},
	style::URL_STYLE,
};
use anyhow::Context as _;
use clap::Args;
use console::style;
use pesde::{
	source::{
		pesde::PesdePackageSource,
		traits::{PackageSource as _, RefreshOptions},
	},
	Project,
};
use serde::Deserialize;
use std::thread::spawn;
use tokio::time::sleep;
use url::Url;

#[derive(Debug, Args)]
pub struct LoginCommand {
	/// The token to use for authentication, skipping login
	#[arg(short, long)]
	token: Option<String>,
}

#[derive(Debug, Deserialize)]
struct DeviceCodeResponse {
	device_code: String,
	user_code: String,
	verification_uri: Url,
	expires_in: u64,
	interval: u64,
}

#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case", tag = "error")]
enum AccessTokenError {
	AuthorizationPending,
	SlowDown { interval: u64 },
	ExpiredToken,
	AccessDenied,
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum AccessTokenResponse {
	Success { access_token: String },

	Error(AccessTokenError),
}

impl LoginCommand {
	pub async fn authenticate_device_flow(
		&self,
		index_url: &gix::Url,
		project: &Project,
		reqwest: &reqwest::Client,
	) -> anyhow::Result<String> {
		println!("logging in into {index_url}");

		let source = PesdePackageSource::new(index_url.clone());
		source
			.refresh(&RefreshOptions {
				project: project.clone(),
			})
			.await
			.context("failed to refresh index")?;

		let config = source
			.config(project)
			.await
			.context("failed to read index config")?;
		let Some(client_id) = config.github_oauth_client_id else {
			anyhow::bail!("index not configured for Github oauth.");
		};

		let response = reqwest
			.post(Url::parse_with_params(
				"https://github.com/login/device/code",
				&[("client_id", &client_id)],
			)?)
			.send()
			.await
			.context("failed to send device code request")?
			.error_for_status()
			.context("failed to get device code response")?
			.json::<DeviceCodeResponse>()
			.await
			.context("failed to parse device code response")?;

		println!(
			"copy your one-time code: {}\npress enter to open {} in your browser...",
			style(response.user_code).bold(),
			URL_STYLE.apply_to(response.verification_uri.as_str())
		);

		spawn(move || {
			{
				let mut input = String::new();
				std::io::stdin()
					.read_line(&mut input)
					.expect("failed to read input");
			}

			match open::that(response.verification_uri.as_str()) {
				Ok(_) => (),
				Err(e) => {
					eprintln!("failed to open browser: {e}");
				}
			}
		});

		let mut time_left = response.expires_in;
		let mut interval = std::time::Duration::from_secs(response.interval);

		while time_left > 0 {
			sleep(interval).await;
			time_left = time_left.saturating_sub(interval.as_secs());

			let response = reqwest
				.post(Url::parse_with_params(
					"https://github.com/login/oauth/access_token",
					&[
						("client_id", &client_id),
						("device_code", &response.device_code),
						(
							"grant_type",
							&"urn:ietf:params:oauth:grant-type:device_code".to_string(),
						),
					],
				)?)
				.send()
				.await
				.context("failed to send access token request")?
				.error_for_status()
				.context("failed to get access token response")?
				.json::<AccessTokenResponse>()
				.await
				.context("failed to parse access token response")?;

			match response {
				AccessTokenResponse::Success { access_token } => {
					return Ok(access_token);
				}
				AccessTokenResponse::Error(e) => match e {
					AccessTokenError::AuthorizationPending => {}
					AccessTokenError::SlowDown {
						interval: new_interval,
					} => {
						interval = std::time::Duration::from_secs(new_interval);
					}
					AccessTokenError::ExpiredToken => {
						break;
					}
					AccessTokenError::AccessDenied => {
						anyhow::bail!("access denied, re-run the login command");
					}
				},
			}
		}

		anyhow::bail!("code expired, please re-run the login command");
	}

	pub async fn run(
		self,
		index_url: gix::Url,
		project: Project,
		reqwest: reqwest::Client,
	) -> anyhow::Result<()> {
		let token_given = self.token.is_some();
		let token = match self.token {
			Some(token) => token,
			None => {
				self.authenticate_device_flow(&index_url, &project, &reqwest)
					.await?
			}
		};

		let token = if token_given {
			println!("set token for {index_url}");
			token
		} else {
			let token = format!("Bearer {token}");
			println!(
				"logged in as {} for {index_url}",
				style(get_token_login(&reqwest, &token).await?).bold()
			);

			token
		};

		set_token(&index_url, Some(&token)).await?;

		Ok(())
	}
}
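
The polling loop above leans on `#[serde(untagged)]` to accept either response shape from the same endpoint; a self-contained sketch of that decoding, assuming `serde` and `serde_json` are available (the types and tokens below are examples, not pesde API):

use serde::Deserialize;

#[derive(Debug, Deserialize)]
#[serde(rename_all = "snake_case", tag = "error")]
enum PollError {
	AuthorizationPending,
	SlowDown { interval: u64 },
}

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum PollResponse {
	Success { access_token: String },
	Error(PollError),
}

fn main() {
	// GitHub keeps returning an error object until the user approves the code
	let pending: PollResponse =
		serde_json::from_str(r#"{"error":"authorization_pending"}"#).unwrap();
	let success: PollResponse =
		serde_json::from_str(r#"{"access_token":"gho_abc123"}"#).unwrap();
	println!("{pending:?} then {success:?}");
}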

@@ -2,14 +2,14 @@ use crate::cli::auth::set_token;
use clap::Args;

#[derive(Debug, Args)]
pub struct LogoutCommand;

impl LogoutCommand {
	pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
		set_token(&index_url, None).await?;

		println!("logged out of {index_url}");

		Ok(())
	}
}

@@ -1,6 +1,6 @@
use crate::cli::get_index;
use clap::{Args, Subcommand};
use pesde::Project;

mod login;
mod logout;

@@ -9,65 +9,36 @@ mod whoami;

#[derive(Debug, Args)]
pub struct AuthSubcommand {
	/// The index to use. Defaults to `default`, or the configured default index if current directory doesn't have a manifest
	#[arg(short, long)]
	pub index: Option<String>,

	#[clap(subcommand)]
	pub command: AuthCommands,
}

#[derive(Debug, Subcommand)]
pub enum AuthCommands {
	/// Sets a token for an index. Optionally gets it from GitHub
	Login(login::LoginCommand),
	/// Removes the stored token
	Logout(logout::LogoutCommand),
	/// Prints the username of the currently logged-in user
	#[clap(name = "whoami")]
	WhoAmI(whoami::WhoAmICommand),
	/// Prints the token for an index
	Token(token::TokenCommand),
}

impl AuthSubcommand {
	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let index_url = get_index(&project, self.index.as_deref()).await?;

		match self.command {
			AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,
			AuthCommands::Logout(logout) => logout.run(index_url).await,
			AuthCommands::WhoAmI(whoami) => whoami.run(index_url, reqwest).await,
			AuthCommands::Token(token) => token.run(index_url).await,
		}
	}
}

@@ -2,21 +2,18 @@ use crate::cli::auth::get_tokens;
use clap::Args;

#[derive(Debug, Args)]
pub struct TokenCommand;

impl TokenCommand {
	pub async fn run(self, index_url: gix::Url) -> anyhow::Result<()> {
		let tokens = get_tokens().await?;
		let Some(token) = tokens.0.get(&index_url) else {
			println!("not logged in into {index_url}");
			return Ok(());
		};

		println!("token for {index_url}: \"{token}\"");

		Ok(())
	}
}

@@ -1,26 +1,23 @@
use crate::cli::auth::{get_token_login, get_tokens};
use clap::Args;
use console::style;

#[derive(Debug, Args)]
pub struct WhoAmICommand;

impl WhoAmICommand {
	pub async fn run(self, index_url: gix::Url, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let tokens = get_tokens().await?;
		let Some(token) = tokens.0.get(&index_url) else {
			println!("not logged in into {index_url}");
			return Ok(());
		};

		println!(
			"logged in as {} into {index_url}",
			style(get_token_login(&reqwest, token).await?).bold()
		);

		Ok(())
	}
}

src/cli/commands/cas/mod.rs (new file)
@@ -0,0 +1,18 @@
use clap::Subcommand;
use pesde::Project;

mod prune;

#[derive(Debug, Subcommand)]
pub enum CasCommands {
	/// Removes unused files from the CAS
	Prune(prune::PruneCommand),
}

impl CasCommands {
	pub async fn run(self, project: Project) -> anyhow::Result<()> {
		match self {
			CasCommands::Prune(prune) => prune.run(project).await,
		}
	}
}

src/cli/commands/cas/prune.rs (new file)
@@ -0,0 +1,346 @@
use crate::{
	cli::{
		reporters::run_with_reporter,
		style::{INFO_STYLE, SUCCESS_STYLE},
	},
	util::remove_empty_dir,
};
use anyhow::Context as _;
use async_stream::try_stream;
use clap::Args;
use fs_err::tokio as fs;
use futures::{future::BoxFuture, FutureExt as _, Stream, StreamExt as _};
use pesde::{
	source::fs::{FsEntry, PackageFs},
	Project,
};
use std::{
	collections::{HashMap, HashSet},
	future::Future,
	path::{Path, PathBuf},
};
use tokio::task::JoinSet;

#[derive(Debug, Args)]
pub struct PruneCommand;

async fn read_dir_stream(
	dir: &Path,
) -> std::io::Result<impl Stream<Item = std::io::Result<fs::DirEntry>>> {
	let mut read_dir = fs::read_dir(dir).await?;

	Ok(try_stream! {
		while let Some(entry) = read_dir.next_entry().await? {
			yield entry;
		}
	})
}

#[allow(unreachable_code)]
async fn get_nlinks(path: &Path) -> anyhow::Result<u64> {
	#[cfg(unix)]
	{
		use std::os::unix::fs::MetadataExt as _;
		let metadata = fs::metadata(path).await?;
		return Ok(metadata.nlink());
	}
	// life if rust stabilized the nightly feature from 2019
	#[cfg(windows)]
	{
		use std::os::windows::ffi::OsStrExt as _;
		use windows::{
			core::PWSTR,
			Win32::{
				Foundation::CloseHandle,
				Storage::FileSystem::{
					CreateFileW, GetFileInformationByHandle, FILE_ATTRIBUTE_NORMAL,
					FILE_GENERIC_READ, FILE_SHARE_READ, OPEN_EXISTING,
				},
			},
		};

		let path = path.to_path_buf();
		return tokio::task::spawn_blocking(move || unsafe {
			let handle = CreateFileW(
				PWSTR(
					path.as_os_str()
						.encode_wide()
						.chain(std::iter::once(0))
						.collect::<Vec<_>>()
						.as_mut_ptr(),
				),
				FILE_GENERIC_READ.0,
				FILE_SHARE_READ,
				None,
				OPEN_EXISTING,
				FILE_ATTRIBUTE_NORMAL,
				None,
			)?;

			let mut info =
				windows::Win32::Storage::FileSystem::BY_HANDLE_FILE_INFORMATION::default();
			let res = GetFileInformationByHandle(handle, &mut info);
			CloseHandle(handle)?;
			res?;

			Ok(info.nNumberOfLinks as u64)
		})
		.await
		.unwrap();
	}
	#[cfg(not(any(unix, windows)))]
	{
		compile_error!("unsupported platform");
	}
	anyhow::bail!("unsupported platform")
}

#[derive(Debug)]
struct ExtendJoinSet<T: Send + 'static>(JoinSet<T>);

impl<T: Send + 'static, F: Future<Output = T> + Send + 'static> Extend<F> for ExtendJoinSet<T> {
	fn extend<I: IntoIterator<Item = F>>(&mut self, iter: I) {
		for item in iter {
			self.0.spawn(item);
		}
	}
}

impl<T: Send + 'static> Default for ExtendJoinSet<T> {
	fn default() -> Self {
		Self(JoinSet::new())
	}
}

async fn discover_cas_packages(cas_dir: &Path) -> anyhow::Result<HashMap<PathBuf, PackageFs>> {
	fn read_entry(
		entry: fs::DirEntry,
	) -> BoxFuture<'static, anyhow::Result<HashMap<PathBuf, PackageFs>>> {
		async move {
			if entry
				.metadata()
				.await
				.context("failed to read entry metadata")?
				.is_dir()
			{
				let mut tasks = read_dir_stream(&entry.path())
					.await
					.context("failed to read entry directory")?
					.map(|entry| async move {
						read_entry(entry.context("failed to read inner cas index dir entry")?).await
					})
					.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
					.await
					.0;

				let mut res = HashMap::new();
				while let Some(entry) = tasks.join_next().await {
					res.extend(entry.unwrap()?);
				}

				return Ok(res);
			}

			let contents = fs::read_to_string(entry.path()).await?;
			let fs = toml::from_str(&contents).context("failed to deserialize PackageFs")?;

			Ok(HashMap::from([(entry.path(), fs)]))
		}
		.boxed()
	}

	let mut tasks = ["index", "wally_index", "git_index"]
		.into_iter()
		.map(|index| cas_dir.join(index))
		.map(|index| async move {
			let mut res = HashMap::new();

			let tasks = match read_dir_stream(&index).await {
				Ok(tasks) => tasks,
				Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(res),
				Err(e) => return Err(e).context("failed to read cas index directory"),
			};

			let mut tasks = tasks
				.map(|entry| async move {
					read_entry(entry.context("failed to read cas index dir entry")?).await
				})
				.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
				.await
				.0;

			while let Some(task) = tasks.join_next().await {
				res.extend(task.unwrap()?);
			}

			Ok(res)
		})
		.collect::<JoinSet<Result<_, anyhow::Error>>>();

	let mut cas_entries = HashMap::new();

	while let Some(task) = tasks.join_next().await {
		cas_entries.extend(task.unwrap()?);
	}

	Ok(cas_entries)
}

async fn remove_hashes(cas_dir: &Path) -> anyhow::Result<HashSet<String>> {
	let mut res = HashSet::new();

	let tasks = match read_dir_stream(cas_dir).await {
		Ok(tasks) => tasks,
		Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(res),
		Err(e) => return Err(e).context("failed to read cas directory"),
	};

	let mut tasks = tasks
		.map(|cas_entry| async move {
			let cas_entry = cas_entry.context("failed to read cas dir entry")?;
			let prefix = cas_entry.file_name();
			let Some(prefix) = prefix.to_str() else {
				return Ok(None);
			};
			// we only want hash directories
			if prefix.len() != 2 {
				return Ok(None);
			}

			let mut tasks = read_dir_stream(&cas_entry.path())
				.await
				.context("failed to read hash directory")?
				.map(|hash_entry| {
					let prefix = prefix.to_string();
					async move {
						let hash_entry = hash_entry.context("failed to read hash dir entry")?;
						let hash = hash_entry.file_name();
						let hash = hash.to_str().expect("non-UTF-8 hash").to_string();
						let hash = format!("{prefix}{hash}");

						let path = hash_entry.path();
						let nlinks = get_nlinks(&path)
							.await
							.context("failed to count file usage")?;
						if nlinks > 1 {
							return Ok(None);
						}

						fs::remove_file(&path)
							.await
							.context("failed to remove unused file")?;

						if let Some(parent) = path.parent() {
							remove_empty_dir(parent).await?;
						}

						Ok(Some(hash))
					}
				})
				.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
				.await
				.0;

			let mut removed_hashes = HashSet::new();
			while let Some(removed_hash) = tasks.join_next().await {
				let Some(hash) = removed_hash.unwrap()? else {
					continue;
				};

				removed_hashes.insert(hash);
			}

			Ok(Some(removed_hashes))
		})
		.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
		.await
		.0;

	while let Some(removed_hashes) = tasks.join_next().await {
		let Some(removed_hashes) = removed_hashes.unwrap()? else {
			continue;
		};

		res.extend(removed_hashes);
	}

	Ok(res)
}

impl PruneCommand {
	pub async fn run(self, project: Project) -> anyhow::Result<()> {
		// CAS structure:
		// /2 first chars of hash/rest of hash
		// /index/hash/name/version/target
		// /wally_index/hash/name/version
		// /git_index/hash/hash
		// the last thing in the path is the serialized PackageFs

		let (cas_entries, removed_hashes) = run_with_reporter(|_, root_progress, _| async {
			let root_progress = root_progress;
			root_progress.reset();
			root_progress.set_message("discover packages");
			let cas_entries = discover_cas_packages(project.cas_dir()).await?;
			root_progress.reset();
			root_progress.set_message("remove unused files");
			let removed_hashes = remove_hashes(project.cas_dir()).await?;

			Ok::<_, anyhow::Error>((cas_entries, removed_hashes))
		})
		.await?;

		let mut tasks = JoinSet::new();

		let mut removed_packages = 0usize;

		'entry: for (path, fs) in cas_entries {
			let PackageFs::Cas(entries) = fs else {
				continue;
			};

			for entry in entries.into_values() {
				let FsEntry::File(hash) = entry else {
					continue;
				};

				if removed_hashes.contains(&hash) {
					let cas_dir = project.cas_dir().to_path_buf();
					tasks.spawn(async move {
						fs::remove_file(&path)
							.await
							.context("failed to remove unused file")?;

						// remove empty directories up to the cas dir
						let mut path = &*path;
						while let Some(parent) = path.parent() {
							if parent == cas_dir {
								break;
							}

							remove_empty_dir(parent).await?;
							path = parent;
						}

						Ok::<_, anyhow::Error>(())
					});
					removed_packages += 1;
					// if at least one file is removed, the package is not used
					continue 'entry;
				}
			}
		}

		while let Some(task) = tasks.join_next().await {
			task.unwrap()?;
		}

		println!(
			"{} removed {} unused packages and {} individual files!",
			SUCCESS_STYLE.apply_to("done!"),
			INFO_STYLE.apply_to(removed_packages),
			INFO_STYLE.apply_to(removed_hashes.len())
		);

		Ok(())
	}
}
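
The pruning pass above decides liveness from hard-link counts: every project install links a CAS file into place, so a count of 1 means only the CAS's own copy is left. A minimal Unix-only sketch of that check (the path is illustrative):

use std::os::unix::fs::MetadataExt as _;

fn is_unused(path: &std::path::Path) -> std::io::Result<bool> {
	// installs raise nlink above 1; once only the CAS copy remains it drops back to 1
	Ok(std::fs::metadata(path)?.nlink() == 1)
}

fn main() -> std::io::Result<()> {
	let unused = is_unused(std::path::Path::new("/tmp/cas/ab/cdef"))?;
	println!("prunable: {unused}");
	Ok(())
}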

@@ -3,36 +3,36 @@ use clap::Args;

#[derive(Debug, Args)]
pub struct DefaultIndexCommand {
	/// The new index URL to set as default, don't pass any value to check the current default index
	#[arg(index = 1, value_parser = crate::cli::parse_gix_url)]
	index: Option<gix::Url>,

	/// Resets the default index to the default value
	#[arg(short, long, conflicts_with = "index")]
	reset: bool,
}

impl DefaultIndexCommand {
	pub async fn run(self) -> anyhow::Result<()> {
		let mut config = read_config().await?;

		let index = if self.reset {
			Some(CliConfig::default().default_index)
		} else {
			self.index
		};

		match index {
			Some(index) => {
				config.default_index = index.clone();
				write_config(&config).await?;
				println!("default index set to: {index}");
			}
			None => {
				println!("current default index: {}", config.default_index);
			}
		}

		Ok(())
	}
}
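
`conflicts_with = "index"` above makes clap reject passing a new URL together with `--reset` at parse time; a small self-contained sketch of the same guard (the struct and strings are illustrative):

use clap::Parser;

#[derive(Debug, Parser)]
struct Cli {
	/// New default index URL; omit to print the current one
	index: Option<String>,

	/// Restore the built-in default instead
	#[arg(short, long, conflicts_with = "index")]
	reset: bool,
}

fn main() {
	// `cli --reset https://example.com` exits with a usage error,
	// while `cli --reset` and `cli https://example.com` both parse fine
	let cli = Cli::parse();
	println!("{cli:?}");
}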

@@ -4,14 +4,14 @@ mod default_index;

#[derive(Debug, Subcommand)]
pub enum ConfigCommands {
	/// Configuration for the default index
	DefaultIndex(default_index::DefaultIndexCommand),
}

impl ConfigCommands {
	pub async fn run(self) -> anyhow::Result<()> {
		match self {
			ConfigCommands::DefaultIndex(default_index) => default_index.run().await,
		}
	}
}

src/cli/commands/deprecate.rs (new file)
@@ -0,0 +1,100 @@
use crate::cli::{get_index, style::SUCCESS_STYLE};
use anyhow::Context as _;
use clap::Args;
use pesde::{
	names::PackageName,
	source::{
		pesde::PesdePackageSource,
		traits::{PackageSource as _, RefreshOptions},
	},
	Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};

#[derive(Debug, Args)]
pub struct DeprecateCommand {
	/// Whether to undeprecate the package
	#[clap(long)]
	undo: bool,

	/// The index to deprecate the package in
	#[clap(short, long)]
	index: Option<String>,

	/// The package to deprecate
	#[clap(index = 1)]
	package: PackageName,

	/// The reason for deprecating the package
	#[clap(index = 2, required_unless_present = "undo")]
	reason: Option<String>,
}

impl DeprecateCommand {
	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let index_url = get_index(&project, self.index.as_deref()).await?;
		let source = PesdePackageSource::new(index_url.clone());
		source
			.refresh(&RefreshOptions {
				project: project.clone(),
			})
			.await
			.context("failed to refresh source")?;
		let config = source
			.config(&project)
			.await
			.context("failed to get index config")?;

		let mut request = reqwest.request(
			if self.undo {
				Method::DELETE
			} else {
				Method::PUT
			},
			format!(
				"{}/v1/packages/{}/deprecate",
				config.api(),
				urlencoding::encode(&self.package.to_string()),
			),
		);

		if !self.undo {
			request = request.body(
				self.reason
					.map(|reason| reason.trim().to_string())
					.filter(|reason| !reason.is_empty())
					.context("deprecating must have a non-empty reason")?,
			);
		}

		if let Some(token) = project.auth_config().tokens().get(&index_url) {
			tracing::debug!("using token for {index_url}");
			request = request.header(AUTHORIZATION, token);
		}

		let response = request.send().await.context("failed to send request")?;

		let status = response.status();
		let text = response
			.text()
			.await
			.context("failed to get response text")?;
		let prefix = if self.undo { "un" } else { "" };
		match status {
			StatusCode::CONFLICT => {
				anyhow::bail!("version is already {prefix}deprecated");
			}
			StatusCode::FORBIDDEN => {
				anyhow::bail!("unauthorized to {prefix}deprecate under this scope");
			}
			code if !code.is_success() => {
				anyhow::bail!("failed to {prefix}deprecate package: {code} ({text})");
			}
			_ => {
				println!("{}", SUCCESS_STYLE.apply_to(text));
			}
		}

		Ok(())
	}
}

@@ -1,167 +1,217 @@
use crate::cli::{
	config::read_config,
	reporters::{self, CliReporter},
	VersionedPackageName,
};
use anyhow::Context as _;
use clap::Args;
use console::style;
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{
	download_and_link::DownloadAndLinkOptions,
	linking::generator::generate_bin_linking_module,
	manifest::target::TargetKind,
	names::{PackageName, PackageNames},
	source::{
		ids::PackageId,
		pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
		traits::{
			DownloadOptions, GetTargetOptions, PackageSource as _, RefreshOptions, ResolveOptions,
		},
		PackageSources,
	},
	Project, RefreshedSources, DEFAULT_INDEX_NAME,
};
use semver::VersionReq;
use std::{
	env::current_dir,
	ffi::OsString,
	io::{Stderr, Write as _},
	process::Command,
	sync::Arc,
};

#[derive(Debug, Args)]
pub struct ExecuteCommand {
	/// The package name, script name, or path to a script to run
	#[arg(index = 1)]
	package: VersionedPackageName<VersionReq, PackageName>,

	/// The index URL to use for the package
	#[arg(short, long, value_parser = crate::cli::parse_gix_url)]
	index: Option<gix::Url>,

	/// Arguments to pass to the script
	#[arg(index = 2, last = true)]
	args: Vec<OsString>,
}

impl ExecuteCommand {
	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let multi_progress = MultiProgress::new();
		crate::PROGRESS_BARS
			.lock()
			.unwrap()
			.replace(multi_progress.clone());

		let (tempdir, bin_path) = reporters::run_with_reporter_and_writer(
			std::io::stderr(),
			|multi_progress, root_progress, reporter| async {
				let multi_progress = multi_progress;
				let root_progress = root_progress;

				root_progress.set_message("resolve");

				let index = match self.index {
					Some(index) => Some(index),
					None => read_config().await.ok().map(|c| c.default_index),
				}
				.context("no index specified")?;
				let source = PesdePackageSource::new(index);
				let refreshed_sources = RefreshedSources::new();

				refreshed_sources
					.refresh(
						&PackageSources::Pesde(source.clone()),
						&RefreshOptions {
							project: project.clone(),
						},
					)
					.await
					.context("failed to refresh source")?;

				let version_req = self.package.1.unwrap_or(VersionReq::STAR);
				let Some((v_id, pkg_ref)) = source
					.resolve(
						&PesdeDependencySpecifier {
							name: self.package.0.clone(),
							version: version_req.clone(),
							index: DEFAULT_INDEX_NAME.into(),
							target: None,
						},
						&ResolveOptions {
							project: project.clone(),
							target: TargetKind::Luau,
							refreshed_sources: refreshed_sources.clone(),
							loose_target: true,
						},
					)
					.await
					.context("failed to resolve package")?
					.1
					.pop_last()
				else {
					anyhow::bail!(
						"no compatible package could be found for {}@{version_req}",
						self.package.0,
					);
				};

				let tmp_dir = project.cas_dir().join(".tmp");
				fs::create_dir_all(&tmp_dir)
					.await
					.context("failed to create temporary directory")?;
				let tempdir = tempfile::tempdir_in(tmp_dir)
					.context("failed to create temporary directory")?;

				let project = Project::new(
					tempdir.path(),
					None::<std::path::PathBuf>,
					project.data_dir(),
					project.cas_dir(),
					project.auth_config().clone(),
				);

				let id = Arc::new(PackageId::new(
					PackageNames::Pesde(self.package.0.clone()),
					v_id,
				));

				let fs = source
					.download(
						&pkg_ref,
						&DownloadOptions {
							project: project.clone(),
							reqwest: reqwest.clone(),
							reporter: Arc::new(()),
							id: id.clone(),
						},
					)
					.await
					.context("failed to download package")?;

				fs.write_to(tempdir.path(), project.cas_dir(), true)
					.await
					.context("failed to write package contents")?;

				let target = source
					.get_target(
						&pkg_ref,
						&GetTargetOptions {
							project: project.clone(),
							path: Arc::from(tempdir.path()),
							id: id.clone(),
						},
					)
					.await
					.context("failed to get target")?;

				let bin_path = target.bin_path().context("package has no binary export")?;

				let graph = project
					.dependency_graph(None, refreshed_sources.clone(), true)
					.await
					.context("failed to build dependency graph")?;

				multi_progress.suspend(|| {
					eprintln!("{}", style(format!("using {}", style(id).bold())).dim());
				});

				root_progress.reset();
				root_progress.set_message("download");
				root_progress.set_style(reporters::root_progress_style_with_progress());

				project
					.download_and_link(
						&Arc::new(graph),
						DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
							.reporter(reporter)
							.refreshed_sources(refreshed_sources)
							.prod(true),
					)
					.await
					.context("failed to download and link dependencies")?;

				anyhow::Ok((tempdir, bin_path.to_relative_path_buf()))
			},
		)
		.await?;

		let mut caller =
			tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
		caller
			.write_all(
				generate_bin_linking_module(
					tempdir.path(),
					&format!("{:?}", bin_path.to_path(tempdir.path())),
				)
				.as_bytes(),
			)
			.context("failed to write to tempfile")?;

		let status = Command::new("lune")
			.arg("run")
			.arg(caller.path())
			.arg("--")
			.args(&self.args)
			.current_dir(current_dir().context("failed to get current directory")?)
			.status()
			.context("failed to run script")?;

		drop(caller);
		drop(tempdir);

		std::process::exit(status.code().unwrap_or(1i32))
	}
}

@@ -1,262 +1,290 @@
use crate::cli::{
	config::read_config,
	style::{ERROR_PREFIX, INFO_STYLE, SUCCESS_STYLE},
};
use anyhow::Context as _;
use clap::Args;
use inquire::validator::Validation;
use pesde::{
	errors::ManifestReadError,
	manifest::{target::TargetKind, DependencyType},
	names::{PackageName, PackageNames},
	source::{
		git_index::GitBasedSource as _,
		ids::PackageId,
		pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
		specifiers::DependencySpecifiers,
		traits::{GetTargetOptions, PackageSource as _, RefreshOptions, ResolveOptions},
		PackageSources,
	},
	Project, RefreshedSources, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
};
use semver::VersionReq;
use std::{fmt::Display, path::Path, str::FromStr as _, sync::Arc};

#[derive(Debug, Args)]
pub struct InitCommand;

#[derive(Debug)]
enum PackageNameOrCustom {
	PackageName(PackageName),
	Custom,
}

impl Display for PackageNameOrCustom {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			PackageNameOrCustom::PackageName(n) => write!(f, "{n}"),
			PackageNameOrCustom::Custom => write!(f, "custom"),
		}
	}
}

impl InitCommand {
	pub async fn run(self, project: Project) -> anyhow::Result<()> {
		match project.read_manifest().await {
			Ok(_) => {
				anyhow::bail!("project already initialized");
			}
			Err(ManifestReadError::Io(e)) if e.kind() == std::io::ErrorKind::NotFound => {}
			Err(e) => return Err(e.into()),
		}

		let mut manifest = toml_edit::DocumentMut::new();

		manifest["name"] = toml_edit::value(
			inquire::Text::new("what is the name of the project?")
				.with_validator(|name: &str| {
					Ok(match PackageName::from_str(name) {
						Ok(_) => Validation::Valid,
						Err(e) => Validation::Invalid(e.to_string().into()),
					})
				})
				.prompt()
				.unwrap(),
		);
		manifest["version"] = toml_edit::value("0.1.0");

		let description = inquire::Text::new("what is the description of the project?")
			.with_help_message("a short description of the project. leave empty for none")
			.prompt()
			.unwrap();

		if !description.is_empty() {
			manifest["description"] = toml_edit::value(description);
		}

		let authors = inquire::Text::new("who are the authors of this project?")
			.with_help_message("comma separated list. leave empty for none")
			.prompt()
			.unwrap();

		let authors = authors
			.split(',')
			.map(str::trim)
			.filter(|s| !s.is_empty())
			.collect::<toml_edit::Array>();

		if !authors.is_empty() {
			manifest["authors"] = toml_edit::value(authors);
		}

		let repo = inquire::Text::new("what is the repository URL of this project?")
			.with_validator(|repo: &str| {
				if repo.is_empty() {
					return Ok(Validation::Valid);
				}

				Ok(match url::Url::parse(repo) {
					Ok(_) => Validation::Valid,
					Err(e) => Validation::Invalid(e.to_string().into()),
				})
			})
			.with_help_message("leave empty for none")
			.prompt()
			.unwrap();
		if !repo.is_empty() {
			manifest["repository"] = toml_edit::value(repo);
		}

		let license = inquire::Text::new("what is the license of this project?")
			.with_initial_value("MIT")
			.with_help_message("an SPDX license identifier. leave empty for none")
			.prompt()
			.unwrap();
		if !license.is_empty() {
			manifest["license"] = toml_edit::value(license);
		}

		let target_env = inquire::Select::new(
			"what environment are you targeting for your package?",
			TargetKind::VARIANTS.to_vec(),
		)
		.prompt()
		.unwrap();

		manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
			["environment"] = toml_edit::value(target_env.to_string());

		let source = PesdePackageSource::new(read_config().await?.default_index);

		manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
			[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());

		let refreshed_sources = RefreshedSources::new();

		if target_env.is_roblox()
			|| inquire::prompt_confirmation("would you like to setup Roblox compatibility scripts?")
				.unwrap()
		{
			refreshed_sources
				.refresh(
					&PackageSources::Pesde(source.clone()),
					&RefreshOptions {
						project: project.clone(),
					},
				)
				.await
				.context("failed to refresh package source")?;
			let config = source
				.config(&project)
				.await
				.context("failed to get source config")?;

			let scripts_package = if config.scripts_packages.is_empty() {
				PackageNameOrCustom::Custom
			} else {
				inquire::Select::new(
					"which scripts package do you want to use?",
					config
						.scripts_packages
						.into_iter()
						.map(PackageNameOrCustom::PackageName)
						.chain(std::iter::once(PackageNameOrCustom::Custom))
|
||||
.collect(),
|
||||
)
|
||||
.prompt()
|
||||
.unwrap()
|
||||
};
|
||||
|
||||
Ok(match PackageName::from_str(name) {
|
||||
Ok(_) => Validation::Valid,
|
||||
Err(e) => Validation::Invalid(e.to_string().into()),
|
||||
})
|
||||
})
|
||||
.with_help_message("leave empty for none")
|
||||
.prompt()
|
||||
.unwrap();
|
||||
let scripts_package = match scripts_package {
|
||||
PackageNameOrCustom::PackageName(p) => Some(p),
|
||||
PackageNameOrCustom::Custom => {
|
||||
let name = inquire::Text::new("which scripts package to use?")
|
||||
.with_validator(|name: &str| {
|
||||
if name.is_empty() {
|
||||
return Ok(Validation::Valid);
|
||||
}
|
||||
|
||||
if name.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(PackageName::from_str(&name).unwrap())
|
||||
}
|
||||
}
|
||||
};
|
||||
Ok(match PackageName::from_str(name) {
|
||||
Ok(_) => Validation::Valid,
|
||||
Err(e) => Validation::Invalid(e.to_string().into()),
|
||||
})
|
||||
})
|
||||
.with_help_message("leave empty for none")
|
||||
.prompt()
|
||||
.unwrap();
|
||||
|
||||
if let Some(scripts_pkg_name) = scripts_package {
|
||||
let (v_id, pkg_ref) = source
|
||||
.resolve(
|
||||
&PesdeDependencySpecifier {
|
||||
name: scripts_pkg_name,
|
||||
version: VersionReq::STAR,
|
||||
index: None,
|
||||
target: None,
|
||||
},
|
||||
&project,
|
||||
TargetKind::Lune,
|
||||
&mut HashSet::new(),
|
||||
)
|
||||
.await
|
||||
.context("failed to resolve scripts package")?
|
||||
.1
|
||||
.pop_last()
|
||||
.context("scripts package not found")?;
|
||||
if name.is_empty() {
|
||||
None
|
||||
} else {
|
||||
Some(PackageName::from_str(&name).unwrap())
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
|
||||
anyhow::bail!("scripts package has no scripts. this is an issue with the index")
|
||||
};
|
||||
if let Some(scripts_pkg_name) = scripts_package {
|
||||
let (v_id, pkg_ref) = source
|
||||
.resolve(
|
||||
&PesdeDependencySpecifier {
|
||||
name: scripts_pkg_name.clone(),
|
||||
version: VersionReq::STAR,
|
||||
index: DEFAULT_INDEX_NAME.into(),
|
||||
target: None,
|
||||
},
|
||||
&ResolveOptions {
|
||||
project: project.clone(),
|
||||
target: TargetKind::Luau,
|
||||
refreshed_sources,
|
||||
loose_target: true,
|
||||
},
|
||||
)
|
||||
.await
|
||||
.context("failed to resolve scripts package")?
|
||||
.1
|
||||
.pop_last()
|
||||
.context("scripts package not found")?;
|
||||
|
||||
let scripts_field = &mut manifest["scripts"]
|
||||
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||
let id = Arc::new(PackageId::new(PackageNames::Pesde(scripts_pkg_name), v_id));
|
||||
|
||||
for script_name in scripts.keys() {
|
||||
scripts_field[script_name] = toml_edit::value(format!(
|
||||
"{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
|
||||
));
|
||||
}
|
||||
let target = source
|
||||
.get_target(
|
||||
&pkg_ref,
|
||||
&GetTargetOptions {
|
||||
project: project.clone(),
|
||||
// HACK: the pesde package source doesn't use the path, so we can just use an empty one
|
||||
path: Arc::from(Path::new("")),
|
||||
id: id.clone(),
|
||||
},
|
||||
)
|
||||
.await?;
|
||||
|
||||
let dev_deps = &mut manifest["dev_dependencies"]
|
||||
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||
let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) else {
|
||||
anyhow::bail!("scripts package has no scripts.")
|
||||
};
|
||||
|
||||
let field = &mut dev_deps["scripts"];
|
||||
field["name"] = toml_edit::value(pkg_ref.name.to_string());
|
||||
field["version"] = toml_edit::value(format!("^{}", v_id.version()));
|
||||
field["target"] = toml_edit::value(v_id.target().to_string());
|
||||
let scripts_field =
|
||||
manifest["scripts"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||
|
||||
for (alias, (spec, ty)) in pkg_ref.dependencies {
|
||||
if ty != DependencyType::Peer {
|
||||
continue;
|
||||
}
|
||||
for script_name in scripts.keys() {
|
||||
scripts_field[script_name] = toml_edit::value(format!(
|
||||
"{SCRIPTS_LINK_FOLDER}/scripts/{script_name}.luau"
|
||||
));
|
||||
}
|
||||
|
||||
let DependencySpecifiers::Pesde(spec) = spec else {
|
||||
continue;
|
||||
};
|
||||
let dev_deps = manifest["dev_dependencies"]
|
||||
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
|
||||
|
||||
let field = &mut dev_deps[alias];
|
||||
field["name"] = toml_edit::value(spec.name.to_string());
|
||||
field["version"] = toml_edit::value(spec.version.to_string());
|
||||
field["target"] =
|
||||
toml_edit::value(spec.target.unwrap_or_else(|| *v_id.target()).to_string());
|
||||
}
|
||||
} else {
|
||||
println!(
|
||||
"{}",
|
||||
"no scripts package configured, this can cause issues with Roblox compatibility".red()
|
||||
let field = &mut dev_deps["scripts"];
|
||||
field["name"] = toml_edit::value(id.name().to_string());
|
||||
field["version"] = toml_edit::value(format!("^{}", id.version_id().version()));
|
||||
field["target"] = toml_edit::value(id.version_id().target().to_string());
|
||||
|
||||
for (alias, (spec, ty)) in pkg_ref.dependencies {
|
||||
if ty != DependencyType::Peer {
|
||||
continue;
|
||||
}
|
||||
|
||||
let DependencySpecifiers::Pesde(spec) = spec else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let field = &mut dev_deps[alias.as_str()];
|
||||
field["name"] = toml_edit::value(spec.name.to_string());
|
||||
field["version"] = toml_edit::value(spec.version.to_string());
|
||||
field["target"] = toml_edit::value(
|
||||
spec.target
|
||||
.unwrap_or_else(|| id.version_id().target())
|
||||
.to_string(),
|
||||
);
|
||||
}
|
||||
} else {
|
||||
println!(
|
||||
"{ERROR_PREFIX}: no scripts package configured, this can cause issues with Roblox compatibility"
|
||||
);
|
||||
if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
|
||||
return Ok(());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
project.write_manifest(manifest.to_string()).await?;
|
||||
project.write_manifest(manifest.to_string()).await?;
|
||||
|
||||
println!(
|
||||
"{}\n{}: run `install` to fully finish setup",
|
||||
"initialized project".green(),
|
||||
"tip".cyan().bold()
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
println!(
|
||||
"{}\n{}: run `install` to fully finish setup",
|
||||
SUCCESS_STYLE.apply_to("initialized project"),
|
||||
INFO_STYLE.apply_to("tip")
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
|
|
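Both sides of the init diff above lean on the same toml_edit idiom: index into the document, `or_insert` a table if the key is missing, then assign through nested indexing. A minimal, self-contained sketch of that idiom (the manifest text and keys below are made up for illustration, not pesde's real manifest):

use std::str::FromStr;

fn main() {
	// Index into the document, creating the intermediate table on demand,
	// then assign a value through nested indexing.
	let mut manifest = toml_edit::DocumentMut::from_str("name = \"demo\"\n").unwrap();

	manifest["target"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))["environment"] =
		toml_edit::value("luau");

	println!("{manifest}");
}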
src/cli/commands/install.rs
@@ -1,328 +1,55 @@
 use crate::cli::{
-	bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
+	install::{install, InstallOptions},
+	run_on_workspace_members,
 };
-use anyhow::Context;
 use clap::Args;
-use colored::{ColoredString, Colorize};
-use fs_err::tokio as fs;
-use futures::future::try_join_all;
-use pesde::{
-	download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
-	MANIFEST_FILE_NAME,
-};
-use std::{
-	collections::{BTreeSet, HashMap, HashSet},
-	sync::Arc,
-};
-use tokio::sync::Mutex;
+use pesde::Project;
+use std::num::NonZeroUsize;

 #[derive(Debug, Args, Copy, Clone)]
 pub struct InstallCommand {
 	/// Whether to error on changes in the lockfile
 	#[arg(long)]
 	locked: bool,

 	/// Whether to not install dev dependencies
 	#[arg(long)]
 	prod: bool,
-}
-
-fn bin_link_file(alias: &str) -> String {
-	let mut all_combinations = BTreeSet::new();
-
-	for a in TargetKind::VARIANTS {
-		for b in TargetKind::VARIANTS {
-			all_combinations.insert((a, b));
-		}
-	}
-
-	let all_folders = all_combinations
-		.into_iter()
-		.map(|(a, b)| format!("{:?}", a.packages_folder(b)))
-		.collect::<BTreeSet<_>>()
-		.into_iter()
-		.collect::<Vec<_>>()
-		.join(", ");
-
-	format!(
-		r#"local process = require("@lune/process")
-local fs = require("@lune/fs")
-local stdio = require("@lune/stdio")
-
-local project_root = process.cwd
-local path_components = string.split(string.gsub(project_root, "\\", "/"), "/")
-
-for i = #path_components, 1, -1 do
-	local path = table.concat(path_components, "/", 1, i)
-	if fs.isFile(path .. "/{MANIFEST_FILE_NAME}") then
-		project_root = path
-		break
-	end
-end
-
-for _, packages_folder in {{ {all_folders} }} do
-	local path = `{{project_root}}/{{packages_folder}}/{alias}.bin.luau`
-
-	if fs.isFile(path) then
-		require(path)
-		return
-	end
-end
-
-stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the right directory?" .. stdio.color("reset") .. "\n")
-"#,
-	)
-}
-
-#[cfg(feature = "patches")]
-const JOBS: u8 = 5;
-#[cfg(not(feature = "patches"))]
-const JOBS: u8 = 4;
-
-fn job(n: u8) -> ColoredString {
-	format!("[{n}/{JOBS}]").dimmed().bold()
-}
-
-#[derive(Debug, thiserror::Error)]
-#[error(transparent)]
-struct CallbackError(#[from] anyhow::Error);
+
+	/// The maximum number of concurrent network requests
+	#[arg(long, default_value = "16")]
+	network_concurrency: NonZeroUsize,
+
+	/// Whether to re-install all dependencies even if they are already installed
+	#[arg(long)]
+	force: bool,
+}

 impl InstallCommand {
 	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
-		let mut refreshed_sources = HashSet::new();
-
-		let manifest = project
-			.deser_manifest()
-			.await
-			.context("failed to read manifest")?;
-
-		let lockfile = if self.locked {
-			match up_to_date_lockfile(&project).await? {
-				None => {
-					anyhow::bail!(
-						"lockfile is out of sync, run `{} install` to update it",
-						env!("CARGO_BIN_NAME")
-					);
-				}
-				file => file,
-			}
-		} else {
-			match project.deser_lockfile().await {
-				Ok(lockfile) => {
-					if lockfile.overrides != manifest.overrides {
-						tracing::debug!("overrides are different");
-						None
-					} else if lockfile.target != manifest.target.kind() {
-						tracing::debug!("target kind is different");
-						None
-					} else {
-						Some(lockfile)
-					}
-				}
-				Err(pesde::errors::LockfileReadError::Io(e))
-					if e.kind() == std::io::ErrorKind::NotFound =>
-				{
-					None
-				}
-				Err(e) => return Err(e.into()),
-			}
-		};
-
-		println!(
-			"\n{}\n",
-			format!("[now installing {} {}]", manifest.name, manifest.target)
-				.bold()
-				.on_bright_black()
-		);
-
-		println!("{} ❌ removing current package folders", job(1));
-
-		{
-			let mut deleted_folders = HashMap::new();
-
-			for target_kind in TargetKind::VARIANTS {
-				let folder = manifest.target.kind().packages_folder(target_kind);
-				let package_dir = project.package_dir();
-
-				deleted_folders
-					.entry(folder.to_string())
-					.or_insert_with(|| async move {
-						tracing::debug!("deleting the {folder} folder");
-
-						if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
-							.await
-							.err()
-							.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
-						{
-							return Err(e).context(format!("failed to remove the {folder} folder"));
-						};
-
-						Ok(())
-					});
-			}
-
-			try_join_all(deleted_folders.into_values())
-				.await
-				.context("failed to remove package folders")?;
-		}
-
-		let old_graph = lockfile.map(|lockfile| {
-			lockfile
-				.graph
-				.into_iter()
-				.map(|(name, versions)| {
-					(
-						name,
-						versions
-							.into_iter()
-							.map(|(version, node)| (version, node.node))
-							.collect(),
-					)
-				})
-				.collect()
-		});
-
-		println!("{} 📦 building dependency graph", job(2));
-
-		let graph = project
-			.dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
-			.await
-			.context("failed to build dependency graph")?;
-		let graph = Arc::new(graph);
-
-		let bin_folder = bin_dir().await?;
-
-		let downloaded_graph = {
-			let (rx, downloaded_graph) = project
-				.download_and_link(
-					&graph,
-					&Arc::new(Mutex::new(refreshed_sources)),
-					&reqwest,
-					self.prod,
-					true,
-					|graph| {
-						let graph = graph.clone();
-
-						async move {
-							try_join_all(
-								graph
-									.values()
-									.flat_map(|versions| versions.values())
-									.filter(|node| node.target.bin_path().is_some())
-									.filter_map(|node| node.node.direct.as_ref())
-									.map(|(alias, _, _)| alias)
-									.filter(|alias| {
-										if *alias == env!("CARGO_BIN_NAME") {
-											tracing::warn!(
-												"package {alias} has the same name as the CLI, skipping bin link"
-											);
-											return false;
-										}
-
-										true
-									})
-									.map(|alias| {
-										let bin_folder = bin_folder.clone();
-										async move {
-											let bin_exec_file = bin_folder.join(alias).with_extension(std::env::consts::EXE_EXTENSION);
-
-											let impl_folder = bin_folder.join(".impl");
-											fs::create_dir_all(&impl_folder).await.context("failed to create bin link folder")?;
-
-											let bin_file = impl_folder.join(alias).with_extension("luau");
-											fs::write(&bin_file, bin_link_file(alias))
-												.await
-												.context("failed to write bin link file")?;
-
-											#[cfg(windows)]
-											{
-												fs::copy(
-													std::env::current_exe()
-														.context("failed to get current executable path")?,
-													&bin_exec_file,
-												)
-												.await
-												.context("failed to copy bin link file")?;
-											}
-
-											#[cfg(not(windows))]
-											{
-												fs::write(
-													&bin_exec_file,
-													format!(r#"#!/bin/sh
-exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
-													),
-												)
-												.await
-												.context("failed to link bin link file")?;
-											}
-
-											make_executable(&bin_exec_file).await.context("failed to make bin link file executable")?;
-
-											Ok::<_, CallbackError>(())
-										}
-									}),
-							)
-							.await
-							.map(|_| ())
-						}
-					}
-				)
-				.await
-				.context("failed to download dependencies")?;
-
-			progress_bar(
-				graph.values().map(|versions| versions.len() as u64).sum(),
-				rx,
-				format!("{} 📥 ", job(3)),
-				"downloading dependencies".to_string(),
-				"downloaded dependencies".to_string(),
-			)
-			.await?;
-
-			downloaded_graph
-				.await
-				.context("failed to download & link dependencies")?
-		};
-
-		#[cfg(feature = "patches")]
-		{
-			let rx = project
-				.apply_patches(&filter_graph(&downloaded_graph, self.prod))
-				.await
-				.context("failed to apply patches")?;
-
-			progress_bar(
-				manifest.patches.values().map(|v| v.len() as u64).sum(),
-				rx,
-				format!("{} 🩹 ", job(JOBS - 1)),
-				"applying patches".to_string(),
-				"applied patches".to_string(),
-			)
-			.await?;
-		}
-
-		println!("{} 🧹 finishing up", job(JOBS));
-
-		project
-			.write_lockfile(Lockfile {
-				name: manifest.name,
-				version: manifest.version,
-				target: manifest.target.kind(),
-				overrides: manifest.overrides,
-
-				graph: downloaded_graph,
-
-				workspace: run_on_workspace_members(&project, |project| {
-					let reqwest = reqwest.clone();
-					async move { Box::pin(self.run(project, reqwest)).await }
-				})
-				.await?,
-			})
-			.await
-			.context("failed to write lockfile")?;
+		let options = InstallOptions {
+			locked: self.locked,
+			prod: self.prod,
+			write: true,
+			network_concurrency: self.network_concurrency,
+			use_lockfile: true,
+			force: self.force,
+		};
+
+		install(&options, &project, reqwest.clone(), true).await?;
+
+		run_on_workspace_members(&project, |project| {
+			let reqwest = reqwest.clone();
+			async move {
+				install(&options, &project, reqwest, false).await?;
+				Ok(())
+			}
+		})
+		.await?;

 		Ok(())
 	}
 }
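The install.rs rewrite above collapses the monolithic `run` into a reusable `install` entry point driven by a plain options struct, called once for the root project and once per workspace member. A rough sketch of that shape, with made-up names standing in for pesde's actual types:

// Illustrative only: a flags struct passed by reference to one shared
// entry point; the flags merely toggle behaviour inside it.
struct InstallOptions {
	locked: bool,
	prod: bool,
	force: bool,
}

fn install(options: &InstallOptions, is_root: bool) {
	// ... resolve, download and link would happen here ...
	println!(
		"locked={} prod={} force={} root={is_root}",
		options.locked, options.prod, options.force
	);
}

fn main() {
	let options = InstallOptions { locked: false, prod: true, force: false };
	install(&options, true); // root project
	install(&options, false); // a workspace member
}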
src/cli/commands/list.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use std::collections::BTreeMap;

use anyhow::Context as _;
use clap::Args;

use crate::cli::{
	dep_type_to_key,
	style::{INFO_STYLE, SUCCESS_STYLE},
};
use pesde::{
	manifest::{Alias, DependencyType},
	source::specifiers::DependencySpecifiers,
	Project,
};

#[derive(Debug, Args)]
pub struct ListCommand;

impl ListCommand {
	pub async fn run(self, project: Project) -> anyhow::Result<()> {
		let manifest = project
			.deser_manifest()
			.await
			.context("failed to read manifest")?;

		let all_deps = manifest
			.all_dependencies()
			.context("failed to get all dependencies")?
			.into_iter()
			.fold(
				BTreeMap::<DependencyType, BTreeMap<Alias, DependencySpecifiers>>::new(),
				|mut acc, (alias, (spec, ty))| {
					acc.entry(ty).or_default().insert(alias, spec);
					acc
				},
			);

		for (dep_ty, deps) in all_deps {
			let dep_key = dep_type_to_key(dep_ty);
			println!("{}", INFO_STYLE.apply_to(dep_key));

			for (alias, spec) in deps {
				println!("{}: {spec}", SUCCESS_STYLE.apply_to(alias));
			}

			println!();
		}

		Ok(())
	}
}
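The core of `ListCommand` above is the fold that groups dependencies by type into nested ordered maps. A self-contained sketch of the same pattern, with plain strings standing in for pesde's alias/specifier/type types:

use std::collections::BTreeMap;

// Group (alias, (spec, type)) triples into type -> alias -> spec maps.
fn group_deps<K: Ord, A: Ord, S>(
	deps: impl IntoIterator<Item = (A, (S, K))>,
) -> BTreeMap<K, BTreeMap<A, S>> {
	deps.into_iter().fold(BTreeMap::new(), |mut acc, (alias, (spec, ty))| {
		acc.entry(ty).or_default().insert(alias, spec);
		acc
	})
}

fn main() {
	let deps = vec![
		("foo", ("acme/foo@1", "dependencies")),
		("bar", ("acme/bar@2", "dev_dependencies")),
		("baz", ("acme/baz@3", "dependencies")),
	];

	for (ty, entries) in group_deps(deps) {
		println!("{ty}");
		for (alias, spec) in entries {
			println!("{alias}: {spec}");
		}
		println!();
	}
}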
src/cli/commands/mod.rs
@@ -2,95 +2,122 @@ use pesde::Project;

 mod add;
 mod auth;
+mod cas;
 mod config;
+mod deprecate;
 mod execute;
 mod init;
 mod install;
+mod list;
 mod outdated;
 #[cfg(feature = "patches")]
 mod patch;
 #[cfg(feature = "patches")]
 mod patch_commit;
 mod publish;
+mod remove;
 mod run;
 #[cfg(feature = "version-management")]
 mod self_install;
 #[cfg(feature = "version-management")]
 mod self_upgrade;
 mod update;
+mod yank;

 #[derive(Debug, clap::Subcommand)]
 pub enum Subcommand {
 	/// Authentication-related commands
 	Auth(auth::AuthSubcommand),

 	/// Configuration-related commands
 	#[command(subcommand)]
 	Config(config::ConfigCommands),

-	/// Initializes a manifest file in the current directory
-	Init(init::InitCommand),
-
-	/// Runs a script, an executable package, or a file with Lune
-	Run(run::RunCommand),
-
-	/// Installs all dependencies for the project
-	Install(install::InstallCommand),
-
-	/// Publishes the project to the registry
-	Publish(publish::PublishCommand),
-
-	/// Installs the pesde binary and scripts
-	#[cfg(feature = "version-management")]
-	SelfInstall(self_install::SelfInstallCommand),
-
-	/// Sets up a patching environment for a package
-	#[cfg(feature = "patches")]
-	Patch(patch::PatchCommand),
-
-	/// Finalizes a patching environment for a package
-	#[cfg(feature = "patches")]
-	PatchCommit(patch_commit::PatchCommitCommand),
-
-	/// Installs the latest version of pesde
-	#[cfg(feature = "version-management")]
-	SelfUpgrade(self_upgrade::SelfUpgradeCommand),
-
-	/// Adds a dependency to the project
-	Add(add::AddCommand),
-
-	/// Updates the project's lockfile. Run install to apply changes
-	Update(update::UpdateCommand),
-
-	/// Checks for outdated dependencies
-	Outdated(outdated::OutdatedCommand),
-
-	/// Executes a binary package without needing to be run in a project directory
-	#[clap(name = "x", visible_alias = "execute", visible_alias = "exec")]
-	Execute(execute::ExecuteCommand),
+	/// CAS-related commands
+	#[command(subcommand)]
+	Cas(cas::CasCommands),
+
+	/// Initializes a manifest file in the current directory
+	Init(init::InitCommand),
+
+	/// Adds a dependency to the project
+	Add(add::AddCommand),
+
+	/// Removes a dependency from the project
+	Remove(remove::RemoveCommand),
+
+	/// Installs all dependencies for the project
+	#[clap(name = "install", visible_alias = "i")]
+	Install(install::InstallCommand),
+
+	/// Updates the project's lockfile. Run install to apply changes
+	Update(update::UpdateCommand),
+
+	/// Checks for outdated dependencies
+	Outdated(outdated::OutdatedCommand),
+
+	/// Lists all dependencies in the project
+	List(list::ListCommand),
+
+	/// Runs a script, an executable package, or a file with Lune
+	Run(run::RunCommand),
+
+	/// Publishes the project to the registry
+	Publish(publish::PublishCommand),
+
+	/// Yanks a package from the registry
+	Yank(yank::YankCommand),
+
+	/// Deprecates a package from the registry
+	Deprecate(deprecate::DeprecateCommand),
+
+	/// Sets up a patching environment for a package
+	#[cfg(feature = "patches")]
+	Patch(patch::PatchCommand),
+
+	/// Finalizes a patching environment for a package
+	#[cfg(feature = "patches")]
+	PatchCommit(patch_commit::PatchCommitCommand),
+
+	/// Executes a binary package without needing to be run in a project directory
+	#[clap(name = "x", visible_alias = "execute", visible_alias = "exec")]
+	Execute(execute::ExecuteCommand),
+
+	/// Installs the pesde binary and scripts
+	#[cfg(feature = "version-management")]
+	SelfInstall(self_install::SelfInstallCommand),
+
+	/// Installs the latest version of pesde
+	#[cfg(feature = "version-management")]
+	SelfUpgrade(self_upgrade::SelfUpgradeCommand),
 }

 impl Subcommand {
 	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
 		match self {
 			Subcommand::Auth(auth) => auth.run(project, reqwest).await,
 			Subcommand::Config(config) => config.run().await,
-			Subcommand::Init(init) => init.run(project).await,
-			Subcommand::Run(run) => run.run(project).await,
-			Subcommand::Install(install) => install.run(project, reqwest).await,
-			Subcommand::Publish(publish) => publish.run(project, reqwest).await,
-			#[cfg(feature = "version-management")]
-			Subcommand::SelfInstall(self_install) => self_install.run().await,
-			#[cfg(feature = "patches")]
-			Subcommand::Patch(patch) => patch.run(project, reqwest).await,
-			#[cfg(feature = "patches")]
-			Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
-			#[cfg(feature = "version-management")]
-			Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
-			Subcommand::Add(add) => add.run(project).await,
-			Subcommand::Update(update) => update.run(project, reqwest).await,
-			Subcommand::Outdated(outdated) => outdated.run(project).await,
-			Subcommand::Execute(execute) => execute.run(project, reqwest).await,
+			Subcommand::Cas(cas) => cas.run(project).await,
+			Subcommand::Init(init) => init.run(project).await,
+			Subcommand::Add(add) => add.run(project).await,
+			Subcommand::Remove(remove) => remove.run(project).await,
+			Subcommand::Install(install) => install.run(project, reqwest).await,
+			Subcommand::Update(update) => update.run(project, reqwest).await,
+			Subcommand::Outdated(outdated) => outdated.run(project).await,
+			Subcommand::List(list) => list.run(project).await,
+			Subcommand::Run(run) => run.run(project).await,
+			Subcommand::Publish(publish) => publish.run(project, reqwest).await,
+			Subcommand::Yank(yank) => yank.run(project, reqwest).await,
+			Subcommand::Deprecate(deprecate) => deprecate.run(project, reqwest).await,
+			#[cfg(feature = "patches")]
+			Subcommand::Patch(patch) => patch.run(project, reqwest).await,
+			#[cfg(feature = "patches")]
+			Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
+			Subcommand::Execute(execute) => execute.run(project, reqwest).await,
+			#[cfg(feature = "version-management")]
+			Subcommand::SelfInstall(self_install) => self_install.run().await,
+			#[cfg(feature = "version-management")]
+			Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
 		}
 	}
 }
src/cli/commands/outdated.rs
@@ -1,136 +1,135 @@
-use crate::cli::up_to_date_lockfile;
-use anyhow::Context;
+use crate::cli::{
+	style::{ADDED_STYLE, INFO_STYLE, REMOVED_STYLE, SUCCESS_STYLE},
+	up_to_date_lockfile,
+};
+use anyhow::Context as _;
 use clap::Args;
-use futures::future::try_join_all;
 use pesde::{
-	refresh_sources,
 	source::{
-		refs::PackageRefs,
 		specifiers::DependencySpecifiers,
-		traits::{PackageRef, PackageSource},
+		traits::{PackageRef as _, PackageSource as _, RefreshOptions, ResolveOptions},
 	},
-	Project,
+	Project, RefreshedSources,
 };
 use semver::VersionReq;
-use std::{collections::HashSet, sync::Arc};
-use tokio::sync::Mutex;
+use tokio::task::JoinSet;

 #[derive(Debug, Args)]
 pub struct OutdatedCommand {
 	/// Whether to check within version requirements
 	#[arg(short, long)]
 	strict: bool,
 }

 impl OutdatedCommand {
 	pub async fn run(self, project: Project) -> anyhow::Result<()> {
 		let graph = match up_to_date_lockfile(&project).await? {
 			Some(file) => file.graph,
 			None => {
 				anyhow::bail!(
 					"lockfile is out of sync, run `{} install` to update it",
 					env!("CARGO_BIN_NAME")
 				);
 			}
 		};

 		let manifest = project
 			.deser_manifest()
 			.await
 			.context("failed to read manifest")?;
 		let manifest_target_kind = manifest.target.kind();

-		let mut refreshed_sources = HashSet::new();
-
-		refresh_sources(
-			&project,
-			graph
-				.iter()
-				.flat_map(|(_, versions)| versions.iter())
-				.map(|(_, node)| node.node.pkg_ref.source()),
-			&mut refreshed_sources,
-		)
-		.await?;
-
-		let refreshed_sources = Arc::new(Mutex::new(refreshed_sources));
-
-		if try_join_all(
-			graph
-				.into_iter()
-				.flat_map(|(_, versions)| versions.into_iter())
-				.map(|(current_version_id, node)| {
-					let project = project.clone();
-					let refreshed_sources = refreshed_sources.clone();
-					async move {
-						let Some((alias, mut specifier, _)) = node.node.direct else {
-							return Ok::<bool, anyhow::Error>(true);
-						};
-
-						if matches!(
-							specifier,
-							DependencySpecifiers::Git(_) | DependencySpecifiers::Workspace(_)
-						) {
-							return Ok(true);
-						}
-
-						let source = node.node.pkg_ref.source();
-
-						if !self.strict {
-							match specifier {
-								DependencySpecifiers::Pesde(ref mut spec) => {
-									spec.version = VersionReq::STAR;
-								}
-								#[cfg(feature = "wally-compat")]
-								DependencySpecifiers::Wally(ref mut spec) => {
-									spec.version = VersionReq::STAR;
-								}
-								DependencySpecifiers::Git(_) => {}
-								DependencySpecifiers::Workspace(_) => {}
-							};
-						}
-
-						let version_id = source
-							.resolve(
-								&specifier,
-								&project,
-								manifest_target_kind,
-								&mut *refreshed_sources.lock().await,
-							)
-							.await
-							.context("failed to resolve package versions")?
-							.1
-							.pop_last()
-							.map(|(v_id, _)| v_id)
-							.context(format!("no versions of {specifier} found"))?;
-
-						if version_id != current_version_id {
-							println!(
-								"{} {} ({alias}) {} -> {}",
-								match node.node.pkg_ref {
-									PackageRefs::Pesde(pkg_ref) => pkg_ref.name.to_string(),
-									#[cfg(feature = "wally-compat")]
-									PackageRefs::Wally(pkg_ref) => pkg_ref.name.to_string(),
-									_ => unreachable!(),
-								},
-								current_version_id.target(),
-								current_version_id.version(),
-								version_id.version()
-							);
-
-							return Ok(false);
-						}
-
-						Ok(true)
-					}
-				}),
-		)
-		.await?
-		.into_iter()
-		.all(|b| b)
-		{
-			println!("all packages are up to date");
-		}
+		let refreshed_sources = RefreshedSources::new();
+
+		let mut tasks = graph
+			.into_iter()
+			.map(|(current_id, node)| {
+				let project = project.clone();
+				let refreshed_sources = refreshed_sources.clone();
+				async move {
+					let Some((alias, mut specifier, _)) = node.direct else {
+						return Ok::<_, anyhow::Error>(None);
+					};
+
+					if matches!(
+						specifier,
+						DependencySpecifiers::Git(_)
+							| DependencySpecifiers::Workspace(_)
+							| DependencySpecifiers::Path(_)
+					) {
+						return Ok(None);
+					}
+
+					let source = node.pkg_ref.source();
+					refreshed_sources
+						.refresh(
+							&source,
+							&RefreshOptions {
+								project: project.clone(),
+							},
+						)
+						.await?;
+
+					if !self.strict {
+						match &mut specifier {
+							DependencySpecifiers::Pesde(spec) => {
+								spec.version = VersionReq::STAR;
+							}
+							#[cfg(feature = "wally-compat")]
+							DependencySpecifiers::Wally(spec) => {
+								spec.version = VersionReq::STAR;
+							}
+							DependencySpecifiers::Git(_) => {}
+							DependencySpecifiers::Workspace(_) => {}
+							DependencySpecifiers::Path(_) => {}
+						}
+					}
+
+					let new_id = source
+						.resolve(
+							&specifier,
+							&ResolveOptions {
+								project: project.clone(),
+								target: manifest_target_kind,
+								refreshed_sources: refreshed_sources.clone(),
+								loose_target: false,
+							},
+						)
+						.await
+						.context("failed to resolve package versions")?
+						.1
+						.pop_last()
+						.map(|(v_id, _)| v_id)
+						.with_context(|| format!("no versions of {specifier} found"))?;
+
+					Ok(Some((alias, current_id, new_id))
+						.filter(|(_, current_id, new_id)| current_id.version_id() != new_id))
+				}
+			})
+			.collect::<JoinSet<_>>();
+
+		let mut all_up_to_date = true;
+
+		while let Some(task) = tasks.join_next().await {
+			let Some((alias, current_id, new_id)) = task.unwrap()? else {
+				continue;
+			};
+
+			all_up_to_date = false;
+
+			println!(
+				"{} ({}) {} → {}",
+				current_id.name(),
+				INFO_STYLE.apply_to(alias),
+				REMOVED_STYLE.apply_to(current_id.version_id()),
+				ADDED_STYLE.apply_to(new_id),
+			);
+		}
+
+		if all_up_to_date {
+			println!("{}", SUCCESS_STYLE.apply_to("all packages are up to date"));
+		}

 		Ok(())
 	}
 }
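The new outdated.rs replaces `try_join_all` over a vector of futures with a `JoinSet`, draining results as tasks finish and letting each task opt out early by returning `None`. A minimal sketch of that pattern (tokio with the `rt` and `macros` features; the numbers are placeholders for package ids):

use tokio::task::JoinSet;

#[tokio::main]
async fn main() {
	let ids = vec![1u32, 2, 3, 4, 5];

	// One task per dependency; even ids stand in for Git/workspace/path
	// dependencies that are skipped by returning None.
	let mut tasks = ids
		.into_iter()
		.map(|id| async move {
			if id % 2 == 0 {
				return None;
			}
			Some((id, id * 10)) // (current version, resolved version)
		})
		.collect::<JoinSet<_>>();

	// Drain results in completion order, ignoring the skipped entries.
	while let Some(task) = tasks.join_next().await {
		let Some((current, new)) = task.unwrap() else {
			continue;
		};
		println!("{current} -> {new}");
	}
}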
src/cli/commands/patch.rs
@@ -1,79 +1,87 @@
-use crate::cli::{up_to_date_lockfile, VersionedPackageName};
-use anyhow::Context;
+use std::sync::Arc;
+
+use crate::cli::{
+	style::{CLI_STYLE, INFO_STYLE, WARN_PREFIX},
+	up_to_date_lockfile, VersionedPackageName,
+};
+use anyhow::Context as _;
 use clap::Args;
-use colored::Colorize;
+use console::style;
 use fs_err::tokio as fs;
 use pesde::{
 	patches::setup_patches_repo,
 	source::{
 		refs::PackageRefs,
-		traits::{PackageRef, PackageSource},
+		traits::{DownloadOptions, PackageRef as _, PackageSource as _},
 	},
 	Project, MANIFEST_FILE_NAME,
 };

 #[derive(Debug, Args)]
 pub struct PatchCommand {
 	/// The package name to patch
 	#[arg(index = 1)]
 	package: VersionedPackageName,
 }

 impl PatchCommand {
 	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
 		let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
 			lockfile.graph
 		} else {
 			anyhow::bail!("outdated lockfile, please run the install command first")
 		};

-		let (name, version_id) = self.package.get(&graph)?;
-
-		let node = graph
-			.get(&name)
-			.and_then(|versions| versions.get(&version_id))
-			.context("package not found in graph")?;
-
-		if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) {
-			anyhow::bail!("cannot patch a workspace package")
-		}
-
-		let source = node.node.pkg_ref.source();
-
-		let directory = project
-			.data_dir()
-			.join("patches")
-			.join(name.escaped())
-			.join(version_id.escaped())
-			.join(chrono::Utc::now().timestamp().to_string());
+		let id = self.package.get(&graph)?;
+
+		let node = graph.get(&id).context("package not found in graph")?;
+
+		if matches!(
+			node.pkg_ref,
+			PackageRefs::Workspace(_) | PackageRefs::Path(_)
+		) {
+			anyhow::bail!("cannot patch a workspace or a path package")
+		}
+
+		let source = node.pkg_ref.source();
+
+		let directory = project
+			.data_dir()
+			.join("patches")
+			.join(id.name().escaped())
+			.join(id.version_id().escaped())
+			.join(jiff::Timestamp::now().as_second().to_string());
 		fs::create_dir_all(&directory).await?;

 		source
-			.download(&node.node.pkg_ref, &project, &reqwest)
+			.download(
+				&node.pkg_ref,
+				&DownloadOptions {
+					project: project.clone(),
+					reqwest,
+					reporter: Arc::new(()),
+					id: Arc::new(id),
+				},
+			)
 			.await?
-			.0
 			.write_to(&directory, project.cas_dir(), false)
 			.await
 			.context("failed to write package contents")?;

 		setup_patches_repo(&directory)?;

 		println!(
-			concat!(
-				"done! modify the files in the directory, then run `",
-				env!("CARGO_BIN_NAME"),
-				r#" patch-commit {}` to apply.
-{}: do not commit these changes
-{}: the {} file will be ignored when patching"#
-			),
-			directory.display().to_string().bold().cyan(),
-			"warning".yellow(),
-			"note".blue(),
-			MANIFEST_FILE_NAME
+			r"done! modify the files in the directory, then run {} {}{} to apply.
+{WARN_PREFIX}: do not commit these changes
+{}: the {MANIFEST_FILE_NAME} file will be ignored when patching",
+			CLI_STYLE.apply_to(concat!("`", env!("CARGO_BIN_NAME"), " patch-commit")),
+			style(format!("'{}'", directory.display())).cyan().bold(),
+			CLI_STYLE.apply_to("`"),
+			INFO_STYLE.apply_to("note")
 		);

 		open::that(directory)?;

 		Ok(())
 	}
 }
src/cli/commands/patch_commit.rs
@@ -1,97 +1,101 @@
 use crate::cli::up_to_date_lockfile;
-use anyhow::Context;
+use anyhow::Context as _;
 use clap::Args;
 use fs_err::tokio as fs;
-use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
-use std::{path::PathBuf, str::FromStr};
+use pesde::{
+	names::PackageNames,
+	patches::create_patch,
+	source::ids::{PackageId, VersionId},
+	Project,
+};
+use std::{path::PathBuf, str::FromStr as _};

 #[derive(Debug, Args)]
 pub struct PatchCommitCommand {
 	/// The directory containing the patch to commit
 	#[arg(index = 1)]
 	directory: PathBuf,
 }

 impl PatchCommitCommand {
 	pub async fn run(self, project: Project) -> anyhow::Result<()> {
 		let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
 			lockfile.graph
 		} else {
 			anyhow::bail!("outdated lockfile, please run the install command first")
 		};

-		let (name, version_id) = (
+		let id = PackageId::new(
 			PackageNames::from_escaped(
 				self.directory
 					.parent()
 					.context("directory has no parent")?
 					.parent()
 					.context("directory has no grandparent")?
 					.file_name()
 					.context("directory grandparent has no name")?
 					.to_str()
 					.context("directory grandparent name is not valid")?,
 			)?,
 			VersionId::from_escaped(
 				self.directory
 					.parent()
 					.context("directory has no parent")?
 					.file_name()
 					.context("directory parent has no name")?
 					.to_str()
 					.context("directory parent name is not valid")?,
 			)?,
 		);

-		graph
-			.get(&name)
-			.and_then(|versions| versions.get(&version_id))
-			.context("package not found in graph")?;
+		graph.get(&id).context("package not found in graph")?;

 		let mut manifest = toml_edit::DocumentMut::from_str(
 			&project
 				.read_manifest()
 				.await
 				.context("failed to read manifest")?,
 		)
 		.context("failed to parse manifest")?;

 		let patch = create_patch(&self.directory).context("failed to create patch")?;
-		fs::remove_dir_all(self.directory)
-			.await
-			.context("failed to remove patch directory")?;

 		let patches_dir = project.package_dir().join("patches");
 		fs::create_dir_all(&patches_dir)
 			.await
 			.context("failed to create patches directory")?;

-		let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
+		let patch_file_name = format!(
+			"{}-{}.patch",
+			id.name().escaped(),
+			id.version_id().escaped()
+		);

 		let patch_file = patches_dir.join(&patch_file_name);
-		if patch_file.exists() {
-			anyhow::bail!("patch file already exists: {}", patch_file.display());
-		}

 		fs::write(&patch_file, patch)
 			.await
 			.context("failed to write patch file")?;

 		manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
-			[&name.to_string()][&version_id.to_string()] =
+			[&id.name().to_string()][&id.version_id().to_string()] =
 			toml_edit::value(format!("patches/{patch_file_name}"));

 		project
 			.write_manifest(manifest.to_string())
 			.await
 			.context("failed to write manifest")?;

+		fs::remove_dir_all(self.directory)
+			.await
+			.context("failed to remove patch directory")?;
+
 		println!(concat!(
 			"done! run `",
 			env!("CARGO_BIN_NAME"),
 			" install` to apply the patch"
 		));

 		Ok(())
 	}
 }

src/cli/commands/publish.rs
File diff suppressed because it is too large.
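patch_commit.rs above recovers the package id purely from the patch directory's location, which the patch command lays out as `patches/<escaped name>/<escaped version>/<timestamp>`. A rough, std-only sketch of that parent-walking parse (not pesde's actual code; the escaping scheme is omitted):

use std::path::Path;

fn parse_patch_dir(dir: &Path) -> Option<(String, String)> {
	// `dir` is .../<name>/<version>/<timestamp>; walk up for version, then name.
	let version = dir.parent()?.file_name()?.to_str()?.to_owned();
	let name = dir.parent()?.parent()?.file_name()?.to_str()?.to_owned();
	Some((name, version))
}

fn main() {
	let dir = Path::new("data/patches/acme+pkg/1.2.3+luau/1700000000");
	assert_eq!(
		parse_patch_dir(dir),
		Some(("acme+pkg".to_owned(), "1.2.3+luau".to_owned()))
	);
	println!("ok");
}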
src/cli/commands/remove.rs (new file, 59 lines)
@@ -0,0 +1,59 @@
use std::str::FromStr as _;

use anyhow::Context as _;
use clap::Args;

use crate::cli::{
	dep_type_to_key,
	style::{INFO_STYLE, SUCCESS_STYLE},
};
use pesde::{
	manifest::{Alias, DependencyType},
	Project,
};

#[derive(Debug, Args)]
pub struct RemoveCommand {
	/// The alias of the package to remove
	#[arg(index = 1)]
	alias: Alias,
}

impl RemoveCommand {
	pub async fn run(self, project: Project) -> anyhow::Result<()> {
		let mut manifest = toml_edit::DocumentMut::from_str(
			&project
				.read_manifest()
				.await
				.context("failed to read manifest")?,
		)
		.context("failed to parse manifest")?;

		let Some(dep_key) = DependencyType::VARIANTS
			.iter()
			.copied()
			.map(dep_type_to_key)
			.find(|dependency_key| {
				manifest[dependency_key]
					.as_table_mut()
					.is_some_and(|table| table.remove(self.alias.as_str()).is_some())
			})
		else {
			anyhow::bail!("package under alias `{}` not found in manifest", self.alias)
		};

		project
			.write_manifest(manifest.to_string())
			.await
			.context("failed to write manifest")?;

		println!(
			"{} removed {} from {}!",
			SUCCESS_STYLE.apply_to("success!"),
			INFO_STYLE.apply_to(self.alias),
			INFO_STYLE.apply_to(dep_key)
		);

		Ok(())
	}
}
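The `find` in RemoveCommand above probes each dependency table and relies on `Table::remove` returning the removed item, so a single pass both deletes the alias and reports which table held it. The same idiom against toml_edit directly, with a made-up manifest:

use std::str::FromStr;

fn main() {
	let mut doc = toml_edit::DocumentMut::from_str(
		"[dependencies]\nfoo = \"1\"\n\n[dev_dependencies]\nbar = \"2\"\n",
	)
	.unwrap();

	// Probe each table in turn; `remove` returning Some tells us where it lived.
	let removed_from = ["dependencies", "dev_dependencies", "peer_dependencies"]
		.into_iter()
		.find(|key| {
			doc[*key]
				.as_table_mut()
				.is_some_and(|table| table.remove("bar").is_some())
		});

	assert_eq!(removed_from, Some("dev_dependencies"));
	print!("{doc}");
}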
@ -1,177 +1,217 @@
|
|||
use crate::cli::up_to_date_lockfile;
|
||||
use anyhow::Context;
|
||||
use crate::cli::{style::WARN_STYLE, up_to_date_lockfile};
|
||||
use anyhow::Context as _;
|
||||
use clap::Args;
|
||||
use futures::{StreamExt, TryStreamExt};
|
||||
use fs_err::tokio as fs;
|
||||
use futures::{StreamExt as _, TryStreamExt as _};
|
||||
use pesde::{
|
||||
linking::generator::generate_bin_linking_module,
|
||||
names::{PackageName, PackageNames},
|
||||
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
|
||||
errors::{ManifestReadError, WorkspaceMembersError},
|
||||
linking::generator::generate_bin_linking_module,
|
||||
manifest::Alias,
|
||||
names::{PackageName, PackageNames},
|
||||
source::traits::{GetTargetOptions, PackageRef as _, PackageSource as _, RefreshOptions},
|
||||
Project, MANIFEST_FILE_NAME,
|
||||
};
|
||||
use relative_path::RelativePathBuf;
|
||||
use std::{
|
||||
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::PathBuf,
|
||||
process::Command,
|
||||
collections::HashSet, env::current_dir, ffi::OsString, io::Write as _, path::Path,
|
||||
process::Command, sync::Arc,
|
||||
};
|
||||
|
||||
#[derive(Debug, Args)]
|
||||
pub struct RunCommand {
|
||||
/// The package name, script name, or path to a script to run
|
||||
#[arg(index = 1)]
|
||||
package_or_script: Option<String>,
|
||||
/// The package name, script name, or path to a script to run
|
||||
#[arg(index = 1)]
|
||||
package_or_script: Option<String>,
|
||||
|
||||
/// Arguments to pass to the script
|
||||
#[arg(index = 2, last = true)]
|
||||
args: Vec<OsString>,
|
||||
/// Arguments to pass to the script
|
||||
#[arg(index = 2, last = true)]
|
||||
args: Vec<OsString>,
|
||||
}
|
||||
|
||||
impl RunCommand {
|
||||
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||
let run = |root: PathBuf, file_path: PathBuf| {
|
||||
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
||||
caller
|
||||
.write_all(
|
||||
generate_bin_linking_module(
|
||||
root,
|
||||
&format!("{:?}", file_path.to_string_lossy()),
|
||||
)
|
||||
.as_bytes(),
|
||||
)
|
||||
.expect("failed to write to tempfile");
|
||||
pub async fn run(self, project: Project) -> anyhow::Result<()> {
|
||||
let run = |root: &Path, file_path: &Path| -> ! {
|
||||
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
|
||||
caller
|
||||
.write_all(
|
||||
generate_bin_linking_module(
|
||||
root,
|
||||
&format!("{:?}", file_path.to_string_lossy()),
|
||||
)
|
||||
.as_bytes(),
|
||||
)
|
||||
.expect("failed to write to tempfile");
|
||||
|
||||
let status = Command::new("lune")
|
||||
.arg("run")
|
||||
.arg(caller.path())
|
||||
.arg("--")
|
||||
.args(&self.args)
|
||||
.current_dir(current_dir().expect("failed to get current directory"))
|
||||
.status()
|
||||
.expect("failed to run script");
|
||||
let status = Command::new("lune")
|
||||
.arg("run")
|
||||
.arg(caller.path())
|
||||
.arg("--")
|
||||
.args(&self.args)
|
||||
.current_dir(current_dir().expect("failed to get current directory"))
|
||||
.status()
|
||||
.expect("failed to run script");
|
||||
|
||||
drop(caller);
|
||||
drop(caller);
|
||||
|
||||
std::process::exit(status.code().unwrap_or(1))
|
||||
};
|
||||
std::process::exit(status.code().unwrap_or(1i32))
|
||||
};
|
||||
|
||||
let Some(package_or_script) = self.package_or_script else {
|
||||
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
|
||||
run(
|
||||
project.package_dir().to_owned(),
|
||||
script_path.to_path(project.package_dir()),
|
||||
);
|
||||
return Ok(());
|
||||
}
|
||||
let Some(package_or_script) = self.package_or_script else {
|
||||
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
|
||||
run(
|
||||
project.package_dir(),
|
||||
&script_path.to_path(project.package_dir()),
|
||||
);
|
||||
}
|
||||
|
||||
anyhow::bail!("no package or script specified, and no bin path found in manifest")
|
||||
};
|
||||
anyhow::bail!("no package or script specified, and no bin path found in manifest")
|
||||
};
|
||||
|
||||
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
|
||||
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
|
||||
lockfile.graph
|
||||
} else {
|
||||
anyhow::bail!("outdated lockfile, please run the install command first")
|
||||
};
|
||||
let mut package_info = None;
|
||||
|
||||
let pkg_name = PackageNames::Pesde(pkg_name);
|
||||
if let Ok(pkg_name) = package_or_script.parse::<PackageName>() {
|
||||
let graph = if let Some(lockfile) = up_to_date_lockfile(&project).await? {
|
||||
lockfile.graph
|
		} else {
			anyhow::bail!("outdated lockfile, please run the install command first")
		};

		let pkg_name = PackageNames::Pesde(pkg_name);

		let mut versions = graph
			.into_iter()
			.filter(|(id, node)| *id.name() == pkg_name && node.direct.is_some())
			.collect::<Vec<_>>();

		package_info = Some(match versions.len() {
			0 => anyhow::bail!("package not found"),
			1 => versions.pop().unwrap(),
			_ => anyhow::bail!("multiple versions found. use the package's alias instead."),
		});
	} else if let Ok(alias) = package_or_script.parse::<Alias>() {
		if let Some(lockfile) = up_to_date_lockfile(&project).await? {
			package_info = lockfile
				.graph
				.into_iter()
				.find(|(_, node)| node.direct.as_ref().is_some_and(|(a, _, _)| alias == *a));
		} else {
			eprintln!(
				"{}",
				WARN_STYLE.apply_to(
					"outdated lockfile, please run the install command first to use an alias"
				)
			);
		};
	}

	if let Some((id, node)) = package_info {
		let container_folder = node.container_folder_from_project(
			&id,
			&project,
			project
				.deser_manifest()
				.await
				.context("failed to deserialize manifest")?
				.target
				.kind(),
		);

		let source = node.pkg_ref.source();
		source
			.refresh(&RefreshOptions {
				project: project.clone(),
			})
			.await
			.context("failed to refresh source")?;
		let target = source
			.get_target(
				&node.pkg_ref,
				&GetTargetOptions {
					project,
					path: Arc::from(container_folder.as_path()),
					id: Arc::new(id),
				},
			)
			.await?;

		let Some(bin_path) = target.bin_path() else {
			anyhow::bail!("package has no bin path");
		};

		let path = bin_path.to_path(&container_folder);

		run(&path, &path);
		return Ok(());
	}

	if let Ok(manifest) = project.deser_manifest().await {
		if let Some(script_path) = manifest.scripts.get(&package_or_script) {
			run(
				project.package_dir(),
				&script_path.to_path(project.package_dir()),
			);
			return Ok(());
		}
	}

	let relative_path = RelativePathBuf::from(package_or_script);
	let path = relative_path.to_path(project.package_dir());

	if fs::metadata(&path).await.is_err() {
		anyhow::bail!("path `{}` does not exist", path.display());
	}

	let workspace_dir = project
		.workspace_dir()
		.unwrap_or_else(|| project.package_dir());

	let members = match project.workspace_members(false).await {
		Ok(members) => members.boxed(),
		Err(WorkspaceMembersError::ManifestParse(ManifestReadError::Io(e)))
			if e.kind() == std::io::ErrorKind::NotFound =>
		{
			futures::stream::empty().boxed()
		}
		Err(e) => Err(e).context("failed to get workspace members")?,
	};

	let members = members
		.map(|res| {
			res.map_err(anyhow::Error::from)?
				.0
				.canonicalize()
				.map_err(anyhow::Error::from)
		})
		.chain(futures::stream::once(async {
			workspace_dir.canonicalize().map_err(Into::into)
		}))
		.try_collect::<HashSet<_>>()
		.await
		.context("failed to collect workspace members")?;

	let root = 'finder: {
		let mut current_path = path.clone();
		loop {
			let canonical_path = current_path
				.canonicalize()
				.context("failed to canonicalize parent")?;

			if members.contains(&canonical_path)
				&& fs::metadata(canonical_path.join(MANIFEST_FILE_NAME))
					.await
					.is_ok()
			{
				break 'finder canonical_path;
			}

			if let Some(parent) = current_path.parent() {
				current_path = parent.to_path_buf();
			} else {
				break;
			}
		}

		project.package_dir().to_path_buf()
	};

	run(&root, &path);
}

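The root discovery above uses a labeled block (`'finder`) so the loop can break out with a value the moment a workspace member containing a manifest is found, falling back to the project directory otherwise. A minimal self-contained sketch of the same pattern, where `has_manifest` is a hypothetical stand-in predicate for pesde's member-set and `MANIFEST_FILE_NAME` checks:

use std::path::{Path, PathBuf};

// Walk up from `start` until `has_manifest` matches; otherwise fall back
// to `default`. `break 'finder value` yields from the labeled block.
fn find_root(start: &Path, default: PathBuf, has_manifest: impl Fn(&Path) -> bool) -> PathBuf {
	'finder: {
		let mut current = start.to_path_buf();
		loop {
			if has_manifest(&current) {
				break 'finder current;
			}
			match current.parent() {
				Some(parent) => current = parent.to_path_buf(),
				None => break,
			}
		}
		default
	}
}

fn main() {
	let root = find_root(Path::new("/a/b/c"), PathBuf::from("/a"), |p| p.ends_with("b"));
	assert_eq!(root, Path::new("/a/b"));
}
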
@ -1,77 +1,78 @@

use crate::cli::{
	style::{ADDED_STYLE, CLI_STYLE},
	version::replace_pesde_bin_exe,
	HOME_DIR,
};
use anyhow::Context as _;
use clap::Args;
use console::style;
use std::env::current_exe;

#[derive(Debug, Args)]
pub struct SelfInstallCommand {
	/// Skip adding the bin directory to the PATH
	#[cfg(windows)]
	#[arg(short, long)]
	skip_add_to_path: bool,
}

impl SelfInstallCommand {
	pub async fn run(self) -> anyhow::Result<()> {
		#[cfg(windows)]
		{
			if !self.skip_add_to_path {
				use crate::cli::style::WARN_STYLE;
				use anyhow::Context as _;
				use windows_registry::CURRENT_USER;

				let bin_dir = crate::cli::bin_dir().await?;

				let env = CURRENT_USER
					.create("Environment")
					.context("failed to open Environment key")?;
				let path = env.get_string("Path").context("failed to get Path value")?;

				let bin_dir = bin_dir.to_string_lossy();

				let exists = path.split(';').any(|part| *part == bin_dir);

				if !exists {
					let new_path = format!("{path};{bin_dir}");
					env.set_string("Path", &new_path)
						.context("failed to set Path value")?;

					println!(
						"\nin order to allow proper functionality {} was added to PATH.\n\n{}",
						style(format!("`~/{HOME_DIR}/bin`")).green(),
						WARN_STYLE.apply_to("please restart your shell for this to take effect")
					);
				}
			}

			println!(
				"installed {} {}!",
				CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
				ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")),
			);
		};

		#[cfg(unix)]
		{
			println!(
				r"installed {} {}! add the following line to your shell profile in order to get the binary and binary exports as executables usable from anywhere:

{}

and then restart your shell.
",
				CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
				ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")),
				style(format!(r#"export PATH="$PATH:$HOME/{HOME_DIR}/bin""#)).green(),
			);
		};

		replace_pesde_bin_exe(&current_exe().context("failed to get current exe path")?).await?;

		Ok(())
	}
}

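The Windows branch above makes the PATH update idempotent: it splits the per-user `Path` registry value on `;` and only appends the bin directory when it is missing. A condensed sketch of that pattern, reusing only the `windows_registry` calls visible above (`add_to_user_path` is a hypothetical helper, not part of pesde):

#[cfg(windows)]
fn add_to_user_path(dir: &str) -> anyhow::Result<()> {
	use anyhow::Context as _;
	use windows_registry::CURRENT_USER;

	let env = CURRENT_USER
		.create("Environment")
		.context("failed to open Environment key")?;
	let path = env.get_string("Path").context("failed to get Path value")?;

	// `;` separates Windows PATH entries; skip the write when already present
	if !path.split(';').any(|part| part == dir) {
		env.set_string("Path", &format!("{path};{dir}"))
			.context("failed to set Path value")?;
	}

	Ok(())
}
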
@ -1,58 +1,66 @@

use crate::{
	cli::{
		config::read_config,
		style::{ADDED_STYLE, CLI_STYLE, REMOVED_STYLE},
		version::{
			current_version, find_latest_version, get_or_download_engine, replace_pesde_bin_exe,
		},
	},
	util::no_build_metadata,
};
use anyhow::Context as _;
use clap::Args;
use pesde::engine::EngineKind;
use semver::VersionReq;

#[derive(Debug, Args)]
pub struct SelfUpgradeCommand {
	/// Whether to use the version from the "upgrades available" message
	#[clap(long, default_value_t = false)]
	use_cached: bool,
}

impl SelfUpgradeCommand {
	pub async fn run(self, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let latest_version = if self.use_cached {
			read_config()
				.await?
				.last_checked_updates
				.context("no cached version found")?
				.1
		} else {
			find_latest_version(&reqwest).await?
		};

		let latest_version_no_metadata = no_build_metadata(&latest_version);

		if latest_version_no_metadata <= current_version() {
			println!("already up to date");
			return Ok(());
		}

		let display_latest_version = ADDED_STYLE.apply_to(latest_version_no_metadata);

		let confirmed = inquire::prompt_confirmation(format!(
			"are you sure you want to upgrade {} from {} to {display_latest_version}?",
			CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
			REMOVED_STYLE.apply_to(env!("CARGO_PKG_VERSION"))
		))?;
		if !confirmed {
			println!("cancelled upgrade");
			return Ok(());
		}

		let path = get_or_download_engine(
			&reqwest,
			EngineKind::Pesde,
			VersionReq::parse(&format!("={latest_version}")).unwrap(),
		)
		.await?;
		replace_pesde_bin_exe(&path).await?;

		println!("upgraded to version {display_latest_version}!");

		Ok(())
	}
}

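The upgrade path above pins the engine download to the reported release by building an exact semver requirement from the latest version. A small illustration of how `=` requirements behave (the version numbers here are made up for the example):

use semver::{Version, VersionReq};

fn main() {
	let latest: Version = "0.6.1".parse().unwrap();
	let req = VersionReq::parse(&format!("={latest}")).unwrap();

	// only the exact pinned version matches
	assert!(req.matches(&latest));
	assert!(!req.matches(&"0.6.2".parse().unwrap()));
}
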
@ -1,85 +1,48 @@

use crate::cli::{
	install::{install, InstallOptions},
	run_on_workspace_members,
};
use clap::Args;
use pesde::Project;
use std::num::NonZeroUsize;

#[derive(Debug, Args, Copy, Clone)]
pub struct UpdateCommand {
	/// Update the dependencies but don't install them
	#[arg(long)]
	no_install: bool,

	/// The maximum number of concurrent network requests
	#[arg(long, default_value = "16")]
	network_concurrency: NonZeroUsize,

	/// Whether to re-install all dependencies even if they are already installed
	#[arg(long)]
	force: bool,
}

impl UpdateCommand {
	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let options = InstallOptions {
			locked: false,
			prod: false,
			write: !self.no_install,
			network_concurrency: self.network_concurrency,
			use_lockfile: false,
			force: self.force,
		};

		install(&options, &project, reqwest.clone(), true).await?;

		run_on_workspace_members(&project, |project| {
			let reqwest = reqwest.clone();
			async move {
				install(&options, &project, reqwest, false).await?;
				Ok(())
			}
		})
		.await?;

		Ok(())
	}
}

src/cli/commands/yank.rs (new file, 148 lines)

@ -0,0 +1,148 @@

use crate::cli::{get_index, style::SUCCESS_STYLE};
use anyhow::Context as _;
use clap::Args;
use pesde::{
	manifest::target::TargetKind,
	names::PackageName,
	source::{
		pesde::PesdePackageSource,
		traits::{PackageSource as _, RefreshOptions},
	},
	Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};
use semver::Version;
use std::{fmt::Display, str::FromStr};

#[derive(Debug, Clone)]
enum TargetKindOrAll {
	All,
	Specific(TargetKind),
}

impl Display for TargetKindOrAll {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			TargetKindOrAll::All => write!(f, "all"),
			TargetKindOrAll::Specific(kind) => write!(f, "{kind}"),
		}
	}
}

impl FromStr for TargetKindOrAll {
	type Err = anyhow::Error;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		if s.eq_ignore_ascii_case("all") {
			return Ok(TargetKindOrAll::All);
		}

		s.parse()
			.map(TargetKindOrAll::Specific)
			.context("failed to parse target kind")
	}
}

#[derive(Debug, Clone)]
struct YankId(PackageName, Version, TargetKindOrAll);

impl FromStr for YankId {
	type Err = anyhow::Error;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		let (package, version) = s
			.split_once('@')
			.context("package is not in format of `scope/name@version target`")?;
		let target = match version.split(' ').nth(1) {
			Some(target) => target
				.parse()
				.context("package is not in format of `scope/name@version target`")?,
			None => TargetKindOrAll::All,
		};

		Ok(YankId(
			package.parse().context("failed to parse package name")?,
			version.parse().context("failed to parse version")?,
			target,
		))
	}
}

#[derive(Debug, Args)]
pub struct YankCommand {
	/// Whether to unyank the package
	#[clap(long)]
	undo: bool,

	/// The index to yank the package from
	#[clap(short, long)]
	index: Option<String>,

	/// The package to yank
	#[clap(index = 1)]
	package: YankId,
}

impl YankCommand {
	pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
		let YankId(package, version, target) = self.package;

		let index_url = get_index(&project, self.index.as_deref()).await?;
		let source = PesdePackageSource::new(index_url.clone());
		source
			.refresh(&RefreshOptions {
				project: project.clone(),
			})
			.await
			.context("failed to refresh source")?;
		let config = source
			.config(&project)
			.await
			.context("failed to get index config")?;

		let mut request = reqwest.request(
			if self.undo {
				Method::DELETE
			} else {
				Method::PUT
			},
			format!(
				"{}/v1/packages/{}/{}/{}/yank",
				config.api(),
				urlencoding::encode(&package.to_string()),
				urlencoding::encode(&version.to_string()),
				urlencoding::encode(&target.to_string()),
			),
		);

		if let Some(token) = project.auth_config().tokens().get(&index_url) {
			tracing::debug!("using token for {index_url}");
			request = request.header(AUTHORIZATION, token);
		}

		let response = request.send().await.context("failed to send request")?;

		let status = response.status();
		let text = response
			.text()
			.await
			.context("failed to get response text")?;
		let prefix = if self.undo { "un" } else { "" };
		match status {
			StatusCode::CONFLICT => {
				anyhow::bail!("version is already {prefix}yanked");
			}
			StatusCode::FORBIDDEN => {
				anyhow::bail!("unauthorized to {prefix}yank under this scope");
			}
			code if !code.is_success() => {
				anyhow::bail!("failed to {prefix}yank package: {code} ({text})");
			}
			_ => {
				println!("{}", SUCCESS_STYLE.apply_to(text));
			}
		}

		Ok(())
	}
}

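For reference, `YankId::from_str` above accepts `scope/name@version` with an optional space-separated target, defaulting to all targets when the target is omitted. A stand-alone sketch of the same grammar, using plain string slices in place of the real `PackageName`/`Version`/`TargetKindOrAll` types and a made-up package name:

fn parse_yank_id(s: &str) -> Option<(&str, &str, &str)> {
	let (package, rest) = s.split_once('@')?;
	let mut parts = rest.split(' ');
	let version = parts.next()?;
	// a missing second field means "all targets"
	let target = parts.next().unwrap_or("all");
	Some((package, version, target))
}

fn main() {
	assert_eq!(
		parse_yank_id("acme/utils@1.2.3 luau"),
		Some(("acme/utils", "1.2.3", "luau"))
	);
	assert_eq!(
		parse_yank_id("acme/utils@1.2.3"),
		Some(("acme/utils", "1.2.3", "all"))
	);
}
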
@ -1,5 +1,5 @@

use crate::cli::{auth::Tokens, home_dir};
use anyhow::Context as _;
use fs_err::tokio as fs;
use serde::{Deserialize, Serialize};
use tracing::instrument;

@ -7,51 +7,51 @@ use tracing::instrument;

#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(default)]
pub struct CliConfig {
	#[serde(
		serialize_with = "crate::util::serialize_gix_url",
		deserialize_with = "crate::util::deserialize_gix_url"
	)]
	pub default_index: gix::Url,

	pub tokens: Tokens,

	#[serde(default, skip_serializing_if = "Option::is_none")]
	pub last_checked_updates: Option<(jiff::Timestamp, semver::Version)>,
}

impl Default for CliConfig {
	fn default() -> Self {
		Self {
			default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),

			tokens: Tokens::default(),

			last_checked_updates: None,
		}
	}
}

#[instrument(level = "trace")]
pub async fn read_config() -> anyhow::Result<CliConfig> {
	let config_string = match fs::read_to_string(home_dir()?.join("config.toml")).await {
		Ok(config_string) => config_string,
		Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
			return Ok(CliConfig::default());
		}
		Err(e) => return Err(e).context("failed to read config file"),
	};

	let config = toml::from_str(&config_string).context("failed to parse config file")?;

	Ok(config)
}

#[instrument(level = "trace")]
pub async fn write_config(config: &CliConfig) -> anyhow::Result<()> {
	let config_string = toml::to_string(config).context("failed to serialize config")?;
	fs::write(home_dir()?.join("config.toml"), config_string)
		.await
		.context("failed to write config file")?;

	Ok(())
}

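`CliConfig` above relies on `#[serde(default)]` so a `config.toml` missing any field still deserializes, with absent fields falling back to the `Default` impl. A minimal sketch of that behaviour with a stand-in struct (`Example` is hypothetical, not the real type):

use serde::Deserialize;

#[derive(Debug, Default, Deserialize)]
#[serde(default)]
struct Example {
	default_index: String,
	last_checked_updates: Option<String>,
}

fn main() {
	// `last_checked_updates` is absent from the TOML, so it falls back to `None`
	let cfg: Example = toml::from_str(r#"default_index = "https://example.com""#).unwrap();
	assert_eq!(cfg.default_index, "https://example.com");
	assert!(cfg.last_checked_updates.is_none());
}
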
@ -1,21 +1,21 @@

use std::path::Path;

pub async fn make_executable<P: AsRef<Path>>(_path: P) -> anyhow::Result<()> {
	#[cfg(unix)]
	{
		use anyhow::Context as _;
		use fs_err::tokio as fs;
		use std::os::unix::fs::PermissionsExt as _;

		let mut perms = fs::metadata(&_path)
			.await
			.context("failed to get bin link file metadata")?
			.permissions();
		perms.set_mode(perms.mode() | 0o111);
		fs::set_permissions(&_path, perms)
			.await
			.context("failed to set bin link file permissions")?;
	}

	Ok(())
}

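`make_executable` above ORs the three execute bits into the existing mode rather than overwriting it, so the file's read/write permissions are preserved. The mask arithmetic in isolation:

fn main() {
	let mode = 0o644; // rw-r--r--
	assert_eq!(mode | 0o111, 0o755); // rwxr-xr-x, other bits untouched
}
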
src/cli/install.rs (new file, 574 lines)

@ -0,0 +1,574 @@

use super::files::make_executable;
use crate::cli::{
	bin_dir, dep_type_to_key,
	reporters::{self, CliReporter},
	resolve_overrides, run_on_workspace_members,
	style::{ADDED_STYLE, REMOVED_STYLE, WARN_PREFIX},
	up_to_date_lockfile,
};
use anyhow::Context as _;
use console::style;
use fs_err::tokio as fs;
use pesde::{
	download_and_link::{DownloadAndLinkHooks, DownloadAndLinkOptions},
	engine::EngineKind,
	graph::{DependencyGraph, DependencyGraphWithTarget},
	lockfile::Lockfile,
	manifest::{DependencyType, Manifest},
	names::PackageNames,
	source::{
		pesde::PesdePackageSource,
		refs::PackageRefs,
		traits::{PackageRef as _, RefreshOptions},
		PackageSources,
	},
	version_matches, Project, RefreshedSources, MANIFEST_FILE_NAME,
};
use std::{
	collections::{BTreeMap, BTreeSet, HashMap, HashSet},
	num::NonZeroUsize,
	path::Path,
	sync::Arc,
	time::Instant,
};
use tokio::task::JoinSet;

pub struct InstallHooks {
	pub bin_folder: std::path::PathBuf,
}

#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct InstallHooksError(#[from] anyhow::Error);

impl DownloadAndLinkHooks for InstallHooks {
	type Error = InstallHooksError;

	async fn on_bins_downloaded(
		&self,
		graph: &DependencyGraphWithTarget,
	) -> Result<(), Self::Error> {
		let binary_packages = graph
			.iter()
			.filter_map(|(id, node)| node.target.bin_path().is_some().then_some(id))
			.collect::<HashSet<_>>();

		let aliases = graph
			.iter()
			.flat_map(|(_, node)| node.node.dependencies.iter())
			.filter_map(|(id, alias)| binary_packages.contains(id).then_some(alias.as_str()))
			.chain(
				graph
					.iter()
					.filter_map(|(_, node)| node.node.direct.as_ref())
					.map(|(alias, _, _)| alias.as_str()),
			)
			.collect::<HashSet<_>>();

		let curr_exe: Arc<Path> = std::env::current_exe()
			.context("failed to get current executable path")?
			.as_path()
			.into();

		let mut tasks = aliases
			.into_iter()
			.map(|alias| {
				let bin_exec_file = self
					.bin_folder
					.join(alias)
					.with_extension(std::env::consts::EXE_EXTENSION);
				let curr_exe = curr_exe.clone();

				async move {
					// TODO: remove this in a major release
					#[cfg(unix)]
					if fs::metadata(&bin_exec_file)
						.await
						.is_ok_and(|m| !m.is_symlink())
					{
						fs::remove_file(&bin_exec_file)
							.await
							.context("failed to remove outdated bin linker")?;
					}

					#[cfg(windows)]
					let res = fs::symlink_file(curr_exe, &bin_exec_file).await;
					#[cfg(unix)]
					let res = fs::symlink(curr_exe, &bin_exec_file).await;

					match res {
						Ok(_) => {}
						Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {}
						e => e.context("failed to symlink bin link file")?,
					}

					make_executable(&bin_exec_file)
						.await
						.context("failed to make bin link file executable")?;

					Ok::<_, anyhow::Error>(())
				}
			})
			.collect::<JoinSet<_>>();

		while let Some(task) = tasks.join_next().await {
			task.unwrap()?;
		}

		Ok(())
	}
}

#[derive(Debug, Clone, Copy)]
pub struct InstallOptions {
	pub locked: bool,
	pub prod: bool,
	pub write: bool,
	pub use_lockfile: bool,
	pub network_concurrency: NonZeroUsize,
	pub force: bool,
}

pub async fn install(
	options: &InstallOptions,
	project: &Project,
	reqwest: reqwest::Client,
	is_root: bool,
) -> anyhow::Result<()> {
	let start = Instant::now();

	let refreshed_sources = RefreshedSources::new();

	let manifest = project
		.deser_manifest()
		.await
		.context("failed to read manifest")?;

	let mut has_irrecoverable_changes = false;

	let lockfile = if options.locked {
		match up_to_date_lockfile(project).await? {
			None => {
				anyhow::bail!(
					"lockfile is out of sync, run `{} install` to update it",
					env!("CARGO_BIN_NAME")
				);
			}
			file => file,
		}
	} else {
		match project.deser_lockfile().await {
			Ok(lockfile) => {
				if lockfile.overrides != resolve_overrides(&manifest)? {
					tracing::debug!("overrides are different");
					has_irrecoverable_changes = true;
					None
				} else if lockfile.target != manifest.target.kind() {
					tracing::debug!("target kind is different");
					has_irrecoverable_changes = true;
					None
				} else {
					Some(lockfile)
				}
			}
			Err(pesde::errors::LockfileReadError::Io(e))
				if e.kind() == std::io::ErrorKind::NotFound =>
			{
				None
			}
			Err(e) => return Err(e.into()),
		}
	};

	let overrides = resolve_overrides(&manifest)?;

	let (new_lockfile, old_graph) =
		reporters::run_with_reporter(|multi, root_progress, reporter| async {
			let multi = multi;
			let root_progress = root_progress;

			root_progress.set_prefix(format!("{} {}: ", manifest.name, manifest.target));
			#[cfg(feature = "version-management")]
			{
				root_progress.reset();
				root_progress.set_message("update engine linkers");

				let mut tasks = manifest
					.engines
					.keys()
					.map(|engine| crate::cli::version::make_linker_if_needed(*engine))
					.collect::<JoinSet<_>>();

				while let Some(task) = tasks.join_next().await {
					task.unwrap()?;
				}
			}

			root_progress.reset();
			root_progress.set_message("resolve");

			let old_graph = lockfile.map(|lockfile| lockfile.graph);

			let graph = project
				.dependency_graph(
					old_graph.as_ref().filter(|_| options.use_lockfile),
					refreshed_sources.clone(),
					false,
				)
				.await
				.context("failed to build dependency graph")?;

			let mut tasks = graph
				.iter()
				.filter_map(|(id, node)| {
					let PackageSources::Pesde(source) = node.pkg_ref.source() else {
						return None;
					};
					#[allow(irrefutable_let_patterns)]
					let PackageNames::Pesde(name) = id.name().clone() else {
						panic!("unexpected package name");
					};
					let project = project.clone();
					let refreshed_sources = refreshed_sources.clone();

					Some(async move {
						refreshed_sources
							.refresh(
								&PackageSources::Pesde(source.clone()),
								&RefreshOptions {
									project: project.clone(),
								},
							)
							.await
							.context("failed to refresh source")?;

						let file = source.read_index_file(&name, &project)
							.await
							.context("failed to read package index file")?
							.context("package not found in index")?;

						Ok::<_, anyhow::Error>(if file.meta.deprecated.is_empty() {
							None
						} else {
							Some((name, file.meta.deprecated))
						})
					})
				})
				.collect::<JoinSet<_>>();

			while let Some(task) = tasks.join_next().await {
				let Some((name, reason)) = task.unwrap()? else {
					continue;
				};

				multi.suspend(|| {
					println!("{WARN_PREFIX}: package {name} is deprecated: {reason}");
				});
			}

			let graph = Arc::new(graph);

			if options.write {
				root_progress.reset();
				root_progress.set_length(0);
				root_progress.set_message("download");
				root_progress.set_style(reporters::root_progress_style_with_progress());

				let hooks = InstallHooks {
					bin_folder: bin_dir().await?,
				};

				#[allow(unused_variables)]
				let downloaded_graph = project
					.download_and_link(
						&graph,
						DownloadAndLinkOptions::<CliReporter, InstallHooks>::new(reqwest.clone())
							.reporter(reporter)
							.hooks(hooks)
							.refreshed_sources(refreshed_sources.clone())
							.prod(options.prod)
							.network_concurrency(options.network_concurrency)
							.force(options.force || has_irrecoverable_changes),
					)
					.await
					.context("failed to download and link dependencies")?;

				#[cfg(feature = "version-management")]
				{
					let mut tasks = manifest
						.engines
						.into_iter()
						.map(|(engine, req)| async move {
							Ok::<_, anyhow::Error>(
								crate::cli::version::get_installed_versions(engine)
									.await?
									.into_iter()
									.filter(|version| version_matches(&req, version))
									.next_back()
									.map(|version| (engine, version)),
							)
						})
						.collect::<JoinSet<_>>();

					let mut resolved_engine_versions = HashMap::new();
					while let Some(task) = tasks.join_next().await {
						let Some((engine, version)) = task.unwrap()? else {
							continue;
						};
						resolved_engine_versions.insert(engine, version);
					}

					let manifest_target_kind = manifest.target.kind();
					let mut tasks = downloaded_graph.iter()
						.map(|(id, node)| {
							let id = id.clone();
							let node = node.clone();
							let project = project.clone();
							let refreshed_sources = refreshed_sources.clone();

							async move {
								let engines = match &node.node.pkg_ref {
									PackageRefs::Pesde(pkg_ref) => {
										let source = PesdePackageSource::new(pkg_ref.index_url.clone());
										refreshed_sources
											.refresh(
												&PackageSources::Pesde(source.clone()),
												&RefreshOptions {
													project: project.clone(),
												},
											)
											.await
											.context("failed to refresh source")?;

										#[allow(irrefutable_let_patterns)]
										let PackageNames::Pesde(name) = id.name() else {
											panic!("unexpected package name");
										};

										let mut file = source.read_index_file(name, &project)
											.await
											.context("failed to read package index file")?
											.context("package not found in index")?;

										file
											.entries
											.remove(id.version_id())
											.context("package version not found in index")?
											.engines
									}
									#[cfg(feature = "wally-compat")]
									PackageRefs::Wally(_) => Default::default(),
									_ => {
										let path = node.node.container_folder_from_project(
											&id,
											&project,
											manifest_target_kind,
										);

										match fs::read_to_string(path.join(MANIFEST_FILE_NAME)).await {
											Ok(manifest) => match toml::from_str::<Manifest>(&manifest) {
												Ok(manifest) => manifest.engines,
												Err(e) => return Err(e).context("failed to read package manifest"),
											},
											Err(e) if e.kind() == std::io::ErrorKind::NotFound => Default::default(),
											Err(e) => return Err(e).context("failed to read package manifest"),
										}
									}
								};

								Ok((id, engines))
							}
						})
						.collect::<JoinSet<_>>();

					while let Some(task) = tasks.join_next().await {
						let (id, required_engines) = task.unwrap()?;

						for (engine, req) in required_engines {
							if engine == EngineKind::Pesde {
								continue;
							}

							let Some(version) = resolved_engine_versions.get(&engine) else {
								tracing::debug!("package {id} requires {engine} {req}, but it is not installed");
								continue;
							};

							if !version_matches(&req, version) {
								multi.suspend(|| {
									println!("{WARN_PREFIX}: package {id} requires {engine} {req}, but {version} is installed");
								});
							}
						}
					}
				}
			}

			root_progress.reset();
			root_progress.set_message("finish");

			let new_lockfile = Lockfile {
				name: manifest.name.clone(),
				version: manifest.version,
				target: manifest.target.kind(),
				overrides,

				graph: Arc::into_inner(graph).unwrap(),

				workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
			};

			project
				.write_lockfile(&new_lockfile)
				.await
				.context("failed to write lockfile")?;

			anyhow::Ok((new_lockfile, old_graph.unwrap_or_default()))
		})
		.await?;

	let elapsed = start.elapsed();

	if is_root {
		println!();
	}

	print_package_diff(
		&format!("{} {}:", manifest.name, manifest.target),
		&old_graph,
		&new_lockfile.graph,
	);

	println!("done in {:.2}s", elapsed.as_secs_f64());
	println!();

	Ok(())
}

/// Prints the difference between two graphs.
pub fn print_package_diff(prefix: &str, old_graph: &DependencyGraph, new_graph: &DependencyGraph) {
	let mut old_pkg_map = BTreeMap::new();
	let mut old_direct_pkg_map = BTreeMap::new();
	let mut new_pkg_map = BTreeMap::new();
	let mut new_direct_pkg_map = BTreeMap::new();

	for (id, node) in old_graph {
		old_pkg_map.insert(id, node);
		if node.direct.is_some() {
			old_direct_pkg_map.insert(id, node);
		}
	}

	for (id, node) in new_graph {
		new_pkg_map.insert(id, node);
		if node.direct.is_some() {
			new_direct_pkg_map.insert(id, node);
		}
	}

	let added_pkgs = new_pkg_map
		.iter()
		.filter(|(key, _)| !old_pkg_map.contains_key(*key))
		.map(|(key, &node)| (key, node))
		.collect::<Vec<_>>();
	let removed_pkgs = old_pkg_map
		.iter()
		.filter(|(key, _)| !new_pkg_map.contains_key(*key))
		.map(|(key, &node)| (key, node))
		.collect::<Vec<_>>();
	let added_direct_pkgs = new_direct_pkg_map
		.iter()
		.filter(|(key, _)| !old_direct_pkg_map.contains_key(*key))
		.map(|(key, &node)| (key, node))
		.collect::<Vec<_>>();
	let removed_direct_pkgs = old_direct_pkg_map
		.iter()
		.filter(|(key, _)| !new_direct_pkg_map.contains_key(*key))
		.map(|(key, &node)| (key, node))
		.collect::<Vec<_>>();

	let prefix = style(prefix).bold();

	let no_changes = added_pkgs.is_empty()
		&& removed_pkgs.is_empty()
		&& added_direct_pkgs.is_empty()
		&& removed_direct_pkgs.is_empty();

	if no_changes {
		println!("{prefix} already up to date");
	} else {
		let mut change_signs = [
			(!added_pkgs.is_empty()).then(|| {
				ADDED_STYLE
					.apply_to(format!("+{}", added_pkgs.len()))
					.to_string()
			}),
			(!removed_pkgs.is_empty()).then(|| {
				REMOVED_STYLE
					.apply_to(format!("-{}", removed_pkgs.len()))
					.to_string()
			}),
		]
		.into_iter()
		.flatten()
		.collect::<Vec<_>>()
		.join(" ");

		let changes_empty = change_signs.is_empty();
		if changes_empty {
			change_signs = style("(no changes)").dim().to_string();
		}

		println!("{prefix} {change_signs}");

		if !changes_empty {
			println!(
				"{}{}",
				ADDED_STYLE.apply_to("+".repeat(added_pkgs.len())),
				REMOVED_STYLE.apply_to("-".repeat(removed_pkgs.len()))
			);
		}

		let dependency_groups = added_direct_pkgs
			.iter()
			.map(|(key, node)| (true, key, node))
			.chain(
				removed_direct_pkgs
					.iter()
					.map(|(key, node)| (false, key, node)),
			)
			.filter_map(|(added, key, node)| {
				node.direct.as_ref().map(|(_, _, ty)| (added, key, ty))
			})
			.fold(
				BTreeMap::<DependencyType, BTreeSet<_>>::new(),
				|mut map, (added, key, &ty)| {
					map.entry(ty).or_default().insert((key, added));
					map
				},
			);

		for (ty, set) in dependency_groups {
			println!();
			println!(
				"{}",
				style(format!("{}:", dep_type_to_key(ty))).yellow().bold()
			);

			for (id, added) in set {
				println!(
					"{} {} {}",
					if added {
						ADDED_STYLE.apply_to("+")
					} else {
						REMOVED_STYLE.apply_to("-")
					},
					id.name(),
					style(id.version_id()).dim()
				);
			}
		}

		println!();
	}
}

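`print_package_diff` above boils down to two set differences over the package maps: IDs present only in the new graph are additions, and IDs present only in the old graph are removals. A reduced sketch of that computation, with string keys standing in for the real package IDs:

use std::collections::BTreeMap;

fn main() {
	let old: BTreeMap<&str, ()> = [("a", ()), ("b", ())].into();
	let new: BTreeMap<&str, ()> = [("b", ()), ("c", ())].into();

	let added: Vec<&str> = new.keys().copied().filter(|k| !old.contains_key(k)).collect();
	let removed: Vec<&str> = old.keys().copied().filter(|k| !new.contains_key(k)).collect();

	assert_eq!(added, ["c"]);
	assert_eq!(removed, ["a"]);
}
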
496
src/cli/mod.rs
496
src/cli/mod.rs
|
@ -1,21 +1,30 @@
|
|||
use anyhow::Context;
|
||||
use colored::Colorize;
|
||||
use crate::cli::{
|
||||
config::read_config,
|
||||
style::{ERROR_STYLE, INFO_STYLE, WARN_STYLE},
|
||||
};
|
||||
use anyhow::Context as _;
|
||||
use fs_err::tokio as fs;
|
||||
use futures::StreamExt;
|
||||
use futures::StreamExt as _;
|
||||
use pesde::{
|
||||
lockfile::Lockfile,
|
||||
manifest::target::TargetKind,
|
||||
names::{PackageName, PackageNames},
|
||||
source::{version_id::VersionId, workspace::specifier::VersionTypeOrReq},
|
||||
Project,
|
||||
errors::ManifestReadError,
|
||||
lockfile::Lockfile,
|
||||
manifest::{
|
||||
overrides::{OverrideKey, OverrideSpecifier},
|
||||
target::TargetKind,
|
||||
DependencyType, Manifest,
|
||||
},
|
||||
names::{PackageName, PackageNames},
|
||||
source::{
|
||||
ids::VersionId, specifiers::DependencySpecifiers, workspace::specifier::VersionTypeOrReq,
|
||||
},
|
||||
Project, DEFAULT_INDEX_NAME,
|
||||
};
|
||||
use relative_path::RelativePathBuf;
|
||||
use std::{
|
||||
collections::{BTreeMap, HashSet},
|
||||
future::Future,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
time::Duration,
|
||||
collections::{BTreeMap, HashSet},
|
||||
future::Future,
|
||||
path::PathBuf,
|
||||
str::FromStr,
|
||||
};
|
||||
use tokio::pin;
|
||||
use tracing::instrument;
|
||||
|
@ -24,280 +33,321 @@ pub mod auth;
|
|||
pub mod commands;
|
||||
pub mod config;
|
||||
pub mod files;
|
||||
pub mod install;
|
||||
pub mod reporters;
|
||||
pub mod style;
|
||||
#[cfg(feature = "version-management")]
|
||||
pub mod version;
|
||||
|
||||
pub const HOME_DIR: &str = concat!(".", env!("CARGO_PKG_NAME"));
|
||||
|
||||
pub fn home_dir() -> anyhow::Result<PathBuf> {
|
||||
Ok(dirs::home_dir()
|
||||
.context("failed to get home directory")?
|
||||
.join(HOME_DIR))
|
||||
Ok(dirs::home_dir()
|
||||
.context("failed to get home directory")?
|
||||
.join(HOME_DIR))
|
||||
}
|
||||
|
||||
pub async fn bin_dir() -> anyhow::Result<PathBuf> {
|
||||
let bin_dir = home_dir()?.join("bin");
|
||||
fs::create_dir_all(&bin_dir)
|
||||
.await
|
||||
.context("failed to create bin folder")?;
|
||||
Ok(bin_dir)
|
||||
let bin_dir = home_dir()?.join("bin");
|
||||
fs::create_dir_all(&bin_dir)
|
||||
.await
|
||||
.context("failed to create bin folder")?;
|
||||
Ok(bin_dir)
|
||||
}
|
||||
|
||||
pub fn resolve_overrides(
|
||||
manifest: &Manifest,
|
||||
) -> anyhow::Result<BTreeMap<OverrideKey, DependencySpecifiers>> {
|
||||
let mut dependencies = None;
|
||||
let mut overrides = BTreeMap::new();
|
||||
|
||||
for (key, spec) in &manifest.overrides {
|
||||
overrides.insert(
|
||||
key.clone(),
|
||||
match spec {
|
||||
OverrideSpecifier::Specifier(spec) => spec,
|
||||
OverrideSpecifier::Alias(alias) => {
|
||||
if dependencies.is_none() {
|
||||
dependencies = Some(
|
||||
manifest
|
||||
.all_dependencies()
|
||||
.context("failed to get all dependencies")?,
|
||||
);
|
||||
}
|
||||
|
||||
&dependencies
|
||||
.as_ref()
|
||||
.and_then(|deps| deps.get(alias))
|
||||
.with_context(|| format!("alias `{alias}` not found in manifest"))?
|
||||
.0
|
||||
}
|
||||
}
|
||||
.clone(),
|
||||
);
|
||||
}
|
||||
|
||||
Ok(overrides)
|
||||
}
|
||||
|
||||
#[instrument(skip(project), ret(level = "trace"), level = "debug")]
|
||||
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
|
||||
let manifest = project.deser_manifest().await?;
|
||||
let lockfile = match project.deser_lockfile().await {
|
||||
Ok(lockfile) => lockfile,
|
||||
Err(pesde::errors::LockfileReadError::Io(e))
|
||||
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
let manifest = project.deser_manifest().await?;
|
||||
let lockfile = match project.deser_lockfile().await {
|
||||
Ok(lockfile) => lockfile,
|
||||
Err(pesde::errors::LockfileReadError::Io(e))
|
||||
if e.kind() == std::io::ErrorKind::NotFound =>
|
||||
{
|
||||
return Ok(None);
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
};
|
||||
|
||||
if manifest.overrides != lockfile.overrides {
|
||||
tracing::debug!("overrides are different");
|
||||
return Ok(None);
|
||||
}
|
||||
if resolve_overrides(&manifest)? != lockfile.overrides {
|
||||
tracing::debug!("overrides are different");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if manifest.target.kind() != lockfile.target {
|
||||
tracing::debug!("target kind is different");
|
||||
return Ok(None);
|
||||
}
|
||||
if manifest.target.kind() != lockfile.target {
|
||||
tracing::debug!("target kind is different");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
if manifest.name != lockfile.name || manifest.version != lockfile.version {
|
||||
tracing::debug!("name or version is different");
|
||||
return Ok(None);
|
||||
}
|
||||
if manifest.name != lockfile.name || manifest.version != lockfile.version {
|
||||
tracing::debug!("name or version is different");
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let specs = lockfile
|
||||
.graph
|
||||
.iter()
|
||||
.flat_map(|(_, versions)| versions)
|
||||
.filter_map(|(_, node)| {
|
||||
node.node
|
||||
.direct
|
||||
.as_ref()
|
||||
.map(|(_, spec, source_ty)| (spec, source_ty))
|
||||
})
|
||||
.collect::<HashSet<_>>();
|
||||
let specs = lockfile
|
||||
.graph
|
||||
.iter()
|
||||
.filter_map(|(_, node)| {
|
||||
node.direct
|
||||
.as_ref()
|
||||
.map(|(_, spec, source_ty)| (spec, source_ty))
|
||||
})
|
||||
.collect::<HashSet<_>>();
|
||||
|
||||
let same_dependencies = manifest
|
||||
.all_dependencies()
|
||||
.context("failed to get all dependencies")?
|
||||
.iter()
|
||||
.all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
|
||||
let same_dependencies = manifest
|
||||
.all_dependencies()
|
||||
.context("failed to get all dependencies")?
|
||||
.iter()
|
||||
.all(|(_, (spec, ty))| specs.contains(&(spec, ty)));
|
||||
|
||||
tracing::debug!("dependencies are the same: {same_dependencies}");
|
||||
tracing::debug!("dependencies are the same: {same_dependencies}");
|
||||
|
||||
Ok(if same_dependencies {
|
||||
Some(lockfile)
|
||||
} else {
|
||||
None
|
||||
})
|
||||
Ok(same_dependencies.then_some(lockfile))
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
struct VersionedPackageName<V: FromStr = VersionId, N: FromStr = PackageNames>(N, Option<V>);
|
||||
|
||||
impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<anyhow::Error>>
|
||||
FromStr for VersionedPackageName<V, N>
|
||||
FromStr for VersionedPackageName<V, N>
|
||||
{
|
||||
type Err = anyhow::Error;
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = s.splitn(2, '@');
|
||||
let name = parts.next().unwrap();
|
||||
let version = parts
|
||||
.next()
|
||||
.map(FromStr::from_str)
|
||||
.transpose()
|
||||
.map_err(Into::into)?;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
let mut parts = s.splitn(2, '@');
|
||||
let name = parts.next().unwrap();
|
||||
let version = parts
|
||||
.next()
|
||||
.map(FromStr::from_str)
|
||||
.transpose()
|
||||
.map_err(Into::into)?;
|
||||
|
||||
Ok(VersionedPackageName(
|
||||
name.parse().map_err(Into::into)?,
|
||||
version,
|
||||
))
|
||||
}
|
||||
Ok(VersionedPackageName(
|
||||
name.parse().map_err(Into::into)?,
|
||||
version,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
impl VersionedPackageName {
|
||||
#[cfg(feature = "patches")]
|
||||
fn get(
|
||||
self,
|
||||
graph: &pesde::lockfile::DownloadedGraph,
|
||||
) -> anyhow::Result<(PackageNames, VersionId)> {
|
||||
let version_id = match self.1 {
|
||||
Some(version) => version,
|
||||
None => {
|
||||
let versions = graph.get(&self.0).context("package not found in graph")?;
|
||||
if versions.len() == 1 {
|
||||
let version = versions.keys().next().unwrap().clone();
|
||||
tracing::debug!("only one version found, using {version}");
|
||||
version
|
||||
} else {
|
||||
anyhow::bail!(
|
||||
"multiple versions found, please specify one of: {}",
|
||||
versions
|
||||
.keys()
|
||||
.map(|v| v.to_string())
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
#[cfg(feature = "patches")]
|
||||
fn get(
|
||||
self,
|
||||
graph: &pesde::graph::DependencyGraph,
|
||||
) -> anyhow::Result<pesde::source::ids::PackageId> {
|
||||
let version_id = if let Some(version) = self.1 {
|
||||
version
|
||||
} else {
|
||||
let versions = graph
|
||||
.keys()
|
||||
.filter(|id| *id.name() == self.0)
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
Ok((self.0, version_id))
|
||||
}
|
||||
match versions.len() {
|
||||
0 => anyhow::bail!("package not found"),
|
||||
1 => versions[0].version_id().clone(),
|
||||
_ => anyhow::bail!(
|
||||
"multiple versions found, please specify one of: {}",
|
||||
versions
|
||||
.iter()
|
||||
.map(ToString::to_string)
|
||||
.collect::<Vec<_>>()
|
||||
.join(", ")
|
||||
),
|
||||
}
|
||||
};
|
||||
|
||||
Ok(pesde::source::ids::PackageId::new(self.0, version_id))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
enum AnyPackageIdentifier<V: FromStr = VersionId, N: FromStr = PackageNames> {
|
||||
PackageName(VersionedPackageName<V, N>),
|
||||
Url((gix::Url, String)),
|
||||
Workspace(VersionedPackageName<VersionTypeOrReq, PackageName>),
|
||||
PackageName(VersionedPackageName<V, N>),
|
||||
Url((gix::Url, String)),
|
||||
Workspace(VersionedPackageName<VersionTypeOrReq, PackageName>),
|
||||
Path(PathBuf),
|
||||
}
|
||||
|
||||
impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<anyhow::Error>>
|
||||
FromStr for AnyPackageIdentifier<V, N>
|
||||
FromStr for AnyPackageIdentifier<V, N>
|
||||
{
|
||||
type Err = anyhow::Error;
|
||||
type Err = anyhow::Error;
|
||||
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
if let Some(s) = s.strip_prefix("gh#") {
|
||||
let s = format!("https://github.com/{s}");
|
||||
let (repo, rev) = s.split_once('#').context("missing revision")?;
|
||||
fn from_str(s: &str) -> Result<Self, Self::Err> {
|
||||
if let Some(s) = s.strip_prefix("gh#") {
|
||||
let s = format!("https://github.com/{s}");
|
||||
let (repo, rev) = s.split_once('#').context("missing revision")?;
|
||||
|
||||
Ok(AnyPackageIdentifier::Url((
|
||||
repo.try_into()?,
|
||||
rev.to_string(),
|
||||
)))
|
||||
} else if let Some(rest) = s.strip_prefix("workspace:") {
|
||||
Ok(AnyPackageIdentifier::Workspace(rest.parse()?))
|
||||
} else if s.contains(':') {
|
||||
let (url, rev) = s.split_once('#').context("missing revision")?;
|
||||
Ok(AnyPackageIdentifier::Url((
|
||||
repo.try_into()?,
|
||||
rev.to_string(),
|
||||
)))
|
||||
} else if let Some(rest) = s.strip_prefix("workspace:") {
|
||||
Ok(AnyPackageIdentifier::Workspace(rest.parse()?))
|
||||
} else if let Some(rest) = s.strip_prefix("path:") {
|
||||
Ok(AnyPackageIdentifier::Path(rest.into()))
|
||||
} else if s.contains(':') {
|
||||
let (url, rev) = s.split_once('#').context("missing revision")?;
|
||||
|
||||
Ok(AnyPackageIdentifier::Url((
|
||||
url.try_into()?,
|
||||
rev.to_string(),
|
||||
)))
|
||||
} else {
|
||||
Ok(AnyPackageIdentifier::PackageName(s.parse()?))
|
||||
}
|
||||
}
|
||||
Ok(AnyPackageIdentifier::Url((
|
||||
url.try_into()?,
|
||||
rev.to_string(),
|
||||
)))
|
||||
} else {
|
||||
Ok(AnyPackageIdentifier::PackageName(s.parse()?))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
|
||||
s.try_into()
|
||||
}
|
||||
|
||||
pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
|
||||
len: u64,
|
||||
mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
|
||||
prefix: String,
|
||||
progress_msg: String,
|
||||
finish_msg: String,
|
||||
) -> anyhow::Result<()> {
|
||||
let bar = indicatif::ProgressBar::new(len)
|
||||
.with_style(
|
||||
indicatif::ProgressStyle::default_bar()
|
||||
.template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
|
||||
.progress_chars("█▓▒░ "),
|
||||
)
|
||||
.with_prefix(prefix)
|
||||
.with_message(progress_msg);
|
||||
bar.enable_steady_tick(Duration::from_millis(100));
|
||||
|
||||
while let Some(result) = rx.recv().await {
|
||||
bar.inc(1);
|
||||
|
||||
match result {
|
||||
Ok(text) => {
|
||||
bar.set_message(text);
|
||||
}
|
||||
Err(e) => return Err(e.into()),
|
||||
}
|
||||
}
|
||||
|
||||
bar.finish_with_message(finish_msg);
|
||||
|
||||
Ok(())
|
||||
s.try_into()
|
||||
}
|
||||
|
||||
pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
|
||||
Project::new(
|
||||
pkg_dir,
|
||||
Some(project.package_dir()),
|
||||
project.data_dir(),
|
||||
project.cas_dir(),
|
||||
	Project::new(
		pkg_dir,
		Some(project.package_dir()),
		project.data_dir(),
		project.cas_dir(),
		project.auth_config().clone(),
	)
}

pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
	project: &Project,
	f: impl Fn(Project) -> F,
) -> anyhow::Result<BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>> {
	// this might seem counterintuitive, but remember that
	// the presence of a workspace dir means that this project is a member of one
	if project.workspace_dir().is_some() {
		return Ok(Default::default());
	}

-	let members_future = project
-		.workspace_members(project.package_dir(), true)
-		.await?;
+	let members_future = project.workspace_members(true).await?;
	pin!(members_future);

	let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();

	while let Some((path, manifest)) = members_future.next().await.transpose()? {
		let relative_path =
			RelativePathBuf::from_path(path.strip_prefix(project.package_dir()).unwrap()).unwrap();

		// don't run on the current workspace root
		if relative_path != "" {
			f(shift_project_dir(project, path)).await?;
		}

		results
			.entry(manifest.name)
			.or_default()
			.insert(manifest.target.kind(), relative_path);
	}

	Ok(results)
}

pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
	if let Err(err) = result {
-		eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
+		eprintln!(
+			"{}: {err}\n",
+			ERROR_STYLE.apply_to(format!("error{prefix}"))
+		);

		let cause = err.chain().skip(1).collect::<Vec<_>>();

		if !cause.is_empty() {
-			eprintln!("{}:", "caused by".red().bold());
+			eprintln!("{}:", ERROR_STYLE.apply_to("caused by"));
			for err in cause {
-				eprintln!(" - {err}");
+				eprintln!("\t- {err}");
			}
		}

		let backtrace = err.backtrace();
		match backtrace.status() {
			std::backtrace::BacktraceStatus::Disabled => {
				eprintln!(
					"\n{}: set RUST_BACKTRACE=1 for a backtrace",
-					"help".yellow().bold()
+					INFO_STYLE.apply_to("help")
				);
			}
			std::backtrace::BacktraceStatus::Captured => {
-				eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
+				eprintln!("\n{}:\n{backtrace}", WARN_STYLE.apply_to("backtrace"));
			}
			_ => {
-				eprintln!("\n{}: not captured", "backtrace".yellow().bold());
+				eprintln!("\n{}: not captured", WARN_STYLE.apply_to("backtrace"));
			}
		}
	}
}

pub async fn get_index(project: &Project, index: Option<&str>) -> anyhow::Result<gix::Url> {
	let manifest = match project.deser_manifest().await {
		Ok(manifest) => Some(manifest),
		Err(e) => match e {
			ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
			e => return Err(e.into()),
		},
	};

	let index_url = match index {
		Some(index) => index.try_into().ok(),
		None => match manifest {
			Some(_) => None,
			None => Some(read_config().await?.default_index),
		},
	};

	if let Some(url) = index_url {
		return Ok(url);
	}

	let index_name = index.unwrap_or(DEFAULT_INDEX_NAME);

	manifest
		.unwrap()
		.indices
		.remove(index_name)
		.with_context(|| format!("index {index_name} not found in manifest"))
}

pub fn dep_type_to_key(dep_type: DependencyType) -> &'static str {
	match dep_type {
		DependencyType::Standard => "dependencies",
		DependencyType::Dev => "dev_dependencies",
		DependencyType::Peer => "peer_dependencies",
	}
}
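For orientation, a minimal sketch of how run_on_workspace_members is meant to be driven: the closure receives a Project whose package directory has been shifted to each member's folder, and the returned map keys members by name and target kind. The publish_member helper is hypothetical:

async fn publish_all(project: &Project) -> anyhow::Result<()> {
	// run a (hypothetical) publish step once per workspace member;
	// the workspace root itself is skipped by run_on_workspace_members
	let members = run_on_workspace_members(project, |member| async move {
		publish_member(&member).await
	})
	.await?;

	for (name, targets) in members {
		println!("published {name} ({} target(s))", targets.len());
	}
	Ok(())
}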
211	src/cli/reporters.rs	Normal file
@ -0,0 +1,211 @@
//! Progress reporters for the CLI

use std::{
	future::Future,
	io::{Stdout, Write},
	sync::{Arc, Mutex, Once, OnceLock},
	time::Duration,
};

use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use pesde::reporters::{
	DownloadProgressReporter, DownloadsReporter, PatchProgressReporter, PatchesReporter,
};

pub const TICK_CHARS: &str = "⣷⣯⣟⡿⢿⣻⣽⣾";

pub fn root_progress_style() -> ProgressStyle {
	ProgressStyle::with_template("{prefix:.dim}{msg:>8.214/yellow} {spinner} [{elapsed_precise}]")
		.unwrap()
		.tick_chars(TICK_CHARS)
}

pub fn root_progress_style_with_progress() -> ProgressStyle {
	ProgressStyle::with_template(
		"{prefix:.dim}{msg:>8.214/yellow} {spinner} [{elapsed_precise}] {bar:20} {pos}/{len}",
	)
	.unwrap()
	.tick_chars(TICK_CHARS)
}

pub async fn run_with_reporter_and_writer<W, F, R, Fut>(writer: W, f: F) -> R
where
	W: Write + Send + Sync + 'static,
	F: FnOnce(MultiProgress, ProgressBar, Arc<CliReporter<W>>) -> Fut,
	Fut: Future<Output = R>,
{
	let multi_progress = MultiProgress::new();
	crate::PROGRESS_BARS
		.lock()
		.unwrap()
		.replace(multi_progress.clone());

	let root_progress = multi_progress.add(ProgressBar::new(0));
	root_progress.set_style(root_progress_style());
	root_progress.enable_steady_tick(Duration::from_millis(100));

	let reporter = Arc::new(CliReporter::with_writer(
		writer,
		multi_progress.clone(),
		root_progress.clone(),
	));
	let result = f(multi_progress.clone(), root_progress.clone(), reporter).await;

	root_progress.finish();
	multi_progress.clear().unwrap();
	crate::PROGRESS_BARS.lock().unwrap().take();

	result
}

pub async fn run_with_reporter<F, R, Fut>(f: F) -> R
where
	F: FnOnce(MultiProgress, ProgressBar, Arc<CliReporter<Stdout>>) -> Fut,
	Fut: Future<Output = R>,
{
	run_with_reporter_and_writer(std::io::stdout(), f).await
}

pub struct CliReporter<W = Stdout> {
	writer: Mutex<W>,
	child_style: ProgressStyle,
	child_style_with_bytes: ProgressStyle,
	child_style_with_bytes_without_total: ProgressStyle,
	multi_progress: MultiProgress,
	root_progress: ProgressBar,
}

impl<W> CliReporter<W> {
	#[allow(unknown_lints, clippy::literal_string_with_formatting_args)]
	pub fn with_writer(
		writer: W,
		multi_progress: MultiProgress,
		root_progress: ProgressBar,
	) -> Self {
		Self {
			writer: Mutex::new(writer),
			child_style: ProgressStyle::with_template("{msg:.dim}").unwrap(),
			child_style_with_bytes: ProgressStyle::with_template(
				"{msg:.dim} {bytes:.dim}/{total_bytes:.dim}",
			)
			.unwrap(),
			child_style_with_bytes_without_total: ProgressStyle::with_template(
				"{msg:.dim} {bytes:.dim}",
			)
			.unwrap(),
			multi_progress,
			root_progress,
		}
	}
}

pub struct CliDownloadProgressReporter<W> {
	root_reporter: Arc<CliReporter<W>>,
	name: String,
	progress: OnceLock<ProgressBar>,
	set_progress: Once,
}

impl<W: Write + Send + Sync + 'static> DownloadsReporter for CliReporter<W> {
	type DownloadProgressReporter = CliDownloadProgressReporter<W>;

	fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {
		self.root_progress.inc_length(1);

		CliDownloadProgressReporter {
			root_reporter: self,
			name,
			progress: OnceLock::new(),
			set_progress: Once::new(),
		}
	}
}

impl<W: Write + Send + Sync + 'static> DownloadProgressReporter for CliDownloadProgressReporter<W> {
	fn report_start(&self) {
		let progress = self.root_reporter.multi_progress.add(ProgressBar::new(0));
		progress.set_style(self.root_reporter.child_style.clone());
		progress.set_message(format!("- {}", self.name));

		self.progress
			.set(progress)
			.expect("report_start called more than once");
	}

	fn report_progress(&self, total: u64, len: u64) {
		if let Some(progress) = self.progress.get() {
			progress.set_length(total);
			progress.set_position(len);

			self.set_progress.call_once(|| {
				if total > 0 {
					progress.set_style(self.root_reporter.child_style_with_bytes.clone());
				} else {
					progress.set_style(
						self.root_reporter
							.child_style_with_bytes_without_total
							.clone(),
					);
				}
			});
		}
	}

	fn report_done(&self) {
		if let Some(progress) = self.progress.get() {
			if progress.is_hidden() {
				writeln!(
					self.root_reporter.writer.lock().unwrap(),
					"downloaded {}",
					self.name
				)
				.unwrap();
			}

			progress.finish();
			self.root_reporter.multi_progress.remove(progress);
			self.root_reporter.root_progress.inc(1);
		}
	}
}

pub struct CliPatchProgressReporter<W> {
	root_reporter: Arc<CliReporter<W>>,
	name: String,
	progress: ProgressBar,
}

impl<W: Write + Send + Sync + 'static> PatchesReporter for CliReporter<W> {
	type PatchProgressReporter = CliPatchProgressReporter<W>;

	fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter {
		let progress = self.multi_progress.add(ProgressBar::new(0));
		progress.set_style(self.child_style.clone());
		progress.set_message(format!("- {name}"));

		self.root_progress.inc_length(1);

		CliPatchProgressReporter {
			root_reporter: self,
			name,
			progress,
		}
	}
}

impl<W: Write + Send + Sync + 'static> PatchProgressReporter for CliPatchProgressReporter<W> {
	fn report_done(&self) {
		if self.progress.is_hidden() {
			writeln!(
				self.root_reporter.writer.lock().unwrap(),
				"patched {}",
				self.name
			)
			.unwrap();
		}

		self.progress.finish();
		self.root_reporter.multi_progress.remove(&self.progress);
		self.root_reporter.root_progress.inc(1);
	}
}
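A hedged sketch of how the CLI is expected to drive this module: run_with_reporter owns the MultiProgress lifecycle and hands the closure a shared CliReporter, which can be used wherever a DownloadsReporter is accepted. The package name below is made up:

use pesde::reporters::{DownloadProgressReporter as _, DownloadsReporter as _};

let result = run_with_reporter(|_multi, root_progress, reporter| async move {
	root_progress.set_message("install");

	// report a single (illustrative) download through the reporter;
	// report_download takes self by Arc, hence the clone
	let progress = reporter.clone().report_download("pesde/hello@1.0.0".to_string());
	progress.report_start();
	progress.report_progress(100, 100);
	progress.report_done();

	Ok::<_, anyhow::Error>(())
})
.await;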
54	src/cli/style.rs	Normal file
@ -0,0 +1,54 @@
use console::{Style, StyledObject};
use paste::paste;
use std::{fmt::Display, sync::LazyLock};

#[derive(Debug)]
pub struct LazyStyle<T>(LazyLock<T>);

impl LazyStyle<Style> {
	pub fn apply_to<D>(&self, text: D) -> StyledObject<D> {
		LazyLock::force(&self.0).apply_to(text)
	}
}

impl<T: Display> Display for LazyStyle<T> {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		write!(f, "{}", LazyLock::force(&self.0))
	}
}

macro_rules! make_style {
	($name:ident, $color:ident) => {
		make_style!($name, $color());
	};
	($name:ident, $($color:tt)+) => {
		paste! {
			pub static [<$name _STYLE>]: LazyStyle<Style> = LazyStyle(LazyLock::new(||
				Style::new().$($color)+.bold()
			));
		}
	};
}

macro_rules! make_prefix {
	($name:ident) => {
		paste! {
			pub static [<$name:upper _PREFIX>]: LazyStyle<StyledObject<&'static str>> = LazyStyle(LazyLock::new(||
				[<$name:upper _STYLE>].apply_to(stringify!($name))
			));
		}
	};
}

pub const CLI_COLOR_256: u8 = 214;

make_style!(INFO, cyan);
make_style!(WARN, yellow);
make_prefix!(warn);
make_style!(ERROR, red);
make_prefix!(error);
make_style!(SUCCESS, green);
make_style!(CLI, color256(CLI_COLOR_256));
make_style!(ADDED, green);
make_style!(REMOVED, red);
make_style!(URL, blue().underlined());
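The macros expand to statics such as ERROR_STYLE, WARN_PREFIX, and URL_STYLE; call sites then look like the following sketch (the messages and URL are illustrative, mirroring how display_err uses the styles earlier in this diff):

eprintln!("{}: manifest is invalid", ERROR_STYLE.apply_to("error"));
// the *_PREFIX statics implement Display and print the styled literal word
eprintln!("{WARN_PREFIX}: falling back to the default index");
println!("see {}", URL_STYLE.apply_to("https://pesde.dev"));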
@ -1,361 +1,291 @@
-use crate::cli::{
-	bin_dir,
-	config::{read_config, write_config, CliConfig},
-	files::make_executable,
-	home_dir,
-};
-use anyhow::Context;
-use colored::Colorize;
+use crate::{
+	cli::{
+		bin_dir,
+		config::{read_config, write_config, CliConfig},
+		files::make_executable,
+		home_dir,
+		reporters::run_with_reporter,
+		style::{ADDED_STYLE, CLI_STYLE, REMOVED_STYLE, URL_STYLE},
+	},
+	util::no_build_metadata,
+};
+use anyhow::Context as _;
+use console::Style;
use fs_err::tokio as fs;
use futures::StreamExt;
-use reqwest::header::ACCEPT;
-use semver::Version;
-use serde::Deserialize;
-use std::{
-	env::current_exe,
-	path::{Path, PathBuf},
-};
-use tokio::io::AsyncWrite;
+use jiff::SignedDuration;
+use pesde::{
+	engine::{
+		source::{
+			traits::{DownloadOptions, EngineSource as _, ResolveOptions},
+			EngineSources,
+		},
+		EngineKind,
+	},
+	reporters::DownloadsReporter as _,
+	version_matches,
+};
+use semver::{Version, VersionReq};
+use std::{
+	collections::BTreeSet,
+	env::current_exe,
+	path::{Path, PathBuf},
+	sync::Arc,
+};
use tracing::instrument;

pub fn current_version() -> Version {
	Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
}

-#[derive(Debug, Deserialize)]
-struct Release {
-	tag_name: String,
-	assets: Vec<Asset>,
-}
+const CHECK_INTERVAL: SignedDuration = SignedDuration::from_hours(6);
+
+pub async fn find_latest_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
+	let version = EngineSources::pesde()
+		.resolve(
+			&VersionReq::STAR,
+			&ResolveOptions {
+				reqwest: reqwest.clone(),
+			},
+		)
+		.await
+		.context("failed to resolve version")?
+		.pop_last()
+		.context("no versions found")?
+		.0;
+
+	Ok(version)
+}

-#[derive(Debug, Deserialize)]
-struct Asset {
-	name: String,
-	url: url::Url,
-}
-
-#[instrument(level = "trace")]
-fn get_repo() -> (String, String) {
-	let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
-	let (owner, repo) = (
-		parts.next().unwrap().to_string(),
-		parts.next().unwrap().to_string(),
-	);
-
-	tracing::trace!("repository for updates: {owner}/{repo}");
-
-	(owner, repo)
-}
-
-#[derive(Debug)]
-pub enum VersionType {
-	Latest,
-	Specific(Version),
-}
-
-#[instrument(skip(reqwest), level = "trace")]
-pub async fn get_remote_version(
-	reqwest: &reqwest::Client,
-	ty: VersionType,
-) -> anyhow::Result<Version> {
-	let (owner, repo) = get_repo();
-
-	let mut releases = reqwest
-		.get(format!(
-			"https://api.github.com/repos/{owner}/{repo}/releases",
-		))
-		.send()
-		.await
-		.context("failed to send request to GitHub API")?
-		.error_for_status()
-		.context("failed to get GitHub API response")?
-		.json::<Vec<Release>>()
-		.await
-		.context("failed to parse GitHub API response")?
-		.into_iter()
-		.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
-
-	match ty {
-		VersionType::Latest => releases.max(),
-		VersionType::Specific(version) => {
-			releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
-		}
-	}
-	.context("failed to find latest version")
-}
-
-pub fn no_build_metadata(version: &Version) -> Version {
-	let mut version = version.clone();
-	version.build = semver::BuildMetadata::EMPTY;
-	version
-}
-
-const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
-
#[instrument(skip(reqwest), level = "trace")]
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
	let config = read_config().await?;

	let version = if let Some((_, version)) = config
		.last_checked_updates
-		.filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
+		.filter(|(time, _)| jiff::Timestamp::now().duration_since(*time) < CHECK_INTERVAL)
	{
		tracing::debug!("using cached version");
		version
	} else {
		tracing::debug!("checking for updates");
-		let version = get_remote_version(reqwest, VersionType::Latest).await?;
+		let version = find_latest_version(reqwest).await?;

		write_config(&CliConfig {
-			last_checked_updates: Some((chrono::Utc::now(), version.clone())),
+			last_checked_updates: Some((jiff::Timestamp::now(), version.clone())),
			..config
		})
		.await?;

		version
	};
	let current_version = current_version();
	let version_no_metadata = no_build_metadata(&version);

	if version_no_metadata <= current_version {
		return Ok(());
	}

-	let name = env!("CARGO_BIN_NAME");
+	let alert_style = Style::new().yellow();
	let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"));

-	let unformatted_messages = [
-		"".to_string(),
-		format!("update available! {current_version} → {version_no_metadata}"),
-		format!("changelog: {changelog}"),
-		format!("run `{name} self-upgrade` to upgrade"),
-		"".to_string(),
-	];
-
-	let width = unformatted_messages
-		.iter()
-		.map(|s| s.chars().count())
-		.max()
-		.unwrap()
-		+ 4;
-
-	let column = "│".bright_magenta();
-
-	let message = [
-		"".to_string(),
-		format!(
-			"update available! {} → {}",
-			current_version.to_string().red(),
-			version_no_metadata.to_string().green()
-		),
-		format!("changelog: {}", changelog.blue()),
-		format!(
-			"run `{} {}` to upgrade",
-			name.blue(),
-			"self-upgrade".yellow()
-		),
-		"".to_string(),
-	]
-	.into_iter()
-	.enumerate()
-	.map(|(i, s)| {
-		let text_length = unformatted_messages[i].chars().count();
-		let padding = (width as f32 - text_length as f32) / 2f32;
-		let padding_l = " ".repeat(padding.floor() as usize);
-		let padding_r = " ".repeat(padding.ceil() as usize);
-		format!("{column}{padding_l}{s}{padding_r}{column}")
-	})
-	.collect::<Vec<_>>()
-	.join("\n");
-
-	let lines = "─".repeat(width).bright_magenta();
-
-	let tl = "╭".bright_magenta();
-	let tr = "╮".bright_magenta();
-	let bl = "╰".bright_magenta();
-	let br = "╯".bright_magenta();
-
-	println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
+	let messages = [
+		format!(
+			"{} {} → {}",
+			alert_style.apply_to("update available!").bold(),
+			REMOVED_STYLE.apply_to(current_version),
+			ADDED_STYLE.apply_to(version_no_metadata)
+		),
+		format!(
+			"run {} to upgrade",
+			CLI_STYLE.apply_to(concat!("`", env!("CARGO_BIN_NAME"), " self-upgrade`")),
+		),
+		"".to_string(),
+		format!("changelog: {}", URL_STYLE.apply_to(changelog)),
+	];
+
+	let column = alert_style.apply_to("┃");
+
+	let message = messages
+		.into_iter()
+		.map(|s| format!("{column} {s}"))
+		.collect::<Vec<_>>()
+		.join("\n");
+
+	eprintln!("\n{message}\n");

	Ok(())
}

-#[instrument(skip(reqwest, writer), level = "trace")]
-pub async fn download_github_release<W: AsyncWrite + Unpin>(
-	reqwest: &reqwest::Client,
-	version: &Version,
-	mut writer: W,
-) -> anyhow::Result<()> {
-	let (owner, repo) = get_repo();
-
-	let release = reqwest
-		.get(format!(
-			"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
-		))
-		.send()
-		.await
-		.context("failed to send request to GitHub API")?
-		.error_for_status()
-		.context("failed to get GitHub API response")?
-		.json::<Release>()
-		.await
-		.context("failed to parse GitHub API response")?;
-
-	let asset = release
-		.assets
-		.into_iter()
-		.find(|asset| {
-			asset.name.ends_with(&format!(
-				"-{}-{}.tar.gz",
-				std::env::consts::OS,
-				std::env::consts::ARCH
-			))
-		})
-		.context("failed to find asset for current platform")?;
-
-	let bytes = reqwest
-		.get(asset.url)
-		.header(ACCEPT, "application/octet-stream")
-		.send()
-		.await
-		.context("failed to send request to download asset")?
-		.error_for_status()
-		.context("failed to download asset")?
-		.bytes()
-		.await
-		.context("failed to download asset")?;
-
-	let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
-	let mut archive = tokio_tar::Archive::new(&mut decoder);
-
-	let mut entry = archive
-		.entries()
-		.context("failed to read archive entries")?
-		.next()
-		.await
-		.context("archive has no entry")?
-		.context("failed to get first archive entry")?;
-
-	tokio::io::copy(&mut entry, &mut writer)
-		.await
-		.context("failed to write archive entry to file")
-		.map(|_| ())
-}
-
-#[derive(Debug)]
-pub enum TagInfo {
-	Complete(Version),
-	Incomplete(Version),
-}
+const ENGINES_DIR: &str = "engines";
+
+#[instrument(level = "trace")]
+pub async fn get_installed_versions(engine: EngineKind) -> anyhow::Result<BTreeSet<Version>> {
+	let source = engine.source();
+	let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
+	let mut installed_versions = BTreeSet::new();
+
+	let mut read_dir = match fs::read_dir(&path).await {
+		Ok(read_dir) => read_dir,
+		Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(installed_versions),
+		Err(e) => return Err(e).context("failed to read engines directory"),
+	};
+
+	while let Some(entry) = read_dir.next_entry().await? {
+		let path = entry.path();
+
+		let Some(version) = path.file_name().and_then(|s| s.to_str()) else {
+			continue;
+		};
+
+		if let Ok(version) = Version::parse(version) {
+			installed_versions.insert(version);
+		}
+	}
+
+	Ok(installed_versions)
+}

-#[instrument(skip(reqwest), level = "trace")]
-pub async fn get_or_download_version(
-	reqwest: &reqwest::Client,
-	tag: &TagInfo,
-	always_give_path: bool,
-) -> anyhow::Result<Option<PathBuf>> {
-	let path = home_dir()?.join("versions");
-	fs::create_dir_all(&path)
-		.await
-		.context("failed to create versions directory")?;
-
-	let version = match tag {
-		TagInfo::Complete(version) => version,
-		// don't fetch the version since it could be cached
-		TagInfo::Incomplete(version) => version,
-	};
-
-	let path = path.join(format!(
-		"{}{}",
-		no_build_metadata(version),
-		std::env::consts::EXE_SUFFIX
-	));
-
-	let is_requested_version = !always_give_path && *version == current_version();
-
-	if path.exists() {
-		tracing::debug!("version already exists");
-
-		return Ok(if is_requested_version {
-			None
-		} else {
-			Some(path)
-		});
-	}
-
-	if is_requested_version {
-		tracing::debug!("copying current executable to version directory");
-		fs::copy(current_exe()?, &path)
-			.await
-			.context("failed to copy current executable to version directory")?;
-	} else {
-		let version = match tag {
-			TagInfo::Complete(version) => version.clone(),
-			TagInfo::Incomplete(version) => {
-				get_remote_version(reqwest, VersionType::Specific(version.clone()))
-					.await
-					.context("failed to get remote version")?
-			}
-		};
-
-		tracing::debug!("downloading version");
-		download_github_release(
-			reqwest,
-			&version,
-			fs::File::create(&path)
-				.await
-				.context("failed to create version file")?,
-		)
-		.await?;
-	}
-
-	make_executable(&path)
-		.await
-		.context("failed to make downloaded version executable")?;
-
-	Ok(if is_requested_version {
-		None
-	} else {
-		Some(path)
-	})
-}
+#[instrument(skip(reqwest), level = "trace")]
+pub async fn get_or_download_engine(
+	reqwest: &reqwest::Client,
+	engine: EngineKind,
+	req: VersionReq,
+) -> anyhow::Result<PathBuf> {
+	let source = engine.source();
+	let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
+
+	let installed_versions = get_installed_versions(engine).await?;
+
+	let max_matching = installed_versions
+		.iter()
+		.filter(|v| version_matches(&req, v))
+		.next_back();
+	if let Some(version) = max_matching {
+		return Ok(path
+			.join(version.to_string())
+			.join(source.expected_file_name())
+			.with_extension(std::env::consts::EXE_EXTENSION));
+	}
+
+	run_with_reporter(|_, root_progress, reporter| async {
+		let root_progress = root_progress;
+		let reporter = reporter;
+
+		root_progress.set_message("resolve version");
+		let mut versions = source
+			.resolve(
+				&req,
+				&ResolveOptions {
+					reqwest: reqwest.clone(),
+				},
+			)
+			.await
+			.context("failed to resolve versions")?;
+		let (version, engine_ref) = versions.pop_last().context("no matching versions found")?;
+
+		root_progress.set_message("download");
+
+		let reporter = reporter.report_download(format!("{engine} v{version}"));
+
+		let archive = source
+			.download(
+				&engine_ref,
+				&DownloadOptions {
+					reqwest: reqwest.clone(),
+					reporter: Arc::new(reporter),
+					version: version.clone(),
+				},
+			)
+			.await
+			.context("failed to download engine")?;
+
+		let path = path.join(version.to_string());
+		fs::create_dir_all(&path)
+			.await
+			.context("failed to create engine container folder")?;
+		let path = path
+			.join(source.expected_file_name())
+			.with_extension(std::env::consts::EXE_EXTENSION);
+
+		let mut file = fs::File::create(&path)
+			.await
+			.context("failed to create new file")?;
+
+		tokio::io::copy(
+			&mut archive
+				.find_executable(source.expected_file_name())
+				.await
+				.context("failed to find executable")?,
+			&mut file,
+		)
+		.await
+		.context("failed to write to file")?;
+
+		make_executable(&path)
+			.await
+			.context("failed to make downloaded version executable")?;
+
+		if engine != EngineKind::Pesde {
+			make_linker_if_needed(engine).await?;
+		}
+
+		Ok::<_, anyhow::Error>(path)
+	})
+	.await
+}

#[instrument(level = "trace")]
-pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
-	let bin_exe_path = bin_dir().await?.join(format!(
-		"{}{}",
-		env!("CARGO_BIN_NAME"),
-		std::env::consts::EXE_SUFFIX
-	));
-	let mut downloaded_file = downloaded_file.to_path_buf();
+pub async fn replace_pesde_bin_exe(with: &Path) -> anyhow::Result<()> {
+	let bin_exe_path = bin_dir()
+		.await?
+		.join(EngineKind::Pesde.to_string())
+		.with_extension(std::env::consts::EXE_EXTENSION);

-	let exists = bin_exe_path.exists();
+	let exists = fs::metadata(&bin_exe_path).await.is_ok();

	if cfg!(target_os = "linux") && exists {
		fs::remove_file(&bin_exe_path)
			.await
			.context("failed to remove existing executable")?;
	} else if exists {
		let tempfile = tempfile::Builder::new()
			.make(|_| Ok(()))
			.context("failed to create temporary file")?;
-		let path = tempfile.into_temp_path().to_path_buf();
-		#[cfg(windows)]
-		let path = path.with_extension("exe");
+		let temp_path = tempfile.into_temp_path().to_path_buf();
+		#[cfg(windows)]
+		let temp_path = temp_path.with_extension("exe");

-		let current_exe = current_exe().context("failed to get current exe path")?;
-		if current_exe == downloaded_file {
-			downloaded_file = path.to_path_buf();
-		}
-
-		fs::rename(&bin_exe_path, &path)
-			.await
-			.context("failed to rename current executable")?;
+		match fs::rename(&bin_exe_path, &temp_path).await {
+			Ok(_) => {}
+			Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
+			Err(e) => return Err(e).context("failed to rename existing executable"),
+		}
	}

-	fs::copy(downloaded_file, &bin_exe_path)
+	fs::copy(with, &bin_exe_path)
		.await
		.context("failed to copy executable to bin folder")?;

	make_executable(&bin_exe_path).await
}
+
+#[instrument(level = "trace")]
+pub async fn make_linker_if_needed(engine: EngineKind) -> anyhow::Result<()> {
+	let bin_dir = bin_dir().await?;
+	let linker = bin_dir
+		.join(engine.to_string())
+		.with_extension(std::env::consts::EXE_EXTENSION);
+
+	if fs::metadata(&linker).await.is_err() {
+		let exe = current_exe().context("failed to get current exe path")?;
+
+		#[cfg(windows)]
+		let result = fs::symlink_file(exe, linker);
+		#[cfg(not(windows))]
+		let result = fs::symlink(exe, linker);
+
+		result.await.context("failed to create symlink")?;
+	}
+
+	Ok(())
+}
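Taken together, a hedged sketch of how a caller might use the new engine plumbing; EngineKind::Lune and the ^0.2 requirement are illustrative assumptions, not taken from this diff:

use semver::VersionReq;

async fn ensure_lune(reqwest: &reqwest::Client) -> anyhow::Result<std::path::PathBuf> {
	// resolves against installed versions first, downloading only on a miss;
	// binaries are cached under <pesde home>/engines/<engine dir>/<version>
	let req: VersionReq = "^0.2".parse()?;
	get_or_download_engine(reqwest, EngineKind::Lune, req).await
}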
339	src/download.rs
@ -1,190 +1,203 @@
use crate::{
-	lockfile::{DependencyGraph, DownloadedDependencyGraphNode, DownloadedGraph},
-	manifest::DependencyType,
-	refresh_sources,
-	source::{
-		traits::{PackageRef, PackageSource},
-		PackageSources,
-	},
-	Project, PACKAGES_CONTAINER_NAME,
+	graph::{DependencyGraph, DependencyGraphNode},
+	reporters::{DownloadProgressReporter as _, DownloadsReporter},
+	source::{
+		fs::PackageFs,
+		ids::PackageId,
+		traits::{DownloadOptions, PackageRef as _, PackageSource as _, RefreshOptions},
+	},
+	Project, RefreshedSources,
};
-use fs_err::tokio as fs;
-use std::{
-	collections::HashSet,
-	sync::{Arc, Mutex},
-};
-use tracing::{instrument, Instrument};
+use async_stream::try_stream;
+use futures::Stream;
+use std::{num::NonZeroUsize, sync::Arc};
+use tokio::{sync::Semaphore, task::JoinSet};
+use tracing::{instrument, Instrument as _};

-type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
-
-pub(crate) type MultithreadDownloadJob = (
-	tokio::sync::mpsc::Receiver<Result<String, errors::DownloadGraphError>>,
-	MultithreadedGraph,
-);
+/// Options for downloading.
+#[derive(Debug)]
+pub(crate) struct DownloadGraphOptions<Reporter> {
+	/// The reqwest client.
+	pub reqwest: reqwest::Client,
+	/// The downloads reporter.
+	pub reporter: Option<Arc<Reporter>>,
+	/// The refreshed sources.
+	pub refreshed_sources: RefreshedSources,
+	/// The max number of concurrent network requests.
+	pub network_concurrency: NonZeroUsize,
+}
+
+impl<Reporter> DownloadGraphOptions<Reporter>
+where
+	Reporter: DownloadsReporter + Send + Sync + 'static,
+{
+	/// Creates a new download options with the given reqwest client and reporter.
+	pub(crate) fn new(reqwest: reqwest::Client) -> Self {
+		Self {
+			reqwest,
+			reporter: None,
+			refreshed_sources: Default::default(),
+			network_concurrency: NonZeroUsize::new(16).unwrap(),
+		}
+	}
+
+	/// Sets the downloads reporter.
+	pub(crate) fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
+		self.reporter.replace(reporter.into());
+		self
+	}
+
+	/// Sets the refreshed sources.
+	pub(crate) fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
+		self.refreshed_sources = refreshed_sources;
+		self
+	}
+
+	/// Sets the max number of concurrent network requests.
+	pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
+		self.network_concurrency = network_concurrency;
+		self
+	}
+}
+
+impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
+	fn clone(&self) -> Self {
+		Self {
+			reqwest: self.reqwest.clone(),
+			reporter: self.reporter.clone(),
+			refreshed_sources: self.refreshed_sources.clone(),
+			network_concurrency: self.network_concurrency,
+		}
+	}
+}

impl Project {
-	/// Downloads a graph of dependencies
-	#[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
-	pub async fn download_graph(
-		&self,
-		graph: &DependencyGraph,
-		refreshed_sources: &mut HashSet<PackageSources>,
-		reqwest: &reqwest::Client,
-		prod: bool,
-		write: bool,
-		wally: bool,
-	) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
-		let manifest = self.deser_manifest().await?;
-		let manifest_target_kind = manifest.target.kind();
-		let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
-
-		let (tx, rx) = tokio::sync::mpsc::channel(
-			graph
-				.iter()
-				.map(|(_, versions)| versions.len())
-				.sum::<usize>()
-				.max(1),
-		);
-
-		refresh_sources(
-			self,
-			graph
-				.iter()
-				.flat_map(|(_, versions)| versions.iter())
-				.map(|(_, node)| node.pkg_ref.source()),
-			refreshed_sources,
-		)
-		.await?;
-
-		for (name, versions) in graph {
-			for (version_id, node) in versions {
-				// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
-				if node.pkg_ref.like_wally() != wally {
-					continue;
-				}
-
-				let tx = tx.clone();
-
-				let name = name.clone();
-				let version_id = version_id.clone();
-				let node = node.clone();
-
-				let span = tracing::info_span!(
-					"download",
-					name = name.to_string(),
-					version_id = version_id.to_string()
-				);
-
-				let project = Arc::new(self.clone());
-				let reqwest = reqwest.clone();
-				let downloaded_graph = downloaded_graph.clone();
-
-				let package_dir = self.package_dir().to_path_buf();
-
-				tokio::spawn(
-					async move {
-						let source = node.pkg_ref.source();
-
-						let container_folder = node.container_folder(
-							&package_dir
-								.join(manifest_target_kind.packages_folder(version_id.target()))
-								.join(PACKAGES_CONTAINER_NAME),
-							&name,
-							version_id.version(),
-						);
-
-						match fs::create_dir_all(&container_folder).await {
-							Ok(_) => {}
-							Err(e) => {
-								tx.send(Err(errors::DownloadGraphError::Io(e)))
-									.await
-									.unwrap();
-								return;
-							}
-						}
-
-						let project = project.clone();
-
-						tracing::debug!("downloading");
-
-						let (fs, target) =
-							match source.download(&node.pkg_ref, &project, &reqwest).await {
-								Ok(target) => target,
-								Err(e) => {
-									tx.send(Err(Box::new(e).into())).await.unwrap();
-									return;
-								}
-							};
-
-						tracing::debug!("downloaded");
-
-						if write {
-							if !prod || node.resolved_ty != DependencyType::Dev {
-								match fs.write_to(container_folder, project.cas_dir(), true).await {
-									Ok(_) => {}
-									Err(e) => {
-										tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
-											.await
-											.unwrap();
-										return;
-									}
-								};
-							} else {
-								tracing::debug!(
-									"skipping write to disk, dev dependency in prod mode"
-								);
-							}
-						}
-
-						let display_name = format!("{name}@{version_id}");
-
-						{
-							let mut downloaded_graph = downloaded_graph.lock().unwrap();
-							downloaded_graph
-								.entry(name)
-								.or_default()
-								.insert(version_id, DownloadedDependencyGraphNode { node, target });
-						}
-
-						tx.send(Ok(display_name)).await.unwrap();
-					}
-					.instrument(span),
-				);
-			}
-		}
-
-		Ok((rx, downloaded_graph))
-	}
+	/// Downloads a graph of dependencies.
+	#[instrument(skip_all, level = "debug")]
+	pub(crate) async fn download_graph<Reporter>(
+		&self,
+		graph: &DependencyGraph,
+		options: DownloadGraphOptions<Reporter>,
+	) -> Result<
+		impl Stream<
+			Item = Result<(PackageId, DependencyGraphNode, PackageFs), errors::DownloadGraphError>,
+		>,
+		errors::DownloadGraphError,
+	>
+	where
+		Reporter: DownloadsReporter + Send + Sync + 'static,
+	{
+		let DownloadGraphOptions {
+			reqwest,
+			reporter,
+			refreshed_sources,
+			network_concurrency,
+		} = options;
+
+		let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));
+
+		let mut tasks = graph
+			.iter()
+			.map(|(package_id, node)| {
+				let span = tracing::info_span!("download", package_id = package_id.to_string());
+
+				let project = self.clone();
+				let reqwest = reqwest.clone();
+				let reporter = reporter.clone();
+				let refreshed_sources = refreshed_sources.clone();
+				let semaphore = semaphore.clone();
+				let package_id = Arc::new(package_id.clone());
+				let node = node.clone();
+
+				async move {
+					let progress_reporter = reporter
+						.clone()
+						.map(|reporter| reporter.report_download(package_id.to_string()));
+
+					let _permit = semaphore.acquire().await;
+
+					if let Some(progress_reporter) = &progress_reporter {
+						progress_reporter.report_start();
+					}
+
+					let source = node.pkg_ref.source();
+					refreshed_sources
+						.refresh(
+							&source,
+							&RefreshOptions {
+								project: project.clone(),
+							},
+						)
+						.await?;
+
+					tracing::debug!("downloading");
+
+					let fs = match progress_reporter {
+						Some(progress_reporter) => {
+							source
+								.download(
+									&node.pkg_ref,
+									&DownloadOptions {
+										project: project.clone(),
+										reqwest,
+										id: package_id.clone(),
+										reporter: Arc::new(progress_reporter),
+									},
+								)
+								.await
+						}
+						None => {
+							source
+								.download(
+									&node.pkg_ref,
+									&DownloadOptions {
+										project: project.clone(),
+										reqwest,
+										id: package_id.clone(),
+										reporter: Arc::new(()),
+									},
+								)
+								.await
+						}
+					}
+					.map_err(Box::new)?;
+
+					tracing::debug!("downloaded");
+
+					Ok((Arc::into_inner(package_id).unwrap(), node, fs))
+				}
+				.instrument(span)
+			})
+			.collect::<JoinSet<Result<_, errors::DownloadGraphError>>>();
+
+		let stream = try_stream! {
+			while let Some(res) = tasks.join_next().await {
+				yield res.unwrap()?;
+			}
+		};
+
+		Ok(stream)
+	}
}

/// Errors that can occur when downloading a graph
pub mod errors {
	use thiserror::Error;

	/// Errors that can occur when downloading a graph
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum DownloadGraphError {
-		/// An error occurred deserializing the project manifest
-		#[error("error deserializing project manifest")]
-		ManifestDeserializationFailed(#[from] crate::errors::ManifestReadError),
-
		/// An error occurred refreshing a package source
		#[error("failed to refresh package source")]
-		RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
+		RefreshFailed(#[from] crate::source::errors::RefreshError),

		/// Error interacting with the filesystem
		#[error("error interacting with the filesystem")]
		Io(#[from] std::io::Error),

		/// Error downloading a package
		#[error("failed to download package")]
		DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
-
-		/// Error writing package contents
-		#[error("failed to write package contents")]
-		WriteFailed(#[source] std::io::Error),
	}
}
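Since download_graph now returns a Stream of (PackageId, DependencyGraphNode, PackageFs) tuples instead of an mpsc receiver plus a shared map, a crate-internal consumer pins the stream and drains it. A rough sketch, assuming the unit reporter () satisfies the DownloadsReporter bound the same way it stands in for a progress reporter above, and with project, graph, reqwest and refreshed_sources already in scope:

use futures::TryStreamExt as _;

let options = DownloadGraphOptions::<()>::new(reqwest.clone())
	.refreshed_sources(refreshed_sources.clone())
	.network_concurrency(std::num::NonZeroUsize::new(8).unwrap());

let downloaded = project.download_graph(&graph, options).await?;
tokio::pin!(downloaded);

// each yielded item is one fully downloaded package, ready to be written out
while let Some((id, _node, _fs)) = downloaded.try_next().await? {
	tracing::debug!("downloaded {id}");
}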
@ -1,176 +1,476 @@
use crate::{
-	lockfile::{DependencyGraph, DownloadedGraph},
-	manifest::DependencyType,
-	source::PackageSources,
-	Project,
+	all_packages_dirs,
+	download::DownloadGraphOptions,
+	graph::{
+		DependencyGraph, DependencyGraphNode, DependencyGraphNodeWithTarget,
+		DependencyGraphWithTarget,
+	},
+	manifest::{target::TargetKind, DependencyType},
+	reporters::{DownloadsReporter, PatchesReporter},
+	source::{
+		ids::PackageId,
+		traits::{GetTargetOptions, PackageRef as _, PackageSource as _},
+	},
+	Project, RefreshedSources, SCRIPTS_LINK_FOLDER,
};
-use futures::FutureExt;
+use fs_err::tokio as fs;
+use futures::TryStreamExt as _;
use std::{
-	collections::HashSet,
-	future::Future,
-	sync::{Arc, Mutex as StdMutex},
+	collections::HashMap,
+	convert::Infallible,
+	future::{self, Future},
+	num::NonZeroUsize,
+	path::PathBuf,
+	sync::Arc,
};
-use tokio::sync::Mutex;
-use tracing::{instrument, Instrument};
+use tokio::{pin, task::JoinSet};
+use tracing::{instrument, Instrument as _};

-/// Filters a graph to only include production dependencies, if `prod` is `true`
-pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
-	if !prod {
-		return graph.clone();
-	}
-
-	graph
-		.iter()
-		.map(|(name, versions)| {
-			(
-				name.clone(),
-				versions
-					.iter()
-					.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
-					.map(|(v_id, node)| (v_id.clone(), node.clone()))
-					.collect(),
-			)
-		})
-		.collect()
-}
+/// Hooks to perform actions after certain events during download and linking.
+#[allow(unused_variables)]
+pub trait DownloadAndLinkHooks: Send + Sync {
+	/// The error type for the hooks.
+	type Error: std::error::Error + Send + Sync + 'static;
+
+	/// Called after scripts have been downloaded. The `downloaded_graph`
+	/// contains all downloaded packages.
+	fn on_scripts_downloaded(
+		&self,
+		graph: &DependencyGraphWithTarget,
+	) -> impl Future<Output = Result<(), Self::Error>> + Send {
+		future::ready(Ok(()))
+	}
+
+	/// Called after binary dependencies have been downloaded. The
+	/// `downloaded_graph` contains all downloaded packages.
+	fn on_bins_downloaded(
+		&self,
+		graph: &DependencyGraphWithTarget,
+	) -> impl Future<Output = Result<(), Self::Error>> + Send {
+		future::ready(Ok(()))
+	}
+
+	/// Called after all dependencies have been downloaded. The
+	/// `downloaded_graph` contains all downloaded packages.
+	fn on_all_downloaded(
+		&self,
+		graph: &DependencyGraphWithTarget,
+	) -> impl Future<Output = Result<(), Self::Error>> + Send {
+		future::ready(Ok(()))
+	}
+}
|
||||
}
|
||||
|
||||
/// Receiver for dependencies downloaded and linked
|
||||
pub type DownloadAndLinkReceiver =
|
||||
tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
|
||||
impl DownloadAndLinkHooks for () {
|
||||
type Error = Infallible;
|
||||
}
|
||||
|
||||
/// Options for downloading and linking.
|
||||
#[derive(Debug)]
|
||||
pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
|
||||
/// The reqwest client.
|
||||
pub reqwest: reqwest::Client,
|
||||
/// The downloads reporter.
|
||||
pub reporter: Option<Arc<Reporter>>,
|
||||
/// The download and link hooks.
|
||||
pub hooks: Option<Arc<Hooks>>,
|
||||
/// The refreshed sources.
|
||||
pub refreshed_sources: RefreshedSources,
|
||||
/// Whether to skip dev dependencies.
|
||||
pub prod: bool,
|
||||
/// The max number of concurrent network requests.
|
||||
pub network_concurrency: NonZeroUsize,
|
||||
/// Whether to re-install all dependencies even if they are already installed
|
||||
pub force: bool,
|
||||
}
|
||||
|
||||
impl<Reporter, Hooks> DownloadAndLinkOptions<Reporter, Hooks>
|
||||
where
|
||||
Reporter: DownloadsReporter + PatchesReporter + Send + Sync + 'static,
|
||||
Hooks: DownloadAndLinkHooks + Send + Sync + 'static,
|
||||
{
|
||||
/// Creates a new download options with the given reqwest client and reporter.
|
||||
#[must_use]
|
||||
pub fn new(reqwest: reqwest::Client) -> Self {
|
||||
Self {
|
||||
reqwest,
|
||||
reporter: None,
|
||||
hooks: None,
|
||||
refreshed_sources: Default::default(),
|
||||
prod: false,
|
||||
network_concurrency: NonZeroUsize::new(16).unwrap(),
|
||||
force: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Sets the downloads reporter.
|
||||
#[must_use]
|
||||
pub fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
|
||||
self.reporter.replace(reporter.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the download and link hooks.
|
||||
#[must_use]
|
||||
pub fn hooks(mut self, hooks: impl Into<Arc<Hooks>>) -> Self {
|
||||
self.hooks.replace(hooks.into());
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the refreshed sources.
|
||||
#[must_use]
|
||||
pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
|
||||
self.refreshed_sources = refreshed_sources;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets whether to skip dev dependencies.
|
||||
#[must_use]
|
||||
pub fn prod(mut self, prod: bool) -> Self {
|
||||
self.prod = prod;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets the max number of concurrent network requests.
|
||||
#[must_use]
|
||||
pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
|
||||
self.network_concurrency = network_concurrency;
|
||||
self
|
||||
}
|
||||
|
||||
/// Sets whether to re-install all dependencies even if they are already installed
|
||||
#[must_use]
|
||||
pub fn force(mut self, force: bool) -> Self {
|
||||
self.force = force;
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl Clone for DownloadAndLinkOptions {
|
||||
fn clone(&self) -> Self {
|
||||
Self {
|
||||
reqwest: self.reqwest.clone(),
|
||||
reporter: self.reporter.clone(),
|
||||
hooks: self.hooks.clone(),
|
||||
refreshed_sources: self.refreshed_sources.clone(),
|
||||
prod: self.prod,
|
||||
network_concurrency: self.network_concurrency,
|
||||
force: self.force,
|
||||
}
|
||||
}
|
||||
}
|
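For reference, the builder is meant to be chained and then handed to download_and_link. A minimal sketch, assuming a reporter: Arc<CliReporter> from the CLI module shown earlier is in scope:

let options = DownloadAndLinkOptions::<CliReporter, ()>::new(reqwest.clone())
	.reporter(reporter.clone())
	.refreshed_sources(refreshed_sources.clone())
	.prod(true)
	.network_concurrency(std::num::NonZeroUsize::new(8).unwrap())
	.force(false);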
||||
|
||||
impl Project {
|
||||
/// Downloads a graph of dependencies and links them in the correct order
|
||||
#[instrument(
|
||||
skip(self, graph, refreshed_sources, reqwest, pesde_cb),
|
||||
level = "debug"
|
||||
)]
|
||||
pub async fn download_and_link<
|
||||
F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
|
||||
R: Future<Output = Result<(), E>> + Send,
|
||||
E: Send + Sync + 'static,
|
||||
>(
|
||||
&self,
|
||||
graph: &Arc<DependencyGraph>,
|
||||
refreshed_sources: &Arc<Mutex<HashSet<PackageSources>>>,
|
||||
reqwest: &reqwest::Client,
|
||||
prod: bool,
|
||||
write: bool,
|
||||
pesde_cb: F,
|
||||
) -> Result<
|
||||
(
|
||||
DownloadAndLinkReceiver,
|
||||
impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
|
||||
),
|
||||
errors::DownloadAndLinkError<E>,
|
||||
> {
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(
|
||||
graph
|
||||
.iter()
|
||||
.map(|(_, versions)| versions.len())
|
||||
.sum::<usize>()
|
||||
.max(1),
|
||||
);
|
||||
let downloaded_graph = Arc::new(StdMutex::new(DownloadedGraph::default()));
|
||||
/// Downloads a graph of dependencies and links them in the correct order
|
||||
#[instrument(skip_all, fields(prod = options.prod), level = "debug")]
|
||||
pub async fn download_and_link<Reporter, Hooks>(
|
||||
&self,
|
||||
graph: &Arc<DependencyGraph>,
|
||||
options: DownloadAndLinkOptions<Reporter, Hooks>,
|
||||
) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
|
||||
where
|
||||
Reporter: DownloadsReporter + PatchesReporter + 'static,
|
||||
Hooks: DownloadAndLinkHooks + 'static,
|
||||
{
|
||||
let DownloadAndLinkOptions {
|
||||
reqwest,
|
||||
reporter,
|
||||
hooks,
|
||||
refreshed_sources,
|
||||
prod,
|
||||
network_concurrency,
|
||||
force,
|
||||
} = options;
|
||||
|
||||
let this = self.clone();
|
||||
let graph = graph.clone();
|
||||
let reqwest = reqwest.clone();
|
||||
let refreshed_sources = refreshed_sources.clone();
|
||||
let graph = graph.clone();
|
||||
let reqwest = reqwest.clone();
|
||||
let manifest = self.deser_manifest().await?;
|
||||
|
||||
Ok((
|
||||
rx,
|
||||
tokio::spawn(async move {
|
||||
let mut refreshed_sources = refreshed_sources.lock().await;
|
||||
if force {
|
||||
async fn remove_dir(dir: PathBuf) -> std::io::Result<()> {
|
||||
tracing::debug!("force deleting the `{}` folder", dir.display());
|
||||
|
||||
// step 1. download pesde dependencies
|
||||
let (mut pesde_rx, pesde_graph) = this
|
||||
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
|
||||
.instrument(tracing::debug_span!("download (pesde)"))
|
||||
.await?;
|
||||
match fs::remove_dir_all(dir).await {
|
||||
Ok(()) => Ok(()),
|
||||
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
|
||||
while let Some(result) = pesde_rx.recv().await {
|
||||
tx.send(result).await.unwrap();
|
||||
}
|
||||
let mut tasks = all_packages_dirs()
|
||||
.into_iter()
|
||||
.map(|folder| remove_dir(self.package_dir().join(&folder)))
|
||||
.chain(std::iter::once(remove_dir(
|
||||
self.package_dir().join(SCRIPTS_LINK_FOLDER),
|
||||
)))
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
let pesde_graph = Arc::into_inner(pesde_graph).unwrap().into_inner().unwrap();
|
||||
while let Some(task) = tasks.join_next().await {
|
||||
task.unwrap()?;
|
||||
}
|
||||
}
|
||||
|
||||
// step 2. link pesde dependencies. do so without types
|
||||
if write {
|
||||
this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
|
||||
.instrument(tracing::debug_span!("link (pesde)"))
|
||||
.await?;
|
||||
}
|
||||
// step 1. download dependencies
|
||||
let graph_to_download = {
|
||||
let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
|
||||
.refreshed_sources(refreshed_sources.clone())
|
||||
.network_concurrency(network_concurrency);
|
||||
|
||||
let pesde_graph = Arc::new(pesde_graph);
|
||||
if let Some(reporter) = reporter.clone() {
|
||||
download_graph_options = download_graph_options.reporter(reporter);
|
||||
}
|
||||
|
||||
pesde_cb(&pesde_graph)
|
||||
.await
|
||||
.map_err(errors::DownloadAndLinkError::PesdeCallback)?;
|
||||
let mut downloaded_graph = DependencyGraph::new();
|
||||
|
||||
let pesde_graph = Arc::into_inner(pesde_graph).unwrap();
|
||||
let graph_to_download = if force {
|
||||
graph.clone()
|
||||
} else {
|
||||
let mut tasks = graph
|
||||
.iter()
|
||||
.map(|(id, node)| {
|
||||
let id = id.clone();
|
||||
let node = node.clone();
|
||||
let container_folder =
|
||||
node.container_folder_from_project(&id, self, manifest.target.kind());
|
||||
|
||||
// step 3. download wally dependencies
|
||||
let (mut wally_rx, wally_graph) = this
|
||||
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
|
||||
.instrument(tracing::debug_span!("download (wally)"))
|
||||
.await?;
|
||||
async move {
|
||||
return (id, node, fs::metadata(&container_folder).await.is_ok());
|
||||
}
|
||||
})
|
||||
.collect::<JoinSet<_>>();
|
||||
|
||||
while let Some(result) = wally_rx.recv().await {
|
||||
tx.send(result).await.unwrap();
|
||||
}
|
||||
let mut graph_to_download = DependencyGraph::new();
|
||||
while let Some(task) = tasks.join_next().await {
|
||||
let (id, node, installed) = task.unwrap();
|
||||
if installed {
|
||||
downloaded_graph.insert(id, node);
|
||||
continue;
|
||||
}
|
||||
|
||||
let wally_graph = Arc::into_inner(wally_graph).unwrap().into_inner().unwrap();
|
||||
graph_to_download.insert(id, node);
|
||||
}
|
||||
|
||||
{
|
||||
let mut downloaded_graph = downloaded_graph.lock().unwrap();
|
||||
downloaded_graph.extend(pesde_graph);
|
||||
for (name, versions) in wally_graph {
|
||||
for (version_id, node) in versions {
|
||||
downloaded_graph
|
||||
.entry(name.clone())
|
||||
.or_default()
|
||||
.insert(version_id, node);
|
||||
}
|
||||
}
|
||||
}
|
||||
Arc::new(graph_to_download)
|
||||
};
|
||||
|
||||
let graph = Arc::into_inner(downloaded_graph)
|
||||
.unwrap()
|
||||
.into_inner()
|
||||
.unwrap();
|
||||
let downloaded = self
|
||||
.download_graph(&graph_to_download, download_graph_options.clone())
|
||||
.instrument(tracing::debug_span!("download"))
|
||||
.await?;
|
||||
pin!(downloaded);
|
||||
|
||||
// step 4. link ALL dependencies. do so with types
|
||||
if write {
|
||||
this.link_dependencies(&filter_graph(&graph, prod), true)
|
||||
.instrument(tracing::debug_span!("link (all)"))
|
||||
.await?;
|
||||
}
|
||||
let mut tasks = JoinSet::new();
|
||||
|
||||
Ok(graph)
|
||||
})
|
||||
.map(|r| r.unwrap()),
|
||||
))
|
||||
}
|
||||
while let Some((id, node, fs)) = downloaded.try_next().await? {
|
||||
let container_folder =
|
||||
node.container_folder_from_project(&id, self, manifest.target.kind());
|
||||
|
||||
downloaded_graph.insert(id, node);
|
||||
|
||||
let cas_dir = self.cas_dir().to_path_buf();
|
||||
tasks.spawn(async move {
|
||||
fs::create_dir_all(&container_folder).await?;
|
||||
fs.write_to(container_folder, cas_dir, true).await
|
||||
});
|
||||
}
|
||||
|
||||
while let Some(task) = tasks.join_next().await {
|
||||
task.unwrap()?;
|
||||
}
|
||||
|
||||
downloaded_graph
|
||||
};

		let (wally_graph_to_download, other_graph_to_download) =
			graph_to_download
				.into_iter()
				.partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());

		let mut graph = Arc::new(DependencyGraphWithTarget::new());

		async fn get_graph_targets<Hooks: DownloadAndLinkHooks>(
			graph: &mut Arc<DependencyGraphWithTarget>,
			project: &Project,
			manifest_target_kind: TargetKind,
			downloaded_graph: HashMap<PackageId, DependencyGraphNode>,
		) -> Result<(), errors::DownloadAndLinkError<Hooks::Error>> {
			let mut tasks = downloaded_graph
				.into_iter()
				.map(|(id, node)| {
					let source = node.pkg_ref.source();
					let path = Arc::from(
						node.container_folder_from_project(&id, project, manifest_target_kind)
							.as_path(),
					);
					let id = Arc::new(id);
					let project = project.clone();

					async move {
						let target = source
							.get_target(
								&node.pkg_ref,
								&GetTargetOptions {
									project,
									path,
									id: id.clone(),
								},
							)
							.await?;

						Ok::<_, errors::DownloadAndLinkError<Hooks::Error>>((
							Arc::into_inner(id).unwrap(),
							DependencyGraphNodeWithTarget { target, node },
						))
					}
				})
				.collect::<JoinSet<_>>();

			while let Some(task) = tasks.join_next().await {
				let (id, node) = task.unwrap()?;
				Arc::get_mut(graph).unwrap().insert(id, node);
			}

			Ok(())
		}

		// step 2. get targets for non Wally packages (Wally packages require the scripts packages to be downloaded first)
		get_graph_targets::<Hooks>(
			&mut graph,
			self,
			manifest.target.kind(),
			other_graph_to_download,
		)
		.instrument(tracing::debug_span!("get targets (non-wally)"))
		.await?;

		self.link_dependencies(graph.clone(), false)
			.instrument(tracing::debug_span!("link (non-wally)"))
			.await?;

		if let Some(hooks) = &hooks {
			hooks
				.on_scripts_downloaded(&graph)
				.await
				.map_err(errors::DownloadAndLinkError::Hook)?;

			hooks
				.on_bins_downloaded(&graph)
				.await
				.map_err(errors::DownloadAndLinkError::Hook)?;
		}

		// step 3. get targets for Wally packages
		get_graph_targets::<Hooks>(
			&mut graph,
			self,
			manifest.target.kind(),
			wally_graph_to_download,
		)
		.instrument(tracing::debug_span!("get targets (wally)"))
		.await?;

		#[cfg(feature = "patches")]
		{
			use crate::patches::apply_patch;
			let mut tasks = manifest
				.patches
				.iter()
				.flat_map(|(name, versions)| {
					versions
						.iter()
						.map(|(v_id, path)| (PackageId::new(name.clone(), v_id.clone()), path))
				})
				.filter_map(|(id, patch_path)| graph.get(&id).map(|node| (id, node, patch_path)))
				.map(|(id, node, patch_path)| {
					let patch_path = patch_path.to_path(self.package_dir());
					let container_folder =
						node.node
							.container_folder_from_project(&id, self, manifest.target.kind());
					let reporter = reporter.clone();

					async move {
						match reporter {
							Some(reporter) => {
								apply_patch(&id, container_folder, &patch_path, reporter.clone())
									.await
							}
							None => {
								apply_patch(&id, container_folder, &patch_path, Arc::new(())).await
							}
						}
					}
				})
				.collect::<JoinSet<_>>();

			while let Some(task) = tasks.join_next().await {
				task.unwrap()?;
			}
		}

		// step 4. link ALL dependencies. do so with types
		self.link_dependencies(graph.clone(), true)
			.instrument(tracing::debug_span!("link (all)"))
			.await?;

		if let Some(hooks) = &hooks {
			hooks
				.on_all_downloaded(&graph)
				.await
				.map_err(errors::DownloadAndLinkError::Hook)?;
		}

		let mut graph = Arc::into_inner(graph).unwrap();

		if prod {
			graph.retain(|_, node| node.node.resolved_ty != DependencyType::Dev);
		}

		if prod || !force {
			self.remove_unused(&graph).await?;
		}

		Ok(graph)
	}
}
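
// A minimal sketch of a no-op `DownloadAndLinkHooks` implementation, assuming each
// hook mirrors how it is called above (it receives the finished graph and returns
// `Result<(), Self::Error>`; the exact signatures live in the trait's definition):
//
// struct NoopHooks;
//
// impl DownloadAndLinkHooks for NoopHooks {
//     type Error = std::convert::Infallible;
//
//     async fn on_scripts_downloaded(
//         &self,
//         _graph: &DependencyGraphWithTarget,
//     ) -> Result<(), Self::Error> {
//         Ok(())
//     }
//
//     // `on_bins_downloaded` and `on_all_downloaded` would be stubbed identically.
// }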

/// Errors that can occur when downloading and linking dependencies
pub mod errors {
	use thiserror::Error;

	/// An error that can occur when downloading and linking dependencies
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum DownloadAndLinkError<E> {
		/// Reading the manifest failed
		#[error("error reading manifest")]
		ManifestRead(#[from] crate::errors::ManifestReadError),

		/// An error occurred while downloading the graph
		#[error("error downloading graph")]
		DownloadGraph(#[from] crate::download::errors::DownloadGraphError),

		/// An error occurred while linking dependencies
		#[error("error linking dependencies")]
		Linking(#[from] crate::linking::errors::LinkingError),

		/// An error occurred while executing a hook
		#[error("error executing hook")]
		Hook(#[source] E),

		/// IO error
		#[error("io error")]
		Io(#[from] std::io::Error),

		/// Error getting a target
		#[error("error getting target")]
		GetTarget(#[from] crate::source::errors::GetTargetError),

		/// Removing unused dependencies failed
		#[error("error removing unused dependencies")]
		RemoveUnused(#[from] crate::linking::incremental::errors::RemoveUnusedError),

		/// Patching a package failed
		#[cfg(feature = "patches")]
		#[error("error applying patch")]
		Patch(#[from] crate::patches::errors::ApplyPatchError),
	}
}

src/engine/mod.rs (new file, 62 lines)
@@ -0,0 +1,62 @@
/// Sources of engines
pub mod source;

use crate::{engine::source::EngineSources, ser_display_deser_fromstr};
use std::{fmt::Display, str::FromStr};

/// All supported engines
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[cfg_attr(test, schemars(rename_all = "snake_case"))]
pub enum EngineKind {
	/// The pesde package manager
	Pesde,
	/// The Lune runtime
	Lune,
}
ser_display_deser_fromstr!(EngineKind);

impl Display for EngineKind {
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		match self {
			EngineKind::Pesde => write!(f, "pesde"),
			EngineKind::Lune => write!(f, "lune"),
		}
	}
}

impl FromStr for EngineKind {
	type Err = errors::EngineKindFromStrError;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		match s.to_lowercase().as_str() {
			"pesde" => Ok(EngineKind::Pesde),
			"lune" => Ok(EngineKind::Lune),
			_ => Err(errors::EngineKindFromStrError::Unknown(s.to_string())),
		}
	}
}

impl EngineKind {
	/// Returns the source to get this engine from
	#[must_use]
	pub fn source(self) -> EngineSources {
		match self {
			EngineKind::Pesde => EngineSources::pesde(),
			EngineKind::Lune => EngineSources::lune(),
		}
	}
}

/// Errors related to engine kinds
pub mod errors {
	use thiserror::Error;

	/// Errors which can occur while using the FromStr implementation of EngineKind
	#[derive(Debug, Error)]
	pub enum EngineKindFromStrError {
		/// The string isn't a recognized EngineKind
		#[error("unknown engine kind {0}")]
		Unknown(String),
	}
}
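
// Round-trip sketch: `FromStr` is case-insensitive (via `to_lowercase`), `Display`
// is lowercase, and `ser_display_deser_fromstr!` wires both into serde:
//
// let kind: EngineKind = "Lune".parse().unwrap();
// assert_eq!(kind, EngineKind::Lune);
// assert_eq!(kind.to_string(), "lune");
// let _source = kind.source(); // EngineSources::lune()
// assert!("luau".parse::<EngineKind>().is_err()); // EngineKindFromStrError::Unknown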

src/engine/source/archive.rs (new file, 320 lines)
@@ -0,0 +1,320 @@
use futures::StreamExt as _;
use std::{
	collections::BTreeSet,
	mem::ManuallyDrop,
	path::{Path, PathBuf},
	pin::Pin,
	str::FromStr,
	task::{Context, Poll},
};
use tokio::{
	io::{AsyncBufRead, AsyncRead, AsyncReadExt as _, ReadBuf},
	pin,
};
use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt as _};

/// The kind of encoding used for the archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EncodingKind {
	/// Gzip
	Gzip,
}

/// The kind of archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ArchiveKind {
	/// Tar
	Tar,
	/// Zip
	Zip,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct ArchiveInfo(ArchiveKind, Option<EncodingKind>);

impl FromStr for ArchiveInfo {
	type Err = errors::ArchiveInfoFromStrError;

	fn from_str(s: &str) -> Result<Self, Self::Err> {
		let parts = s.split('.').collect::<Vec<_>>();

		Ok(match &*parts {
			[.., "tar", "gz"] => ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)),
			[.., "tar"] => ArchiveInfo(ArchiveKind::Tar, None),
			[.., "zip", "gz"] => {
				return Err(errors::ArchiveInfoFromStrError::Unsupported(
					ArchiveKind::Zip,
					Some(EncodingKind::Gzip),
				))
			}
			[.., "zip"] => ArchiveInfo(ArchiveKind::Zip, None),
			_ => return Err(errors::ArchiveInfoFromStrError::Invalid(s.to_string())),
		})
	}
}
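
// How descriptors map onto `ArchiveInfo`, following the match arms above
// (file names are illustrative):
//
// "lune-0.8.9-linux-x86_64.zip".parse() => Ok(ArchiveInfo(ArchiveKind::Zip, None))
// "pesde-0.6.1.tar.gz".parse()          => Ok(ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)))
// "pesde-0.6.1.tar".parse()             => Ok(ArchiveInfo(ArchiveKind::Tar, None))
// "archive.zip.gz".parse()              => Err(Unsupported(Zip, Some(Gzip)))
// "not-an-archive".parse()              => Err(Invalid(..))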

pub(crate) type ArchiveReader = Pin<Box<dyn AsyncBufRead + Send>>;

/// An archive
pub struct Archive {
	pub(crate) info: ArchiveInfo,
	pub(crate) reader: ArchiveReader,
}

enum TarReader {
	Gzip(async_compression::tokio::bufread::GzipDecoder<ArchiveReader>),
	Plain(ArchiveReader),
}

// TODO: try to see if we can avoid the unsafe blocks

impl AsyncRead for TarReader {
	fn poll_read(
		self: Pin<&mut Self>,
		cx: &mut Context<'_>,
		buf: &mut ReadBuf<'_>,
	) -> Poll<std::io::Result<()>> {
		unsafe {
			match self.get_unchecked_mut() {
				Self::Gzip(r) => Pin::new_unchecked(r).poll_read(cx, buf),
				Self::Plain(r) => Pin::new_unchecked(r).poll_read(cx, buf),
			}
		}
	}
}

enum ArchiveEntryInner {
	Tar(Box<tokio_tar::Entry<tokio_tar::Archive<TarReader>>>),
	Zip {
		archive: *mut async_zip::tokio::read::seek::ZipFileReader<std::io::Cursor<Vec<u8>>>,
		reader: ManuallyDrop<
			Compat<
				async_zip::tokio::read::ZipEntryReader<
					'static,
					std::io::Cursor<Vec<u8>>,
					async_zip::base::read::WithoutEntry,
				>,
			>,
		>,
	},
}

impl Drop for ArchiveEntryInner {
	fn drop(&mut self) {
		match self {
			Self::Tar(_) => {}
			Self::Zip { archive, reader } => unsafe {
				ManuallyDrop::drop(reader);
				drop(Box::from_raw(*archive));
			},
		}
	}
}

/// An entry in an archive. Usually the executable
pub struct ArchiveEntry(ArchiveEntryInner);

impl AsyncRead for ArchiveEntry {
	fn poll_read(
		self: Pin<&mut Self>,
		cx: &mut Context<'_>,
		buf: &mut ReadBuf<'_>,
	) -> Poll<std::io::Result<()>> {
		unsafe {
			match &mut self.get_unchecked_mut().0 {
				ArchiveEntryInner::Tar(r) => Pin::new_unchecked(r).poll_read(cx, buf),
				ArchiveEntryInner::Zip { reader, .. } => {
					Pin::new_unchecked(&mut **reader).poll_read(cx, buf)
				}
			}
		}
	}
}

impl Archive {
	/// Finds the executable in the archive and returns it as an [`ArchiveEntry`]
	pub async fn find_executable(
		self,
		expected_file_name: &str,
	) -> Result<ArchiveEntry, errors::FindExecutableError> {
		#[derive(Debug, PartialEq, Eq)]
		struct Candidate {
			path: PathBuf,
			file_name_matches: bool,
			extension_matches: bool,
			has_permissions: bool,
		}

		impl Candidate {
			fn new(path: PathBuf, perms: u32, expected_file_name: &str) -> Self {
				Self {
					file_name_matches: path
						.file_name()
						.is_some_and(|name| name == expected_file_name),
					extension_matches: match path.extension() {
						Some(ext) if ext == std::env::consts::EXE_EXTENSION => true,
						None if std::env::consts::EXE_EXTENSION.is_empty() => true,
						_ => false,
					},
					path,
					has_permissions: perms & 0o111 != 0,
				}
			}
			fn should_be_considered(&self) -> bool {
				// if nothing matches, we should not consider this candidate as it is most likely not the executable we are looking for
				self.file_name_matches || self.extension_matches || self.has_permissions
			}
		}

		impl Ord for Candidate {
			fn cmp(&self, other: &Self) -> std::cmp::Ordering {
				self.file_name_matches
					.cmp(&other.file_name_matches)
					.then(self.extension_matches.cmp(&other.extension_matches))
					.then(self.has_permissions.cmp(&other.has_permissions))
			}
		}

		impl PartialOrd for Candidate {
			fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
				Some(self.cmp(other))
			}
		}

		let mut candidates = BTreeSet::new();

		match self.info {
			ArchiveInfo(ArchiveKind::Tar, encoding) => {
				use async_compression::tokio::bufread as decoders;

				let reader = match encoding {
					Some(EncodingKind::Gzip) => {
						TarReader::Gzip(decoders::GzipDecoder::new(self.reader))
					}
					None => TarReader::Plain(self.reader),
				};

				let mut archive = tokio_tar::Archive::new(reader);
				let mut entries = archive.entries()?;

				while let Some(entry) = entries.next().await.transpose()? {
					if entry.header().entry_type().is_dir() {
						continue;
					}

					let candidate = Candidate::new(
						entry.path()?.to_path_buf(),
						entry.header().mode()?,
						expected_file_name,
					);
					if candidate.should_be_considered() {
						candidates.insert(candidate);
					}
				}

				let Some(candidate) = candidates.pop_last() else {
					return Err(errors::FindExecutableError::ExecutableNotFound);
				};
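
				// The first pass only ranked candidates; iterate the entries
				// again to hand back the entry whose path matches the winner.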
				let mut entries = archive.entries()?;

				while let Some(entry) = entries.next().await.transpose()? {
					if entry.header().entry_type().is_dir() {
						continue;
					}

					let path = entry.path()?;
					if path == candidate.path {
						return Ok(ArchiveEntry(ArchiveEntryInner::Tar(Box::new(entry))));
					}
				}
			}
			ArchiveInfo(ArchiveKind::Zip, _) => {
				let reader = self.reader;
				pin!(reader);

				// TODO: would be lovely to not have to read the whole archive into memory
				let mut buf = vec![];
				reader.read_to_end(&mut buf).await?;

				let archive = async_zip::base::read::seek::ZipFileReader::with_tokio(
					std::io::Cursor::new(buf),
				)
				.await?;
				for entry in archive.file().entries() {
					if entry.dir()? {
						continue;
					}

					let path: &Path = entry.filename().as_str()?.as_ref();
					let candidate = Candidate::new(
						path.to_path_buf(),
						entry.unix_permissions().unwrap_or(0) as u32,
						expected_file_name,
					);
					if candidate.should_be_considered() {
						candidates.insert(candidate);
					}
				}

				let Some(candidate) = candidates.pop_last() else {
					return Err(errors::FindExecutableError::ExecutableNotFound);
				};

				for (i, entry) in archive.file().entries().iter().enumerate() {
					if entry.dir()? {
						continue;
					}

					let path: &Path = entry.filename().as_str()?.as_ref();
					if candidate.path == path {
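						// The entry reader borrows from the archive, so the archive
						// is leaked behind a raw pointer and stored next to the
						// reader in `ArchiveEntryInner::Zip`; its `Drop` impl
						// releases both in the right order.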
						let ptr = Box::into_raw(Box::new(archive));
						let reader = (unsafe { &mut *ptr }).reader_without_entry(i).await?;
						return Ok(ArchiveEntry(ArchiveEntryInner::Zip {
							archive: ptr,
							reader: ManuallyDrop::new(reader.compat()),
						}));
					}
				}
			}
		}

		Err(errors::FindExecutableError::ExecutableNotFound)
	}
}

/// Errors that can occur when working with archives
pub mod errors {
	use thiserror::Error;

	/// Errors that can occur when parsing archive info
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum ArchiveInfoFromStrError {
		/// The string is not a valid archive descriptor. E.g. `{name}.tar.gz`
		#[error("string `{0}` is not a valid archive descriptor")]
		Invalid(String),

		/// The archive type is not supported. E.g. `{name}.zip.gz`
		#[error("archive type {0:?} with encoding {1:?} is not supported")]
		Unsupported(super::ArchiveKind, Option<super::EncodingKind>),
	}

	/// Errors that can occur when finding an executable in an archive
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum FindExecutableError {
		/// The executable was not found in the archive
		#[error("failed to find executable in archive")]
		ExecutableNotFound,

		/// An IO error occurred
		#[error("IO error")]
		Io(#[from] std::io::Error),

		/// An error occurred reading the zip archive
		#[error("failed to read zip archive")]
		Zip(#[from] async_zip::error::ZipError),
	}
}

src/engine/source/github/engine_ref.rs (new file, 19 lines)
@@ -0,0 +1,19 @@
use serde::Deserialize;

/// A GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Release {
	/// The tag name of the release
	pub tag_name: String,
	/// The assets of the release
	pub assets: Vec<Asset>,
}

/// An asset of a GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Asset {
	/// The name of the asset
	pub name: String,
	/// The download URL of the asset
	pub url: url::Url,
}
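
// These types deserialize directly from the GitHub releases API payload. A small
// sketch, assuming `serde_json` is available (it is not declared in this file):
//
// let release: Release = serde_json::from_str(
//     r#"{
//         "tag_name": "v0.6.1",
//         "assets": [{
//             "name": "pesde-0.6.1-linux-x86_64.zip",
//             "url": "https://api.github.com/repos/pesde-pkg/pesde/releases/assets/1"
//         }]
//     }"#,
// ).unwrap();
// assert_eq!(release.tag_name, "v0.6.1");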

src/engine/source/github/mod.rs (new file, 146 lines)
@@ -0,0 +1,146 @@
/// The GitHub engine reference
pub mod engine_ref;

use crate::{
	engine::source::{
		archive::Archive,
		github::engine_ref::Release,
		traits::{DownloadOptions, EngineSource, ResolveOptions},
	},
	reporters::{response_to_async_read, DownloadProgressReporter},
	util::no_build_metadata,
	version_matches,
};
use reqwest::header::ACCEPT;
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};

/// The GitHub engine source
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct GitHubEngineSource {
	/// The owner of the repository to download from
	pub owner: String,
	/// The repository of which to download releases from
	pub repo: String,
	/// The template for the asset name. `{VERSION}` will be replaced with the version
	pub asset_template: String,
}

impl EngineSource for GitHubEngineSource {
	type Ref = Release;
	type ResolveError = errors::ResolveError;
	type DownloadError = errors::DownloadError;

	fn directory(&self) -> PathBuf {
		PathBuf::from("github").join(&self.owner).join(&self.repo)
	}

	fn expected_file_name(&self) -> &str {
		&self.repo
	}

	async fn resolve(
		&self,
		requirement: &VersionReq,
		options: &ResolveOptions,
	) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
		let ResolveOptions { reqwest, .. } = options;

		Ok(reqwest
			.get(format!(
				"https://api.github.com/repos/{}/{}/releases",
				urlencoding::encode(&self.owner),
				urlencoding::encode(&self.repo),
			))
			.send()
			.await?
			.error_for_status()?
			.json::<Vec<Release>>()
			.await?
			.into_iter()
			.filter_map(
				|release| match release.tag_name.trim_start_matches('v').parse() {
					Ok(version) if version_matches(requirement, &version) => {
						Some((version, release))
					}
					_ => None,
				},
			)
			.collect())
	}

	async fn download<R: DownloadProgressReporter + 'static>(
		&self,
		engine_ref: &Self::Ref,
		options: &DownloadOptions<R>,
	) -> Result<Archive, Self::DownloadError> {
		let DownloadOptions {
			reqwest,
			reporter,
			version,
			..
		} = options;

		let desired_asset_names = [
			self.asset_template
				.replace("{VERSION}", &version.to_string()),
			self.asset_template
				.replace("{VERSION}", &no_build_metadata(version).to_string()),
		];
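
		// With an `asset_template` of "pesde-{VERSION}-linux-x86_64.zip" and a
		// version of `0.6.1+registry.0.2.0`, this tries both the full version
		// string and, via `no_build_metadata` (which presumably strips the `+...`
		// build metadata), the plain "pesde-0.6.1-linux-x86_64.zip" name.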

		let asset = engine_ref
			.assets
			.iter()
			.find(|asset| {
				desired_asset_names
					.iter()
					.any(|name| asset.name.eq_ignore_ascii_case(name))
			})
			.ok_or(errors::DownloadError::AssetNotFound)?;

		reporter.report_start();

		let response = reqwest
			.get(asset.url.clone())
			.header(ACCEPT, "application/octet-stream")
			.send()
			.await?
			.error_for_status()?;

		Ok(Archive {
			info: asset.name.parse()?,
			reader: Box::pin(response_to_async_read(response, reporter.clone())),
		})
	}
}

/// Errors that can occur when working with the GitHub engine source
pub mod errors {
	use thiserror::Error;

	/// Errors that can occur when resolving a GitHub engine
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum ResolveError {
		/// Handling the request failed
		#[error("failed to handle GitHub API request")]
		Request(#[from] reqwest::Error),
	}

	/// Errors that can occur when downloading a GitHub engine
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum DownloadError {
		/// An asset for the current platform could not be found
		#[error("failed to find asset for current platform")]
		AssetNotFound,

		/// Handling the request failed
		#[error("failed to handle GitHub API request")]
		Request(#[from] reqwest::Error),

		/// The asset's name could not be parsed
		#[error("failed to parse asset name")]
		ParseAssetName(#[from] crate::engine::source::archive::errors::ArchiveInfoFromStrError),
	}
}

src/engine/source/mod.rs (new file, 145 lines)
@@ -0,0 +1,145 @@
use crate::{
	engine::source::{
		archive::Archive,
		traits::{DownloadOptions, EngineSource, ResolveOptions},
	},
	reporters::DownloadProgressReporter,
};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};

/// Archives
pub mod archive;
/// The GitHub engine source
pub mod github;
/// Traits for engine sources
pub mod traits;

/// Engine references
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineRefs {
	/// A GitHub engine reference
	GitHub(github::engine_ref::Release),
}

/// Engine sources
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineSources {
	/// A GitHub engine source
	GitHub(github::GitHubEngineSource),
}

impl EngineSource for EngineSources {
	type Ref = EngineRefs;
	type ResolveError = errors::ResolveError;
	type DownloadError = errors::DownloadError;

	fn directory(&self) -> PathBuf {
		match self {
			EngineSources::GitHub(source) => source.directory(),
		}
	}

	fn expected_file_name(&self) -> &str {
		match self {
			EngineSources::GitHub(source) => source.expected_file_name(),
		}
	}

	async fn resolve(
		&self,
		requirement: &VersionReq,
		options: &ResolveOptions,
	) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
		match self {
			EngineSources::GitHub(source) => source
				.resolve(requirement, options)
				.await
				.map(|map| {
					map.into_iter()
						.map(|(version, release)| (version, EngineRefs::GitHub(release)))
						.collect()
				})
				.map_err(Into::into),
		}
	}

	async fn download<R: DownloadProgressReporter + 'static>(
		&self,
		engine_ref: &Self::Ref,
		options: &DownloadOptions<R>,
	) -> Result<Archive, Self::DownloadError> {
		match (self, engine_ref) {
			(EngineSources::GitHub(source), EngineRefs::GitHub(release)) => {
				source.download(release, options).await.map_err(Into::into)
			}

			// for the future
			#[allow(unreachable_patterns)]
			_ => Err(errors::DownloadError::Mismatch),
		}
	}
}

impl EngineSources {
	/// Returns the source for the pesde engine
	#[must_use]
	pub fn pesde() -> Self {
		let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
		let (owner, repo) = (
			parts.next().unwrap().to_string(),
			parts.next().unwrap().to_string(),
		);
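
		// CARGO_PKG_REPOSITORY is e.g. "https://github.com/pesde-pkg/pesde";
		// skipping the first three '/'-separated parts ("https:", "", "github.com")
		// leaves exactly the owner ("pesde-pkg") and the repo ("pesde").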

		EngineSources::GitHub(github::GitHubEngineSource {
			owner,
			repo,
			asset_template: format!(
				"pesde-{{VERSION}}-{}-{}.zip",
				std::env::consts::OS,
				std::env::consts::ARCH
			),
		})
	}

	/// Returns the source for the lune engine
	#[must_use]
	pub fn lune() -> Self {
		EngineSources::GitHub(github::GitHubEngineSource {
			owner: "lune-org".into(),
			repo: "lune".into(),
			asset_template: format!(
				"lune-{{VERSION}}-{}-{}.zip",
				std::env::consts::OS,
				std::env::consts::ARCH
			),
		})
	}
}

/// Errors that can occur when working with engine sources
pub mod errors {
	use thiserror::Error;

	/// Errors that can occur when resolving an engine
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum ResolveError {
		/// Failed to resolve the GitHub engine
		#[error("failed to resolve github engine")]
		GitHub(#[from] super::github::errors::ResolveError),
	}

	/// Errors that can occur when downloading an engine
	#[derive(Debug, Error)]
	#[non_exhaustive]
	pub enum DownloadError {
		/// Failed to download the GitHub engine
		#[error("failed to download github engine")]
		GitHub(#[from] super::github::errors::DownloadError),

		/// Mismatched engine reference
		#[error("mismatched engine reference")]
		Mismatch,
	}
}

src/engine/source/traits.rs (new file, 51 lines)
@@ -0,0 +1,51 @@
use crate::{engine::source::archive::Archive, reporters::DownloadProgressReporter};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, fmt::Debug, future::Future, path::PathBuf, sync::Arc};

/// Options for resolving an engine
#[derive(Debug, Clone)]
pub struct ResolveOptions {
	/// The reqwest client to use
	pub reqwest: reqwest::Client,
}

/// Options for downloading an engine
#[derive(Debug, Clone)]
pub struct DownloadOptions<R: DownloadProgressReporter> {
	/// The reqwest client to use
	pub reqwest: reqwest::Client,
	/// The reporter to use
	pub reporter: Arc<R>,
	/// The version of the engine to be downloaded
	pub version: Version,
}

/// A source of engines
pub trait EngineSource: Debug {
	/// The reference type for this source
	type Ref;
	/// The error type for resolving an engine from this source
	type ResolveError: std::error::Error + Send + Sync + 'static;
	/// The error type for downloading an engine from this source
	type DownloadError: std::error::Error + Send + Sync + 'static;

	/// Returns the folder to store the engine's versions in
	fn directory(&self) -> PathBuf;

	/// Returns the expected file name of the engine in the archive
	fn expected_file_name(&self) -> &str;

	/// Resolves a requirement to a reference
	fn resolve(
		&self,
		requirement: &VersionReq,
		options: &ResolveOptions,
	) -> impl Future<Output = Result<BTreeMap<Version, Self::Ref>, Self::ResolveError>> + Send + Sync;

	/// Downloads an engine
	fn download<R: DownloadProgressReporter + 'static>(
		&self,
		engine_ref: &Self::Ref,
		options: &DownloadOptions<R>,
	) -> impl Future<Output = Result<Archive, Self::DownloadError>> + Send + Sync;
}
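
// A sketch of how an `EngineSource` is meant to be driven, assuming a no-op
// `DownloadProgressReporter` named `NoopReporter` exists (hypothetical; substitute
// whatever reporter the caller already has):
//
// async fn install_latest(
//     source: &EngineSources,
//     req: &VersionReq,
//     reqwest: reqwest::Client,
// ) -> Result<(), Box<dyn std::error::Error>> {
//     // 1. resolve the requirement to every matching version
//     let versions = source
//         .resolve(req, &ResolveOptions { reqwest: reqwest.clone() })
//         .await?;
//     // 2. BTreeMap is ordered, so the last entry is the newest matching version
//     let (version, engine_ref) = versions.into_iter().next_back().ok_or("no match")?;
//     // 3. download the release archive and pull the executable out of it
//     let archive = source
//         .download(&engine_ref, &DownloadOptions {
//             reqwest,
//             reporter: std::sync::Arc::new(NoopReporter), // hypothetical
//             version,
//         })
//         .await?;
//     let _exe = archive.find_executable(source.expected_file_name()).await?;
//     Ok(())
// }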

Some files were not shown because too many files have changed in this diff.