Compare commits


No commits in common. "v0.6.0-rc.6+registry.0.2.0-rc.2" and "0.5" have entirely different histories.

154 changed files with 11094 additions and 16212 deletions


@@ -1,2 +1,2 @@
PUBLIC_REGISTRY_URL= # url of the registry API, this must have a trailing slash and include the version
# example: https://registry.pesde.daimond113.com/v1/
# example: https://registry.pesde.daimond113.com/v0/


@@ -1,3 +0,0 @@
# .git-blame-ignore-revs
# Enabled the `hard_tabs` option in rustfmt.toml
0ceb2f6653b12e8261533ef528d78e3dde7ed757

.github/FUNDING.yml

@@ -1 +1,2 @@
buy_me_a_coffee: daimond113
ko_fi: daimond113


@@ -40,11 +40,6 @@ jobs:
runs-on: ubuntu-latest
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-x86_64
- job-name: linux-aarch64
target: aarch64-unknown-linux-gnu
runs-on: ubuntu-24.04-arm
artifact-name: pesde-debug-${{ needs.get-version.outputs.version }}-linux-aarch64
- job-name: macos-x86_64
target: x86_64-apple-darwin
runs-on: macos-13
@@ -63,7 +58,7 @@ jobs:
uses: actions/checkout@v4
- name: Install Linux build dependencies
if: ${{ startsWith(matrix.runs-on, 'ubuntu') }}
if: ${{ matrix.runs-on == 'ubuntu-latest' }}
run: |
sudo apt-get update
sudo apt-get install libdbus-1-dev pkg-config


@@ -51,11 +51,6 @@ jobs:
arch: x86_64
target: x86_64-unknown-linux-gnu
- os: ubuntu-24.04-arm
host: linux
arch: aarch64
target: aarch64-unknown-linux-gnu
- os: windows-latest
host: windows
arch: x86_64
@@ -101,9 +96,11 @@ jobs:
if [ ${{ matrix.host }} = "windows" ]; then
mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }}.exe ${{ env.BIN_NAME }}.exe
7z a ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}.exe
tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}.exe
else
mv target/${{ matrix.target }}/release/${{ env.BIN_NAME }} ${{ env.BIN_NAME }}
zip -r ${{ env.ARCHIVE_NAME }}.zip ${{ env.BIN_NAME }}
tar -czf ${{ env.ARCHIVE_NAME }}.tar.gz ${{ env.BIN_NAME }}
fi
- name: Upload zip artifact
@@ -112,6 +109,12 @@ jobs:
name: ${{ env.ARCHIVE_NAME }}.zip
path: ${{ env.ARCHIVE_NAME }}.zip
- name: Upload tar.gz artifact
uses: actions/upload-artifact@v4
with:
name: ${{ env.ARCHIVE_NAME }}.tar.gz
path: ${{ env.ARCHIVE_NAME }}.tar.gz
publish:
name: Publish to crates.io
runs-on: ubuntu-latest

.gitignore

@@ -5,5 +5,3 @@ cobertura.xml
tarpaulin-report.html
build_rs_cov.profraw
registry/data
data
manifest.schema.json


@@ -5,76 +5,6 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.6.0-rc.6] - 2025-02-10
### Fixed
- Fix double path long prefix issues on Windows by @daimond113
## [0.6.0-rc.5] - 2025-02-10
### Fixed
- Correct script linker require paths on Windows by @daimond113
- Improve patches in incremental installs by @daimond113
- Patches now include newly created files by @daimond113
### Changed
- Patches are now applied before type extraction to allow patches to modify types by @daimond113
## [0.6.0-rc.4] - 2025-02-08
### Fixed
- Refresh sources before reading package data to ensure the index is even cloned (remote changes to lockfile) by @daimond113
## [0.6.0-rc.3] - 2025-02-08
### Fixed
- Fix `self-upgrade` using the wrong path when doing a fresh download by @daimond113
- Fix types not being re-exported by @daimond113
## [0.6.0-rc.2] - 2025-02-07
### Fixed
- Colour deprecate output to match yank output by @daimond113
- Fix zbus panic on Linux by @daimond113
## [0.6.0-rc.1] - 2025-02-06
### Added
- Improve installation experience by @lukadev-0
- Support using aliases of own dependencies for overrides by @daimond113
- Support ignoring parse errors in Luau files by @daimond113
- Add path dependencies by @daimond113
- Inherit pesde-managed scripts from workspace root by @daimond113
- Allow using binaries from workspace root in member packages by @daimond113
- Add yanking & deprecating by @daimond113
- Add engines as a form of managing runtimes by @daimond113
- Modify existing installed packages instead of always reinstalling by @daimond113
- Add `cas prune` command to remove unused CAS files & packages by @daimond113
- Add `list` and `remove` commands to manage packages in the manifest by @daimond113
### Fixed
- Install dev packages in prod mode and remove them after use to allow them to be used in scripts by @daimond113
- Fix infinite loop in the resolver in packages depending on themselves by @daimond113
- Do Git operations inside spawn_blocking to avoid performance issues by @daimond113
- Scope CAS package indices to the source by @daimond113
- Do not copy `default.project.json` in workspace dependencies by @daimond113
### Changed
- Change handling of graphs to a flat structure by @daimond113
- Store dependency over downloaded graphs in the lockfile by @daimond113
- Improve linking process by @daimond113
- Use a proper url encoding library to ensure compatibility with all characters by @daimond113
- The `*` specifier now matches all versions, even prereleases by @daimond113
- Switch CLI dependencies to ones used by other dependencies to optimize the binary size by @daimond113
- Reorder the `help` command by @daimond113
- Ignore submodules instead of failing when using Git dependencies with submodules by @daimond113
- Exit with code 1 from invalid directory binary linkers by @daimond113
### Removed
- Remove old includes format compatibility by @daimond113
- Remove data redundancy for workspace package references by @daimond113
- Remove dependency checks from CLI in publish command in favor of registry checks by @daimond113
### Performance
- Use `Arc` for more efficient cloning of multiple structs by @daimond113
- Avoid cloning where possible by @daimond113
- Remove unnecessary mutex in Wally package download by @daimond113
- Lazily format error messages by @daimond113
## [0.5.3] - 2024-12-30
### Added
- Add meta field in index files to preserve compatibility with potential future changes by @daimond113
@@ -182,12 +112,6 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
- Asyncify dependency linking by @daimond113
- Use `exec` in Unix bin linking to reduce the number of processes by @daimond113
[0.6.0-rc.6]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.5%2Bregistry.0.2.0-rc.2..v0.6.0-rc.6%2Bregistry.0.2.0-rc.2
[0.6.0-rc.5]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.4%2Bregistry.0.2.0-rc.1..v0.6.0-rc.5%2Bregistry.0.2.0-rc.2
[0.6.0-rc.4]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.3%2Bregistry.0.2.0-rc.1..v0.6.0-rc.4%2Bregistry.0.2.0-rc.1
[0.6.0-rc.3]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.2%2Bregistry.0.2.0-rc.1..v0.6.0-rc.3%2Bregistry.0.2.0-rc.1
[0.6.0-rc.2]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.1%2Bregistry.0.2.0-rc.1..v0.6.0-rc.2%2Bregistry.0.2.0-rc.1
[0.6.0-rc.1]: https://github.com/daimond113/pesde/compare/v0.5.3%2Bregistry.0.1.2..v0.6.0-rc.1%2Bregistry.0.2.0-rc.1
[0.5.3]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.5.2]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.5.1]: https://github.com/daimond113/pesde/compare/v0.5.0%2Bregistry.0.1.0..v0.5.1%2Bregistry.0.1.0

Cargo.lock

File diff suppressed because it is too large.


@@ -1,6 +1,6 @@
[package]
name = "pesde"
version = "0.6.0-rc.6"
version = "0.5.3"
edition = "2021"
license = "MIT"
authors = ["daimond113 <contact@daimond113.com>"]
@@ -10,29 +10,28 @@ repository = "https://github.com/pesde-pkg/pesde"
include = ["src/**/*", "Cargo.toml", "Cargo.lock", "README.md", "LICENSE", "CHANGELOG.md"]
[features]
default = ["wally-compat", "patches"]
bin = [
"dep:clap",
"dep:dirs",
"dep:tracing-subscriber",
"reqwest/json",
"dep:indicatif",
"dep:tracing-indicatif",
"dep:inquire",
"dep:toml_edit",
"dep:console",
"dep:colored",
"dep:anyhow",
"dep:keyring",
"dep:open",
"dep:paste",
"dep:serde_json",
"dep:windows-registry",
"dep:windows",
"gix/worktree-mutation",
"dep:serde_json",
"dep:winreg",
"fs-err/expose_original_error",
"tokio/rt",
"tokio/rt-multi-thread",
"tokio/macros",
]
wally-compat = ["dep:serde_json"]
wally-compat = ["dep:async_zip", "dep:serde_json"]
patches = ["dep:git2"]
version-management = ["bin"]
@@ -45,54 +44,50 @@ required-features = ["bin"]
uninlined_format_args = "warn"
[dependencies]
serde = { version = "1.0.217", features = ["derive"] }
toml = "0.8.20"
gix = { version = "0.70.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.25", features = ["serde"] }
reqwest = { version = "0.12.12", default-features = false, features = ["rustls-tls", "stream", "json"] }
serde = { version = "1.0.216", features = ["derive"] }
toml = "0.8.19"
serde_with = "3.11.0"
gix = { version = "0.68.0", default-features = false, features = ["blocking-http-transport-reqwest-rust-tls", "revparse-regex", "credentials", "parallel"] }
semver = { version = "1.0.24", features = ["serde"] }
reqwest = { version = "0.12.9", default-features = false, features = ["rustls-tls"] }
tokio-tar = "0.3.1"
async-compression = { version = "0.4.18", features = ["tokio", "gzip"] }
pathdiff = "0.2.3"
relative-path = { version = "1.9.3", features = ["serde"] }
tracing = { version = "0.1.41", features = ["attributes"] }
thiserror = "2.0.11"
tokio = { version = "1.43.0", features = ["process", "macros"] }
thiserror = "2.0.7"
tokio = { version = "1.42.0", features = ["process"] }
tokio-util = "0.7.13"
async-stream = "0.3.6"
futures = "0.3.31"
full_moon = { version = "1.2.0", features = ["luau"] }
full_moon = { version = "1.1.2", features = ["luau"] }
url = { version = "2.5.4", features = ["serde"] }
jiff = { version = "0.1.29", default-features = false, features = ["serde", "std"] }
chrono = { version = "0.4.39", features = ["serde"] }
sha2 = "0.10.8"
tempfile = "3.16.0"
tempfile = "3.14.0"
wax = { version = "0.6.0", default-features = false }
fs-err = { version = "3.1.0", features = ["tokio"] }
urlencoding = "2.1.3"
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"] }
fs-err = { version = "3.0.0", features = ["tokio"] }
# TODO: remove this when gitoxide adds support for: committing, pushing, adding
git2 = { version = "0.20.0", optional = true }
git2 = { version = "0.19.0", optional = true }
serde_json = { version = "1.0.138", optional = true }
async_zip = { version = "0.0.17", features = ["tokio", "deflate", "deflate64", "tokio-fs"], optional = true }
serde_json = { version = "1.0.133", optional = true }
anyhow = { version = "1.0.95", optional = true }
open = { version = "5.3.2", optional = true }
keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "sync-secret-service"], optional = true }
console = { version = "0.15.10", optional = true }
toml_edit = { version = "0.22.23", optional = true }
clap = { version = "4.5.28", features = ["derive"], optional = true }
dirs = { version = "6.0.0", optional = true }
anyhow = { version = "1.0.94", optional = true }
open = { version = "5.3.1", optional = true }
keyring = { version = "3.6.1", features = ["crypto-rust", "windows-native", "apple-native", "async-secret-service", "async-io"], optional = true }
colored = { version = "2.1.0", optional = true }
toml_edit = { version = "0.22.22", optional = true }
clap = { version = "4.5.23", features = ["derive"], optional = true }
dirs = { version = "5.0.1", optional = true }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"], optional = true }
indicatif = { version = "0.17.11", optional = true }
inquire = { version = "0.7.5", default-features = false, features = ["console", "one-liners"], optional = true }
paste = { version = "1.0.15", optional = true }
indicatif = { version = "0.17.9", optional = true }
tracing-indicatif = { version = "0.3.8", optional = true }
inquire = { version = "0.7.5", optional = true }
[target.'cfg(target_os = "windows")'.dependencies]
windows-registry = { version = "0.4.0", optional = true }
windows = { version = "0.59.0", features = ["Win32_Storage", "Win32_Storage_FileSystem", "Win32_Security"], optional = true }
[dev-dependencies]
schemars = { git = "https://github.com/daimond113/schemars", rev = "bc7c7d6", features = ["semver1", "url2"] }
winreg = { version = "0.52.0", optional = true }
[workspace]
resolver = "2"
@@ -101,15 +96,11 @@ members = ["registry"]
[profile.dev.package.full_moon]
opt-level = 3
[profile.dev.package.miniz_oxide]
opt-level = 3
[profile.release]
opt-level = "s"
lto = true
incremental = true
codegen-units = 1
panic = "abort"
[profile.release.package.pesde-registry]
# add debug symbols for Sentry stack traces

Binary file not shown.


@@ -10,20 +10,20 @@
"astro": "astro"
},
"dependencies": {
"@astrojs/check": "0.9.4",
"@astrojs/starlight": "0.30.6",
"@astrojs/starlight-tailwind": "3.0.0",
"@astrojs/tailwind": "5.1.4",
"@fontsource-variable/nunito-sans": "^5.1.1",
"@shikijs/rehype": "^1.26.2",
"astro": "5.1.5",
"@astrojs/check": "^0.9.3",
"@astrojs/starlight": "^0.28.2",
"@astrojs/starlight-tailwind": "^2.0.3",
"@astrojs/tailwind": "^5.1.1",
"@fontsource-variable/nunito-sans": "^5.1.0",
"@shikijs/rehype": "^1.21.0",
"astro": "^4.15.9",
"sharp": "^0.33.5",
"shiki": "^1.26.2",
"tailwindcss": "^3.4.17",
"typescript": "^5.7.3"
"shiki": "^1.21.0",
"tailwindcss": "^3.4.13",
"typescript": "^5.6.2"
},
"devDependencies": {
"prettier-plugin-astro": "^0.14.1",
"prettier-plugin-tailwindcss": "^0.6.9"
"prettier-plugin-tailwindcss": "^0.6.8"
}
}


@@ -3,7 +3,12 @@
href="https://pesde.daimond113.com/"
class="flex text-[var(--sl-color-text-accent)] hover:opacity-80"
>
<svg viewBox="0 0 56 28" class="h-7" fill="none" xmlns="http://www.w3.org/2000/svg">
<svg
viewBox="0 0 56 28"
class="h-7"
fill="none"
xmlns="http://www.w3.org/2000/svg"
>
<title>pesde</title>
<path
d="M0 28V26.3156H2.25652V12.2361H0.0635639V10.5517H4.44947L4.48125 11.9819L3.78205 12.3315C4.41769 11.6746 5.16986 11.1661 6.03857 10.8059C6.92846 10.4245 7.82895 10.2338 8.74003 10.2338C9.863 10.2338 10.88 10.4775 11.7911 10.9648C12.7234 11.4522 13.4544 12.1726 13.9841 13.126C14.5349 14.0795 14.8104 15.2448 14.8104 16.6221C14.8104 18.0416 14.5138 19.26 13.9205 20.277C13.3272 21.2728 12.5327 22.0356 11.5368 22.5653C10.5622 23.095 9.5028 23.3598 8.35865 23.3598C7.72301 23.3598 7.11916 23.2751 6.54708 23.1056C5.99619 22.9361 5.50887 22.7242 5.08511 22.4699C4.66135 22.1945 4.34353 21.8873 4.13165 21.5483L4.60838 21.4529L4.5766 26.3156H7.02381V28H0ZM7.94549 21.6118C9.19558 21.6118 10.2444 21.2092 11.0919 20.4041C11.9394 19.5778 12.3632 18.3807 12.3632 16.8127C12.3632 15.2872 11.9606 14.1113 11.1555 13.2849C10.3503 12.4586 9.3333 12.0454 8.1044 12.0454C7.72301 12.0454 7.26747 12.1196 6.73777 12.2679C6.20807 12.395 5.67837 12.6069 5.14867 12.9035C4.61898 13.2002 4.17403 13.5922 3.81383 14.0795L4.5766 12.7446L4.60838 20.7219L3.8774 19.7367C4.42828 20.3299 5.06392 20.7961 5.78431 21.1351C6.5047 21.4529 7.2251 21.6118 7.94549 21.6118Z"
@@ -22,7 +27,8 @@
fill="currentColor"></path>
</svg>
</a>
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span>
<span class="-mt-px ml-2.5 mr-2 text-xl text-[var(--sl-color-gray-5)]">/</span
>
<a
class="font-medium text-[var(--sl-color-gray-2)] no-underline hover:opacity-80 md:text-lg"
href="/">docs</a


@@ -1,7 +1,6 @@
import { defineCollection } from "astro:content"
import { docsLoader } from "@astrojs/starlight/loaders"
import { docsSchema } from "@astrojs/starlight/schema"
export const collections = {
docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }),
docs: defineCollection({ schema: docsSchema() }),
}


@@ -42,9 +42,6 @@ hello
# Hello, pesde! (pesde/hello@1.0.0, lune)
```
Note that they are scoped to the nearest `pesde.toml` file. However, you can use
binaries of the workspace root from member packages.
## Making a binary package
To make a binary package you must use a target compatible with binary exports.
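As a sketch, such a target declaration in `pesde.toml` might look like the
following, assuming a Lune environment and a hypothetical `main.luau` entry
point:

```toml
# hypothetical pesde.toml excerpt declaring a binary export
[target]
environment = "lune"
bin = "main.luau"
```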


@@ -137,24 +137,6 @@ pesde add workspace:acme/bar
href="/guides/workspaces/"
/>
## Path Dependencies
Path dependencies are dependencies located anywhere accessible on the local file system.
They are useful for local development, but are forbidden in published packages.
The path must be absolute and point to a directory containing a `pesde.toml` file.
```toml title="pesde.toml"
[dependencies]
foo = { path = "/home/user/foo" }
```
You can also add a path dependency by running the following command:
```sh
pesde add path:/home/user/foo
```
## Peer Dependencies
Peer dependencies are dependencies that are not installed automatically when


@@ -32,29 +32,6 @@ foo = { name = "acme/foo", version = "^1.0.0" }
Now, when you run `pesde install`, `bar` 2.0.0 will be used instead of 1.0.0.
Overrides are also able to use aliases to share the specifier you use for your
own dependencies:
```toml title="pesde.toml"
[dependencies]
foo = { name = "acme/foo", version = "^1.0.0" }
bar = { name = "acme/bar", version = "^2.0.0" }
[overrides]
"foo>bar" = "bar"
```
This is the same as if you had written:
```toml title="pesde.toml"
[dependencies]
foo = { name = "acme/foo", version = "^1.0.0" }
bar = { name = "acme/bar", version = "^2.0.0" }
[overrides]
"foo>bar" = { name = "acme/bar", version = "^2.0.0" }
```
You can learn more about the syntax for dependency overrides in the
[reference](/reference/manifest#overrides).


@@ -91,13 +91,6 @@ For example, you may publish a package that can be used in both Roblox and
Luau environments by publishing two versions of the package, one for each
environment.
<Aside type="caution">
Packages for different targets but on the same version must have
the same description.
</Aside>
## Documentation
The `README.md` file in the root of the package will be displayed on the


@@ -1,56 +0,0 @@
---
title: Removing Packages
description: Learn how to remove packages from the registry.
---
pesde doesn't support removing packages from the registry. This is to ensure
that the registry remains a reliable source of packages for everyone. However,
pesde provides other mechanisms to handle packages that are no longer needed.
## Yanking
Yanking is limited to a specific version (and target) of a package. It is used
to mark a version as broken or deprecated. Yanked versions are unavailable
to download fresh, but they can still be installed if they are present in the
lockfile of a project.
To yank a package, you can use the `pesde yank` command:
```sh
pesde yank <PACKAGE>@<VERSION> <TARGET>
```
You can leave out the target if you want to yank all targets of the version:
```sh
pesde yank <PACKAGE>@<VERSION>
```
## Deprecating
On the other hand, deprecating a package is used to mark a package as deprecated
in the registry. This is useful when you want to discourage users from using
a package, but don't want to break existing projects that depend on it. Unlike
yanking, your package can still be installed fresh. However, when it
is installed, a warning will be shown to the user.
To deprecate a package, you can use the `pesde deprecate` command:
```sh
pesde deprecate <PACKAGE> [REASON]
```
You must provide a non-empty reason when deprecating a package. This is to
inform users why the package is deprecated. For example, if your package
has been replaced by another package, you can provide a reason like:
```sh
pesde deprecate acme/old-package "This package has been replaced by acme/new-package."
```
## Other Options
There are other situations in which you might want to remove a package from
the registry. Please refer to the policies of the registry you are using for
more information on how to handle these situations. The process for the official
registry is described [here](/registry/policies/#package-removal).


@@ -188,13 +188,10 @@ This will cause the `src` directory to be directly synced into Roblox.
In pesde, you should not have a `default.project.json` file in your package.
Instead, you are required to use the `build_files` field to specify a 1:1 match
between Roblox and the file system. These are given to the
`roblox_sync_config_generator` script to generate the configuration for the sync
tool the user is using. pesde forbids `default.project.json` to be part of a
published package, as well as ignoring them from Git dependencies. This allows
the consumer of your package to choose the sync tool they want to use, instead
of being constrained to only using Rojo as well as preventing broken packages
from being published (for example, if the project is configured as a DataModel).
between Roblox and the file system. pesde forbids `default.project.json` to be
part of a published package, and regenerates it when installing a pesde git
dependency. This allows the consumer of your package to choose the sync tool
they want to use, instead of being constrained to only using Rojo.
This has the effect that the structure of the files in the file system ends up
being reflected inside Roblox.
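For illustration, a manifest following this rule might declare something like
the following minimal sketch (the directory name is hypothetical):

```toml
# hypothetical pesde.toml excerpt: build_files lists the top-level
# entries synced 1:1 into Roblox by the user's chosen sync tool
[target]
environment = "roblox"
build_files = ["src"]
```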


@@ -23,7 +23,7 @@ the following content:
api = "https://registry.acme.local/"
# package download URL (optional)
download = "{API_URL}/v1/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}/archive"
download = "{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}"
# the client ID of the GitHub OAuth app (optional)
github_oauth_client_id = "a1d648966fdfbdcd9295"
@@ -58,7 +58,7 @@ scripts_packages = ["pesde/scripts_rojo"]
- `{PACKAGE_VERSION}`: The package version.
- `{PACKAGE_TARGET}`: The package target.
Defaults to `{API_URL}/v1/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}/archive`.
Defaults to `{API_URL}/v0/packages/{PACKAGE}/{PACKAGE_VERSION}/{PACKAGE_TARGET}`.
- **github_oauth_client_id**: This is required if you use GitHub OAuth for
authentication. See below for more information.
@@ -115,11 +115,11 @@ for this purpose.
`GITHUB_USERNAME`. This is required.
- **COMMITTER_GIT_NAME**: The name to use for the committer when updating the
index repository. This is required.\
index repository.\
Example: `pesde index updater`
- **COMMITTER_GIT_EMAIL**: The email to use for the committer when updating the
index repository. This is required.\
index repository.\
Example: `pesde@localhost`
- **DATA_DIR**: The directory where the registry stores miscellaneous data.


@@ -5,11 +5,22 @@ description: Install pesde
import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
## Prerequisites
pesde requires [Lune](https://lune-org.github.io/docs) to be installed on your
system in order to function properly.
You can follow the installation instructions in the
[Lune documentation](https://lune-org.github.io/docs/getting-started/1-installation).
## Installing pesde
<Steps>
1. Go to the [GitHub releases page](https://github.com/pesde-pkg/pesde/releases/latest).
2. Download the corresponding archive for your operating system.
2. Download the corresponding archive for your operating system. You can choose
whether to use the `.zip` or `.tar.gz` files.
3. Extract the downloaded archive to a folder on your computer.
@@ -65,7 +76,6 @@ import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
</TabItem>
</Tabs>
<br />
5. Verify that pesde is installed by running the following command:
@@ -82,8 +92,8 @@ import { Aside, Steps, TabItem, Tabs } from "@astrojs/starlight/components"
It is not recommended to use toolchain managers (such as Rokit or Aftman) to
install pesde. You can use `pesde self-upgrade` if you need to update pesde.
If you need everyone to use a compatible version of pesde, you can use the
`[engines.pesde]` field in `pesde.toml` to specify the version of pesde to use
If you need everyone to use the same version of pesde, you can use the
`pesde_version` field in `pesde.toml` to specify the version of pesde to use
for the current project.
</Aside>
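Under the `[engines.pesde]` scheme mentioned above, such a pin might look like
this minimal sketch (the version range is illustrative):

```toml
[engines]
pesde = "^0.6.0"
```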


@@ -33,7 +33,7 @@ pesde init
# what is the repository URL of this project?
# what is the license of this project? MIT
# what environment are you targeting for your package? luau
# would you like to setup Roblox compatibility scripts? No
# would you like to setup default Roblox compatibility scripts? No
```
The command will create a `pesde.toml` file in the current folder. Go ahead

View file

@@ -55,83 +55,10 @@ is printed.
The default index is [`pesde-index`](https://github.com/pesde-pkg/index).
## `pesde cas`
Content-addressable storage (CAS) related commands.
### `pesde cas prune`
Removes unused CAS files and packages.
## `pesde init`
Initializes a new pesde project in the current directory.
## `pesde add`
```sh
pesde add <PACKAGE>
```
Adds a package to the dependencies of the current project.
- `-i, --index <INDEX>`: The index in which to search for the package.
- `-t, --target <TARGET>`: The target environment for the package.
- `-a, --alias <ALIAS>`: The alias to use for the package, defaults to the
package name.
- `-p, --peer`: Adds the package as a peer dependency.
- `-d, --dev`: Adds the package as a dev dependency.
The following formats are supported:
```sh
pesde add pesde/hello
pesde add pesde/hello@1.2.3
pesde add gh#acme/package#main
pesde add https://git.acme.local/package.git#aeff6
pesde add workspace:pesde/hello
pesde add workspace:pesde/hello@1.2.3
pesde add path:/home/user/package
```
## `pesde remove`
```sh
pesde remove <ALIAS>
```
Removes a package from the dependencies of the current project.
## `pesde install`
Installs dependencies for the current project.
- `--locked`: Whether to error if the lockfile is out of date.
- `--prod`: Whether to skip linking dev dependencies.
- `--network-concurrency <CONCURRENCY>`: The maximum number of concurrent
  network requests to make. Defaults to 16.
- `--force`: Whether to force reinstall all packages even if they are already
installed (useful if there is any issue with the current installation).
## `pesde update`
Updates the dependencies of the current project.
- `--no-install`: Whether to only update the lockfile without installing the
dependencies.
- `--network-concurrency <CONCURRENCY>`: The maximum number of concurrent
  network requests to make. Defaults to 16.
- `--force`: Whether to force reinstall all packages even if they are already
installed (useful if there is any issue with the current installation).
## `pesde outdated`
Lists outdated dependencies of the current project.
## `pesde list`
Lists the dependencies of the current project.
## `pesde run`
Runs a script from the current project using Lune.
@@ -156,6 +83,13 @@ Arguments can be passed to the script by using `--` followed by the arguments.
pesde run foo -- --arg1 --arg2
```
## `pesde install`
Installs dependencies for the current project.
- `--locked`: Whether to error if the lockfile is out of date.
- `--prod`: Whether to skip installing dev dependencies.
## `pesde publish`
Publishes the current project to the pesde registry.
@@ -165,26 +99,18 @@ Publishes the current project to the pesde registry.
publish it.
- `-y, --yes`: Whether to skip the confirmation prompt.
- `-i, --index`: Name of the index to publish to. Defaults to `default`.
- `--no-verify`: Whether to skip syntax validation of the exports of the
package.
## `pesde yank`
## `pesde self-install`
Yanks a version of a package from the registry.
Performs the pesde installation process. This should be the first command run
after downloading the pesde binary.
- `--undo`: Whether to unyank the package.
- `-i, --index`: Name of the index to yank from. Defaults to `default`.
## `pesde self-upgrade`
## `pesde deprecate`
Upgrades the pesde binary to the latest version.
```sh
pesde deprecate <PACKAGE> [REASON]
```
Deprecates a package in the registry. A non-empty reason must be provided.
- `--undo`: Whether to undeprecate the package.
- `-i, --index`: Name of the index to deprecate from. Defaults to `default`.
- `--use-cached`: Whether to use the version displayed in the "upgrade available"
message instead of checking for the latest version.
## `pesde patch`
@@ -211,6 +137,33 @@ pesde patch-commit <PATH>
Applies the changes made in the patching environment created by `pesde patch`.
## `pesde add`
```sh
pesde add <PACKAGE>
```
Adds a package to the dependencies of the current project.
- `-i, --index <INDEX>`: The index in which to search for the package.
- `-t, --target <TARGET>`: The target environment for the package.
- `-a, --alias <ALIAS>`: The alias to use for the package, defaults to the
package name.
- `-p, --peer`: Adds the package as a peer dependency.
- `-d, --dev`: Adds the package as a dev dependency.
The following formats are supported:
```sh
pesde add pesde/hello
pesde add gh#acme/package#main
pesde add https://git.acme.local/package.git#aeff6
```
## `pesde update`
Updates the dependencies of the current project.
## `pesde x`
Runs a one-off binary package.
@@ -225,15 +178,3 @@ a pesde project.
```sh
pesde x pesde/hello
```
## `pesde self-install`
Performs the pesde installation process. This should be the first command run
after downloading the pesde binary.
## `pesde self-upgrade`
Upgrades the pesde binary to the latest version.
- `--use-cached`: Whether to use the version displayed in the "upgrade available"
message instead of checking for the latest version.


@@ -84,6 +84,11 @@ includes = [
]
```
### `pesde_version`
The version of pesde to use within this project. The `pesde` CLI will look at
this field and run the correct version of pesde for this project.
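A minimal sketch, assuming `pesde_version` is a top-level manifest field (the
version shown is illustrative):

```toml
# hypothetical pesde.toml excerpt pinning the pesde CLI version
pesde_version = "0.5.3"
```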
### `workspace_members`
A list of globs containing the members of this workspace.
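For instance (the glob is hypothetical):

```toml
workspace_members = ["packages/*"]
```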
@@ -268,27 +273,10 @@ version `1.0.0`, and the `bar` and `baz` dependencies of the `foo` package with
version `2.0.0`.
Each key in the overrides table is a comma-separated list of package paths. The
path is a list of aliases separated by `>`. For example, `foo>bar>baz`
path is a list of package names separated by `>`. For example, `foo>bar>baz`
refers to the `baz` dependency of the `bar` package, which is a dependency of
the `foo` package.
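As a sketch, one entry can therefore cover several paths at once (the aliases
are hypothetical):

```toml
# overrides `bar` both under `foo` and under `baz`
[overrides]
"foo>bar,baz>bar" = { name = "acme/bar", version = "2.0.0" }
```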
The value of an override entry can be either a specifier or an alias. If it is an
alias (a string), it will be equivalent to putting the specifier of the dependency
under that alias. For example, the following two overrides are equivalent:
```toml
[dependencies]
bar = { name = "acme/bar", version = "2.0.0" }
[overrides]
"foo>bar" = "bar"
```
```toml
[overrides]
"foo>bar" = { name = "acme/bar", version = "2.0.0" }
```
<LinkCard
title="Overrides"
description="Learn more about overriding and patching packages."
@@ -411,19 +399,18 @@ foo = { workspace = "acme/foo", version = "^" }
href="/guides/workspaces/#workspace-dependencies"
/>
### Path
## `[peer_dependencies]`
The `[peer_dependencies]` section contains a list of peer dependencies for the
package. These are dependencies that are required by the package, but are not
installed automatically. Instead, they must be installed by the user of the
package.
```toml
[dependencies]
foo = { path = "/home/user/foo" }
[peer_dependencies]
foo = { name = "acme/foo", version = "1.2.3" }
```
**Path dependencies** contain the following fields:
- `path`: The path to the package on the local filesystem.
Path dependencies are forbidden in published packages.
## `[dev_dependencies]`
The `[dev_dependencies]` section contains a list of development dependencies for
@@ -443,31 +430,3 @@ foo = { name = "acme/foo", version = "1.2.3" }
description="Learn more about specifying dependencies in pesde."
href="/guides/dependencies/"
/>
## `[peer_dependencies]`
The `[peer_dependencies]` section contains a list of peer dependencies for the
package. These are dependencies that are required by the package, but are not
installed automatically. Instead, they must be installed by the user of the
package.
```toml
[peer_dependencies]
foo = { name = "acme/foo", version = "1.2.3" }
```
## `[engines]`
The `[engines]` section contains a list of engines that the package is compatible
with.
```toml
[engines]
pesde = "^0.6.0"
lune = "^0.8.9"
```
Currently, the only engines that can be specified are `pesde` and `lune`.
Additionally, the engines you declared in your project will be installed when
you run `pesde install`. Then, a version of the engine that satisfies the
specified version range will be used when you run the engine.


@@ -5,37 +5,18 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.2.0-rc.2] - 2025-02-10
### Changed
- Remove native-tls dependency by @daimond113
## [0.2.0-rc.1] - 2025-02-06
### Added
- Support deprecating and yanking packages by @daimond113
- Add yanking & deprecating to registry by @daimond113
- Log more information about configured auth & storage by @daimond113
- Add individual endpoints for package data over using `Accept` header conditional returns by @daimond113
- Set `Content-Length` header for FS storage backend by @daimond113
### Performance
- Switch to using a `RwLock` over a `Mutex` to store repository data by @daimond113
- Asyncify blocking operations by @daimond113
- Asyncify reading of package data of top search results by @daimond113
## [0.1.2] - 2024-12-30
## [0.1.2]
### Changed
- Update to pesde lib API changes by @daimond113
## [0.1.1] - 2024-12-19
### Changed
- Switch to tracing for logging by @daimond113
- Switch to traccing for logging by @daimond113
## [0.1.0] - 2024-12-14
### Added
- Rewrite registry for pesde v0.5.0 by @daimond113
[0.2.0-rc.2]: https://github.com/daimond113/pesde/compare/v0.6.0-rc.4%2Bregistry.0.2.0-rc.1..v0.6.0-rc.5%2Bregistry.0.2.0-rc.2
[0.2.0-rc.1]: https://github.com/daimond113/pesde/compare/v0.5.3%2Bregistry.0.1.2..v0.6.0-rc.1%2Bregistry.0.2.0-rc.1
[0.1.2]: https://github.com/daimond113/pesde/compare/v0.5.2%2Bregistry.0.1.1..v0.5.3%2Bregistry.0.1.2
[0.1.1]: https://github.com/daimond113/pesde/compare/v0.5.1%2Bregistry.0.1.0..v0.5.2%2Bregistry.0.1.1
[0.1.0]: https://github.com/daimond113/pesde/compare/v0.4.7..v0.5.0%2Bregistry.0.1.0


@@ -1,6 +1,6 @@
[package]
name = "pesde-registry"
version = "0.2.0-rc.2"
version = "0.1.2"
edition = "2021"
repository = "https://github.com/pesde-pkg/index"
publish = false
@@ -10,32 +10,31 @@ actix-web = "4.9.0"
actix-cors = "0.7.0"
actix-governor = "0.8.0"
dotenvy = "0.15.7"
thiserror = "2.0.11"
thiserror = "2.0.7"
tantivy = "0.22.0"
semver = "1.0.25"
jiff = { version = "0.1.29", features = ["serde"] }
semver = "1.0.24"
chrono = { version = "0.4.39", features = ["serde"] }
futures = "0.3.31"
tokio = "1.43.0"
tokio-util = "0.7.13"
tempfile = "3.16.0"
fs-err = { version = "3.1.0", features = ["tokio"] }
tokio = "1.42.0"
tempfile = "3.14.0"
fs-err = { version = "3.0.0", features = ["tokio"] }
async-stream = "0.3.6"
git2 = "0.20.0"
gix = { version = "0.70.0", default-features = false, features = [
git2 = "0.19.0"
gix = { version = "0.68.0", default-features = false, features = [
"blocking-http-transport-reqwest-rust-tls",
"credentials",
] }
serde = "1.0.217"
serde_json = "1.0.138"
serde = "1.0.216"
serde_json = "1.0.133"
serde_yaml = "0.9.34"
toml = "0.8.20"
convert_case = "0.7.1"
toml = "0.8.19"
convert_case = "0.6.0"
sha2 = "0.10.8"
rusty-s3 = "0.7.0"
reqwest = { version = "0.12.12", default-features = false, features = ["json", "rustls-tls"] }
rusty-s3 = "0.5.0"
reqwest = { version = "0.12.9", features = ["json", "rustls-tls"] }
constant_time_eq = "0.3.1"
tokio-tar = "0.3.1"
@@ -45,7 +44,7 @@ tracing = { version = "0.1.41", features = ["attributes"] }
tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
tracing-actix-web = "0.7.15"
sentry = { version = "0.36.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
sentry-actix = "0.36.0"
sentry = { version = "0.35.0", default-features = false, features = ["backtrace", "contexts", "debug-images", "panic", "reqwest", "rustls", "tracing"] }
sentry-actix = "0.35.0"
pesde = { path = "..", default-features = false, features = ["wally-compat"] }
pesde = { path = "..", features = ["wally-compat"] }


@@ -72,7 +72,7 @@ impl AuthImpl for GitHubAuth {
impl Display for GitHubAuth {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "GitHub (client id: {})", self.client_id)
write!(f, "GitHub")
}
}


@@ -1,76 +0,0 @@
use crate::{
auth::UserId,
error::{ErrorResponse, RegistryError},
git::push_changes,
package::{read_package, read_scope_info},
search::search_version_changed,
AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use std::collections::HashMap;
pub async fn deprecate_package_version(
request: HttpRequest,
app_state: web::Data<AppState>,
path: web::Path<PackageName>,
bytes: web::Bytes,
user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
let deprecated = request.method() != Method::DELETE;
let reason = if deprecated {
match String::from_utf8(bytes.to_vec()).map(|s| s.trim().to_string()) {
Ok(reason) if !reason.is_empty() => reason,
Err(e) => {
return Ok(HttpResponse::BadRequest().json(ErrorResponse {
error: format!("invalid utf-8: {e}"),
}))
}
_ => {
return Ok(HttpResponse::BadRequest().json(ErrorResponse {
error: "deprecating must have a non-empty reason".to_string(),
}))
}
}
} else {
String::new()
};
let name = path.into_inner();
let source = app_state.source.write().await;
let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
return Ok(HttpResponse::NotFound().finish());
};
if !scope_info.owners.contains(&user_id.0) {
return Ok(HttpResponse::Forbidden().finish());
}
let Some(mut file) = read_package(&app_state, &name, &source).await? else {
return Ok(HttpResponse::NotFound().finish());
};
if file.meta.deprecated == reason {
return Ok(HttpResponse::Conflict().finish());
}
file.meta.deprecated = reason;
let file_string = toml::to_string(&file)?;
push_changes(
&app_state,
&source,
name.scope().to_string(),
HashMap::from([(name.name().to_string(), file_string.into_bytes())]),
format!("{}deprecate {name}", if deprecated { "" } else { "un" }),
)
.await?;
search_version_changed(&app_state, &name, &file);
Ok(HttpResponse::Ok().body(format!(
"{}deprecated {name}",
if deprecated { "" } else { "un" },
)))
}


@@ -1,9 +1,4 @@
pub mod deprecate_version;
pub mod package_archive;
pub mod package_doc;
pub mod package_readme;
pub mod package_version;
pub mod package_versions;
pub mod publish_version;
pub mod search;
pub mod yank_version;


@@ -1,27 +0,0 @@
use actix_web::{web, HttpResponse};
use crate::{
error::RegistryError,
package::read_package,
request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
storage::StorageImpl,
AppState,
};
use pesde::names::PackageName;
pub async fn get_package_archive(
app_state: web::Data<AppState>,
path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
) -> Result<HttpResponse, RegistryError> {
let (name, version, target) = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
};
let Some(v_id) = resolve_version_and_target(&file, version, target) else {
return Ok(HttpResponse::NotFound().finish());
};
app_state.storage.get_package(&name, v_id).await
}


@@ -1,66 +0,0 @@
use crate::{
error::RegistryError,
package::read_package,
request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
storage::StorageImpl,
AppState,
};
use actix_web::{web, HttpResponse};
use pesde::{
names::PackageName,
source::{
ids::VersionId,
pesde::{DocEntryKind, IndexFile},
},
};
use serde::Deserialize;
pub fn find_package_doc<'a>(
file: &'a IndexFile,
v_id: &VersionId,
doc_name: &str,
) -> Option<&'a str> {
let mut queue = file.entries[v_id]
.docs
.iter()
.map(|doc| &doc.kind)
.collect::<Vec<_>>();
while let Some(doc) = queue.pop() {
match doc {
DocEntryKind::Page { name, hash } if name == doc_name => return Some(hash.as_str()),
DocEntryKind::Category { items, .. } => {
queue.extend(items.iter().map(|item| &item.kind))
}
_ => continue,
};
}
None
}
#[derive(Debug, Deserialize)]
pub struct Query {
doc: String,
}
pub async fn get_package_doc(
app_state: web::Data<AppState>,
path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
request_query: web::Query<Query>,
) -> Result<HttpResponse, RegistryError> {
let (name, version, target) = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
};
let Some(v_id) = resolve_version_and_target(&file, version, target) else {
return Ok(HttpResponse::NotFound().finish());
};
let Some(hash) = find_package_doc(&file, v_id, &request_query.doc) else {
return Ok(HttpResponse::NotFound().finish());
};
app_state.storage.get_doc(hash).await
}


@@ -1,27 +0,0 @@
use actix_web::{web, HttpResponse};
use crate::{
error::RegistryError,
package::read_package,
request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
storage::StorageImpl,
AppState,
};
use pesde::names::PackageName;
pub async fn get_package_readme(
app_state: web::Data<AppState>,
path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
) -> Result<HttpResponse, RegistryError> {
let (name, version, target) = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
};
let Some(v_id) = resolve_version_and_target(&file, version, target) else {
return Ok(HttpResponse::NotFound().finish());
};
app_state.storage.get_readme(&name, v_id).await
}


@@ -1,43 +1,137 @@
use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse};
use serde::Deserialize;
use actix_web::{http::header::ACCEPT, web, HttpRequest, HttpResponse, Responder};
use semver::Version;
use serde::{Deserialize, Deserializer};
use crate::{
endpoints::package_doc::find_package_doc,
error::RegistryError,
package::{read_package, PackageResponse},
request_path::{resolve_version_and_target, AnyOrSpecificTarget, LatestOrSpecificVersion},
storage::StorageImpl,
AppState,
use crate::{error::Error, package::PackageResponse, storage::StorageImpl, AppState};
use pesde::{
manifest::target::TargetKind,
names::PackageName,
source::{
git_index::{read_file, root_tree, GitBasedSource},
pesde::{DocEntryKind, IndexFile},
},
};
use pesde::names::PackageName;
#[derive(Debug)]
pub enum VersionRequest {
Latest,
Specific(Version),
}
impl<'de> Deserialize<'de> for VersionRequest {
fn deserialize<D>(deserializer: D) -> Result<VersionRequest, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("latest") {
return Ok(VersionRequest::Latest);
}
s.parse()
.map(VersionRequest::Specific)
.map_err(serde::de::Error::custom)
}
}
#[derive(Debug)]
pub enum TargetRequest {
Any,
Specific(TargetKind),
}
impl<'de> Deserialize<'de> for TargetRequest {
fn deserialize<D>(deserializer: D) -> Result<TargetRequest, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("any") {
return Ok(TargetRequest::Any);
}
s.parse()
.map(TargetRequest::Specific)
.map_err(serde::de::Error::custom)
}
}
#[derive(Debug, Deserialize)]
pub struct Query {
doc: Option<String>,
}
pub async fn get_package_version_v0(
pub async fn get_package_version(
request: HttpRequest,
app_state: web::Data<AppState>,
path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
request_query: web::Query<Query>,
) -> Result<HttpResponse, RegistryError> {
path: web::Path<(PackageName, VersionRequest, TargetRequest)>,
query: web::Query<Query>,
) -> Result<impl Responder, Error> {
let (name, version, target) = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
let (scope, name_part) = name.as_str();
let file: IndexFile = {
let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?;
match read_file(&tree, [scope, name_part])? {
Some(versions) => toml::de::from_str(&versions)?,
None => return Ok(HttpResponse::NotFound().finish()),
}
};
let Some((v_id, entry, targets)) = ({
let version = match version {
VersionRequest::Latest => match file.entries.keys().map(|k| k.version()).max() {
Some(latest) => latest.clone(),
None => return Ok(HttpResponse::NotFound().finish()),
},
VersionRequest::Specific(version) => version,
};
let versions = file
.entries
.iter()
.filter(|(v_id, _)| *v_id.version() == version);
match target {
TargetRequest::Any => versions.clone().min_by_key(|(v_id, _)| *v_id.target()),
TargetRequest::Specific(kind) => versions
.clone()
.find(|(_, entry)| entry.target.kind() == kind),
}
.map(|(v_id, entry)| {
(
v_id,
entry,
versions.map(|(_, entry)| (&entry.target).into()).collect(),
)
})
}) else {
return Ok(HttpResponse::NotFound().finish());
};
let Some(v_id) = resolve_version_and_target(&file, version, target) else {
if let Some(doc_name) = query.doc.as_deref() {
let hash = 'finder: {
let mut hash = entry.docs.iter().map(|doc| &doc.kind).collect::<Vec<_>>();
while let Some(doc) = hash.pop() {
match doc {
DocEntryKind::Page { name, hash } if name == doc_name => {
break 'finder hash.clone()
}
DocEntryKind::Category { items, .. } => {
hash.extend(items.iter().map(|item| &item.kind))
}
_ => continue,
};
}
return Ok(HttpResponse::NotFound().finish());
};
if let Some(doc_name) = request_query.doc.as_deref() {
let Some(hash) = find_package_doc(&file, v_id, doc_name) else {
return Ok(HttpResponse::NotFound().finish());
};
return app_state.storage.get_doc(hash).await;
return app_state.storage.get_doc(&hash).await;
}
let accept = request
@@ -58,22 +152,20 @@ pub async fn get_package_version_v0(
};
}
Ok(HttpResponse::Ok().json(PackageResponse::new(&name, v_id, &file)))
}
pub async fn get_package_version(
app_state: web::Data<AppState>,
path: web::Path<(PackageName, LatestOrSpecificVersion, AnyOrSpecificTarget)>,
) -> Result<HttpResponse, RegistryError> {
let (name, version, target) = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
let response = PackageResponse {
name: name.to_string(),
version: v_id.version().to_string(),
targets,
description: entry.description.clone().unwrap_or_default(),
published_at: entry.published_at,
license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
};
let Some(v_id) = resolve_version_and_target(&file, version, target) else {
return Ok(HttpResponse::NotFound().finish());
};
let mut value = serde_json::to_value(response)?;
value["docs"] = serde_json::to_value(entry.docs.clone())?;
value["dependencies"] = serde_json::to_value(entry.dependencies.clone())?;
Ok(HttpResponse::Ok().json(PackageResponse::new(&name, v_id, &file)))
Ok(HttpResponse::Ok().json(value))
}


@@ -1,55 +1,54 @@
use crate::{
error::RegistryError,
package::{read_package, PackageResponse, PackageVersionsResponse},
AppState,
};
use std::collections::{BTreeMap, BTreeSet};
use actix_web::{web, HttpResponse, Responder};
use pesde::{names::PackageName, source::ids::VersionId};
use semver::Version;
use std::collections::{btree_map::Entry, BTreeMap};
pub async fn get_package_versions_v0(
app_state: web::Data<AppState>,
path: web::Path<PackageName>,
) -> Result<impl Responder, RegistryError> {
let name = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
use crate::{error::Error, package::PackageResponse, AppState};
use pesde::{
names::PackageName,
source::{
git_index::{read_file, root_tree, GitBasedSource},
pesde::IndexFile,
},
};
let mut versions = BTreeMap::<&Version, &VersionId>::new();
for v_id in file.entries.keys() {
match versions.entry(v_id.version()) {
Entry::Vacant(entry) => {
entry.insert(v_id);
}
Entry::Occupied(mut entry) => {
if entry.get() < &v_id {
entry.insert(v_id);
}
}
}
}
let responses = versions
.into_values()
.map(|v_id| PackageResponse::new(&name, v_id, &file))
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(responses))
}
pub async fn get_package_versions(
app_state: web::Data<AppState>,
path: web::Path<PackageName>,
) -> Result<impl Responder, RegistryError> {
) -> Result<impl Responder, Error> {
let name = path.into_inner();
let Some(file) = read_package(&app_state, &name, &*app_state.source.read().await).await? else {
return Ok(HttpResponse::NotFound().finish());
let (scope, name_part) = name.as_str();
let file: IndexFile = {
let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?;
match read_file(&tree, [scope, name_part])? {
Some(versions) => toml::de::from_str(&versions)?,
None => return Ok(HttpResponse::NotFound().finish()),
}
};
Ok(HttpResponse::Ok().json(PackageVersionsResponse::new(&name, &file)))
let mut responses = BTreeMap::new();
for (v_id, entry) in file.entries {
let info = responses
.entry(v_id.version().clone())
.or_insert_with(|| PackageResponse {
name: name.to_string(),
version: v_id.version().to_string(),
targets: BTreeSet::new(),
description: entry.description.unwrap_or_default(),
published_at: entry.published_at,
license: entry.license.unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
});
info.targets.insert(entry.target.into());
info.published_at = info.published_at.max(entry.published_at);
}
Ok(HttpResponse::Ok().json(responses.into_values().collect::<Vec<_>>()))
}


@@ -1,25 +1,25 @@
use crate::{
auth::UserId,
error::{ErrorResponse, RegistryError},
git::push_changes,
package::{read_package, read_scope_info},
search::update_search_version,
benv,
error::{Error, ErrorResponse},
search::update_version,
storage::StorageImpl,
AppState,
};
use actix_web::{web, web::Bytes, HttpResponse};
use actix_web::{web, web::Bytes, HttpResponse, Responder};
use async_compression::Level;
use convert_case::{Case, Casing};
use fs_err::tokio as fs;
use futures::{future::join_all, join};
use git2::{Remote, Repository, Signature};
use pesde::{
manifest::{DependencyType, Manifest},
manifest::Manifest,
source::{
git_index::GitBasedSource,
ids::VersionId,
pesde::{DocEntry, DocEntryKind, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
git_index::{read_file, root_tree, GitBasedSource},
pesde::{DocEntry, DocEntryKind, IndexFile, IndexFileEntry, ScopeInfo, SCOPE_INFO_FILE},
specifiers::DependencySpecifiers,
traits::RefreshOptions,
ADDITIONAL_FORBIDDEN_FILES, IGNORED_DIRS, IGNORED_FILES,
version_id::VersionId,
IGNORED_DIRS, IGNORED_FILES,
},
MANIFEST_FILE_NAME,
};
@@ -28,12 +28,33 @@ use serde::Deserialize;
use sha2::{Digest, Sha256};
use std::{
collections::{BTreeSet, HashMap},
io::Cursor,
};
use tokio::{
io::{AsyncReadExt, AsyncWriteExt},
task::JoinSet,
io::{Cursor, Write},
};
use tokio::io::{AsyncReadExt, AsyncWriteExt};
fn signature<'a>() -> Signature<'a> {
Signature::now(
&benv!(required "COMMITTER_GIT_NAME"),
&benv!(required "COMMITTER_GIT_EMAIL"),
)
.unwrap()
}
fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
let upstream_branch = upstream_branch_buf.as_str().unwrap();
let refspec_buf = remote
.refspecs()
.find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
.unwrap()
.rtransform(upstream_branch)?;
let refspec = refspec_buf.as_str().unwrap();
Ok(refspec.to_string())
}
const ADDITIONAL_FORBIDDEN_FILES: &[&str] = &["default.project.json"];
#[derive(Debug, Deserialize, Default)]
struct DocEntryInfo {
@@ -49,14 +70,9 @@ pub async fn publish_package(
app_state: web::Data<AppState>,
bytes: Bytes,
user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
let source = app_state.source.write().await;
source
.refresh(&RefreshOptions {
project: app_state.project.clone(),
})
.await
.map_err(Box::new)?;
) -> Result<impl Responder, Error> {
let source = app_state.source.lock().await;
source.refresh(&app_state.project).await.map_err(Box::new)?;
let config = source.config(&app_state.project).await?;
let package_dir = tempfile::tempdir()?;
@@ -78,14 +94,12 @@
let file_name = entry
.file_name()
.to_str()
.ok_or_else(|| {
RegistryError::InvalidArchive("file name contains non UTF-8 characters".into())
})?
.ok_or_else(|| Error::InvalidArchive("file name contains non UTF-8 characters".into()))?
.to_string();
if entry.file_type().await?.is_dir() {
if IGNORED_DIRS.contains(&file_name.as_str()) {
return Err(RegistryError::InvalidArchive(format!(
return Err(Error::InvalidArchive(format!(
"archive contains forbidden directory: {file_name}"
)));
}
@@ -103,7 +117,7 @@
.file_name()
.to_str()
.ok_or_else(|| {
RegistryError::InvalidArchive(
Error::InvalidArchive(
"file name contains non UTF-8 characters".into(),
)
})?
@@ -170,7 +184,7 @@
let info: DocEntryInfo =
serde_yaml::from_str(&front_matter).map_err(|_| {
RegistryError::InvalidArchive(format!(
Error::InvalidArchive(format!(
"doc {file_name}'s frontmatter isn't valid YAML"
))
})?;
@@ -186,7 +200,7 @@
.with_extension("")
.to_str()
.ok_or_else(|| {
RegistryError::InvalidArchive(
Error::InvalidArchive(
"file name contains non UTF-8 characters".into(),
)
})?
@@ -226,7 +240,7 @@
if IGNORED_FILES.contains(&file_name.as_str())
|| ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.as_str())
{
return Err(RegistryError::InvalidArchive(format!(
return Err(Error::InvalidArchive(format!(
"archive contains forbidden file: {file_name}"
)));
}
@@ -242,7 +256,7 @@
.is_some()
{
if readme.is_some() {
return Err(RegistryError::InvalidArchive(
return Err(Error::InvalidArchive(
"archive contains multiple readme files".into(),
));
}
@@ -257,7 +271,7 @@
}
let Some(manifest) = manifest else {
return Err(RegistryError::InvalidArchive(
return Err(Error::InvalidArchive(
"archive doesn't contain a manifest".into(),
));
};
@@ -278,15 +292,10 @@
{
let dependencies = manifest.all_dependencies().map_err(|e| {
RegistryError::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
Error::InvalidArchive(format!("manifest has invalid dependencies: {e}"))
})?;
for (specifier, ty) in dependencies.values() {
// we need not verify dev dependencies, as they won't be installed
if *ty == DependencyType::Dev {
continue;
}
for (specifier, _) in dependencies.values() {
match specifier {
DependencySpecifiers::Pesde(specifier) => {
if specifier
@@ -300,7 +309,7 @@
})
.is_none()
{
return Err(RegistryError::InvalidArchive(format!(
return Err(Error::InvalidArchive(format!(
"invalid index in pesde dependency {specifier}"
)));
}
@@ -315,37 +324,38 @@
})
.is_none()
{
return Err(RegistryError::InvalidArchive(format!(
return Err(Error::InvalidArchive(format!(
"invalid index in wally dependency {specifier}"
)));
}
}
DependencySpecifiers::Git(specifier) => {
if !config.git_allowed.is_allowed(specifier.repo.clone()) {
return Err(RegistryError::InvalidArchive(
return Err(Error::InvalidArchive(
"git dependencies are not allowed".into(),
));
}
}
DependencySpecifiers::Workspace(_) => {
// workspace specifiers are to be transformed into pesde specifiers by the sender
return Err(RegistryError::InvalidArchive(
return Err(Error::InvalidArchive(
"non-transformed workspace dependency".into(),
));
}
DependencySpecifiers::Path(_) => {
return Err(RegistryError::InvalidArchive(
"path dependencies are not allowed".into(),
));
}
}
}
let mut files = HashMap::new();
let repo = Repository::open_bare(source.path(&app_state.project))?;
let gix_repo = gix::open(repo.path())?;
let scope = read_scope_info(&app_state, manifest.name.scope(), &source).await?;
match scope {
let gix_tree = root_tree(&gix_repo)?;
let (scope, name) = manifest.name.as_str();
let mut oids = vec![];
match read_file(&gix_tree, [scope, SCOPE_INFO_FILE])? {
Some(info) => {
let info: ScopeInfo = toml::de::from_str(&info)?;
if !info.owners.contains(&user_id.0) {
return Ok(HttpResponse::Forbidden().finish());
}
@@ -355,37 +365,44 @@
owners: BTreeSet::from([user_id.0]),
})?;
files.insert(SCOPE_INFO_FILE.to_string(), scope_info.into_bytes());
}
let mut blob_writer = repo.blob_writer(None)?;
blob_writer.write_all(scope_info.as_bytes())?;
oids.push((SCOPE_INFO_FILE, blob_writer.commit()?));
}
};
let mut file = read_package(&app_state, &manifest.name, &source)
.await?
.unwrap_or_default();
let mut file: IndexFile =
toml::de::from_str(&read_file(&gix_tree, [scope, name])?.unwrap_or_default())?;
let new_entry = IndexFileEntry {
target: manifest.target.clone(),
published_at: jiff::Timestamp::now(),
engines: manifest.engines.clone(),
published_at: chrono::Utc::now(),
description: manifest.description.clone(),
license: manifest.license.clone(),
authors: manifest.authors.clone(),
repository: manifest.repository.clone(),
yanked: false,
docs,
dependencies,
};
let same_version = file
let this_version = file
.entries
.iter()
.find(|(v_id, _)| *v_id.version() == manifest.version);
if let Some((_, other_entry)) = same_version {
.keys()
.find(|v_id| *v_id.version() == manifest.version);
if let Some(this_version) = this_version {
let other_entry = file.entries.get(this_version).unwrap();
// description cannot be different - which one to render in the "Recently published" list?
if other_entry.description != new_entry.description {
// the other fields cannot differ either - otherwise, what would the versions endpoint return?
if other_entry.description != new_entry.description
|| other_entry.license != new_entry.license
|| other_entry.authors != new_entry.authors
|| other_entry.repository != new_entry.repository
{
return Ok(HttpResponse::BadRequest().json(ErrorResponse {
error: "same versions with different descriptions are forbidden".to_string(),
error: "same version with different description or license already exists"
.to_string(),
}));
}
}
@ -401,65 +418,90 @@ pub async fn publish_package(
return Ok(HttpResponse::Conflict().finish());
}
files.insert(
manifest.name.name().to_string(),
toml::to_string(&file)?.into_bytes(),
);
let mut remote = repo.find_remote("origin")?;
let refspec = get_refspec(&repo, &mut remote)?;
push_changes(
&app_state,
&source,
manifest.name.scope().to_string(),
files,
format!(
let reference = repo.find_reference(&refspec)?;
{
let index_content = toml::to_string(&file)?;
let mut blob_writer = repo.blob_writer(None)?;
blob_writer.write_all(index_content.as_bytes())?;
oids.push((name, blob_writer.commit()?));
}
let old_root_tree = reference.peel_to_tree()?;
let old_scope_tree = match old_root_tree.get_name(scope) {
Some(entry) => Some(repo.find_tree(entry.id())?),
None => None,
};
let mut scope_tree = repo.treebuilder(old_scope_tree.as_ref())?;
for (file, oid) in oids {
scope_tree.insert(file, oid, 0o100644)?;
}
let scope_tree_id = scope_tree.write()?;
let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
root_tree.insert(scope, scope_tree_id, 0o040000)?;
let tree_oid = root_tree.write()?;
repo.commit(
Some("HEAD"),
&signature(),
&signature(),
&format!(
"add {}@{} {}",
manifest.name, manifest.version, manifest.target
),
)
.await?;
&repo.find_tree(tree_oid)?,
&[&reference.peel_to_commit()?],
)?;
update_search_version(&app_state, &manifest.name, &new_entry);
let mut push_options = git2::PushOptions::new();
let mut remote_callbacks = git2::RemoteCallbacks::new();
let git_creds = app_state.project.auth_config().git_credentials().unwrap();
remote_callbacks.credentials(|_, _, _| {
git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
});
push_options.remote_callbacks(remote_callbacks);
remote.push(&[refspec], Some(&mut push_options))?;
update_version(&app_state, &manifest.name, new_entry);
}
let version_id = VersionId::new(manifest.version.clone(), manifest.target.kind());
let mut tasks = docs_pages
let (a, b, c) = join!(
app_state
.storage
.store_package(&manifest.name, &version_id, bytes.to_vec()),
join_all(
docs_pages
.into_iter()
.map(|(hash, content)| {
let app_state = app_state.clone();
async move { app_state.storage.store_doc(hash, content).await }
})
.collect::<JoinSet<_>>();
{
let app_state = app_state.clone();
let name = manifest.name.clone();
let version_id = version_id.clone();
tasks.spawn(async move {
app_state
.storage
.store_package(&name, &version_id, bytes.to_vec())
.await
});
}
.map(|(hash, content)| app_state.storage.store_doc(hash, content)),
),
async {
if let Some(readme) = readme {
let app_state = app_state.clone();
let name = manifest.name.clone();
let version_id = version_id.clone();
tasks.spawn(async move {
app_state
.storage
.store_readme(&name, &version_id, readme)
.store_readme(&manifest.name, &version_id, readme)
.await
});
} else {
Ok(())
}
}
);
a?;
b.into_iter().collect::<Result<(), _>>()?;
c?;
while let Some(res) = tasks.join_next().await {
res.unwrap()?;
}
Ok(HttpResponse::Ok().body(format!("published {}@{version_id}", manifest.name)))
Ok(HttpResponse::Ok().body(format!(
"published {}@{} {}",
manifest.name, manifest.version, manifest.target
)))
}
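
For reference, the index commit above follows a common git2 pattern: write each file as a blob, rebuild the scope subtree with the new blobs, graft that subtree onto the root tree, and commit on top of the parent. A minimal, self-contained sketch of that pattern; the repository path, scope, file name, and committer identity are illustrative assumptions, not the registry's actual values.

```rust
use git2::{Repository, Signature};
use std::path::Path;

fn commit_index_entry(
    repo_path: &Path,
    scope: &str,     // e.g. "some-scope" (assumed)
    file_name: &str, // e.g. "some-package" (assumed)
    contents: &[u8],
    message: &str,
) -> Result<(), git2::Error> {
    let repo = Repository::open_bare(repo_path)?;
    let sig = Signature::now("registry-bot", "bot@example.com")?; // assumed identity

    // write the file contents as a blob
    let blob = repo.blob(contents)?;

    // the parent commit and its tree come from HEAD
    let parent = repo.find_reference("HEAD")?.peel_to_commit()?;
    let old_root = parent.tree()?;

    // rebuild the scope subtree with the new blob inserted
    let old_scope = match old_root.get_name(scope) {
        Some(entry) => Some(repo.find_tree(entry.id())?),
        None => None,
    };
    let mut scope_builder = repo.treebuilder(old_scope.as_ref())?;
    scope_builder.insert(file_name, blob, 0o100644)?; // regular file
    let scope_tree = scope_builder.write()?;

    // graft the rebuilt subtree back onto the root tree and commit
    let mut root_builder = repo.treebuilder(Some(&old_root))?;
    root_builder.insert(scope, scope_tree, 0o040000)?; // directory
    let root_tree = repo.find_tree(root_builder.write()?)?;
    repo.commit(Some("HEAD"), &sig, &sig, message, &root_tree, &[&parent])?;
    Ok(())
}
```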

View file

@ -1,34 +1,36 @@
use crate::{
error::RegistryError,
package::{read_package, PackageResponse},
search::find_max_searchable,
AppState,
};
use actix_web::{web, HttpResponse};
use pesde::names::PackageName;
use std::collections::HashMap;
use actix_web::{web, HttpResponse, Responder};
use serde::Deserialize;
use std::{collections::HashMap, sync::Arc};
use tantivy::{collector::Count, query::AllQuery, schema::Value, DateTime, Order};
use tokio::task::JoinSet;
use crate::{error::Error, package::PackageResponse, AppState};
use pesde::{
names::PackageName,
source::{
git_index::{read_file, root_tree, GitBasedSource},
pesde::IndexFile,
},
};
#[derive(Deserialize)]
pub struct Request {
#[serde(default)]
query: String,
query: Option<String>,
#[serde(default)]
offset: usize,
offset: Option<usize>,
}
pub async fn search_packages(
app_state: web::Data<AppState>,
request_query: web::Query<Request>,
) -> Result<HttpResponse, RegistryError> {
request: web::Query<Request>,
) -> Result<impl Responder, Error> {
let searcher = app_state.search_reader.searcher();
let schema = searcher.schema();
let id = schema.get_field("id").unwrap();
let query = request_query.query.trim();
let query = request.query.as_deref().unwrap_or_default().trim();
let query = if query.is_empty() {
Box::new(AllQuery)
@ -42,28 +44,21 @@ pub async fn search_packages(
&(
Count,
tantivy::collector::TopDocs::with_limit(50)
.and_offset(request_query.offset)
.and_offset(request.offset.unwrap_or_default())
.order_by_fast_field::<DateTime>("published_at", Order::Desc),
),
)
.unwrap();
let source = Arc::new(app_state.source.clone().read_owned().await);
let source = app_state.source.lock().await;
let repo = gix::open(source.path(&app_state.project))?;
let tree = root_tree(&repo)?;
let mut results = top_docs
.iter()
.map(|_| None::<PackageResponse>)
.collect::<Vec<_>>();
let mut tasks = top_docs
let top_docs = top_docs
.into_iter()
.enumerate()
.map(|(i, (_, doc_address))| {
let app_state = app_state.clone();
.map(|(_, doc_address)| {
let doc = searcher.doc::<HashMap<_, _>>(doc_address).unwrap();
let source = source.clone();
async move {
let id = doc
.get(&id)
.unwrap()
@ -71,23 +66,42 @@ pub async fn search_packages(
.unwrap()
.parse::<PackageName>()
.unwrap();
let (scope, name) = id.as_str();
let file = read_package(&app_state, &id, &source).await?.unwrap();
let file: IndexFile =
toml::de::from_str(&read_file(&tree, [scope, name]).unwrap().unwrap()).unwrap();
let (version_id, _) = find_max_searchable(&file).unwrap();
let (latest_version, entry) = file
.entries
.iter()
.max_by_key(|(v_id, _)| v_id.version())
.unwrap();
Ok::<_, RegistryError>((i, PackageResponse::new(&id, version_id, &file)))
PackageResponse {
name: id.to_string(),
version: latest_version.version().to_string(),
targets: file
.entries
.iter()
.filter(|(v_id, _)| v_id.version() == latest_version.version())
.map(|(_, entry)| (&entry.target).into())
.collect(),
description: entry.description.clone().unwrap_or_default(),
published_at: file
.entries
.values()
.map(|entry| entry.published_at)
.max()
.unwrap(),
license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
}
})
.collect::<JoinSet<_>>();
while let Some(res) = tasks.join_next().await {
let (i, res) = res.unwrap()?;
results[i] = Some(res);
}
.collect::<Vec<_>>();
Ok(HttpResponse::Ok().json(serde_json::json!({
"data": results,
"data": top_docs,
"count": count,
})))
}
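
The query handling above reduces to: an empty query matches everything, anything else goes through the query parser, and results are paged and ordered by the `published_at` fast field. A rough, runnable sketch of that flow against an assumed in-RAM index (field names mirror the ones used here; everything else is illustrative):

```rust
use tantivy::{
    collector::{Count, TopDocs},
    query::{AllQuery, Query, QueryParser},
    schema::{Schema, FAST, STORED, STRING, TEXT},
    DateTime, Index, Order,
};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut builder = Schema::builder();
    let id = builder.add_text_field("id", STRING | STORED);
    let description = builder.add_text_field("description", TEXT);
    builder.add_date_field("published_at", FAST | STORED);
    let index = Index::create_in_ram(builder.build());

    let raw = ""; // pretend this came from the request's query string
    let query: Box<dyn Query> = if raw.trim().is_empty() {
        Box::new(AllQuery) // no query -> list everything
    } else {
        QueryParser::for_index(&index, vec![id, description]).parse_query(raw.trim())?
    };

    let searcher = index.reader()?.searcher();
    let (count, top_docs) = searcher.search(
        &query,
        &(
            Count,
            TopDocs::with_limit(50)
                .and_offset(0) // the request's offset would go here
                .order_by_fast_field::<DateTime>("published_at", Order::Desc),
        ),
    )?;
    println!("{count} total hits, {} returned", top_docs.len());
    Ok(())
}
```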

View file

@ -1,83 +0,0 @@
use crate::{
auth::UserId,
error::RegistryError,
git::push_changes,
package::{read_package, read_scope_info},
request_path::AllOrSpecificTarget,
search::search_version_changed,
AppState,
};
use actix_web::{http::Method, web, HttpRequest, HttpResponse};
use pesde::names::PackageName;
use semver::Version;
use std::collections::HashMap;
pub async fn yank_package_version(
request: HttpRequest,
app_state: web::Data<AppState>,
path: web::Path<(PackageName, Version, AllOrSpecificTarget)>,
user_id: web::ReqData<UserId>,
) -> Result<HttpResponse, RegistryError> {
let yanked = request.method() != Method::DELETE;
let (name, version, target) = path.into_inner();
let source = app_state.source.write().await;
let Some(scope_info) = read_scope_info(&app_state, name.scope(), &source).await? else {
return Ok(HttpResponse::NotFound().finish());
};
if !scope_info.owners.contains(&user_id.0) {
return Ok(HttpResponse::Forbidden().finish());
}
let Some(mut file) = read_package(&app_state, &name, &source).await? else {
return Ok(HttpResponse::NotFound().finish());
};
let mut targets = vec![];
for (v_id, entry) in &mut file.entries {
if *v_id.version() != version {
continue;
}
match target {
AllOrSpecificTarget::Specific(kind) if entry.target.kind() != kind => continue,
_ => {}
}
if entry.yanked == yanked {
continue;
}
targets.push(entry.target.kind().to_string());
entry.yanked = yanked;
}
if targets.is_empty() {
return Ok(HttpResponse::Conflict().finish());
}
let file_string = toml::to_string(&file)?;
push_changes(
&app_state,
&source,
name.scope().to_string(),
HashMap::from([(name.name().to_string(), file_string.into_bytes())]),
format!(
"{}yank {name}@{version} {}",
if yanked { "" } else { "un" },
targets.join(", "),
),
)
.await?;
search_version_changed(&app_state, &name, &file);
Ok(HttpResponse::Ok().body(format!(
"{}yanked {name}@{version} {}",
if yanked { "" } else { "un" },
targets.join(", "),
)))
}

View file

@ -4,7 +4,7 @@ use serde::Serialize;
use thiserror::Error;
#[derive(Debug, Error)]
pub enum RegistryError {
pub enum Error {
#[error("failed to parse query")]
Query(#[from] tantivy::query::QueryParserError),
@ -53,16 +53,16 @@ pub struct ErrorResponse {
pub error: String,
}
impl ResponseError for RegistryError {
impl ResponseError for Error {
fn error_response(&self) -> HttpResponse<BoxBody> {
match self {
RegistryError::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("failed to parse query: {e}"),
}),
RegistryError::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
Error::Tar(_) => HttpResponse::BadRequest().json(ErrorResponse {
error: "corrupt archive".to_string(),
}),
RegistryError::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
Error::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
error: format!("archive is invalid: {e}"),
}),
e => {
@ -74,16 +74,16 @@ impl ResponseError for RegistryError {
}
pub trait ReqwestErrorExt {
async fn into_error(self) -> Result<Self, RegistryError>
async fn into_error(self) -> Result<Self, Error>
where
Self: Sized;
}
impl ReqwestErrorExt for reqwest::Response {
async fn into_error(self) -> Result<Self, RegistryError> {
async fn into_error(self) -> Result<Self, Error> {
match self.error_for_status_ref() {
Ok(_) => Ok(self),
Err(e) => Err(RegistryError::ReqwestResponse(self.text().await?, e)),
Err(e) => Err(Error::ReqwestResponse(self.text().await?, e)),
}
}
}
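
The error type above doubles as the handlers' response type: actix-web's `ResponseError` decides, per variant, whether the client sees a structured JSON body or an opaque 500. A trimmed-down sketch of the same mapping, using an assumed three-variant enum rather than the registry's full error type:

```rust
use actix_web::{body::BoxBody, http::StatusCode, HttpResponse, ResponseError};
use serde::Serialize;

#[derive(Debug, Serialize)]
struct ErrorResponse {
    error: String,
}

#[derive(Debug, thiserror::Error)]
enum Error {
    #[error("failed to parse query: {0}")]
    Query(String),
    #[error("archive is invalid: {0}")]
    InvalidArchive(String),
    #[error("internal error")]
    Internal,
}

impl ResponseError for Error {
    fn error_response(&self) -> HttpResponse<BoxBody> {
        match self {
            // client errors echo a structured JSON body...
            Error::Query(e) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("failed to parse query: {e}"),
            }),
            Error::InvalidArchive(e) => HttpResponse::BadRequest().json(ErrorResponse {
                error: format!("archive is invalid: {e}"),
            }),
            // ...everything else stays opaque to the client
            _ => HttpResponse::new(StatusCode::INTERNAL_SERVER_ERROR),
        }
    }
}
```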

View file

@ -1,98 +0,0 @@
use crate::{benv, error::RegistryError, AppState};
use git2::{Remote, Repository, Signature};
use pesde::source::{git_index::GitBasedSource, pesde::PesdePackageSource};
use std::collections::HashMap;
use tokio::task::spawn_blocking;
fn signature<'a>() -> Signature<'a> {
Signature::now(
&benv!(required "COMMITTER_GIT_NAME"),
&benv!(required "COMMITTER_GIT_EMAIL"),
)
.unwrap()
}
fn get_refspec(repo: &Repository, remote: &mut Remote) -> Result<String, git2::Error> {
let upstream_branch_buf = repo.branch_upstream_name(repo.head()?.name().unwrap())?;
let upstream_branch = upstream_branch_buf.as_str().unwrap();
let refspec_buf = remote
.refspecs()
.find(|r| r.direction() == git2::Direction::Fetch && r.dst_matches(upstream_branch))
.unwrap()
.rtransform(upstream_branch)?;
let refspec = refspec_buf.as_str().unwrap();
Ok(refspec.to_string())
}
const FILE_FILEMODE: i32 = 0o100644;
const DIR_FILEMODE: i32 = 0o040000;
pub async fn push_changes(
app_state: &AppState,
source: &PesdePackageSource,
directory: String,
files: HashMap<String, Vec<u8>>,
message: String,
) -> Result<(), RegistryError> {
let path = source.path(&app_state.project);
let auth_config = app_state.project.auth_config().clone();
spawn_blocking(move || {
let repo = Repository::open_bare(path)?;
let mut oids = HashMap::new();
let mut remote = repo.find_remote("origin")?;
let refspec = get_refspec(&repo, &mut remote)?;
let reference = repo.find_reference(&refspec)?;
for (name, contents) in files {
let oid = repo.blob(&contents)?;
oids.insert(name, oid);
}
let old_root_tree = reference.peel_to_tree()?;
let old_dir_tree = match old_root_tree.get_name(&directory) {
Some(entry) => Some(repo.find_tree(entry.id())?),
None => None,
};
let mut dir_tree = repo.treebuilder(old_dir_tree.as_ref())?;
for (file, oid) in oids {
dir_tree.insert(file, oid, FILE_FILEMODE)?;
}
let dir_tree_id = dir_tree.write()?;
let mut root_tree = repo.treebuilder(Some(&repo.find_tree(old_root_tree.id())?))?;
root_tree.insert(directory, dir_tree_id, DIR_FILEMODE)?;
let tree_oid = root_tree.write()?;
repo.commit(
Some("HEAD"),
&signature(),
&signature(),
&message,
&repo.find_tree(tree_oid)?,
&[&reference.peel_to_commit()?],
)?;
let mut push_options = git2::PushOptions::new();
let mut remote_callbacks = git2::RemoteCallbacks::new();
let git_creds = auth_config.git_credentials().unwrap();
remote_callbacks.credentials(|_, _, _| {
git2::Cred::userpass_plaintext(&git_creds.username, &git_creds.password)
});
push_options.remote_callbacks(remote_callbacks);
remote.push(&[refspec], Some(&mut push_options))?;
Ok(())
})
.await
.unwrap()
}

View file

@ -14,13 +14,10 @@ use actix_web::{
};
use fs_err::tokio as fs;
use pesde::{
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
source::{pesde::PesdePackageSource, traits::PackageSource},
AuthConfig, Project,
};
use std::{env::current_dir, path::PathBuf, sync::Arc};
use std::{env::current_dir, path::PathBuf};
use tracing::level_filters::LevelFilter;
use tracing_subscriber::{
fmt::format::FmtSpan, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
@ -29,9 +26,7 @@ use tracing_subscriber::{
mod auth;
mod endpoints;
mod error;
mod git;
mod package;
mod request_path;
mod search;
mod storage;
@ -47,7 +42,7 @@ pub fn make_reqwest() -> reqwest::Client {
}
pub struct AppState {
pub source: Arc<tokio::sync::RwLock<PesdePackageSource>>,
pub source: tokio::sync::Mutex<PesdePackageSource>,
pub project: Project,
pub storage: Storage,
pub auth: Auth,
@ -111,9 +106,7 @@ async fn run() -> std::io::Result<()> {
);
let source = PesdePackageSource::new(benv!(required "INDEX_REPO_URL").try_into().unwrap());
source
.refresh(&RefreshOptions {
project: project.clone(),
})
.refresh(&project)
.await
.expect("failed to refresh source");
let config = source
@ -134,7 +127,7 @@ async fn run() -> std::io::Result<()> {
tracing::info!("auth: {auth}");
auth
},
source: Arc::new(tokio::sync::RwLock::new(source)),
source: tokio::sync::Mutex::new(source),
project,
search_reader,
@ -150,8 +143,6 @@ async fn run() -> std::io::Result<()> {
.finish()
.unwrap();
let publish_payload_config = PayloadConfig::new(config.max_archive_size);
HttpServer::new(move || {
App::new()
.wrap(sentry_actix::Sentry::with_transaction())
@ -168,38 +159,6 @@ async fn run() -> std::io::Result<()> {
)
.service(
web::scope("/v0")
.route(
"/search",
web::get()
.to(endpoints::search::search_packages)
.wrap(from_fn(auth::read_mw)),
)
.route(
"/packages/{name}",
web::get()
.to(endpoints::package_versions::get_package_versions_v0)
.wrap(from_fn(auth::read_mw)),
)
.route(
"/packages/{name}/{version}/{target}",
web::get()
.to(endpoints::package_version::get_package_version_v0)
.wrap(from_fn(auth::read_mw)),
)
.service(
web::scope("/packages")
.app_data(publish_payload_config.clone())
.route(
"",
web::post()
.to(endpoints::publish_version::publish_package)
.wrap(Governor::new(&publish_governor_config))
.wrap(from_fn(auth::write_mw)),
),
),
)
.service(
web::scope("/v1")
.route(
"/search",
web::get()
@ -212,45 +171,15 @@ async fn run() -> std::io::Result<()> {
.to(endpoints::package_versions::get_package_versions)
.wrap(from_fn(auth::read_mw)),
)
.service(
web::resource("/packages/{name}/deprecate")
.put(endpoints::deprecate_version::deprecate_package_version)
.delete(endpoints::deprecate_version::deprecate_package_version)
.wrap(from_fn(auth::write_mw)),
)
.route(
"/packages/{name}/{version}/{target}",
web::get()
.to(endpoints::package_version::get_package_version)
.wrap(from_fn(auth::read_mw)),
)
.route(
"/packages/{name}/{version}/{target}/archive",
web::get()
.to(endpoints::package_archive::get_package_archive)
.wrap(from_fn(auth::read_mw)),
)
.route(
"/packages/{name}/{version}/{target}/doc",
web::get()
.to(endpoints::package_doc::get_package_doc)
.wrap(from_fn(auth::read_mw)),
)
.route(
"/packages/{name}/{version}/{target}/readme",
web::get()
.to(endpoints::package_readme::get_package_readme)
.wrap(from_fn(auth::read_mw)),
)
.service(
web::resource("/packages/{name}/{version}/{target}/yank")
.put(endpoints::yank_version::yank_package_version)
.delete(endpoints::yank_version::yank_package_version)
.wrap(from_fn(auth::write_mw)),
)
.service(
web::scope("/packages")
.app_data(publish_payload_config.clone())
.app_data(PayloadConfig::new(config.max_archive_size))
.route(
"",
web::post()

View file

@ -1,33 +1,27 @@
use crate::AppState;
use pesde::{
manifest::{
target::{Target, TargetKind},
Alias, DependencyType,
},
names::PackageName,
source::{
git_index::{read_file, root_tree, GitBasedSource},
ids::VersionId,
pesde::{IndexFile, IndexFileEntry, PesdePackageSource, ScopeInfo, SCOPE_INFO_FILE},
specifiers::DependencySpecifiers,
},
};
use semver::Version;
use chrono::{DateTime, Utc};
use pesde::manifest::target::{Target, TargetKind};
use serde::Serialize;
use std::collections::{BTreeMap, BTreeSet};
use tokio::task::spawn_blocking;
use std::collections::BTreeSet;
#[derive(Debug, Serialize, Eq, PartialEq)]
struct TargetInfoInner {
pub struct TargetInfo {
kind: TargetKind,
lib: bool,
bin: bool,
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
scripts: BTreeSet<String>,
}
impl TargetInfoInner {
fn new(target: &Target) -> Self {
TargetInfoInner {
impl From<Target> for TargetInfo {
fn from(target: Target) -> Self {
(&target).into()
}
}
impl From<&Target> for TargetInfo {
fn from(target: &Target) -> Self {
TargetInfo {
kind: target.kind(),
lib: target.lib_path().is_some(),
bin: target.bin_path().is_some(),
scripts: target
@ -38,25 +32,6 @@ impl TargetInfoInner {
}
}
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct TargetInfo {
kind: TargetKind,
#[serde(skip_serializing_if = "std::ops::Not::not")]
yanked: bool,
#[serde(flatten)]
inner: TargetInfoInner,
}
impl TargetInfo {
fn new(target: &Target, yanked: bool) -> Self {
TargetInfo {
kind: target.kind(),
yanked,
inner: TargetInfoInner::new(target),
}
}
}
impl Ord for TargetInfo {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.kind.cmp(&other.kind)
@ -69,199 +44,18 @@ impl PartialOrd for TargetInfo {
}
}
#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
#[serde(untagged)]
pub enum RegistryDocEntryKind {
Page {
name: String,
},
Category {
#[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
items: BTreeSet<RegistryDocEntry>,
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
collapsed: bool,
},
}
#[derive(Debug, Serialize, Ord, PartialOrd, Eq, PartialEq)]
pub struct RegistryDocEntry {
label: String,
#[serde(default, skip_serializing_if = "Option::is_none")]
position: Option<usize>,
#[serde(flatten)]
kind: RegistryDocEntryKind,
}
impl From<pesde::source::pesde::DocEntry> for RegistryDocEntry {
fn from(entry: pesde::source::pesde::DocEntry) -> Self {
Self {
label: entry.label,
position: entry.position,
kind: match entry.kind {
pesde::source::pesde::DocEntryKind::Page { name, .. } => {
RegistryDocEntryKind::Page { name }
}
pesde::source::pesde::DocEntryKind::Category { items, collapsed } => {
RegistryDocEntryKind::Category {
items: items.into_iter().map(Into::into).collect(),
collapsed,
}
}
},
}
}
}
#[derive(Debug, Serialize)]
pub struct PackageResponseInner {
published_at: jiff::Timestamp,
#[serde(skip_serializing_if = "String::is_empty")]
license: String,
#[serde(skip_serializing_if = "Vec::is_empty")]
authors: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
repository: Option<String>,
#[serde(skip_serializing_if = "BTreeSet::is_empty")]
docs: BTreeSet<RegistryDocEntry>,
#[serde(skip_serializing_if = "BTreeMap::is_empty")]
dependencies: BTreeMap<Alias, (DependencySpecifiers, DependencyType)>,
}
impl PackageResponseInner {
pub fn new(entry: &IndexFileEntry) -> Self {
PackageResponseInner {
published_at: entry.published_at,
license: entry.license.clone().unwrap_or_default(),
authors: entry.authors.clone(),
repository: entry.repository.clone().map(|url| url.to_string()),
docs: entry.docs.iter().cloned().map(Into::into).collect(),
dependencies: entry.dependencies.clone(),
}
}
}
#[derive(Debug, Serialize)]
pub struct PackageResponse {
name: String,
version: String,
targets: BTreeSet<TargetInfo>,
pub name: String,
pub version: String,
pub targets: BTreeSet<TargetInfo>,
#[serde(skip_serializing_if = "String::is_empty")]
description: String,
pub description: String,
pub published_at: DateTime<Utc>,
#[serde(skip_serializing_if = "String::is_empty")]
deprecated: String,
#[serde(flatten)]
inner: PackageResponseInner,
}
impl PackageResponse {
pub fn new(name: &PackageName, version_id: &VersionId, file: &IndexFile) -> Self {
let entry = file.entries.get(version_id).unwrap();
PackageResponse {
name: name.to_string(),
version: version_id.version().to_string(),
targets: file
.entries
.iter()
.filter(|(ver, _)| ver.version() == version_id.version())
.map(|(_, entry)| TargetInfo::new(&entry.target, entry.yanked))
.collect(),
description: entry.description.clone().unwrap_or_default(),
deprecated: file.meta.deprecated.clone(),
inner: PackageResponseInner::new(entry),
}
}
}
#[derive(Debug, Serialize)]
struct PackageVersionsResponseVersionInner {
target: TargetInfoInner,
#[serde(skip_serializing_if = "std::ops::Not::not")]
yanked: bool,
#[serde(flatten)]
inner: PackageResponseInner,
}
#[derive(Debug, Serialize, Default)]
struct PackageVersionsResponseVersion {
#[serde(skip_serializing_if = "String::is_empty")]
description: String,
targets: BTreeMap<TargetKind, PackageVersionsResponseVersionInner>,
}
#[derive(Debug, Serialize)]
pub struct PackageVersionsResponse {
name: String,
#[serde(skip_serializing_if = "String::is_empty")]
deprecated: String,
versions: BTreeMap<Version, PackageVersionsResponseVersion>,
}
impl PackageVersionsResponse {
pub fn new(name: &PackageName, file: &IndexFile) -> Self {
let mut versions = BTreeMap::<Version, PackageVersionsResponseVersion>::new();
for (v_id, entry) in file.entries.iter() {
let versions_resp = versions.entry(v_id.version().clone()).or_default();
versions_resp.description = entry.description.clone().unwrap_or_default();
versions_resp.targets.insert(
entry.target.kind(),
PackageVersionsResponseVersionInner {
target: TargetInfoInner::new(&entry.target),
yanked: entry.yanked,
inner: PackageResponseInner::new(entry),
},
);
}
PackageVersionsResponse {
name: name.to_string(),
deprecated: file.meta.deprecated.clone(),
versions,
}
}
}
pub async fn read_package(
app_state: &AppState,
package: &PackageName,
source: &PesdePackageSource,
) -> Result<Option<IndexFile>, crate::error::RegistryError> {
let path = source.path(&app_state.project);
let package = package.clone();
spawn_blocking(move || {
let (scope, name) = package.as_str();
let repo = gix::open(path)?;
let tree = root_tree(&repo)?;
let Some(versions) = read_file(&tree, [scope, name])? else {
return Ok(None);
};
toml::de::from_str(&versions).map_err(Into::into)
})
.await
.unwrap()
}
pub async fn read_scope_info(
app_state: &AppState,
scope: &str,
source: &PesdePackageSource,
) -> Result<Option<ScopeInfo>, crate::error::RegistryError> {
let path = source.path(&app_state.project);
let scope = scope.to_string();
spawn_blocking(move || {
let repo = gix::open(path)?;
let tree = root_tree(&repo)?;
let Some(versions) = read_file(&tree, [&*scope, SCOPE_INFO_FILE])? else {
return Ok(None);
};
toml::de::from_str(&versions).map_err(Into::into)
})
.await
.unwrap()
pub license: String,
#[serde(skip_serializing_if = "Vec::is_empty")]
pub authors: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub repository: Option<String>,
}
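
The response shapes above lean on two serde attributes: `skip_serializing_if` drops empty fields from the JSON, and `#[serde(flatten)]` hoists the shared inner struct's fields to the top level. A small sketch of the combined effect (field names and the use of serde_json are assumptions for illustration):

```rust
use serde::Serialize;
use std::collections::BTreeSet;

#[derive(Serialize)]
struct Inner {
    published_at: String, // stand-in for the real timestamp type
    #[serde(skip_serializing_if = "Vec::is_empty")]
    authors: Vec<String>,
}

#[derive(Serialize)]
struct Response {
    name: String,
    #[serde(skip_serializing_if = "String::is_empty")]
    description: String,
    #[serde(skip_serializing_if = "BTreeSet::is_empty")]
    targets: BTreeSet<String>,
    #[serde(flatten)]
    inner: Inner, // emitted at the top level, not nested
}

fn main() {
    let r = Response {
        name: "scope/name".into(),
        description: String::new(), // empty -> omitted
        targets: BTreeSet::from(["luau".to_string()]),
        inner: Inner {
            published_at: "2024-01-01T00:00:00Z".into(),
            authors: vec![], // empty -> omitted
        },
    };
    // {"name":"scope/name","targets":["luau"],"published_at":"2024-01-01T00:00:00Z"}
    println!("{}", serde_json::to_string(&r).unwrap());
}
```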

View file

@ -1,99 +0,0 @@
use pesde::{
manifest::target::TargetKind,
source::{ids::VersionId, pesde::IndexFile},
};
use semver::Version;
use serde::{Deserialize, Deserializer};
#[derive(Debug)]
pub enum LatestOrSpecificVersion {
Latest,
Specific(Version),
}
impl<'de> Deserialize<'de> for LatestOrSpecificVersion {
fn deserialize<D>(deserializer: D) -> Result<LatestOrSpecificVersion, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("latest") {
return Ok(LatestOrSpecificVersion::Latest);
}
s.parse()
.map(LatestOrSpecificVersion::Specific)
.map_err(serde::de::Error::custom)
}
}
#[derive(Debug)]
pub enum AnyOrSpecificTarget {
Any,
Specific(TargetKind),
}
impl<'de> Deserialize<'de> for AnyOrSpecificTarget {
fn deserialize<D>(deserializer: D) -> Result<AnyOrSpecificTarget, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("any") {
return Ok(AnyOrSpecificTarget::Any);
}
s.parse()
.map(AnyOrSpecificTarget::Specific)
.map_err(serde::de::Error::custom)
}
}
pub fn resolve_version_and_target(
file: &IndexFile,
version: LatestOrSpecificVersion,
target: AnyOrSpecificTarget,
) -> Option<&VersionId> {
let version = match version {
LatestOrSpecificVersion::Latest => match file.entries.keys().map(|k| k.version()).max() {
Some(latest) => latest.clone(),
None => return None,
},
LatestOrSpecificVersion::Specific(version) => version,
};
let mut versions = file
.entries
.iter()
.filter(|(v_id, _)| *v_id.version() == version);
match target {
AnyOrSpecificTarget::Any => versions.min_by_key(|(v_id, _)| v_id.target()),
AnyOrSpecificTarget::Specific(kind) => {
versions.find(|(_, entry)| entry.target.kind() == kind)
}
}
.map(|(v_id, _)| v_id)
}
#[derive(Debug)]
pub enum AllOrSpecificTarget {
All,
Specific(TargetKind),
}
impl<'de> Deserialize<'de> for AllOrSpecificTarget {
fn deserialize<D>(deserializer: D) -> Result<AllOrSpecificTarget, D::Error>
where
D: Deserializer<'de>,
{
let s = String::deserialize(deserializer)?;
if s.eq_ignore_ascii_case("all") {
return Ok(AllOrSpecificTarget::All);
}
s.parse()
.map(AllOrSpecificTarget::Specific)
.map_err(serde::de::Error::custom)
}
}
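
All three enums above share one deserialization trick: read the path segment as a string, check for the sentinel word case-insensitively, and otherwise fall back to the type's `FromStr`. A compact usage sketch with a simplified stand-alone enum; serde_json is only used here to drive the deserializer, the real handlers receive these through `web::Path`:

```rust
use serde::{Deserialize, Deserializer};

#[derive(Debug)]
enum LatestOrSpecific {
    Latest,
    Specific(semver::Version),
}

impl<'de> Deserialize<'de> for LatestOrSpecific {
    fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
        let s = String::deserialize(deserializer)?;
        if s.eq_ignore_ascii_case("latest") {
            return Ok(LatestOrSpecific::Latest);
        }
        // anything else must parse as a concrete version
        s.parse()
            .map(LatestOrSpecific::Specific)
            .map_err(serde::de::Error::custom)
    }
}

fn main() {
    // both forms arrive through the same path segment
    let a: LatestOrSpecific = serde_json::from_str("\"latest\"").unwrap();
    let b: LatestOrSpecific = serde_json::from_str("\"1.2.3\"").unwrap();
    println!("{a:?} / {b:?}"); // Latest / Specific(...)
}
```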

View file

@ -5,7 +5,6 @@ use pesde::{
names::PackageName,
source::{
git_index::{root_tree, GitBasedSource},
ids::VersionId,
pesde::{IndexFile, IndexFileEntry, PesdePackageSource, SCOPE_INFO_FILE},
},
Project,
@ -19,7 +18,7 @@ use tantivy::{
};
use tokio::pin;
async fn all_packages(
pub async fn all_packages(
source: &PesdePackageSource,
project: &Project,
) -> impl Stream<Item = (PackageName, IndexFile)> {
@ -68,18 +67,6 @@ async fn all_packages(
}
}
pub fn find_max_searchable(file: &IndexFile) -> Option<(&VersionId, &IndexFileEntry)> {
file.entries
.iter()
.filter(|(_, entry)| !entry.yanked)
.max_by(|(v_id_a, entry_a), (v_id_b, entry_b)| {
v_id_a
.version()
.cmp(v_id_b.version())
.then(entry_a.published_at.cmp(&entry_b.published_at))
})
}
pub async fn make_search(
project: &Project,
source: &PesdePackageSource,
@ -93,7 +80,6 @@ pub async fn make_search(
);
let id_field = schema_builder.add_text_field("id", STRING | STORED);
let scope = schema_builder.add_text_field("scope", field_options.clone());
let name = schema_builder.add_text_field("name", field_options.clone());
let description = schema_builder.add_text_field("description", field_options);
@ -117,24 +103,19 @@ pub async fn make_search(
let stream = all_packages(source, project).await;
pin!(stream);
while let Some((pkg_name, file)) = stream.next().await {
if !file.meta.deprecated.is_empty() {
continue;
}
let Some((_, latest_entry)) = find_max_searchable(&file) else {
while let Some((pkg_name, mut file)) = stream.next().await {
let Some((_, latest_entry)) = file.entries.pop_last() else {
tracing::error!("no versions found for {pkg_name}");
continue;
};
search_writer
.add_document(doc!(
search_writer.add_document(doc!(
id_field => pkg_name.to_string(),
scope => pkg_name.scope(),
name => pkg_name.name(),
description => latest_entry.description.clone().unwrap_or_default(),
published_at => DateTime::from_timestamp_nanos(latest_entry.published_at.as_nanosecond() as i64),
))
.unwrap();
scope => pkg_name.as_str().0,
name => pkg_name.as_str().1,
description => latest_entry.description.unwrap_or_default(),
published_at => DateTime::from_timestamp_secs(latest_entry.published_at.timestamp()),
)).unwrap();
}
search_writer.commit().unwrap();
@ -147,7 +128,7 @@ pub async fn make_search(
(search_reader, search_writer, query_parser)
}
pub fn update_search_version(app_state: &AppState, name: &PackageName, entry: &IndexFileEntry) {
pub fn update_version(app_state: &AppState, name: &PackageName, entry: IndexFileEntry) {
let mut search_writer = app_state.search_writer.lock().unwrap();
let schema = search_writer.index().schema();
let id_field = schema.get_field("id").unwrap();
@ -156,34 +137,12 @@ pub fn update_search_version(app_state: &AppState, name: &PackageName, entry: &I
search_writer.add_document(doc!(
id_field => name.to_string(),
schema.get_field("scope").unwrap() => name.scope(),
schema.get_field("name").unwrap() => name.name(),
schema.get_field("description").unwrap() => entry.description.clone().unwrap_or_default(),
schema.get_field("published_at").unwrap() => DateTime::from_timestamp_nanos(entry.published_at.as_nanosecond() as i64)
schema.get_field("scope").unwrap() => name.as_str().0,
schema.get_field("name").unwrap() => name.as_str().1,
schema.get_field("description").unwrap() => entry.description.unwrap_or_default(),
schema.get_field("published_at").unwrap() => DateTime::from_timestamp_secs(entry.published_at.timestamp())
)).unwrap();
search_writer.commit().unwrap();
app_state.search_reader.reload().unwrap();
}
pub fn search_version_changed(app_state: &AppState, name: &PackageName, file: &IndexFile) {
let entry = if file.meta.deprecated.is_empty() {
find_max_searchable(file)
} else {
None
};
let Some((_, entry)) = entry else {
let mut search_writer = app_state.search_writer.lock().unwrap();
let schema = search_writer.index().schema();
let id_field = schema.get_field("id").unwrap();
search_writer.delete_term(Term::from_field_text(id_field, &name.to_string()));
search_writer.commit().unwrap();
app_state.search_reader.reload().unwrap();
return;
};
update_search_version(app_state, name, entry);
}
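
The writer-side flow above boils down to: delete any existing document for the package id, add the fresh entry, commit, and reload the reader so searchers see the new segment. A compact sketch under the same assumed schema as the query-side example:

```rust
use tantivy::{
    doc,
    schema::{Schema, FAST, STORED, STRING, TEXT},
    DateTime, Index, Term,
};

fn main() -> tantivy::Result<()> {
    let mut builder = Schema::builder();
    let id = builder.add_text_field("id", STRING | STORED);
    let description = builder.add_text_field("description", TEXT);
    let published_at = builder.add_date_field("published_at", FAST | STORED);
    let index = Index::create_in_ram(builder.build());

    let mut writer = index.writer(50_000_000)?;
    let reader = index.reader()?;

    // replace-by-id: drop the previous document, then add the new entry
    writer.delete_term(Term::from_field_text(id, "scope/name"));
    writer.add_document(doc!(
        id => "scope/name", // assumed values throughout
        description => "an example package",
        published_at => DateTime::from_timestamp_secs(1_700_000_000),
    ))?;
    writer.commit()?;
    reader.reload()?; // make the new segment visible to searchers
    Ok(())
}
```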

View file

@ -1,31 +1,26 @@
use crate::{error::RegistryError, storage::StorageImpl};
use crate::{error::Error, storage::StorageImpl};
use actix_web::{
http::header::{CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE},
http::header::{CONTENT_ENCODING, CONTENT_TYPE},
HttpResponse,
};
use fs_err::tokio as fs;
use pesde::{names::PackageName, source::ids::VersionId};
use pesde::{names::PackageName, source::version_id::VersionId};
use std::{
fmt::Display,
path::{Path, PathBuf},
};
use tokio_util::io::ReaderStream;
#[derive(Debug)]
pub struct FSStorage {
pub root: PathBuf,
}
async fn read_file_to_response(
path: &Path,
content_type: &str,
) -> Result<HttpResponse, RegistryError> {
Ok(match fs::File::open(path).await {
Ok(file) => HttpResponse::Ok()
async fn read_file_to_response(path: &Path, content_type: &str) -> Result<HttpResponse, Error> {
Ok(match fs::read(path).await {
Ok(contents) => HttpResponse::Ok()
.append_header((CONTENT_TYPE, content_type))
.append_header((CONTENT_ENCODING, "gzip"))
.append_header((CONTENT_LENGTH, file.metadata().await?.len()))
.streaming(ReaderStream::new(file)),
.body(contents),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => HttpResponse::NotFound().finish(),
Err(e) => return Err(e.into()),
})
@ -37,7 +32,7 @@ impl StorageImpl for FSStorage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
let (scope, name) = package_name.as_str();
let path = self
@ -57,7 +52,7 @@ impl StorageImpl for FSStorage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
let (scope, name) = package_name.as_str();
let path = self
@ -75,7 +70,7 @@ impl StorageImpl for FSStorage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
let (scope, name) = package_name.as_str();
let path = self
@ -95,7 +90,7 @@ impl StorageImpl for FSStorage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
let (scope, name) = package_name.as_str();
let path = self
@ -108,7 +103,7 @@ impl StorageImpl for FSStorage {
read_file_to_response(&path.join("readme.gz"), "text/plain").await
}
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
let path = self.root.join("Doc");
fs::create_dir_all(&path).await?;
@ -117,7 +112,7 @@ impl StorageImpl for FSStorage {
Ok(())
}
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
let path = self.root.join("Doc");
read_file_to_response(&path.join(format!("{doc_hash}.gz")), "text/plain").await
@ -126,6 +121,6 @@ impl StorageImpl for FSStorage {
impl Display for FSStorage {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "FS ({})", self.root.display())
write!(f, "FS")
}
}
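
The two variants visible in this hunk differ in how the gzipped artifact reaches the client: one buffers the whole file with `fs::read`, the other opens the file and streams it chunk by chunk, setting `Content-Length` from metadata so memory use stays flat for large archives. A side-by-side sketch of just that difference (headers trimmed for brevity):

```rust
use actix_web::{http::header::CONTENT_LENGTH, HttpResponse};
use fs_err::tokio as fs;
use std::path::Path;
use tokio_util::io::ReaderStream;

// buffered: the whole artifact is read into memory before responding
async fn buffered(path: &Path) -> std::io::Result<HttpResponse> {
    Ok(HttpResponse::Ok().body(fs::read(path).await?))
}

// streaming: the file is forwarded in chunks as the client reads
async fn streaming(path: &Path) -> std::io::Result<HttpResponse> {
    let file = fs::File::open(path).await?;
    let len = file.metadata().await?.len();
    Ok(HttpResponse::Ok()
        .append_header((CONTENT_LENGTH, len))
        .streaming(ReaderStream::new(file)))
}
```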

View file

@ -1,6 +1,6 @@
use crate::{benv, error::RegistryError, make_reqwest};
use crate::{benv, error::Error, make_reqwest};
use actix_web::HttpResponse;
use pesde::{names::PackageName, source::ids::VersionId};
use pesde::{names::PackageName, source::version_id::VersionId};
use rusty_s3::{Bucket, Credentials, UrlStyle};
use std::fmt::Display;
@ -19,27 +19,31 @@ pub trait StorageImpl: Display {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError>;
) -> Result<(), crate::error::Error>;
async fn get_package(
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError>;
) -> Result<HttpResponse, crate::error::Error>;
async fn store_readme(
&self,
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError>;
) -> Result<(), crate::error::Error>;
async fn get_readme(
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError>;
) -> Result<HttpResponse, crate::error::Error>;
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError>;
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError>;
async fn store_doc(
&self,
doc_hash: String,
contents: Vec<u8>,
) -> Result<(), crate::error::Error>;
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, crate::error::Error>;
}
impl StorageImpl for Storage {
@ -48,7 +52,7 @@ impl StorageImpl for Storage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
match self {
Storage::S3(s3) => s3.store_package(package_name, version, contents).await,
Storage::FS(fs) => fs.store_package(package_name, version, contents).await,
@ -59,7 +63,7 @@ impl StorageImpl for Storage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
match self {
Storage::S3(s3) => s3.get_package(package_name, version).await,
Storage::FS(fs) => fs.get_package(package_name, version).await,
@ -71,7 +75,7 @@ impl StorageImpl for Storage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
match self {
Storage::S3(s3) => s3.store_readme(package_name, version, contents).await,
Storage::FS(fs) => fs.store_readme(package_name, version, contents).await,
@ -82,21 +86,21 @@ impl StorageImpl for Storage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
match self {
Storage::S3(s3) => s3.get_readme(package_name, version).await,
Storage::FS(fs) => fs.get_readme(package_name, version).await,
}
}
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
match self {
Storage::S3(s3) => s3.store_doc(doc_hash, contents).await,
Storage::FS(fs) => fs.store_doc(doc_hash, contents).await,
}
}
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
match self {
Storage::S3(s3) => s3.get_doc(doc_hash).await,
Storage::FS(fs) => fs.get_doc(doc_hash).await,
@ -116,14 +120,14 @@ impl Display for Storage {
pub fn get_storage_from_env() -> Storage {
if let Ok(endpoint) = benv!(parse "S3_ENDPOINT") {
Storage::S3(s3::S3Storage {
bucket: Bucket::new(
s3_bucket: Bucket::new(
endpoint,
UrlStyle::Path,
benv!(required "S3_BUCKET_NAME"),
benv!(required "S3_REGION"),
)
.unwrap(),
credentials: Credentials::new(
s3_credentials: Credentials::new(
benv!(required "S3_ACCESS_KEY"),
benv!(required "S3_SECRET_KEY"),
),

View file

@ -1,9 +1,9 @@
use crate::{
error::{RegistryError, ReqwestErrorExt},
error::{Error, ReqwestErrorExt},
storage::StorageImpl,
};
use actix_web::{http::header::LOCATION, HttpResponse};
use pesde::{names::PackageName, source::ids::VersionId};
use pesde::{names::PackageName, source::version_id::VersionId};
use reqwest::header::{CONTENT_ENCODING, CONTENT_TYPE};
use rusty_s3::{
actions::{GetObject, PutObject},
@ -13,8 +13,8 @@ use std::{fmt::Display, time::Duration};
#[derive(Debug)]
pub struct S3Storage {
pub bucket: Bucket,
pub credentials: Credentials,
pub s3_bucket: Bucket,
pub s3_credentials: Credentials,
pub reqwest_client: reqwest::Client,
}
@ -26,10 +26,10 @@ impl StorageImpl for S3Storage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
let object_url = PutObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
&format!(
"{package_name}/{}/{}/pkg.tar.gz",
version.version(),
@ -55,10 +55,10 @@ impl StorageImpl for S3Storage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
let object_url = GetObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
&format!(
"{package_name}/{}/{}/pkg.tar.gz",
version.version(),
@ -77,10 +77,10 @@ impl StorageImpl for S3Storage {
package_name: &PackageName,
version: &VersionId,
contents: Vec<u8>,
) -> Result<(), RegistryError> {
) -> Result<(), Error> {
let object_url = PutObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
&format!(
"{package_name}/{}/{}/readme.gz",
version.version(),
@ -106,10 +106,10 @@ impl StorageImpl for S3Storage {
&self,
package_name: &PackageName,
version: &VersionId,
) -> Result<HttpResponse, RegistryError> {
) -> Result<HttpResponse, Error> {
let object_url = GetObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
&format!(
"{package_name}/{}/{}/readme.gz",
version.version(),
@ -123,10 +123,10 @@ impl StorageImpl for S3Storage {
.finish())
}
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), RegistryError> {
async fn store_doc(&self, doc_hash: String, contents: Vec<u8>) -> Result<(), Error> {
let object_url = PutObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
// capitalize Doc to prevent conflicts with scope names
&format!("Doc/{}.gz", doc_hash),
)
@ -145,10 +145,10 @@ impl StorageImpl for S3Storage {
Ok(())
}
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, RegistryError> {
async fn get_doc(&self, doc_hash: &str) -> Result<HttpResponse, Error> {
let object_url = GetObject::new(
&self.bucket,
Some(&self.credentials),
&self.s3_bucket,
Some(&self.s3_credentials),
&format!("Doc/{}.gz", doc_hash),
)
.sign(S3_SIGN_DURATION);
@ -161,6 +161,6 @@ impl StorageImpl for S3Storage {
impl Display for S3Storage {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "S3 (bucket name: {})", self.bucket.name())
write!(f, "S3")
}
}
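
rusty_s3 signs request URLs locally, so none of the calls above touch the network until the presigned URL is actually fetched; the GET handlers can simply redirect the client (the `LOCATION` header) instead of proxying object bytes. A minimal sketch with an assumed endpoint, bucket, and key:

```rust
use rusty_s3::{actions::GetObject, Bucket, Credentials, UrlStyle};
use std::time::Duration;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // endpoint, bucket name, region, and credentials are all assumed
    let bucket = Bucket::new(
        "https://s3.example.com".parse()?,
        UrlStyle::Path,
        "registry",
        "us-east-1",
    )?;
    let credentials = Credentials::new("ACCESS_KEY", "SECRET_KEY");

    // signing is a pure computation; no network call happens here
    let url = GetObject::new(
        &bucket,
        Some(&credentials),
        "scope/name/1.2.3/luau/pkg.tar.gz", // key layout mirrors the code above
    )
    .sign(Duration::from_secs(60 * 5));
    println!("{url}"); // hand this to the client as a redirect target
    Ok(())
}
```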

View file

@ -1,2 +1 @@
imports_granularity = "Crate"
hard_tabs = true

View file

@ -5,10 +5,9 @@ use keyring::Entry;
use reqwest::header::AUTHORIZATION;
use serde::{ser::SerializeMap, Deserialize, Serialize};
use std::collections::BTreeMap;
use tokio::task::spawn_blocking;
use tracing::instrument;
#[derive(Debug, Clone, Default)]
#[derive(Debug, Clone)]
pub struct Tokens(pub BTreeMap<gix::Url, String>);
impl Serialize for Tokens {
@ -47,54 +46,41 @@ pub async fn get_tokens() -> anyhow::Result<Tokens> {
return Ok(config.tokens);
}
let keyring_tokens = spawn_blocking(|| match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
match Entry::new("tokens", env!("CARGO_PKG_NAME")) {
Ok(entry) => match entry.get_password() {
Ok(token) => serde_json::from_str(&token)
.map(Some)
.context("failed to parse tokens"),
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(None),
Err(e) => Err(e.into()),
},
Err(keyring::Error::PlatformFailure(_)) => Ok(None),
Err(e) => Err(e.into()),
})
.await
.unwrap()?;
if let Some(tokens) = keyring_tokens {
Ok(token) => {
tracing::debug!("using tokens from keyring");
return Ok(tokens);
return serde_json::from_str(&token).context("failed to parse tokens");
}
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()),
},
Err(keyring::Error::PlatformFailure(_)) => {}
Err(e) => return Err(e.into()),
}
Ok(Tokens::default())
Ok(Tokens(BTreeMap::new()))
}
#[instrument(level = "trace")]
pub async fn set_tokens(tokens: Tokens) -> anyhow::Result<()> {
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
let json = serde_json::to_string(&tokens).context("failed to serialize tokens")?;
let to_keyring = spawn_blocking(move || {
let entry = Entry::new("tokens", env!("CARGO_PKG_NAME"))?;
match entry.set_password(&json) {
Ok(()) => Ok::<_, anyhow::Error>(true),
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(false),
Err(e) => Err(e.into()),
}
})
.await
.unwrap()?;
if to_keyring {
Ok(()) => {
tracing::debug!("tokens saved to keyring");
return Ok(());
}
Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => {}
Err(e) => return Err(e.into()),
}
tracing::debug!("saving tokens to config");
tracing::debug!("tokens saved to config");
let mut config = read_config().await?;
config.tokens = tokens;
write_config(&config).await
write_config(&config).await.map_err(Into::into)
}
pub async fn set_token(repo: &gix::Url, token: Option<&str>) -> anyhow::Result<()> {
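
The left-hand version wraps all keyring access in `spawn_blocking` because the platform keyring APIs are synchronous and can stall an async worker thread. A minimal sketch of the read path, falling back to `None` (and thus the config file) when nothing is stored or no usable backend exists:

```rust
use keyring::Entry;
use tokio::task::spawn_blocking;

async fn load_tokens() -> anyhow::Result<Option<String>> {
    // keyring calls are blocking platform APIs, so run them off the runtime
    spawn_blocking(|| match Entry::new("tokens", "pesde") {
        Ok(entry) => match entry.get_password() {
            Ok(json) => Ok(Some(json)),
            // nothing stored, or no usable backend: fall back to the config
            Err(keyring::Error::PlatformFailure(_) | keyring::Error::NoEntry) => Ok(None),
            Err(e) => Err(e.into()),
        },
        Err(keyring::Error::PlatformFailure(_)) => Ok(None),
        Err(e) => Err(e.into()),
    })
    .await
    .unwrap()
}
```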

View file

@ -1,81 +0,0 @@
local process = require("@lune/process")
local fs = require("@lune/fs")
local stdio = require("@lune/stdio")
local serde = require("@lune/serde")
local project_root = nil
local path_components = string.split(string.gsub(process.cwd, "\\", "/"), "/")
if path_components[#path_components] == "" then
table.remove(path_components)
end
local function in_lockfile(lockfile)
if not lockfile.graph then
return false
end
for _, versions in lockfile.graph do
for _, node in versions do
if node.direct and node.direct[1] == "{alias}" then
return true
end
end
end
return false
end
for i = #path_components, 1, -1 do
local path = table.concat(path_components, "/", 1, i)
if not fs.isFile(path .. "/{MANIFEST_FILE_NAME}") then
continue
end
if project_root == nil then
project_root = path
end
if project_root and fs.isFile(path .. "/{LOCKFILE_FILE_NAME}") then
local lockfile = serde.decode("toml", fs.readFile(path .. "/{LOCKFILE_FILE_NAME}"))
if not lockfile.workspace then
continue
end
local search_for = string.gsub(project_root, path, "")
if string.sub(search_for, 1, 1) == "/" then
search_for = string.sub(search_for, 2)
end
if search_for == "" then
if in_lockfile(lockfile) then
break
end
continue
end
for _, targets in lockfile.workspace do
for _, member_path in targets do
local path_normalized = string.gsub(member_path, "\\", "/")
if path_normalized == search_for and in_lockfile(lockfile) then
project_root = path
break
end
end
end
end
end
if project_root ~= nil then
for _, packages_folder in {{ {all_folders} }} do
local path = `{{project_root}}/{{packages_folder}}/{alias}.bin.luau`
if fs.isFile(path) then
require(path)
return
end
end
end
stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the right directory?" .. stdio.color("reset") .. "\n")
process.exit(1)

View file

@ -1,25 +1,23 @@
use std::str::FromStr;
use std::{collections::HashSet, str::FromStr};
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use semver::VersionReq;
use crate::cli::{
config::read_config, dep_type_to_key, AnyPackageIdentifier, VersionedPackageName,
};
use crate::cli::{config::read_config, AnyPackageIdentifier, VersionedPackageName};
use pesde::{
manifest::{target::TargetKind, Alias, DependencyType},
manifest::target::TargetKind,
names::PackageNames,
source::{
git::{specifier::GitDependencySpecifier, GitPackageSource},
path::{specifier::PathDependencySpecifier, PathPackageSource},
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
specifiers::DependencySpecifiers,
traits::{PackageSource, RefreshOptions, ResolveOptions},
workspace::{specifier::WorkspaceDependencySpecifier, WorkspacePackageSource},
traits::PackageSource,
workspace::WorkspacePackageSource,
PackageSources,
},
Project, RefreshedSources, DEFAULT_INDEX_NAME,
Project, DEFAULT_INDEX_NAME,
};
#[derive(Debug, Args)]
@ -38,7 +36,7 @@ pub struct AddCommand {
/// The alias to use for the package
#[arg(short, long)]
alias: Option<Alias>,
alias: Option<String>,
/// Whether to add the package as a peer dependency
#[arg(short, long)]
@ -65,7 +63,8 @@ impl AddCommand {
.cloned();
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
anyhow::bail!("index {index} not found");
println!("{}: index {index} not found", "error".red().bold());
return Ok(());
}
let index = match index {
@ -91,7 +90,8 @@ impl AddCommand {
.cloned();
if let Some(index) = self.index.as_ref().filter(|_| index.is_none()) {
anyhow::bail!("wally index {index} not found");
println!("{}: wally index {index} not found", "error".red().bold());
return Ok(());
}
let index = index.context("no wally index found")?;
@ -119,38 +119,26 @@ impl AddCommand {
),
AnyPackageIdentifier::Workspace(VersionedPackageName(name, version)) => (
PackageSources::Workspace(WorkspacePackageSource),
DependencySpecifiers::Workspace(WorkspaceDependencySpecifier {
DependencySpecifiers::Workspace(
pesde::source::workspace::specifier::WorkspaceDependencySpecifier {
name: name.clone(),
version: version.clone().unwrap_or_default(),
target: self.target,
}),
},
),
AnyPackageIdentifier::Path(path) => (
PackageSources::Path(PathPackageSource),
DependencySpecifiers::Path(PathDependencySpecifier { path: path.clone() }),
),
};
let refreshed_sources = RefreshedSources::new();
refreshed_sources
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
)
source
.refresh(&project)
.await
.context("failed to refresh package source")?;
let Some(version_id) = source
.resolve(
&specifier,
&ResolveOptions {
project: project.clone(),
target: manifest.target.kind(),
refreshed_sources,
},
&project,
manifest.target.kind(),
&mut HashSet::new(),
)
.await
.context("failed to resolve package")?
@ -158,7 +146,9 @@ impl AddCommand {
.pop_last()
.map(|(v_id, _)| v_id)
else {
anyhow::bail!("no versions found for package");
println!("{}: no versions found for package", "error".red().bold());
return Ok(());
};
let project_target = manifest.target.kind();
@ -169,44 +159,35 @@ impl AddCommand {
.context("failed to read manifest")?,
)
.context("failed to parse manifest")?;
let dependency_key = dep_type_to_key(if self.peer {
DependencyType::Peer
let dependency_key = if self.peer {
"peer_dependencies"
} else if self.dev {
DependencyType::Dev
"dev_dependencies"
} else {
DependencyType::Standard
});
"dependencies"
};
let alias = match self.alias {
Some(alias) => alias,
None => match &self.name {
AnyPackageIdentifier::PackageName(versioned) => versioned.0.name().to_string(),
let alias = self.alias.unwrap_or_else(|| match self.name.clone() {
AnyPackageIdentifier::PackageName(versioned) => versioned.0.as_str().1.to_string(),
AnyPackageIdentifier::Url((url, _)) => url
.path
.to_string()
.split('/')
.next_back()
.last()
.map(|s| s.to_string())
.unwrap_or(url.path.to_string()),
AnyPackageIdentifier::Workspace(versioned) => versioned.0.name().to_string(),
AnyPackageIdentifier::Path(path) => path
.file_name()
.map(|s| s.to_string_lossy().to_string())
.expect("path has no file name"),
}
.parse()
.context("auto-generated alias is invalid. use --alias to specify one")?,
};
AnyPackageIdentifier::Workspace(versioned) => versioned.0.as_str().1.to_string(),
});
let field = &mut manifest[dependency_key]
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[alias.as_str()];
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()))[&alias];
match specifier {
DependencySpecifiers::Pesde(spec) => {
field["name"] = toml_edit::value(spec.name.clone().to_string());
field["version"] = toml_edit::value(format!("^{}", version_id.version()));
if version_id.target() != project_target {
if *version_id.target() != project_target {
field["target"] = toml_edit::value(version_id.target().to_string());
}
@ -215,17 +196,16 @@ impl AddCommand {
}
println!(
"added {}@{} {} to {dependency_key}",
"added {}@{} {} to {}",
spec.name,
version_id.version(),
version_id.target()
version_id.target(),
dependency_key
);
}
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(spec) => {
let name_str = spec.name.to_string();
let name_str = name_str.trim_start_matches("wally#");
field["wally"] = toml_edit::value(name_str);
field["wally"] = toml_edit::value(spec.name.clone().to_string());
field["version"] = toml_edit::value(format!("^{}", version_id.version()));
if let Some(index) = spec.index.filter(|i| i != DEFAULT_INDEX_NAME) {
@ -233,15 +213,17 @@ impl AddCommand {
}
println!(
"added wally {name_str}@{} to {dependency_key}",
version_id.version()
"added wally {}@{} to {}",
spec.name,
version_id.version(),
dependency_key
);
}
DependencySpecifiers::Git(spec) => {
field["repo"] = toml_edit::value(spec.repo.to_bstring().to_string());
field["rev"] = toml_edit::value(spec.rev.clone());
println!("added git {}#{} to {dependency_key}", spec.repo, spec.rev);
println!("added git {}#{} to {}", spec.repo, spec.rev, dependency_key);
}
DependencySpecifiers::Workspace(spec) => {
field["workspace"] = toml_edit::value(spec.name.clone().to_string());
@ -252,15 +234,10 @@ impl AddCommand {
}
println!(
"added workspace {}@{} to {dependency_key}",
spec.name, spec.version
"added workspace {}@{} to {}",
spec.name, spec.version, dependency_key
);
}
DependencySpecifiers::Path(spec) => {
field["path"] = toml_edit::value(spec.path.to_string_lossy().to_string());
println!("added path {} to {dependency_key}", spec.path.display());
}
}
project
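
The manifest edit above goes through toml_edit rather than a plain TOML serializer so the user's comments and formatting survive the write. A rough sketch of the same insert-or-create pattern; the manifest snippet and alias are assumed, and `DocumentMut` is the document type in recent toml_edit versions:

```rust
fn main() -> Result<(), toml_edit::TomlError> {
    // assumed manifest snippet; comments and formatting are preserved
    let mut manifest =
        "name = \"scope/name\" # my package\n".parse::<toml_edit::DocumentMut>()?;

    // create the table on first use, then fill in the dependency's fields
    let field = &mut manifest["dependencies"]
        .or_insert(toml_edit::Item::Table(toml_edit::Table::new()))["alias"];
    field["name"] = toml_edit::value("other/package");
    field["version"] = toml_edit::value("^1.2.3");

    print!("{manifest}");
    Ok(())
}
```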

View file

@ -1,23 +1,18 @@
use anyhow::Context;
use clap::Args;
use console::style;
use colored::Colorize;
use serde::Deserialize;
use std::thread::spawn;
use tokio::time::sleep;
use url::Url;
use crate::cli::{
auth::{get_token_login, set_token},
style::URL_STYLE,
};
use pesde::{
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
source::{pesde::PesdePackageSource, traits::PackageSource},
Project,
};
use crate::cli::auth::{get_token_login, set_token};
#[derive(Debug, Args)]
pub struct LoginCommand {
/// The token to use for authentication, skipping login
@ -62,9 +57,7 @@ impl LoginCommand {
let source = PesdePackageSource::new(index_url.clone());
source
.refresh(&RefreshOptions {
project: project.clone(),
})
.refresh(project)
.await
.context("failed to refresh index")?;
@ -92,8 +85,8 @@ impl LoginCommand {
println!(
"copy your one-time code: {}\npress enter to open {} in your browser...",
style(response.user_code).bold(),
URL_STYLE.apply_to(response.verification_uri.as_str())
response.user_code.bold(),
response.verification_uri.as_str().blue()
);
spawn(move || {
@ -187,7 +180,7 @@ impl LoginCommand {
let token = format!("Bearer {token}");
println!(
"logged in as {} for {index_url}",
style(get_token_login(&reqwest, &token).await?).bold()
get_token_login(&reqwest, &token).await?.bold()
);
token

View file

@ -1,6 +1,6 @@
use crate::cli::get_index;
use crate::cli::config::read_config;
use clap::{Args, Subcommand};
use pesde::Project;
use pesde::{errors::ManifestReadError, Project, DEFAULT_INDEX_NAME};
mod login;
mod logout;
@ -32,7 +32,36 @@ pub enum AuthCommands {
impl AuthSubcommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let index_url = get_index(&project, self.index.as_deref()).await?;
let manifest = match project.deser_manifest().await {
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
e => return Err(e.into()),
},
};
let index_url = match self.index.as_deref() {
Some(index) => match index.try_into() {
Ok(url) => Some(url),
Err(_) => None,
},
None => match manifest {
Some(_) => None,
None => Some(read_config().await?.default_index),
},
};
let index_url = match index_url {
Some(url) => url,
None => {
let index_name = self.index.as_deref().unwrap_or(DEFAULT_INDEX_NAME);
match manifest.unwrap().indices.get(index_name) {
Some(index) => index.clone(),
None => anyhow::bail!("index {index_name} not found in manifest"),
}
}
};
match self.command {
AuthCommands::Login(login) => login.run(index_url, project, reqwest).await,

View file

@ -1,6 +1,6 @@
use crate::cli::auth::{get_token_login, get_tokens};
use clap::Args;
use console::style;
use colored::Colorize;
#[derive(Debug, Args)]
pub struct WhoAmICommand {}
@ -18,7 +18,7 @@ impl WhoAmICommand {
println!(
"logged in as {} into {index_url}",
style(get_token_login(&reqwest, token).await?).bold()
get_token_login(&reqwest, token).await?.bold()
);
Ok(())

View file

@ -1,18 +0,0 @@
use clap::Subcommand;
use pesde::Project;
mod prune;
#[derive(Debug, Subcommand)]
pub enum CasCommands {
/// Removes unused files from the CAS
Prune(prune::PruneCommand),
}
impl CasCommands {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
match self {
CasCommands::Prune(prune) => prune.run(project).await,
}
}
}

View file

@ -1,346 +0,0 @@
use crate::{
cli::{
reporters::run_with_reporter,
style::{INFO_STYLE, SUCCESS_STYLE},
},
util::remove_empty_dir,
};
use anyhow::Context;
use async_stream::try_stream;
use clap::Args;
use fs_err::tokio as fs;
use futures::{future::BoxFuture, FutureExt, Stream, StreamExt};
use pesde::{
source::fs::{FsEntry, PackageFs},
Project,
};
use std::{
collections::{HashMap, HashSet},
future::Future,
path::{Path, PathBuf},
};
use tokio::task::JoinSet;
#[derive(Debug, Args)]
pub struct PruneCommand {}
async fn read_dir_stream(
dir: &Path,
) -> std::io::Result<impl Stream<Item = std::io::Result<fs::DirEntry>>> {
let mut read_dir = fs::read_dir(dir).await?;
Ok(try_stream! {
while let Some(entry) = read_dir.next_entry().await? {
yield entry;
}
})
}
#[allow(unreachable_code)]
async fn get_nlinks(path: &Path) -> anyhow::Result<u64> {
#[cfg(unix)]
{
use std::os::unix::fs::MetadataExt;
let metadata = fs::metadata(path).await?;
return Ok(metadata.nlink());
}
// imagine life if Rust had stabilized the nightly nlink feature from 2019
#[cfg(windows)]
{
use std::os::windows::ffi::OsStrExt;
use windows::{
core::PWSTR,
Win32::{
Foundation::CloseHandle,
Storage::FileSystem::{
CreateFileW, GetFileInformationByHandle, FILE_ATTRIBUTE_NORMAL,
FILE_GENERIC_READ, FILE_SHARE_READ, OPEN_EXISTING,
},
},
};
let path = path.to_path_buf();
return tokio::task::spawn_blocking(move || unsafe {
let handle = CreateFileW(
PWSTR(
path.as_os_str()
.encode_wide()
.chain(std::iter::once(0))
.collect::<Vec<_>>()
.as_mut_ptr(),
),
FILE_GENERIC_READ.0,
FILE_SHARE_READ,
None,
OPEN_EXISTING,
FILE_ATTRIBUTE_NORMAL,
None,
)?;
let mut info =
windows::Win32::Storage::FileSystem::BY_HANDLE_FILE_INFORMATION::default();
let res = GetFileInformationByHandle(handle, &mut info);
CloseHandle(handle)?;
res?;
Ok(info.nNumberOfLinks as u64)
})
.await
.unwrap();
}
#[cfg(not(any(unix, windows)))]
{
compile_error!("unsupported platform");
}
anyhow::bail!("unsupported platform")
}
#[derive(Debug)]
struct ExtendJoinSet<T: Send + 'static>(JoinSet<T>);
impl<T: Send + 'static, F: Future<Output = T> + Send + 'static> Extend<F> for ExtendJoinSet<T> {
fn extend<I: IntoIterator<Item = F>>(&mut self, iter: I) {
for item in iter {
self.0.spawn(item);
}
}
}
impl<T: Send + 'static> Default for ExtendJoinSet<T> {
fn default() -> Self {
Self(JoinSet::new())
}
}
async fn discover_cas_packages(cas_dir: &Path) -> anyhow::Result<HashMap<PathBuf, PackageFs>> {
fn read_entry(
entry: fs::DirEntry,
) -> BoxFuture<'static, anyhow::Result<HashMap<PathBuf, PackageFs>>> {
async move {
if entry
.metadata()
.await
.context("failed to read entry metadata")?
.is_dir()
{
let mut tasks = read_dir_stream(&entry.path())
.await
.context("failed to read entry directory")?
.map(|entry| async move {
read_entry(entry.context("failed to read inner cas index dir entry")?).await
})
.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
.await
.0;
let mut res = HashMap::new();
while let Some(entry) = tasks.join_next().await {
res.extend(entry.unwrap()?);
}
return Ok(res);
};
let contents = fs::read_to_string(entry.path()).await?;
let fs = toml::from_str(&contents).context("failed to deserialize PackageFs")?;
Ok(HashMap::from([(entry.path(), fs)]))
}
.boxed()
}
let mut tasks = ["index", "wally_index", "git_index"]
.into_iter()
.map(|index| cas_dir.join(index))
.map(|index| async move {
let mut res = HashMap::new();
let tasks = match read_dir_stream(&index).await {
Ok(tasks) => tasks,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(res),
Err(e) => return Err(e).context("failed to read cas index directory"),
};
let mut tasks = tasks
.map(|entry| async move {
read_entry(entry.context("failed to read cas index dir entry")?).await
})
.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
.await
.0;
while let Some(task) = tasks.join_next().await {
res.extend(task.unwrap()?);
}
Ok(res)
})
.collect::<JoinSet<Result<_, anyhow::Error>>>();
let mut cas_entries = HashMap::new();
while let Some(task) = tasks.join_next().await {
cas_entries.extend(task.unwrap()?);
}
Ok(cas_entries)
}
async fn remove_hashes(cas_dir: &Path) -> anyhow::Result<HashSet<String>> {
let mut res = HashSet::new();
let tasks = match read_dir_stream(cas_dir).await {
Ok(tasks) => tasks,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(res),
Err(e) => return Err(e).context("failed to read cas directory"),
};
let mut tasks = tasks
.map(|cas_entry| async move {
let cas_entry = cas_entry.context("failed to read cas dir entry")?;
let prefix = cas_entry.file_name();
let Some(prefix) = prefix.to_str() else {
return Ok(None);
};
// we only want hash directories
if prefix.len() != 2 {
return Ok(None);
}
let mut tasks = read_dir_stream(&cas_entry.path())
.await
.context("failed to read hash directory")?
.map(|hash_entry| {
let prefix = prefix.to_string();
async move {
let hash_entry = hash_entry.context("failed to read hash dir entry")?;
let hash = hash_entry.file_name();
let hash = hash.to_str().expect("non-UTF-8 hash").to_string();
let hash = format!("{prefix}{hash}");
let path = hash_entry.path();
let nlinks = get_nlinks(&path)
.await
.context("failed to count file usage")?;
if nlinks > 1 {
return Ok(None);
}
fs::remove_file(&path)
.await
.context("failed to remove unused file")?;
if let Some(parent) = path.parent() {
remove_empty_dir(parent).await?;
}
Ok(Some(hash))
}
})
.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
.await
.0;
let mut removed_hashes = HashSet::new();
while let Some(removed_hash) = tasks.join_next().await {
let Some(hash) = removed_hash.unwrap()? else {
continue;
};
removed_hashes.insert(hash);
}
Ok(Some(removed_hashes))
})
.collect::<ExtendJoinSet<Result<_, anyhow::Error>>>()
.await
.0;
while let Some(removed_hashes) = tasks.join_next().await {
let Some(removed_hashes) = removed_hashes.unwrap()? else {
continue;
};
res.extend(removed_hashes);
}
Ok(res)
}
impl PruneCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
// CAS structure:
// /2 first chars of hash/rest of hash
// /index/hash/name/version/target
// /wally_index/hash/name/version
// /git_index/hash/hash
// the last thing in the path is the serialized PackageFs
let (cas_entries, removed_hashes) = run_with_reporter(|_, root_progress, _| async {
let root_progress = root_progress;
root_progress.reset();
root_progress.set_message("discover packages");
let cas_entries = discover_cas_packages(project.cas_dir()).await?;
root_progress.reset();
root_progress.set_message("remove unused files");
let removed_hashes = remove_hashes(project.cas_dir()).await?;
Ok::<_, anyhow::Error>((cas_entries, removed_hashes))
})
.await?;
let mut tasks = JoinSet::new();
let mut removed_packages = 0usize;
'entry: for (path, fs) in cas_entries {
let PackageFs::Cas(entries) = fs else {
continue;
};
for entry in entries.into_values() {
let FsEntry::File(hash) = entry else {
continue;
};
if removed_hashes.contains(&hash) {
let cas_dir = project.cas_dir().to_path_buf();
tasks.spawn(async move {
fs::remove_file(&path)
.await
.context("failed to remove unused file")?;
// remove empty directories up to the cas dir
let mut path = &*path;
while let Some(parent) = path.parent() {
if parent == cas_dir {
break;
}
remove_empty_dir(parent).await?;
path = parent;
}
Ok::<_, anyhow::Error>(())
});
removed_packages += 1;
// if at least one file is removed, the package is not used
continue 'entry;
}
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
println!(
"{} removed {} unused packages and {} individual files!",
SUCCESS_STYLE.apply_to("done!"),
INFO_STYLE.apply_to(removed_packages),
INFO_STYLE.apply_to(removed_hashes.len())
);
Ok(())
}
}
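
The comment at the top of `run` is the key to this whole file: hashes are sharded into a two-character directory plus the remainder as the file name, and a file is prunable exactly when its hard-link count says no installed package still points at it. A minimal sketch of both ideas, with hypothetical helper names:

use std::path::{Path, PathBuf};

// The first two hex characters of a hash name the shard directory;
// the remainder names the file inside it.
fn cas_file_path(cas_dir: &Path, hash: &str) -> PathBuf {
    // Assumes `hash` is at least two characters long.
    let (shard, rest) = hash.split_at(2);
    cas_dir.join(shard).join(rest)
}

// On Unix the prune test boils down to the hard-link count: a CAS file
// whose only remaining link is the store's own is unreferenced.
#[cfg(unix)]
fn is_unused(path: &Path) -> std::io::Result<bool> {
    use std::os::unix::fs::MetadataExt;
    Ok(std::fs::metadata(path)?.nlink() <= 1)
}

fn main() {
    let path = cas_file_path(Path::new("/tmp/pesde-cas"), "ab12cd34ef");
    println!("{}", path.display());

    #[cfg(unix)]
    if let Ok(unused) = is_unused(&path) {
        println!("safe to prune: {unused}");
    }
}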

View file

@ -1,100 +0,0 @@
use crate::cli::{get_index, style::SUCCESS_STYLE};
use anyhow::Context;
use clap::Args;
use pesde::{
names::PackageName,
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};
#[derive(Debug, Args)]
pub struct DeprecateCommand {
/// Whether to undeprecate the package
#[clap(long)]
undo: bool,
/// The index to deprecate the package in
#[clap(short, long)]
index: Option<String>,
/// The package to deprecate
#[clap(index = 1)]
package: PackageName,
/// The reason for deprecating the package
#[clap(index = 2, required_unless_present = "undo")]
reason: Option<String>,
}
impl DeprecateCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let index_url = get_index(&project, self.index.as_deref()).await?;
let source = PesdePackageSource::new(index_url.clone());
source
.refresh(&RefreshOptions {
project: project.clone(),
})
.await
.context("failed to refresh source")?;
let config = source
.config(&project)
.await
.context("failed to get index config")?;
let mut request = reqwest.request(
if self.undo {
Method::DELETE
} else {
Method::PUT
},
format!(
"{}/v1/packages/{}/deprecate",
config.api(),
urlencoding::encode(&self.package.to_string()),
),
);
if !self.undo {
request = request.body(
self.reason
.map(|reason| reason.trim().to_string())
.filter(|reason| !reason.is_empty())
.context("deprecating must have non-empty a reason")?,
);
}
if let Some(token) = project.auth_config().tokens().get(&index_url) {
tracing::debug!("using token for {index_url}");
request = request.header(AUTHORIZATION, token);
}
let response = request.send().await.context("failed to send request")?;
let status = response.status();
let text = response
.text()
.await
.context("failed to get response text")?;
let prefix = if self.undo { "un" } else { "" };
match status {
StatusCode::CONFLICT => {
anyhow::bail!("version is already {prefix}deprecated");
}
StatusCode::FORBIDDEN => {
anyhow::bail!("unauthorized to {prefix}deprecate under this scope");
}
code if !code.is_success() => {
anyhow::bail!("failed to {prefix}deprecate package: {code} ({text})");
}
_ => {
println!("{}", SUCCESS_STYLE.apply_to(text));
}
}
Ok(())
}
}

View file

@ -1,36 +1,22 @@
use crate::cli::{
config::read_config,
reporters::{self, CliReporter},
VersionedPackageName,
};
use crate::cli::{config::read_config, progress_bar, VersionedPackageName};
use anyhow::Context;
use clap::Args;
use console::style;
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{
download_and_link::DownloadAndLinkOptions,
linking::generator::generate_bin_linking_module,
manifest::target::TargetKind,
names::{PackageName, PackageNames},
names::PackageName,
source::{
ids::PackageId,
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
traits::{
DownloadOptions, GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions,
traits::PackageSource,
},
PackageSources,
},
Project, RefreshedSources,
Project,
};
use semver::VersionReq;
use std::{
env::current_dir,
ffi::OsString,
io::{Stderr, Write},
process::Command,
sync::Arc,
collections::HashSet, env::current_dir, ffi::OsString, io::Write, process::Command, sync::Arc,
};
use tokio::sync::Mutex;
#[derive(Debug, Args)]
pub struct ExecuteCommand {
@ -49,40 +35,19 @@ pub struct ExecuteCommand {
impl ExecuteCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let multi_progress = MultiProgress::new();
crate::PROGRESS_BARS
.lock()
.unwrap()
.replace(multi_progress.clone());
let refreshed_sources = RefreshedSources::new();
let (tempdir, bin_path) = reporters::run_with_reporter_and_writer(
std::io::stderr(),
|multi_progress, root_progress, reporter| async {
let multi_progress = multi_progress;
let root_progress = root_progress;
root_progress.set_message("resolve");
let index = match self.index {
Some(index) => Some(index),
None => read_config().await.ok().map(|c| c.default_index),
}
.context("no index specified")?;
let source = PesdePackageSource::new(index);
refreshed_sources
.refresh(
&PackageSources::Pesde(source.clone()),
&RefreshOptions {
project: project.clone(),
},
)
source
.refresh(&project)
.await
.context("failed to refresh source")?;
let version_req = self.package.1.unwrap_or(VersionReq::STAR);
let Some((id, pkg_ref)) = ('finder: {
let Some((version, pkg_ref)) = ('finder: {
let specifier = PesdeDependencySpecifier {
name: self.package.0.clone(),
version: version_req.clone(),
@ -90,45 +55,22 @@ impl ExecuteCommand {
target: None,
};
if let Some((v_id, pkg_ref)) = source
.resolve(
&specifier,
&ResolveOptions {
project: project.clone(),
target: TargetKind::Lune,
refreshed_sources: refreshed_sources.clone(),
},
)
if let Some(res) = source
.resolve(&specifier, &project, TargetKind::Lune, &mut HashSet::new())
.await
.context("failed to resolve package")?
.1
.pop_last()
{
break 'finder Some((
PackageId::new(PackageNames::Pesde(self.package.0.clone()), v_id),
pkg_ref,
));
break 'finder Some(res);
}
source
.resolve(
&specifier,
&ResolveOptions {
project: project.clone(),
target: TargetKind::Luau,
refreshed_sources: refreshed_sources.clone(),
},
)
.resolve(&specifier, &project, TargetKind::Luau, &mut HashSet::new())
.await
.context("failed to resolve package")?
.1
.pop_last()
.map(|(v_id, pkg_ref)| {
(
PackageId::new(PackageNames::Pesde(self.package.0.clone()), v_id),
pkg_ref,
)
})
}) else {
anyhow::bail!(
"no Lune or Luau package could be found for {}@{version_req}",
@ -136,12 +78,14 @@ impl ExecuteCommand {
);
};
println!("using {}@{version}", pkg_ref.name);
let tmp_dir = project.cas_dir().join(".tmp");
fs::create_dir_all(&tmp_dir)
.await
.context("failed to create temporary directory")?;
let tempdir = tempfile::tempdir_in(tmp_dir)
.context("failed to create temporary directory")?;
let tempdir =
tempfile::tempdir_in(tmp_dir).context("failed to create temporary directory")?;
let project = Project::new(
tempdir.path(),
@ -151,68 +95,49 @@ impl ExecuteCommand {
project.auth_config().clone(),
);
let id = Arc::new(id);
let fs = source
.download(
&pkg_ref,
&DownloadOptions {
project: project.clone(),
reqwest: reqwest.clone(),
reporter: Arc::new(()),
id: id.clone(),
},
)
let (fs, target) = source
.download(&pkg_ref, &project, &reqwest)
.await
.context("failed to download package")?;
let bin_path = target.bin_path().context("package has no binary export")?;
fs.write_to(tempdir.path(), project.cas_dir(), true)
.await
.context("failed to write package contents")?;
let target = source
.get_target(
&pkg_ref,
&GetTargetOptions {
project: project.clone(),
path: Arc::from(tempdir.path()),
id: id.clone(),
},
)
.await
.context("failed to get target")?;
let bin_path = target.bin_path().context("package has no binary export")?;
let mut refreshed_sources = HashSet::new();
let graph = project
.dependency_graph(None, refreshed_sources.clone(), true)
.dependency_graph(None, &mut refreshed_sources, true)
.await
.context("failed to build dependency graph")?;
let graph = Arc::new(graph);
multi_progress.suspend(|| {
eprintln!("{}", style(format!("using {}", style(id).bold())).dim());
});
root_progress.reset();
root_progress.set_message("download");
root_progress.set_style(reporters::root_progress_style_with_progress());
project
let (rx, downloaded_graph) = project
.download_and_link(
&Arc::new(graph),
DownloadAndLinkOptions::<CliReporter<Stderr>, ()>::new(reqwest)
.reporter(reporter)
.refreshed_sources(refreshed_sources)
.prod(true),
&graph,
&Arc::new(Mutex::new(refreshed_sources)),
&reqwest,
true,
true,
|_| async { Ok::<_, std::io::Error>(()) },
)
.await
.context("failed to download and link dependencies")?;
.context("failed to download dependencies")?;
anyhow::Ok((tempdir, bin_path.to_relative_path_buf()))
},
progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(),
rx,
"📥 ".to_string(),
"downloading dependencies".to_string(),
"downloaded dependencies".to_string(),
)
.await?;
downloaded_graph
.await
.context("failed to download & link dependencies")?;
let mut caller =
tempfile::NamedTempFile::new_in(tempdir.path()).context("failed to create tempfile")?;
caller
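
The resolution above tries the Lune target first and only then falls back to plain Luau, so `x` prefers runnable Lune builds when both exist. A toy sketch of that fallback with resolution faked by a closure (the names here are illustrative, not pesde's API):

fn pick_target<'a>(
    resolve: impl Fn(&str) -> Option<&'a str>,
) -> Option<(&'static str, &'a str)> {
    // Prefer a Lune build of the package...
    if let Some(version) = resolve("lune") {
        return Some(("lune", version));
    }
    // ...and only fall back to plain Luau when none exists.
    resolve("luau").map(|version| ("luau", version))
}

fn main() {
    // Pretend only a Luau build of the package was published.
    let resolve = |target: &str| (target == "luau").then_some("0.3.1");
    match pick_target(resolve) {
        Some((target, version)) => println!("using {version} ({target})"),
        None => eprintln!("no Lune or Luau package could be found"),
    }
}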

View file

@ -1,26 +1,22 @@
use crate::cli::{
config::read_config,
style::{ERROR_PREFIX, INFO_STYLE, SUCCESS_STYLE},
};
use crate::cli::config::read_config;
use anyhow::Context;
use clap::Args;
use colored::Colorize;
use inquire::validator::Validation;
use pesde::{
errors::ManifestReadError,
manifest::{target::TargetKind, DependencyType},
names::{PackageName, PackageNames},
names::PackageName,
source::{
git_index::GitBasedSource,
ids::PackageId,
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
specifiers::DependencySpecifiers,
traits::{GetTargetOptions, PackageSource, RefreshOptions, ResolveOptions},
PackageSources,
traits::PackageSource,
},
Project, RefreshedSources, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
Project, DEFAULT_INDEX_NAME, SCRIPTS_LINK_FOLDER,
};
use semver::VersionReq;
use std::{fmt::Display, path::Path, str::FromStr, sync::Arc};
use std::{collections::HashSet, fmt::Display, str::FromStr};
#[derive(Debug, Args)]
pub struct InitCommand {}
@ -44,7 +40,8 @@ impl InitCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
match project.read_manifest().await {
Ok(_) => {
anyhow::bail!("project already initialized");
println!("{}", "project already initialized".red());
return Ok(());
}
Err(ManifestReadError::Io(e)) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e.into()),
@ -131,19 +128,13 @@ impl InitCommand {
manifest["indices"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[DEFAULT_INDEX_NAME] = toml_edit::value(source.repo_url().to_bstring().to_string());
let refreshed_sources = RefreshedSources::new();
if target_env.is_roblox()
|| inquire::prompt_confirmation("would you like to setup Roblox compatibility scripts?")
|| inquire::prompt_confirmation(
"would you like to setup default Roblox compatibility scripts?",
)
.unwrap()
{
refreshed_sources
.refresh(
&PackageSources::Pesde(source.clone()),
&RefreshOptions {
project: project.clone(),
},
)
PackageSource::refresh(&source, &project)
.await
.context("failed to refresh package source")?;
let config = source
@ -197,16 +188,14 @@ impl InitCommand {
let (v_id, pkg_ref) = source
.resolve(
&PesdeDependencySpecifier {
name: scripts_pkg_name.clone(),
name: scripts_pkg_name,
version: VersionReq::STAR,
index: None,
target: None,
},
&ResolveOptions {
project: project.clone(),
target: TargetKind::Lune,
refreshed_sources,
},
&project,
TargetKind::Lune,
&mut HashSet::new(),
)
.await
.context("failed to resolve scripts package")?
@ -214,22 +203,8 @@ impl InitCommand {
.pop_last()
.context("scripts package not found")?;
let id = Arc::new(PackageId::new(PackageNames::Pesde(scripts_pkg_name), v_id));
let target = source
.get_target(
&pkg_ref,
&GetTargetOptions {
project: project.clone(),
// HACK: the pesde package source doesn't use the path, so we can just use an empty one
path: Arc::from(Path::new("")),
id: id.clone(),
},
)
.await?;
let Some(scripts) = target.scripts().filter(|s| !s.is_empty()) else {
anyhow::bail!("scripts package has no scripts.")
let Some(scripts) = pkg_ref.target.scripts().filter(|s| !s.is_empty()) else {
anyhow::bail!("scripts package has no scripts. this is an issue with the index")
};
let scripts_field = &mut manifest["scripts"]
@ -245,9 +220,9 @@ impl InitCommand {
.or_insert(toml_edit::Item::Table(toml_edit::Table::new()));
let field = &mut dev_deps["scripts"];
field["name"] = toml_edit::value(id.name().to_string());
field["version"] = toml_edit::value(format!("^{}", id.version_id().version()));
field["target"] = toml_edit::value(id.version_id().target().to_string());
field["name"] = toml_edit::value(pkg_ref.name.to_string());
field["version"] = toml_edit::value(format!("^{}", v_id.version()));
field["target"] = toml_edit::value(v_id.target().to_string());
for (alias, (spec, ty)) in pkg_ref.dependencies {
if ty != DependencyType::Peer {
@ -258,18 +233,16 @@ impl InitCommand {
continue;
};
let field = &mut dev_deps[alias.as_str()];
let field = &mut dev_deps[alias];
field["name"] = toml_edit::value(spec.name.to_string());
field["version"] = toml_edit::value(spec.version.to_string());
field["target"] = toml_edit::value(
spec.target
.unwrap_or_else(|| id.version_id().target())
.to_string(),
);
field["target"] =
toml_edit::value(spec.target.unwrap_or_else(|| *v_id.target()).to_string());
}
} else {
println!(
"{ERROR_PREFIX}: no scripts package configured, this can cause issues with Roblox compatibility"
"{}",
"no scripts package configured, this can cause issues with Roblox compatibility".red()
);
if !inquire::prompt_confirmation("initialize regardless?").unwrap() {
return Ok(());
@ -281,8 +254,8 @@ impl InitCommand {
println!(
"{}\n{}: run `install` to fully finish setup",
SUCCESS_STYLE.apply_to("initialized project"),
INFO_STYLE.apply_to("tip")
"initialized project".green(),
"tip".cyan().bold()
);
Ok(())
}

View file

@ -1,10 +1,20 @@
use crate::cli::{
install::{install, InstallOptions},
run_on_workspace_members,
bin_dir, files::make_executable, progress_bar, run_on_workspace_members, up_to_date_lockfile,
};
use anyhow::Context;
use clap::Args;
use pesde::Project;
use std::num::NonZeroUsize;
use colored::{ColoredString, Colorize};
use fs_err::tokio as fs;
use futures::future::try_join_all;
use pesde::{
download_and_link::filter_graph, lockfile::Lockfile, manifest::target::TargetKind, Project,
MANIFEST_FILE_NAME,
};
use std::{
collections::{BTreeSet, HashMap, HashSet},
sync::Arc,
};
use tokio::sync::Mutex;
#[derive(Debug, Args, Copy, Clone)]
pub struct InstallCommand {
@ -15,41 +25,304 @@ pub struct InstallCommand {
/// Whether to not install dev dependencies
#[arg(long)]
prod: bool,
}
/// The maximum number of concurrent network requests
#[arg(long, default_value = "16")]
network_concurrency: NonZeroUsize,
fn bin_link_file(alias: &str) -> String {
let mut all_combinations = BTreeSet::new();
/// Whether to re-install all dependencies even if they are already installed
#[arg(long)]
force: bool,
for a in TargetKind::VARIANTS {
for b in TargetKind::VARIANTS {
all_combinations.insert((a, b));
}
}
let all_folders = all_combinations
.into_iter()
.map(|(a, b)| format!("{:?}", a.packages_folder(b)))
.collect::<BTreeSet<_>>()
.into_iter()
.collect::<Vec<_>>()
.join(", ");
format!(
r#"local process = require("@lune/process")
local fs = require("@lune/fs")
local stdio = require("@lune/stdio")
local project_root = process.cwd
local path_components = string.split(string.gsub(project_root, "\\", "/"), "/")
for i = #path_components, 1, -1 do
local path = table.concat(path_components, "/", 1, i)
if fs.isFile(path .. "/{MANIFEST_FILE_NAME}") then
project_root = path
break
end
end
for _, packages_folder in {{ {all_folders} }} do
local path = `{{project_root}}/{{packages_folder}}/{alias}.bin.luau`
if fs.isFile(path) then
require(path)
return
end
end
stdio.ewrite(stdio.color("red") .. "binary `{alias}` not found. are you in the right directory?" .. stdio.color("reset") .. "\n")
"#,
)
}
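
// The generated stub above walks up from the current working directory until
// it finds the manifest, then requires the alias's `.bin.luau` entry from a
// matching packages folder. The same root search, sketched in Rust for
// clarity (assuming the manifest file name is "pesde.toml"):

use std::path::{Path, PathBuf};

fn find_project_root(start: &Path, manifest_name: &str) -> Option<PathBuf> {
    let mut dir = start;
    loop {
        if dir.join(manifest_name).is_file() {
            return Some(dir.to_path_buf());
        }
        // Stop at the filesystem root if no manifest was found.
        dir = dir.parent()?;
    }
}

fn main() {
    let cwd = std::env::current_dir().expect("no current directory");
    match find_project_root(&cwd, "pesde.toml") {
        Some(root) => println!("project root: {}", root.display()),
        None => eprintln!("no pesde.toml found above {}", cwd.display()),
    }
}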
#[cfg(feature = "patches")]
const JOBS: u8 = 5;
#[cfg(not(feature = "patches"))]
const JOBS: u8 = 4;
fn job(n: u8) -> ColoredString {
format!("[{n}/{JOBS}]").dimmed().bold()
}
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
struct CallbackError(#[from] anyhow::Error);
impl InstallCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let options = InstallOptions {
locked: self.locked,
prod: self.prod,
write: true,
network_concurrency: self.network_concurrency,
use_lockfile: true,
force: self.force,
let mut refreshed_sources = HashSet::new();
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let lockfile = if self.locked {
match up_to_date_lockfile(&project).await? {
None => {
anyhow::bail!(
"lockfile is out of sync, run `{} install` to update it",
env!("CARGO_BIN_NAME")
);
}
file => file,
}
} else {
match project.deser_lockfile().await {
Ok(lockfile) => {
if lockfile.overrides != manifest.overrides {
tracing::debug!("overrides are different");
None
} else if lockfile.target != manifest.target.kind() {
tracing::debug!("target kind is different");
None
} else {
Some(lockfile)
}
}
Err(pesde::errors::LockfileReadError::Io(e))
if e.kind() == std::io::ErrorKind::NotFound =>
{
None
}
Err(e) => return Err(e.into()),
}
};
install(&options, &project, reqwest.clone(), true).await?;
println!(
"\n{}\n",
format!("[now installing {} {}]", manifest.name, manifest.target)
.bold()
.on_bright_black()
);
println!("{} ❌ removing current package folders", job(1));
{
let mut deleted_folders = HashMap::new();
for target_kind in TargetKind::VARIANTS {
let folder = manifest.target.kind().packages_folder(target_kind);
let package_dir = project.package_dir();
deleted_folders
.entry(folder.to_string())
.or_insert_with(|| async move {
tracing::debug!("deleting the {folder} folder");
if let Some(e) = fs::remove_dir_all(package_dir.join(&folder))
.await
.err()
.filter(|e| e.kind() != std::io::ErrorKind::NotFound)
{
return Err(e).context(format!("failed to remove the {folder} folder"));
};
run_on_workspace_members(&project, |project| {
let reqwest = reqwest.clone();
async move {
install(&options, &project, reqwest, false).await?;
Ok(())
});
}
try_join_all(deleted_folders.into_values())
.await
.context("failed to remove package folders")?;
}
let old_graph = lockfile.map(|lockfile| {
lockfile
.graph
.into_iter()
.map(|(name, versions)| {
(
name,
versions
.into_iter()
.map(|(version, node)| (version, node.node))
.collect(),
)
})
.collect()
});
println!("{} 📦 building dependency graph", job(2));
let graph = project
.dependency_graph(old_graph.as_ref(), &mut refreshed_sources, false)
.await
.context("failed to build dependency graph")?;
let graph = Arc::new(graph);
let bin_folder = bin_dir().await?;
let downloaded_graph = {
let (rx, downloaded_graph) = project
.download_and_link(
&graph,
&Arc::new(Mutex::new(refreshed_sources)),
&reqwest,
self.prod,
true,
|graph| {
let graph = graph.clone();
async move {
try_join_all(
graph
.values()
.flat_map(|versions| versions.values())
.filter(|node| node.target.bin_path().is_some())
.filter_map(|node| node.node.direct.as_ref())
.map(|(alias, _, _)| alias)
.filter(|alias| {
if *alias == env!("CARGO_BIN_NAME") {
tracing::warn!(
"package {alias} has the same name as the CLI, skipping bin link"
);
return false;
}
true
})
.map(|alias| {
let bin_folder = bin_folder.clone();
async move {
let bin_exec_file = bin_folder.join(alias).with_extension(std::env::consts::EXE_EXTENSION);
let impl_folder = bin_folder.join(".impl");
fs::create_dir_all(&impl_folder).await.context("failed to create bin link folder")?;
let bin_file = impl_folder.join(alias).with_extension("luau");
fs::write(&bin_file, bin_link_file(alias))
.await
.context("failed to write bin link file")?;
#[cfg(windows)]
{
fs::copy(
std::env::current_exe()
.context("failed to get current executable path")?,
&bin_exec_file,
)
.await
.context("failed to copy bin link file")?;
}
#[cfg(not(windows))]
{
fs::write(
&bin_exec_file,
format!(r#"#!/bin/sh
exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
),
)
.await
.context("failed to link bin link file")?;
}
make_executable(&bin_exec_file).await.context("failed to make bin link file executable")?;
Ok::<_, CallbackError>(())
}
}),
)
.await
.map(|_| ())
}
}
)
.await
.context("failed to download dependencies")?;
progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(),
rx,
format!("{} 📥 ", job(3)),
"downloading dependencies".to_string(),
"downloaded dependencies".to_string(),
)
.await?;
downloaded_graph
.await
.context("failed to download & link dependencies")?
};
#[cfg(feature = "patches")]
{
let rx = project
.apply_patches(&filter_graph(&downloaded_graph, self.prod))
.await
.context("failed to apply patches")?;
progress_bar(
manifest.patches.values().map(|v| v.len() as u64).sum(),
rx,
format!("{} 🩹 ", job(JOBS - 1)),
"applying patches".to_string(),
"applied patches".to_string(),
)
.await?;
}
println!("{} 🧹 finishing up", job(JOBS));
project
.write_lockfile(Lockfile {
name: manifest.name,
version: manifest.version,
target: manifest.target.kind(),
overrides: manifest.overrides,
graph: downloaded_graph,
workspace: run_on_workspace_members(&project, |project| {
let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, reqwest)).await }
})
.await?,
})
.await
.context("failed to write lockfile")?;
Ok(())
}
}
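
The lockfile handling near the top of `run` only reuses the old dependency graph when the recorded overrides and target kind still match the manifest; anything else forces resolution from scratch. A minimal sketch of that staleness test, with placeholder types rather than pesde's real ones:

#[derive(PartialEq)]
enum TargetKind {
    Luau,
    Lune,
}

struct Manifest {
    target: TargetKind,
    overrides: Vec<(String, String)>,
}

struct Lockfile {
    target: TargetKind,
    overrides: Vec<(String, String)>,
}

// The old graph is only trusted when both checks pass.
fn reusable(lockfile: &Lockfile, manifest: &Manifest) -> bool {
    lockfile.overrides == manifest.overrides && lockfile.target == manifest.target
}

fn main() {
    let manifest = Manifest { target: TargetKind::Luau, overrides: vec![] };
    let lockfile = Lockfile { target: TargetKind::Lune, overrides: vec![] };
    // Target kinds differ, so the graph must be rebuilt.
    println!("reuse old graph: {}", reusable(&lockfile, &manifest));
}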

View file

@ -1,51 +0,0 @@
use std::collections::BTreeMap;
use anyhow::Context;
use clap::Args;
use crate::cli::{
dep_type_to_key,
style::{INFO_STYLE, SUCCESS_STYLE},
};
use pesde::{
manifest::{Alias, DependencyType},
source::specifiers::DependencySpecifiers,
Project,
};
#[derive(Debug, Args)]
pub struct ListCommand {}
impl ListCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let all_deps = manifest
.all_dependencies()
.context("failed to get all dependencies")?
.into_iter()
.fold(
BTreeMap::<DependencyType, BTreeMap<Alias, DependencySpecifiers>>::new(),
|mut acc, (alias, (spec, ty))| {
acc.entry(ty).or_default().insert(alias, spec);
acc
},
);
for (dep_ty, deps) in all_deps {
let dep_key = dep_type_to_key(dep_ty);
println!("{}", INFO_STYLE.apply_to(dep_key));
for (alias, spec) in deps {
println!("{}: {spec}", SUCCESS_STYLE.apply_to(alias));
}
println!();
}
Ok(())
}
}
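
The fold above groups the flat (alias, (specifier, type)) pairs into one map per dependency type before printing. The same pattern in isolation, with plain strings standing in for the real alias and specifier types:

use std::collections::BTreeMap;

fn main() {
    let deps = vec![
        ("foo", ("acme/foo@^1", "dependencies")),
        ("bar", ("acme/bar@^2", "dev_dependencies")),
    ];

    // Key once by dependency type, then by alias within each type.
    let grouped = deps.into_iter().fold(
        BTreeMap::<&str, BTreeMap<&str, &str>>::new(),
        |mut acc, (alias, (spec, ty))| {
            acc.entry(ty).or_default().insert(alias, spec);
            acc
        },
    );

    for (ty, deps) in grouped {
        println!("{ty}");
        for (alias, spec) in deps {
            println!("  {alias}: {spec}");
        }
    }
}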

View file

@ -2,27 +2,22 @@ use pesde::Project;
mod add;
mod auth;
mod cas;
mod config;
mod deprecate;
mod execute;
mod init;
mod install;
mod list;
mod outdated;
#[cfg(feature = "patches")]
mod patch;
#[cfg(feature = "patches")]
mod patch_commit;
mod publish;
mod remove;
mod run;
#[cfg(feature = "version-management")]
mod self_install;
#[cfg(feature = "version-management")]
mod self_upgrade;
mod update;
mod yank;
#[derive(Debug, clap::Subcommand)]
pub enum Subcommand {
@ -33,42 +28,21 @@ pub enum Subcommand {
#[command(subcommand)]
Config(config::ConfigCommands),
/// CAS-related commands
#[command(subcommand)]
Cas(cas::CasCommands),
/// Initializes a manifest file in the current directory
Init(init::InitCommand),
/// Adds a dependency to the project
Add(add::AddCommand),
/// Removes a dependency from the project
Remove(remove::RemoveCommand),
/// Installs all dependencies for the project
Install(install::InstallCommand),
/// Updates the project's lockfile. Run install to apply changes
Update(update::UpdateCommand),
/// Checks for outdated dependencies
Outdated(outdated::OutdatedCommand),
/// Lists all dependencies in the project
List(list::ListCommand),
/// Runs a script, an executable package, or a file with Lune
Run(run::RunCommand),
/// Installs all dependencies for the project
Install(install::InstallCommand),
/// Publishes the project to the registry
Publish(publish::PublishCommand),
/// Yanks a package from the registry
Yank(yank::YankCommand),
/// Deprecates a package from the registry
Deprecate(deprecate::DeprecateCommand),
/// Installs the pesde binary and scripts
#[cfg(feature = "version-management")]
SelfInstall(self_install::SelfInstallCommand),
/// Sets up a patching environment for a package
#[cfg(feature = "patches")]
@ -78,17 +52,22 @@ pub enum Subcommand {
#[cfg(feature = "patches")]
PatchCommit(patch_commit::PatchCommitCommand),
/// Executes a binary package without needing to be run in a project directory
#[clap(name = "x", visible_alias = "execute", visible_alias = "exec")]
Execute(execute::ExecuteCommand),
/// Installs the pesde binary and scripts
#[cfg(feature = "version-management")]
SelfInstall(self_install::SelfInstallCommand),
/// Installs the latest version of pesde
#[cfg(feature = "version-management")]
SelfUpgrade(self_upgrade::SelfUpgradeCommand),
/// Adds a dependency to the project
Add(add::AddCommand),
/// Updates the project's lockfile. Run install to apply changes
Update(update::UpdateCommand),
/// Checks for outdated dependencies
Outdated(outdated::OutdatedCommand),
/// Executes a binary package without needing to be run in a project directory
#[clap(name = "x", visible_alias = "execute", visible_alias = "exec")]
Execute(execute::ExecuteCommand),
}
impl Subcommand {
@ -96,27 +75,22 @@ impl Subcommand {
match self {
Subcommand::Auth(auth) => auth.run(project, reqwest).await,
Subcommand::Config(config) => config.run().await,
Subcommand::Cas(cas) => cas.run(project).await,
Subcommand::Init(init) => init.run(project).await,
Subcommand::Add(add) => add.run(project).await,
Subcommand::Remove(remove) => remove.run(project).await,
Subcommand::Install(install) => install.run(project, reqwest).await,
Subcommand::Update(update) => update.run(project, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::List(list) => list.run(project).await,
Subcommand::Run(run) => run.run(project).await,
Subcommand::Install(install) => install.run(project, reqwest).await,
Subcommand::Publish(publish) => publish.run(project, reqwest).await,
Subcommand::Yank(yank) => yank.run(project, reqwest).await,
Subcommand::Deprecate(deprecate) => deprecate.run(project, reqwest).await,
#[cfg(feature = "version-management")]
Subcommand::SelfInstall(self_install) => self_install.run().await,
#[cfg(feature = "patches")]
Subcommand::Patch(patch) => patch.run(project, reqwest).await,
#[cfg(feature = "patches")]
Subcommand::PatchCommit(patch_commit) => patch_commit.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest).await,
#[cfg(feature = "version-management")]
Subcommand::SelfInstall(self_install) => self_install.run().await,
#[cfg(feature = "version-management")]
Subcommand::SelfUpgrade(self_upgrade) => self_upgrade.run(reqwest).await,
Subcommand::Add(add) => add.run(project).await,
Subcommand::Update(update) => update.run(project, reqwest).await,
Subcommand::Outdated(outdated) => outdated.run(project).await,
Subcommand::Execute(execute) => execute.run(project, reqwest).await,
}
}
}

View file

@ -1,15 +1,19 @@
use crate::cli::up_to_date_lockfile;
use anyhow::Context;
use clap::Args;
use futures::future::try_join_all;
use pesde::{
refresh_sources,
source::{
refs::PackageRefs,
specifiers::DependencySpecifiers,
traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
traits::{PackageRef, PackageSource},
},
Project, RefreshedSources,
Project,
};
use semver::VersionReq;
use tokio::task::JoinSet;
use std::{collections::HashSet, sync::Arc};
use tokio::sync::Mutex;
#[derive(Debug, Args)]
pub struct OutdatedCommand {
@ -36,73 +40,81 @@ impl OutdatedCommand {
.context("failed to read manifest")?;
let manifest_target_kind = manifest.target.kind();
let refreshed_sources = RefreshedSources::new();
let mut refreshed_sources = HashSet::new();
let mut tasks = graph
refresh_sources(
&project,
graph
.iter()
.flat_map(|(_, versions)| versions.iter())
.map(|(_, node)| node.node.pkg_ref.source()),
&mut refreshed_sources,
)
.await?;
let refreshed_sources = Arc::new(Mutex::new(refreshed_sources));
if try_join_all(
graph
.into_iter()
.map(|(current_id, node)| {
.flat_map(|(_, versions)| versions.into_iter())
.map(|(current_version_id, node)| {
let project = project.clone();
let refreshed_sources = refreshed_sources.clone();
async move {
let Some((alias, mut specifier, _)) = node.direct else {
let Some((alias, mut specifier, _)) = node.node.direct else {
return Ok::<bool, anyhow::Error>(true);
};
if matches!(
specifier,
DependencySpecifiers::Git(_)
| DependencySpecifiers::Workspace(_)
| DependencySpecifiers::Path(_)
DependencySpecifiers::Git(_) | DependencySpecifiers::Workspace(_)
) {
return Ok(true);
}
let source = node.pkg_ref.source();
refreshed_sources
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
)
.await?;
let source = node.node.pkg_ref.source();
if !self.strict {
match &mut specifier {
DependencySpecifiers::Pesde(spec) => {
match specifier {
DependencySpecifiers::Pesde(ref mut spec) => {
spec.version = VersionReq::STAR;
}
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(spec) => {
DependencySpecifiers::Wally(ref mut spec) => {
spec.version = VersionReq::STAR;
}
DependencySpecifiers::Git(_) => {}
DependencySpecifiers::Workspace(_) => {}
DependencySpecifiers::Path(_) => {}
};
}
let version_id = source
.resolve(
&specifier,
&ResolveOptions {
project: project.clone(),
target: manifest_target_kind,
refreshed_sources: refreshed_sources.clone(),
},
&project,
manifest_target_kind,
&mut *refreshed_sources.lock().await,
)
.await
.context("failed to resolve package versions")?
.1
.pop_last()
.map(|(v_id, _)| v_id)
.with_context(|| format!("no versions of {specifier} found"))?;
.context(format!("no versions of {specifier} found"))?;
if version_id != *current_id.version_id() {
if version_id != current_version_id {
println!(
"{} ({alias}) {} -> {version_id}",
current_id.name(),
current_id.version_id(),
"{} {} ({alias}) {} -> {}",
match node.node.pkg_ref {
PackageRefs::Pesde(pkg_ref) => pkg_ref.name.to_string(),
#[cfg(feature = "wally-compat")]
PackageRefs::Wally(pkg_ref) => pkg_ref.name.to_string(),
_ => unreachable!(),
},
current_version_id.target(),
current_version_id.version(),
version_id.version()
);
return Ok(false);
@ -110,18 +122,12 @@ impl OutdatedCommand {
Ok(true)
}
})
.collect::<JoinSet<_>>();
let mut all_up_to_date = true;
while let Some(task) = tasks.join_next().await {
if !task.unwrap()? {
all_up_to_date = false;
}
}
if all_up_to_date {
}),
)
.await?
.into_iter()
.all(|b| b)
{
println!("all packages are up to date");
}
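
In non-strict mode the requirement is widened to `*` before resolving, so any newer release is reported rather than only releases satisfying the manifest's range. A small sketch using the semver crate the command already imports:

use semver::{Version, VersionReq};

fn main() {
    let current = Version::parse("1.2.3").unwrap();
    let latest = Version::parse("2.0.0").unwrap();

    let strict = VersionReq::parse("^1.2").unwrap();
    let loose = VersionReq::STAR;

    // Under the manifest's own requirement, 2.0.0 is not a candidate...
    assert!(!strict.matches(&latest));
    // ...but with `*` it is, so the package is reported as outdated.
    assert!(loose.matches(&latest) && latest > current);
    println!("update available: {current} -> {latest}");
}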

View file

@ -1,18 +1,13 @@
use std::sync::Arc;
use crate::cli::{
style::{CLI_STYLE, INFO_STYLE, WARN_PREFIX},
up_to_date_lockfile, VersionedPackageName,
};
use crate::cli::{up_to_date_lockfile, VersionedPackageName};
use anyhow::Context;
use clap::Args;
use console::style;
use colored::Colorize;
use fs_err::tokio as fs;
use pesde::{
patches::setup_patches_repo,
source::{
refs::PackageRefs,
traits::{DownloadOptions, PackageRef, PackageSource},
traits::{PackageRef, PackageSource},
},
Project, MANIFEST_FILE_NAME,
};
@ -32,38 +27,31 @@ impl PatchCommand {
anyhow::bail!("outdated lockfile, please run the install command first")
};
let id = self.package.get(&graph)?;
let (name, version_id) = self.package.get(&graph)?;
let node = graph.get(&id).context("package not found in graph")?;
let node = graph
.get(&name)
.and_then(|versions| versions.get(&version_id))
.context("package not found in graph")?;
if matches!(
node.pkg_ref,
PackageRefs::Workspace(_) | PackageRefs::Path(_)
) {
anyhow::bail!("cannot patch a workspace or a path package")
if matches!(node.node.pkg_ref, PackageRefs::Workspace(_)) {
anyhow::bail!("cannot patch a workspace package")
}
let source = node.pkg_ref.source();
let source = node.node.pkg_ref.source();
let directory = project
.data_dir()
.join("patches")
.join(id.name().escaped())
.join(id.version_id().escaped())
.join(jiff::Timestamp::now().as_second().to_string());
.join(name.escaped())
.join(version_id.escaped())
.join(chrono::Utc::now().timestamp().to_string());
fs::create_dir_all(&directory).await?;
source
.download(
&node.pkg_ref,
&DownloadOptions {
project: project.clone(),
reqwest,
reporter: Arc::new(()),
id: Arc::new(id),
},
)
.download(&node.node.pkg_ref, &project, &reqwest)
.await?
.0
.write_to(&directory, project.cas_dir(), false)
.await
.context("failed to write package contents")?;
@ -71,13 +59,17 @@ impl PatchCommand {
setup_patches_repo(&directory)?;
println!(
r#"done! modify the files in the directory, then run {} {}{} to apply.
{WARN_PREFIX}: do not commit these changes
{}: the {MANIFEST_FILE_NAME} file will be ignored when patching"#,
CLI_STYLE.apply_to(concat!("`", env!("CARGO_BIN_NAME"), " patch-commit")),
style(format!("'{}'", directory.display())).cyan().bold(),
CLI_STYLE.apply_to("`"),
INFO_STYLE.apply_to("note")
concat!(
"done! modify the files in the directory, then run `",
env!("CARGO_BIN_NAME"),
r#" patch-commit {}` to apply.
{}: do not commit these changes
{}: the {} file will be ignored when patching"#
),
directory.display().to_string().bold().cyan(),
"warning".yellow(),
"note".blue(),
MANIFEST_FILE_NAME
);
open::that(directory)?;

View file

@ -2,12 +2,7 @@ use crate::cli::up_to_date_lockfile;
use anyhow::Context;
use clap::Args;
use fs_err::tokio as fs;
use pesde::{
names::PackageNames,
patches::create_patch,
source::ids::{PackageId, VersionId},
Project,
};
use pesde::{names::PackageNames, patches::create_patch, source::version_id::VersionId, Project};
use std::{path::PathBuf, str::FromStr};
#[derive(Debug, Args)]
@ -25,7 +20,7 @@ impl PatchCommitCommand {
anyhow::bail!("outdated lockfile, please run the install command first")
};
let id = PackageId::new(
let (name, version_id) = (
PackageNames::from_escaped(
self.directory
.parent()
@ -48,7 +43,10 @@ impl PatchCommitCommand {
)?,
);
graph.get(&id).context("package not found in graph")?;
graph
.get(&name)
.and_then(|versions| versions.get(&version_id))
.context("package not found in graph")?;
let mut manifest = toml_edit::DocumentMut::from_str(
&project
@ -59,26 +57,28 @@ impl PatchCommitCommand {
.context("failed to parse manifest")?;
let patch = create_patch(&self.directory).context("failed to create patch")?;
fs::remove_dir_all(self.directory)
.await
.context("failed to remove patch directory")?;
let patches_dir = project.package_dir().join("patches");
fs::create_dir_all(&patches_dir)
.await
.context("failed to create patches directory")?;
let patch_file_name = format!(
"{}-{}.patch",
id.name().escaped(),
id.version_id().escaped()
);
let patch_file_name = format!("{}-{}.patch", name.escaped(), version_id.escaped());
let patch_file = patches_dir.join(&patch_file_name);
if patch_file.exists() {
anyhow::bail!("patch file already exists: {}", patch_file.display());
}
fs::write(&patch_file, patch)
.await
.context("failed to write patch file")?;
manifest["patches"].or_insert(toml_edit::Item::Table(toml_edit::Table::new()))
[&id.name().to_string()][&id.version_id().to_string()] =
[&name.to_string()][&version_id.to_string()] =
toml_edit::value(format!("patches/{patch_file_name}"));
project
@ -86,10 +86,6 @@ impl PatchCommitCommand {
.await
.context("failed to write manifest")?;
fs::remove_dir_all(self.directory)
.await
.context("failed to remove patch directory")?;
println!(concat!(
"done! run `",
env!("CARGO_BIN_NAME"),

View file

@ -1,37 +1,32 @@
use crate::cli::{
display_err, run_on_workspace_members,
style::{ERROR_PREFIX, ERROR_STYLE, SUCCESS_STYLE, WARN_PREFIX},
up_to_date_lockfile,
};
use crate::cli::{display_err, run_on_workspace_members, up_to_date_lockfile};
use anyhow::Context;
use async_compression::Level;
use clap::Args;
use console::style;
use colored::Colorize;
use fs_err::tokio as fs;
#[allow(deprecated)]
use pesde::{
manifest::{target::Target, DependencyType},
matching_globs,
matching_globs_old_behaviour,
scripts::ScriptName,
source::{
git_index::GitBasedSource,
pesde::{specifier::PesdeDependencySpecifier, PesdePackageSource},
specifiers::DependencySpecifiers,
traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions, ResolveOptions},
traits::PackageSource,
workspace::{
specifier::{VersionType, VersionTypeOrReq},
WorkspacePackageSource,
},
PackageSources, ADDITIONAL_FORBIDDEN_FILES, IGNORED_DIRS, IGNORED_FILES,
IGNORED_DIRS, IGNORED_FILES,
},
Project, RefreshedSources, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
Project, DEFAULT_INDEX_NAME, MANIFEST_FILE_NAME,
};
use reqwest::{header::AUTHORIZATION, StatusCode};
use semver::VersionReq;
use std::{path::PathBuf, sync::Arc};
use std::{collections::HashSet, path::PathBuf};
use tempfile::Builder;
use tokio::{
io::{AsyncSeekExt, AsyncWriteExt},
task::JoinSet,
};
use tokio::io::{AsyncSeekExt, AsyncWriteExt};
#[derive(Debug, Args, Clone)]
pub struct PublishCommand {
@ -46,39 +41,14 @@ pub struct PublishCommand {
/// The index to publish to
#[arg(short, long, default_value_t = DEFAULT_INDEX_NAME.to_string())]
index: String,
/// Whether to skip syntax validation
#[arg(long)]
no_verify: bool,
}
impl PublishCommand {
fn validate_luau_file(&self, name: &str, contents: &str) -> anyhow::Result<()> {
if self.no_verify {
return Ok(());
}
if let Err(err) = full_moon::parse(contents) {
eprintln!(
"{ERROR_PREFIX}: {name} is not a valid Luau file:\n{}",
err.into_iter()
.map(|err| format!("\t- {}", ERROR_STYLE.apply_to(err)))
.collect::<Vec<_>>()
.join("\n")
);
anyhow::bail!("failed to validate Luau file");
}
Ok(())
}
async fn run_impl(
self,
project: &Project,
reqwest: reqwest::Client,
is_root: bool,
refreshed_sources: &RefreshedSources,
) -> anyhow::Result<()> {
let mut manifest = project
.deser_manifest()
@ -87,20 +57,14 @@ impl PublishCommand {
println!(
"\n{}\n",
style(format!(
"[now publishing {} {}]",
manifest.name, manifest.target
))
format!("[now publishing {} {}]", manifest.name, manifest.target)
.bold()
.on_color256(235)
.on_bright_black()
);
if manifest.private {
if !is_root {
println!(
"{}",
ERROR_STYLE.apply_to("package is private, cannot publish")
);
println!("{}", "package is private, cannot publish".red().bold());
}
return Ok(());
@ -123,61 +87,19 @@ impl PublishCommand {
match up_to_date_lockfile(project).await? {
Some(lockfile) => {
let mut tasks = lockfile
if lockfile
.graph
.iter()
.filter(|(_, node)| node.direct.is_some())
.map(|(id, node)| {
let project = project.clone();
let container_folder = node.container_folder_from_project(
id,
&project,
manifest.target.kind(),
);
let id = Arc::new(id.clone());
let node = node.clone();
let refreshed_sources = refreshed_sources.clone();
async move {
let source = node.pkg_ref.source();
refreshed_sources
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
)
.await
.context("failed to refresh source")?;
let target = source
.get_target(
&node.pkg_ref,
&GetTargetOptions {
project,
path: Arc::from(container_folder),
id,
},
)
.await?;
Ok::<_, anyhow::Error>(
target.build_files().is_none()
&& !matches!(node.resolved_ty, DependencyType::Dev),
)
}
.values()
.flatten()
.filter_map(|(_, node)| node.node.direct.as_ref().map(|_| node))
.any(|node| {
node.target.build_files().is_none()
&& !matches!(node.node.resolved_ty, DependencyType::Dev)
})
.collect::<JoinSet<_>>();
while let Some(result) = tasks.join_next().await {
let result = result
.unwrap()
.context("failed to get target of dependency node")?;
if result {
{
anyhow::bail!("roblox packages may not depend on non-roblox packages");
}
}
}
None => {
anyhow::bail!("outdated lockfile, please run the install command first")
}
@ -196,8 +118,8 @@ impl PublishCommand {
let mut display_build_files: Vec<String> = vec![];
let (lib_path, bin_path, scripts, target_kind) = (
manifest.target.lib_path().map(|p| p.to_relative_path_buf()),
manifest.target.bin_path().map(|p| p.to_relative_path_buf()),
manifest.target.lib_path().cloned(),
manifest.target.bin_path().cloned(),
manifest.target.scripts().cloned(),
manifest.target.kind(),
);
@ -208,17 +130,20 @@ impl PublishCommand {
_ => None,
};
let mut paths = matching_globs(
#[allow(deprecated)]
let mut paths = matching_globs_old_behaviour(
project.package_dir(),
manifest.includes.iter().map(|s| s.as_str()),
true,
false,
)
.await
.context("failed to get included files")?;
if paths.insert(PathBuf::from(MANIFEST_FILE_NAME)) {
println!("{WARN_PREFIX}: {MANIFEST_FILE_NAME} was not included, adding it");
println!(
"{}: {MANIFEST_FILE_NAME} was not included, adding it",
"warn".yellow().bold()
);
}
if paths.iter().any(|p| p.starts_with(".git")) {
@ -231,32 +156,29 @@ impl PublishCommand {
"readme" | "readme.md" | "readme.txt"
)
}) {
println!("{WARN_PREFIX}: no README file included, consider adding one");
println!(
"{}: no README file included, consider adding one",
"warn".yellow().bold()
);
}
if !paths.iter().any(|p| p.starts_with("docs")) {
println!("{WARN_PREFIX}: docs directory not included, consider adding one");
println!(
"{}: docs directory not included, consider adding one",
"warn".yellow().bold()
);
}
for path in &paths {
let Some(file_name) = path.file_name() else {
continue;
};
if ADDITIONAL_FORBIDDEN_FILES.contains(&file_name.to_string_lossy().as_ref()) {
if file_name == "default.project.json" {
if path
.file_name()
.is_some_and(|n| n == "default.project.json")
{
anyhow::bail!(
"default.project.json was included at `{}`, this should be generated by the {} script upon dependants installation",
path.display(),
ScriptName::RobloxSyncConfigGenerator
);
} else {
anyhow::bail!(
"forbidden file {} was included at `{}`",
file_name.to_string_lossy(),
path.display()
);
}
}
}
@ -295,14 +217,21 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
let export_path = export_path
.canonicalize()
.with_context(|| format!("failed to canonicalize {name}"))?;
.context(format!("failed to canonicalize {name}"))?;
self.validate_luau_file(&format!("file at {name}"), &contents)?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.join(", ")
}) {
anyhow::bail!("{name} is not a valid Luau file: {err}");
}
let first_part = relative_export_path
.components()
.next()
.with_context(|| format!("{name} must contain at least one part"))?;
.context(format!("{name} must contain at least one part"))?;
let first_part = match first_part {
relative_path::Component::Normal(part) => part,
@ -315,14 +244,20 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.unwrap()
.to_path_buf(),
) {
println!("{WARN_PREFIX}: {name} was not included, adding {relative_export_path}");
println!(
"{}: {name} was not included, adding {relative_export_path}",
"warn".yellow().bold()
);
}
if roblox_target
.as_mut()
.is_some_and(|build_files| build_files.insert(first_part.to_string()))
{
println!("{WARN_PREFIX}: {name} was not in build files, adding {first_part}");
println!(
"{}: {name} was not in build files, adding {first_part}",
"warn".yellow().bold()
);
}
}
@ -330,7 +265,8 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
for build_file in build_files.iter() {
if build_file.eq_ignore_ascii_case(MANIFEST_FILE_NAME) {
println!(
"{WARN_PREFIX}: {MANIFEST_FILE_NAME} is in build files, please remove it",
"{}: {MANIFEST_FILE_NAME} is in build files, please remove it",
"warn".yellow().bold()
);
continue;
@ -373,9 +309,16 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
let script_path = script_path
.canonicalize()
.with_context(|| format!("failed to canonicalize script {name}"))?;
.context(format!("failed to canonicalize script {name}"))?;
self.validate_luau_file(&format!("the `{name}` script"), &contents)?;
if let Err(err) = full_moon::parse(&contents).map_err(|errs| {
errs.into_iter()
.map(|err| err.to_string())
.collect::<Vec<_>>()
.join(", ")
}) {
anyhow::bail!("script {name} is not a valid Luau file: {err}");
}
if paths.insert(
script_path
@ -383,7 +326,10 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.unwrap()
.to_path_buf(),
) {
println!("{WARN_PREFIX}: script {name} was not included, adding {path}");
println!(
"{}: script {name} was not included, adding {path}",
"warn".yellow().bold()
);
}
}
}
@ -412,9 +358,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
&relative_path,
fs::File::open(&path)
.await
.with_context(|| {
format!("failed to read `{}`", relative_path.display())
})?
.context(format!("failed to read `{}`", relative_path.display()))?
.file_mut(),
)
.await?;
@ -438,9 +382,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
manifest
.indices
.get(&index_name)
.with_context(|| {
format!("index {index_name} not found in indices field")
})?
.context(format!("index {index_name} not found in indices field"))?
.to_string(),
);
}
@ -455,23 +397,16 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
manifest
.wally_indices
.get(&index_name)
.with_context(|| {
format!("index {index_name} not found in wally_indices field")
})?
.context(format!(
"index {index_name} not found in wally_indices field"
))?
.to_string(),
);
}
DependencySpecifiers::Git(_) => {}
DependencySpecifiers::Workspace(spec) => {
let pkg_ref = WorkspacePackageSource
.resolve(
spec,
&ResolveOptions {
project: project.clone(),
target: target_kind,
refreshed_sources: refreshed_sources.clone(),
},
)
.resolve(spec, project, target_kind, &mut HashSet::new())
.await
.context("failed to resolve workspace package")?
.1
@ -501,7 +436,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
}
VersionTypeOrReq::Req(r) => r,
v => VersionReq::parse(&format!("{v}{}", manifest.version))
.with_context(|| format!("failed to parse version for {v}"))?,
.context(format!("failed to parse version for {v}"))?,
},
index: Some(
manifest
@ -513,17 +448,11 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
target: Some(spec.target.unwrap_or(manifest.target.kind())),
});
}
DependencySpecifiers::Path(_) => {
anyhow::bail!("path dependencies are not allowed in published packages")
}
}
}
{
println!(
"\n{}",
style("please confirm the following information:").bold()
);
println!("\n{}", "please confirm the following information:".bold());
println!("name: {}", manifest.name);
println!("version: {}", manifest.version);
println!(
@ -594,9 +523,10 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
);
if !self.dry_run
&& !self.yes && !inquire::Confirm::new("is this information correct?").prompt()?
&& !self.yes
&& !inquire::Confirm::new("is this information correct?").prompt()?
{
println!("\n{}", ERROR_STYLE.apply_to("publish aborted"));
println!("\n{}", "publish aborted".red().bold());
return Ok(());
}
@ -643,15 +573,9 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
let index_url = manifest
.indices
.get(&self.index)
.with_context(|| format!("missing index {}", self.index))?;
.context(format!("missing index {}", self.index))?;
let source = PesdePackageSource::new(index_url.clone());
refreshed_sources
.refresh(
&PackageSources::Pesde(source.clone()),
&RefreshOptions {
project: project.clone(),
},
)
PackageSource::refresh(&source, project)
.await
.context("failed to refresh source")?;
let config = source
@ -667,19 +591,38 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
);
}
let deps = manifest.all_dependencies().context("dependency conflict")?;
if let Some((disallowed, _)) = deps.iter().find(|(_, (spec, _))| match spec {
DependencySpecifiers::Pesde(spec) => {
!config.other_registries_allowed.is_allowed_or_same(
source.repo_url().clone(),
gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap(),
)
}
DependencySpecifiers::Git(spec) => !config.git_allowed.is_allowed(spec.repo.clone()),
#[cfg(feature = "wally-compat")]
DependencySpecifiers::Wally(spec) => !config
.wally_allowed
.is_allowed(gix::Url::try_from(spec.index.as_deref().unwrap()).unwrap()),
_ => false,
}) {
anyhow::bail!("dependency `{disallowed}` is not allowed on this index");
}
if self.dry_run {
fs::write("package.tar.gz", archive).await?;
println!(
"{}",
SUCCESS_STYLE.apply_to("(dry run) package written to package.tar.gz")
"(dry run) package written to package.tar.gz".green().bold()
);
return Ok(());
}
let mut request = reqwest
.post(format!("{}/v1/packages", config.api()))
.post(format!("{}/v0/packages", config.api()))
.body(archive);
if let Some(token) = project.auth_config().tokens().get(index_url) {
@ -696,19 +639,22 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
.context("failed to get response text")?;
match status {
StatusCode::CONFLICT => {
anyhow::bail!("package version already exists");
println!("{}", "package version already exists".red().bold());
}
StatusCode::FORBIDDEN => {
anyhow::bail!("unauthorized to publish under this scope");
println!(
"{}",
"unauthorized to publish under this scope".red().bold()
);
}
StatusCode::BAD_REQUEST => {
anyhow::bail!("invalid package: {text}");
println!("{}: {text}", "invalid package".red().bold());
}
code if !code.is_success() => {
anyhow::bail!("failed to publish package: {code} ({text})");
}
_ => {
println!("{}", SUCCESS_STYLE.apply_to(text));
println!("{text}");
}
}
@ -716,12 +662,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
}
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let refreshed_sources = RefreshedSources::new();
let result = self
.clone()
.run_impl(&project, reqwest.clone(), true, &refreshed_sources)
.await;
let result = self.clone().run_impl(&project, reqwest.clone(), true).await;
if project.workspace_dir().is_some() {
return result;
} else {
@ -731,11 +672,7 @@ info: otherwise, the file was deemed unnecessary, if you don't understand why, p
run_on_workspace_members(&project, |project| {
let reqwest = reqwest.clone();
let this = self.clone();
let refreshed_sources = refreshed_sources.clone();
async move {
this.run_impl(&project, reqwest, false, &refreshed_sources)
.await
}
async move { this.run_impl(&project, reqwest, false).await }
})
.await
.map(|_| ())
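
Before uploading, the index config is consulted so a registry can refuse dependencies on arbitrary git repositories, foreign registries, or Wally indices. A self-contained sketch of that gating with a hypothetical Allowed type (not pesde's actual config API):

#[derive(Debug)]
enum Allowed {
    All,
    None,
    Some(Vec<String>),
}

impl Allowed {
    fn is_allowed(&self, url: &str) -> bool {
        match self {
            Allowed::All => true,
            Allowed::None => false,
            Allowed::Some(list) => list.iter().any(|u| u == url),
        }
    }
}

fn main() {
    // Hypothetical allowlist and dependency URL, for illustration only.
    let git_allowed = Allowed::Some(vec!["https://github.com/acme/repo".into()]);
    let dep_repo = "https://github.com/other/repo";
    if !git_allowed.is_allowed(dep_repo) {
        eprintln!("dependency `{dep_repo}` is not allowed on this index");
    }
}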

View file

@ -1,59 +0,0 @@
use std::str::FromStr;
use anyhow::Context;
use clap::Args;
use crate::cli::{
dep_type_to_key,
style::{INFO_STYLE, SUCCESS_STYLE},
};
use pesde::{
manifest::{Alias, DependencyType},
Project,
};
#[derive(Debug, Args)]
pub struct RemoveCommand {
/// The alias of the package to remove
#[arg(index = 1)]
alias: Alias,
}
impl RemoveCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let mut manifest = toml_edit::DocumentMut::from_str(
&project
.read_manifest()
.await
.context("failed to read manifest")?,
)
.context("failed to parse manifest")?;
let Some(dep_key) = DependencyType::VARIANTS
.iter()
.copied()
.map(dep_type_to_key)
.find(|dependency_key| {
manifest[dependency_key]
.as_table_mut()
.is_some_and(|table| table.remove(self.alias.as_str()).is_some())
})
else {
anyhow::bail!("package under alias `{}` not found in manifest", self.alias)
};
project
.write_manifest(manifest.to_string())
.await
.context("failed to write manifest")?;
println!(
"{} removed {} from {}!",
SUCCESS_STYLE.apply_to("success!"),
INFO_STYLE.apply_to(self.alias),
INFO_STYLE.apply_to(dep_key)
);
Ok(())
}
}
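For reference, a minimal sketch of the `toml_edit` removal pattern the deleted command relied on; the `dependencies` table and `foo` entry here are hypothetical, not from this repository:

use std::str::FromStr;

fn remove_foo() -> anyhow::Result<()> {
    // parse the manifest, drop the entry, and keep the remaining formatting intact
    let mut doc = toml_edit::DocumentMut::from_str("[dependencies]\nfoo = \"1.0\"\n")?;
    let removed = doc["dependencies"]
        .as_table_mut()
        .is_some_and(|table| table.remove("foo").is_some());
    assert!(removed);
    assert_eq!(doc.to_string(), "[dependencies]\n");
    Ok(())
}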

View file

@@ -3,16 +3,14 @@ use anyhow::Context;
use clap::Args;
use futures::{StreamExt, TryStreamExt};
use pesde::{
errors::{ManifestReadError, WorkspaceMembersError},
linking::generator::generate_bin_linking_module,
names::{PackageName, PackageNames},
source::traits::{GetTargetOptions, PackageRef, PackageSource, RefreshOptions},
Project, MANIFEST_FILE_NAME,
Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME,
};
use relative_path::RelativePathBuf;
use std::{
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::Path, process::Command,
sync::Arc,
collections::HashSet, env::current_dir, ffi::OsString, io::Write, path::PathBuf,
process::Command,
};
#[derive(Debug, Args)]
@@ -28,7 +26,7 @@ pub struct RunCommand {
impl RunCommand {
pub async fn run(self, project: Project) -> anyhow::Result<()> {
let run = |root: &Path, file_path: &Path| {
let run = |root: PathBuf, file_path: PathBuf| {
let mut caller = tempfile::NamedTempFile::new().expect("failed to create tempfile");
caller
.write_all(
@@ -57,8 +55,8 @@ impl RunCommand {
let Some(package_or_script) = self.package_or_script else {
if let Some(script_path) = project.deser_manifest().await?.target.bin_path() {
run(
project.package_dir(),
&script_path.to_path(project.package_dir()),
project.package_dir().to_owned(),
script_path.to_path(project.package_dir()),
);
return Ok(());
}
@@ -75,61 +73,42 @@ impl RunCommand {
let pkg_name = PackageNames::Pesde(pkg_name);
let mut versions = graph
.into_iter()
.filter(|(id, node)| *id.name() == pkg_name && node.direct.is_some())
.collect::<Vec<_>>();
for (version_id, node) in graph.get(&pkg_name).context("package not found in graph")? {
if node.node.direct.is_none() {
continue;
}
let (id, node) = match versions.len() {
0 => anyhow::bail!("package not found"),
1 => versions.pop().unwrap(),
_ => anyhow::bail!("multiple versions found. use the package's alias instead."),
};
let container_folder = node.container_folder_from_project(
&id,
&project,
project
.deser_manifest()
.await
.context("failed to deserialize manifest")?
.target
.kind(),
);
let source = node.pkg_ref.source();
source
.refresh(&RefreshOptions {
project: project.clone(),
})
.await
.context("failed to refresh source")?;
let target = source
.get_target(
&node.pkg_ref,
&GetTargetOptions {
project,
path: Arc::from(container_folder.as_path()),
id: Arc::new(id),
},
)
.await?;
let Some(bin_path) = target.bin_path() else {
let Some(bin_path) = node.target.bin_path() else {
anyhow::bail!("package has no bin path");
};
let base_folder = project
.deser_manifest()
.await?
.target
.kind()
.packages_folder(version_id.target());
let container_folder = node.node.container_folder(
&project
.package_dir()
.join(base_folder)
.join(PACKAGES_CONTAINER_NAME),
&pkg_name,
version_id.version(),
);
let path = bin_path.to_path(&container_folder);
run(&path, &path);
run(path.clone(), path);
return Ok(());
}
}
if let Ok(manifest) = project.deser_manifest().await {
if let Some(script_path) = manifest.scripts.get(&package_or_script) {
run(
project.package_dir(),
&script_path.to_path(project.package_dir()),
project.package_dir().to_path_buf(),
script_path.to_path(project.package_dir()),
);
return Ok(());
}
@@ -146,9 +125,9 @@ impl RunCommand {
.workspace_dir()
.unwrap_or_else(|| project.package_dir());
let members = match project.workspace_members(false).await {
let members = match project.workspace_members(workspace_dir, false).await {
Ok(members) => members.boxed(),
Err(WorkspaceMembersError::ManifestParse(ManifestReadError::Io(e)))
Err(pesde::errors::WorkspaceMembersError::ManifestMissing(e))
if e.kind() == std::io::ErrorKind::NotFound =>
{
futures::stream::empty().boxed()
@@ -191,7 +170,7 @@ impl RunCommand {
project.package_dir().to_path_buf()
};
run(&root, &path);
run(root, path);
Ok(())
}
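The `run` closure above follows a write-a-temporary-caller-then-execute pattern; below is a stripped-down sketch, where the shim body and the `lune` invocation are assumptions rather than the exact generated code:

use std::{io::Write, process::Command};

fn exec_via_caller() -> anyhow::Result<()> {
    // write a throwaway entrypoint that requires the real target script
    let mut caller = tempfile::NamedTempFile::new()?;
    writeln!(caller, "require(\"./target.luau\")")?;
    // hand the temp file to the runtime; dropping `caller` deletes it afterwards
    Command::new("lune").arg("run").arg(caller.path()).status()?;
    Ok(())
}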

View file

@@ -1,13 +1,8 @@
use crate::cli::{
style::{ADDED_STYLE, CLI_STYLE},
version::replace_pesde_bin_exe,
HOME_DIR,
};
use crate::cli::{version::update_bin_exe, HOME_DIR};
use anyhow::Context;
use clap::Args;
use console::style;
use colored::Colorize;
use std::env::current_exe;
#[derive(Debug, Args)]
pub struct SelfInstallCommand {
/// Skip adding the bin directory to the PATH
@@ -21,14 +16,15 @@ impl SelfInstallCommand {
#[cfg(windows)]
{
if !self.skip_add_to_path {
use crate::cli::style::WARN_STYLE;
use anyhow::Context;
use windows_registry::CURRENT_USER;
use winreg::{enums::HKEY_CURRENT_USER, RegKey};
let env = CURRENT_USER
.create("Environment")
.context("failed to open Environment key")?;
let path = env.get_string("Path").context("failed to get Path value")?;
let current_user = RegKey::predef(HKEY_CURRENT_USER);
let env = current_user
.create_subkey("Environment")
.context("failed to open Environment key")?
.0;
let path: String = env.get_value("Path").context("failed to get Path value")?;
let bin_dir = crate::cli::bin_dir().await?;
let bin_dir = bin_dir.to_string_lossy();
@@ -37,21 +33,23 @@ impl SelfInstallCommand {
if !exists {
let new_path = format!("{path};{bin_dir}");
env.set_string("Path", &new_path)
env.set_value("Path", &new_path)
.context("failed to set Path value")?;
println!(
"\nin order to allow proper functionality {} was added to PATH.\n\n{}",
style(format!("`~/{HOME_DIR}/bin`")).green(),
WARN_STYLE.apply_to("please restart your shell for this to take effect")
"\nin order to allow binary exports as executables {}.\n\n{}",
format!("`~/{HOME_DIR}/bin` was added to PATH").green(),
"please restart your shell for this to take effect"
.yellow()
.bold()
);
}
}
println!(
"installed {} {}!",
CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")),
env!("CARGO_BIN_NAME").cyan(),
env!("CARGO_PKG_VERSION").yellow(),
);
}
@@ -64,13 +62,15 @@ impl SelfInstallCommand {
and then restart your shell.
"#,
CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
ADDED_STYLE.apply_to(env!("CARGO_PKG_VERSION")),
style(format!(r#"export PATH="$PATH:$HOME/{HOME_DIR}/bin""#)).green(),
env!("CARGO_BIN_NAME").cyan(),
env!("CARGO_PKG_VERSION").yellow(),
format!(r#"export PATH="$PATH:~/{HOME_DIR}/bin""#)
.bold()
.green()
);
}
replace_pesde_bin_exe(&current_exe().context("failed to get current exe path")?).await?;
update_bin_exe(&current_exe().context("failed to get current exe path")?).await?;
Ok(())
}

View file

@@ -1,17 +1,13 @@
use crate::{
cli::{
use crate::cli::{
config::read_config,
style::{ADDED_STYLE, CLI_STYLE, REMOVED_STYLE},
version::{
current_version, find_latest_version, get_or_download_engine, replace_pesde_bin_exe,
current_version, get_or_download_version, get_remote_version, no_build_metadata,
update_bin_exe, TagInfo, VersionType,
},
},
util::no_build_metadata,
};
use anyhow::Context;
use clap::Args;
use pesde::engine::EngineKind;
use semver::VersionReq;
use colored::Colorize;
#[derive(Debug, Args)]
pub struct SelfUpgradeCommand {
@@ -29,7 +25,7 @@ impl SelfUpgradeCommand {
.context("no cached version found")?
.1
} else {
find_latest_version(&reqwest).await?
get_remote_version(&reqwest, VersionType::Latest).await?
};
let latest_version_no_metadata = no_build_metadata(&latest_version);
@@ -39,25 +35,21 @@ impl SelfUpgradeCommand {
return Ok(());
}
let display_latest_version = ADDED_STYLE.apply_to(latest_version_no_metadata);
let display_latest_version = latest_version_no_metadata.to_string().yellow().bold();
let confirmed = inquire::prompt_confirmation(format!(
if !inquire::prompt_confirmation(format!(
"are you sure you want to upgrade {} from {} to {display_latest_version}?",
CLI_STYLE.apply_to(env!("CARGO_BIN_NAME")),
REMOVED_STYLE.apply_to(env!("CARGO_PKG_VERSION"))
))?;
if !confirmed {
env!("CARGO_BIN_NAME").cyan(),
env!("CARGO_PKG_VERSION").yellow().bold()
))? {
println!("cancelled upgrade");
return Ok(());
}
let path = get_or_download_engine(
&reqwest,
EngineKind::Pesde,
VersionReq::parse(&format!("={latest_version}")).unwrap(),
)
.await?;
replace_pesde_bin_exe(&path).await?;
let path = get_or_download_version(&reqwest, &TagInfo::Complete(latest_version), true)
.await?
.unwrap();
update_bin_exe(&path).await?;
println!("upgraded to version {display_latest_version}!");

View file

@@ -1,48 +1,85 @@
use crate::cli::{
install::{install, InstallOptions},
run_on_workspace_members,
};
use crate::cli::{progress_bar, run_on_workspace_members};
use anyhow::Context;
use clap::Args;
use pesde::Project;
use std::num::NonZeroUsize;
use colored::Colorize;
use pesde::{lockfile::Lockfile, Project};
use std::{collections::HashSet, sync::Arc};
use tokio::sync::Mutex;
#[derive(Debug, Args, Copy, Clone)]
pub struct UpdateCommand {
/// Update the dependencies but don't install them
#[arg(long)]
no_install: bool,
/// The maximum number of concurrent network requests
#[arg(long, default_value = "16")]
network_concurrency: NonZeroUsize,
/// Whether to re-install all dependencies even if they are already installed
#[arg(long)]
force: bool,
}
pub struct UpdateCommand {}
impl UpdateCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let options = InstallOptions {
locked: false,
prod: false,
write: !self.no_install,
network_concurrency: self.network_concurrency,
use_lockfile: false,
force: self.force,
};
let mut refreshed_sources = HashSet::new();
install(&options, &project, reqwest.clone(), true).await?;
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
run_on_workspace_members(&project, |project| {
let reqwest = reqwest.clone();
async move {
install(&options, &project, reqwest, false).await?;
Ok(())
}
})
println!(
"\n{}\n",
format!("[now updating {} {}]", manifest.name, manifest.target)
.bold()
.on_bright_black()
);
let graph = project
.dependency_graph(None, &mut refreshed_sources, false)
.await
.context("failed to build dependency graph")?;
let graph = Arc::new(graph);
project
.write_lockfile(Lockfile {
name: manifest.name,
version: manifest.version,
target: manifest.target.kind(),
overrides: manifest.overrides,
graph: {
let (rx, downloaded_graph) = project
.download_and_link(
&graph,
&Arc::new(Mutex::new(refreshed_sources)),
&reqwest,
false,
false,
|_| async { Ok::<_, std::io::Error>(()) },
)
.await
.context("failed to download dependencies")?;
progress_bar(
graph.values().map(|versions| versions.len() as u64).sum(),
rx,
"📥 ".to_string(),
"downloading dependencies".to_string(),
"downloaded dependencies".to_string(),
)
.await?;
downloaded_graph
.await
.context("failed to download dependencies")?
},
workspace: run_on_workspace_members(&project, |project| {
let reqwest = reqwest.clone();
async move { Box::pin(self.run(project, reqwest)).await }
})
.await?,
})
.await
.context("failed to write lockfile")?;
println!(
"\n\n{}. run `{} install` in order to install the new dependencies",
"✅ done".green(),
env!("CARGO_BIN_NAME")
);
Ok(())
}
}

View file

@@ -1,148 +0,0 @@
use crate::cli::{get_index, style::SUCCESS_STYLE};
use anyhow::Context;
use clap::Args;
use pesde::{
manifest::target::TargetKind,
names::PackageName,
source::{
pesde::PesdePackageSource,
traits::{PackageSource, RefreshOptions},
},
Project,
};
use reqwest::{header::AUTHORIZATION, Method, StatusCode};
use semver::Version;
use std::{fmt::Display, str::FromStr};
#[derive(Debug, Clone)]
enum TargetKindOrAll {
All,
Specific(TargetKind),
}
impl Display for TargetKindOrAll {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
TargetKindOrAll::All => write!(f, "all"),
TargetKindOrAll::Specific(kind) => write!(f, "{kind}"),
}
}
}
impl FromStr for TargetKindOrAll {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.eq_ignore_ascii_case("all") {
return Ok(TargetKindOrAll::All);
}
s.parse()
.map(TargetKindOrAll::Specific)
.context("failed to parse target kind")
}
}
#[derive(Debug, Clone)]
struct YankId(PackageName, Version, TargetKindOrAll);
impl FromStr for YankId {
type Err = anyhow::Error;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (package, version) = s
.split_once('@')
.context("package is not in format of `scope/name@version target`")?;
let target = match version.split(' ').nth(1) {
Some(target) => target
.parse()
.context("package is not in format of `scope/name@version target`")?,
None => TargetKindOrAll::All,
};
Ok(YankId(
package.parse().context("failed to parse package name")?,
version.parse().context("failed to parse version")?,
target,
))
}
}
#[derive(Debug, Args)]
pub struct YankCommand {
/// Whether to unyank the package
#[clap(long)]
undo: bool,
/// The index to yank the package from
#[clap(short, long)]
index: Option<String>,
/// The package to yank
#[clap(index = 1)]
package: YankId,
}
impl YankCommand {
pub async fn run(self, project: Project, reqwest: reqwest::Client) -> anyhow::Result<()> {
let YankId(package, version, target) = self.package;
let index_url = get_index(&project, self.index.as_deref()).await?;
let source = PesdePackageSource::new(index_url.clone());
source
.refresh(&RefreshOptions {
project: project.clone(),
})
.await
.context("failed to refresh source")?;
let config = source
.config(&project)
.await
.context("failed to get index config")?;
let mut request = reqwest.request(
if self.undo {
Method::DELETE
} else {
Method::PUT
},
format!(
"{}/v1/packages/{}/{}/{}/yank",
config.api(),
urlencoding::encode(&package.to_string()),
urlencoding::encode(&version.to_string()),
urlencoding::encode(&target.to_string()),
),
);
if let Some(token) = project.auth_config().tokens().get(&index_url) {
tracing::debug!("using token for {index_url}");
request = request.header(AUTHORIZATION, token);
}
let response = request.send().await.context("failed to send request")?;
let status = response.status();
let text = response
.text()
.await
.context("failed to get response text")?;
let prefix = if self.undo { "un" } else { "" };
match status {
StatusCode::CONFLICT => {
anyhow::bail!("version is already {prefix}yanked");
}
StatusCode::FORBIDDEN => {
anyhow::bail!("unauthorized to {prefix}yank under this scope");
}
code if !code.is_success() => {
anyhow::bail!("failed to {prefix}yank package: {code} ({text})");
}
_ => {
println!("{}", SUCCESS_STYLE.apply_to(text));
}
}
Ok(())
}
}
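As a quick illustration of the deleted command's input format: `YankId` parses `scope/name@version` with an optional space-separated target, defaulting to all targets. The values below are hypothetical:

let id: YankId = "acme/foo@1.2.3".parse()?; // no target given -> TargetKindOrAll::All
// the request above would then target {api}/v1/packages/acme%2Ffoo/1.2.3/all/yank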

View file

@@ -16,7 +16,7 @@ pub struct CliConfig {
pub tokens: Tokens,
#[serde(default, skip_serializing_if = "Option::is_none")]
pub last_checked_updates: Option<(jiff::Timestamp, semver::Version)>,
pub last_checked_updates: Option<(chrono::DateTime<chrono::Utc>, semver::Version)>,
}
impl Default for CliConfig {
@@ -24,7 +24,7 @@ impl Default for CliConfig {
Self {
default_index: "https://github.com/pesde-pkg/index".try_into().unwrap(),
tokens: Tokens::default(),
tokens: Tokens(Default::default()),
last_checked_updates: None,
}

View file

@@ -1,591 +0,0 @@
use super::files::make_executable;
use crate::cli::{
bin_dir, dep_type_to_key,
reporters::{self, CliReporter},
resolve_overrides, run_on_workspace_members,
style::{ADDED_STYLE, REMOVED_STYLE, WARN_PREFIX},
up_to_date_lockfile,
};
use anyhow::Context;
use console::style;
use fs_err::tokio as fs;
use pesde::{
download_and_link::{DownloadAndLinkHooks, DownloadAndLinkOptions},
engine::EngineKind,
graph::{DependencyGraph, DependencyGraphWithTarget},
lockfile::Lockfile,
manifest::{target::TargetKind, Alias, DependencyType, Manifest},
names::PackageNames,
source::{
pesde::PesdePackageSource,
refs::PackageRefs,
traits::{PackageRef, RefreshOptions},
PackageSources,
},
version_matches, Project, RefreshedSources, LOCKFILE_FILE_NAME, MANIFEST_FILE_NAME,
};
use std::{
collections::{BTreeMap, BTreeSet, HashMap},
num::NonZeroUsize,
sync::Arc,
time::Instant,
};
use tokio::task::JoinSet;
fn bin_link_file(alias: &Alias) -> String {
let mut all_combinations = BTreeSet::new();
for a in TargetKind::VARIANTS {
for b in TargetKind::VARIANTS {
all_combinations.insert((*a, *b));
}
}
let all_folders = all_combinations
.into_iter()
.map(|(a, b)| format!("{:?}", a.packages_folder(b)))
.collect::<BTreeSet<_>>()
.into_iter()
.collect::<Vec<_>>()
.join(", ");
format!(
include_str!("bin_link.luau"),
alias = alias,
all_folders = all_folders,
MANIFEST_FILE_NAME = MANIFEST_FILE_NAME,
LOCKFILE_FILE_NAME = LOCKFILE_FILE_NAME
)
}
pub struct InstallHooks {
pub bin_folder: std::path::PathBuf,
}
#[derive(Debug, thiserror::Error)]
#[error(transparent)]
pub struct InstallHooksError(#[from] anyhow::Error);
impl DownloadAndLinkHooks for InstallHooks {
type Error = InstallHooksError;
async fn on_bins_downloaded(
&self,
graph: &DependencyGraphWithTarget,
) -> Result<(), Self::Error> {
let mut tasks = graph
.values()
.filter(|node| node.target.bin_path().is_some())
.filter_map(|node| node.node.direct.as_ref())
.map(|(alias, _, _)| {
let bin_folder = self.bin_folder.clone();
let alias = alias.clone();
async move {
let bin_exec_file = bin_folder
.join(alias.as_str())
.with_extension(std::env::consts::EXE_EXTENSION);
let impl_folder = bin_folder.join(".impl");
fs::create_dir_all(&impl_folder)
.await
.context("failed to create bin link folder")?;
let bin_file = impl_folder.join(alias.as_str()).with_extension("luau");
fs::write(&bin_file, bin_link_file(&alias))
.await
.context("failed to write bin link file")?;
#[cfg(windows)]
match fs::symlink_file(
std::env::current_exe().context("failed to get current executable path")?,
&bin_exec_file,
)
.await
{
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => {}
e => e.context("failed to copy bin link file")?,
}
#[cfg(not(windows))]
fs::write(
&bin_exec_file,
format!(
r#"#!/bin/sh
exec lune run "$(dirname "$0")/.impl/{alias}.luau" -- "$@""#
),
)
.await
.context("failed to link bin link file")?;
make_executable(&bin_exec_file)
.await
.context("failed to make bin link file executable")?;
Ok::<_, anyhow::Error>(())
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
Ok(())
}
}
#[derive(Debug, Clone, Copy)]
pub struct InstallOptions {
pub locked: bool,
pub prod: bool,
pub write: bool,
pub use_lockfile: bool,
pub network_concurrency: NonZeroUsize,
pub force: bool,
}
pub async fn install(
options: &InstallOptions,
project: &Project,
reqwest: reqwest::Client,
is_root: bool,
) -> anyhow::Result<()> {
let start = Instant::now();
let refreshed_sources = RefreshedSources::new();
let manifest = project
.deser_manifest()
.await
.context("failed to read manifest")?;
let mut has_irrecoverable_changes = false;
let lockfile = if options.locked {
match up_to_date_lockfile(project).await? {
None => {
anyhow::bail!(
"lockfile is out of sync, run `{} install` to update it",
env!("CARGO_BIN_NAME")
);
}
file => file,
}
} else {
match project.deser_lockfile().await {
Ok(lockfile) => {
if lockfile.overrides != resolve_overrides(&manifest)? {
tracing::debug!("overrides are different");
has_irrecoverable_changes = true;
None
} else if lockfile.target != manifest.target.kind() {
tracing::debug!("target kind is different");
has_irrecoverable_changes = true;
None
} else {
Some(lockfile)
}
}
Err(pesde::errors::LockfileReadError::Io(e))
if e.kind() == std::io::ErrorKind::NotFound =>
{
None
}
Err(e) => return Err(e.into()),
}
};
let overrides = resolve_overrides(&manifest)?;
let (new_lockfile, old_graph) =
reporters::run_with_reporter(|multi, root_progress, reporter| async {
let multi = multi;
let root_progress = root_progress;
root_progress.set_prefix(format!("{} {}: ", manifest.name, manifest.target));
#[cfg(feature = "version-management")]
{
root_progress.reset();
root_progress.set_message("update engine linkers");
let mut tasks = manifest
.engines
.keys()
.map(|engine| crate::cli::version::make_linker_if_needed(*engine))
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
}
root_progress.reset();
root_progress.set_message("resolve");
let old_graph = lockfile.map(|lockfile| lockfile.graph);
let graph = project
.dependency_graph(
old_graph.as_ref().filter(|_| options.use_lockfile),
refreshed_sources.clone(),
false,
)
.await
.context("failed to build dependency graph")?;
let mut tasks = graph
.iter()
.filter_map(|(id, node)| {
let PackageSources::Pesde(source) = node.pkg_ref.source() else {
return None;
};
#[allow(irrefutable_let_patterns)]
let PackageNames::Pesde(name) = id.name().clone() else {
panic!("unexpected package name");
};
let project = project.clone();
let refreshed_sources = refreshed_sources.clone();
Some(async move {
refreshed_sources
.refresh(
&PackageSources::Pesde(source.clone()),
&RefreshOptions {
project: project.clone(),
},
)
.await
.context("failed to refresh source")?;
let file = source.read_index_file(&name, &project)
.await
.context("failed to read package index file")?
.context("package not found in index")?;
Ok::<_, anyhow::Error>(if file.meta.deprecated.is_empty() {
None
} else {
Some((name, file.meta.deprecated))
})
})
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let Some((name, reason)) = task.unwrap()? else {
continue;
};
multi.suspend(|| {
println!("{WARN_PREFIX}: package {name} is deprecated: {reason}");
});
}
let graph = Arc::new(graph);
if options.write {
root_progress.reset();
root_progress.set_length(0);
root_progress.set_message("download");
root_progress.set_style(reporters::root_progress_style_with_progress());
let hooks = InstallHooks {
bin_folder: bin_dir().await?,
};
#[allow(unused_variables)]
let downloaded_graph = project
.download_and_link(
&graph,
DownloadAndLinkOptions::<CliReporter, InstallHooks>::new(reqwest.clone())
.reporter(reporter)
.hooks(hooks)
.refreshed_sources(refreshed_sources.clone())
.prod(options.prod)
.network_concurrency(options.network_concurrency)
.force(options.force || has_irrecoverable_changes),
)
.await
.context("failed to download and link dependencies")?;
#[cfg(feature = "version-management")]
{
let mut tasks = manifest
.engines
.into_iter()
.map(|(engine, req)| async move {
Ok::<_, anyhow::Error>(
crate::cli::version::get_installed_versions(engine)
.await?
.into_iter()
.filter(|version| version_matches(&req, version))
.next_back()
.map(|version| (engine, version)),
)
})
.collect::<JoinSet<_>>();
let mut resolved_engine_versions = HashMap::new();
while let Some(task) = tasks.join_next().await {
let Some((engine, version)) = task.unwrap()? else {
continue;
};
resolved_engine_versions.insert(engine, version);
}
let manifest_target_kind = manifest.target.kind();
let mut tasks = downloaded_graph.iter()
.map(|(id, node)| {
let id = id.clone();
let node = node.clone();
let project = project.clone();
let refreshed_sources = refreshed_sources.clone();
async move {
let engines = match &node.node.pkg_ref {
PackageRefs::Pesde(pkg_ref) => {
let source = PesdePackageSource::new(pkg_ref.index_url.clone());
refreshed_sources
.refresh(
&PackageSources::Pesde(source.clone()),
&RefreshOptions {
project: project.clone(),
},
)
.await
.context("failed to refresh source")?;
#[allow(irrefutable_let_patterns)]
let PackageNames::Pesde(name) = id.name() else {
panic!("unexpected package name");
};
let mut file = source.read_index_file(name, &project)
.await
.context("failed to read package index file")?
.context("package not found in index")?;
file
.entries
.remove(id.version_id())
.context("package version not found in index")?
.engines
}
#[cfg(feature = "wally-compat")]
PackageRefs::Wally(_) => Default::default(),
_ => {
let path = node.node.container_folder_from_project(
&id,
&project,
manifest_target_kind,
);
match fs::read_to_string(path.join(MANIFEST_FILE_NAME)).await {
Ok(manifest) => match toml::from_str::<Manifest>(&manifest) {
Ok(manifest) => manifest.engines,
Err(e) => return Err(e).context("failed to read package manifest"),
},
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Default::default(),
Err(e) => return Err(e).context("failed to read package manifest"),
}
}
};
Ok((id, engines))
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let (id, required_engines) = task.unwrap()?;
for (engine, req) in required_engines {
if engine == EngineKind::Pesde {
continue;
}
let Some(version) = resolved_engine_versions.get(&engine) else {
tracing::debug!("package {id} requires {engine} {req}, but it is not installed");
continue;
};
if !version_matches(&req, version) {
multi.suspend(|| {
println!("{WARN_PREFIX}: package {id} requires {engine} {req}, but {version} is installed");
});
}
}
}
}
}
root_progress.reset();
root_progress.set_message("finish");
let new_lockfile = Lockfile {
name: manifest.name.clone(),
version: manifest.version,
target: manifest.target.kind(),
overrides,
graph: Arc::into_inner(graph).unwrap(),
workspace: run_on_workspace_members(project, |_| async { Ok(()) }).await?,
};
project
.write_lockfile(&new_lockfile)
.await
.context("failed to write lockfile")?;
anyhow::Ok((new_lockfile, old_graph.unwrap_or_default()))
})
.await?;
let elapsed = start.elapsed();
if is_root {
println!();
}
print_package_diff(
&format!("{} {}:", manifest.name, manifest.target),
old_graph,
new_lockfile.graph,
);
println!("done in {:.2}s", elapsed.as_secs_f64());
println!();
Ok(())
}
/// Prints the difference between two graphs.
pub fn print_package_diff(prefix: &str, old_graph: DependencyGraph, new_graph: DependencyGraph) {
let mut old_pkg_map = BTreeMap::new();
let mut old_direct_pkg_map = BTreeMap::new();
let mut new_pkg_map = BTreeMap::new();
let mut new_direct_pkg_map = BTreeMap::new();
for (id, node) in &old_graph {
old_pkg_map.insert(id, node);
if node.direct.is_some() {
old_direct_pkg_map.insert(id, node);
}
}
for (id, node) in &new_graph {
new_pkg_map.insert(id, node);
if node.direct.is_some() {
new_direct_pkg_map.insert(id, node);
}
}
let added_pkgs = new_pkg_map
.iter()
.filter(|(key, _)| !old_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node))
.collect::<Vec<_>>();
let removed_pkgs = old_pkg_map
.iter()
.filter(|(key, _)| !new_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node))
.collect::<Vec<_>>();
let added_direct_pkgs = new_direct_pkg_map
.iter()
.filter(|(key, _)| !old_direct_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node))
.collect::<Vec<_>>();
let removed_direct_pkgs = old_direct_pkg_map
.iter()
.filter(|(key, _)| !new_direct_pkg_map.contains_key(*key))
.map(|(key, &node)| (key, node))
.collect::<Vec<_>>();
let prefix = style(prefix).bold();
let no_changes = added_pkgs.is_empty()
&& removed_pkgs.is_empty()
&& added_direct_pkgs.is_empty()
&& removed_direct_pkgs.is_empty();
if no_changes {
println!("{prefix} already up to date");
} else {
let mut change_signs = [
(!added_pkgs.is_empty()).then(|| {
ADDED_STYLE
.apply_to(format!("+{}", added_pkgs.len()))
.to_string()
}),
(!removed_pkgs.is_empty()).then(|| {
REMOVED_STYLE
.apply_to(format!("-{}", removed_pkgs.len()))
.to_string()
}),
]
.into_iter()
.flatten()
.collect::<Vec<_>>()
.join(" ");
let changes_empty = change_signs.is_empty();
if changes_empty {
change_signs = style("(no changes)").dim().to_string();
}
println!("{prefix} {change_signs}");
if !changes_empty {
println!(
"{}{}",
ADDED_STYLE.apply_to("+".repeat(added_pkgs.len())),
REMOVED_STYLE.apply_to("-".repeat(removed_pkgs.len()))
);
}
let dependency_groups = added_direct_pkgs
.iter()
.map(|(key, node)| (true, key, node))
.chain(
removed_direct_pkgs
.iter()
.map(|(key, node)| (false, key, node)),
)
.filter_map(|(added, key, node)| {
node.direct.as_ref().map(|(_, _, ty)| (added, key, ty))
})
.fold(
BTreeMap::<DependencyType, BTreeSet<_>>::new(),
|mut map, (added, key, &ty)| {
map.entry(ty).or_default().insert((key, added));
map
},
);
for (ty, set) in dependency_groups {
println!();
println!(
"{}",
style(format!("{}:", dep_type_to_key(ty))).yellow().bold()
);
for (id, added) in set {
println!(
"{} {} {}",
if added {
ADDED_STYLE.apply_to("+")
} else {
REMOVED_STYLE.apply_to("-")
},
id.name(),
style(id.version_id()).dim()
);
}
}
println!();
}
}

View file

@@ -1,23 +1,13 @@
use crate::cli::{
config::read_config,
style::{ERROR_STYLE, INFO_STYLE, WARN_STYLE},
};
use anyhow::Context;
use colored::Colorize;
use fs_err::tokio as fs;
use futures::StreamExt;
use pesde::{
errors::ManifestReadError,
lockfile::Lockfile,
manifest::{
overrides::{OverrideKey, OverrideSpecifier},
target::TargetKind,
DependencyType, Manifest,
},
manifest::target::TargetKind,
names::{PackageName, PackageNames},
source::{
ids::VersionId, specifiers::DependencySpecifiers, workspace::specifier::VersionTypeOrReq,
},
Project, DEFAULT_INDEX_NAME,
source::{version_id::VersionId, workspace::specifier::VersionTypeOrReq},
Project,
};
use relative_path::RelativePathBuf;
use std::{
@@ -25,6 +15,7 @@ use std::{
future::Future,
path::PathBuf,
str::FromStr,
time::Duration,
};
use tokio::pin;
use tracing::instrument;
@@ -33,9 +24,6 @@ pub mod auth;
pub mod commands;
pub mod config;
pub mod files;
pub mod install;
pub mod reporters;
pub mod style;
#[cfg(feature = "version-management")]
pub mod version;
@@ -55,40 +43,6 @@ pub async fn bin_dir() -> anyhow::Result<PathBuf> {
Ok(bin_dir)
}
pub fn resolve_overrides(
manifest: &Manifest,
) -> anyhow::Result<BTreeMap<OverrideKey, DependencySpecifiers>> {
let mut dependencies = None;
let mut overrides = BTreeMap::new();
for (key, spec) in &manifest.overrides {
overrides.insert(
key.clone(),
match spec {
OverrideSpecifier::Specifier(spec) => spec,
OverrideSpecifier::Alias(alias) => {
if dependencies.is_none() {
dependencies = Some(
manifest
.all_dependencies()
.context("failed to get all dependencies")?,
);
}
&dependencies
.as_ref()
.and_then(|deps| deps.get(alias))
.with_context(|| format!("alias `{alias}` not found in manifest"))?
.0
}
}
.clone(),
);
}
Ok(overrides)
}
#[instrument(skip(project), ret(level = "trace"), level = "debug")]
pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Lockfile>> {
let manifest = project.deser_manifest().await?;
@@ -102,7 +56,7 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
Err(e) => return Err(e.into()),
};
if resolve_overrides(&manifest)? != lockfile.overrides {
if manifest.overrides != lockfile.overrides {
tracing::debug!("overrides are different");
return Ok(None);
}
@@ -120,8 +74,10 @@ pub async fn up_to_date_lockfile(project: &Project) -> anyhow::Result<Option<Loc
let specs = lockfile
.graph
.iter()
.flat_map(|(_, versions)| versions)
.filter_map(|(_, node)| {
node.direct
node.node
.direct
.as_ref()
.map(|(_, spec, source_ty)| (spec, source_ty))
})
@@ -170,32 +126,30 @@ impl VersionedPackageName {
#[cfg(feature = "patches")]
fn get(
self,
graph: &pesde::graph::DependencyGraph,
) -> anyhow::Result<pesde::source::ids::PackageId> {
graph: &pesde::lockfile::DownloadedGraph,
) -> anyhow::Result<(PackageNames, VersionId)> {
let version_id = match self.1 {
Some(version) => version,
None => {
let versions = graph
.keys()
.filter(|id| *id.name() == self.0)
.collect::<Vec<_>>();
match versions.len() {
0 => anyhow::bail!("package not found"),
1 => versions[0].version_id().clone(),
_ => anyhow::bail!(
let versions = graph.get(&self.0).context("package not found in graph")?;
if versions.len() == 1 {
let version = versions.keys().next().unwrap().clone();
tracing::debug!("only one version found, using {version}");
version
} else {
anyhow::bail!(
"multiple versions found, please specify one of: {}",
versions
.iter()
.keys()
.map(|v| v.to_string())
.collect::<Vec<_>>()
.join(", ")
),
);
}
}
};
Ok(pesde::source::ids::PackageId::new(self.0, version_id))
Ok((self.0, version_id))
}
}
@@ -204,7 +158,6 @@ enum AnyPackageIdentifier<V: FromStr = VersionId, N: FromStr = PackageNames> {
PackageName(VersionedPackageName<V, N>),
Url((gix::Url, String)),
Workspace(VersionedPackageName<VersionTypeOrReq, PackageName>),
Path(PathBuf),
}
impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<anyhow::Error>>
@@ -223,8 +176,6 @@ impl<V: FromStr<Err = E>, E: Into<anyhow::Error>, N: FromStr<Err = F>, F: Into<a
)))
} else if let Some(rest) = s.strip_prefix("workspace:") {
Ok(AnyPackageIdentifier::Workspace(rest.parse()?))
} else if let Some(rest) = s.strip_prefix("path:") {
Ok(AnyPackageIdentifier::Path(rest.into()))
} else if s.contains(':') {
let (url, rev) = s.split_once('#').context("missing revision")?;
@@ -242,6 +193,39 @@ pub fn parse_gix_url(s: &str) -> Result<gix::Url, gix::url::parse::Error> {
s.try_into()
}
pub async fn progress_bar<E: std::error::Error + Into<anyhow::Error>>(
len: u64,
mut rx: tokio::sync::mpsc::Receiver<Result<String, E>>,
prefix: String,
progress_msg: String,
finish_msg: String,
) -> anyhow::Result<()> {
let bar = indicatif::ProgressBar::new(len)
.with_style(
indicatif::ProgressStyle::default_bar()
.template("{prefix}[{elapsed_precise}] {bar:40.208/166} {pos}/{len} {msg}")?
.progress_chars("█▓▒░ "),
)
.with_prefix(prefix)
.with_message(progress_msg);
bar.enable_steady_tick(Duration::from_millis(100));
while let Some(result) = rx.recv().await {
bar.inc(1);
match result {
Ok(text) => {
bar.set_message(text);
}
Err(e) => return Err(e.into()),
}
}
bar.finish_with_message(finish_msg);
Ok(())
}
pub fn shift_project_dir(project: &Project, pkg_dir: PathBuf) -> Project {
Project::new(
pkg_dir,
@@ -262,7 +246,9 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
return Ok(Default::default());
}
let members_future = project.workspace_members(true).await?;
let members_future = project
.workspace_members(project.package_dir(), true)
.await?;
pin!(members_future);
let mut results = BTreeMap::<PackageName, BTreeMap<TargetKind, RelativePathBuf>>::new();
@@ -287,17 +273,14 @@ pub async fn run_on_workspace_members<F: Future<Output = anyhow::Result<()>>>(
pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
if let Err(err) = result {
eprintln!(
"{}: {err}\n",
ERROR_STYLE.apply_to(format!("error{prefix}"))
);
eprintln!("{}: {err}\n", format!("error{prefix}").red().bold());
let cause = err.chain().skip(1).collect::<Vec<_>>();
if !cause.is_empty() {
eprintln!("{}:", ERROR_STYLE.apply_to("caused by"));
eprintln!("{}:", "caused by".red().bold());
for err in cause {
eprintln!("\t- {err}");
eprintln!(" - {err}");
}
}
@@ -306,54 +289,15 @@ pub fn display_err(result: anyhow::Result<()>, prefix: &str) {
std::backtrace::BacktraceStatus::Disabled => {
eprintln!(
"\n{}: set RUST_BACKTRACE=1 for a backtrace",
INFO_STYLE.apply_to("help")
"help".yellow().bold()
);
}
std::backtrace::BacktraceStatus::Captured => {
eprintln!("\n{}:\n{backtrace}", WARN_STYLE.apply_to("backtrace"));
eprintln!("\n{}:\n{backtrace}", "backtrace".yellow().bold());
}
_ => {
eprintln!("\n{}: not captured", WARN_STYLE.apply_to("backtrace"));
eprintln!("\n{}: not captured", "backtrace".yellow().bold());
}
}
}
}
pub async fn get_index(project: &Project, index: Option<&str>) -> anyhow::Result<gix::Url> {
let manifest = match project.deser_manifest().await {
Ok(manifest) => Some(manifest),
Err(e) => match e {
ManifestReadError::Io(e) if e.kind() == std::io::ErrorKind::NotFound => None,
e => return Err(e.into()),
},
};
let index_url = match index {
Some(index) => index.try_into().ok(),
None => match manifest {
Some(_) => None,
None => Some(read_config().await?.default_index),
},
};
match index_url {
Some(url) => Ok(url),
None => {
let index_name = index.unwrap_or(DEFAULT_INDEX_NAME);
manifest
.unwrap()
.indices
.remove(index_name)
.with_context(|| format!("index {index_name} not found in manifest"))
}
}
}
pub fn dep_type_to_key(dep_type: DependencyType) -> &'static str {
match dep_type {
DependencyType::Standard => "dependencies",
DependencyType::Dev => "dev_dependencies",
DependencyType::Peer => "peer_dependencies",
}
}
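For reference, a minimal sketch of driving the `progress_bar` helper added earlier in this file; the package names and count below are made up:

let (tx, rx) = tokio::sync::mpsc::channel::<Result<String, std::io::Error>>(16);
tokio::spawn(async move {
    for name in ["foo", "bar", "baz"] {
        // each Ok(..) advances the bar by one and becomes its new message
        tx.send(Ok(name.to_string())).await.ok();
    }
});
progress_bar(
    3,
    rx,
    "📥 ".to_string(),
    "downloading dependencies".to_string(),
    "downloaded dependencies".to_string(),
)
.await?;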

View file

@@ -1,211 +0,0 @@
//! Progress reporters for the CLI
use std::{
future::Future,
io::{Stdout, Write},
sync::{Arc, Mutex, Once, OnceLock},
time::Duration,
};
use indicatif::{MultiProgress, ProgressBar, ProgressStyle};
use pesde::reporters::{
DownloadProgressReporter, DownloadsReporter, PatchProgressReporter, PatchesReporter,
};
pub const TICK_CHARS: &str = "⣷⣯⣟⡿⢿⣻⣽⣾";
pub fn root_progress_style() -> ProgressStyle {
ProgressStyle::with_template("{prefix:.dim}{msg:>8.214/yellow} {spinner} [{elapsed_precise}]")
.unwrap()
.tick_chars(TICK_CHARS)
}
pub fn root_progress_style_with_progress() -> ProgressStyle {
ProgressStyle::with_template(
"{prefix:.dim}{msg:>8.214/yellow} {spinner} [{elapsed_precise}] {bar:20} {pos}/{len}",
)
.unwrap()
.tick_chars(TICK_CHARS)
}
pub async fn run_with_reporter_and_writer<W, F, R, Fut>(writer: W, f: F) -> R
where
W: Write + Send + Sync + 'static,
F: FnOnce(MultiProgress, ProgressBar, Arc<CliReporter<W>>) -> Fut,
Fut: Future<Output = R>,
{
let multi_progress = MultiProgress::new();
crate::PROGRESS_BARS
.lock()
.unwrap()
.replace(multi_progress.clone());
let root_progress = multi_progress.add(ProgressBar::new(0));
root_progress.set_style(root_progress_style());
root_progress.enable_steady_tick(Duration::from_millis(100));
let reporter = Arc::new(CliReporter::with_writer(
writer,
multi_progress.clone(),
root_progress.clone(),
));
let result = f(multi_progress.clone(), root_progress.clone(), reporter).await;
root_progress.finish();
multi_progress.clear().unwrap();
crate::PROGRESS_BARS.lock().unwrap().take();
result
}
pub async fn run_with_reporter<F, R, Fut>(f: F) -> R
where
F: FnOnce(MultiProgress, ProgressBar, Arc<CliReporter<Stdout>>) -> Fut,
Fut: Future<Output = R>,
{
run_with_reporter_and_writer(std::io::stdout(), f).await
}
pub struct CliReporter<W = Stdout> {
writer: Mutex<W>,
child_style: ProgressStyle,
child_style_with_bytes: ProgressStyle,
child_style_with_bytes_without_total: ProgressStyle,
multi_progress: MultiProgress,
root_progress: ProgressBar,
}
impl<W> CliReporter<W> {
#[allow(unknown_lints, clippy::literal_string_with_formatting_args)]
pub fn with_writer(
writer: W,
multi_progress: MultiProgress,
root_progress: ProgressBar,
) -> Self {
Self {
writer: Mutex::new(writer),
child_style: ProgressStyle::with_template("{msg:.dim}").unwrap(),
child_style_with_bytes: ProgressStyle::with_template(
"{msg:.dim} {bytes:.dim}/{total_bytes:.dim}",
)
.unwrap(),
child_style_with_bytes_without_total: ProgressStyle::with_template(
"{msg:.dim} {bytes:.dim}",
)
.unwrap(),
multi_progress,
root_progress,
}
}
}
pub struct CliDownloadProgressReporter<W> {
root_reporter: Arc<CliReporter<W>>,
name: String,
progress: OnceLock<ProgressBar>,
set_progress: Once,
}
impl<W: Write + Send + Sync + 'static> DownloadsReporter for CliReporter<W> {
type DownloadProgressReporter = CliDownloadProgressReporter<W>;
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {
self.root_progress.inc_length(1);
CliDownloadProgressReporter {
root_reporter: self,
name,
progress: OnceLock::new(),
set_progress: Once::new(),
}
}
}
impl<W: Write + Send + Sync + 'static> DownloadProgressReporter for CliDownloadProgressReporter<W> {
fn report_start(&self) {
let progress = self.root_reporter.multi_progress.add(ProgressBar::new(0));
progress.set_style(self.root_reporter.child_style.clone());
progress.set_message(format!("- {}", self.name));
self.progress
.set(progress)
.expect("report_start called more than once");
}
fn report_progress(&self, total: u64, len: u64) {
if let Some(progress) = self.progress.get() {
progress.set_length(total);
progress.set_position(len);
self.set_progress.call_once(|| {
if total > 0 {
progress.set_style(self.root_reporter.child_style_with_bytes.clone());
} else {
progress.set_style(
self.root_reporter
.child_style_with_bytes_without_total
.clone(),
);
}
});
}
}
fn report_done(&self) {
if let Some(progress) = self.progress.get() {
if progress.is_hidden() {
writeln!(
self.root_reporter.writer.lock().unwrap(),
"downloaded {}",
self.name
)
.unwrap();
}
progress.finish();
self.root_reporter.multi_progress.remove(progress);
self.root_reporter.root_progress.inc(1);
}
}
}
pub struct CliPatchProgressReporter<W> {
root_reporter: Arc<CliReporter<W>>,
name: String,
progress: ProgressBar,
}
impl<W: Write + Send + Sync + 'static> PatchesReporter for CliReporter<W> {
type PatchProgressReporter = CliPatchProgressReporter<W>;
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter {
let progress = self.multi_progress.add(ProgressBar::new(0));
progress.set_style(self.child_style.clone());
progress.set_message(format!("- {name}"));
self.root_progress.inc_length(1);
CliPatchProgressReporter {
root_reporter: self,
name: name.to_string(),
progress,
}
}
}
impl<W: Write + Send + Sync + 'static> PatchProgressReporter for CliPatchProgressReporter<W> {
fn report_done(&self) {
if self.progress.is_hidden() {
writeln!(
self.root_reporter.writer.lock().unwrap(),
"patched {}",
self.name
)
.unwrap();
}
self.progress.finish();
self.root_reporter.multi_progress.remove(&self.progress);
self.root_reporter.root_progress.inc(1);
}
}

View file

@@ -1,54 +0,0 @@
use console::{Style, StyledObject};
use paste::paste;
use std::{fmt::Display, sync::LazyLock};
#[derive(Debug)]
pub struct LazyStyle<T>(LazyLock<T>);
impl LazyStyle<Style> {
pub fn apply_to<D>(&self, text: D) -> StyledObject<D> {
LazyLock::force(&self.0).apply_to(text)
}
}
impl<T: Display> Display for LazyStyle<T> {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(f, "{}", LazyLock::force(&self.0))
}
}
macro_rules! make_style {
($name:ident, $color:ident) => {
make_style!($name, $color());
};
($name:ident, $($color:tt)+) => {
paste! {
pub static [<$name _STYLE>]: LazyStyle<Style> = LazyStyle(LazyLock::new(||
Style::new().$($color)+.bold()
));
}
};
}
macro_rules! make_prefix {
($name:ident) => {
paste! {
pub static [<$name:upper _PREFIX>]: LazyStyle<StyledObject<&'static str>> = LazyStyle(LazyLock::new(||
[<$name:upper _STYLE>].apply_to(stringify!($name))
));
}
};
}
pub const CLI_COLOR_256: u8 = 214;
make_style!(INFO, cyan);
make_style!(WARN, yellow);
make_prefix!(warn);
make_style!(ERROR, red);
make_prefix!(error);
make_style!(SUCCESS, green);
make_style!(CLI, color256(CLI_COLOR_256));
make_style!(ADDED, green);
make_style!(REMOVED, red);
make_style!(URL, blue().underlined());
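Roughly, `make_style!(INFO, cyan)` above expands via `paste!` to:

pub static INFO_STYLE: LazyStyle<Style> =
    LazyStyle(LazyLock::new(|| Style::new().cyan().bold()));

and `make_prefix!(warn)` builds `WARN_PREFIX` by applying `WARN_STYLE` to the literal "warn".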

View file

@@ -1,77 +1,113 @@
use crate::{
cli::{
use crate::cli::{
bin_dir,
config::{read_config, write_config, CliConfig},
files::make_executable,
home_dir,
reporters::run_with_reporter,
style::{ADDED_STYLE, CLI_STYLE, REMOVED_STYLE, URL_STYLE},
},
util::no_build_metadata,
};
use anyhow::Context;
use console::Style;
use colored::Colorize;
use fs_err::tokio as fs;
use jiff::SignedDuration;
use pesde::{
engine::{
source::{
traits::{DownloadOptions, EngineSource, ResolveOptions},
EngineSources,
},
EngineKind,
},
reporters::DownloadsReporter,
version_matches,
};
use semver::{Version, VersionReq};
use futures::StreamExt;
use reqwest::header::ACCEPT;
use semver::Version;
use serde::Deserialize;
use std::{
collections::BTreeSet,
env::current_exe,
path::{Path, PathBuf},
sync::Arc,
};
use tokio::io::AsyncWrite;
use tracing::instrument;
pub fn current_version() -> Version {
Version::parse(env!("CARGO_PKG_VERSION")).unwrap()
}
const CHECK_INTERVAL: SignedDuration = SignedDuration::from_hours(6);
pub async fn find_latest_version(reqwest: &reqwest::Client) -> anyhow::Result<Version> {
let version = EngineSources::pesde()
.resolve(
&VersionReq::STAR,
&ResolveOptions {
reqwest: reqwest.clone(),
},
)
.await
.context("failed to resolve version")?
.pop_last()
.context("no versions found")?
.0;
Ok(version)
#[derive(Debug, Deserialize)]
struct Release {
tag_name: String,
assets: Vec<Asset>,
}
#[derive(Debug, Deserialize)]
struct Asset {
name: String,
url: url::Url,
}
#[instrument(level = "trace")]
fn get_repo() -> (String, String) {
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
let (owner, repo) = (
parts.next().unwrap().to_string(),
parts.next().unwrap().to_string(),
);
tracing::trace!("repository for updates: {owner}/{repo}");
(owner, repo)
}
#[derive(Debug)]
pub enum VersionType {
Latest,
Specific(Version),
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_remote_version(
reqwest: &reqwest::Client,
ty: VersionType,
) -> anyhow::Result<Version> {
let (owner, repo) = get_repo();
let mut releases = reqwest
.get(format!(
"https://api.github.com/repos/{owner}/{repo}/releases",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Vec<Release>>()
.await
.context("failed to parse GitHub API response")?
.into_iter()
.filter_map(|release| Version::parse(release.tag_name.trim_start_matches('v')).ok());
match ty {
VersionType::Latest => releases.max(),
VersionType::Specific(version) => {
releases.find(|v| no_build_metadata(v) == no_build_metadata(&version))
}
}
.context("failed to find latest version")
}
pub fn no_build_metadata(version: &Version) -> Version {
let mut version = version.clone();
version.build = semver::BuildMetadata::EMPTY;
version
}
const CHECK_INTERVAL: chrono::Duration = chrono::Duration::hours(6);
#[instrument(skip(reqwest), level = "trace")]
pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()> {
let config = read_config().await?;
let version = if let Some((_, version)) = config
.last_checked_updates
.filter(|(time, _)| jiff::Timestamp::now().duration_since(*time) < CHECK_INTERVAL)
.filter(|(time, _)| chrono::Utc::now() - *time < CHECK_INTERVAL)
{
tracing::debug!("using cached version");
version
} else {
tracing::debug!("checking for updates");
let version = find_latest_version(reqwest).await?;
let version = get_remote_version(reqwest, VersionType::Latest).await?;
write_config(&CliConfig {
last_checked_updates: Some((jiff::Timestamp::now(), version.clone())),
last_checked_updates: Some((chrono::Utc::now(), version.clone())),
..config
})
.await?;
@@ -85,163 +121,213 @@ pub async fn check_for_updates(reqwest: &reqwest::Client) -> anyhow::Result<()>
return Ok(());
}
let alert_style = Style::new().yellow();
let name = env!("CARGO_BIN_NAME");
let changelog = format!("{}/releases/tag/v{version}", env!("CARGO_PKG_REPOSITORY"));
let messages = [
format!(
"{} {} → {}",
alert_style.apply_to("update available!").bold(),
REMOVED_STYLE.apply_to(current_version),
ADDED_STYLE.apply_to(version_no_metadata)
),
format!(
"run {} to upgrade",
CLI_STYLE.apply_to(concat!("`", env!("CARGO_BIN_NAME"), " self-upgrade`")),
),
let unformatted_messages = [
"".to_string(),
format!("update available! {current_version}{version_no_metadata}"),
format!("changelog: {changelog}"),
format!("run `{name} self-upgrade` to upgrade"),
"".to_string(),
format!("changelog: {}", URL_STYLE.apply_to(changelog)),
];
let column = alert_style.apply_to("│");
let width = unformatted_messages
.iter()
.map(|s| s.chars().count())
.max()
.unwrap()
+ 4;
let message = messages
let column = "".bright_magenta();
let message = [
"".to_string(),
format!(
"update available! {} → {}",
current_version.to_string().red(),
version_no_metadata.to_string().green()
),
format!("changelog: {}", changelog.blue()),
format!(
"run `{} {}` to upgrade",
name.blue(),
"self-upgrade".yellow()
),
"".to_string(),
]
.into_iter()
.map(|s| format!("{column} {s}"))
.enumerate()
.map(|(i, s)| {
let text_length = unformatted_messages[i].chars().count();
let padding = (width as f32 - text_length as f32) / 2f32;
let padding_l = " ".repeat(padding.floor() as usize);
let padding_r = " ".repeat(padding.ceil() as usize);
format!("{column}{padding_l}{s}{padding_r}{column}")
})
.collect::<Vec<_>>()
.join("\n");
println!("\n{message}\n");
let lines = "─".repeat(width).bright_magenta();
let tl = "╭".bright_magenta();
let tr = "╮".bright_magenta();
let bl = "╰".bright_magenta();
let br = "╯".bright_magenta();
println!("\n{tl}{lines}{tr}\n{message}\n{bl}{lines}{br}\n");
Ok(())
}
const ENGINES_DIR: &str = "engines";
#[instrument(skip(reqwest, writer), level = "trace")]
pub async fn download_github_release<W: AsyncWrite + Unpin>(
reqwest: &reqwest::Client,
version: &Version,
mut writer: W,
) -> anyhow::Result<()> {
let (owner, repo) = get_repo();
#[instrument(level = "trace")]
pub async fn get_installed_versions(engine: EngineKind) -> anyhow::Result<BTreeSet<Version>> {
let source = engine.source();
let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
let mut installed_versions = BTreeSet::new();
let release = reqwest
.get(format!(
"https://api.github.com/repos/{owner}/{repo}/releases/tags/v{version}",
))
.send()
.await
.context("failed to send request to GitHub API")?
.error_for_status()
.context("failed to get GitHub API response")?
.json::<Release>()
.await
.context("failed to parse GitHub API response")?;
let mut read_dir = match fs::read_dir(&path).await {
Ok(read_dir) => read_dir,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(installed_versions),
Err(e) => return Err(e).context("failed to read engines directory"),
};
let asset = release
.assets
.into_iter()
.find(|asset| {
asset.name.ends_with(&format!(
"-{}-{}.tar.gz",
std::env::consts::OS,
std::env::consts::ARCH
))
})
.context("failed to find asset for current platform")?;
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let bytes = reqwest
.get(asset.url)
.header(ACCEPT, "application/octet-stream")
.send()
.await
.context("failed to send request to download asset")?
.error_for_status()
.context("failed to download asset")?
.bytes()
.await
.context("failed to download asset")?;
let Some(version) = path.file_name().and_then(|s| s.to_str()) else {
continue;
};
let mut decoder = async_compression::tokio::bufread::GzipDecoder::new(bytes.as_ref());
let mut archive = tokio_tar::Archive::new(&mut decoder);
if let Ok(version) = Version::parse(version) {
installed_versions.insert(version);
}
let mut entry = archive
.entries()
.context("failed to read archive entries")?
.next()
.await
.context("archive has no entry")?
.context("failed to get first archive entry")?;
tokio::io::copy(&mut entry, &mut writer)
.await
.context("failed to write archive entry to file")
.map(|_| ())
}
Ok(installed_versions)
#[derive(Debug)]
pub enum TagInfo {
Complete(Version),
Incomplete(Version),
}
#[instrument(skip(reqwest), level = "trace")]
pub async fn get_or_download_engine(
pub async fn get_or_download_version(
reqwest: &reqwest::Client,
engine: EngineKind,
req: VersionReq,
) -> anyhow::Result<PathBuf> {
let source = engine.source();
let path = home_dir()?.join(ENGINES_DIR).join(source.directory());
let installed_versions = get_installed_versions(engine).await?;
let max_matching = installed_versions
.iter()
.filter(|v| version_matches(&req, v))
.next_back();
if let Some(version) = max_matching {
return Ok(path
.join(version.to_string())
.join(source.expected_file_name())
.with_extension(std::env::consts::EXE_EXTENSION));
}
let path = run_with_reporter(|_, root_progress, reporter| async {
let root_progress = root_progress;
let reporter = reporter;
root_progress.set_message("resolve version");
let mut versions = source
.resolve(
&req,
&ResolveOptions {
reqwest: reqwest.clone(),
},
)
.await
.context("failed to resolve versions")?;
let (version, engine_ref) = versions.pop_last().context("no matching versions found")?;
root_progress.set_message("download");
let reporter = reporter.report_download(format!("{engine} v{version}"));
let archive = source
.download(
&engine_ref,
&DownloadOptions {
reqwest: reqwest.clone(),
reporter: Arc::new(reporter),
version: version.clone(),
},
)
.await
.context("failed to download engine")?;
let path = path.join(version.to_string());
tag: &TagInfo,
always_give_path: bool,
) -> anyhow::Result<Option<PathBuf>> {
let path = home_dir()?.join("versions");
fs::create_dir_all(&path)
.await
.context("failed to create engine container folder")?;
let path = path
.join(source.expected_file_name())
.with_extension(std::env::consts::EXE_EXTENSION);
.context("failed to create versions directory")?;
let mut file = fs::File::create(&path)
.await
.context("failed to create new file")?;
let version = match tag {
TagInfo::Complete(version) => version,
// don't fetch the version since it could be cached
TagInfo::Incomplete(version) => version,
};
tokio::io::copy(
&mut archive
.find_executable(source.expected_file_name())
let path = path.join(format!(
"{}{}",
no_build_metadata(version),
std::env::consts::EXE_SUFFIX
));
let is_requested_version = !always_give_path && *version == current_version();
if path.exists() {
tracing::debug!("version already exists");
return Ok(if is_requested_version {
None
} else {
Some(path)
});
}
if is_requested_version {
tracing::debug!("copying current executable to version directory");
fs::copy(current_exe()?, &path)
.await
.context("failed to find executable")?,
&mut file,
.context("failed to copy current executable to version directory")?;
} else {
let version = match tag {
TagInfo::Complete(version) => version.clone(),
TagInfo::Incomplete(version) => {
get_remote_version(reqwest, VersionType::Specific(version.clone()))
.await
.context("failed to get remote version")?
}
};
tracing::debug!("downloading version");
download_github_release(
reqwest,
&version,
fs::File::create(&path)
.await
.context("failed to create version file")?,
)
.await
.context("failed to write to file")?;
Ok::<_, anyhow::Error>(path)
})
.await?;
}
make_executable(&path)
.await
.context("failed to make downloaded version executable")?;
if engine != EngineKind::Pesde {
make_linker_if_needed(engine).await?;
}
Ok(path)
Ok(if is_requested_version {
None
} else {
Some(path)
})
}
#[instrument(level = "trace")]
pub async fn replace_pesde_bin_exe(with: &Path) -> anyhow::Result<()> {
let bin_exe_path = bin_dir()
.await?
.join(EngineKind::Pesde.to_string())
.with_extension(std::env::consts::EXE_EXTENSION);
pub async fn update_bin_exe(downloaded_file: &Path) -> anyhow::Result<()> {
let bin_exe_path = bin_dir().await?.join(format!(
"{}{}",
env!("CARGO_BIN_NAME"),
std::env::consts::EXE_SUFFIX
));
let mut downloaded_file = downloaded_file.to_path_buf();
let exists = bin_exe_path.exists();
@@ -253,42 +339,23 @@ pub async fn replace_pesde_bin_exe(with: &Path) -> anyhow::Result<()> {
let tempfile = tempfile::Builder::new()
.make(|_| Ok(()))
.context("failed to create temporary file")?;
let temp_path = tempfile.into_temp_path().to_path_buf();
let path = tempfile.into_temp_path().to_path_buf();
#[cfg(windows)]
let temp_path = temp_path.with_extension("exe");
let path = path.with_extension("exe");
match fs::rename(&bin_exe_path, &temp_path).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e).context("failed to rename existing executable"),
}
let current_exe = current_exe().context("failed to get current exe path")?;
if current_exe == downloaded_file {
downloaded_file = path.to_path_buf();
}
fs::copy(with, &bin_exe_path)
fs::rename(&bin_exe_path, &path)
.await
.context("failed to rename current executable")?;
}
fs::copy(downloaded_file, &bin_exe_path)
.await
.context("failed to copy executable to bin folder")?;
make_executable(&bin_exe_path).await
}
#[instrument(level = "trace")]
pub async fn make_linker_if_needed(engine: EngineKind) -> anyhow::Result<()> {
let bin_dir = bin_dir().await?;
let linker = bin_dir
.join(engine.to_string())
.with_extension(std::env::consts::EXE_EXTENSION);
let exists = linker.exists();
if !exists {
let exe = current_exe().context("failed to get current exe path")?;
#[cfg(windows)]
let result = fs::symlink_file(exe, linker);
#[cfg(not(windows))]
let result = fs::symlink(exe, linker);
result.await.context("failed to create symlink")?;
}
Ok(())
}
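As a quick check of the `no_build_metadata` helper above, using this compare's own release tag as the example:

let v = semver::Version::parse("0.6.0-rc.6+registry.0.2.0-rc.2").unwrap();
// build metadata is dropped, the pre-release component is kept
assert_eq!(no_build_metadata(&v).to_string(), "0.6.0-rc.6");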

View file

@@ -1,182 +1,161 @@
use crate::{
graph::{DependencyGraph, DependencyGraphNode},
reporters::{DownloadProgressReporter, DownloadsReporter},
lockfile::{DependencyGraph, DownloadedDependencyGraphNode, DownloadedGraph},
manifest::DependencyType,
refresh_sources,
source::{
fs::PackageFs,
ids::PackageId,
traits::{DownloadOptions, PackageRef, PackageSource, RefreshOptions},
traits::{PackageRef, PackageSource},
PackageSources,
},
Project, RefreshedSources,
Project, PACKAGES_CONTAINER_NAME,
};
use fs_err::tokio as fs;
use std::{
collections::HashSet,
sync::{Arc, Mutex},
};
use async_stream::try_stream;
use futures::Stream;
use std::{num::NonZeroUsize, sync::Arc};
use tokio::{sync::Semaphore, task::JoinSet};
use tracing::{instrument, Instrument};
/// Options for downloading.
#[derive(Debug)]
pub(crate) struct DownloadGraphOptions<Reporter> {
/// The reqwest client.
pub reqwest: reqwest::Client,
/// The downloads reporter.
pub reporter: Option<Arc<Reporter>>,
/// The refreshed sources.
pub refreshed_sources: RefreshedSources,
/// The max number of concurrent network requests.
pub network_concurrency: NonZeroUsize,
}
type MultithreadedGraph = Arc<Mutex<DownloadedGraph>>;
impl<Reporter> DownloadGraphOptions<Reporter>
where
Reporter: DownloadsReporter + Send + Sync + 'static,
{
/// Creates a new download options with the given reqwest client and reporter.
pub(crate) fn new(reqwest: reqwest::Client) -> Self {
Self {
reqwest,
reporter: None,
refreshed_sources: Default::default(),
network_concurrency: NonZeroUsize::new(16).unwrap(),
}
}
/// Sets the downloads reporter.
pub(crate) fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
self.reporter.replace(reporter.into());
self
}
/// Sets the refreshed sources.
pub(crate) fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
self.refreshed_sources = refreshed_sources;
self
}
/// Sets the max number of concurrent network requests.
pub(crate) fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
self.network_concurrency = network_concurrency;
self
}
}
impl<Reporter> Clone for DownloadGraphOptions<Reporter> {
fn clone(&self) -> Self {
Self {
reqwest: self.reqwest.clone(),
reporter: self.reporter.clone(),
refreshed_sources: self.refreshed_sources.clone(),
network_concurrency: self.network_concurrency,
}
}
}
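`DownloadGraphOptions` uses a by-value builder: every setter takes `mut self` and returns `Self`, so configuration chains without a separate builder type. A standalone sketch of the pattern, with illustrative names rather than the crate's own:

```rust
// By-value builder sketch: setters consume and return Self.
use std::num::NonZeroUsize;

#[derive(Debug)]
struct Options {
    network_concurrency: NonZeroUsize,
}

impl Options {
    fn new() -> Self {
        // mirrors the default of 16 concurrent requests above
        Self { network_concurrency: NonZeroUsize::new(16).unwrap() }
    }

    fn network_concurrency(mut self, n: NonZeroUsize) -> Self {
        self.network_concurrency = n;
        self
    }
}

fn main() {
    let opts = Options::new().network_concurrency(NonZeroUsize::new(8).unwrap());
    println!("{opts:?}");
}
```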
pub(crate) type MultithreadDownloadJob = (
tokio::sync::mpsc::Receiver<Result<String, errors::DownloadGraphError>>,
MultithreadedGraph,
);
impl Project {
/// Downloads a graph of dependencies.
#[instrument(skip_all, level = "debug")]
pub(crate) async fn download_graph<Reporter>(
/// Downloads a graph of dependencies
#[instrument(skip(self, graph, refreshed_sources, reqwest), level = "debug")]
pub async fn download_graph(
&self,
graph: &DependencyGraph,
options: DownloadGraphOptions<Reporter>,
) -> Result<
impl Stream<
Item = Result<(PackageId, DependencyGraphNode, PackageFs), errors::DownloadGraphError>,
>,
errors::DownloadGraphError,
>
where
Reporter: DownloadsReporter + Send + Sync + 'static,
{
let DownloadGraphOptions {
reqwest,
reporter,
refreshed_sources,
network_concurrency,
} = options;
refreshed_sources: &mut HashSet<PackageSources>,
reqwest: &reqwest::Client,
prod: bool,
write: bool,
wally: bool,
) -> Result<MultithreadDownloadJob, errors::DownloadGraphError> {
let manifest = self.deser_manifest().await?;
let manifest_target_kind = manifest.target.kind();
let downloaded_graph: MultithreadedGraph = Arc::new(Mutex::new(Default::default()));
let semaphore = Arc::new(Semaphore::new(network_concurrency.get()));
let mut tasks = graph
let (tx, rx) = tokio::sync::mpsc::channel(
graph
.iter()
.map(|(package_id, node)| {
let span = tracing::info_span!("download", package_id = package_id.to_string());
.map(|(_, versions)| versions.len())
.sum::<usize>()
.max(1),
);
let project = self.clone();
let reqwest = reqwest.clone();
let reporter = reporter.clone();
let refreshed_sources = refreshed_sources.clone();
let semaphore = semaphore.clone();
let package_id = Arc::new(package_id.clone());
let node = node.clone();
async move {
let progress_reporter = reporter
.clone()
.map(|reporter| reporter.report_download(package_id.to_string()));
let _permit = semaphore.acquire().await;
if let Some(ref progress_reporter) = progress_reporter {
progress_reporter.report_start();
}
let source = node.pkg_ref.source();
refreshed_sources
.refresh(
&source,
&RefreshOptions {
project: project.clone(),
},
refresh_sources(
self,
graph
.iter()
.flat_map(|(_, versions)| versions.iter())
.map(|(_, node)| node.pkg_ref.source()),
refreshed_sources,
)
.await?;
let project = Arc::new(self.clone());
for (name, versions) in graph {
for (version_id, node) in versions {
// we need to download pesde packages first, since scripts (for target finding for example) can depend on them
if node.pkg_ref.like_wally() != wally {
continue;
}
let tx = tx.clone();
let name = name.clone();
let version_id = version_id.clone();
let node = node.clone();
let span = tracing::info_span!(
"download",
name = name.to_string(),
version_id = version_id.to_string()
);
let project = project.clone();
let reqwest = reqwest.clone();
let downloaded_graph = downloaded_graph.clone();
let package_dir = self.package_dir().to_path_buf();
tokio::spawn(
async move {
let source = node.pkg_ref.source();
let container_folder = node.container_folder(
&package_dir
.join(manifest_target_kind.packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
&name,
version_id.version(),
);
match fs::create_dir_all(&container_folder).await {
Ok(_) => {}
Err(e) => {
tx.send(Err(errors::DownloadGraphError::Io(e)))
.await
.unwrap();
return;
}
}
let project = project.clone();
tracing::debug!("downloading");
let fs = match progress_reporter {
Some(progress_reporter) => {
source
.download(
&node.pkg_ref,
&DownloadOptions {
project: project.clone(),
reqwest,
id: package_id.clone(),
reporter: Arc::new(progress_reporter),
},
)
.await
}
None => {
source
.download(
&node.pkg_ref,
&DownloadOptions {
project: project.clone(),
reqwest,
id: package_id.clone(),
reporter: Arc::new(()),
},
)
.await
}
}
.map_err(Box::new)?;
tracing::debug!("downloaded");
Ok((Arc::into_inner(package_id).unwrap(), node, fs))
}
.instrument(span)
})
.collect::<JoinSet<Result<_, errors::DownloadGraphError>>>();
let stream = try_stream! {
while let Some(res) = tasks.join_next().await {
yield res.unwrap()?;
let (fs, target) =
match source.download(&node.pkg_ref, &project, &reqwest).await {
Ok(target) => target,
Err(e) => {
tx.send(Err(Box::new(e).into())).await.unwrap();
return;
}
};
Ok(stream)
tracing::debug!("downloaded");
if write {
if !prod || node.resolved_ty != DependencyType::Dev {
match fs.write_to(container_folder, project.cas_dir(), true).await {
Ok(_) => {}
Err(e) => {
tx.send(Err(errors::DownloadGraphError::WriteFailed(e)))
.await
.unwrap();
return;
}
};
} else {
tracing::debug!(
"skipping write to disk, dev dependency in prod mode"
);
}
}
let display_name = format!("{name}@{version_id}");
{
let mut downloaded_graph = downloaded_graph.lock().unwrap();
downloaded_graph
.entry(name)
.or_default()
.insert(version_id, DownloadedDependencyGraphNode { node, target });
}
tx.send(Ok(display_name)).await.unwrap();
}
.instrument(span),
);
}
}
Ok((rx, downloaded_graph))
}
}
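The newer side of this diff throttles downloads with a `Semaphore` held across each task in a `JoinSet`, where the older side sized an mpsc channel to the graph instead. A self-contained sketch of that concurrency shape (assuming tokio with its default full feature set; the "download" is simulated):

```rust
// Semaphore-capped concurrency: at most 4 tasks do "network" work at once.
use std::sync::Arc;
use tokio::{sync::Semaphore, task::JoinSet};

#[tokio::main]
async fn main() {
    let semaphore = Arc::new(Semaphore::new(4));
    let mut tasks = JoinSet::new();

    for id in 0..16u32 {
        let semaphore = semaphore.clone();
        tasks.spawn(async move {
            // the permit is held for the duration of the task body
            let _permit = semaphore.acquire().await.expect("semaphore closed");
            tokio::time::sleep(std::time::Duration::from_millis(10)).await;
            id
        });
    }

    while let Some(done) = tasks.join_next().await {
        println!("finished {}", done.expect("task panicked"));
    }
}
```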
@ -188,9 +167,13 @@ pub mod errors {
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadGraphError {
/// An error occurred deserializing the project manifest
#[error("error deserializing project manifest")]
ManifestDeserializationFailed(#[from] crate::errors::ManifestReadError),
/// An error occurred refreshing a package source
#[error("failed to refresh package source")]
RefreshFailed(#[from] crate::source::errors::RefreshError),
RefreshFailed(#[from] Box<crate::source::errors::RefreshError>),
/// Error interacting with the filesystem
#[error("error interacting with the filesystem")]
@ -199,5 +182,9 @@ pub mod errors {
/// Error downloading a package
#[error("failed to download package")]
DownloadFailed(#[from] Box<crate::source::errors::DownloadError>),
/// Error writing package contents
#[error("failed to write package contents")]
WriteFailed(#[source] std::io::Error),
}
}

View file

@ -1,427 +1,155 @@
use crate::{
all_packages_dirs,
download::DownloadGraphOptions,
graph::{
DependencyGraph, DependencyGraphNode, DependencyGraphNodeWithTarget,
DependencyGraphWithTarget,
},
manifest::{target::TargetKind, DependencyType},
reporters::{DownloadsReporter, PatchesReporter},
source::{
ids::PackageId,
traits::{GetTargetOptions, PackageRef, PackageSource},
},
Project, RefreshedSources, SCRIPTS_LINK_FOLDER,
lockfile::{DependencyGraph, DownloadedGraph},
manifest::DependencyType,
source::PackageSources,
Project,
};
use fs_err::tokio as fs;
use futures::TryStreamExt;
use futures::FutureExt;
use std::{
collections::HashMap,
convert::Infallible,
future::{self, Future},
num::NonZeroUsize,
path::PathBuf,
sync::Arc,
collections::HashSet,
future::Future,
sync::{Arc, Mutex as StdMutex},
};
use tokio::{pin, task::JoinSet};
use tokio::sync::Mutex;
use tracing::{instrument, Instrument};
/// Hooks to perform actions after certain events during download and linking.
#[allow(unused_variables)]
pub trait DownloadAndLinkHooks {
/// The error type for the hooks.
type Error: std::error::Error + Send + Sync + 'static;
/// Called after scripts have been downloaded. The passed `graph`
/// contains all downloaded packages.
fn on_scripts_downloaded(
&self,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
/// Filters a graph to only include production dependencies, if `prod` is `true`
pub fn filter_graph(graph: &DownloadedGraph, prod: bool) -> DownloadedGraph {
if !prod {
return graph.clone();
}
/// Called after binary dependencies have been downloaded. The passed
/// `graph` contains all downloaded packages.
fn on_bins_downloaded(
&self,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
graph
.iter()
.map(|(name, versions)| {
(
name.clone(),
versions
.iter()
.filter(|(_, node)| node.node.resolved_ty != DependencyType::Dev)
.map(|(v_id, node)| (v_id.clone(), node.clone()))
.collect(),
)
})
.collect()
}
/// Called after all dependencies have been downloaded. The passed
/// `graph` contains all downloaded packages.
fn on_all_downloaded(
&self,
graph: &DependencyGraphWithTarget,
) -> impl Future<Output = Result<(), Self::Error>> + Send {
future::ready(Ok(()))
}
}
impl DownloadAndLinkHooks for () {
type Error = Infallible;
}
/// Options for downloading and linking.
#[derive(Debug)]
pub struct DownloadAndLinkOptions<Reporter = (), Hooks = ()> {
/// The reqwest client.
pub reqwest: reqwest::Client,
/// The downloads reporter.
pub reporter: Option<Arc<Reporter>>,
/// The download and link hooks.
pub hooks: Option<Arc<Hooks>>,
/// The refreshed sources.
pub refreshed_sources: RefreshedSources,
/// Whether to skip dev dependencies.
pub prod: bool,
/// The max number of concurrent network requests.
pub network_concurrency: NonZeroUsize,
/// Whether to re-install all dependencies even if they are already installed
pub force: bool,
}
impl<Reporter, Hooks> DownloadAndLinkOptions<Reporter, Hooks>
where
Reporter: DownloadsReporter + PatchesReporter + Send + Sync + 'static,
Hooks: DownloadAndLinkHooks + Send + Sync + 'static,
{
/// Creates new download-and-link options with the given reqwest client; the reporter starts unset.
pub fn new(reqwest: reqwest::Client) -> Self {
Self {
reqwest,
reporter: None,
hooks: None,
refreshed_sources: Default::default(),
prod: false,
network_concurrency: NonZeroUsize::new(16).unwrap(),
force: false,
}
}
/// Sets the downloads reporter.
pub fn reporter(mut self, reporter: impl Into<Arc<Reporter>>) -> Self {
self.reporter.replace(reporter.into());
self
}
/// Sets the download and link hooks.
pub fn hooks(mut self, hooks: impl Into<Arc<Hooks>>) -> Self {
self.hooks.replace(hooks.into());
self
}
/// Sets the refreshed sources.
pub fn refreshed_sources(mut self, refreshed_sources: RefreshedSources) -> Self {
self.refreshed_sources = refreshed_sources;
self
}
/// Sets whether to skip dev dependencies.
pub fn prod(mut self, prod: bool) -> Self {
self.prod = prod;
self
}
/// Sets the max number of concurrent network requests.
pub fn network_concurrency(mut self, network_concurrency: NonZeroUsize) -> Self {
self.network_concurrency = network_concurrency;
self
}
/// Sets whether to re-install all dependencies even if they are already installed
pub fn force(mut self, force: bool) -> Self {
self.force = force;
self
}
}
impl Clone for DownloadAndLinkOptions {
fn clone(&self) -> Self {
Self {
reqwest: self.reqwest.clone(),
reporter: self.reporter.clone(),
hooks: self.hooks.clone(),
refreshed_sources: self.refreshed_sources.clone(),
prod: self.prod,
network_concurrency: self.network_concurrency,
force: self.force,
}
}
}
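The `reporter` and `hooks` setters take `impl Into<Arc<...>>`, so a caller can hand over either a plain value or an already-shared `Arc` without wrapping it twice. A small sketch of that ergonomic, with stand-in types:

```rust
// `impl Into<Arc<T>>` accepts both T and Arc<T>, via the blanket
// `impl<T> From<T> for Arc<T>`.
use std::sync::Arc;

struct Holder {
    shared: Option<Arc<String>>,
}

impl Holder {
    fn set(mut self, value: impl Into<Arc<String>>) -> Self {
        self.shared.replace(value.into());
        self
    }
}

fn main() {
    let by_value = Holder { shared: None }.set(String::from("plain value"));
    let pre_shared = Holder { shared: None }.set(Arc::new(String::from("already an Arc")));
    assert!(by_value.shared.is_some() && pre_shared.shared.is_some());
}
```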
/// Receiver for dependencies downloaded and linked
pub type DownloadAndLinkReceiver =
tokio::sync::mpsc::Receiver<Result<String, crate::download::errors::DownloadGraphError>>;
impl Project {
/// Downloads a graph of dependencies and links them in the correct order
#[instrument(skip_all, fields(prod = options.prod), level = "debug")]
pub async fn download_and_link<Reporter, Hooks>(
#[instrument(
skip(self, graph, refreshed_sources, reqwest, pesde_cb),
level = "debug"
)]
pub async fn download_and_link<
F: FnOnce(&Arc<DownloadedGraph>) -> R + Send + 'static,
R: Future<Output = Result<(), E>> + Send,
E: Send + Sync + 'static,
>(
&self,
graph: &Arc<DependencyGraph>,
options: DownloadAndLinkOptions<Reporter, Hooks>,
) -> Result<DependencyGraphWithTarget, errors::DownloadAndLinkError<Hooks::Error>>
where
Reporter: DownloadsReporter + PatchesReporter + 'static,
Hooks: DownloadAndLinkHooks + 'static,
{
let DownloadAndLinkOptions {
reqwest,
reporter,
hooks,
refreshed_sources,
prod,
network_concurrency,
force,
} = options;
refreshed_sources: &Arc<Mutex<HashSet<PackageSources>>>,
reqwest: &reqwest::Client,
prod: bool,
write: bool,
pesde_cb: F,
) -> Result<
(
DownloadAndLinkReceiver,
impl Future<Output = Result<DownloadedGraph, errors::DownloadAndLinkError<E>>>,
),
errors::DownloadAndLinkError<E>,
> {
let (tx, rx) = tokio::sync::mpsc::channel(
graph
.iter()
.map(|(_, versions)| versions.len())
.sum::<usize>()
.max(1),
);
let downloaded_graph = Arc::new(StdMutex::new(DownloadedGraph::default()));
let this = self.clone();
let graph = graph.clone();
let reqwest = reqwest.clone();
let manifest = self.deser_manifest().await?;
let refreshed_sources = refreshed_sources.clone();
if force {
async fn remove_dir(dir: PathBuf) -> std::io::Result<()> {
tracing::debug!("force deleting the `{}` folder", dir.display());
Ok((
rx,
tokio::spawn(async move {
let mut refreshed_sources = refreshed_sources.lock().await;
match fs::remove_dir_all(dir).await {
Ok(()) => Ok(()),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
Err(e) => Err(e),
}
}
let mut tasks = all_packages_dirs()
.into_iter()
.map(|folder| remove_dir(self.package_dir().join(&folder)))
.chain(std::iter::once(remove_dir(
self.package_dir().join(SCRIPTS_LINK_FOLDER),
)))
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
}
// step 1. download dependencies
let graph_to_download = {
let mut download_graph_options = DownloadGraphOptions::<Reporter>::new(reqwest.clone())
.refreshed_sources(refreshed_sources.clone())
.network_concurrency(network_concurrency);
if let Some(reporter) = reporter.clone() {
download_graph_options = download_graph_options.reporter(reporter);
}
let mut downloaded_graph = DependencyGraph::new();
let graph_to_download = if force {
graph.clone()
} else {
let mut tasks = graph
.iter()
.map(|(id, node)| {
let id = id.clone();
let node = node.clone();
let container_folder =
node.container_folder_from_project(&id, self, manifest.target.kind());
async move {
return (id, node, fs::metadata(&container_folder).await.is_ok());
}
})
.collect::<JoinSet<_>>();
let mut graph_to_download = DependencyGraph::new();
while let Some(task) = tasks.join_next().await {
let (id, node, installed) = task.unwrap();
if installed {
downloaded_graph.insert(id, node);
continue;
}
graph_to_download.insert(id, node);
}
Arc::new(graph_to_download)
};
let downloaded = self
.download_graph(&graph_to_download, download_graph_options.clone())
.instrument(tracing::debug_span!("download"))
.await?;
pin!(downloaded);
let mut tasks = JoinSet::new();
while let Some((id, node, fs)) = downloaded.try_next().await? {
let container_folder =
node.container_folder_from_project(&id, self, manifest.target.kind());
downloaded_graph.insert(id, node);
let cas_dir = self.cas_dir().to_path_buf();
tasks.spawn(async move {
fs::create_dir_all(&container_folder).await?;
fs.write_to(container_folder, cas_dir, true).await
});
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
downloaded_graph
};
let (wally_graph_to_download, other_graph_to_download) =
graph_to_download
.into_iter()
.partition::<HashMap<_, _>, _>(|(_, node)| node.pkg_ref.is_wally_package());
let mut graph = Arc::new(DependencyGraphWithTarget::new());
async fn get_graph_targets<Hooks: DownloadAndLinkHooks>(
graph: &mut Arc<DependencyGraphWithTarget>,
project: &Project,
manifest_target_kind: TargetKind,
downloaded_graph: HashMap<PackageId, DependencyGraphNode>,
) -> Result<(), errors::DownloadAndLinkError<Hooks::Error>> {
let mut tasks = downloaded_graph
.into_iter()
.map(|(id, node)| {
let source = node.pkg_ref.source();
let path = Arc::from(
node.container_folder_from_project(&id, project, manifest_target_kind)
.as_path(),
);
let id = Arc::new(id.clone());
let project = project.clone();
async move {
let target = source
.get_target(
&node.pkg_ref,
&GetTargetOptions {
project,
path,
id: id.clone(),
},
)
// step 1. download pesde dependencies
let (mut pesde_rx, pesde_graph) = this
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, false)
.instrument(tracing::debug_span!("download (pesde)"))
.await?;
Ok::<_, errors::DownloadAndLinkError<Hooks::Error>>((
Arc::into_inner(id).unwrap(),
DependencyGraphNodeWithTarget { node, target },
))
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
let (id, node) = task.unwrap()?;
Arc::get_mut(graph).unwrap().insert(id, node);
while let Some(result) = pesde_rx.recv().await {
tx.send(result).await.unwrap();
}
Ok(())
}
let pesde_graph = Arc::into_inner(pesde_graph).unwrap().into_inner().unwrap();
// step 2. get targets for non Wally packages (Wally packages require the scripts packages to be downloaded first)
get_graph_targets::<Hooks>(
&mut graph,
self,
manifest.target.kind(),
other_graph_to_download,
)
.instrument(tracing::debug_span!("get targets (non-wally)"))
// step 2. link pesde dependencies. do so without types
if write {
this.link_dependencies(&filter_graph(&pesde_graph, prod), false)
.instrument(tracing::debug_span!("link (pesde)"))
.await?;
}
self.link_dependencies(graph.clone(), false)
.instrument(tracing::debug_span!("link (non-wally)"))
.await?;
let pesde_graph = Arc::new(pesde_graph);
if let Some(hooks) = &hooks {
hooks
.on_scripts_downloaded(&graph)
pesde_cb(&pesde_graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
.map_err(errors::DownloadAndLinkError::PesdeCallback)?;
hooks
.on_bins_downloaded(&graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
}
let pesde_graph = Arc::into_inner(pesde_graph).unwrap();
// step 3. get targets for Wally packages
get_graph_targets::<Hooks>(
&mut graph,
self,
manifest.target.kind(),
wally_graph_to_download,
)
.instrument(tracing::debug_span!("get targets (wally)"))
// step 3. download wally dependencies
let (mut wally_rx, wally_graph) = this
.download_graph(&graph, &mut refreshed_sources, &reqwest, prod, write, true)
.instrument(tracing::debug_span!("download (wally)"))
.await?;
#[cfg(feature = "patches")]
while let Some(result) = wally_rx.recv().await {
tx.send(result).await.unwrap();
}
let wally_graph = Arc::into_inner(wally_graph).unwrap().into_inner().unwrap();
{
use crate::patches::apply_patch;
let mut tasks = manifest
.patches
.iter()
.flat_map(|(name, versions)| {
versions
.iter()
.map(|(v_id, path)| (PackageId::new(name.clone(), v_id.clone()), path))
})
.filter_map(|(id, patch_path)| graph.get(&id).map(|node| (id, node, patch_path)))
.map(|(id, node, patch_path)| {
let patch_path = patch_path.to_path(self.package_dir());
let container_folder =
node.node
.container_folder_from_project(&id, self, manifest.target.kind());
let reporter = reporter.clone();
let mut downloaded_graph = downloaded_graph.lock().unwrap();
downloaded_graph.extend(pesde_graph);
for (name, versions) in wally_graph {
for (version_id, node) in versions {
downloaded_graph
.entry(name.clone())
.or_default()
.insert(version_id, node);
}
}
}
async move {
match reporter {
Some(reporter) => {
apply_patch(&id, container_folder, &patch_path, reporter.clone())
.await
}
None => {
apply_patch(&id, container_folder, &patch_path, Arc::new(())).await
}
}
}
})
.collect::<JoinSet<_>>();
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
}
let graph = Arc::into_inner(downloaded_graph)
.unwrap()
.into_inner()
.unwrap();
// step 4. link ALL dependencies. do so with types
self.link_dependencies(graph.clone(), true)
if write {
this.link_dependencies(&filter_graph(&graph, prod), true)
.instrument(tracing::debug_span!("link (all)"))
.await?;
if let Some(hooks) = &hooks {
hooks
.on_all_downloaded(&graph)
.await
.map_err(errors::DownloadAndLinkError::Hook)?;
}
let mut graph = Arc::into_inner(graph).unwrap();
if prod {
graph.retain(|_, node| node.node.resolved_ty != DependencyType::Dev);
}
if prod || !force {
self.remove_unused(&graph).await?;
}
Ok(graph)
})
.map(|r| r.unwrap()),
))
}
}
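On the newer side, dev dependencies are pruned after download with `retain` when `prod` is set, where the older side filtered the whole graph up front via `filter_graph`. A standalone rendering of the `retain` step with simplified stand-in types:

```rust
// Prod filtering sketch: drop dev dependencies from the graph in place,
// mirroring `graph.retain(|_, node| node.node.resolved_ty != DependencyType::Dev)`.
use std::collections::BTreeMap;

#[derive(Debug, PartialEq)]
enum DepType {
    Standard,
    Dev,
}

fn main() {
    let mut graph = BTreeMap::from([
        ("foo/bar@1.0.0", DepType::Standard),
        ("foo/test-helper@0.2.0", DepType::Dev),
    ]);
    graph.retain(|_, ty| *ty != DepType::Dev);
    assert_eq!(graph.len(), 1); // only the standard dependency survives
}
```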
@ -433,10 +161,6 @@ pub mod errors {
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadAndLinkError<E> {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
/// An error occurred while downloading the graph
#[error("error downloading graph")]
DownloadGraph(#[from] crate::download::errors::DownloadGraphError),
@ -446,24 +170,7 @@ pub mod errors {
Linking(#[from] crate::linking::errors::LinkingError),
/// An error occurred while executing the pesde callback
#[error("error executing hook")]
Hook(#[source] E),
/// IO error
#[error("io error")]
Io(#[from] std::io::Error),
/// Error getting a target
#[error("error getting target")]
GetTarget(#[from] crate::source::errors::GetTargetError),
/// Removing unused dependencies failed
#[error("error removing unused dependencies")]
RemoveUnused(#[from] crate::linking::incremental::errors::RemoveUnusedError),
/// Patching a package failed
#[cfg(feature = "patches")]
#[error("error applying patch")]
Patch(#[from] crate::patches::errors::ApplyPatchError),
#[error("error executing pesde callback")]
PesdeCallback(#[source] E),
}
}

View file

@ -1,61 +0,0 @@
/// Sources of engines
pub mod source;
use crate::{engine::source::EngineSources, ser_display_deser_fromstr};
use std::{fmt::Display, str::FromStr};
/// All supported engines
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[cfg_attr(test, schemars(rename_all = "snake_case"))]
pub enum EngineKind {
/// The pesde package manager
Pesde,
/// The Lune runtime
Lune,
}
ser_display_deser_fromstr!(EngineKind);
impl Display for EngineKind {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
EngineKind::Pesde => write!(f, "pesde"),
EngineKind::Lune => write!(f, "lune"),
}
}
}
impl FromStr for EngineKind {
type Err = errors::EngineKindFromStrError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s.to_lowercase().as_str() {
"pesde" => Ok(EngineKind::Pesde),
"lune" => Ok(EngineKind::Lune),
_ => Err(errors::EngineKindFromStrError::Unknown(s.to_string())),
}
}
}
impl EngineKind {
/// Returns the source to get this engine from
pub fn source(&self) -> EngineSources {
match self {
EngineKind::Pesde => EngineSources::pesde(),
EngineKind::Lune => EngineSources::lune(),
}
}
}
/// Errors related to engine kinds
pub mod errors {
use thiserror::Error;
/// Errors which can occur while using the FromStr implementation of EngineKind
#[derive(Debug, Error)]
pub enum EngineKindFromStrError {
/// The string isn't a recognized EngineKind
#[error("unknown engine kind {0}")]
Unknown(String),
}
}
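A hypothetical round trip with the `EngineKind` above (assuming the crate is in scope): parsing is case-insensitive, while `Display` always yields the lowercase name that `ser_display_deser_fromstr!` presumably feeds to serde.

```rust
// Illustrative only; relies on the FromStr/Display impls shown above.
let kind: EngineKind = "Lune".parse().expect("known engine");
assert_eq!(kind, EngineKind::Lune);
assert_eq!(kind.to_string(), "lune");
```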

View file

@ -1,320 +0,0 @@
use futures::StreamExt;
use std::{
collections::BTreeSet,
mem::ManuallyDrop,
path::{Path, PathBuf},
pin::Pin,
str::FromStr,
task::{Context, Poll},
};
use tokio::{
io::{AsyncBufRead, AsyncRead, AsyncReadExt, ReadBuf},
pin,
};
use tokio_util::compat::{Compat, FuturesAsyncReadCompatExt};
/// The kind of encoding used for the archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EncodingKind {
/// Gzip
Gzip,
}
/// The kind of archive
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ArchiveKind {
/// Tar
Tar,
/// Zip
Zip,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub(crate) struct ArchiveInfo(ArchiveKind, Option<EncodingKind>);
impl FromStr for ArchiveInfo {
type Err = errors::ArchiveInfoFromStrError;
fn from_str(s: &str) -> Result<Self, Self::Err> {
let parts = s.split('.').collect::<Vec<_>>();
Ok(match &*parts {
[.., "tar", "gz"] => ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)),
[.., "tar"] => ArchiveInfo(ArchiveKind::Tar, None),
[.., "zip", "gz"] => {
return Err(errors::ArchiveInfoFromStrError::Unsupported(
ArchiveKind::Zip,
Some(EncodingKind::Gzip),
))
}
[.., "zip"] => ArchiveInfo(ArchiveKind::Zip, None),
_ => return Err(errors::ArchiveInfoFromStrError::Invalid(s.to_string())),
})
}
}
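The slice patterns above key off the trailing filename components, so any prefix is ignored and a gzipped zip is rejected outright. Hypothetical crate-internal examples (the type is `pub(crate)`, so these are illustrative rather than public API usage):

```rust
// Trailing components decide the archive kind; the rest of the name is ignored.
assert_eq!(
    "pesde-0.6.0-linux-x86_64.tar.gz".parse::<ArchiveInfo>().unwrap(),
    ArchiveInfo(ArchiveKind::Tar, Some(EncodingKind::Gzip)),
);
assert!("pesde.zip.gz".parse::<ArchiveInfo>().is_err()); // gzipped zip is unsupported
```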
pub(crate) type ArchiveReader = Pin<Box<dyn AsyncBufRead>>;
/// An archive
pub struct Archive {
pub(crate) info: ArchiveInfo,
pub(crate) reader: ArchiveReader,
}
enum TarReader {
Gzip(async_compression::tokio::bufread::GzipDecoder<ArchiveReader>),
Plain(ArchiveReader),
}
// TODO: try to see if we can avoid the unsafe blocks
impl AsyncRead for TarReader {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
unsafe {
match self.get_unchecked_mut() {
Self::Gzip(r) => Pin::new_unchecked(r).poll_read(cx, buf),
Self::Plain(r) => Pin::new_unchecked(r).poll_read(cx, buf),
}
}
}
}
enum ArchiveEntryInner {
Tar(tokio_tar::Entry<tokio_tar::Archive<TarReader>>),
Zip {
archive: *mut async_zip::tokio::read::seek::ZipFileReader<std::io::Cursor<Vec<u8>>>,
reader: ManuallyDrop<
Compat<
async_zip::tokio::read::ZipEntryReader<
'static,
std::io::Cursor<Vec<u8>>,
async_zip::base::read::WithoutEntry,
>,
>,
>,
},
}
impl Drop for ArchiveEntryInner {
fn drop(&mut self) {
match self {
Self::Tar(_) => {}
Self::Zip { archive, reader } => unsafe {
ManuallyDrop::drop(reader);
drop(Box::from_raw(*archive));
},
}
}
}
/// An entry in an archive. Usually the executable.
pub struct ArchiveEntry(ArchiveEntryInner);
impl AsyncRead for ArchiveEntry {
fn poll_read(
self: Pin<&mut Self>,
cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
unsafe {
match &mut self.get_unchecked_mut().0 {
ArchiveEntryInner::Tar(r) => Pin::new_unchecked(r).poll_read(cx, buf),
ArchiveEntryInner::Zip { reader, .. } => {
Pin::new_unchecked(&mut **reader).poll_read(cx, buf)
}
}
}
}
}
impl Archive {
/// Finds the executable in the archive and returns it as an [`ArchiveEntry`]
pub async fn find_executable(
self,
expected_file_name: &str,
) -> Result<ArchiveEntry, errors::FindExecutableError> {
#[derive(Debug, PartialEq, Eq)]
struct Candidate {
path: PathBuf,
file_name_matches: bool,
extension_matches: bool,
has_permissions: bool,
}
impl Candidate {
fn new(path: PathBuf, perms: u32, expected_file_name: &str) -> Self {
Self {
file_name_matches: path
.file_name()
.is_some_and(|name| name == expected_file_name),
extension_matches: match path.extension() {
Some(ext) if ext == std::env::consts::EXE_EXTENSION => true,
None if std::env::consts::EXE_EXTENSION.is_empty() => true,
_ => false,
},
path,
has_permissions: perms & 0o111 != 0,
}
}
fn should_be_considered(&self) -> bool {
// if nothing matches, we should not consider this candidate, as it is most likely not the executable we are looking for
self.file_name_matches || self.extension_matches || self.has_permissions
}
}
impl Ord for Candidate {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
self.file_name_matches
.cmp(&other.file_name_matches)
.then(self.extension_matches.cmp(&other.extension_matches))
.then(self.has_permissions.cmp(&other.has_permissions))
}
}
impl PartialOrd for Candidate {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
let mut candidates = BTreeSet::new();
match self.info {
ArchiveInfo(ArchiveKind::Tar, encoding) => {
use async_compression::tokio::bufread as decoders;
let reader = match encoding {
Some(EncodingKind::Gzip) => {
TarReader::Gzip(decoders::GzipDecoder::new(self.reader))
}
None => TarReader::Plain(self.reader),
};
let mut archive = tokio_tar::Archive::new(reader);
let mut entries = archive.entries()?;
while let Some(entry) = entries.next().await.transpose()? {
if entry.header().entry_type().is_dir() {
continue;
}
let candidate = Candidate::new(
entry.path()?.to_path_buf(),
entry.header().mode()?,
expected_file_name,
);
if candidate.should_be_considered() {
candidates.insert(candidate);
}
}
let Some(candidate) = candidates.pop_last() else {
return Err(errors::FindExecutableError::ExecutableNotFound);
};
let mut entries = archive.entries()?;
while let Some(entry) = entries.next().await.transpose()? {
if entry.header().entry_type().is_dir() {
continue;
}
let path = entry.path()?;
if path == candidate.path {
return Ok(ArchiveEntry(ArchiveEntryInner::Tar(entry)));
}
}
}
ArchiveInfo(ArchiveKind::Zip, _) => {
let reader = self.reader;
pin!(reader);
// TODO: would be lovely to not have to read the whole archive into memory
let mut buf = vec![];
reader.read_to_end(&mut buf).await?;
let archive = async_zip::base::read::seek::ZipFileReader::with_tokio(
std::io::Cursor::new(buf),
)
.await?;
for entry in archive.file().entries() {
if entry.dir()? {
continue;
}
let path: &Path = entry.filename().as_str()?.as_ref();
let candidate = Candidate::new(
path.to_path_buf(),
entry.unix_permissions().unwrap_or(0) as u32,
expected_file_name,
);
if candidate.should_be_considered() {
candidates.insert(candidate);
}
}
let Some(candidate) = candidates.pop_last() else {
return Err(errors::FindExecutableError::ExecutableNotFound);
};
for (i, entry) in archive.file().entries().iter().enumerate() {
if entry.dir()? {
continue;
}
let path: &Path = entry.filename().as_str()?.as_ref();
if candidate.path == path {
let ptr = Box::into_raw(Box::new(archive));
let reader = (unsafe { &mut *ptr }).reader_without_entry(i).await?;
return Ok(ArchiveEntry(ArchiveEntryInner::Zip {
archive: ptr,
reader: ManuallyDrop::new(reader.compat()),
}));
}
}
}
}
Err(errors::FindExecutableError::ExecutableNotFound)
}
}
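The candidate ranking above compares booleans in priority order (file name match, then extension match, then executable bit) and lets `BTreeSet::pop_last` surface the best candidate. The crate spells the ordering out with `.cmp().then(...)`; a derived `Ord` over fields in the same declaration order behaves identically, as this standalone sketch shows:

```rust
use std::collections::BTreeSet;

// Field order gives the comparison priority under the derived Ord.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
struct Rank {
    file_name_matches: bool,
    extension_matches: bool,
    has_permissions: bool,
}

fn main() {
    let mut candidates = BTreeSet::from([
        Rank { file_name_matches: false, extension_matches: true, has_permissions: true },
        Rank { file_name_matches: true, extension_matches: false, has_permissions: false },
    ]);
    // an exact file-name match outranks extension and permission matches combined
    let best = candidates.pop_last().unwrap();
    assert!(best.file_name_matches);
}
```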
/// Errors that can occur when working with archives
pub mod errors {
use thiserror::Error;
/// Errors that can occur when parsing archive info
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ArchiveInfoFromStrError {
/// The string is not a valid archive descriptor. E.g. `{name}.tar.gz`
#[error("string `{0}` is not a valid archive descriptor")]
Invalid(String),
/// The archive type is not supported. E.g. `{name}.zip.gz`
#[error("archive type {0:?} with encoding {1:?} is not supported")]
Unsupported(super::ArchiveKind, Option<super::EncodingKind>),
}
/// Errors that can occur when finding an executable in an archive
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum FindExecutableError {
/// The executable was not found in the archive
#[error("failed to find executable in archive")]
ExecutableNotFound,
/// An IO error occurred
#[error("IO error")]
Io(#[from] std::io::Error),
/// An error occurred reading the zip archive
#[error("failed to read zip archive")]
Zip(#[from] async_zip::error::ZipError),
}
}

View file

@ -1,19 +0,0 @@
use serde::Deserialize;
/// A GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Release {
/// The tag name of the release
pub tag_name: String,
/// The assets of the release
pub assets: Vec<Asset>,
}
/// An asset of a GitHub release
#[derive(Debug, Eq, PartialEq, Hash, Clone, Deserialize)]
pub struct Asset {
/// The name of the asset
pub name: String,
/// The download URL of the asset
pub url: url::Url,
}

View file

@ -1,146 +0,0 @@
/// The GitHub engine reference
pub mod engine_ref;
use crate::{
engine::source::{
archive::Archive,
github::engine_ref::Release,
traits::{DownloadOptions, EngineSource, ResolveOptions},
},
reporters::{response_to_async_read, DownloadProgressReporter},
util::no_build_metadata,
version_matches,
};
use reqwest::header::ACCEPT;
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};
/// The GitHub engine source
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct GitHubEngineSource {
/// The owner of the repository to download from
pub owner: String,
/// The repository to download releases from
pub repo: String,
/// The template for the asset name. `{VERSION}` will be replaced with the version
pub asset_template: String,
}
impl EngineSource for GitHubEngineSource {
type Ref = Release;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn directory(&self) -> PathBuf {
PathBuf::from("github").join(&self.owner).join(&self.repo)
}
fn expected_file_name(&self) -> &str {
&self.repo
}
async fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
let ResolveOptions { reqwest, .. } = options;
Ok(reqwest
.get(format!(
"https://api.github.com/repos/{}/{}/releases",
urlencoding::encode(&self.owner),
urlencoding::encode(&self.repo),
))
.send()
.await?
.error_for_status()?
.json::<Vec<Release>>()
.await?
.into_iter()
.filter_map(
|release| match release.tag_name.trim_start_matches('v').parse() {
Ok(version) if version_matches(requirement, &version) => {
Some((version, release))
}
_ => None,
},
)
.collect())
}
async fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<Archive, Self::DownloadError> {
let DownloadOptions {
reqwest,
reporter,
version,
..
} = options;
let desired_asset_names = [
self.asset_template
.replace("{VERSION}", &version.to_string()),
self.asset_template
.replace("{VERSION}", &no_build_metadata(version).to_string()),
];
let asset = engine_ref
.assets
.iter()
.find(|asset| {
desired_asset_names
.iter()
.any(|name| asset.name.eq_ignore_ascii_case(name))
})
.ok_or(errors::DownloadError::AssetNotFound)?;
reporter.report_start();
let response = reqwest
.get(asset.url.clone())
.header(ACCEPT, "application/octet-stream")
.send()
.await?
.error_for_status()?;
Ok(Archive {
info: asset.name.parse()?,
reader: Box::pin(response_to_async_read(response, reporter.clone())),
})
}
}
/// Errors that can occur when working with the GitHub engine source
pub mod errors {
use thiserror::Error;
/// Errors that can occur when resolving a GitHub engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ResolveError {
/// Handling the request failed
#[error("failed to handle GitHub API request")]
Request(#[from] reqwest::Error),
}
/// Errors that can occur when downloading a GitHub engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadError {
/// An asset for the current platform could not be found
#[error("failed to find asset for current platform")]
AssetNotFound,
/// Handling the request failed
#[error("failed to handle GitHub API request")]
Request(#[from] reqwest::Error),
/// The asset's name could not be parsed
#[error("failed to parse asset name")]
ParseAssetName(#[from] crate::engine::source::archive::errors::ArchiveInfoFromStrError),
}
}

View file

@ -1,143 +0,0 @@
use crate::{
engine::source::{
archive::Archive,
traits::{DownloadOptions, EngineSource, ResolveOptions},
},
reporters::DownloadProgressReporter,
};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, path::PathBuf};
/// Archives
pub mod archive;
/// The GitHub engine source
pub mod github;
/// Traits for engine sources
pub mod traits;
/// Engine references
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineRefs {
/// A GitHub engine reference
GitHub(github::engine_ref::Release),
}
/// Engine sources
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum EngineSources {
/// A GitHub engine source
GitHub(github::GitHubEngineSource),
}
impl EngineSource for EngineSources {
type Ref = EngineRefs;
type ResolveError = errors::ResolveError;
type DownloadError = errors::DownloadError;
fn directory(&self) -> PathBuf {
match self {
EngineSources::GitHub(source) => source.directory(),
}
}
fn expected_file_name(&self) -> &str {
match self {
EngineSources::GitHub(source) => source.expected_file_name(),
}
}
async fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> Result<BTreeMap<Version, Self::Ref>, Self::ResolveError> {
match self {
EngineSources::GitHub(source) => source
.resolve(requirement, options)
.await
.map(|map| {
map.into_iter()
.map(|(version, release)| (version, EngineRefs::GitHub(release)))
.collect()
})
.map_err(Into::into),
}
}
async fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> Result<Archive, Self::DownloadError> {
match (self, engine_ref) {
(EngineSources::GitHub(source), EngineRefs::GitHub(release)) => {
source.download(release, options).await.map_err(Into::into)
}
// for the future
#[allow(unreachable_patterns)]
_ => Err(errors::DownloadError::Mismatch),
}
}
}
impl EngineSources {
/// Returns the source for the pesde engine
pub fn pesde() -> Self {
let mut parts = env!("CARGO_PKG_REPOSITORY").split('/').skip(3);
let (owner, repo) = (
parts.next().unwrap().to_string(),
parts.next().unwrap().to_string(),
);
EngineSources::GitHub(github::GitHubEngineSource {
owner,
repo,
asset_template: format!(
"pesde-{{VERSION}}-{}-{}.zip",
std::env::consts::OS,
std::env::consts::ARCH
),
})
}
/// Returns the source for the lune engine
pub fn lune() -> Self {
EngineSources::GitHub(github::GitHubEngineSource {
owner: "lune-org".into(),
repo: "lune".into(),
asset_template: format!(
"lune-{{VERSION}}-{}-{}.zip",
std::env::consts::OS,
std::env::consts::ARCH
),
})
}
}
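Both built-in sources derive the expected asset name from a template in which `{VERSION}` is substituted at download time. A standalone rendering with an illustrative version number:

```rust
// Asset-name construction: `{{` escapes a literal `{` in format!, so the
// template keeps the `{VERSION}` placeholder until download time.
fn main() {
    let template = format!(
        "pesde-{{VERSION}}-{}-{}.zip",
        std::env::consts::OS,
        std::env::consts::ARCH
    );
    let asset_name = template.replace("{VERSION}", "0.6.0");
    // e.g. "pesde-0.6.0-linux-x86_64.zip" on x86_64 Linux
    println!("{asset_name}");
}
```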
/// Errors that can occur when working with engine sources
pub mod errors {
use thiserror::Error;
/// Errors that can occur when resolving an engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ResolveError {
/// Failed to resolve the GitHub engine
#[error("failed to resolve github engine")]
GitHub(#[from] super::github::errors::ResolveError),
}
/// Errors that can occur when downloading an engine
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum DownloadError {
/// Failed to download the GitHub engine
#[error("failed to download github engine")]
GitHub(#[from] super::github::errors::DownloadError),
/// Mismatched engine reference
#[error("mismatched engine reference")]
Mismatch,
}
}

View file

@ -1,51 +0,0 @@
use crate::{engine::source::archive::Archive, reporters::DownloadProgressReporter};
use semver::{Version, VersionReq};
use std::{collections::BTreeMap, fmt::Debug, future::Future, path::PathBuf, sync::Arc};
/// Options for resolving an engine
#[derive(Debug, Clone)]
pub struct ResolveOptions {
/// The reqwest client to use
pub reqwest: reqwest::Client,
}
/// Options for downloading an engine
#[derive(Debug, Clone)]
pub struct DownloadOptions<R: DownloadProgressReporter> {
/// The reqwest client to use
pub reqwest: reqwest::Client,
/// The reporter to use
pub reporter: Arc<R>,
/// The version of the engine to be downloaded
pub version: Version,
}
/// A source of engines
pub trait EngineSource: Debug {
/// The reference type for this source
type Ref;
/// The error type for resolving an engine from this source
type ResolveError: std::error::Error + Send + Sync + 'static;
/// The error type for downloading an engine from this source
type DownloadError: std::error::Error + Send + Sync + 'static;
/// Returns the folder to store the engine's versions in
fn directory(&self) -> PathBuf;
/// Returns the expected file name of the engine in the archive
fn expected_file_name(&self) -> &str;
/// Resolves a requirement to a reference
fn resolve(
&self,
requirement: &VersionReq,
options: &ResolveOptions,
) -> impl Future<Output = Result<BTreeMap<Version, Self::Ref>, Self::ResolveError>> + Send + Sync;
/// Downloads an engine
fn download<R: DownloadProgressReporter + 'static>(
&self,
engine_ref: &Self::Ref,
options: &DownloadOptions<R>,
) -> impl Future<Output = Result<Archive, Self::DownloadError>> + Send + Sync;
}

View file

@ -1,110 +0,0 @@
use crate::{
manifest::{
target::{Target, TargetKind},
Alias, DependencyType,
},
source::{
ids::{PackageId, VersionId},
refs::PackageRefs,
specifiers::DependencySpecifiers,
traits::PackageRef,
},
Project, PACKAGES_CONTAINER_NAME,
};
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, path::PathBuf};
/// A graph of dependencies
pub type Graph<Node> = BTreeMap<PackageId, Node>;
/// A dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNode {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(Alias, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageId, Alias>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference
pub pkg_ref: PackageRefs,
}
impl DependencyGraphNode {
pub(crate) fn dependencies_dir(
&self,
version_id: &VersionId,
project_target: TargetKind,
) -> String {
if self.pkg_ref.use_new_structure() {
version_id.target().packages_folder(project_target)
} else {
"..".to_string()
}
}
/// Returns the folder to store the contents of the package in
pub fn container_folder(&self, package_id: &PackageId) -> PathBuf {
let (name, v_id) = package_id.parts();
if self.pkg_ref.is_wally_package() {
return PathBuf::from(format!(
"{}_{}@{}",
name.scope(),
name.name(),
v_id.version()
))
.join(name.name());
}
PathBuf::from(name.escaped())
.join(v_id.version().to_string())
.join(name.name())
}
/// Returns the folder to store the contents of the package in, starting from the project's package directory
pub fn container_folder_from_project(
&self,
package_id: &PackageId,
project: &Project,
manifest_target_kind: TargetKind,
) -> PathBuf {
project
.package_dir()
.join(manifest_target_kind.packages_folder(package_id.version_id().target()))
.join(PACKAGES_CONTAINER_NAME)
.join(self.container_folder(package_id))
}
}
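A worked example of the Wally branch of `container_folder` above, for a package `foo/bar` at version 1.2.3 (values made up; the non-Wally branch depends on `name.escaped()`, whose scheme is not shown in this diff):

```rust
use std::path::PathBuf;

fn main() {
    let (scope, name, version) = ("foo", "bar", "1.2.3");
    // mirrors format!("{}_{}@{}", name.scope(), name.name(), v_id.version()).join(name.name())
    let folder = PathBuf::from(format!("{scope}_{name}@{version}")).join(name);
    assert_eq!(folder, PathBuf::from("foo_bar@1.2.3").join("bar"));
}
```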
/// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>;
/// A dependency graph node with a `Target`
#[derive(Debug, Clone)]
pub struct DependencyGraphNodeWithTarget {
/// The target of the package
pub target: Target,
/// The node
pub node: DependencyGraphNode,
}
/// A graph of `DownloadedDependencyGraphNode`s
pub type DependencyGraphWithTarget = Graph<DependencyGraphNodeWithTarget>;
/// A trait for converting a graph to a different type of graph
pub trait ConvertableGraph<Node> {
/// Converts the graph to a different type of graph
fn convert(self) -> Graph<Node>;
}
impl ConvertableGraph<DependencyGraphNode> for DependencyGraphWithTarget {
fn convert(self) -> Graph<DependencyGraphNode> {
self.into_iter().map(|(id, node)| (id, node.node)).collect()
}
}

View file

@ -1,27 +1,21 @@
#![warn(missing_docs, clippy::redundant_closure_for_method_calls)]
#![deny(missing_docs)]
//! A package manager for the Luau programming language, supporting multiple runtimes including Roblox and Lune.
//! pesde has its own registry; however, it can also use Wally and Git repositories as package sources.
//! It has been designed with multiple targets in mind, namely Roblox, Lune, and Luau.
use crate::{
lockfile::Lockfile,
manifest::{target::TargetKind, Manifest},
source::{
traits::{PackageSource, RefreshOptions},
PackageSources,
},
manifest::Manifest,
source::{traits::PackageSource, PackageSources},
};
use async_stream::try_stream;
use async_stream::stream;
use fs_err::tokio as fs;
use futures::Stream;
use futures::{future::try_join_all, Stream};
use gix::sec::identity::Account;
use semver::{Version, VersionReq};
use std::{
collections::{HashMap, HashSet},
fmt::Debug,
hash::{Hash, Hasher},
path::{Path, PathBuf},
sync::Arc,
};
use tracing::instrument;
use wax::Pattern;
@ -30,10 +24,6 @@ use wax::Pattern;
pub mod download;
/// Utility for downloading and linking in the correct order
pub mod download_and_link;
/// Handling of engines
pub mod engine;
/// Graphs
pub mod graph;
/// Linking packages
pub mod linking;
/// Lockfile
@ -45,7 +35,6 @@ pub mod names;
/// Patching packages
#[cfg(feature = "patches")]
pub mod patches;
pub mod reporters;
/// Resolving packages
pub mod resolver;
/// Running scripts
@ -66,18 +55,13 @@ pub(crate) const LINK_LIB_NO_FILE_FOUND: &str = "____pesde_no_export_file_found"
/// The folder in which scripts are linked
pub const SCRIPTS_LINK_FOLDER: &str = ".pesde";
#[derive(Debug, Default)]
struct AuthConfigShared {
/// Struct containing the authentication configuration
#[derive(Debug, Default, Clone)]
pub struct AuthConfig {
tokens: HashMap<gix::Url, String>,
git_credentials: Option<Account>,
}
/// Struct containing the authentication configuration
#[derive(Debug, Clone, Default)]
pub struct AuthConfig {
shared: Arc<AuthConfigShared>,
}
impl AuthConfig {
/// Create a new `AuthConfig`
pub fn new() -> Self {
@ -85,12 +69,11 @@ impl AuthConfig {
}
/// Set the tokens
/// Panics if the `AuthConfig` is shared
pub fn with_tokens<I: IntoIterator<Item = (gix::Url, S)>, S: AsRef<str>>(
mut self,
tokens: I,
) -> Self {
Arc::get_mut(&mut self.shared).unwrap().tokens = tokens
self.tokens = tokens
.into_iter()
.map(|(url, s)| (url, s.as_ref().to_string()))
.collect();
@ -98,88 +81,79 @@ impl AuthConfig {
}
/// Set the git credentials
/// Panics if the `AuthConfig` is shared
pub fn with_git_credentials(mut self, git_credentials: Option<Account>) -> Self {
Arc::get_mut(&mut self.shared).unwrap().git_credentials = git_credentials;
self.git_credentials = git_credentials;
self
}
/// Get the tokens
pub fn tokens(&self) -> &HashMap<gix::Url, String> {
&self.shared.tokens
&self.tokens
}
/// Get the git credentials
pub fn git_credentials(&self) -> Option<&Account> {
self.shared.git_credentials.as_ref()
self.git_credentials.as_ref()
}
}
#[derive(Debug)]
struct ProjectShared {
package_dir: PathBuf,
workspace_dir: Option<PathBuf>,
data_dir: PathBuf,
cas_dir: PathBuf,
auth_config: AuthConfig,
}
/// The main struct of the pesde library, representing a project
/// Unlike `ProjectShared`, this struct is `Send` and `Sync` and is cheap to clone because it is `Arc`-backed.
#[derive(Debug, Clone)]
pub struct Project {
shared: Arc<ProjectShared>,
package_dir: PathBuf,
workspace_dir: Option<PathBuf>,
data_dir: PathBuf,
auth_config: AuthConfig,
cas_dir: PathBuf,
}
impl Project {
/// Create a new `Project`
pub fn new(
package_dir: impl AsRef<Path>,
workspace_dir: Option<impl AsRef<Path>>,
data_dir: impl AsRef<Path>,
cas_dir: impl AsRef<Path>,
pub fn new<P: AsRef<Path>, Q: AsRef<Path>, R: AsRef<Path>, S: AsRef<Path>>(
package_dir: P,
workspace_dir: Option<Q>,
data_dir: R,
cas_dir: S,
auth_config: AuthConfig,
) -> Self {
Project {
shared: Arc::new(ProjectShared {
package_dir: package_dir.as_ref().to_path_buf(),
workspace_dir: workspace_dir.map(|d| d.as_ref().to_path_buf()),
data_dir: data_dir.as_ref().to_path_buf(),
cas_dir: cas_dir.as_ref().to_path_buf(),
auth_config,
}),
cas_dir: cas_dir.as_ref().to_path_buf(),
}
}
/// The directory of the package
pub fn package_dir(&self) -> &Path {
&self.shared.package_dir
&self.package_dir
}
/// The directory of the workspace this package belongs to, if any
pub fn workspace_dir(&self) -> Option<&Path> {
self.shared.workspace_dir.as_deref()
self.workspace_dir.as_deref()
}
/// The directory to store general-purpose data
pub fn data_dir(&self) -> &Path {
&self.shared.data_dir
}
/// The CAS (content-addressable storage) directory
pub fn cas_dir(&self) -> &Path {
&self.shared.cas_dir
&self.data_dir
}
/// The authentication configuration
pub fn auth_config(&self) -> &AuthConfig {
&self.shared.auth_config
&self.auth_config
}
/// The CAS (content-addressable storage) directory
pub fn cas_dir(&self) -> &Path {
&self.cas_dir
}
/// Read the manifest file
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn read_manifest(&self) -> Result<String, errors::ManifestReadError> {
let string = fs::read_to_string(self.package_dir().join(MANIFEST_FILE_NAME)).await?;
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(string)
}
@ -187,89 +161,155 @@ impl Project {
/// Deserialize the manifest file
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_manifest(&self) -> Result<Manifest, errors::ManifestReadError> {
deser_manifest(self.package_dir()).await
}
/// Deserialize the manifest file of the workspace root
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_workspace_manifest(
&self,
) -> Result<Option<Manifest>, errors::ManifestReadError> {
let Some(workspace_dir) = self.workspace_dir() else {
return Ok(None);
};
deser_manifest(workspace_dir).await.map(Some)
let string = fs::read_to_string(self.package_dir.join(MANIFEST_FILE_NAME)).await?;
Ok(toml::from_str(&string)?)
}
/// Write the manifest file
#[instrument(skip(self, manifest), level = "debug")]
pub async fn write_manifest<S: AsRef<[u8]>>(&self, manifest: S) -> Result<(), std::io::Error> {
fs::write(
self.package_dir().join(MANIFEST_FILE_NAME),
manifest.as_ref(),
)
.await
fs::write(self.package_dir.join(MANIFEST_FILE_NAME), manifest.as_ref()).await
}
/// Deserialize the lockfile
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub async fn deser_lockfile(&self) -> Result<Lockfile, errors::LockfileReadError> {
let string = fs::read_to_string(self.package_dir().join(LOCKFILE_FILE_NAME)).await?;
Ok(match toml::from_str(&string) {
Ok(lockfile) => lockfile,
Err(e) => {
#[allow(deprecated)]
let Ok(old_lockfile) = toml::from_str::<lockfile::old::LockfileOld>(&string) else {
return Err(errors::LockfileReadError::Serde(e));
};
#[allow(deprecated)]
old_lockfile.to_new()
}
})
let string = fs::read_to_string(self.package_dir.join(LOCKFILE_FILE_NAME)).await?;
Ok(toml::from_str(&string)?)
}
/// Write the lockfile
#[instrument(skip(self, lockfile), level = "debug")]
pub async fn write_lockfile(
&self,
lockfile: &Lockfile,
lockfile: Lockfile,
) -> Result<(), errors::LockfileWriteError> {
let string = toml::to_string(lockfile)?;
fs::write(self.package_dir().join(LOCKFILE_FILE_NAME), string).await?;
let string = toml::to_string(&lockfile)?;
fs::write(self.package_dir.join(LOCKFILE_FILE_NAME), string).await?;
Ok(())
}
/// Get the workspace members
#[instrument(skip(self), level = "debug")]
pub async fn workspace_members(
pub async fn workspace_members<P: AsRef<Path> + Debug>(
&self,
dir: P,
can_ref_self: bool,
) -> Result<
impl Stream<Item = Result<(PathBuf, Manifest), errors::WorkspaceMembersError>>,
errors::WorkspaceMembersError,
> {
let dir = self.workspace_dir().unwrap_or(self.package_dir());
let manifest = deser_manifest(dir).await?;
let dir = dir.as_ref().to_path_buf();
let manifest = fs::read_to_string(dir.join(MANIFEST_FILE_NAME))
.await
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(dir.to_path_buf(), Box::new(e))
})?;
let members = matching_globs(
dir,
manifest.workspace_members.iter().map(String::as_str),
manifest.workspace_members.iter().map(|s| s.as_str()),
false,
can_ref_self,
)
.await?;
Ok(try_stream! {
Ok(stream! {
for path in members {
let manifest = deser_manifest(&path).await?;
yield (path, manifest);
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
.await
.map_err(errors::WorkspaceMembersError::ManifestMissing)?;
let manifest = toml::from_str::<Manifest>(&manifest).map_err(|e| {
errors::WorkspaceMembersError::ManifestDeser(path.clone(), Box::new(e))
})?;
yield Ok((path, manifest));
}
})
}
}
/// Gets all matching paths in a directory
#[deprecated(
since = "0.5.0-rc.13",
note = "use `matching_globs` instead, which does not have the old behaviour of including whole directories by their name (`src` instead of `src/**`)"
)]
#[instrument(ret, level = "trace")]
pub async fn matching_globs_old_behaviour<
'a,
P: AsRef<Path> + Debug,
I: IntoIterator<Item = &'a str> + Debug,
>(
dir: P,
globs: I,
relative: bool,
) -> Result<HashSet<PathBuf>, errors::MatchingGlobsError> {
let (negative_globs, positive_globs) = globs
.into_iter()
.partition::<Vec<_>, _>(|glob| glob.starts_with('!'));
let negative_globs = wax::any(
negative_globs
.into_iter()
.map(|glob| wax::Glob::new(&glob[1..]))
.collect::<Result<Vec<_>, _>>()?,
)?;
let (positive_globs, file_names) = positive_globs
.into_iter()
// only globs we can be sure of (maintaining compatibility with old "only file/dir name" system)
.partition::<Vec<_>, _>(|glob| glob.contains('/'));
let file_names = file_names.into_iter().collect::<HashSet<_>>();
let positive_globs = wax::any(
positive_globs
.into_iter()
.map(wax::Glob::new)
.collect::<Result<Vec<_>, _>>()?,
)?;
let mut read_dirs = vec![(fs::read_dir(dir.as_ref().to_path_buf()).await?, false)];
let mut paths = HashSet::new();
let mut is_root = true;
while let Some((mut read_dir, is_entire_dir_included)) = read_dirs.pop() {
while let Some(entry) = read_dir.next_entry().await? {
let path = entry.path();
let relative_path = path.strip_prefix(dir.as_ref()).unwrap();
let file_name = path.file_name().unwrap();
let is_filename_match =
is_root && file_name.to_str().is_some_and(|s| file_names.contains(s));
if entry.file_type().await?.is_dir() {
read_dirs.push((
fs::read_dir(&path).await?,
is_entire_dir_included || is_filename_match,
));
if is_filename_match {
tracing::warn!("directory name usage found for {}. this is deprecated and will be removed in the future", path.display());
}
}
if (is_entire_dir_included || is_filename_match)
|| (positive_globs.is_match(relative_path)
&& !negative_globs.is_match(relative_path))
{
paths.insert(if relative {
relative_path.to_path_buf()
} else {
path.to_path_buf()
});
}
}
is_root = false;
}
Ok(paths)
}
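Both glob helpers begin by splitting the input into negative globs (those prefixed with `!`, which subtract from the result) and positive globs. A self-contained sketch of that partition step:

```rust
fn main() {
    let globs = ["src/**", "!src/generated/**", "docs/**"];
    let (negative, positive): (Vec<&str>, Vec<&str>) =
        globs.into_iter().partition(|glob| glob.starts_with('!'));
    assert_eq!(negative, ["!src/generated/**"]);
    assert_eq!(positive, ["src/**", "docs/**"]);
    // the leading `!` is stripped (`&glob[1..]`) before the negative set is compiled
}
```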
/// Gets all matching paths in a directory
#[instrument(ret, level = "trace")]
pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &'a str> + Debug>(
@ -329,119 +369,24 @@ pub async fn matching_globs<'a, P: AsRef<Path> + Debug, I: IntoIterator<Item = &
Ok(paths)
}
/// A struct containing sources already having been refreshed
#[derive(Debug, Clone, Default)]
pub struct RefreshedSources(Arc<tokio::sync::Mutex<HashSet<u64>>>);
impl RefreshedSources {
/// Create a new empty `RefreshedSources`
pub fn new() -> Self {
RefreshedSources::default()
}
/// Refreshes the source asynchronously if it has not already been refreshed.
/// Subsequent calls for the same source are no-ops.
pub async fn refresh(
&self,
source: &PackageSources,
options: &RefreshOptions,
) -> Result<(), source::errors::RefreshError> {
let mut hasher = std::hash::DefaultHasher::new();
source.hash(&mut hasher);
let hash = hasher.finish();
let mut refreshed_sources = self.0.lock().await;
if refreshed_sources.insert(hash) {
source.refresh(options).await
/// Refreshes the sources asynchronously
pub async fn refresh_sources<I: Iterator<Item = PackageSources>>(
project: &Project,
sources: I,
refreshed_sources: &mut HashSet<PackageSources>,
) -> Result<(), Box<source::errors::RefreshError>> {
try_join_all(sources.map(|source| {
let needs_refresh = refreshed_sources.insert(source.clone());
async move {
if needs_refresh {
source.refresh(project).await.map_err(Box::new)
} else {
Ok(())
}
}
}
async fn deser_manifest(path: &Path) -> Result<Manifest, errors::ManifestReadError> {
let string = fs::read_to_string(path.join(MANIFEST_FILE_NAME)).await?;
toml::from_str(&string).map_err(|e| errors::ManifestReadError::Serde(path.to_path_buf(), e))
}
/// Find the project & workspace directory roots
pub async fn find_roots(
cwd: PathBuf,
) -> Result<(PathBuf, Option<PathBuf>), errors::FindRootsError> {
let mut current_path = Some(cwd.clone());
let mut project_root = None::<PathBuf>;
let mut workspace_dir = None::<PathBuf>;
async fn get_workspace_members(
path: &Path,
) -> Result<HashSet<PathBuf>, errors::FindRootsError> {
let manifest = deser_manifest(path).await?;
if manifest.workspace_members.is_empty() {
return Ok(HashSet::new());
}
matching_globs(
path,
manifest.workspace_members.iter().map(String::as_str),
false,
false,
)
}))
.await
.map_err(errors::FindRootsError::Globbing)
}
while let Some(path) = current_path {
current_path = path.parent().map(Path::to_path_buf);
if !path.join(MANIFEST_FILE_NAME).exists() {
continue;
}
match (project_root.as_ref(), workspace_dir.as_ref()) {
(Some(project_root), Some(workspace_dir)) => {
return Ok((project_root.clone(), Some(workspace_dir.clone())));
}
(Some(project_root), None) => {
if get_workspace_members(&path).await?.contains(project_root) {
workspace_dir = Some(path);
}
}
(None, None) => {
if get_workspace_members(&path).await?.contains(&cwd) {
// initializing a new member of a workspace
return Ok((cwd, Some(path)));
} else {
project_root = Some(path);
}
}
(None, Some(_)) => unreachable!(),
}
}
// we mustn't expect the project root to be found, as that would
// disable the ability to run pesde in a non-project directory (for example to init it)
Ok((project_root.unwrap_or(cwd), workspace_dir))
}
/// Returns whether a version matches a version requirement
/// Differs from `VersionReq::matches` in that EVERY version matches `*`
pub fn version_matches(req: &VersionReq, version: &Version) -> bool {
*req == VersionReq::STAR || req.matches(version)
}
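The special case matters for pre-releases: the semver crate excludes them from `*`, which would otherwise make a pre-release version satisfy no requirement at all. A quick demonstration:

use semver::{Version, VersionReq};

fn version_matches(req: &VersionReq, version: &Version) -> bool {
    *req == VersionReq::STAR || req.matches(version)
}

fn main() {
    let pre = Version::parse("1.0.0-rc.1").unwrap();
    // semver's own `*` does not match pre-release versions...
    assert!(!VersionReq::STAR.matches(&pre));
    // ...while this helper special-cases `*` to match everything.
    assert!(version_matches(&VersionReq::STAR, &pre));
}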
pub(crate) fn all_packages_dirs() -> HashSet<String> {
let mut dirs = HashSet::new();
for target_kind_a in TargetKind::VARIANTS {
for target_kind_b in TargetKind::VARIANTS {
dirs.insert(target_kind_a.packages_folder(*target_kind_b));
}
}
dirs
.map(|_| ())
}
/// Errors that can occur when using the pesde library
@ -458,8 +403,8 @@ pub mod errors {
Io(#[from] std::io::Error),
/// An error occurred while deserializing the manifest file
#[error("error deserializing manifest file at {0}")]
Serde(PathBuf, #[source] toml::de::Error),
#[error("error deserializing manifest file")]
Serde(#[from] toml::de::Error),
}
/// Errors that can occur when reading the lockfile
@ -492,9 +437,13 @@ pub mod errors {
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum WorkspaceMembersError {
/// An error occurred parsing the manifest file
#[error("error parsing manifest file")]
ManifestParse(#[from] ManifestReadError),
/// The manifest file could not be found
#[error("missing manifest file")]
ManifestMissing(#[source] std::io::Error),
/// An error occurred deserializing the manifest file
#[error("error deserializing manifest file at {0}")]
ManifestDeser(PathBuf, #[source] Box<toml::de::Error>),
/// An error occurred interacting with the filesystem
#[error("error interacting with the filesystem")]
@ -517,17 +466,4 @@ pub mod errors {
#[error("error building glob")]
BuildGlob(#[from] wax::BuildError),
}
/// Errors that can occur when finding project roots
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum FindRootsError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] ManifestReadError),
/// Globbing failed
#[error("error globbing")]
Globbing(#[from] MatchingGlobsError),
}
}

View file

@ -2,8 +2,7 @@ use std::path::{Component, Path};
use crate::manifest::{target::TargetKind, Manifest};
use full_moon::{ast::luau::ExportedTypeDeclaration, visitors::Visitor};
use relative_path::RelativePath;
use tracing::instrument;
use relative_path::RelativePathBuf;
struct TypeVisitor {
types: Vec<String>,
@ -42,25 +41,13 @@ impl Visitor for TypeVisitor {
}
}
pub(crate) fn get_file_types(file: &str) -> Vec<String> {
let ast = match full_moon::parse(file) {
Ok(ast) => ast,
Err(err) => {
tracing::error!(
"failed to parse file to extract types:\n{}",
err.into_iter()
.map(|err| format!("\t- {err}"))
.collect::<Vec<_>>()
.join("\n")
);
return vec![];
}
};
/// Get the types exported by a file
pub fn get_file_types(file: &str) -> Result<Vec<String>, Vec<full_moon::Error>> {
let ast = full_moon::parse(file)?;
let mut visitor = TypeVisitor { types: vec![] };
visitor.visit_ast(&ast);
visitor.types
Ok(visitor.types)
}
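A hedged usage sketch of the new fallible API (assuming full_moon is built with its Luau parsing feature, which the exported-type visitor requires): the caller now decides how to surface parse failures instead of silently receiving an empty list.

use pesde::linking::generator::get_file_types;

fn main() {
    let source = "export type Foo = { bar: number }";
    match get_file_types(source) {
        Ok(types) => println!("exported types: {types:?}"),
        Err(errors) => eprintln!("{} parse error(s)", errors.len()),
    }
}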
/// Generate a linking module for a library
@ -117,12 +104,11 @@ fn luau_style_path(path: &Path) -> String {
// This function should be simplified (especially to reduce the number of arguments),
// but it's not clear how to do that while maintaining the current functionality.
/// Get the require path for a library
#[instrument(skip(project_manifest), level = "trace")]
#[allow(clippy::too_many_arguments)]
pub fn get_lib_require_path(
target: TargetKind,
target: &TargetKind,
base_dir: &Path,
lib_file: &RelativePath,
lib_file: &RelativePathBuf,
destination_dir: &Path,
use_new_structure: bool,
root_container_dir: &Path,
@ -130,10 +116,11 @@ pub fn get_lib_require_path(
project_manifest: &Manifest,
) -> Result<String, errors::GetLibRequirePath> {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
tracing::debug!("diffed lib path: {}", path.display());
let path = if use_new_structure {
tracing::debug!("using new structure for require path with {lib_file:?}");
lib_file.to_path(path)
} else {
tracing::debug!("using old structure for require path with {lib_file:?}");
path
};
@ -201,14 +188,12 @@ return require({require_path})"#,
}
/// Get the require path for a binary
#[instrument(level = "trace")]
pub fn get_bin_require_path(
base_dir: &Path,
bin_file: &RelativePath,
bin_file: &RelativePathBuf,
destination_dir: &Path,
) -> String {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
tracing::debug!("diffed bin path: {}", path.display());
let path = bin_file.to_path(path);
luau_style_path(&path)
@ -220,14 +205,12 @@ pub fn generate_script_linking_module(require_path: &str) -> String {
}
/// Get the require path for a script
#[instrument(level = "trace")]
pub fn get_script_require_path(
base_dir: &Path,
script_file: &RelativePath,
script_file: &RelativePathBuf,
destination_dir: &Path,
) -> String {
let path = pathdiff::diff_paths(destination_dir, base_dir).unwrap();
tracing::debug!("diffed script path: {}", path.display());
let path = script_file.to_path(path);
luau_style_path(&path)
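All three require-path helpers start from the same `pathdiff::diff_paths` call; a small sketch of that step with illustrative folder names (the real base and destination come from the layout computed during linking):

use std::path::Path;

fn main() {
    let base = Path::new("/project/lune_packages");
    let dest = Path::new("/project/lune_packages/.pesde/scope+name/1.2.3/name");
    // Relative hop from the linker file's folder to the package folder,
    // which `luau_style_path` then renders as a Luau require string.
    let diffed = pathdiff::diff_paths(dest, base).unwrap();
    assert_eq!(diffed, Path::new(".pesde/scope+name/1.2.3/name"));
}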

View file

@ -1,328 +0,0 @@
use crate::{
all_packages_dirs, graph::DependencyGraphWithTarget, manifest::Alias, util::remove_empty_dir,
Project, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
};
use fs_err::tokio as fs;
use futures::FutureExt;
use std::{
collections::HashSet,
path::{Component, Path, PathBuf},
sync::Arc,
};
use tokio::task::JoinSet;
fn index_entry(
entry: fs::DirEntry,
packages_index_dir: &Path,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
used_paths: &Arc<HashSet<PathBuf>>,
#[cfg(feature = "patches")] patched_packages: &Arc<HashSet<PathBuf>>,
) {
fn get_package_name_from_container(container: &Path) -> (bool, String) {
let Component::Normal(first_component) = container.components().next().unwrap() else {
panic!("invalid container path: `{}`", container.display());
};
let first_component = first_component.to_string_lossy();
let Some((name, _)) = first_component.split_once('@') else {
return (
false,
first_component.split_once('+').unwrap().1.to_string(),
);
};
(true, name.split_once('_').unwrap().1.to_string())
}
let path = entry.path();
let path_relative = path.strip_prefix(packages_index_dir).unwrap().to_path_buf();
#[cfg_attr(not(feature = "patches"), allow(unused_variables))]
let (is_wally, package_name) = get_package_name_from_container(&path_relative);
let used_paths = used_paths.clone();
#[cfg(feature = "patches")]
let patched_packages = patched_packages.clone();
tasks.spawn(async move {
if is_wally {
#[cfg(not(feature = "wally-compat"))]
{
tracing::error!(
"found Wally package in index despite feature being disabled at `{}`",
path.display()
);
}
#[cfg(feature = "wally-compat")]
{
if !used_paths.contains(&path_relative) {
fs::remove_dir_all(path).await?;
} else {
#[cfg(feature = "patches")]
if !patched_packages.contains(&path_relative) {
crate::patches::remove_patch(path.join(package_name)).await?;
}
}
return Ok(());
}
}
let mut tasks = JoinSet::<Result<_, errors::RemoveUnusedError>>::new();
let mut entries = fs::read_dir(&path).await?;
while let Some(entry) = entries.next_entry().await? {
let version = entry.file_name();
let path_relative = path_relative.join(&version);
if used_paths.contains(&path_relative) {
#[cfg(feature = "patches")]
if !patched_packages.contains(&path_relative) {
let path = entry.path().join(&package_name);
tasks.spawn(async {
crate::patches::remove_patch(path).await.map_err(Into::into)
});
}
continue;
}
let path = entry.path();
tasks.spawn(async { fs::remove_dir_all(path).await.map_err(Into::into) });
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&path).await.map_err(Into::into)
});
}
fn packages_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
expected_aliases: &Arc<HashSet<Alias>>,
) {
let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_file() {
return Ok(());
}
let path = entry.path();
let name = path
.file_stem()
.unwrap()
.to_str()
.expect("non UTF-8 file name in packages folder");
let name = name.strip_suffix(".bin").unwrap_or(name);
let name = match name.parse::<Alias>() {
Ok(name) => name,
Err(e) => {
tracing::error!("invalid alias in packages folder: {e}");
return Ok(());
}
};
if !expected_aliases.contains(&name) {
fs::remove_file(path).await?;
}
Ok(())
});
}
fn scripts_entry(
entry: fs::DirEntry,
tasks: &mut JoinSet<Result<(), errors::RemoveUnusedError>>,
expected_aliases: &Arc<HashSet<Alias>>,
) {
let expected_aliases = expected_aliases.clone();
tasks.spawn(async move {
if !entry.file_type().await?.is_dir() {
return Ok(());
}
let path = entry.path();
let name = path
.file_name()
.unwrap()
.to_str()
.expect("non UTF-8 file name in scripts folder");
let name = match name.parse::<Alias>() {
Ok(name) => name,
Err(e) => {
tracing::error!("invalid alias in scripts folder: {e}");
return Ok(());
}
};
if !expected_aliases.contains(&name) {
fs::remove_dir_all(&path).await?;
}
Ok(())
});
}
impl Project {
/// Removes unused packages from the project
pub async fn remove_unused(
&self,
graph: &DependencyGraphWithTarget,
) -> Result<(), errors::RemoveUnusedError> {
let manifest = self.deser_manifest().await?;
let used_paths = graph
.iter()
.map(|(id, node)| {
node.node
.container_folder(id)
.parent()
.unwrap()
.to_path_buf()
})
.collect::<HashSet<_>>();
let used_paths = Arc::new(used_paths);
#[cfg(feature = "patches")]
let patched_packages = manifest
.patches
.iter()
.flat_map(|(name, versions)| {
versions
.iter()
.map(|(v_id, _)| crate::source::ids::PackageId::new(name.clone(), v_id.clone()))
})
.filter_map(|id| graph.get(&id).map(|node| (id, node)))
.map(|(id, node)| {
node.node
.container_folder(&id)
.parent()
.unwrap()
.to_path_buf()
})
.collect::<HashSet<_>>();
#[cfg(feature = "patches")]
let patched_packages = Arc::new(patched_packages);
let mut tasks = all_packages_dirs()
.into_iter()
.map(|folder| {
let packages_dir = self.package_dir().join(&folder);
let packages_index_dir = packages_dir.join(PACKAGES_CONTAINER_NAME);
let used_paths = used_paths.clone();
#[cfg(feature = "patches")]
let patched_packages = patched_packages.clone();
let expected_aliases = graph
.iter()
.filter(|(id, _)| {
manifest
.target
.kind()
.packages_folder(id.version_id().target())
== folder
})
.filter_map(|(_, node)| {
node.node.direct.as_ref().map(|(alias, _, _)| alias.clone())
})
.collect::<HashSet<_>>();
let expected_aliases = Arc::new(expected_aliases);
async move {
let mut index_entries = match fs::read_dir(&packages_index_dir).await {
Ok(entries) => entries,
Err(e) if e.kind() == std::io::ErrorKind::NotFound => return Ok(()),
Err(e) => return Err(e.into()),
};
// we don't handle NotFound here because the upper level will handle it
let mut packages_entries = fs::read_dir(&packages_dir).await?;
let mut tasks = JoinSet::new();
loop {
tokio::select! {
Some(entry) = index_entries.next_entry().map(Result::transpose) => {
index_entry(
entry?,
&packages_index_dir,
&mut tasks,
&used_paths,
#[cfg(feature = "patches")]
&patched_packages,
);
}
Some(entry) = packages_entries.next_entry().map(Result::transpose) => {
packages_entry(
entry?,
&mut tasks,
&expected_aliases,
);
}
else => break,
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&packages_index_dir).await?;
remove_empty_dir(&packages_dir).await?;
Ok::<_, errors::RemoveUnusedError>(())
}
})
.collect::<JoinSet<_>>();
let scripts_dir = self.package_dir().join(SCRIPTS_LINK_FOLDER);
match fs::read_dir(&scripts_dir).await {
Ok(mut entries) => {
let expected_aliases = graph
.iter()
.filter_map(|(_, node)| {
node.node
.direct
.as_ref()
.map(|(alias, _, _)| alias.clone())
.filter(|_| node.target.scripts().is_some_and(|s| !s.is_empty()))
})
.collect::<HashSet<_>>();
let expected_aliases = Arc::new(expected_aliases);
while let Some(entry) = entries.next_entry().await? {
scripts_entry(entry, &mut tasks, &expected_aliases);
}
}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
Err(e) => return Err(e.into()),
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
}
remove_empty_dir(&scripts_dir).await?;
Ok(())
}
}
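At its core, `remove_unused` applies one retention rule: every container path referenced by the dependency graph stays, and everything else on disk is a removal candidate. A minimal sketch of that rule with stand-in inputs:

use std::collections::HashSet;
use std::path::PathBuf;

/// Stand-in sketch: anything on disk whose container path is not
/// referenced by the in-memory graph is a removal candidate.
fn removal_candidates<'a>(
    on_disk: &'a [PathBuf],
    used_paths: &'a HashSet<PathBuf>,
) -> impl Iterator<Item = &'a PathBuf> {
    on_disk.iter().filter(move |p| !used_paths.contains(*p))
}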
/// Errors that can occur when using incremental installs
pub mod errors {
use thiserror::Error;
/// Errors that can occur when removing unused packages
#[derive(Debug, Error)]
pub enum RemoveUnusedError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
/// IO error
#[error("IO error")]
Io(#[from] std::io::Error),
/// Removing a patch failed
#[cfg(feature = "patches")]
#[error("error removing patch")]
PatchRemove(#[from] crate::patches::errors::ApplyPatchError),
}
}

View file

@ -1,29 +1,29 @@
use crate::{
graph::{DependencyGraphNodeWithTarget, DependencyGraphWithTarget},
linking::generator::get_file_types,
manifest::{Alias, Manifest},
scripts::{execute_script, ExecuteScriptHooks, ScriptName},
lockfile::{DownloadedDependencyGraphNode, DownloadedGraph},
manifest::Manifest,
names::PackageNames,
scripts::{execute_script, ScriptName},
source::{
fs::{cas_path, store_in_cas},
ids::PackageId,
traits::PackageRef,
version_id::VersionId,
},
Project, LINK_LIB_NO_FILE_FOUND, PACKAGES_CONTAINER_NAME, SCRIPTS_LINK_FOLDER,
};
use fs_err::tokio as fs;
use futures::future::try_join_all;
use std::{
collections::HashMap,
ffi::OsStr,
path::{Path, PathBuf},
sync::Arc,
};
use tokio::task::{spawn_blocking, JoinSet};
use tokio::task::spawn_blocking;
use tracing::{instrument, Instrument};
/// Generates linking modules for a project
pub mod generator;
/// Incremental installs
pub mod incremental;
async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
let p = path.as_ref();
@ -32,39 +32,23 @@ async fn create_and_canonicalize<P: AsRef<Path>>(path: P) -> std::io::Result<PathBuf> {
}
async fn write_cas(destination: PathBuf, cas_dir: &Path, contents: &str) -> std::io::Result<()> {
let hash = store_in_cas(cas_dir, contents.as_bytes()).await?;
let hash = store_in_cas(cas_dir, contents.as_bytes(), |_| async { Ok(()) }).await?;
match fs::remove_file(&destination).await {
Ok(_) => {}
Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
// TODO: investigate why this happens and whether we can avoid it without ignoring all PermissionDenied errors
#[cfg(windows)]
Err(e) if e.kind() == std::io::ErrorKind::PermissionDenied => {}
Err(e) => return Err(e),
};
fs::hard_link(cas_path(&hash, cas_dir), destination).await
}
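The CAS step above follows a store-then-hard-link pattern: contents land in content-addressable storage once, and each linker file is a hard link to that copy, so duplicates cost no extra disk. A pared-down sketch of the linking half (the `cas_file` path is assumed to be the output of `cas_path`):

use fs_err::tokio as fs;
use std::path::Path;

async fn link_from_cas(cas_file: &Path, destination: &Path) -> std::io::Result<()> {
    // Clear any stale linker file; a missing one is fine.
    match fs::remove_file(destination).await {
        Ok(()) => {}
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {}
        Err(e) => return Err(e),
    }
    // Hard links make every linker file share the single CAS copy.
    fs::hard_link(cas_file, destination).await
}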
#[derive(Debug, Clone, Copy)]
struct LinkingExecuteScriptHooks;
impl ExecuteScriptHooks for LinkingExecuteScriptHooks {
fn not_found(&self, script: ScriptName) {
tracing::warn!(
"not having a `{script}` script in the manifest might cause issues with linking"
);
}
}
type PackageTypes = HashMap<PackageId, Vec<String>>;
impl Project {
/// Links the dependencies of the project
#[instrument(skip(self, graph), level = "debug")]
pub(crate) async fn link_dependencies(
pub async fn link_dependencies(
&self,
graph: Arc<DependencyGraphWithTarget>,
graph: &DownloadedGraph,
with_types: bool,
) -> Result<(), errors::LinkingError> {
let manifest = self.deser_manifest().await?;
@ -73,7 +57,7 @@ impl Project {
// step 1. link all non-wally packages (and their dependencies) temporarily without types
// we do this separately to allow the required tools for the scripts to be installed
self.link(&graph, &manifest, &Arc::new(PackageTypes::default()), false)
self.link(graph, &manifest, &Arc::new(Default::default()), false)
.await?;
if !with_types {
@ -81,25 +65,25 @@ impl Project {
}
// step 2. extract the types from libraries, prepare Roblox packages for syncing
let mut tasks = graph
.iter()
.map(|(package_id, node)| {
let span =
tracing::info_span!("extract types", package_id = package_id.to_string());
let roblox_sync_config_gen_script = manifest
.scripts
.get(&ScriptName::RobloxSyncConfigGenerator.to_string());
let package_id = package_id.clone();
let node = node.clone();
let project = self.clone();
async move {
let package_types = try_join_all(graph.iter().map(|(name, versions)| async move {
Ok::<_, errors::LinkingError>((
name,
try_join_all(versions.iter().map(|(version_id, node)| async move {
let Some(lib_file) = node.target.lib_path() else {
return Ok((package_id, vec![]));
return Ok((version_id, vec![]));
};
let container_folder = node.node.container_folder_from_project(
&package_id,
&project,
manifest_target_kind,
let container_folder = node.node.container_folder(
&self
.package_dir()
.join(manifest_target_kind.packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
name,
version_id.version(),
);
let types = if lib_file.as_str() != LINK_LIB_NO_FILE_FOUND {
@ -115,9 +99,18 @@ impl Project {
Err(e) => return Err(e.into()),
};
let types = spawn_blocking(move || get_file_types(&contents))
let types = match spawn_blocking(move || get_file_types(&contents))
.await
.unwrap();
.unwrap()
{
Ok(types) => types,
Err(e) => {
return Err(errors::LinkingError::FullMoon(
lib_file.display().to_string(),
e,
))
}
};
tracing::debug!("contains {} exported types", types.len());
@ -127,65 +120,66 @@ impl Project {
};
if let Some(build_files) = Some(&node.target)
.filter(|_| !node.node.pkg_ref.is_wally_package())
.filter(|_| !node.node.pkg_ref.like_wally())
.and_then(|t| t.build_files())
{
let Some(script_path) = roblox_sync_config_gen_script else {
tracing::warn!("not having a `{}` script in the manifest might cause issues with Roblox linking", ScriptName::RobloxSyncConfigGenerator);
return Ok((version_id, types));
};
execute_script(
ScriptName::RobloxSyncConfigGenerator,
&project,
LinkingExecuteScriptHooks,
&script_path.to_path(self.package_dir()),
std::iter::once(container_folder.as_os_str())
.chain(build_files.iter().map(OsStr::new)),
self,
false,
).await
.map_err(|e| {
errors::LinkingError::GenerateRobloxSyncConfig(
container_folder.display().to_string(),
e,
)
.await
.map_err(errors::LinkingError::ExecuteScript)?;
})?;
}
Ok((package_id, types))
}
.instrument(span)
})
.collect::<JoinSet<_>>();
let mut package_types = PackageTypes::new();
while let Some(task) = tasks.join_next().await {
let (package_id, types) = task.unwrap()?;
package_types.insert(package_id, types);
}
Ok((version_id, types))
}.instrument(tracing::debug_span!("extract types", name = name.to_string(), version_id = version_id.to_string()))))
.await?
.into_iter()
.collect::<HashMap<_, _>>(),
))
}))
.await?
.into_iter()
.collect::<HashMap<_, _>>();
// step 3. link all packages (and their dependencies), this time with types
self.link(&graph, &manifest, &Arc::new(package_types), true)
self.link(graph, &manifest, &Arc::new(package_types), true)
.await
}
async fn link(
#[allow(clippy::too_many_arguments)]
async fn link_files(
&self,
graph: &Arc<DependencyGraphWithTarget>,
manifest: &Arc<Manifest>,
package_types: &Arc<PackageTypes>,
is_complete: bool,
) -> Result<(), errors::LinkingError> {
let package_dir_canonical = fs::canonicalize(self.package_dir()).await?;
let mut tasks = JoinSet::<Result<_, errors::LinkingError>>::new();
let mut link_files = |base_folder: &Path,
base_folder: &Path,
container_folder: &Path,
root_container_folder: &Path,
relative_container_folder: &Path,
node: &DependencyGraphNodeWithTarget,
package_id: &PackageId,
alias: &Alias,
is_root: bool|
-> Result<(), errors::LinkingError> {
node: &DownloadedDependencyGraphNode,
name: &PackageNames,
version_id: &VersionId,
alias: &str,
package_types: &HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>,
manifest: &Manifest,
) -> Result<(), errors::LinkingError> {
static NO_TYPES: Vec<String> = Vec::new();
if let Some(lib_file) = node.target.lib_path() {
let destination = base_folder.join(format!("{alias}.luau"));
let lib_module = generator::generate_lib_linking_module(
&generator::get_lib_require_path(
node.target.kind(),
&node.target.kind(),
base_folder,
lib_file,
container_folder,
@ -194,200 +188,176 @@ impl Project {
relative_container_folder,
manifest,
)?,
package_types.get(package_id).unwrap_or(&NO_TYPES),
package_types
.get(name)
.and_then(|v| v.get(version_id))
.unwrap_or(&NO_TYPES),
);
let cas_dir = self.cas_dir().to_path_buf();
tasks.spawn(async move {
write_cas(destination, &cas_dir, &lib_module)
.await
.map_err(Into::into)
});
write_cas(
base_folder.join(format!("{alias}.luau")),
self.cas_dir(),
&lib_module,
)
.await?;
}
if let Some(bin_file) = node.target.bin_path() {
let destination = base_folder.join(format!("{alias}.bin.luau"));
let bin_module = generator::generate_bin_linking_module(
container_folder,
&generator::get_bin_require_path(base_folder, bin_file, container_folder),
);
let cas_dir = self.cas_dir().to_path_buf();
tasks.spawn(async move {
write_cas(destination, &cas_dir, &bin_module)
.await
.map_err(Into::into)
});
write_cas(
base_folder.join(format!("{alias}.bin.luau")),
self.cas_dir(),
&bin_module,
)
.await?;
}
if let Some(scripts) = node
.target
.scripts()
.filter(|s| !s.is_empty() && node.node.direct.is_some() && is_root)
{
let scripts_base = package_dir_canonical
.join(SCRIPTS_LINK_FOLDER)
.join(alias.as_str());
if let Some(scripts) = node.target.scripts().filter(|s| !s.is_empty()) {
let scripts_base =
create_and_canonicalize(self.package_dir().join(SCRIPTS_LINK_FOLDER).join(alias))
.await?;
for (script_name, script_path) in scripts {
let destination = scripts_base.join(format!("{script_name}.luau"));
let script_module = generator::generate_script_linking_module(
&generator::get_script_require_path(
let script_module =
generator::generate_script_linking_module(&generator::get_script_require_path(
&scripts_base,
script_path,
container_folder,
),
);
let cas_dir = self.cas_dir().to_path_buf();
));
tasks.spawn(async move {
fs::create_dir_all(destination.parent().unwrap()).await?;
write_cas(destination, &cas_dir, &script_module)
.await
.map_err(Into::into)
});
write_cas(
scripts_base.join(format!("{script_name}.luau")),
self.cas_dir(),
&script_module,
)
.await?;
}
}
Ok(())
};
}
let mut node_tasks = graph
.iter()
.map(|(id, node)| {
let base_folder = self.package_dir().join(
manifest
.target
.kind()
.packages_folder(id.version_id().target()),
async fn link(
&self,
graph: &DownloadedGraph,
manifest: &Arc<Manifest>,
package_types: &Arc<HashMap<&PackageNames, HashMap<&VersionId, Vec<String>>>>,
is_complete: bool,
) -> Result<(), errors::LinkingError> {
try_join_all(graph.iter().flat_map(|(name, versions)| {
versions.iter().map(|(version_id, node)| {
let name = name.clone();
let manifest = manifest.clone();
let package_types = package_types.clone();
let span = tracing::info_span!(
"link",
name = name.to_string(),
version_id = version_id.to_string()
);
let id = id.clone();
let node = node.clone();
async move {
Ok::<_, errors::LinkingError>((
id,
node,
create_and_canonicalize(base_folder).await?,
))
}
})
.collect::<JoinSet<_>>();
let mut dependency_tasks = JoinSet::<Result<_, errors::LinkingError>>::new();
loop {
tokio::select! {
Some(res) = node_tasks.join_next() => {
let (package_id, node, base_folder) = res.unwrap()?;
let (node_container_folder, node_packages_folder) = {
let base_folder = create_and_canonicalize(
self.package_dir()
.join(manifest.target.kind().packages_folder(version_id.target())),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder =
packages_container_folder.join(node.node.container_folder(&package_id));
let container_folder = node.node.container_folder(
&packages_container_folder,
&name,
version_id.version(),
);
if let Some((alias, _, _)) = &node.node.direct {
link_files(
self.link_files(
&base_folder,
&container_folder,
&base_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&node,
&package_id,
node,
&name,
version_id,
alias,
true,
)?;
&package_types,
&manifest,
)
.await?;
}
(container_folder, base_folder)
};
for (dep_id, dep_alias) in &node.node.dependencies {
let dep_id = dep_id.clone();
let dep_alias = dep_alias.clone();
let graph = graph.clone();
let node = node.clone();
let package_id = package_id.clone();
let node_container_folder = node_container_folder.clone();
let node_packages_folder = node_packages_folder.clone();
let package_dir = self.package_dir().to_path_buf();
dependency_tasks.spawn(async move {
let Some(dep_node) = graph.get(&dep_id) else {
return if is_complete {
Err(errors::LinkingError::DependencyNotFound(
dep_id.to_string(),
package_id.to_string(),
))
} else {
Ok(None)
};
};
let base_folder = package_dir.join(
package_id
.version_id()
.target()
.packages_folder(dep_id.version_id().target()),
);
let linker_folder = node_container_folder.join(node.node.dependencies_dir(
package_id.version_id(),
dep_id.version_id().target(),
));
Ok(Some((
dep_node.clone(),
dep_id,
dep_alias,
create_and_canonicalize(base_folder).await?,
create_and_canonicalize(linker_folder).await?,
node_packages_folder,
)))
});
}
},
Some(res) = dependency_tasks.join_next() => {
let Some((
dependency_node,
dependency_id,
dependency_alias,
base_folder,
linker_folder,
node_packages_folder,
)) = res.unwrap()?
for (dependency_name, (dependency_version_id, dependency_alias)) in
&node.node.dependencies
{
let Some(dependency_node) = graph
.get(dependency_name)
.and_then(|v| v.get(dependency_version_id))
else {
if is_complete {
return Err(errors::LinkingError::DependencyNotFound(
format!("{dependency_name}@{dependency_version_id}"),
format!("{name}@{version_id}"),
));
}
continue;
};
let base_folder = create_and_canonicalize(
self.package_dir().join(
version_id
.target()
.packages_folder(dependency_version_id.target()),
),
)
.await?;
let packages_container_folder = base_folder.join(PACKAGES_CONTAINER_NAME);
let container_folder = packages_container_folder
.join(dependency_node.node.container_folder(&dependency_id));
let container_folder = dependency_node.node.container_folder(
&packages_container_folder,
dependency_name,
dependency_version_id.version(),
);
link_files(
let linker_folder = create_and_canonicalize(
node_container_folder.join(
node.node
.base_folder(version_id, dependency_node.target.kind()),
),
)
.await?;
self.link_files(
&linker_folder,
&container_folder,
&node_packages_folder,
container_folder.strip_prefix(&base_folder).unwrap(),
&dependency_node,
&dependency_id,
&dependency_alias,
false,
)?;
},
else => break,
}
}
while let Some(task) = tasks.join_next().await {
task.unwrap()?;
dependency_node,
dependency_name,
dependency_version_id,
dependency_alias,
&package_types,
&manifest,
)
.await?;
}
Ok(())
}
.instrument(span)
})
}))
.await
.map(|_| ())
}
}
/// Errors that can occur while linking dependencies
@ -414,9 +384,13 @@ pub mod errors {
#[error("library file at {0} not found")]
LibFileNotFound(String),
/// Executing a script failed
#[error("error executing script")]
ExecuteScript(#[from] crate::scripts::errors::ExecuteScriptError),
/// An error occurred while parsing a Luau script
#[error("error parsing Luau script at {0}")]
FullMoon(String, Vec<full_moon::Error>),
/// An error occurred while generating a Roblox sync config
#[error("error generating roblox sync config for {0}")]
GenerateRobloxSyncConfig(String, #[source] std::io::Error),
/// An error occurred while getting the require path for a library
#[error("error getting require path for library")]

View file

@ -1,14 +1,94 @@
#![allow(deprecated)]
use crate::{
graph::DependencyGraph,
manifest::{overrides::OverrideKey, target::TargetKind},
names::PackageName,
source::specifiers::DependencySpecifiers,
manifest::{
overrides::OverrideKey,
target::{Target, TargetKind},
DependencyType,
},
names::{PackageName, PackageNames},
source::{
refs::PackageRefs, specifiers::DependencySpecifiers, traits::PackageRef,
version_id::VersionId,
},
};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::{
collections::BTreeMap,
path::{Path, PathBuf},
};
/// A graph of dependencies
pub type Graph<Node> = BTreeMap<PackageNames, BTreeMap<VersionId, Node>>;
/// A dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNode {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(String, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, String)>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference
pub pkg_ref: PackageRefs,
}
impl DependencyGraphNode {
pub(crate) fn base_folder(&self, version_id: &VersionId, project_target: TargetKind) -> String {
if self.pkg_ref.use_new_structure() {
version_id.target().packages_folder(&project_target)
} else {
"..".to_string()
}
}
/// Returns the folder to store the contents of the package in
pub fn container_folder<P: AsRef<Path>>(
&self,
path: &P,
name: &PackageNames,
version: &Version,
) -> PathBuf {
if self.pkg_ref.like_wally() {
return path
.as_ref()
.join(format!(
"{}_{}@{}",
name.as_str().0,
name.as_str().1,
version
))
.join(name.as_str().1);
}
path.as_ref()
.join(name.escaped())
.join(version.to_string())
.join(name.as_str().1)
}
}
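A worked example of the two layouts `container_folder` produces, following the rules above (the index directory name is illustrative):

use std::path::Path;

fn main() {
    let index = Path::new("packages_index"); // illustrative container dir
    // pesde layout: <index>/<scope>+<name>/<version>/<name>
    let pesde = index.join("scope+name").join("1.2.3").join("name");
    // wally layout: <index>/<scope>_<name>@<version>/<name>
    let wally = index.join("scope_name@1.2.3").join("name");
    println!("{}\n{}", pesde.display(), wally.display());
}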
/// A graph of `DependencyGraphNode`s
pub type DependencyGraph = Graph<DependencyGraphNode>;
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DownloadedDependencyGraphNode {
/// The target of the package
pub target: Target,
/// The node
#[serde(flatten)]
pub node: DependencyGraphNode,
}
/// A graph of `DownloadedDependencyGraphNode`s
pub type DownloadedGraph = Graph<DownloadedDependencyGraphNode>;
/// A lockfile
#[derive(Serialize, Deserialize, Debug, Clone)]
@ -28,118 +108,6 @@ pub struct Lockfile {
pub workspace: BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>,
/// The graph of dependencies
#[serde(default, skip_serializing_if = "DependencyGraph::is_empty")]
pub graph: DependencyGraph,
}
/// Old lockfile stuff. Will be removed in a future version.
#[deprecated(
note = "Intended to be used to migrate old lockfiles to the new format. Will be removed in a future version."
)]
pub mod old {
use crate::{
manifest::{
overrides::OverrideKey,
target::{Target, TargetKind},
Alias, DependencyType,
},
names::{PackageName, PackageNames},
source::{
ids::{PackageId, VersionId},
refs::PackageRefs,
specifiers::DependencySpecifiers,
},
};
use relative_path::RelativePathBuf;
use semver::Version;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
/// An old dependency graph node
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DependencyGraphNodeOld {
/// The alias, specifier, and original (as in the manifest) type for the dependency, if it is a direct dependency (i.e. used by the current project)
#[serde(default, skip_serializing_if = "Option::is_none")]
pub direct: Option<(Alias, DependencySpecifiers, DependencyType)>,
/// The dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<PackageNames, (VersionId, Alias)>,
/// The resolved (transformed, for example Peer -> Standard) type of the dependency
pub resolved_ty: DependencyType,
/// Whether the resolved type should be Peer if this isn't depended on
#[serde(default, skip_serializing_if = "std::ops::Not::not")]
pub is_peer: bool,
/// The package reference
pub pkg_ref: PackageRefs,
}
/// A downloaded dependency graph node, i.e. a `DependencyGraphNode` with a `Target`
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct DownloadedDependencyGraphNodeOld {
/// The target of the package
pub target: Target,
/// The node
#[serde(flatten)]
pub node: DependencyGraphNodeOld,
}
/// An old version of a lockfile
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct LockfileOld {
/// The name of the package
pub name: PackageName,
/// The version of the package
pub version: Version,
/// The target of the package
pub target: TargetKind,
/// The overrides of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
/// The workspace members
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub workspace: BTreeMap<PackageName, BTreeMap<TargetKind, RelativePathBuf>>,
/// The graph of dependencies
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub graph: BTreeMap<PackageNames, BTreeMap<VersionId, DownloadedDependencyGraphNodeOld>>,
}
impl LockfileOld {
/// Converts this lockfile to a new lockfile
pub fn to_new(self) -> super::Lockfile {
super::Lockfile {
name: self.name,
version: self.version,
target: self.target,
overrides: self.overrides,
workspace: self.workspace,
graph: self
.graph
.into_iter()
.flat_map(|(name, versions)| {
versions.into_iter().map(move |(version, node)| {
(
PackageId(name.clone(), version),
crate::graph::DependencyGraphNode {
direct: node.node.direct,
dependencies: node
.node
.dependencies
.into_iter()
.map(|(name, (version, alias))| {
(PackageId(name, version), alias)
})
.collect(),
resolved_ty: node.node.resolved_ty,
is_peer: node.node.is_peer,
pkg_ref: node.node.pkg_ref,
},
)
})
})
.collect(),
}
}
}
pub graph: DownloadedGraph,
}
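The migration in `to_new` is essentially a map flattening: the old name -> version -> node nesting becomes a flat map keyed by (name, version). A generic sketch of that step with stand-in types:

use std::collections::BTreeMap;

fn flatten<N, V, T>(nested: BTreeMap<N, BTreeMap<V, T>>) -> BTreeMap<(N, V), T>
where
    N: Ord + Clone,
    V: Ord,
{
    nested
        .into_iter()
        .flat_map(|(name, versions)| {
            versions
                .into_iter()
                .map(move |(version, node)| ((name.clone(), version), node))
        })
        .collect()
}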

View file

@ -1,22 +1,19 @@
#[cfg(feature = "version-management")]
use crate::cli::version::{check_for_updates, current_version, get_or_download_engine};
use crate::cli::version::{check_for_updates, get_or_download_version, TagInfo};
use crate::cli::{auth::get_tokens, display_err, home_dir, HOME_DIR};
use anyhow::Context;
use clap::{builder::styling::AnsiColor, Parser};
use fs_err::tokio as fs;
use indicatif::MultiProgress;
use pesde::{engine::EngineKind, find_roots, version_matches, AuthConfig, Project};
use semver::VersionReq;
use pesde::{matching_globs, AuthConfig, Project, MANIFEST_FILE_NAME};
use std::{
io,
collections::HashSet,
path::{Path, PathBuf},
str::FromStr,
sync::Mutex,
};
use tempfile::NamedTempFile;
use tracing::instrument;
use tracing_indicatif::{filter::IndicatifFilter, IndicatifLayer};
use tracing_subscriber::{
filter::LevelFilter, fmt::MakeWriter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter,
filter::LevelFilter, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter, Layer,
};
mod cli;
@ -91,85 +88,29 @@ async fn get_linkable_dir(path: &Path) -> PathBuf {
);
}
pub static PROGRESS_BARS: Mutex<Option<MultiProgress>> = Mutex::new(None);
#[derive(Clone, Copy)]
pub struct IndicatifWriter;
impl IndicatifWriter {
fn suspend<F: FnOnce() -> R, R>(f: F) -> R {
match *PROGRESS_BARS.lock().unwrap() {
Some(ref progress_bars) => progress_bars.suspend(f),
None => f(),
}
}
}
impl io::Write for IndicatifWriter {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
Self::suspend(|| io::stderr().write(buf))
}
fn flush(&mut self) -> io::Result<()> {
Self::suspend(|| io::stderr().flush())
}
fn write_vectored(&mut self, bufs: &[io::IoSlice<'_>]) -> io::Result<usize> {
Self::suspend(|| io::stderr().write_vectored(bufs))
}
fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
Self::suspend(|| io::stderr().write_all(buf))
}
fn write_fmt(&mut self, fmt: std::fmt::Arguments<'_>) -> io::Result<()> {
Self::suspend(|| io::stderr().write_fmt(fmt))
}
}
impl<'a> MakeWriter<'a> for IndicatifWriter {
type Writer = IndicatifWriter;
fn make_writer(&'a self) -> Self::Writer {
*self
}
}
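The `suspend` indirection is what keeps log lines and progress bars from tearing each other: `MultiProgress::suspend` clears the bars, runs the write, then redraws them. A minimal illustration:

use indicatif::MultiProgress;

fn log_line(mp: &MultiProgress, msg: &str) {
    // Without suspend, this write would interleave with bar redraws.
    mp.suspend(|| eprintln!("{msg}"));
}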
async fn run() -> anyhow::Result<()> {
let cwd = std::env::current_dir().expect("failed to get current working directory");
// Unix doesn't return the symlinked path, so we need to get it from the 0 argument
#[cfg(unix)]
let current_exe = PathBuf::from(std::env::args_os().next().expect("argument 0 not set"));
#[cfg(not(unix))]
let current_exe = std::env::current_exe().expect("failed to get current executable path");
let exe_name = current_exe
.file_stem()
.unwrap()
.to_str()
.expect("exe name is not valid utf-8");
let exe_name_engine = EngineKind::from_str(exe_name);
#[cfg(windows)]
'scripts: {
// if we're an engine, we don't want to run any scripts
if exe_name_engine.is_ok() {
break 'scripts;
}
if let Some(bin_folder) = current_exe.parent() {
// we're not in {path}/bin/{exe}
if bin_folder.file_name().is_some_and(|parent| parent != "bin") {
break 'scripts;
}
// we're not in {path}/.pesde/bin/{exe}
if bin_folder
let exe = std::env::current_exe().expect("failed to get current executable path");
if exe.parent().is_some_and(|parent| {
parent.file_name().is_some_and(|parent| parent != "bin")
|| parent
.parent()
.and_then(|home_folder| home_folder.file_name())
.is_some_and(|home_folder| home_folder != HOME_DIR)
{
.and_then(|parent| parent.file_name())
.is_some_and(|parent| parent != HOME_DIR)
}) {
break 'scripts;
}
let exe_name = exe.file_name().unwrap().to_string_lossy();
let exe_name = exe_name
.strip_suffix(std::env::consts::EXE_SUFFIX)
.unwrap_or(&exe_name);
if exe_name == env!("CARGO_BIN_NAME") {
break 'scripts;
}
// the bin script will search for the project root itself, so we do that to ensure
@ -178,11 +119,9 @@ async fn run() -> anyhow::Result<()> {
let status = std::process::Command::new("lune")
.arg("run")
.arg(
current_exe
.parent()
.unwrap_or(&current_exe)
.join(".impl")
.join(current_exe.file_name().unwrap())
exe.parent()
.map(|p| p.join(".impl").join(exe.file_name().unwrap()))
.unwrap_or(exe)
.with_extension("luau"),
)
.arg("--")
@ -194,6 +133,8 @@ async fn run() -> anyhow::Result<()> {
std::process::exit(status.code().unwrap());
}
let indicatif_layer = IndicatifLayer::new().with_filter(IndicatifFilter::new(false));
let tracing_env_filter = EnvFilter::builder()
.with_default_directive(LevelFilter::INFO.into())
.from_env_lossy()
@ -205,7 +146,8 @@ async fn run() -> anyhow::Result<()> {
.add_directive("hyper=info".parse().unwrap())
.add_directive("h2=info".parse().unwrap());
let fmt_layer = tracing_subscriber::fmt::layer().with_writer(IndicatifWriter);
let fmt_layer =
tracing_subscriber::fmt::layer().with_writer(indicatif_layer.inner().get_stderr_writer());
#[cfg(debug_assertions)]
let fmt_layer = fmt_layer.with_timer(tracing_subscriber::fmt::time::uptime());
@ -221,11 +163,70 @@ async fn run() -> anyhow::Result<()> {
tracing_subscriber::registry()
.with(tracing_env_filter)
.with(fmt_layer)
.with(indicatif_layer)
.init();
let (project_root_dir, project_workspace_dir) = find_roots(cwd.clone())
let (project_root_dir, project_workspace_dir) = 'finder: {
let mut current_path = Some(cwd.clone());
let mut project_root = None::<PathBuf>;
let mut workspace_dir = None::<PathBuf>;
async fn get_workspace_members(path: &Path) -> anyhow::Result<HashSet<PathBuf>> {
let manifest = fs::read_to_string(path.join(MANIFEST_FILE_NAME))
.await
.context("failed to find project root")?;
.context("failed to read manifest")?;
let manifest: pesde::manifest::Manifest =
toml::from_str(&manifest).context("failed to parse manifest")?;
if manifest.workspace_members.is_empty() {
return Ok(HashSet::new());
}
matching_globs(
path,
manifest.workspace_members.iter().map(|s| s.as_str()),
false,
false,
)
.await
.context("failed to get workspace members")
}
while let Some(path) = current_path {
current_path = path.parent().map(|p| p.to_path_buf());
if !path.join(MANIFEST_FILE_NAME).exists() {
continue;
}
match (project_root.as_ref(), workspace_dir.as_ref()) {
(Some(project_root), Some(workspace_dir)) => {
break 'finder (project_root.clone(), Some(workspace_dir.clone()));
}
(Some(project_root), None) => {
if get_workspace_members(&path).await?.contains(project_root) {
workspace_dir = Some(path);
}
}
(None, None) => {
if get_workspace_members(&path).await?.contains(&cwd) {
// initializing a new member of a workspace
break 'finder (cwd, Some(path));
} else {
project_root = Some(path);
}
}
(None, Some(_)) => unreachable!(),
}
}
// we must not require the project root to be found, as that would
// prevent running pesde in a non-project directory (for example to init one)
(project_root.unwrap_or_else(|| cwd.clone()), workspace_dir)
};
tracing::trace!(
"project root: {}\nworkspace root: {}",
@ -281,35 +282,20 @@ async fn run() -> anyhow::Result<()> {
};
#[cfg(feature = "version-management")]
'engines: {
let Ok(engine) = exe_name_engine else {
break 'engines;
};
{
let target_version = project
.deser_manifest()
.await
.ok()
.and_then(|manifest| manifest.pesde_version);
let req = match project.deser_manifest().await {
Ok(mut manifest) => manifest.engines.remove(&engine),
Err(pesde::errors::ManifestReadError::Io(e)) if e.kind() == io::ErrorKind::NotFound => {
let exe_path = if let Some(version) = target_version {
get_or_download_version(&reqwest, &TagInfo::Incomplete(version), false).await?
} else {
None
}
Err(e) => return Err(e.into()),
};
if engine == EngineKind::Pesde {
match &req {
// we're already running a compatible version
Some(req) if version_matches(req, &current_version()) => break 'engines,
// the user has not requested a specific version, so we'll just use the current one
None => break 'engines,
_ => (),
}
}
let exe_path =
get_or_download_engine(&reqwest, engine, req.unwrap_or(VersionReq::STAR)).await?;
if exe_path == current_exe {
anyhow::bail!("engine linker executed by itself")
}
if let Some(exe_path) = exe_path {
let status = std::process::Command::new(exe_path)
.args(std::env::args_os().skip(1))
.status()
@ -318,11 +304,11 @@ async fn run() -> anyhow::Result<()> {
std::process::exit(status.code().unwrap());
}
#[cfg(feature = "version-management")]
display_err(
check_for_updates(&reqwest).await,
" while checking for updates",
);
}
let cli = Cli::parse();

View file

@ -1,21 +1,12 @@
use crate::{
engine::EngineKind,
manifest::{
overrides::{OverrideKey, OverrideSpecifier},
target::Target,
},
manifest::{overrides::OverrideKey, target::Target},
names::PackageName,
ser_display_deser_fromstr,
source::specifiers::DependencySpecifiers,
};
use relative_path::RelativePathBuf;
use semver::{Version, VersionReq};
use semver::Version;
use serde::{Deserialize, Serialize};
use std::{
collections::{BTreeMap, HashMap},
fmt::Display,
str::FromStr,
};
use std::collections::{BTreeMap, HashMap};
use tracing::instrument;
/// Overrides
@ -25,7 +16,6 @@ pub mod target;
/// A package manifest
#[derive(Serialize, Deserialize, Debug, Clone)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
pub struct Manifest {
/// The name of the package
pub name: PackageName,
@ -50,7 +40,6 @@ pub struct Manifest {
pub private: bool,
/// The scripts of the package
#[serde(default, skip_serializing)]
#[cfg_attr(test, schemars(with = "BTreeMap<String, std::path::PathBuf>"))]
pub scripts: BTreeMap<String, RelativePathBuf>,
/// The indices to use for the package
#[serde(
@ -58,7 +47,6 @@ pub struct Manifest {
skip_serializing,
deserialize_with = "crate::util::deserialize_gix_url_map"
)]
#[cfg_attr(test, schemars(with = "BTreeMap<String, url::Url>"))]
pub indices: BTreeMap<String, gix::Url>,
/// The indices to use for the package's wally dependencies
#[cfg(feature = "wally-compat")]
@ -67,110 +55,46 @@ pub struct Manifest {
skip_serializing,
deserialize_with = "crate::util::deserialize_gix_url_map"
)]
#[cfg_attr(test, schemars(with = "BTreeMap<String, url::Url>"))]
pub wally_indices: BTreeMap<String, gix::Url>,
/// The overrides this package has
#[serde(default, skip_serializing)]
pub overrides: BTreeMap<OverrideKey, OverrideSpecifier>,
pub overrides: BTreeMap<OverrideKey, DependencySpecifiers>,
/// The files to include in the package
#[serde(default)]
pub includes: Vec<String>,
/// The patches to apply to packages
#[cfg(feature = "patches")]
#[serde(default, skip_serializing)]
#[cfg_attr(
test,
schemars(
with = "BTreeMap<crate::names::PackageNames, BTreeMap<crate::source::ids::VersionId, std::path::PathBuf>>"
)
)]
pub patches: BTreeMap<
crate::names::PackageNames,
BTreeMap<crate::source::ids::VersionId, RelativePathBuf>,
BTreeMap<crate::source::version_id::VersionId, RelativePathBuf>,
>,
#[serde(default, skip_serializing)]
/// Which version of the pesde CLI this package uses
pub pesde_version: Option<Version>,
/// A list of globs pointing to workspace members' directories
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub workspace_members: Vec<String>,
/// The Roblox place of this project
#[serde(default, skip_serializing)]
pub place: BTreeMap<target::RobloxPlaceKind, String>,
/// The engines this package supports
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
#[cfg_attr(test, schemars(with = "BTreeMap<EngineKind, String>"))]
pub engines: BTreeMap<EngineKind, VersionReq>,
/// The standard dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dependencies: BTreeMap<Alias, DependencySpecifiers>,
pub dependencies: BTreeMap<String, DependencySpecifiers>,
/// The peer dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub peer_dependencies: BTreeMap<Alias, DependencySpecifiers>,
pub peer_dependencies: BTreeMap<String, DependencySpecifiers>,
/// The dev dependencies of the package
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
pub dev_dependencies: BTreeMap<Alias, DependencySpecifiers>,
pub dev_dependencies: BTreeMap<String, DependencySpecifiers>,
/// The user-defined fields of the package
#[cfg_attr(test, schemars(skip))]
#[serde(flatten)]
pub user_defined_fields: HashMap<String, toml::Value>,
}
/// An alias of a dependency
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct Alias(String);
ser_display_deser_fromstr!(Alias);
impl Display for Alias {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
f.pad(&self.0)
}
}
impl FromStr for Alias {
type Err = errors::AliasFromStr;
fn from_str(s: &str) -> Result<Self, Self::Err> {
if s.is_empty() {
return Err(errors::AliasFromStr::Empty);
}
if !s
.chars()
.all(|c| c.is_ascii_alphanumeric() || c == '-' || c == '_')
{
return Err(errors::AliasFromStr::InvalidCharacters(s.to_string()));
}
if EngineKind::from_str(s).is_ok() {
return Err(errors::AliasFromStr::EngineName(s.to_string()));
}
Ok(Self(s.to_string()))
}
}
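A hedged round-trip of the alias rules above (the engine-name rejection assumes `pesde` parses as an `EngineKind`, per the check in `from_str`):

use pesde::manifest::Alias;

fn main() {
    // Valid: ASCII alphanumerics plus `-` and `_`.
    assert!("my-dep_2".parse::<Alias>().is_ok());
    // Rejected by the rules above: Empty / InvalidCharacters / EngineName.
    assert!("".parse::<Alias>().is_err());
    assert!("bad name".parse::<Alias>().is_err());
    assert!("pesde".parse::<Alias>().is_err()); // assumed engine name
}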
#[cfg(test)]
impl schemars::JsonSchema for Alias {
fn schema_name() -> std::borrow::Cow<'static, str> {
"Alias".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^[a-zA-Z0-9_-]+$"#,
})
}
}
impl Alias {
/// Get the alias as a string
pub fn as_str(&self) -> &str {
&self.0
}
}
/// A dependency type
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[serde(rename_all = "snake_case")]
pub enum DependencyType {
/// A standard dependency
@ -181,22 +105,15 @@ pub enum DependencyType {
Dev,
}
impl DependencyType {
/// All possible dependency types
pub const VARIANTS: &'static [DependencyType] = &[
DependencyType::Standard,
DependencyType::Peer,
DependencyType::Dev,
];
}
impl Manifest {
/// Get all dependencies from the manifest
#[instrument(skip(self), ret(level = "trace"), level = "debug")]
pub fn all_dependencies(
&self,
) -> Result<BTreeMap<Alias, (DependencySpecifiers, DependencyType)>, errors::AllDependenciesError>
{
) -> Result<
BTreeMap<String, (DependencySpecifiers, DependencyType)>,
errors::AllDependenciesError,
> {
let mut all_deps = BTreeMap::new();
for (deps, ty) in [
@ -217,43 +134,14 @@ impl Manifest {
/// Errors that can occur when interacting with manifests
pub mod errors {
use crate::manifest::Alias;
use thiserror::Error;
/// Errors that can occur when parsing an alias from a string
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AliasFromStr {
/// The alias is empty
#[error("the alias is empty")]
Empty,
/// The alias contains characters outside a-z, A-Z, 0-9, -, and _
#[error("alias `{0}` contains characters outside a-z, A-Z, 0-9, -, and _")]
InvalidCharacters(String),
/// The alias is an engine name
#[error("alias `{0}` is an engine name")]
EngineName(String),
}
/// Errors that can occur when trying to get all dependencies from a manifest
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum AllDependenciesError {
/// Another specifier is already using the alias
#[error("another specifier is already using the alias {0}")]
AliasConflict(Alias),
}
}
#[cfg(test)]
mod tests {
#[test]
pub fn generate_schema() {
let schema = schemars::schema_for!(super::Manifest);
let schema = serde_json::to_string_pretty(&schema).unwrap();
std::fs::write("manifest.schema.json", schema).unwrap();
AliasConflict(String),
}
}

View file

@ -1,14 +1,14 @@
use crate::{manifest::Alias, ser_display_deser_fromstr, source::specifiers::DependencySpecifiers};
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
fmt::{Display, Formatter},
str::FromStr,
};
/// An override key
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct OverrideKey(pub Vec<Vec<Alias>>);
ser_display_deser_fromstr!(OverrideKey);
#[derive(
Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct OverrideKey(pub Vec<Vec<String>>);
impl FromStr for OverrideKey {
type Err = errors::OverrideKeyFromStr;
@ -16,13 +16,8 @@ impl FromStr for OverrideKey {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let overrides = s
.split(',')
.map(|overrides| {
overrides
.split('>')
.map(Alias::from_str)
.collect::<Result<_, _>>()
})
.collect::<Result<Vec<Vec<Alias>>, _>>()?;
.map(|overrides| overrides.split('>').map(|s| s.to_string()).collect())
.collect::<Vec<Vec<String>>>();
if overrides.is_empty() {
return Err(errors::OverrideKeyFromStr::Empty);
@ -32,20 +27,6 @@ impl FromStr for OverrideKey {
}
}
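A short usage sketch of the key grammar: `>` walks down the dependency chain, and `,` lets several chains share one override.

use pesde::manifest::overrides::OverrideKey;

fn main() {
    let key: OverrideKey = "foo>bar,foo>baz>qux".parse().unwrap();
    // Display is expected to round-trip the same shape.
    println!("{key}");
}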
#[cfg(test)]
impl schemars::JsonSchema for OverrideKey {
fn schema_name() -> std::borrow::Cow<'static, str> {
"OverrideKey".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^(?:[a-zA-Z0-9_-]+>[a-zA-Z0-9_-]+(?:>[a-zA-Z0-9_-]+)*)(?:,(?:[a-zA-Z0-9_-]+>[a-zA-Z0-9_-]+(?:>[a-zA-Z0-9_-]+)*))*$"#,
})
}
}
impl Display for OverrideKey {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(
@ -56,7 +37,7 @@ impl Display for OverrideKey {
.map(|overrides| {
overrides
.iter()
.map(Alias::as_str)
.map(|o| o.as_str())
.collect::<Vec<_>>()
.join(">")
})
@ -66,17 +47,6 @@ impl Display for OverrideKey {
}
}
/// A specifier for an override
#[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq, Hash)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[serde(untagged)]
pub enum OverrideSpecifier {
/// A specifier for a dependency
Specifier(DependencySpecifiers),
/// An alias for a dependency the current project depends on
Alias(Alias),
}
/// Errors that can occur when interacting with override keys
pub mod errors {
use thiserror::Error;
@ -88,9 +58,5 @@ pub mod errors {
/// The override key is empty
#[error("empty override key")]
Empty,
/// An alias in the override key is invalid
#[error("invalid alias in override key")]
InvalidAlias(#[from] crate::manifest::errors::AliasFromStr),
}
}

View file

@ -1,6 +1,6 @@
use crate::ser_display_deser_fromstr;
use relative_path::{RelativePath, RelativePathBuf};
use relative_path::RelativePathBuf;
use serde::{Deserialize, Serialize};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use std::{
collections::{BTreeMap, BTreeSet},
fmt::{Display, Formatter},
@ -8,9 +8,9 @@ use std::{
};
/// A kind of target
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[cfg_attr(test, schemars(rename_all = "snake_case"))]
#[derive(
SerializeDisplay, DeserializeFromStr, Debug, Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub enum TargetKind {
/// A Roblox target
Roblox,
@ -21,7 +21,6 @@ pub enum TargetKind {
/// A Luau target
Luau,
}
ser_display_deser_fromstr!(TargetKind);
impl Display for TargetKind {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
@ -59,7 +58,7 @@ impl TargetKind {
/// The folder to store packages in for this target
/// self is the project's target, dependency is the target of the dependency
pub fn packages_folder(self, dependency: Self) -> String {
pub fn packages_folder(&self, dependency: &Self) -> String {
// the code below might seem better, but it would create issues for users who rely
// on a build script, since imports would break between targets
@ -78,14 +77,12 @@ impl TargetKind {
/// A target of a package
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[serde(rename_all = "snake_case", tag = "environment")]
pub enum Target {
/// A Roblox target
Roblox {
/// The path to the lib export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config
#[serde(default)]
@ -95,7 +92,6 @@ pub enum Target {
RobloxServer {
/// The path to the lib export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
lib: Option<RelativePathBuf>,
/// The files to include in the sync tool's config
#[serde(default)]
@ -105,30 +101,24 @@ pub enum Target {
Lune {
/// The path to the lib export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
lib: Option<RelativePathBuf>,
/// The path to the bin export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
bin: Option<RelativePathBuf>,
/// The exported scripts
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
#[cfg_attr(test, schemars(with = "BTreeMap<String, std::path::PathBuf>"))]
scripts: BTreeMap<String, RelativePathBuf>,
},
/// A Luau target
Luau {
/// The path to the lib export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
lib: Option<RelativePathBuf>,
/// The path to the bin export file
#[serde(default, skip_serializing_if = "Option::is_none")]
#[cfg_attr(test, schemars(with = "Option<std::path::PathBuf>"))]
bin: Option<RelativePathBuf>,
/// The exported scripts
#[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
#[cfg_attr(test, schemars(with = "BTreeMap<String, std::path::PathBuf>"))]
scripts: BTreeMap<String, RelativePathBuf>,
},
}
@ -145,22 +135,22 @@ impl Target {
}
/// Returns the path to the lib export file
pub fn lib_path(&self) -> Option<&RelativePath> {
pub fn lib_path(&self) -> Option<&RelativePathBuf> {
match self {
Target::Roblox { lib, .. } => lib.as_deref(),
Target::RobloxServer { lib, .. } => lib.as_deref(),
Target::Lune { lib, .. } => lib.as_deref(),
Target::Luau { lib, .. } => lib.as_deref(),
Target::Roblox { lib, .. } => lib.as_ref(),
Target::RobloxServer { lib, .. } => lib.as_ref(),
Target::Lune { lib, .. } => lib.as_ref(),
Target::Luau { lib, .. } => lib.as_ref(),
}
}
/// Returns the path to the bin export file
pub fn bin_path(&self) -> Option<&RelativePath> {
pub fn bin_path(&self) -> Option<&RelativePathBuf> {
match self {
Target::Roblox { .. } => None,
Target::RobloxServer { .. } => None,
Target::Lune { bin, .. } => bin.as_deref(),
Target::Luau { bin, .. } => bin.as_deref(),
Target::Lune { bin, .. } => bin.as_ref(),
Target::Luau { bin, .. } => bin.as_ref(),
}
}
@ -190,8 +180,9 @@ impl Display for Target {
}
/// The kind of a Roblox place property
#[derive(Serialize, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[derive(
SerializeDisplay, Deserialize, Debug, Clone, Copy, PartialEq, Eq, Hash, Ord, PartialOrd,
)]
#[serde(rename_all = "snake_case")]
pub enum RobloxPlaceKind {
/// The shared dependencies location
@ -200,7 +191,7 @@ pub enum RobloxPlaceKind {
Server,
}
impl TryInto<RobloxPlaceKind> for TargetKind {
impl TryInto<RobloxPlaceKind> for &TargetKind {
type Error = ();
fn try_into(self) -> Result<RobloxPlaceKind, Self::Error> {

View file

@ -1,6 +1,7 @@
use crate::ser_display_deser_fromstr;
use std::{fmt::Display, str::FromStr};
use serde_with::{DeserializeFromStr, SerializeDisplay};
/// The invalid part of a package name
#[derive(Debug)]
pub enum ErrorReason {
@ -20,11 +21,11 @@ impl Display for ErrorReason {
}
/// A pesde package name
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(
Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct PackageName(String, String);
ser_display_deser_fromstr!(PackageName);
impl FromStr for PackageName {
type Err = errors::PackageNameError;
@ -72,20 +73,6 @@ impl Display for PackageName {
}
}
#[cfg(test)]
impl schemars::JsonSchema for PackageName {
fn schema_name() -> std::borrow::Cow<'static, str> {
"PackageName".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^(?!_)(?![0-9]+\/)[a-z0-9_]{3,32}(?<!_)\/(?!_)(?![0-9]+\/)[a-z0-9_]{1,32}(?<!_)$"#
})
}
}
impl PackageName {
/// Returns the parts of the package name
pub fn as_str(&self) -> (&str, &str) {
@ -96,22 +83,12 @@ impl PackageName {
pub fn escaped(&self) -> String {
format!("{}+{}", self.0, self.1)
}
/// Returns the scope of the package name
pub fn scope(&self) -> &str {
&self.0
}
/// Returns the name part of the package name
pub fn name(&self) -> &str {
&self.1
}
}
/// All possible package names
#[derive(Debug, Clone, Hash, PartialEq, Eq, PartialOrd, Ord)]
#[cfg_attr(test, derive(schemars::JsonSchema))]
#[cfg_attr(test, schemars(untagged))]
#[derive(
Debug, DeserializeFromStr, SerializeDisplay, Clone, Hash, PartialEq, Eq, PartialOrd, Ord,
)]
pub enum PackageNames {
/// A pesde package name
Pesde(PackageName),
@ -119,7 +96,6 @@ pub enum PackageNames {
#[cfg(feature = "wally-compat")]
Wally(wally::WallyPackageName),
}
ser_display_deser_fromstr!(PackageNames);
impl PackageNames {
/// Returns the parts of the package name
@ -144,24 +120,6 @@ impl PackageNames {
pub fn from_escaped(s: &str) -> Result<Self, errors::PackageNamesError> {
PackageNames::from_str(s.replacen('+', "/", 1).as_str())
}
/// Returns the scope of the package name
pub fn scope(&self) -> &str {
match self {
PackageNames::Pesde(name) => name.scope(),
#[cfg(feature = "wally-compat")]
PackageNames::Wally(name) => name.scope(),
}
}
/// Returns the name part of the package name
pub fn name(&self) -> &str {
match self {
PackageNames::Pesde(name) => name.name(),
#[cfg(feature = "wally-compat")]
PackageNames::Wally(name) => name.name(),
}
}
}
impl Display for PackageNames {
@ -200,15 +158,15 @@ impl FromStr for PackageNames {
pub mod wally {
use std::{fmt::Display, str::FromStr};
use crate::{
names::{errors, ErrorReason},
ser_display_deser_fromstr,
};
use serde_with::{DeserializeFromStr, SerializeDisplay};
use crate::names::{errors, ErrorReason};
/// A Wally package name
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord)]
#[derive(
Debug, DeserializeFromStr, SerializeDisplay, Clone, PartialEq, Eq, Hash, PartialOrd, Ord,
)]
pub struct WallyPackageName(String, String);
ser_display_deser_fromstr!(WallyPackageName);
impl FromStr for WallyPackageName {
type Err = errors::WallyPackageNameError;
@ -243,20 +201,6 @@ pub mod wally {
}
}
#[cfg(test)]
impl schemars::JsonSchema for WallyPackageName {
fn schema_name() -> std::borrow::Cow<'static, str> {
"WallyPackageName".into()
}
fn json_schema(_: &mut schemars::SchemaGenerator) -> schemars::Schema {
schemars::json_schema!({
"type": "string",
"pattern": r#"^(wally#)?[a-z0-9-]{1,64}\/[a-z0-9-]{1,64}$"#
})
}
}
impl WallyPackageName {
/// Returns the parts of the package name
pub fn as_str(&self) -> (&str, &str) {
@ -267,16 +211,6 @@ pub mod wally {
pub fn escaped(&self) -> String {
format!("wally#{}+{}", self.0, self.1)
}
/// Returns the scope of the package name
pub fn scope(&self) -> &str {
&self.0
}
/// Returns the name part of the package name
pub fn name(&self) -> &str {
&self.1
}
}
}
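Both sides of this file's diff implement the same round-trip - serialize through `Display`, deserialize through `FromStr` - once via the `serde_with` derives and once via the local `ser_display_deser_fromstr!` macro. A hedged sketch of what such a macro presumably expands to (the actual expansion in pesde may differ):

macro_rules! ser_display_deser_fromstr {
    ($ty:ty) => {
        impl serde::Serialize for $ty {
            fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
                // requires `$ty: std::fmt::Display`
                serializer.collect_str(self)
            }
        }

        impl<'de> serde::Deserialize<'de> for $ty {
            fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
                // requires `$ty: std::str::FromStr` with a `Display`able error
                let s = <String as serde::Deserialize>::deserialize(deserializer)?;
                s.parse().map_err(serde::de::Error::custom)
            }
        }
    };
}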

View file

@ -1,16 +1,8 @@
use crate::{
reporters::{PatchProgressReporter, PatchesReporter},
source::ids::PackageId,
MANIFEST_FILE_NAME,
};
use crate::{lockfile::DownloadedGraph, Project, MANIFEST_FILE_NAME, PACKAGES_CONTAINER_NAME};
use fs_err::tokio as fs;
use futures::TryFutureExt;
use git2::{ApplyLocation, Diff, DiffFormat, DiffLineType, Repository, Signature};
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use tokio::task::{spawn_blocking, JoinSet};
use relative_path::RelativePathBuf;
use std::path::Path;
use tracing::instrument;
/// Set up a git repository for patches
@ -44,7 +36,7 @@ pub fn setup_patches_repo<P: AsRef<Path>>(dir: P) -> Result<Repository, git2::Er
/// Create a patch from the current state of the repository
pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
let mut patch = vec![];
let mut patches = vec![];
let repo = Repository::open(dir.as_ref())?;
let original = repo.head()?.peel_to_tree()?;
@ -55,12 +47,7 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
checkout_builder.path(MANIFEST_FILE_NAME);
repo.checkout_tree(original.as_object(), Some(&mut checkout_builder))?;
let mut diff_options = git2::DiffOptions::default();
diff_options.include_untracked(true);
diff_options.recurse_untracked_dirs(true);
diff_options.show_untracked_content(true);
let diff = repo.diff_tree_to_workdir(Some(&original), Some(&mut diff_options))?;
let diff = repo.diff_tree_to_workdir(Some(&original), None)?;
diff.print(DiffFormat::Patch, |_delta, _hunk, line| {
if matches!(
@ -70,127 +57,164 @@ pub fn create_patch<P: AsRef<Path>>(dir: P) -> Result<Vec<u8>, git2::Error> {
let origin = line.origin();
let mut buffer = vec![0; origin.len_utf8()];
origin.encode_utf8(&mut buffer);
patch.extend(buffer);
patches.extend(buffer);
}
patch.extend(line.content());
patches.extend(line.content());
true
})?;
Ok(patch)
Ok(patches)
}
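A hedged usage sketch of the two helpers above - snapshot a package into a throwaway repo, let the user edit it, then serialize the working-tree changes. The surrounding CLI does the real orchestration; this only shows the shape of the calls:

use std::path::Path;

fn patch_workflow(container: &Path) -> Result<Vec<u8>, git2::Error> {
    // snapshot the pristine package state
    let _repo = setup_patches_repo(container)?;

    // ... user edits files under `container` here ...

    // diff the working tree against the snapshot into unified-diff bytes
    create_patch(container)
}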
// unlike a simple hard reset, this will also remove untracked files
fn reset_repo(repo: &Repository) -> Result<(), git2::Error> {
let mut checkout_builder = git2::build::CheckoutBuilder::new();
checkout_builder.force();
checkout_builder.remove_untracked(true);
repo.checkout_head(Some(&mut checkout_builder))?;
impl Project {
/// Apply patches to the project's dependencies
#[instrument(skip(self, graph), level = "debug")]
pub async fn apply_patches(
&self,
graph: &DownloadedGraph,
) -> Result<
tokio::sync::mpsc::Receiver<Result<String, errors::ApplyPatchesError>>,
errors::ApplyPatchesError,
> {
let manifest = self.deser_manifest().await?;
let (tx, rx) = tokio::sync::mpsc::channel(
manifest
.patches
.values()
.map(|v| v.len())
.sum::<usize>()
.max(1),
);
Ok(())
for (name, versions) in manifest.patches {
for (version_id, patch_path) in versions {
let tx = tx.clone();
let name = name.clone();
let patch_path = patch_path.to_path(self.package_dir());
let Some(node) = graph
.get(&name)
.and_then(|versions| versions.get(&version_id))
else {
tracing::warn!(
"patch for {name}@{version_id} not applied because it is not in the graph"
);
tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();
continue;
};
let container_folder = node.node.container_folder(
&self
.package_dir()
.join(manifest.target.kind().packages_folder(version_id.target()))
.join(PACKAGES_CONTAINER_NAME),
&name,
version_id.version(),
);
tokio::spawn(async move {
tracing::debug!("applying patch to {name}@{version_id}");
let patch = match fs::read(&patch_path).await {
Ok(patch) => patch,
Err(e) => {
tx.send(Err(errors::ApplyPatchesError::PatchRead(e)))
.await
.unwrap();
return;
}
};
let patch = match Diff::from_buffer(&patch) {
Ok(patch) => patch,
Err(e) => {
tx.send(Err(errors::ApplyPatchesError::Git(e)))
.await
.unwrap();
return;
}
};
/// Apply a patch to a dependency
#[instrument(skip(container_folder, patch_path, reporter), level = "debug")]
pub async fn apply_patch<Reporter>(
package_id: &PackageId,
container_folder: PathBuf,
patch_path: &Path,
reporter: Arc<Reporter>,
) -> Result<(), errors::ApplyPatchError>
where
Reporter: PatchesReporter + Send + Sync + 'static,
{
let dot_git = container_folder.join(".git");
tracing::debug!("applying patch");
let progress_reporter = reporter.report_patch(package_id.to_string());
let patch = fs::read(&patch_path)
let repo = match setup_patches_repo(&container_folder) {
Ok(repo) => repo,
Err(e) => {
tx.send(Err(errors::ApplyPatchesError::Git(e)))
.await
.map_err(errors::ApplyPatchError::PatchRead)?;
let patch = spawn_blocking(move || Diff::from_buffer(&patch))
.await
.unwrap()?;
.unwrap();
return;
}
};
let mut apply_delta_tasks = patch
let modified_files = patch
.deltas()
.filter(|delta| matches!(delta.status(), git2::Delta::Modified))
.filter_map(|delta| delta.new_file().path())
.map(|path| {
let path = container_folder.join(path);
RelativePathBuf::from_path(path)
.unwrap()
.to_path(&container_folder)
})
.filter(|path| path.is_file())
.collect::<Vec<_>>();
async {
// prevent CAS corruption by the file being modified
for path in modified_files {
// there is no way (as far as I know) to check if it's hardlinked
// so, we always unlink it
let content = match fs::read(&path).await {
Ok(content) => content,
Err(e) if e.kind() == std::io::ErrorKind::IsADirectory => return Ok(()),
Err(e) => return Err(e),
};
fs::remove_file(&path).await?;
fs::write(path, content).await?;
Ok(())
Err(e) => {
tx.send(Err(errors::ApplyPatchesError::File(e)))
.await
.unwrap();
return;
}
.map_err(errors::ApplyPatchError::File)
})
.collect::<JoinSet<_>>();
while let Some(res) = apply_delta_tasks.join_next().await {
res.unwrap()?;
}
spawn_blocking(move || {
let repo = if dot_git.exists() {
let repo = Repository::open(&container_folder)?;
reset_repo(&repo)?;
repo
} else {
setup_patches_repo(&container_folder)?
};
repo.apply(&patch, ApplyLocation::WorkDir, None)
})
if let Err(e) = fs::remove_file(&path).await {
tx.send(Err(errors::ApplyPatchesError::File(e)))
.await
.unwrap()?;
tracing::debug!("patch applied");
progress_reporter.report_done();
Ok::<_, errors::ApplyPatchError>(())
.unwrap();
return;
}
/// Remove a patch from a dependency
#[instrument(level = "debug")]
pub async fn remove_patch(container_folder: PathBuf) -> Result<(), errors::ApplyPatchError> {
let dot_git = container_folder.join(".git");
tracing::debug!("removing patch");
if fs::metadata(&dot_git).await.is_err() {
return Ok(());
}
spawn_blocking(move || {
let repo = Repository::open(&container_folder)?;
reset_repo(&repo)?;
Ok::<_, git2::Error>(())
})
if let Err(e) = fs::write(path, content).await {
tx.send(Err(errors::ApplyPatchesError::File(e)))
.await
.unwrap()?;
match fs::remove_dir_all(&dot_git).await {
Ok(()) => (),
Err(e) if e.kind() == std::io::ErrorKind::NotFound => (),
Err(e) => return Err(errors::ApplyPatchError::File(e)),
.unwrap();
return;
}
}
tracing::debug!("patch removed");
if let Err(e) = repo.apply(&patch, ApplyLocation::Both, None) {
tx.send(Err(errors::ApplyPatchesError::Git(e)))
.await
.unwrap();
return;
}
}
Ok::<_, errors::ApplyPatchError>(())
tracing::debug!(
"patch applied to {name}@{version_id}, removing .git directory"
);
if let Err(e) = fs::remove_dir_all(container_folder.join(".git")).await {
tx.send(Err(errors::ApplyPatchesError::DotGitRemove(e)))
.await
.unwrap();
return;
}
tx.send(Ok(format!("{name}@{version_id}"))).await.unwrap();
});
}
}
Ok(rx)
}
}
/// Errors that can occur when using patches
@ -200,7 +224,11 @@ pub mod errors {
/// Errors that can occur when applying patches
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum ApplyPatchError {
pub enum ApplyPatchesError {
/// Error deserializing the project manifest
#[error("error deserializing project manifest")]
ManifestDeserializationFailed(#[from] crate::errors::ManifestReadError),
/// Error interacting with git
#[error("error interacting with git")]
Git(#[from] git2::Error),
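One detail worth calling out from the `apply_patch` body above: files about to be modified are read, unlinked, and rewritten first, so that patching never writes through a hard link into the content-addressable store. The core of that trick as a standalone sketch (function name hypothetical):

use fs_err::tokio as fs;
use std::path::Path;

async fn unlink_in_place(path: &Path) -> std::io::Result<()> {
    let content = fs::read(path).await?;
    // removing the file breaks any hard link; the rewrite creates a fresh inode
    fs::remove_file(path).await?;
    fs::write(path, content).await
}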

View file

@ -1,97 +0,0 @@
//! Progress reporting
//!
//! Certain operations will ask for a progress reporter to be passed in; this
//! allows the caller to be notified of progress during the operation. This can
//! be used to show progress to the user.
//!
//! All reporter traits are implemented for `()`. These implementations do
//! nothing, and can be used to ignore progress reporting.
#![allow(unused_variables)]
use async_stream::stream;
use futures::StreamExt;
use std::sync::Arc;
use tokio::io::AsyncBufRead;
/// Reports downloads.
pub trait DownloadsReporter: Send + Sync {
/// The [`DownloadProgressReporter`] type associated with this reporter.
type DownloadProgressReporter: DownloadProgressReporter + 'static;
/// Starts a new download.
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter;
}
impl DownloadsReporter for () {
type DownloadProgressReporter = ();
fn report_download(self: Arc<Self>, name: String) -> Self::DownloadProgressReporter {}
}
/// Reports the progress of a single download.
pub trait DownloadProgressReporter: Send + Sync {
/// Reports that the download has started.
fn report_start(&self) {}
/// Reports the progress of the download.
///
/// `total` is the total number of bytes to download, and `len` is the number
/// of bytes downloaded so far.
fn report_progress(&self, total: u64, len: u64) {}
/// Reports that the download is done.
fn report_done(&self) {}
}
impl DownloadProgressReporter for () {}
/// Reports the progress of applying patches.
pub trait PatchesReporter: Send + Sync {
/// The [`PatchProgressReporter`] type associated with this reporter.
type PatchProgressReporter: PatchProgressReporter + 'static;
/// Starts a new patch.
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter;
}
impl PatchesReporter for () {
type PatchProgressReporter = ();
fn report_patch(self: Arc<Self>, name: String) -> Self::PatchProgressReporter {}
}
/// Reports the progress of a single patch.
pub trait PatchProgressReporter: Send + Sync {
/// Reports that the patch has been applied.
fn report_done(&self) {}
}
impl PatchProgressReporter for () {}
pub(crate) fn response_to_async_read<R: DownloadProgressReporter>(
response: reqwest::Response,
reporter: Arc<R>,
) -> impl AsyncBufRead {
let total_len = response.content_length().unwrap_or(0);
reporter.report_progress(total_len, 0);
let mut bytes_downloaded = 0;
let mut stream = response.bytes_stream();
let bytes = stream!({
while let Some(chunk) = stream.next().await {
let chunk = match chunk {
Ok(chunk) => chunk,
Err(err) => {
yield Err(std::io::Error::new(std::io::ErrorKind::Other, err));
continue;
}
};
bytes_downloaded += chunk.len() as u64;
reporter.report_progress(total_len, bytes_downloaded);
yield Ok(chunk);
}
reporter.report_done();
});
tokio_util::io::StreamReader::new(bytes)
}
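A minimal consumer of the reporter traits above - a reporter that logs download progress to stderr (`LogReporter` and `LogDownload` are hypothetical names, not part of pesde):

use std::sync::Arc;

struct LogReporter;
struct LogDownload(String);

impl DownloadProgressReporter for LogDownload {
    fn report_progress(&self, total: u64, len: u64) {
        eprintln!("[{}] {len}/{total} bytes", self.0);
    }

    fn report_done(&self) {
        eprintln!("[{}] done", self.0);
    }
}

impl DownloadsReporter for LogReporter {
    type DownloadProgressReporter = LogDownload;

    fn report_download(self: Arc<Self>, name: String) -> LogDownload {
        LogDownload(name)
    }
}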

View file

@ -1,31 +1,37 @@
use crate::{
graph::{DependencyGraph, DependencyGraphNode},
manifest::{overrides::OverrideSpecifier, Alias, DependencyType},
lockfile::{DependencyGraph, DependencyGraphNode},
manifest::DependencyType,
names::PackageNames,
source::{
ids::PackageId,
pesde::PesdePackageSource,
specifiers::DependencySpecifiers,
traits::{PackageRef, PackageSource, RefreshOptions, ResolveOptions},
traits::{PackageRef, PackageSource},
version_id::VersionId,
PackageSources,
},
Project, RefreshedSources, DEFAULT_INDEX_NAME,
Project, DEFAULT_INDEX_NAME,
};
use std::collections::{btree_map::Entry, HashMap, VecDeque};
use std::collections::{btree_map::Entry, HashMap, HashSet, VecDeque};
use tracing::{instrument, Instrument};
fn insert_node(
graph: &mut DependencyGraph,
package_id: &PackageId,
name: PackageNames,
version: VersionId,
mut node: DependencyGraphNode,
is_top_level: bool,
) {
if !is_top_level && node.direct.take().is_some() {
tracing::debug!(
"tried to insert {package_id} as direct dependency from a non top-level context",
"tried to insert {name}@{version} as direct dependency from a non top-level context",
);
}
match graph.entry(package_id.clone()) {
match graph
.entry(name.clone())
.or_default()
.entry(version.clone())
{
Entry::Vacant(entry) => {
entry.insert(node);
}
@ -34,7 +40,7 @@ fn insert_node(
match (&current_node.direct, &node.direct) {
(Some(_), Some(_)) => {
tracing::warn!("duplicate direct dependency for {package_id}");
tracing::warn!("duplicate direct dependency for {name}@{version}");
}
(None, Some(_)) => {
@ -57,7 +63,7 @@ impl Project {
pub async fn dependency_graph(
&self,
previous_graph: Option<&DependencyGraph>,
refreshed_sources: RefreshedSources,
refreshed_sources: &mut HashSet<PackageSources>,
// used by the `x` command - if true, specifier indices are expected to be URLs; peer dependency checks are skipped
is_published_package: bool,
) -> Result<DependencyGraph, Box<errors::DependencyGraphError>> {
@ -66,12 +72,9 @@ impl Project {
.await
.map_err(|e| Box::new(e.into()))?;
let all_current_dependencies = manifest
let mut all_specifiers = manifest
.all_dependencies()
.map_err(|e| Box::new(e.into()))?;
let mut all_specifiers = all_current_dependencies
.clone()
.map_err(|e| Box::new(e.into()))?
.into_iter()
.map(|(alias, (spec, ty))| ((spec, ty), alias))
.collect::<HashMap<_, _>>();
@ -79,7 +82,8 @@ impl Project {
let mut graph = DependencyGraph::default();
if let Some(previous_graph) = previous_graph {
for (package_id, node) in previous_graph {
for (name, versions) in previous_graph {
for (version, node) in versions {
let Some((old_alias, specifier, source_ty)) = &node.direct else {
// this is not a direct dependency, will be added if it's still being used later
continue;
@ -90,20 +94,22 @@ impl Project {
continue;
}
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty)) else {
let Some(alias) = all_specifiers.remove(&(specifier.clone(), *source_ty))
else {
tracing::debug!(
"dependency {package_id} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
"dependency {name}@{version} (old alias {old_alias}) from old dependency graph is no longer in the manifest",
);
continue;
};
let span = tracing::info_span!("resolve from old graph", alias = alias.as_str());
let span = tracing::info_span!("resolve from old graph", alias);
let _guard = span.enter();
tracing::debug!("resolved {package_id} from old dependency graph");
tracing::debug!("resolved {}@{} from old dependency graph", name, version);
insert_node(
&mut graph,
package_id,
name.clone(),
version.clone(),
DependencyGraphNode {
direct: Some((alias.clone(), specifier.clone(), *source_ty)),
..node.clone()
@ -114,30 +120,39 @@ impl Project {
let mut queue = node
.dependencies
.iter()
.map(|(id, dep_alias)| (id, vec![alias.to_string(), dep_alias.to_string()]))
.map(|(name, (version, dep_alias))| {
(
name,
version,
vec![alias.to_string(), dep_alias.to_string()],
)
})
.collect::<VecDeque<_>>();
while let Some((dep_id, path)) = queue.pop_front() {
while let Some((dep_name, dep_version, path)) = queue.pop_front() {
let inner_span =
tracing::info_span!("resolve dependency", path = path.join(">"));
let _inner_guard = inner_span.enter();
if let Some(dep_node) = previous_graph.get(dep_id) {
tracing::debug!("resolved sub-dependency {dep_id}");
if graph.contains_key(dep_id) {
tracing::debug!(
"sub-dependency {dep_id} already resolved in new graph",
if let Some(dep_node) = previous_graph
.get(dep_name)
.and_then(|v| v.get(dep_version))
{
tracing::debug!("resolved sub-dependency {dep_name}@{dep_version}");
insert_node(
&mut graph,
dep_name.clone(),
dep_version.clone(),
dep_node.clone(),
false,
);
continue;
}
insert_node(&mut graph, dep_id, dep_node.clone(), false);
dep_node
.dependencies
.iter()
.map(|(id, alias)| {
.map(|(name, (version, alias))| {
(
id,
name,
version,
path.iter()
.cloned()
.chain(std::iter::once(alias.to_string()))
@ -146,7 +161,10 @@ impl Project {
})
.for_each(|dep| queue.push_back(dep));
} else {
tracing::warn!("dependency {dep_id} not found in previous graph");
tracing::warn!(
"dependency {dep_name}@{dep_version} not found in previous graph"
);
}
}
}
}
@ -158,21 +176,17 @@ impl Project {
(
spec,
ty,
None::<PackageId>,
vec![alias],
None::<(PackageNames, VersionId)>,
vec![alias.to_string()],
false,
manifest.target.kind(),
)
})
.collect::<VecDeque<_>>();
let refresh_options = RefreshOptions {
project: self.clone(),
};
while let Some((specifier, ty, dependant, path, overridden, target)) = queue.pop_front() {
async {
let alias = path.last().unwrap();
let alias = path.last().unwrap().clone();
let depth = path.len() - 1;
tracing::debug!("resolving {specifier} ({ty:?})");
@ -189,7 +203,10 @@ impl Project {
))?
.clone()
} else {
specifier.index.as_deref().unwrap()
let index_url = specifier.index.clone().unwrap();
index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
@ -210,7 +227,10 @@ impl Project {
))?
.clone()
} else {
specifier.index.as_deref().unwrap()
let index_url = specifier.index.clone().unwrap();
index_url
.clone()
.try_into()
// specifiers in indices store the index url in this field
.unwrap()
@ -224,33 +244,28 @@ impl Project {
DependencySpecifiers::Workspace(_) => {
PackageSources::Workspace(crate::source::workspace::WorkspacePackageSource)
}
DependencySpecifiers::Path(_) => {
PackageSources::Path(crate::source::path::PathPackageSource)
}
};
refreshed_sources.refresh(
&source,
&refresh_options,
)
.await
.map_err(|e| Box::new(e.into()))?;
if refreshed_sources.insert(source.clone()) {
source.refresh(self).await.map_err(|e| Box::new(e.into()))?;
}
let (name, resolved) = source
.resolve(&specifier, &ResolveOptions {
project: self.clone(),
target,
refreshed_sources: refreshed_sources.clone(),
})
.resolve(&specifier, self, target, refreshed_sources)
.await
.map_err(|e| Box::new(e.into()))?;
let Some(package_id) = graph
let Some(target_version_id) = graph
.get(&name)
.and_then(|versions| {
versions
.keys()
.filter(|id| *id.name() == name && resolved.contains_key(id.version_id()))
// only consider versions that are compatible with the specifier
.filter(|ver| resolved.contains_key(ver))
.max()
})
.or_else(|| resolved.last_key_value().map(|(ver, _)| ver))
.cloned()
.or_else(|| resolved.last_key_value().map(|(ver, _)| PackageId::new(name, ver.clone())))
else {
return Err(Box::new(errors::DependencyGraphError::NoMatchingVersion(
format!("{specifier} ({target})"),
@ -264,22 +279,33 @@ impl Project {
ty
};
if let Some(dependant_id) = dependant {
if let Some((dependant_name, dependant_version_id)) = dependant {
graph
.get_mut(&dependant_id)
.expect("dependant package not found in graph")
.dependencies
.insert(package_id.clone(), alias.clone());
.get_mut(&dependant_name)
.and_then(|versions| versions.get_mut(&dependant_version_id))
.and_then(|node| {
node.dependencies
.insert(name.clone(), (target_version_id.clone(), alias.clone()))
});
}
let pkg_ref = &resolved[package_id.version_id()];
let pkg_ref = &resolved[&target_version_id];
if let Some(already_resolved) = graph.get_mut(&package_id) {
tracing::debug!("{package_id} already resolved");
if let Some(already_resolved) = graph
.get_mut(&name)
.and_then(|versions| versions.get_mut(&target_version_id))
{
tracing::debug!(
"{}@{} already resolved",
name,
target_version_id
);
if std::mem::discriminant(&already_resolved.pkg_ref) != std::mem::discriminant(pkg_ref) {
if std::mem::discriminant(&already_resolved.pkg_ref)
!= std::mem::discriminant(pkg_ref)
{
tracing::warn!(
"resolved package {package_id} has a different source than previously resolved one, this may cause issues",
"resolved package {name}@{target_version_id} has a different source than previously resolved one, this may cause issues",
);
}
@ -315,12 +341,17 @@ impl Project {
};
insert_node(
&mut graph,
&package_id,
node,
name.clone(),
target_version_id.clone(),
node.clone(),
depth == 0,
);
tracing::debug!("resolved {package_id} from new dependency graph");
tracing::debug!(
"resolved {}@{} from new dependency graph",
name,
target_version_id
);
for (dependency_alias, (dependency_spec, dependency_ty)) in
pkg_ref.dependencies().clone()
@ -346,7 +377,7 @@ impl Project {
tracing::debug!(
"overridden specifier found for {} ({dependency_spec})",
path.iter()
.map(Alias::as_str)
.map(|s| s.as_str())
.chain(std::iter::once(dependency_alias.as_str()))
.collect::<Vec<_>>()
.join(">"),
@ -354,38 +385,33 @@ impl Project {
}
queue.push_back((
match overridden {
Some(OverrideSpecifier::Specifier(spec)) => spec.clone(),
Some(OverrideSpecifier::Alias(alias)) => all_current_dependencies.get(alias)
.map(|(spec, _)| spec)
.ok_or_else(|| errors::DependencyGraphError::AliasNotFound(alias.clone()))?
.clone(),
None => dependency_spec,
},
overridden.cloned().unwrap_or(dependency_spec),
dependency_ty,
Some(package_id.clone()),
Some((name.clone(), target_version_id.clone())),
path.iter()
.cloned()
.chain(std::iter::once(dependency_alias))
.collect(),
overridden.is_some(),
package_id.version_id().target(),
*target_version_id.target(),
));
}
Ok(())
}
.instrument(tracing::info_span!("resolve new/changed", path = path.iter().map(Alias::as_str).collect::<Vec<_>>().join(">")))
.instrument(tracing::info_span!("resolve new/changed", path = path.join(">")))
.await?;
}
for (id, node) in &mut graph {
for (name, versions) in &mut graph {
for (version_id, node) in versions {
if node.is_peer && node.direct.is_none() {
node.resolved_ty = DependencyType::Peer;
}
if node.resolved_ty == DependencyType::Peer {
tracing::warn!("peer dependency {id} was not resolved");
tracing::warn!("peer dependency {name}@{version_id} was not resolved");
}
}
}
@ -395,7 +421,6 @@ impl Project {
/// Errors that can occur when resolving dependencies
pub mod errors {
use crate::manifest::Alias;
use thiserror::Error;
/// Errors that can occur when creating a dependency graph
@ -430,9 +455,5 @@ pub mod errors {
/// No matching version was found for a specifier
#[error("no matching version found for {0}")]
NoMatchingVersion(String),
/// An alias for an override was not found in the manifest
#[error("alias `{0}` not found in manifest")]
AliasNotFound(Alias),
}
}

View file

@ -1,9 +1,8 @@
use crate::Project;
use futures::FutureExt;
use std::{
ffi::OsStr,
fmt::{Debug, Display, Formatter},
path::PathBuf,
path::Path,
process::Stdio,
};
use tokio::{
@ -32,57 +31,14 @@ impl Display for ScriptName {
}
}
/// Finds a script in the project, whether in the current package or its workspace
pub async fn find_script(
project: &Project,
#[instrument(skip(project), level = "debug")]
pub(crate) async fn execute_script<A: IntoIterator<Item = S> + Debug, S: AsRef<OsStr> + Debug>(
script_name: ScriptName,
) -> Result<Option<PathBuf>, errors::FindScriptError> {
let script_name_str = script_name.to_string();
let script_path = match project
.deser_manifest()
.await?
.scripts
.remove(&script_name_str)
{
Some(script) => script.to_path(project.package_dir()),
None => match project
.deser_workspace_manifest()
.await?
.and_then(|mut manifest| manifest.scripts.remove(&script_name_str))
{
Some(script) => script.to_path(project.workspace_dir().unwrap()),
None => {
return Ok(None);
}
},
};
Ok(Some(script_path))
}
#[allow(unused_variables)]
pub(crate) trait ExecuteScriptHooks {
fn not_found(&self, script: ScriptName) {}
}
#[instrument(skip(project, hooks), level = "debug")]
pub(crate) async fn execute_script<
A: IntoIterator<Item = S> + Debug,
S: AsRef<OsStr> + Debug,
H: ExecuteScriptHooks,
>(
script_name: ScriptName,
project: &Project,
hooks: H,
script_path: &Path,
args: A,
project: &Project,
return_stdout: bool,
) -> Result<Option<String>, errors::ExecuteScriptError> {
let Some(script_path) = find_script(project, script_name).await? else {
hooks.not_found(script_name);
return Ok(None);
};
) -> Result<Option<String>, std::io::Error> {
match Command::new("lune")
.arg("run")
.arg(script_path.as_os_str())
@ -98,32 +54,39 @@ pub(crate) async fn execute_script<
let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines();
let mut stderr = BufReader::new(child.stderr.take().unwrap()).lines();
let script = script_name.to_string();
let script_2 = script.to_string();
tokio::spawn(async move {
while let Some(line) = stderr.next_line().await.transpose() {
match line {
Ok(line) => {
tracing::error!("[{script}]: {line}");
}
Err(e) => {
tracing::error!("ERROR IN READING STDERR OF {script}: {e}");
break;
}
}
}
});
let mut stdout_str = String::new();
loop {
tokio::select! {
Some(line) = stdout.next_line().map(Result::transpose) => match line {
while let Some(line) = stdout.next_line().await.transpose() {
match line {
Ok(line) => {
if return_stdout {
stdout_str.push_str(&line);
stdout_str.push('\n');
} else {
tracing::info!("[{script_name}]: {line}");
tracing::info!("[{script_2}]: {line}");
}
}
Err(e) => {
tracing::error!("ERROR IN READING STDOUT OF {script_name}: {e}");
tracing::error!("ERROR IN READING STDOUT OF {script_2}: {e}");
break;
}
},
Some(line) = stderr.next_line().map(Result::transpose) => match line {
Ok(line) => {
tracing::error!("[{script_name}]: {line}");
}
Err(e) => {
tracing::error!("ERROR IN READING STDERR OF {script_name}: {e}");
}
},
else => break,
}
}
@ -138,35 +101,6 @@ pub(crate) async fn execute_script<
Ok(None)
}
Err(e) => Err(e.into()),
}
}
/// Errors that can occur when using scripts
pub mod errors {
use thiserror::Error;
/// Errors that can occur when finding a script
#[derive(Debug, Error)]
pub enum FindScriptError {
/// Reading the manifest failed
#[error("error reading manifest")]
ManifestRead(#[from] crate::errors::ManifestReadError),
/// An IO error occurred
#[error("IO error")]
Io(#[from] std::io::Error),
}
/// Errors which can occur while executing a script
#[derive(Debug, Error)]
pub enum ExecuteScriptError {
/// Finding the script failed
#[error("finding the script failed")]
FindScript(#[from] FindScriptError),
/// An IO error occurred
#[error("IO error")]
Io(#[from] std::io::Error),
Err(e) => Err(e),
}
}
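The new side of this file's diff multiplexes stdout and stderr with `tokio::select!` instead of spawning a dedicated stderr task. A self-contained sketch of that pattern (script path and logging simplified; not pesde's exact code):

use std::process::Stdio;
use tokio::io::{AsyncBufReadExt, BufReader};
use tokio::process::Command;

async fn run_and_stream() -> std::io::Result<()> {
    let mut child = Command::new("lune")
        .arg("run")
        .arg("script.luau") // hypothetical script path
        .stdout(Stdio::piped())
        .stderr(Stdio::piped())
        .spawn()?;

    let mut stdout = BufReader::new(child.stdout.take().unwrap()).lines();
    let mut stderr = BufReader::new(child.stderr.take().unwrap()).lines();

    loop {
        tokio::select! {
            Ok(Some(line)) = stdout.next_line() => println!("[out] {line}"),
            Ok(Some(line)) = stderr.next_line() => eprintln!("[err] {line}"),
            // both branches failed to match (EOF or read error): stop
            else => break,
        }
    }

    child.wait().await?;
    Ok(())
}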

Some files were not shown because too many files have changed in this diff.